Repository: hacspec/hax Branch: main Commit: ad110bfa7a25 Files: 1353 Total size: 7.2 MB Directory structure: gitextract_cva_42qr/ ├── .cargo/ │ └── config.toml ├── .docker/ │ └── Dockerfile ├── .dockerignore ├── .envrc ├── .github/ │ ├── assets/ │ │ └── change-padding.sh │ └── workflows/ │ ├── bertie.yml │ ├── changelog.yml │ ├── clippy_rust_engine.yml │ ├── extract_and_run_coq.yml │ ├── flake_lock.yml │ ├── format.yml │ ├── gh_pages.yml │ ├── install_and_test.yml │ ├── licenses.yml │ ├── mldsa.yml │ ├── mlkem.yml │ ├── playwright-docs.yml │ ├── release.yml │ ├── rustc-coverage-tests.yml │ ├── stale.yml │ ├── test.yml │ ├── test_installs.yml │ └── this-month-in-hax.yml ├── .gitignore ├── .utils/ │ ├── jq_utils.jq │ ├── rebuild.sh │ ├── rust-by-example.js │ └── this-month-in-hax-skeleton.sh ├── CHANGELOG.md ├── CI.md ├── CODEOWNERS ├── CONTRIBUTING.md ├── Cargo.toml ├── LICENSE ├── PUBLISHING.md ├── README.md ├── cli/ │ ├── default.nix │ ├── driver/ │ │ ├── Cargo.toml │ │ └── src/ │ │ ├── callbacks_wrapper.rs │ │ ├── driver.rs │ │ ├── exporter.rs │ │ └── features.rs │ └── subcommands/ │ ├── Cargo.toml │ ├── build.rs │ └── src/ │ ├── cargo_hax.rs │ ├── engine_debug_webapp/ │ │ ├── README.md │ │ ├── mod.rs │ │ └── static/ │ │ ├── index.html │ │ └── script.js │ └── json_schema.rs ├── deny.toml ├── dependabot.yml ├── docs/ │ ├── .test/ │ │ ├── .gitignore │ │ ├── global-setup.ts │ │ ├── package.json │ │ ├── playwright.config.ts │ │ └── tests/ │ │ └── docs.spec.ts │ ├── RFCs/ │ │ ├── .nav.yml │ │ ├── 0000-template.md │ │ └── index.md │ ├── blog/ │ │ ├── .authors.yml │ │ ├── index.md │ │ └── posts/ │ │ ├── announce-v0.1.md │ │ ├── hax-for-everyone.md │ │ ├── lucas-departure.md │ │ ├── reworking-names/ │ │ │ └── reworking-names.md │ │ ├── rust-gcd-1.md │ │ ├── rust-gcd-2.md │ │ └── this-month-in-hax/ │ │ ├── 2025-01.md │ │ ├── 2025-02.md │ │ ├── 2025-03.md │ │ ├── 2025-04.md │ │ ├── 2025-05.md │ │ ├── 2025-06.md │ │ ├── 2025-07.md │ │ ├── 2025-08.md │ │ ├── 2025-09.md │ │ 
├── 2025-10.md │ │ ├── 2025-11.md │ │ ├── 2026-01.md │ │ ├── 2026-02.md │ │ ├── 2026-03.md │ │ └── 2026-04.md │ ├── default.nix │ ├── dev/ │ │ ├── architecture.md │ │ ├── ast_ebnf.md │ │ ├── docs.md │ │ ├── index.md │ │ └── libraries_macros.md │ ├── engine/ │ │ ├── index.md │ │ └── toolchain_structure/ │ │ └── index.md │ ├── frontend/ │ │ ├── evaluation.md │ │ └── index.md │ ├── index.md │ ├── javascripts/ │ │ ├── ansi_up.js │ │ ├── fstar.js │ │ ├── hax_playground.js │ │ └── lz-string.js │ ├── manual/ │ │ ├── faq/ │ │ │ ├── include-flags.md │ │ │ ├── index.md │ │ │ └── into.md │ │ ├── fstar/ │ │ │ ├── .nav.yml │ │ │ ├── index.md │ │ │ ├── quick_start.md │ │ │ └── tutorial/ │ │ │ ├── data-invariants.md │ │ │ ├── index.md │ │ │ ├── panic-freedom.md │ │ │ ├── proofs/ │ │ │ │ └── fstar/ │ │ │ │ └── extraction/ │ │ │ │ ├── Makefile │ │ │ │ ├── Tutorial_src.Math.Lemmas.fst │ │ │ │ └── Tutorial_src.fst │ │ │ └── properties.md │ │ ├── index.md │ │ └── lean/ │ │ ├── index.md │ │ ├── internals.md │ │ ├── quick_start.md │ │ └── tutorial/ │ │ ├── index.md │ │ ├── panic-freedom.md │ │ └── properties.md │ ├── overrides/ │ │ └── main.html │ ├── publications.md │ └── stylesheets/ │ ├── hax_playground.css │ ├── logo.css │ └── tags-colors.css ├── engine/ │ ├── .ocamlformat │ ├── DEV.md │ ├── backends/ │ │ ├── coq/ │ │ │ ├── coq/ │ │ │ │ ├── coq_backend.ml │ │ │ │ ├── coq_backend.mli │ │ │ │ └── dune │ │ │ ├── coq_ast.ml │ │ │ ├── dune │ │ │ └── ssprove/ │ │ │ ├── dune │ │ │ ├── ssprove_backend.ml │ │ │ └── ssprove_backend.mli │ │ ├── easycrypt/ │ │ │ ├── dune │ │ │ ├── easycrypt_backend.ml │ │ │ └── easycrypt_backend.mli │ │ ├── fstar/ │ │ │ ├── dune │ │ │ ├── fstar-surface-ast/ │ │ │ │ ├── .gitignore │ │ │ │ ├── .ocamlformat-ignore │ │ │ │ ├── FStar_BaseTypes.ml │ │ │ │ ├── FStar_Char.ml │ │ │ │ ├── FStar_Compiler_Effect.ml │ │ │ │ ├── FStar_Compiler_List.ml │ │ │ │ ├── FStar_Compiler_Range.ml │ │ │ │ ├── FStar_Compiler_Util.ml │ │ │ │ ├── FStar_Const.ml │ │ │ │ ├── FStar_Errors.ml 
│ │ │ │ ├── FStar_Errors_Codes.ml │ │ │ │ ├── FStar_Getopt.ml │ │ │ │ ├── FStar_Ident.ml │ │ │ │ ├── FStar_ImmutableArray_Base.ml │ │ │ │ ├── FStar_List.ml │ │ │ │ ├── FStar_Parser_AST.ml │ │ │ │ ├── FStar_Parser_AST_Util.ml │ │ │ │ ├── FStar_Parser_Const.ml │ │ │ │ ├── FStar_Parser_Driver.ml │ │ │ │ ├── FStar_Parser_LexFStar.ml │ │ │ │ ├── FStar_Parser_Parse.ml │ │ │ │ ├── FStar_Parser_ParseIt.ml │ │ │ │ ├── FStar_Parser_ToDocument.ml │ │ │ │ ├── FStar_Parser_Utf8.ml │ │ │ │ ├── FStar_Parser_Util.ml │ │ │ │ ├── FStar_Pervasives.ml │ │ │ │ ├── FStar_Pervasives_Native.ml │ │ │ │ ├── FStar_Pprint.ml │ │ │ │ ├── FStar_Sedlexing.ml │ │ │ │ ├── FStar_String.ml │ │ │ │ ├── FStar_VConfig.ml │ │ │ │ ├── README │ │ │ │ ├── dune │ │ │ │ ├── prims.ml │ │ │ │ └── z.ml │ │ │ ├── fstar_ast.ml │ │ │ ├── fstar_backend.ml │ │ │ └── fstar_backend.mli │ │ ├── lean/ │ │ │ ├── dune │ │ │ └── lean_backend.ml │ │ └── proverif/ │ │ ├── dune │ │ ├── proverif_backend.ml │ │ └── proverif_backend.mli │ ├── bin/ │ │ ├── dune │ │ ├── dune-js │ │ ├── js_driver.ml │ │ ├── js_stubs/ │ │ │ ├── mutex.js │ │ │ ├── stdint.js │ │ │ └── unix.js │ │ ├── lib.ml │ │ ├── lib.mli │ │ └── native_driver.ml │ ├── default.nix │ ├── doc/ │ │ ├── dune │ │ └── index.mld │ ├── dune-project │ ├── hax-engine.opam │ ├── hax-engine.opam.template │ ├── lib/ │ │ ├── analyses/ │ │ │ ├── function_dependency.ml │ │ │ └── mutable_variables.ml │ │ ├── analyses.ml │ │ ├── ast.ml │ │ ├── ast_builder.ml │ │ ├── ast_destruct.ml │ │ ├── ast_utils.ml │ │ ├── attr_payloads.ml │ │ ├── backend.ml │ │ ├── concrete_ident/ │ │ │ ├── concrete_ident.ml │ │ │ ├── concrete_ident.mli │ │ │ ├── concrete_ident_render_sig.ml │ │ │ ├── concrete_ident_types.ml │ │ │ ├── concrete_ident_view.ml │ │ │ ├── concrete_ident_view.mli │ │ │ ├── concrete_ident_view_types.ml │ │ │ ├── explicit_def_id.ml │ │ │ ├── explicit_def_id.mli │ │ │ ├── impl_infos.ml │ │ │ └── thir_simple_types.ml │ │ ├── dependencies.ml │ │ ├── dependencies.mli │ │ ├── 
deprecated_generic_printer/ │ │ │ ├── deprecated_generic_printer.ml │ │ │ ├── deprecated_generic_printer.mli │ │ │ └── deprecated_generic_printer_base.ml │ │ ├── diagnostics.ml │ │ ├── dune │ │ ├── export_ast.ml │ │ ├── feature_gate.ml │ │ ├── features.ml │ │ ├── generic_printer/ │ │ │ ├── generic_printer.ml │ │ │ ├── generic_printer_template.generate.js │ │ │ └── generic_printer_template.ml │ │ ├── hax_io.ml │ │ ├── import_ast.ml │ │ ├── import_thir.ml │ │ ├── import_thir.mli │ │ ├── local_ident.ml │ │ ├── local_ident.mli │ │ ├── phase_utils.ml │ │ ├── phases/ │ │ │ ├── phase_and_mut_defsite.ml │ │ │ ├── phase_and_mut_defsite.mli │ │ │ ├── phase_bundle_cycles.ml │ │ │ ├── phase_bundle_cycles.mli │ │ │ ├── phase_cf_into_monads.ml │ │ │ ├── phase_cf_into_monads.mli │ │ │ ├── phase_direct_and_mut.ml │ │ │ ├── phase_direct_and_mut.mli │ │ │ ├── phase_drop_blocks.ml │ │ │ ├── phase_drop_blocks.mli │ │ │ ├── phase_drop_match_guards.ml │ │ │ ├── phase_drop_match_guards.mli │ │ │ ├── phase_drop_references.ml │ │ │ ├── phase_drop_references.mli │ │ │ ├── phase_drop_return_break_continue.ml │ │ │ ├── phase_drop_return_break_continue.mli │ │ │ ├── phase_drop_sized_trait.ml │ │ │ ├── phase_drop_sized_trait.mli │ │ │ ├── phase_explicit_conversions.ml │ │ │ ├── phase_explicit_conversions.mli │ │ │ ├── phase_functionalize_loops.ml │ │ │ ├── phase_functionalize_loops.mli │ │ │ ├── phase_hoist_disjunctive_patterns.ml │ │ │ ├── phase_hoist_disjunctive_patterns.mli │ │ │ ├── phase_local_mutation.ml │ │ │ ├── phase_local_mutation.mli │ │ │ ├── phase_newtype_as_refinement.ml │ │ │ ├── phase_newtype_as_refinement.mli │ │ │ ├── phase_reconstruct_asserts.ml │ │ │ ├── phase_reconstruct_asserts.mli │ │ │ ├── phase_reconstruct_for_index_loops.ml │ │ │ ├── phase_reconstruct_for_index_loops.mli │ │ │ ├── phase_reconstruct_for_loops.ml │ │ │ ├── phase_reconstruct_for_loops.mli │ │ │ ├── phase_reconstruct_question_marks.ml │ │ │ ├── phase_reconstruct_question_marks.mli │ │ │ ├── 
phase_reconstruct_while_loops.ml │ │ │ ├── phase_reconstruct_while_loops.mli │ │ │ ├── phase_reject.ml │ │ │ ├── phase_reject_impl_type_method.ml │ │ │ ├── phase_reject_impl_type_method.mli │ │ │ ├── phase_reorder_fields.ml │ │ │ ├── phase_reorder_fields.mli │ │ │ ├── phase_rewrite_control_flow.ml │ │ │ ├── phase_rewrite_control_flow.mli │ │ │ ├── phase_rewrite_local_self.ml │ │ │ ├── phase_rewrite_local_self.mli │ │ │ ├── phase_simplify_hoisting.ml │ │ │ ├── phase_simplify_hoisting.mli │ │ │ ├── phase_simplify_match_return.ml │ │ │ ├── phase_simplify_match_return.mli │ │ │ ├── phase_simplify_question_marks.ml │ │ │ ├── phase_simplify_question_marks.mli │ │ │ ├── phase_sort_items.ml │ │ │ ├── phase_sort_items.mli │ │ │ ├── phase_sort_items_namespace_wise.ml │ │ │ ├── phase_sort_items_namespace_wise.mli │ │ │ ├── phase_specialize.ml │ │ │ ├── phase_specialize.mli │ │ │ ├── phase_traits_specs.ml │ │ │ ├── phase_traits_specs.mli │ │ │ ├── phase_transform_hax_lib_inline.ml │ │ │ ├── phase_transform_hax_lib_inline.mli │ │ │ ├── phase_trivialize_assign_lhs.ml │ │ │ └── phase_trivialize_assign_lhs.mli │ │ ├── phases.ml │ │ ├── prelude.ml │ │ ├── print_rust.ml │ │ ├── print_rust.mli │ │ ├── profiling.ml │ │ ├── rust_engine_types.ml │ │ ├── side_effect_utils.ml │ │ ├── span.ml │ │ ├── span.mli │ │ ├── subtype.ml │ │ ├── untyped_phases/ │ │ │ ├── gen.js │ │ │ └── untyped_phases.ml │ │ └── utils.ml │ ├── names/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── extract/ │ │ │ ├── Cargo.toml │ │ │ ├── build.rs │ │ │ └── src/ │ │ │ └── main.rs │ │ └── src/ │ │ ├── crypto_abstractions.rs │ │ └── lib.rs │ └── utils/ │ ├── generate_from_ast/ │ │ ├── README.md │ │ ├── codegen_ast_builder.ml │ │ ├── codegen_ast_destruct.ml │ │ ├── codegen_printer.ml │ │ ├── codegen_visitor.ml │ │ ├── dune │ │ ├── errors.ml │ │ ├── generate_from_ast.ml │ │ ├── primitive_types.ml │ │ ├── types.ml │ │ ├── utils.ml │ │ └── visitors.ml │ ├── hacspeclib-macro-parser/ │ │ ├── dune │ │ └── 
hacspeclib_macro_parser.ml │ ├── ocaml_of_json_schema/ │ │ └── ocaml_of_json_schema.js │ ├── ppx_functor_application/ │ │ ├── README.md │ │ ├── dune │ │ └── ppx_functor_application.ml │ ├── ppx_generate_features/ │ │ ├── README.md │ │ ├── dune │ │ └── ppx_generate_features.ml │ ├── ppx_inline/ │ │ ├── README.md │ │ ├── dune │ │ └── ppx_inline.ml │ ├── ppx_phases_index/ │ │ ├── README.md │ │ ├── dune │ │ └── ppx_phases_index.ml │ ├── sourcemaps/ │ │ ├── base64.ml │ │ ├── dune │ │ ├── location.ml │ │ ├── mappings/ │ │ │ ├── dual.ml │ │ │ ├── instruction.ml │ │ │ ├── mappings.ml │ │ │ ├── mappings.mli │ │ │ ├── spanned.ml │ │ │ └── types.ml │ │ ├── prelude.ml │ │ ├── source_maps.ml │ │ ├── source_maps.mli │ │ └── vql.ml │ └── universe-hash.sh ├── examples/ │ ├── .envrc │ ├── .gitignore │ ├── Cargo.toml │ ├── Makefile │ ├── README.md │ ├── barrett/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ ├── fstar/ │ │ │ │ └── extraction/ │ │ │ │ └── Makefile │ │ │ ├── lean/ │ │ │ │ ├── lakefile.toml │ │ │ │ └── lean-toolchain │ │ │ └── rust/ │ │ │ └── extraction/ │ │ │ └── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ ├── chacha20/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ ├── coq/ │ │ │ │ └── extraction/ │ │ │ │ ├── Chacha20.Hacspec_helper.v │ │ │ │ └── Chacha20.v │ │ │ └── fstar/ │ │ │ └── extraction/ │ │ │ ├── Chacha20.Hacspec_helper.fst │ │ │ ├── Chacha20.fst │ │ │ └── Makefile │ │ ├── src/ │ │ │ ├── hacspec_helper.rs │ │ │ └── lib.rs │ │ └── tests/ │ │ └── kat.rs │ ├── commonArgs.nix │ ├── coq-example/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── proofs/ │ │ │ └── coq/ │ │ │ └── extraction/ │ │ │ ├── Coq_example.v │ │ │ ├── Coq_example_Dummy_core_lib.v │ │ │ ├── Coq_proofs.v │ │ │ ├── Makefile │ │ │ └── dummy_core_lib.v │ │ └── src/ │ │ ├── dummy_core_lib.rs │ │ └── lib.rs │ ├── coverage/ │ │ ├── Cargo.toml │ │ ├── default.nix │ │ └── src/ │ │ ├── lib.rs │ │ ├── test_arrays.rs │ │ ├── test_closures.rs │ │ ├── test_enum.rs │ │ ├── test_functions.rs │ │ 
├── test_instance.rs │ │ ├── test_primitives.rs │ │ ├── test_sequence.rs │ │ ├── test_struct.rs │ │ └── test_trait.rs │ ├── default.nix │ ├── hax.fst.config.json │ ├── kyber_compress/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ └── fstar/ │ │ │ └── extraction/ │ │ │ └── Makefile │ │ └── src/ │ │ └── lib.rs │ ├── lean_adc/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ └── lean/ │ │ │ ├── lake-manifest.json │ │ │ ├── lakefile.toml │ │ │ └── lean-toolchain │ │ └── src/ │ │ └── lib.rs │ ├── lean_barrett/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ └── lean/ │ │ │ ├── lake-manifest.json │ │ │ ├── lakefile.toml │ │ │ └── lean-toolchain │ │ └── src/ │ │ └── lib.rs │ ├── lean_chacha20/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ └── lean/ │ │ │ ├── lake-manifest.json │ │ │ ├── lakefile.toml │ │ │ └── lean-toolchain │ │ └── src/ │ │ ├── hacspec_helper.rs │ │ └── lib.rs │ ├── lean_tutorial/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── proofs/ │ │ │ └── lean/ │ │ │ ├── lake-manifest.json │ │ │ ├── lakefile.toml │ │ │ └── lean-toolchain │ │ └── src/ │ │ └── lib.rs │ ├── limited-order-book/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── README.md │ │ ├── lob_backend.did │ │ ├── proofs/ │ │ │ ├── coq/ │ │ │ │ └── extraction/ │ │ │ │ └── Lob_backend.v │ │ │ └── fstar/ │ │ │ └── extraction/ │ │ │ ├── Lob_backend.fst │ │ │ └── Makefile │ │ └── src/ │ │ ├── canister.rs │ │ └── lib.rs │ ├── proverif-psk/ │ │ ├── Cargo.toml │ │ ├── Makefile │ │ ├── Readme.md │ │ ├── proofs/ │ │ │ └── proverif/ │ │ │ └── analysis.pv │ │ ├── psk.pv │ │ ├── pv_div_by_zero_fix.diff │ │ └── src/ │ │ └── lib.rs │ └── sha256/ │ ├── .gitignore │ ├── Cargo.toml │ ├── Makefile │ ├── proofs/ │ │ ├── coq/ │ │ │ └── extraction/ │ │ │ └── Sha256.v │ │ └── fstar/ │ │ └── extraction/ │ │ ├── Makefile │ │ └── Sha256.fst │ ├── src/ │ │ └── sha256.rs │ └── tests/ │ └── test_sha256.rs ├── flake.nix ├── frontend/ │ └── exporter/ │ ├── Cargo.toml │ ├── README.md │ ├── 
adt-into/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── src/ │ │ │ └── lib.rs │ │ └── tests/ │ │ └── lib.rs │ ├── default.nix │ ├── options/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ └── src/ │ ├── body.rs │ ├── comments.rs │ ├── constant_utils/ │ │ └── uneval.rs │ ├── constant_utils.rs │ ├── deterministic_hash.rs │ ├── id_table.rs │ ├── index_vec.rs │ ├── lib.rs │ ├── prelude.rs │ ├── rustc_utils.rs │ ├── sinto.rs │ ├── state.rs │ ├── traits/ │ │ ├── resolution.rs │ │ └── utils.rs │ ├── traits.rs │ ├── types/ │ │ ├── attributes.rs │ │ ├── def_id.rs │ │ ├── hir.rs │ │ ├── mir.rs │ │ ├── mod.rs │ │ ├── new/ │ │ │ ├── full_def.rs │ │ │ ├── impl_infos.rs │ │ │ ├── item_attributes.rs │ │ │ ├── mod.rs │ │ │ ├── predicate_id.rs │ │ │ ├── synthetic_items.rs │ │ │ └── variant_infos.rs │ │ ├── serialize_int.rs │ │ ├── span.rs │ │ ├── thir.rs │ │ └── ty.rs │ └── utils/ │ ├── error_macros.rs │ ├── mod.rs │ └── type_map.rs ├── hax-bounded-integers/ │ ├── Cargo.toml │ ├── proofs/ │ │ └── fstar/ │ │ └── extraction/ │ │ ├── Hax_bounded_integers.Num_traits.fst │ │ └── Hax_bounded_integers.fst │ └── src/ │ ├── lib.rs │ └── num_traits.rs ├── hax-lib/ │ ├── Cargo.toml │ ├── README.md │ ├── build.rs │ ├── core-models/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── alloc/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── lib.rs │ │ ├── hax.sh │ │ ├── rand_core/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── lib.rs │ │ ├── rust_primitives/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── lib.rs │ │ ├── src/ │ │ │ ├── core/ │ │ │ │ ├── array.rs │ │ │ │ ├── borrow.rs │ │ │ │ ├── clone.rs │ │ │ │ ├── cmp.rs │ │ │ │ ├── convert.rs │ │ │ │ ├── default.rs │ │ │ │ ├── error.rs │ │ │ │ ├── f32.rs │ │ │ │ ├── fmt.rs │ │ │ │ ├── hash.rs │ │ │ │ ├── hint.rs │ │ │ │ ├── iter.rs │ │ │ │ ├── marker.rs │ │ │ │ ├── mem.rs │ │ │ │ ├── num/ │ │ │ │ │ ├── error.rs │ │ │ │ │ └── mod.rs │ │ │ │ ├── ops.rs │ │ │ │ ├── option.rs │ │ │ │ ├── panicking.rs │ │ │ │ ├── result.rs 
│ │ │ │ ├── slice.rs │ │ │ │ └── str.rs │ │ │ └── lib.rs │ │ └── std/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ ├── macros/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── src/ │ │ │ ├── dummy.rs │ │ │ ├── hax_paths.rs │ │ │ ├── impl_fn_decoration.rs │ │ │ ├── implementation.rs │ │ │ ├── lib.rs │ │ │ ├── quote.rs │ │ │ ├── rewrite_self.rs │ │ │ ├── syn_ext.rs │ │ │ └── utils.rs │ │ └── types/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ └── src/ │ │ └── lib.rs │ ├── proof-libs/ │ │ ├── coq/ │ │ │ ├── coq/ │ │ │ │ ├── .gitignore │ │ │ │ ├── default.nix │ │ │ │ └── generated-core/ │ │ │ │ ├── _CoqProject │ │ │ │ ├── phase_library/ │ │ │ │ │ ├── ControlFlow.v │ │ │ │ │ ├── NumberNotation.v │ │ │ │ │ └── TODO.v │ │ │ │ ├── spec/ │ │ │ │ │ ├── Core_Base_Spec.v │ │ │ │ │ ├── Core_Base_Spec_Binary.v │ │ │ │ │ ├── Core_Base_Spec_Binary_Pos.v │ │ │ │ │ ├── Core_Base_Spec_Binary_Positive.v │ │ │ │ │ ├── Core_Base_Spec_Constants.v │ │ │ │ │ ├── Core_Base_Spec_Haxint.v │ │ │ │ │ ├── Core_Base_Spec_Seq.v │ │ │ │ │ ├── Core_Base_Spec_Unary.v │ │ │ │ │ └── Core_Base_Spec_Z.v │ │ │ │ └── src/ │ │ │ │ ├── Core.v │ │ │ │ ├── Core_Array.v │ │ │ │ ├── Core_Array_Iter.v │ │ │ │ ├── Core_Array_Rec_bundle_579704328.v │ │ │ │ ├── Core_Base.v │ │ │ │ ├── Core_Base_Binary.v │ │ │ │ ├── Core_Base_Number_conversion.v │ │ │ │ ├── Core_Base_Pos.v │ │ │ │ ├── Core_Base_Seq.v │ │ │ │ ├── Core_Base_Z.v │ │ │ │ ├── Core_Base_interface.v │ │ │ │ ├── Core_Base_interface_Coerce.v │ │ │ │ ├── Core_Base_interface_Int.v │ │ │ │ ├── Core_Base_interface_Int_I128_proofs.v │ │ │ │ ├── Core_Base_interface_Int_I16_proofs.v │ │ │ │ ├── Core_Base_interface_Int_I32_proofs.v │ │ │ │ ├── Core_Base_interface_Int_I64_proofs.v │ │ │ │ ├── Core_Base_interface_Int_I8_proofs.v │ │ │ │ ├── Core_Base_interface_Int_U128_proofs.v │ │ │ │ ├── Core_Base_interface_Int_U16_proofs.v │ │ │ │ ├── Core_Base_interface_Int_U32_proofs.v │ │ │ │ ├── Core_Base_interface_Int_U64_proofs.v │ │ │ │ ├── Core_Base_interface_Int_U8_proofs.v │ │ 
│ │ ├── Core_Clone.v │ │ │ │ ├── Core_Cmp.v │ │ │ │ ├── Core_Convert.v │ │ │ │ ├── Core_Fmt.v │ │ │ │ ├── Core_Intrinsics.v │ │ │ │ ├── Core_Iter.v │ │ │ │ ├── Core_Iter_Range.v │ │ │ │ ├── Core_Iter_Traits.v │ │ │ │ ├── Core_Iter_Traits_Collect.v │ │ │ │ ├── Core_Iter_Traits_Exact_size.v │ │ │ │ ├── Core_Iter_Traits_Iterator.v │ │ │ │ ├── Core_Iter_Traits_Marker.v │ │ │ │ ├── Core_Marker.v │ │ │ │ ├── Core_Num.v │ │ │ │ ├── Core_Num_Int_macros.v │ │ │ │ ├── Core_Num_Uint_macros.v │ │ │ │ ├── Core_Ops.v │ │ │ │ ├── Core_Ops_Arith.v │ │ │ │ ├── Core_Ops_Arith_Impls_for_prims.v │ │ │ │ ├── Core_Ops_Bit.v │ │ │ │ ├── Core_Ops_Bit_Impls_for_prims.v │ │ │ │ ├── Core_Ops_Function.v │ │ │ │ ├── Core_Ops_Index.v │ │ │ │ ├── Core_Ops_Index_range.v │ │ │ │ ├── Core_Ops_Range.v │ │ │ │ ├── Core_Option.v │ │ │ │ ├── Core_Panicking.v │ │ │ │ ├── Core_Primitive.v │ │ │ │ ├── Core_Primitive_Number_conversion.v │ │ │ │ ├── Core_Primitive_Number_conversion_i.v │ │ │ │ ├── Core_Result.v │ │ │ │ ├── Core_Slice.v │ │ │ │ ├── Core_Slice_Index.v │ │ │ │ ├── Core_Slice_Index_Private_slice_index.v │ │ │ │ ├── Core_Slice_Iter.v │ │ │ │ ├── Core_Slice_Iter_Macros.v │ │ │ │ └── _CoqProject │ │ │ └── ssprove/ │ │ │ ├── .gitignore │ │ │ ├── README.md │ │ │ ├── _CoqProject │ │ │ ├── coq-hacspec-ssprove.opam.template │ │ │ ├── docker_build/ │ │ │ │ └── Dockerfile │ │ │ └── src/ │ │ │ ├── ChoiceEquality.v │ │ │ ├── ConCertLib.v │ │ │ ├── Hacspec_Lib.v │ │ │ ├── Hacspec_Lib_Coercions.v │ │ │ ├── Hacspec_Lib_Comparable.v │ │ │ ├── Hacspec_Lib_Controlflow.v │ │ │ ├── Hacspec_Lib_Eq.v │ │ │ ├── Hacspec_Lib_Integers.v │ │ │ ├── Hacspec_Lib_Loops.v │ │ │ ├── Hacspec_Lib_Ltac.v │ │ │ ├── Hacspec_Lib_Monad.v │ │ │ ├── Hacspec_Lib_Natmod.v │ │ │ ├── Hacspec_Lib_Notation.v │ │ │ ├── Hacspec_Lib_Pre.v │ │ │ ├── Hacspec_Lib_Seq.v │ │ │ ├── Hacspec_Lib_TODO.v │ │ │ ├── LocationUtility.v │ │ │ └── dune │ │ ├── fstar/ │ │ │ ├── .envrc │ │ │ ├── Makefile.copy │ │ │ ├── README.md │ │ │ ├── core/ │ │ │ │ ├── 
Alloc.Alloc.fst │ │ │ │ ├── Alloc.Borrow.fst │ │ │ │ ├── Alloc.Boxed.fst │ │ │ │ ├── Alloc.Collections.Binary_heap.fst │ │ │ │ ├── Alloc.Collections.Btree.Set.fsti │ │ │ │ ├── Alloc.Collections.Vec_deque.fsti │ │ │ │ ├── Alloc.Fmt.fst │ │ │ │ ├── Alloc.Slice.fst │ │ │ │ ├── Alloc.String.fst │ │ │ │ ├── Alloc.Vec.Drain.fst │ │ │ │ ├── Alloc.Vec.Into_iter.fsti │ │ │ │ ├── Alloc.Vec.fst │ │ │ │ ├── Core_models.Array.Iter.fst │ │ │ │ ├── Core_models.Array.fst │ │ │ │ ├── Core_models.Borrow.fsti │ │ │ │ ├── Core_models.Bundle.fst │ │ │ │ ├── Core_models.Clone.fst │ │ │ │ ├── Core_models.Cmp.fst │ │ │ │ ├── Core_models.Convert.fst │ │ │ │ ├── Core_models.Core_arch.Arm_shared.Neon.fsti │ │ │ │ ├── Core_models.Core_arch.X86.Pclmulqdq.fsti │ │ │ │ ├── Core_models.Core_arch.X86.Sse2.fsti │ │ │ │ ├── Core_models.Core_arch.X86.fsti │ │ │ │ ├── Core_models.Core_arch.X86_64_.Sse2.fsti │ │ │ │ ├── Core_models.Core_arch.fsti │ │ │ │ ├── Core_models.Default.fsti │ │ │ │ ├── Core_models.Error.fsti │ │ │ │ ├── Core_models.F32.fst │ │ │ │ ├── Core_models.Fmt.Rt.fsti │ │ │ │ ├── Core_models.Fmt.fsti │ │ │ │ ├── Core_models.Hash.fsti │ │ │ │ ├── Core_models.Hint.fsti │ │ │ │ ├── Core_models.Iter.Adapters.Enumerate.fst │ │ │ │ ├── Core_models.Iter.Adapters.Flat_map.fst │ │ │ │ ├── Core_models.Iter.Adapters.Flatten.fst │ │ │ │ ├── Core_models.Iter.Adapters.Map.fst │ │ │ │ ├── Core_models.Iter.Adapters.Rev.fsti │ │ │ │ ├── Core_models.Iter.Adapters.Step_by.fst │ │ │ │ ├── Core_models.Iter.Adapters.Take.fst │ │ │ │ ├── Core_models.Iter.Adapters.Zip.fst │ │ │ │ ├── Core_models.Iter.Bundle.fst │ │ │ │ ├── Core_models.Iter.Sources.Repeat_with.fsti │ │ │ │ ├── Core_models.Iter.Traits.Collect.fst │ │ │ │ ├── Core_models.Iter.Traits.Iterator.fst │ │ │ │ ├── Core_models.Iter.Traits.fst │ │ │ │ ├── Core_models.Marker.fst │ │ │ │ ├── Core_models.Mem.Manually_drop.fsti │ │ │ │ ├── Core_models.Mem.Maybe_uninit.fsti │ │ │ │ ├── Core_models.Mem.Transmutability.fsti │ │ │ │ ├── Core_models.Mem.fsti │ │ │ 
│ ├── Core_models.Num.Error.fsti │ │ │ │ ├── Core_models.Num.Niche_types.fsti │ │ │ │ ├── Core_models.Num.fst │ │ │ │ ├── Core_models.Ops.Arith.fsti │ │ │ │ ├── Core_models.Ops.Bit.fsti │ │ │ │ ├── Core_models.Ops.Control_flow.fst │ │ │ │ ├── Core_models.Ops.Deref.fst │ │ │ │ ├── Core_models.Ops.Drop.fst │ │ │ │ ├── Core_models.Ops.Function.fst │ │ │ │ ├── Core_models.Ops.Index.IndexMut.fst │ │ │ │ ├── Core_models.Ops.Index.Index_mut.fst │ │ │ │ ├── Core_models.Ops.Index.fst │ │ │ │ ├── Core_models.Ops.Range.fst │ │ │ │ ├── Core_models.Ops.Try_trait.fst │ │ │ │ ├── Core_models.Option.fst │ │ │ │ ├── Core_models.Panicking.Internal.fsti │ │ │ │ ├── Core_models.Panicking.fst │ │ │ │ ├── Core_models.Result.fst │ │ │ │ ├── Core_models.Slice.Iter.fst │ │ │ │ ├── Core_models.Slice.fst │ │ │ │ ├── Core_models.Str.Converts.fsti │ │ │ │ ├── Core_models.Str.Error.fsti │ │ │ │ ├── Core_models.Str.Iter.fsti │ │ │ │ ├── Core_models.Str.Traits.fsti │ │ │ │ ├── Core_models.Str.fsti │ │ │ │ ├── Core_models.Time.fsti │ │ │ │ ├── Core_models.TypeClassPlaceHolder.fst │ │ │ │ ├── Core_models.fst │ │ │ │ ├── Makefile │ │ │ │ ├── README.md │ │ │ │ ├── Rand.Distr.Distribution.fsti │ │ │ │ ├── Rand.Distr.Integer.fsti │ │ │ │ ├── Rand.Distributions.Distribution.fsti │ │ │ │ ├── Rand.Distributions.Integer.fsti │ │ │ │ ├── Rand.Rng.fsti │ │ │ │ ├── Rand_core.Os.fsti │ │ │ │ ├── Rand_core.fsti │ │ │ │ ├── Std.Collections.Hash.Map.fsti │ │ │ │ ├── Std.F64.fsti │ │ │ │ ├── Std.Hash.Random.fsti │ │ │ │ ├── Std.Io.Error.fsti │ │ │ │ ├── Std.Io.Impls.fsti │ │ │ │ ├── Std.Io.Stdio.fsti │ │ │ │ └── Std.Io.fsti │ │ │ ├── hax_lib/ │ │ │ │ └── Makefile │ │ │ └── rust_primitives/ │ │ │ ├── Makefile │ │ │ ├── Rust_primitives.Arithmetic.fsti │ │ │ ├── Rust_primitives.Arrays.fsti │ │ │ ├── Rust_primitives.BitVectors.fsti │ │ │ ├── Rust_primitives.Char.fsti │ │ │ ├── Rust_primitives.Float.fsti │ │ │ ├── Rust_primitives.Hax.Control_flow_monad.Mexception.fst │ │ │ ├── 
Rust_primitives.Hax.Control_flow_monad.Moption.fst │ │ │ ├── Rust_primitives.Hax.Control_flow_monad.Mresult.fst │ │ │ ├── Rust_primitives.Hax.Folds.fsti │ │ │ ├── Rust_primitives.Hax.Int.fst │ │ │ ├── Rust_primitives.Hax.Monomorphized_update_at.fsti │ │ │ ├── Rust_primitives.Hax.fst │ │ │ ├── Rust_primitives.Integers.fsti │ │ │ ├── Rust_primitives.Iterators.fsti │ │ │ ├── Rust_primitives.Mem.fsti │ │ │ ├── Rust_primitives.Notations.fsti │ │ │ ├── Rust_primitives.Sequence.fst │ │ │ ├── Rust_primitives.Slice.fsti │ │ │ ├── Rust_primitives.String.fsti │ │ │ └── Rust_primitives.fst │ │ ├── fstar-secret-integers/ │ │ │ ├── .envrc │ │ │ ├── Makefile.copy │ │ │ ├── README.md │ │ │ ├── core/ │ │ │ │ ├── Alloc.Alloc.fst │ │ │ │ ├── Alloc.Collections.Binary_heap.fsti │ │ │ │ ├── Alloc.Slice.fst │ │ │ │ ├── Alloc.Vec.fst │ │ │ │ ├── Core.Array.Iter.fsti │ │ │ │ ├── Core.Array.fst │ │ │ │ ├── Core.Clone.fst │ │ │ │ ├── Core.Cmp.fsti │ │ │ │ ├── Core.Convert.fst │ │ │ │ ├── Core.Iter.Adapters.Enumerate.fst │ │ │ │ ├── Core.Iter.Adapters.Step_by.fst │ │ │ │ ├── Core.Iter.Traits.Collect.fst │ │ │ │ ├── Core.Iter.Traits.Iterator.fst │ │ │ │ ├── Core.Iter.fsti │ │ │ │ ├── Core.Marker.fst │ │ │ │ ├── Core.Num.Error.fsti │ │ │ │ ├── Core.Num.fsti │ │ │ │ ├── Core.Ops.Arith.Neg.fsti │ │ │ │ ├── Core.Ops.Arith.fsti │ │ │ │ ├── Core.Ops.Control_flow.fst │ │ │ │ ├── Core.Ops.Deref.fst │ │ │ │ ├── Core.Ops.Index.IndexMut.fst │ │ │ │ ├── Core.Ops.Index.fst │ │ │ │ ├── Core.Ops.Range.fsti │ │ │ │ ├── Core.Ops.Try_trait.fst │ │ │ │ ├── Core.Ops.fst │ │ │ │ ├── Core.Option.fst │ │ │ │ ├── Core.Panicking.fst │ │ │ │ ├── Core.Result.fst │ │ │ │ ├── Core.Slice.Iter.fst │ │ │ │ ├── Core.Slice.fsti │ │ │ │ ├── Core.Str.Converts.fsti │ │ │ │ ├── Core.Str.Error.fsti │ │ │ │ ├── Core.Str.fsti │ │ │ │ ├── Core.fst │ │ │ │ ├── Makefile │ │ │ │ └── README.md │ │ │ ├── hax_lib/ │ │ │ │ └── Makefile │ │ │ └── rust_primitives/ │ │ │ ├── Makefile │ │ │ ├── Rust_primitives.Arrays.fst │ │ │ ├── 
Rust_primitives.Arrays.fsti │ │ │ ├── Rust_primitives.BitVectors.fst │ │ │ ├── Rust_primitives.BitVectors.fsti │ │ │ ├── Rust_primitives.Hax.Monomorphized_update_at.fst │ │ │ ├── Rust_primitives.Hax.Monomorphized_update_at.fsti │ │ │ ├── Rust_primitives.Hax.fst │ │ │ ├── Rust_primitives.Integers.fst │ │ │ ├── Rust_primitives.Integers.fsti │ │ │ ├── Rust_primitives.Iterators.fsti │ │ │ └── Rust_primitives.fst │ │ └── lean/ │ │ ├── Hax/ │ │ │ ├── MissingLean/ │ │ │ │ ├── Init/ │ │ │ │ │ ├── Data/ │ │ │ │ │ │ ├── Array/ │ │ │ │ │ │ │ └── Lemmas.lean │ │ │ │ │ │ ├── BitVec/ │ │ │ │ │ │ │ └── Basic.lean │ │ │ │ │ │ ├── Int/ │ │ │ │ │ │ │ └── DivMod/ │ │ │ │ │ │ │ └── Lemmas.lean │ │ │ │ │ │ ├── Nat/ │ │ │ │ │ │ │ ├── Div/ │ │ │ │ │ │ │ │ └── Basic.lean │ │ │ │ │ │ │ └── MinMax.lean │ │ │ │ │ │ ├── SInt/ │ │ │ │ │ │ │ ├── Basic.lean │ │ │ │ │ │ │ ├── Basic_Int128.lean │ │ │ │ │ │ │ ├── Lemmas.lean │ │ │ │ │ │ │ └── Lemmas_Int128.lean │ │ │ │ │ │ ├── UInt/ │ │ │ │ │ │ │ ├── Basic.lean │ │ │ │ │ │ │ ├── BasicAux.lean │ │ │ │ │ │ │ ├── Lemmas.lean │ │ │ │ │ │ │ └── Lemmas_UInt128.lean │ │ │ │ │ │ └── Vector/ │ │ │ │ │ │ └── Basic.lean │ │ │ │ │ ├── GrindInstances/ │ │ │ │ │ │ ├── Ring/ │ │ │ │ │ │ │ ├── SInt.lean │ │ │ │ │ │ │ └── UInt.lean │ │ │ │ │ │ └── ToInt.lean │ │ │ │ │ ├── Prelude.lean │ │ │ │ │ └── While.lean │ │ │ │ ├── Lean/ │ │ │ │ │ ├── Tactic/ │ │ │ │ │ │ └── Simp/ │ │ │ │ │ │ └── BuiltinSimpProcs/ │ │ │ │ │ │ ├── SInt.lean │ │ │ │ │ │ └── UInt.lean │ │ │ │ │ └── ToExpr.lean │ │ │ │ └── Std/ │ │ │ │ └── Do/ │ │ │ │ ├── PostCond.lean │ │ │ │ └── Triple/ │ │ │ │ ├── Basic.lean │ │ │ │ └── SpecLemmas.lean │ │ │ ├── MissingLean.lean │ │ │ ├── Tactic/ │ │ │ │ ├── HaxBVDecide.lean │ │ │ │ ├── HaxConstructPure.lean │ │ │ │ ├── HaxMvcgen.lean │ │ │ │ ├── HaxSpec.lean │ │ │ │ ├── HaxZify.lean │ │ │ │ ├── Init.lean │ │ │ │ └── SpecSet.lean │ │ │ ├── Tactic.lean │ │ │ ├── core_models/ │ │ │ │ ├── core_models.lean │ │ │ │ ├── epilogue/ │ │ │ │ │ ├── alloc.lean │ │ │ │ │ 
├── convert.lean │ │ │ │ │ ├── float.lean │ │ │ │ │ ├── folds.lean │ │ │ │ │ ├── function.lean │ │ │ │ │ ├── num.lean │ │ │ │ │ ├── ops.lean │ │ │ │ │ ├── range.lean │ │ │ │ │ ├── result.lean │ │ │ │ │ ├── slice.lean │ │ │ │ │ └── string.lean │ │ │ │ ├── epilogue.lean │ │ │ │ ├── prologue/ │ │ │ │ │ ├── clone.lean │ │ │ │ │ ├── marker.lean │ │ │ │ │ └── num.lean │ │ │ │ └── prologue.lean │ │ │ ├── core_models.lean │ │ │ ├── rust_primitives/ │ │ │ │ ├── BVDecide.lean │ │ │ │ ├── Cast.lean │ │ │ │ ├── GetElemResult.lean │ │ │ │ ├── RustM.lean │ │ │ │ ├── Spec.lean │ │ │ │ ├── USize64.lean │ │ │ │ ├── arithmetic.lean │ │ │ │ ├── boxed.lean │ │ │ │ ├── hax/ │ │ │ │ │ ├── array.lean │ │ │ │ │ ├── int.lean │ │ │ │ │ ├── logical_op.lean │ │ │ │ │ ├── machine_int.lean │ │ │ │ │ ├── never.lean │ │ │ │ │ ├── tuple.lean │ │ │ │ │ └── while_loop.lean │ │ │ │ ├── hax.lean │ │ │ │ ├── hax_lib.lean │ │ │ │ ├── mem.lean │ │ │ │ ├── ops.lean │ │ │ │ ├── sequence.lean │ │ │ │ └── slice.lean │ │ │ └── rust_primitives.lean │ │ ├── Hax.lean │ │ ├── README.md │ │ ├── lake-manifest.json │ │ ├── lakefile.toml │ │ └── lean-toolchain │ ├── proofs/ │ │ └── fstar/ │ │ └── extraction/ │ │ ├── Hax_lib.Abstraction.fst │ │ ├── Hax_lib.Bundle.fst │ │ ├── Hax_lib.Int.Bigint.fst │ │ ├── Hax_lib.Int.fst │ │ ├── Hax_lib.Prop.Bundle.fst │ │ ├── Hax_lib.Prop.Constructors.fst │ │ ├── Hax_lib.Prop.fst │ │ ├── Hax_lib.fst │ │ └── Makefile │ └── src/ │ ├── abstraction.rs │ ├── dummy.rs │ ├── implementation.rs │ ├── int/ │ │ ├── bigint.rs │ │ └── mod.rs │ ├── lib.rs │ ├── proc_macros.rs │ └── prop.rs ├── hax-lib-protocol/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── crypto.rs │ ├── lib.rs │ └── state_machine.rs ├── hax-lib-protocol-macros/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── hax-types/ │ ├── Cargo.toml │ ├── README.md │ ├── build.rs │ └── src/ │ ├── cli_options/ │ │ ├── extension.rs │ │ └── mod.rs │ ├── diagnostics/ │ │ ├── message.rs │ │ ├── mod.rs │ │ └── report.rs │ ├── driver_api.rs │ ├── 
engine_api.rs │ ├── lib.rs │ └── prelude.rs ├── justfile ├── mkdocs.yml ├── rust-engine/ │ ├── Cargo.toml │ ├── README.md │ ├── macros/ │ │ ├── Cargo.toml │ │ └── src/ │ │ ├── lib.rs │ │ ├── partial_application.rs │ │ ├── replace.rs │ │ └── struct_fields.rs │ └── src/ │ ├── ast/ │ │ ├── diagnostics.rs │ │ ├── fragment.rs │ │ ├── identifiers/ │ │ │ ├── global_id/ │ │ │ │ ├── compact_serialization.rs │ │ │ │ ├── generated.rs │ │ │ │ ├── generated_names.rs │ │ │ │ └── view.rs │ │ │ └── global_id.rs │ │ ├── identifiers.rs │ │ ├── literals.rs │ │ ├── resugared.rs │ │ ├── span.rs │ │ ├── utils.rs │ │ └── visitors.rs │ ├── ast.rs │ ├── attributes.rs │ ├── backends/ │ │ ├── fstar.rs │ │ ├── lean.rs │ │ ├── rust/ │ │ │ ├── renamings │ │ │ └── renamings.rs │ │ └── rust.rs │ ├── backends.rs │ ├── debugger.rs │ ├── hax_io.rs │ ├── import_thir.rs │ ├── interning.rs │ ├── lib.rs │ ├── main.rs │ ├── names.rs │ ├── ocaml_engine.rs │ ├── phase/ │ │ ├── explicit_monadic.rs │ │ ├── filter_unprintable_items.rs │ │ ├── legacy.rs │ │ └── reject_not_do_lean_dsl.rs │ ├── phase.rs │ ├── printer/ │ │ ├── pretty_ast/ │ │ │ ├── debug_json.rs │ │ │ └── to_document.rs │ │ ├── pretty_ast.rs │ │ └── render_view.rs │ ├── printer.rs │ ├── resugarings.rs │ └── symbol.rs ├── rust-toolchain.toml ├── rustc-coverage-tests/ │ ├── Cargo.toml │ ├── README.md │ ├── proofs/ │ │ ├── fstar/ │ │ │ └── extraction/ │ │ │ └── Makefile │ │ └── lean/ │ │ └── extraction/ │ │ └── lakefile.toml │ ├── requirements.txt │ ├── run-coverage-tests.py │ ├── snapshots/ │ │ └── fstar/ │ │ ├── Coverage.Abort.fst │ │ ├── Coverage.Assert.fst │ │ ├── Coverage.Assert_ne.fst │ │ ├── Coverage.Assert_not.fst │ │ ├── Coverage.Attr.Impl_.fst │ │ ├── Coverage.Attr.Module.Nested_a.Nested_b.fst │ │ ├── Coverage.Attr.Module.Off.fst │ │ ├── Coverage.Attr.Module.On.fst │ │ ├── Coverage.Attr.Module.fst │ │ ├── Coverage.Attr.Off_on_sandwich.fst │ │ ├── Coverage.Attr.Trait_impl_inherit.fst │ │ ├── Coverage.Auxiliary.Discard_all_helper.fst │ │ ├── 
Coverage.Auxiliary.Used_crate.fst │ │ ├── Coverage.Auxiliary.Used_inline_crate.fst │ │ ├── Coverage.Closure_macro.fst │ │ ├── Coverage.Closure_unit_return.fst │ │ ├── Coverage.Color.fst │ │ ├── Coverage.Condition.Conditions.fst │ │ ├── Coverage.Conditions.fst │ │ ├── Coverage.Continue_.fst │ │ ├── Coverage.Dead_code.fst │ │ ├── Coverage.Drop_trait.fst │ │ ├── Coverage.Fn_sig_into_try.fst │ │ ├── Coverage.Generics.fst │ │ ├── Coverage.If_.fst │ │ ├── Coverage.If_else.fst │ │ ├── Coverage.If_not.fst │ │ ├── Coverage.Ignore_map.fst │ │ ├── Coverage.Ignore_run.fst │ │ ├── Coverage.Inline.fst │ │ ├── Coverage.Inline_dead.fst │ │ ├── Coverage.Inner_items.fst │ │ ├── Coverage.Issue_83601_.fst │ │ ├── Coverage.Lazy_boolean.fst │ │ ├── Coverage.Let_else_loop.fst │ │ ├── Coverage.Long_and_wide.fst │ │ ├── Coverage.Loop_break.fst │ │ ├── Coverage.Loop_break_value.fst │ │ ├── Coverage.Loops_branches.fst │ │ ├── Coverage.Macro_in_closure.fst │ │ ├── Coverage.Match_or_pattern.fst │ │ ├── Coverage.Mcdc.Condition_limit.fst │ │ ├── Coverage.Mcdc.If_.fst │ │ ├── Coverage.Mcdc.Inlined_expressions.fst │ │ ├── Coverage.Mcdc.Nested_if.fst │ │ ├── Coverage.Mcdc.Non_control_flow.fst │ │ ├── Coverage.Nested_loops.fst │ │ ├── Coverage.No_cov_crate.Nested_fns.fst │ │ ├── Coverage.No_cov_crate.fst │ │ ├── Coverage.No_spans.fst │ │ ├── Coverage.No_spans_if_not.fst │ │ ├── Coverage.Overflow.fst │ │ ├── Coverage.Panic_unwind.fst │ │ ├── Coverage.Partial_eq.fst │ │ ├── Coverage.Simple_loop.fst │ │ ├── Coverage.Simple_match.fst │ │ ├── Coverage.Sort_groups.fst │ │ ├── Coverage.Test_harness.fst │ │ ├── Coverage.Tight_inf_loop.fst │ │ ├── Coverage.Trivial.fst │ │ ├── Coverage.Try_error_result.fst │ │ ├── Coverage.Unicode.fst │ │ ├── Coverage.Unused.fst │ │ ├── Coverage.Unused_mod.Unused_module.fst │ │ ├── Coverage.Unused_mod.fst │ │ ├── Coverage.While_.fst │ │ └── Coverage.While_early_ret.fst │ ├── src/ │ │ ├── abort.rs │ │ ├── assert-ne.rs │ │ ├── assert.rs │ │ ├── assert_not.rs │ │ ├── async.rs │ 
│ ├── async2.rs │ │ ├── async_block.rs │ │ ├── async_closure.rs │ │ ├── attr/ │ │ │ ├── impl.rs │ │ │ ├── mod.rs │ │ │ ├── module.rs │ │ │ ├── nested.rs │ │ │ ├── off-on-sandwich.rs │ │ │ └── trait-impl-inherit.rs │ │ ├── auxiliary/ │ │ │ ├── discard_all_helper.rs │ │ │ ├── executor.rs │ │ │ ├── inline_always_with_dead_code.rs │ │ │ ├── inline_mixed_helper.rs │ │ │ ├── macro_name_span_helper.rs │ │ │ ├── mod.rs │ │ │ ├── unused_mod_helper.rs │ │ │ ├── used_crate.rs │ │ │ └── used_inline_crate.rs │ │ ├── await_ready.rs │ │ ├── bad_counter_ids.rs │ │ ├── bench.rs │ │ ├── branch/ │ │ │ ├── generics.rs │ │ │ ├── guard.rs │ │ │ ├── if-let.rs │ │ │ ├── if.rs │ │ │ ├── lazy-boolean.rs │ │ │ ├── let-else.rs │ │ │ ├── match-arms.rs │ │ │ ├── match-trivial.rs │ │ │ ├── mod.rs │ │ │ ├── no-mir-spans.rs │ │ │ └── while.rs │ │ ├── closure.rs │ │ ├── closure_bug.rs │ │ ├── closure_macro.rs │ │ ├── closure_macro_async.rs │ │ ├── closure_unit_return.rs │ │ ├── color.rs │ │ ├── condition/ │ │ │ ├── conditions.rs │ │ │ └── mod.rs │ │ ├── conditions.rs │ │ ├── continue.rs │ │ ├── coroutine.rs │ │ ├── coverage_attr_closure.rs │ │ ├── dead_code.rs │ │ ├── discard-all-issue-133606.rs │ │ ├── drop_trait.rs │ │ ├── fn_sig_into_try.rs │ │ ├── generic-unused-impl.rs │ │ ├── generics.rs │ │ ├── holes.rs │ │ ├── if.rs │ │ ├── if_else.rs │ │ ├── if_not.rs │ │ ├── ignore_map.rs │ │ ├── ignore_run.rs │ │ ├── inline-dead.rs │ │ ├── inline.rs │ │ ├── inline_mixed.rs │ │ ├── inner_items.rs │ │ ├── issue-83601.rs │ │ ├── issue-84561.rs │ │ ├── issue-85461.rs │ │ ├── issue-93054.rs │ │ ├── lazy_boolean.rs │ │ ├── let_else_loop.rs │ │ ├── lib.rs │ │ ├── long_and_wide.rs │ │ ├── loop-break.rs │ │ ├── loop_break_value.rs │ │ ├── loops_branches.rs │ │ ├── macro_in_closure.rs │ │ ├── macro_name_span.rs │ │ ├── match_or_pattern.rs │ │ ├── mcdc/ │ │ │ ├── condition-limit.rs │ │ │ ├── if.rs │ │ │ ├── inlined_expressions.rs │ │ │ ├── mod.rs │ │ │ ├── nested_if.rs │ │ │ └── non_control_flow.rs │ │ ├── 
nested_loops.rs │ │ ├── no-core.rs │ │ ├── no_cov_crate.rs │ │ ├── no_spans.rs │ │ ├── no_spans_if_not.rs │ │ ├── overflow.rs │ │ ├── panic_unwind.rs │ │ ├── partial_eq.rs │ │ ├── simple_loop.rs │ │ ├── simple_match.rs │ │ ├── sort_groups.rs │ │ ├── test_harness.rs │ │ ├── tight_inf_loop.rs │ │ ├── trivial.rs │ │ ├── try_error_result.rs │ │ ├── unicode.rs │ │ ├── unreachable.rs │ │ ├── unused.rs │ │ ├── unused_mod.rs │ │ ├── uses_crate.rs │ │ ├── uses_inline_crate.rs │ │ ├── while.rs │ │ ├── while_early_ret.rs │ │ └── yield.rs │ ├── test_config.yaml │ └── update-test-sources.sh ├── rustfmt.toml ├── setup.sh ├── test-harness/ │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── command_hax_ext.rs │ ├── harness.rs │ ├── lib.rs │ └── snapshots/ │ ├── toolchain__assert into-coq.snap │ ├── toolchain__assert into-fstar.snap │ ├── toolchain__assert into-ssprove.snap │ ├── toolchain__attribute-opaque into-fstar.snap │ ├── toolchain__attributes into-fstar.snap │ ├── toolchain__constructor-as-closure into-fstar.snap │ ├── toolchain__cyclic-modules into-fstar.snap │ ├── toolchain__cyclic-modules into-lean.snap │ ├── toolchain__dyn into-fstar.snap │ ├── toolchain__enum-repr into-coq.snap │ ├── toolchain__enum-repr into-fstar.snap │ ├── toolchain__enum-repr into-ssprove.snap │ ├── toolchain__functions into-coq.snap │ ├── toolchain__functions into-fstar.snap │ ├── toolchain__generics into-fstar.snap │ ├── toolchain__guards into-coq.snap │ ├── toolchain__guards into-fstar.snap │ ├── toolchain__guards into-ssprove.snap │ ├── toolchain__include-flag into-coq.snap │ ├── toolchain__include-flag into-fstar.snap │ ├── toolchain__interface-only into-fstar.snap │ ├── toolchain__lean-core-models into-lean.snap │ ├── toolchain__lean-tests into-lean.snap │ ├── toolchain__let-else into-coq.snap │ ├── toolchain__let-else into-fstar.snap │ ├── toolchain__let-else into-ssprove.snap │ ├── toolchain__literals into-coq.snap │ ├── toolchain__literals into-fstar.snap │ ├── 
toolchain__literals into-lean.snap │ ├── toolchain__loops into-fstar.snap │ ├── toolchain__mut-ref-functionalization into-fstar.snap │ ├── toolchain__naming into-coq.snap │ ├── toolchain__naming into-fstar.snap │ ├── toolchain__pattern-or into-coq.snap │ ├── toolchain__pattern-or into-fstar.snap │ ├── toolchain__patterns into-fstar.snap │ ├── toolchain__recursion into-fstar.snap │ ├── toolchain__reordering into-coq.snap │ ├── toolchain__reordering into-fstar.snap │ ├── toolchain__reordering into-ssprove.snap │ ├── toolchain__side-effects into-fstar.snap │ ├── toolchain__side-effects into-ssprove.snap │ ├── toolchain__slices into-coq.snap │ ├── toolchain__slices into-fstar.snap │ ├── toolchain__statics into-fstar.snap │ ├── toolchain__traits into-fstar.snap │ └── toolchain__unsafe into-fstar.snap └── tests/ ├── .gitignore ├── Cargo.toml ├── README.md ├── assert/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── attribute-opaque/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── attributes/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── cli/ │ ├── include-flag/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ └── interface-only/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── constructor-as-closure/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── cyclic-modules/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── dyn/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── enum-repr/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── enum-struct-variant/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── even/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── functions/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── generics/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── guards/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── if-let/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── lean-core-models/ │ ├── Cargo.toml │ └── src/ │ ├── default.rs │ ├── function.rs │ ├── lib.rs │ ├── option.rs │ ├── phantom.rs │ └── result.rs ├── lean-tests/ │ ├── Cargo.toml │ └── src/ │ ├── array.rs │ ├── associated_types.rs │ ├── binops.rs │ ├── 
casts.rs │ ├── comments.rs │ ├── constants.rs │ ├── enums.rs │ ├── floats.rs │ ├── ite.rs │ ├── lib.rs │ ├── loops.rs │ ├── matching.rs │ ├── monadic.rs │ ├── nested_control_flow.rs │ ├── opaque.rs │ ├── recursion.rs │ ├── specs.rs │ ├── structs.rs │ ├── traits.rs │ └── types.rs ├── let-else/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── literals/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── loops/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── mut-ref-functionalization/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── naming/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── nested-derefs/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── never-type/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── odd/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── pattern-or/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── patterns/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── proverif-basic-structs/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── proverif-fn-to-letfun/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── proverif-minimal/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── proverif-noise/ │ ├── Cargo.toml │ └── src/ │ ├── lib.rs │ ├── noise_crypto.rs │ ├── noise_kkpsk0.rs │ └── noise_lib.rs ├── proverif-ping-pong/ │ ├── Cargo.toml │ ├── pingpong.pv │ └── src/ │ ├── a.rs │ ├── b.rs │ └── lib.rs ├── raw-attributes/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ └── lib.rs ├── recursion/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── reordering/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── side-effects/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── slices/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── statics/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── traits/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── tuples/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs └── unsafe/ ├── Cargo.toml └── src/ └── lib.rs ================================================ FILE CONTENTS ================================================ ================================================ FILE: .cargo/config.toml 
================================================ [build] rustflags = ["--cfg", "hax"] ================================================ FILE: .docker/Dockerfile ================================================ # This Dockerfile should be run from the root directory of this repo # e.g. `docker build -f .docker/Dockerfile .` from the parent directory FROM nixpkgs/nix-flakes # See issue #71 RUN if [ "$(uname)" = Darwin ]; then \ echo "filter-syscalls = false" >> /etc/nix/nix.conf; \ fi # Prepare the sources COPY . /hax-sources RUN cd /hax-sources && git init && git add . # Use cache to speed up install ENV PATH="$PATH:/root/.nix-profile/bin" RUN nix-env -iA cachix -f https://cachix.org/api/v1/install RUN cachix use hacspec # Install RUN nix profile install /hax-sources ================================================ FILE: .dockerignore ================================================ .git .gitignore **/target **/_build debug ================================================ FILE: .envrc ================================================ watch_file rust-toolchain.toml use flake ================================================ FILE: .github/assets/change-padding.sh ================================================ #!/usr/bin/env bash # set padding so that logos are centered when rendered by GH set -euo pipefail X="${1:?Usage: $0 }" find . 
-type f -name '*.svg' -exec sd 'id="__topPaddingWrapper" transform="translate\(0, \d+\)"' "id=\"__topPaddingWrapper\" transform=\"translate(0, ${1})\"" {} + ================================================ FILE: .github/workflows/bertie.yml ================================================ name: Extract Bertie on: pull_request: merge_group: workflow_dispatch: push: branches: [main] env: CARGO_TERM_COLOR: always jobs: extract-bertie: if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }} runs-on: "ubuntu-latest" steps: - name: ⤵ Clone Bertie repository uses: actions/checkout@v4 with: repository: cryspen/bertie - uses: actions/checkout@v4 with: path: hax - uses: DeterminateSystems/nix-installer-action@main - name: ⤵ Install hax run: | nix profile install ./hax - name: 🏃 Extract fstar run: ./hax-driver.py extract-fstar ================================================ FILE: .github/workflows/changelog.yml ================================================ name: Check Changelog Update on: pull_request: types: [opened, synchronize, reopened, edited] if: github.actor != 'github-merge-queue[bot]' jobs: check-changelog: if: github.actor != 'github-merge-queue[bot]' runs-on: ubuntu-latest steps: - name: Check for [skip changelog] tag in PR body id: skip_check uses: actions/github-script@v7 with: script: | const body = context.payload.pull_request.body || ""; core.debug(body); if (body.includes('[skip changelog]')) { core.notice("Skipping changelog check because [skip changelog] was found in PR body."); core.setOutput("skip", "true"); } else { core.setOutput("skip", "false"); } - name: Checkout full git history if: steps.skip_check.outputs.skip == 'false' uses: actions/checkout@v4 with: fetch-depth: 0 - name: Fetch base branch if: steps.skip_check.outputs.skip == 'false' run: git fetch origin ${{ github.base_ref }} - name: Check if CHANGELOG.md was updated if: steps.skip_check.outputs.skip == 'false' id: updated run: | git diff --name-only 
origin/${{ github.base_ref }} HEAD > changed_files.txt echo "::group::Changed files" cat changed_files.txt echo "::endgroup::" if ! grep -q 'CHANGELOG.md' changed_files.txt; then { echo '**Missing `CHANGELOG.md` entry**' echo '' echo 'Please do one of the following:' echo '- Add relevant changes to `CHANGELOG.md`' echo '- Or add `[skip changelog]` to the pull request body' echo '' echo 'Once done, re-run this workflow by clicking **"Re-run jobs"**.' echo '' cat CONTRIBUTING.md | awk '/^### Changelog$/{f=1;next} /^##?#? /&&f{exit} f' | sed 's/^###\s*//' } > error-message cat error-message >> $GITHUB_STEP_SUMMARY exit 1 fi - name: Fail with markdown error if: failure() uses: actions/github-script@v7 with: script: | const msg = require('fs').readFileSync('error-message', 'utf8'); core.setFailed(msg); ================================================ FILE: .github/workflows/clippy_rust_engine.yml ================================================ name: Linting for the Rust engine on: pull_request: merge_group: workflow_dispatch: push: branches: [main] jobs: clippy: name: clippy runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install clippy run: | rustup component add clippy - name: Run clippy run: | cargo clippy -p hax-rust-engine -- -D warnings --no-deps ================================================ FILE: .github/workflows/extract_and_run_coq.yml ================================================ name: Extract and Run - Coq on: [pull_request] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main - name: ⤵ Install hax run: | nix build .\#check-coq-coverage ================================================ FILE: .github/workflows/flake_lock.yml ================================================ name: Make sure flake.lock is up-to-date on: pull_request: merge_group: workflow_dispatch: jobs: flake_lock_up_to_date: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: 
DeterminateSystems/nix-installer-action@main - name: Lock flake run: nix flake lock - name: Diff `flake.lock` run: git diff --exit-code flake.lock ================================================ FILE: .github/workflows/format.yml ================================================ name: Ensure formatting on: pull_request: merge_group: workflow_dispatch: push: branches: [main] jobs: ocamlformat: name: ocamlformat runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main - name: Run OCaml formatter run: | nix shell ..#ocamlformat -c \ ocamlformat --check $(find . -name '*.ml') working-directory: engine rustfmt: name: rustfmt runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Run Rust formatter run: | cargo fmt --check ================================================ FILE: .github/workflows/gh_pages.yml ================================================ name: Deploy to GH Pages on: workflow_dispatch: push: branches: [main] jobs: # Build job build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main - name: Build documentation run: nix build .#docs - name: Upload static files as artifact id: deployment uses: actions/upload-pages-artifact@v3 with: path: result/ # Deploy job deploy: needs: build permissions: pages: write # to deploy to Pages id-token: write # to verify the deployment originates from an appropriate source # Deploy to the github-pages environment environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} # Specify runner + deployment step runs-on: ubuntu-latest steps: - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@v4 # or specific "vX.X.X" version tag for this action ================================================ FILE: .github/workflows/install_and_test.yml ================================================ name: Install & test on: pull_request: merge_group: workflow_dispatch: push: 
branches: [main] jobs: tests: name: nix-action runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: DeterminateSystems/nix-installer-action@main - uses: cachix/cachix-action@v15 with: name: hax skipPush: true extraPullNames: fstar-nix-versions, z3-nix-versions - name: Build run: nix build -L - name: Install the toolchain run: | nix profile install nixpkgs#yq nix profile install .#rustc nix profile install . - name: Ensure readme coherency run: | nix build .#check-readme-coherency -L - name: Test the toolchain run: | nix build .#check-toolchain -L - name: Try to extract Rust By Examples run: | nix build .#rust-by-example-hax-extraction -L - name: Test the examples run: | cd examples nix develop ..#ci-examples --command make clean nix develop ..#ci-examples --command make - name: Checkout specifications uses: actions/checkout@v4 with: repository: 'hacspec/specs' path: specs - name: Push to Cachix if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }} env: CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }} run: | nix-store -qR --include-outputs $(nix build .# --json | jq -r '.[].outputs | to_entries[].value') \ | cachix push hax ================================================ FILE: .github/workflows/licenses.yml ================================================ name: Check licenses on: pull_request: merge_group: workflow_dispatch: push: branches: [main] jobs: tests: name: nix-action runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: extractions/setup-just@v1 - name: Set-up OCaml uses: ocaml/setup-ocaml@v3 with: ocaml-compiler: 5 - uses: actions-rust-lang/setup-rust-toolchain@v1 with: toolchain: stable - name: Install cargo-deny run: cargo install cargo-deny - name: Install cargo-deny run: cargo install toml2json - name: Check the licenses run: just check-licenses ================================================ FILE: .github/workflows/mldsa.yml ================================================ name: 
Extract and lax-check libcrux ML-DSA on: schedule: - cron: '0 0 * * *' workflow_dispatch: env: CARGO_TERM_COLOR: always jobs: extract-and-lax-mldsa: runs-on: "ubuntu-latest" steps: - name: ⤵ Clone Libcrux repository uses: actions/checkout@v4 with: repository: cryspen/libcrux path: libcrux - uses: actions/checkout@v4 with: path: hax - name: Use local hax-lib working-directory: libcrux run: | cargo remove hax-lib -v -p libcrux-ml-dsa cargo add hax-lib --path "../hax/hax-lib" -v -p libcrux-ml-dsa - uses: DeterminateSystems/nix-installer-action@main - name: Set up Cachix uses: cachix/cachix-action@v15 with: name: fstar-nix-versions push: false - name: ⤵ Install hax run: | nix profile install ./hax - name: ⤵ Install FStar run: nix profile install github:FStarLang/FStar/v2025.03.25 - name: 🏃 Extract ML-DSA crate working-directory: libcrux/libcrux-ml-dsa run: ./hax.py extract - name: 🏃 Lax ML-DSA crate working-directory: libcrux/libcrux-ml-dsa run: | env FSTAR_HOME=${{ github.workspace }}/fstar \ HAX_HOME=${{ github.workspace }}/hax \ PATH="${PATH}:${{ github.workspace }}/fstar/bin" \ ./hax.py prove --admit ================================================ FILE: .github/workflows/mlkem.yml ================================================ name: Extract and TC ML-Kem on: pull_request: merge_group: workflow_dispatch: push: branches: [main] env: CARGO_TERM_COLOR: always jobs: extract-mlkem: if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }} runs-on: "ubuntu-latest" steps: - name: ⤵ Extract libcrux version from PR body id: extract_version uses: actions/github-script@v7 with: result-encoding: string script: | let extractLibcruxRef = body => body.match(/libcrux-ref:\s*([a-zA-Z0-9._\/-]+)/)?.[1]; const refMap = new Map(); if (context.eventName === 'pull_request') { const ref = extractLibcruxRef(context.payload.pull_request?.body || '') ?? 
'main'; core.notice(`Using libcrux ref: ${ref}`); return ref;
github.workspace }}/hax \ PATH="${PATH}:${{ github.workspace }}/fstar/bin" \ ./hax.py prove --admit ================================================ FILE: .github/workflows/playwright-docs.yml ================================================ name: Playwright Docs Tests on: schedule: - cron: '0 0 * * *' workflow_dispatch: jobs: test: timeout-minutes: 45 runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - uses: DeterminateSystems/determinate-nix-action@v3 - uses: actions/setup-node@v5 with: node-version: lts/* - name: Install dependencies working-directory: docs/.test run: npm ci - name: Install Playwright Browsers working-directory: docs/.test run: npx playwright install --with-deps - name: Replace version with commit hash run: sed -i "s/const HAX_PLAYGROUND_FORCED_VERSION = false;/const HAX_PLAYGROUND_FORCED_VERSION = \"${GITHUB_SHA}\";/" hax_playground.js working-directory: docs/javascripts - name: Playground warmup and build docs run: | set -euo pipefail nix build .#docs & pid1=$! curl -sS "https://hax-playground.cryspen.com/query/$GITHUB_SHA/fstar" \ -X POST \ -H 'User-Agent: bot' \ -H 'Accept: application/json' \ -H 'Content-Type: application/json' \ --data-raw '[["src/lib.rs","fn f(){}"]]' & pid2=$! wait $pid1 || exit_code1=$? wait $pid2 || exit_code2=$? 
exit $(( ${exit_code1:-0} | ${exit_code2:-0} ))
--deps-only working-directory: engine - run: opam exec -- dune build working-directory: engine - run: | cp engine/_build/default/bin/native_driver.exe hax-engine tar -czf hacspec_${{ matrix.os }}.tar.gz hax-engine - name: Release uses: softprops/action-gh-release@v1 with: files: hacspec_${{ matrix.os }}.tar.gz ================================================ FILE: .github/workflows/rustc-coverage-tests.yml ================================================ name: Rustc coverage tests with negative results, and snapshots verification on: pull_request: merge_group: workflow_dispatch: push: branches: [main] jobs: rustc-coverage-tests: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - if: runner.environment == 'github-hosted' uses: DeterminateSystems/nix-installer-action@main - name: Set up Cachix uses: cachix/cachix-action@v15 with: name: fstar-nix-versions push: false - name: ⤵ Install hax run: | nix profile install . nix profile install nixpkgs#rustup - name: ⤵ Install FStar run: nix profile install github:FStarLang/FStar/v2025.02.17 - name: Set up Python uses: actions/setup-python@v5 with: python-version: '3.x' - name: Install Python dependencies uses: py-actions/py-dependency-install@v4 with: path: rustc-coverage-tests/requirements.txt - name: Run tests with negative checking working-directory: rustc-coverage-tests run: | FSTAR_HOME=~/.nix-profile python3 run-coverage-tests.py all --with-negative --check-stability ================================================ FILE: .github/workflows/stale.yml ================================================ name: 'Triage stale issues and PRs' on: schedule: - cron: '00 00 * * 4' workflow_dispatch: jobs: stale: runs-on: ubuntu-latest steps: - uses: actions/stale@v9 with: stale-issue-message: "This issue has been marked as stale due to a lack of activity for 60 days. If you believe this issue is still relevant, please provide an update or comment to keep it open. Otherwise, it will be closed in 7 days." 
stale-pr-message: "This PR has been marked as stale due to a lack of activity for 60 days. If you believe this pull request is still relevant, please provide an update or comment to keep it open. Otherwise, it will be closed in 7 days." stale-issue-label: 'stale' exempt-issue-labels: 'keep-open' stale-pr-label: 'stale' exempt-pr-labels: 'keep-open' days-before-stale: 60 days-before-close: 7 close-issue-message: "This issue has been closed due to a lack of activity since being marked as stale. If you believe this issue is still relevant, please reopen it with an update or comment." close-pr-message: "This PR has been closed due to a lack of activity since being marked as stale. If you believe this pull request is still relevant, please reopen it with an update or comment." ================================================ FILE: .github/workflows/test.yml ================================================ name: Test Workspace on: push: branches: [main] pull_request: merge_group: workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: test-workspace: strategy: fail-fast: false matrix: os: - macos-latest - ubuntu-latest - windows-latest runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Test run: cargo test --workspace --exclude hax-engine-names-extract --exclude hax-rust-engine --verbose - name: Test `hax-frontend-exporter` with feature `rustc` off run: cargo check -p hax-frontend-exporter --no-default-features --verbose no-std-lib: runs-on: ubuntu-latest steps: - uses: dtolnay/rust-toolchain@master with: toolchain: stable targets: thumbv7em-none-eabi - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Build no-std run: | rustup target add thumbv7em-none-eabi cargo build -p hax-lib --target thumbv7em-none-eabi ================================================ FILE: .github/workflows/test_installs.yml ================================================ name: 
Test installations on: pull_request: merge_group: workflow_dispatch: push: branches: [main] jobs: docker: if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - run: docker build -f .docker/Dockerfile . -t hax setup_sh: if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }} strategy: matrix: os: - ubuntu-latest - ubuntu-22.04 - macos-latest - macos-15-intel runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - if: runner.os == 'macOS' run: brew install opam nodejs rustup-init jq - if: runner.os == 'Linux' run: sudo apt-get update - if: runner.os == 'Linux' run: sudo apt-get install -y opam nodejs jq - run: curl --proto '=https' --tlsv1.3 https://sh.rustup.rs -sSf | sh -s -- -y - run: opam init --bare -y && opam switch create -y 4.14.1 - name: Run `setup.sh` run: | export OPAMERRLOGLEN=0 ./setup.sh - run: cargo hax --version - name: Test an extraction run: | cd examples/chacha20 eval $(opam env) cargo hax into fstar setup_sh_status: if: | always() && github.event_name == 'workflow_dispatch' ||github.event_name == 'merge_group' needs: setup_sh runs-on: ubuntu-latest steps: - name: Successful if: ${{ !(contains(needs.*.result, 'failure')) }} run: exit 0 - name: Failing if: ${{ contains(needs.*.result, 'failure') }} run: exit 1 ================================================ FILE: .github/workflows/this-month-in-hax.yml ================================================ name: Generate This Month in hax on: workflow_dispatch: schedule: - cron: '0 4 1 * *' jobs: generate: permissions: issues: write contents: write if: github.repository == 'cryspen/hax' runs-on: ubuntu-latest steps: - name: Checkout repo uses: actions/checkout@v5 - name: Run script and capture output id: run_script run: | bash .utils/this-month-in-hax-skeleton.sh env: GH_TOKEN: ${{ github.token }} - name: Commit changes run: | git config user.name "github-actions[bot]" 
git config user.email "41898282+github-actions[bot]@users.noreply.github.com" git checkout -b $(cat this-month-in-hax-branch) git add docs/blog git commit -m "chore(blog): set up a skeleton for 'This Month in hax'" git push --force origin $(cat this-month-in-hax-branch) - uses: JasonEtco/create-an-issue@v2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: filename: this-month-in-hax-issue.yml ================================================ FILE: .gitignore ================================================ debug/ target/ **/*.rs.bk **/*.profraw node_modules TODO.org .direnv _build result .DS_Store .depend .cache .lake proof-libs/fstar/rust_primitives/#*# **/proofs/*/extraction/* !**/proofs/*/extraction/Makefile !**/proofs/*/extraction/*.diff !**/proofs/fstar/extraction/*.fst !**/proofs/coq/extraction/*.v !**/proofs/lean/extraction/lakefile.toml !**/proofs/rust/extraction/Cargo.toml ================================================ FILE: .utils/jq_utils.jq ================================================ # Removes a field from an object at any depth def remove_field(field): walk(if type == "object" and has(field) then del(.[field]) else . end); # Remove `table_id` indirections whenever a value is found def thir_drop_table_id_nodes: walk(if type == "object" and has("cache_id") and has("value") and .value then .value else . end); # Prints a THIR def_id as a string, useful for searching def thir_str_of_def_id_contents: ( [.krate] + [ .path.[] | try (.disambiguator as $d | .data | . as $data | keys | .[0] | $data[.] + (if $d > 0 then "#" + $d else "" end)) | select(type == "string")] ) | join("::"); # Prints all THIR def_ids def thir_str_of_def_ids: thir_drop_table_id_nodes | walk( # if type == "object" and has("contents") and (.contents | type) == "object" and .contents | has("krate") and .contents | has("path") then if try(. 
as $o | ($o.contents.krate | type == "string") and ($o.contents.path | type == "array")) catch false then .contents | thir_str_of_def_id_contents else . end); ================================================ FILE: .utils/rebuild.sh ================================================ #!/usr/bin/env bash # This is a small script to rebuild Hax (the Rust CLI & frontend and # OCaml engine) quickly. # Options: # - the flag `--online` allow Cargo to look for updates on the internet; # - the environment variable `DUNEJOBS` limits the number of jobs `dune` # is allowed to spawn in parallel while building. set -euo pipefail OFFLINE_FLAG="--offline" if [[ "${1:-}" == "--online" ]]; then OFFLINE_FLAG="" shift 1 fi TARGETS="${1:-rust ocaml}" DUNEJOBS=${DUNEJOBS:-} # required since `set -u` YELLOW=43 GREEN=42 RED=41 BLACK=90 status () { echo -e "\033[1m[rebuild script] \033[30m\033[$1m$2\033[0m"; } cd_rootwise () { cd $(git rev-parse --show-toplevel)/$1 } rust () { cd_rootwise "cli" for i in driver subcommands ../engine/names/extract ../rust-engine; do CURRENT="rust/$i" cargo install --locked --quiet $OFFLINE_FLAG --debug --path $i done } ocaml () { cd_rootwise "engine" CURRENT="ocaml" dune build $([ -z $DUNEJOBS ] || echo "-j $DUNEJOBS") CURRENT="ocaml/install" # Small hack for those that are not using [opam] at all: by # default install OCaml binaries in `~/.cargo` (which is supposed # to be in PATH anyway). INSTALL_PREFIX="${OPAM_SWITCH_PREFIX:-${DUNE_INSTALL_PREFIX:-$HOME/.cargo}}" dune install --profile dev --prefix $INSTALL_PREFIX if ( command -v "which" && command -v "sort" && command -v "wc" ) >/dev/null; then case $(which -a hax-engine | sort -u | wc -l) in 0) status $YELLOW 'Warning: cannot detect `hax-engine` in PATH';; 1) :;; *) status $YELLOW 'Warning: multiple `hax-engine` detected in PATH. Maybe you installed Hax with OPAM (i.e. via `setup.sh`)? 
Please uninstall it, otherwise you might use a stale engine!';; esac else status $YELLOW 'Warning: cannot run sanity checks because `which`, `sort` or `wc` commands are not available. Please install them.' fi } on_exit () { if [ $? -ne 0 ]; then status $RED "ERR: $CURRENT"; fi } trap on_exit EXIT ERR trap "status $RED 'SIGINT'" SIGINT CURRENT="none" started() { [ -z ${QUIET+x} ] && status $BLACK "$1 build started" || true; } if [[ "$TARGETS" == *rust* ]]; then started rust rust status $GREEN "rust succeed" fi if [[ "$TARGETS" == *ml* ]]; then started ocaml ocaml status $GREEN "ocaml succeed" fi ================================================ FILE: .utils/rust-by-example.js ================================================ // This script expects Rust By Example to be in current directory // (clone the repo https://github.com/rust-lang/rust-by-example, `cd` into it, and run `node rust-by-examples.js`) const fs = require('fs'); const SRC_DIR = 'src'; // Lists all markdown files under `SRC_DIR` function getMarkdownFiles() { return fs.readdirSync(SRC_DIR, { recursive: true }) .filter(path => path.endsWith('.md')); } // Code blocks from a file of given path function extractCodeBlocks(path) { let contents = fs.readFileSync(SRC_DIR + '/' + path).toString(); let blocks = contents .split(/^```/m) .filter((_, i) => i % 2 == 1) .map(s => { let lines = s.split('\n'); let modifiers = lines[0].split(',').map(x => x.trim()).filter(x => x); let contents = lines.slice(1).join('\n'); return {modifiers, contents}; }) .filter(x => x.modifiers.includes('rust')); let name = path.replace(/[.]md$/, '').split('/').join('_'); return {name, blocks}; } let code = getMarkdownFiles() .map(extractCodeBlocks) .filter(({blocks}) => blocks.length); // Strips the comments of a rust snippet let stripComments = rust_snippet => rust_snippet.replace(/[/][/]+.*/mg, ''); // Given a Rust snippet, returns `true` whenever we detect a top-level // `let` binding: this means we need to wrap the snippet in a 
function. let isDirectLet = rust_snippet => stripComments(rust_snippet).trim().startsWith('let '); // Wraps a Rust snippet inside a function let protectSnippet = rust_snippet => `fn wrapper_fn() { let _ = {${rust_snippet}}; }`; function codeBlocksToModules(code_blocks) { let denylist = [ /unsafe_asm \d+/ ]; let modules = {}; for(let {name, blocks} of code_blocks) { let mod_section = `section_${name}`; modules[mod_section] = {}; let nth = 0; for(let {modifiers, contents} of blocks) { nth += 1; if(['edition2015', 'compile_fail', 'ignore'].some(m => modifiers.includes(m))) { continue; } let id = `section_${name} ${nth}`; // Remove top-level assertions contents = contents.replace(/^# assert.*\n?/mg, ''); // Strip `# ` (the mdbook marker to hide a line) contents = contents.replace(/^# /mg, ''); // Whenever we detect a `let` if(isDirectLet(contents)) contents = protectSnippet(contents); if(denylist.some(re => id.match(re))) continue; let mod_snippet = `snippet_${nth}`; // Replace `crate::` by a full path to the current module contents = contents.replace(/crate::/g, 'crate::' + mod_section + '::' + mod_snippet + '::'); modules[mod_section][mod_snippet] = `// modifiers: ${modifiers.join(', ')}\n` + contents; } } return modules; } let modules = codeBlocksToModules(code); let OUTPUT_CRATE = 'rust-by-examples-crate'; fs.rmSync(OUTPUT_CRATE, { recursive: true, force: true }); fs.mkdirSync(OUTPUT_CRATE, { recursive: true }); const { execSync } = require('child_process'); execSync("cargo init --lib", { cwd: OUTPUT_CRATE }); let OUTPUT_CRATE_SRC = OUTPUT_CRATE + '/src/'; fs.rmSync(OUTPUT_CRATE_SRC, { recursive: true, force: true }); let root_mod = '#![allow(unused)]'; for(let mod_name in modules) { let submodules = modules[mod_name]; fs.mkdirSync(OUTPUT_CRATE_SRC + mod_name, { recursive: true }); let mod_contents = ''; for (let submod_name in submodules) { let contents = submodules[submod_name]; fs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '/' + submod_name + '.rs', contents); 
mod_contents += 'pub mod ' + submod_name + ';\n'; } fs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '.rs', mod_contents); root_mod += 'pub mod ' + mod_name + ';\n'; } fs.writeFileSync(OUTPUT_CRATE_SRC + '/lib.rs', root_mod); // A list of [, []] that are known not to be processed by hax let cargo_hax_denylist = [ ['error_iter_result', [3]], ['error_multiple_error_types_boxing_errors', [1]], // uses dyn ['error_multiple_error_types_reenter_question_mark', [2]], // uses dyn ['error_multiple_error_types_wrap_error', [1]], // uses dyn ['error_option_unwrap_defaults', [3,4]], ['flow_control_for', [1,2,3,5]], ['flow_control_if_let', [3]], ['flow_control_let_else', [1,2]], // Let else panics, bug #1460 ['flow_control_loop_nested', [1]], ['flow_control_loop_return', [1]], ['flow_control_loop', [1]], ['flow_control_match_binding', [1,2]], ['flow_control_match_destructuring_destructure_pointers', [1]], ['flow_control_match_destructuring_destructure_slice', [1]], ['flow_control_match_destructuring_destructure_tuple', [1]], // .. 
pattern, bug #1462 ['flow_control_match', [1]], ['flow_control_while_let', [1,2]], ['fn_closures_capture', [1]], ['fn_closures_input_parameters', [1]], ['fn', [1]], ['hello_print_fmt', [1]], ['generics_bounds_testcase_empty', [1]], // Marker traits, bug #1221 ['macros_dry', [1]], ['scope_borrow_alias', [1]], ['scope_borrow_ref', [1]], ['scope_move_mut', [1]], ['scope_raii', [1]], ['std_arc', [1]], ['std_hash', [1]], ['std_misc_arg_matching', [1]], ['std_misc_channels', [1]], ['std_misc_file_read_lines', [3]], ['std_misc_threads', [1]], ['std_misc_threads_testcase_mapreduce', [1]], ['std_str', [1]], ['trait_iter', [1]], ['trait', [1]], ['trait_dyn', [1]], // uses dyn ['trait_supertraits', [1]], // uses dyn ['unsafe', [1,2]], ].map(([module, snippets]) => snippets.map(n => `section_${module}::snippet_${n}`)).flat(); let include_clause = cargo_hax_denylist.map(path => `-*::${path}::**`).join(' '); execSync(`cargo hax into -i '${include_clause}' fstar`, { cwd: OUTPUT_CRATE, stdio: 'inherit' }); ================================================ FILE: .utils/this-month-in-hax-skeleton.sh ================================================ #!/usr/bin/env bash # This script creates a skeleton blog post for the "This Month in hax" blog series. # It writes a new markdown file, and outputs a PR body. # This script is an helper for the github action workflow "this-month-in-hax.yml". set -e # Go to the folder of blog posts cd $(git rev-parse --show-toplevel)/docs/blog/posts/this-month-in-hax # By default, use `cryspen/hax`, and the month and year from two weeks ago repo="--repo cryspen/hax" month=$(date -d "14 days ago" +'%m') year=$(date -d "14 days ago" +'%Y') # Set date formatting to English export LC_ALL=C # Parse command line arguments. 
all_args=("$@") while [ $# -gt 0 ]; do case "$1" in -r | --repo) repo="--repo $2"; shift ;; -m | --month) month=$2; shift ;; -y | --year) year=$2; shift ;; --author) author=$2; shift ;; esac shift done report() { # Calculate the first day of the month start=$(date -u -d "$year-$month-01" +"%Y-%m-%dT%H:%M:%SZ") # Get the next month end=$(date -u -d "$year-$month-01 + 1 month - 1 day" +"%Y-%m-%dT%H:%M:%SZ") # Get all closed PRs with number, title, and description pr_data=$( gh pr list $repo --state merged --limit 1000 \ --json number,title,url,author,mergedAt \ --jq "map(select(.mergedAt >= \"$start\" and .mergedAt <= \"$end\" and .author.login != \"app/dependabot\")) | .[] | {number, title, url, author}" | jq -s ) echo "In $(date -d "$year-$month-01" +"%B"), we successfully merged **$(echo "$pr_data" | jq -r 'length') pull requests**!" echo "" echo "" echo "" echo "### Full list of PRs" # Extract markdown list with jq echo "$pr_data" | jq -r '.[] | . | "* \\#\(.number): [\(.title)](\(.url))"' echo "" echo "### Contributors" # Extract markdown list of authors with jq echo "$pr_data" | jq -r 'map(.author.login) | unique | .[] | "* [@\(.)](https://github.com/\(.))"' } # Available authors, and their GH handles authors_and_handles() { sort -u < $BLOG_POST_FILE --- authors: - $author title: "This Month in Hax: $(date -d "$year-$month-15" +"%B %Y")" date: $(date +"%Y-%m-%d") --- HEADER report >> $BLOG_POST_FILE BLOG_POST="$(cat $BLOG_POST_FILE)" # Go to root cd $(git rev-parse --show-toplevel) BRANCH="this-month-in-hax-blog-post-$year-$month" echo $BRANCH > this-month-in-hax-branch # Echo the author's handle cat < this-month-in-hax-issue.yml --- title: Write This Month in Hax assignees: $(handle_of $author) --- This is an auto-generated issue for the "This Month in hax" blog series. 
Branch [\`$BRANCH\`](https://github.com/cryspen/hax/tree/$BRANCH) have been created with the following template: \`\`\`md $(echo "$BLOG_POST") \`\`\` It is an skeleton blog post with the list of PRs pushed in $(date -d "$year-$month-01" +"%B %Y") and a list of contributor. Suggested person to pick this draft PR: @$(handle_of $author) ## Action Items - [ ] Write the blog article - [ ] Release a new version of hax - [ ] Follow \`PUBLISHING.md\` - [ ] Create Github release MESSAGE ================================================ FILE: CHANGELOG.md ================================================ # Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] Changes to the Rust Engine: - Rename `GenericConstraint::Type` to `TypeClass` and `::Projection` to `Equality` (#1996) - Remove `BinOp` resugaring (#1950) - Apply resugarings to linked items (pre/post conditions) (#1961) - Add new import_thir implemented in Rust and using `FullDef`, activated with `--experimental-full-def` (#1967) Changes to the engine: - Omit type aliases whose body has unresolvable trait bounds instead of crashing (#2014) - Report let-chains (`if let .. && let ..`) as a soft error instead of panicking (#2014) Changes to the frontend: - Fix support for ellipsis: add wildcard for every field (based on type info rather than number of subpatterns) (#2001) - Fix panic on constants of type `&[&T]` (e.g. 
`&[&str]`) caused by a wrong type for the synthesized array length (#2014) Changes to cargo-hax: Changes to hax-lib: - Lean lib: use Rust core models (#1865) - Lean lib: specs for negation (#1891) - Lean lib: Add casting for all integer type pairs (#1837) - Lean lib: bump lean to v4.28.0-rc1 (#1900) - Lean lib: Extract more core models (#1919) - Lean lib: Separate symbolic and bit-blasting specs (#1933) - Lean lib: Communicate user-generated specs to mvcgen (#1937) - Lean lib: Rust primitives for prop (#1942) - Lean lib: For-loops for all unsigned integers (#1951) - Lean lib: Upgrade to Lean v4.29.0-rc1 (#1962) - Lean lib: Add support for Int128 and UInt128 while waiting for upstream in Lean (#1968) - Lean lib: Refactor `RustM` as `ExceptT Error Option` (#1994) - Lean lib: Add Repr instance for tuples (#2000) - Lean lib: Make the proof of `RustM.toBVRustM_bind` compatible with Lean 4.29.0 (#2005) Changes to the Lean backend: - Add `hax_zify` and `hax_construct_pure` tactics (#1888) - Add support for opaque `impl`s (#1887) - Fix support for associated constants in trait impls (#1906) - Gather definitions in namespaces, shortening names (#1901) - Add support for associated types with constraints and inheritance (#1909) - Fix bug with monadic wrapping of trait constants (#1929) - Add type annotation for cast_op (#1925) - Add attributes for pureEnsures/pureRequires (#1931) - Extract correct `PhantomData` structure (#1932) - Standardize generated Lean naming to lowercase namespaces (#1914) - Fix associated constants with default values (#1941) - New default proof for the Lean backend & proof method attribute (#1938) - Prettier proof_mode annotations (#1943) - Detect recursive functions and mark them partial_fixpoint (#1946) - Add more binops (#1963) - Add a resugaring for ellipsis patterns (#2002) Miscellaneous: - Fix Nix development shell: add an `fstar` devShell providing F* and the required environment variables (#1972) ## 0.3.6 Changes to the Rust Engine: - Add a 
rejection phase for interleaving of expressions and statements not supported by the Lean do-notation syntax (#1739). - Add a phase to handle the monadic encoding: it explicitly introduces two new Hax primitives `pure` (to wrap values as monadic computations) and `lift` (to lift monadic computations into values) (#1746) - Add a mechanism to lookup pre- and post-conditions (#1805) - Add a proper Rust backend (#1898) Changes to the frontend: - Update the pin of rustc (#1765) - Miscellaneous changes related to Charon (#1765) Change to cargo-hax: Changes to hax-lib: - Add Lean core models for options, results, default (#1747) - F* lib: improved while loops support, additions of some specific arithmetic operations and fixed `TryInto` for integer types (#1742) - Lean lib: use macros for int operations (#1795) - Lean lib: add new setup for `bv_decide` (#1828) - Lean lib: base specs on mathematical integers (#1829) - Lean lib: represent `usize` via a copy of `UInt64` (#1829) - Lean lib: Add support for while loops (#1857, #1863) - Core models: integers, arrays, iterators, full replacement of the F* proof-lib (#1898) Changes to the Lean backend: - Support for constants with arbitrary computation (#1738) - Add support for base-expressions of structs (#1736) - Use the explicit monadic phase to insert `pure` and `←` only on demand, and not introduce extra `do` block (#1746) - Rename `Result` monad to `RustM` to avoid confusion with Rust `Result` type (#1768) - Add support for shift-left (#1785) - Add support for default methods of traits (#1777) - Add support for floats (#1784) - Add support for pattern matching on constant literals (#1789) - Add support for binding subpatterns in match constructs (#1790) - Add error when using patterns in function parameters (#1792) - Add grind annotations for various lemmas in the Lean library (#1802) - Add support for constant parameters to functions and traits (#1797) - Add support for associated types with equality constraints (#1806) - 
Make trait-level arguments explicit for all trait functions, adding them as extra parameters (#1803) - Add generation of specs from requires/ensures-annotations (#1815) - Add support for nonliteral array sizes (#1826) - Add `hax_lib::lean::proof` attribute (#1831) - Add support for `#[hax_lib::opaque]` (#1846) - Turn rejection phase into a transformation phase (#1840) - Fix string escaping (#1834) Miscellaneous: - Reserve extraction folder for auto-generated files in Lean examples (#1754) - Add `lean_adc` example to the Lean examples section, demonstrating tactics introduced in PR(#1933) ## 0.3.5 Changes to the Rust Engine: - The module `names` now produces `ExplicitDefId`s instead of `DefId`s (#1648) - Add a resugaring `FunctionsToConstants` (#1559) - Drop the tuple nodes of the AST, add resugaring node for tuples (#1662) - Add support for enums and structs to the Lean backend (type definitions, expressions, pattern-matching) (#1623) - Update name rendering infrastructure in the Lean backend (#1623, #1624) - Printers now emit proper diagnostics (PR #1669) - Global identifiers are now interned (#1689) - Global identifiers are encapsulated properly, and provide easy destructuring as tuple identifiers (#1693) - Add support for `trait` and `impl` in the Lean backend (#1679): trait definitions, trait bounds on functions, impl definitions. The typeclass resolution in the generated code is left implicit (relies on Lean). Limited support for associated types. No support for default implementations. 
- Refactor of the printing infrastructure: lowers the boilerplate, get rid of most lifetimes annotation, add proper contextual span support (#1735) Changes to the frontend: - Add an explicit `Self: Trait` clause to trait methods and consts (#1559) - Fix `ImplExpr::Builtin` that had some type errors (#1559) - Improve the translation of `Drop` information (#1559) - Add variance information to type parameters (#1559) - Cleanup the `State` infrastructure a little bit (#1559) - Add information about the metadata to use in unsize coercions (#1559) - Resolve `dyn Trait` predicates (#1559) - Many improvements to `FullDef` (#1559) - Add infrastructure to get a monomorphized `FullDef`; this is used in charon to monomorphize a crate graph (#1559) - Fix a regression affecting projection predicates (#1678) Change to cargo-hax: - Improve the caching of rustc when using `cargo hax` commands (#1719) - Add hidden commands and flags to explicitly manipulate `haxmeta` files (#1722) Changes to hax-lib: - New behavior for `hax_lib::include`: it now forces inclusion when in contradiction with `-i` flag. - hax-lib requires edition 2021 instead of 2024 (#1726) - Improved `VecDeque` model in F* proof lib (#1728) - Split the Lean library into several files, update to lean 4.23.0 (#1696) Changes to the Lean backend: - Improve support for functionalized loops (#1695) - Improve error messages, having each error (coming from the Lean backend) point to a specific github issue (#1717). Miscellaneous: - A lean tutorial has been added to the hax website (#1626) - Add end-to-end tests for the website (#1690) - Diagnostics reporting were improved (#1692) ## 0.3.4 The release of `0.3.3` got troubles because of the new Rust Engine crates. This release is mostly empty. 
## 0.3.3 Changes to the frontend: - A field `visibility` was added to HIR items (#1643) Rust Engine: - A Lean backend was introduced (#1593, #1591, #1590, #1607) - The Rust engine was improved (#1624, #1603, #1600, #1585) - The F* backend has been improved (#1587, #1585) ## 0.3.2 Changes to the frontend: - Provide the `FnOnce` shim for closures (#1477) - Update pin of rustc (#1482) - Add `Ty::FnDef` (splitting `FnPtr` and `FnDef`) (#1487) - Regroup generic and trait arguments in a struct `ItemRef` (#1514) - Support trait aliases in `FullDef` (#1494) - Separate `{Add,Sub,Mul}Unchecked` and `{Add,Sub,Mul}` (#1513) - Our pin to rustc was updated (#1534) Changes to the engine: - introduce an experimental Rust engine (#1501, #1502, #1504, #1505, #1518) Changes the `hax-lib`: - Support hax octal and binary literals in the `int!` macro - F*: additions of integer function implementations (#1520) - F*: change the definition of the `Clone` tyepclass (#1552) ## 0.3.1 (2025-05-26) Changes to `hax-lib`: - Bug fix with PartialOrd in f* lib: [#1473](https://github.com/cryspen/hax/pull/1473) - Move `proof-libs` into `hax-lib` to allow dependencies using crates.io ## 0.3.0 (2025-05-16) Changes to `hax-lib`: - Support for SMT patterns in lemmas: [#1428](https://github.com/cryspen/hax/pull/1428) - While loop invariants and termination (`loop_decreases`): [#1375](https://github.com/cryspen/hax/pull/1375) - Removal of deprecated dependencies: [#1385](https://github.com/cryspen/hax/pull/1385) and [#1394](https://github.com/cryspen/hax/pull/1394) - Support for mathematical integers and logical propositions has been strengthened: [#1372](https://github.com/cryspen/hax/pull/1372), [#1352](https://github.com/cryspen/hax/pull/1352), [#1351](https://github.com/cryspen/hax/pull/1351) - `hax_lib::BACKEND::replace_body`: [#1321](https://github.com/cryspen/hax/pull/1321) - `hax_lib::decreases`: [#1342](https://github.com/cryspen/hax/pull/1342) ## 0.2.0 (2024-01-20) - Initial release 
================================================ FILE: CI.md ================================================ # Continuous Integration (CI) ## Github Actions - [`add_to_project`](./.github/workflows/add_to_project.yml): each time an issue or a PR is open, this action adds it to the project [https://github.com/orgs/hacspec/projects/1](https://github.com/orgs/hacspec/projects/1). - [`release`](./.github/workflows/release.yml): whenever a tagged commit is pushed, this action builds the Linux binary, MacOS binary and JS of `hax-engine`, and uploads them to a new GitHub release. - [`format`](./.github/workflows/format.yml): ensure formatting for Rust and OCaml files. - [`specs`](./.github/workflows/specs.yml): compiles the toolchain (using Nix) and runs it on (for now) a selection of the examples provided by [hacspec/specs](https://github.com/hacspec/specs). For now this only tests the extraction of the specifications to Coq and FStar, we do not run Coq or FStar on the extractions. - [`test_installs`](./.github/workflows/test_installs.yml): compiles the toolchain on two versions of Ubuntu and two versions of MacOS using `apt` or `homebrew` and the `setup.sh` script; - [`engine-js-build`](./.github/workflows/engine_js_build.yml): tests the build the JS version of the engine. ## Merge queue Additional actions are triggered on pull requests in the merge queue. They are found in [`test_installs`](./.github/workflows/test_installs.yml). ================================================ FILE: CODEOWNERS ================================================ * @cryspen/hax ================================================ FILE: CONTRIBUTING.md ================================================ # Engineering & Contributing Guidelines The following is a set of guidelines for contributing to this repository. These are mostly guidelines, not rules. Use your best judgement, and feel free to propose changes to this document in a pull request. 
The processes described here are not meant to pester you but to increase and maintain code quality.
### Force pushing It is fine to force-push either (1) before asking for a review or (2) after PR approval, just before merging. Otherwise, in between two reviews, please do not force-push. ### Regressions When a PR introduces a regression, a fix should be submitted in a window of 2 days, otherwise the PR will be reverted. ## Rules for the OCaml code - Never use the OCaml standard library, always use [`base`](https://v3.ocaml.org/p/base/latest/doc/index.html), [`core`](https://v3.ocaml.org/p/core/latest/doc/index.html) or [`stdlib`](https://v3.ocaml.org/p/stdlib/latest/doc/index.html) instead. - Avoid non-total functions (e.g. all the `_exn` functions in `base`). - Try to avoid exceptions, if possible. - Never use `==`, which is the physical equality, and almost never what you want. ### Changelog Our changelog format is based on https://keepachangelog.com/. Please add an entry in a subsection (`Added`, `Changed`, `Deprecated`, `Removed`, `Fixed` -- see https://keepachangelog.com/en/1.0.0/#how) for each notable change. Please prefix with `engine:`, `frontend:` or similar. #### Should I add an entry to `CHANGELOG.md`? **Include in CHANGELOG.md:** - New features and enhancements - Bug fixes - Breaking changes - Security patches - Major documentation updates - Dependency updates that affect users **Do not include:** - Code refactoring with no user impact - Minor doc fixes (typos, grammar) - CI/CD or tooling changes with no external effect - Linting, formatting, or style-only commits - Reverts or fixup commits - Dependency bumps with no behavioral impact **Rule of thumb:** If a user (developer or customer) wouldn’t notice or need to know, leave it out. ## Styleguides ### Optional Title Prefixes for Issues To help quickly convey the focus of an issue, we sometimes add a short prefix in square brackets at the start of the title: `[prefix] Issue short title`. This is optional; you can just use it if the issue has a clear direction or goal. 
Keep it short and intuitive, think of it as a lightweight hint, not a strict taxonomy or replacements for labels or milestones. Use it when it helps. Leave it out when it doesn’t. ### Git Commit Messages - Use the present tense - Use the imperative mood - Limit the first line to 80 characters - Don't end the first line of the commit message with a period - Reference issues and pull requests liberally after the first line - If the patch is of nontrivial size, point to the important comments in the non-first lines of the commit message. ### Styleguide Use `rustfmt` for every Rust code and `ocamlformat` for every OCaml code. From the command line, run `cargo fmt` in the root of hax and `dune fmt` in `engine`. ### Documentation Styleguide Use [rustdoc](https://doc.rust-lang.org/rustdoc/index.html) comments on Rust files and functions. Use [`odoc`](https://ocaml.github.io/odoc/) comments on OCaml files. It is mandatory on public functions and encouraged on internal functions. ## Reviews As a reviewer always keep in mind the following principles - Reviewing code is more valuable than writing code as it results in higher overall project activity. If you find you can't write code any more due to prioritizing reviews over coding, let's talk. - You should respond to a review request within one working day of getting it, either with a review, a deadline by which you promise to do the review, or a polite refusal. If you think a patch is lower priority than your other work communicate that. ### Review Guidelines - Check that the issue is assigned and linked. - Commit title and message make sense and says what is being changed. - Check that the PR applies cleanly on the target branch. - Check new files for license and administrative issues. - Check out code changes - Run automated tests - Manually verify changes if possible - Code review - Does the change address the issue at hand? - Is the code well documented? - Do you understand the code changes? - If not, add a comment. 
The PR can't be accepted in this stage. - Is the public API changed? - Are the changes well documented for consumers? - Do the changes break backwards compatibility? - Is the new API sensible/needed? - Is the code maintainable after these changes? - Are there any security issues with these changes? - Are all code changes tested? - Do the changes effect performance? - Look at the interdiff for second and subsequent reviews. - Ask if more information is needed to understand and judge the changes. ## AI guidelines Using AI tools to generate code for Hax is accepted under the following conditions: - The PR should clearly state that AI has been used and say for which parts of the code, tests, or documentation. - The author should also explain the methodology: how AI has been used and how the result has been tested. - Any AI generated content should be carefully reviewed by the author of the PR (before the reviewer). ================================================ FILE: Cargo.toml ================================================ [workspace] members = [ "frontend/exporter", "frontend/exporter/options", "cli/subcommands", "cli/driver", "test-harness", "hax-lib", "hax-lib/macros", "hax-lib/macros/types", "hax-lib-protocol", "hax-lib-protocol-macros", "hax-bounded-integers", "engine/names", "engine/names/extract", "hax-types", "rust-engine", "rust-engine/macros", ] exclude = ["tests", "rustc-coverage-tests", "rust-engine/tests", "hax-lib/core-models"] default-members = [ "frontend/exporter", "frontend/exporter/options", "cli/subcommands", "cli/driver", "test-harness", "hax-lib", "hax-lib/macros", "hax-lib/macros/types", "hax-lib-protocol", "hax-lib-protocol-macros", "engine/names", ] resolver = "2" [workspace.package] version = "0.3.6" authors = ["hax Authors"] license = "Apache-2.0" homepage = "https://github.com/hacspec/hax" edition = "2024" repository = "https://github.com/hacspec/hax" readme = "README.md" [workspace.dependencies] itertools = "0.11.0" schemars = "0.8" 
which = "4.4" serde = { version = "1.0", features = ["derive", "rc"] } serde_json = "1.0" clap = { version = "4.0", features = ["derive"] } syn = { version = "1.0.107", features = [ "derive", "printing", "extra-traits", "parsing", "full", ] } tracing = { version = "0.1", features = [ "max_level_trace", "release_max_level_trace", ] } tracing-subscriber = { version = "0.3", features = [ "env-filter", "std", "fmt", ] } tracing-tree = "^0.2" quote = "1.0.32" proc-macro2 = "1.0.66" cargo_metadata = "0.15" colored = "2" annotate-snippets = "0.11" # Crates in this repository hax-frontend-exporter = { path = "frontend/exporter", version = "=0.3.6", default-features = false } hax-adt-into = { path = "frontend/exporter/adt-into", version = "=0.3.6" } hax-frontend-exporter-options = { path = "frontend/exporter/options", version = "=0.3.6" } hax-lib-macros = { path = "hax-lib/macros", version = "=0.3.6" } hax-lib-macros-types = { path = "hax-lib/macros/types", version = "=0.3.6" } hax-lib = { path = "hax-lib", version = "=0.3.6" } hax-engine-names = { path = "engine/names", version = "=0.3.6" } hax-types = { path = "hax-types", version = "=0.3.6" } hax-rust-engine = { path = "rust-engine", version = "=0.3.6" } hax-rust-engine-macros = { path = "rust-engine/macros", version = "=0.3.6" } [workspace.metadata.release] owners = ["github:cryspen:tools"] ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS Copyright 2023 Hax Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: PUBLISHING.md ================================================ # Publishing ## OCaml There is only the package `hax-engine`, that includes a binary and a number of libraries. We have no particular release procedure for the engine: we don't plan on publishing it to opam. ## Rust This repository is divided into several crates, some to be published, some not. All crates should start with the `hax-` prefix, but `cargo-hax` which is the entrypoint to the cargo `hax` subcommand. Here is the list of the crates in this repository (excluding `tests` and `examples`): - `hax-test-harness` **(doesn't need to be published)** ### cargo-hax 1. `hax-frontend-exporter-options` (`frontend/exporter/options `) 2. `hax-adt-into` (`frontend/exporter/adt-into`) 3. `hax-frontend-exporter` (`frontend/exporter`) 4. `hax-types` (`hax-types`) 5. `hax-subcommands` (binaries) (`cli/subcommands`) - `cargo-hax` - `hax-export-json-schemas` - `hax-pretty-print-diagnostics` - `hax-driver` ### hax-lib We publish the following crates that are helper libraries to be used for hax code: 1. `hax-lib-macros-types` 2. `hax-lib-macros` 3. `hax-lib` ### Supporting crates for the engine The crate listed below are used only by the OCaml build of the engine. Those should not be published on `crate.io`. 1. `cargo-hax-engine-names` 2. `cargo-hax-engine-names-extract` ## Procedure 1. 
Move the contents of `CHANGELOG.md` under the `[Unreleased]` section to a new section named following the target version. Commit this change. 2. Bump the version number with `cargo release LEVEL --workspace --no-publish --no-tag --execute` (`cargo release --help` for more details on `LEVEL`, `cargo install cargo-release` if you don't already have this package). This will bump the version of every Rust crate, but also the version in `engine/dune-project`. This will also regenerate `engine/hax-engine.opam`. Note this will *not* publish the crate. 3. PR the change 4. when the PR is merged in main, checkout `main` and run `cargo release --workspace --execute` Note: for now, we are not publishing to Opam. Instead, let's just advertise the following for installation: ```bash opam pin hax-engine https://github.com/hacspec/hax.git#the-release-tag opam install hax-engine ``` ## Notes `cargo release` reads the `Cargo.toml` of each crates of the workspace. Some creates are excluded from releasing: in their `Cargo.toml` manifest, they have `package.metadata.release.release` set to `false`. Also, `cli/subcommands/Cargo.toml` specifies pre-release replacements for the engine: the version of the engine is bumped automatically by `cargo release`. ================================================ FILE: README.md ================================================

Zulip Playground Website Blog License: Apache-2.0

# Hax hax is a tool for high assurance translations of a large subset of Rust into formal languages such as [F\*](https://www.fstar-lang.org/) or [Rocq](https://rocq-prover.org/).

Try out hax online now!

### Supported Backends
General purpose proof assistants Cryptography & protocols
F* Rocq Lean SSProve ProVerif
🟢 stable 🟡 partial 🚀 active dev. 🟡 partial 🟠 PoC
## Learn more Here are some resources for learning more about hax: - [Manual](https://hax.cryspen.com/manual/index.html) (work in progress) + Quick start: [F*](https://hax.cryspen.com/manual/fstar/quick_start/), [Lean](https://hax.cryspen.com/manual/lean/quick_start/) + Tutorial: [F*](https://hax.cryspen.com/manual/fstar/tutorial/), [Lean](https://hax.cryspen.com/manual/lean/tutorial/) - [Examples](./examples/): the [examples directory](./examples/) contains a set of examples that show what hax can do for you. - Other [specifications](https://github.com/hacspec/specs) of cryptographic protocols. Questions? Join us on [Zulip](https://hacspec.zulipchat.com/) or open a [GitHub Discussion](https://github.com/cryspen/hax/discussions). For bugs, file an [Issue](https://github.com/cryspen/hax/issues). ## Usage Hax is a cargo subcommand. The command `cargo hax` accepts the following subcommands: * **`into`** (`cargo hax into BACKEND`): translate a Rust crate to the backend `BACKEND` (e.g. `fstar`, `coq`, `lean`). * **`json`** (`cargo hax json`): extract the typed AST of your crate as a JSON file. Note: * `BACKEND` can be `fstar`, `lean`, `coq`, `easycrypt` or `pro-verif`. `cargo hax into --help` gives the full list of supported backends. * The subcommands `cargo hax`, `cargo hax into` and `cargo hax into ` takes options. For instance, you can `cargo hax into fstar --z3rlimit 100`. Use `--help` on those subcommands to list all options. ## Installation
Manual installation 1. Make sure to have the following installed on your system: - [`opam`](https://opam.ocaml.org/) (`opam switch create 5.1.1`) - [`rustup`](https://rustup.rs/) - [`nodejs`](https://nodejs.org/) - [`jq`](https://jqlang.github.io/jq/) 2. Clone this repo: `git clone git@github.com:cryspen/hax.git && cd hax` 3. Run the [setup.sh](./setup.sh) script: `./setup.sh`. 4. Run `cargo-hax --help`
Nix This should work on [Linux](https://nixos.org/download.html#nix-install-linux), [MacOS](https://nixos.org/download.html#nix-install-macos) and [Windows](https://nixos.org/download.html#nix-install-windows).
Prerequisites: Nix package manager (with flakes enabled) - Either using the [Determinate Nix Installer](https://github.com/DeterminateSystems/nix-installer), with the following bash one-liner: ```bash curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install ``` - or following [those steps](https://github.com/mschwaig/howto-install-nix-with-flake-support).
+ **Run hax on a crate directly** to get F\*/Coq/Lean/... (assuming you are in the crate's folder): - `nix run github:hacspec/hax -- into fstar` extracts F*. + **Install hax**: `nix profile install github:hacspec/hax`, then run `cargo hax --help` anywhere + **Note**: in any of the Nix commands above, replace `github:hacspec/hax` by `./dir` to compile a local checkout of hax that lives in `./some-dir` + **Setup binary cache**: [using Cachix](https://app.cachix.org/cache/hax), just `cachix use hax`
Using Docker 1. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax` 2. Build the docker image: `docker build -f .docker/Dockerfile . -t hax` 3. Get a shell: `docker run -it --rm -v /some/dir/with/a/crate:/work hax bash` 4. You can now run `cargo-hax --help` (notice here we use `cargo-hax` instead of `cargo hax`)
## Supported Subset of the Rust Language Hax intends to support full Rust, with the one exception, promoting a functional style: mutable references (aka `&mut T`) on return types or when aliasing (see https://github.com/hacspec/hax/issues/420) are forbidden. Each unsupported Rust feature is documented as an issue labeled [`unsupported-rust`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust). When the issue is labeled [`wontfix-v1`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+label%3Awontfix%2Cwontfix-v1), that means we don't plan on supporting that feature soon. Quicklinks: - [🔨 Rejected rust we want to support](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+-label%3Awontfix%2Cwontfix-v1); - [💭 Rejected rust we don't plan to support in v1](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+label%3Awontfix%2Cwontfix-v1). ## Hacking on Hax The documentation of the internal crate of hax and its engine can be found [here for the engine](https://hax.cryspen.com/engine/index.html) and [here for the frontend](https://hax.cryspen.com/frontend/index.html). ### Edit the sources (Nix) Just clone & `cd` into the repo, then run `nix develop .`. You can also just use [direnv](https://github.com/nix-community/nix-direnv), with [editor integration](https://github.com/direnv/direnv/wiki#editor-integration). ### Structure of this repository - `rust-frontend/`: Rust library that hooks in the rust compiler and extract its internal typed abstract syntax tree [**THIR**](https://rustc-dev-guide.rust-lang.org/thir.html) as JSON. - `engine/`: the simplification and elaboration engine that translates programs from the Rust language to various backends (see `engine/backends/`). Written in OCaml. - `rust-engine/`: an on-going rewrite of our engine from OCaml to Rust. - `cli/`: the `hax` subcommand for Cargo. 
### Compiling, formatting, and more We use the [`just` command runner](https://just.systems/). If you use Nix, the dev shell provides it automatically, if you don't use Nix, please [install `just`](https://just.systems/man/en/packages.html) on your system. Anywhere within the repository, you can build and install in PATH (1) the Rust parts with `just rust`, (2) the OCaml parts with `just ocaml` or (3) both with `just build`. More commands (e.g. `just fmt` to format) are available, please run `just` or `just --list` to get all the commands. ## Publications & Other material * [📕 Tech report](https://hal.inria.fr/hal-03176482) * [📕 HACSpec: A gateway to high-assurance cryptography](https://github.com/hacspec/hacspec/blob/master/rwc2023-abstract.pdf) * [📕 Original hacspec paper](https://www.franziskuskiefer.de/publications/hacspec-ssr18-paper.pdf) ### Secondary literature, using hacspec: * [📕 Last yard](https://eprint.iacr.org/2023/185) * [📕 A Verified Pipeline from a Specification Language to Optimized, Safe Rust](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl22-final61.pdf) at [CoqPL'22](https://popl22.sigplan.org/details/CoqPL-2022-papers/5/A-Verified-Pipeline-from-a-Specification-Language-to-Optimized-Safe-Rust) * [📕 Hax - Enabling High Assurance Cryptographic Software](https://github.com/hacspec/hacspec.github.io/blob/master/RustVerify24.pdf) at [RustVerify24](https://sites.google.com/view/rustverify2024) * [📕 A formal security analysis of Blockchain voting](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper8-2.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/8/A-formal-security-analysis-of-Blockchain-voting) * [📕 Specifying Smart Contract with Hax and ConCert](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/9/Specifying-Smart-Contract-with-Hax-and-ConCert) ## Contributing Before starting any work please join 
the [Zulip chat][chat-link], start a [discussion on Github](https://github.com/hacspec/hax/discussions), or file an [issue](https://github.com/hacspec/hax/issues) to discuss your contribution. [chat-link]: https://hacspec.zulipchat.com ## Acknowledgements [Zulip] graciously provides the hacspec & hax community with a "Zulip Cloud Standard" tier. [Zulip]: https://zulip.com/ ================================================ FILE: cli/default.nix ================================================ { craneLib, stdenv, makeWrapper, lib, rustc, rustc-docs, gcc, hax-engine , doCheck ? true, libz, libiconv }: let pname = "hax"; is-webapp-static-asset = path: builtins.match ".*(script[.]js|index[.]html)" path != null; buildInputs = lib.optionals stdenv.isDarwin [ libiconv libz.dev ]; binaries = [ hax hax-engine.bin rustc gcc hax_rust_engine ] ++ buildInputs; commonArgs = { version = "0.0.1"; src = lib.cleanSourceWith { src = craneLib.path ./..; filter = path: type: (builtins.isNull (builtins.match ".*/(tests|examples|docs|proof-libs)/.*" path) && builtins.isNull (builtins.match ".*[.](md|svg)" path) && (craneLib.filterCargoSources path type || is-webapp-static-asset path)) || !(builtins.isNull (builtins.match ".*/renamings" path)); }; inherit buildInputs doCheck; doNotRemoveReferencesToRustToolchain = true; } // (if doCheck then { # [cargo test] builds independent workspaces. Each time another # workspace is added, it's corresponding lockfile should be added # in the [cargoLockList] list below. 
cargoVendorDir = craneLib.vendorMultipleCargoDeps { cargoLockList = [ ../Cargo.lock ../tests/Cargo.lock ]; }; } else { }); # hax dependencies (without hax itself) cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { pname = pname; }); # hax with cargo artifact for incremental compilation hax_with_artifacts = craneLib.buildPackage (commonArgs // { inherit cargoArtifacts pname; doInstallCargoArtifacts = true; }); # hax without cargo artifacts: only binaries hax = stdenv.mkDerivation { name = hax_with_artifacts.name; unpackPhase = "true"; buildPhase = "true"; installPhase = '' mkdir -p $out cp -r ${hax_with_artifacts}/bin $out/bin ''; }; hax_rust_engine = craneLib.buildPackage (commonArgs // { inherit cargoArtifacts; buildInputs = buildInputs ++ [ makeWrapper ]; pname = "hax-rust-engine"; cargoExtraArgs = "--manifest-path rust-engine/Cargo.toml --locked"; }); docs = craneLib.cargoDoc (commonArgs // { # preBuildPhases = [ "addRustcDocs" ]; cargoDocExtraArgs = "--document-private-items"; # addRustcDocs = '' # mkdir -p target/doc # cp --no-preserve=mode -rf ${rustc-docs}/share/doc/rust/html/rustc/* target/doc/ # ''; inherit cargoArtifacts pname; }); tests = craneLib.buildPackage (commonArgs // { inherit cargoArtifacts; pname = "hax-tests"; doCheck = true; CI = "true"; cargoBuildCommand = "true"; checkPhaseCargoCommand = '' SNAPS_DIR=test-harness/src/snapshots && rmdir "$SNAPS_DIR" TESTS_DIR=tests && rmdir "$TESTS_DIR" ln -s ${../test-harness/src/snapshots} "$SNAPS_DIR" cp -r --no-preserve=mode ${../tests} "$TESTS_DIR" cargo test --test toolchain --profile release ''; buildInputs = binaries; CARGO_TESTS_ASSUME_BUILT = "yes"; }); in stdenv.mkDerivation { name = hax.name; buildInputs = [ makeWrapper ]; phases = [ "installPhase" ]; installPhase = '' mkdir -p $out/bin makeWrapper ${hax}/bin/cargo-hax $out/bin/cargo-hax \ --prefix PATH : ${lib.makeBinPath binaries} \ ${ lib.optionalString stdenv.isDarwin '' --prefix RUSTFLAGS : "-C link-arg=-L${libiconv}/lib" \ --suffix 
DYLD_LIBRARY_PATH : ${lib.makeLibraryPath [ libz rustc ]} '' } ''; meta.mainProgram = "cargo-hax"; passthru = { unwrapped = hax; hax-engine-names-extract = craneLib.buildPackage (commonArgs // { pname = "hax_engine_names_extract"; cargoLock = ../Cargo.lock; cargoToml = ../engine/names/extract/Cargo.toml; cargoArtifacts = hax_with_artifacts; nativeBuildInputs = [ hax_with_artifacts ]; postUnpack = '' cd $sourceRoot/engine/names/extract sourceRoot="." ''; }); inherit docs tests; }; } ================================================ FILE: cli/driver/Cargo.toml ================================================ [package] name = "hax-driver" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "The custom rustc driver used by hax." [package.metadata.rust-analyzer] rustc_private = true [[bin]] path = "src/driver.rs" name = "driver-hax-frontend-exporter" [dependencies] serde.workspace = true serde_json.workspace = true clap.workspace = true colored.workspace = true hax-frontend-exporter = {workspace = true, features = ["rustc"]} hax-types = {workspace = true, features = ["rustc"]} hax-frontend-exporter-options.workspace = true hax-lib-macros-types.workspace = true itertools.workspace = true tracing.workspace = true tracing-subscriber.workspace = true tracing-tree.workspace = true ================================================ FILE: cli/driver/src/callbacks_wrapper.rs ================================================ use hax_types::cli_options::{ENV_VAR_OPTIONS_FRONTEND, ExporterOptions}; use rustc_ast::Crate; use rustc_driver::{Callbacks, Compilation}; use rustc_interface::interface; use rustc_middle::ty::TyCtxt; use rustc_span::symbol::Symbol; /// Wraps a [Callbacks] structure, and injects some cache-related /// configuration in the `config` phase of rustc pub struct CallbacksWrapper<'a> { pub sub: &'a mut (dyn Callbacks + Send + 'a), 
pub options: ExporterOptions, } impl<'a> Callbacks for CallbacksWrapper<'a> { fn config(&mut self, config: &mut interface::Config) { let options = self.options.clone(); config.psess_created = Some(Box::new(move |parse_sess| { // Silence the "unexpected cfg" lints. parse_sess.check_config.exhaustive_names = false; let depinfo = parse_sess.env_depinfo.get_mut(); depinfo.insert(( Symbol::intern(ENV_VAR_OPTIONS_FRONTEND), Some(Symbol::intern(&serde_json::to_string(&options).unwrap())), )); depinfo.insert(( Symbol::intern("HAX_CARGO_CACHE_KEY"), std::env::var("HAX_CARGO_CACHE_KEY") .ok() .as_deref() .map(Symbol::intern), )); })); self.sub.config(config) } fn after_crate_root_parsing<'tcx>( &mut self, compiler: &interface::Compiler, krate: &mut Crate, ) -> Compilation { self.sub.after_crate_root_parsing(compiler, krate) } fn after_expansion<'tcx>( &mut self, compiler: &interface::Compiler, tcx: TyCtxt<'tcx>, ) -> Compilation { self.sub.after_expansion(compiler, tcx) } fn after_analysis<'tcx>( &mut self, compiler: &interface::Compiler, tcx: TyCtxt<'tcx>, ) -> Compilation { self.sub.after_analysis(compiler, tcx) } } ================================================ FILE: cli/driver/src/driver.rs ================================================ #![feature(rustc_private)] #![feature(box_patterns)] #![feature(trait_alias)] #![allow(unused_imports)] #![allow(unused_variables)] #![allow(unreachable_code)] #![allow(dead_code)] #![feature(macro_metavar_expr)] #![feature(internal_output_capture)] extern crate rustc_ast; extern crate rustc_borrowck; extern crate rustc_data_structures; extern crate rustc_driver; extern crate rustc_errors; extern crate rustc_feature; extern crate rustc_hashes; extern crate rustc_hir; extern crate rustc_hir_analysis; extern crate rustc_hir_id; extern crate rustc_index; extern crate rustc_interface; extern crate rustc_middle; extern crate rustc_mir_build; extern crate rustc_session; extern crate rustc_span; extern crate rustc_target; extern crate 
rustc_type_ir; mod exporter; use std::collections::HashSet; use exporter::ExtractionCallbacks; mod callbacks_wrapper; mod features; use callbacks_wrapper::*; use features::*; use hax_types::cli_options::{ENV_VAR_OPTIONS_FRONTEND, ExporterOptions}; use rustc_driver::{Callbacks, Compilation}; use rustc_interface::interface; use rustc_span::symbol::Symbol; fn rustc_sysroot() -> String { std::process::Command::new("rustc") .args(["--print", "sysroot"]) .output() .ok() .and_then(|out| String::from_utf8(out.stdout).ok()) .map(|s| s.trim().to_string()) .unwrap() } fn setup_logging() { use tracing_subscriber::prelude::*; let enable_colors = { /* Respect [never] in [RUST_LOG_STYLE] */ !std::env::var("RUST_LOG_STYLE").is_ok_and(|style| style == "never") }; let subscriber = tracing_subscriber::Registry::default() .with(tracing_subscriber::EnvFilter::from_default_env()) .with( tracing_tree::HierarchicalLayer::new(2) .with_ansi(enable_colors) .with_indent_lines(true), ); tracing::subscriber::set_global_default(subscriber).unwrap(); } const HAX_VANILLA_RUSTC: &str = "HAX_VANILLA_RUSTC"; fn main() { setup_logging(); let options: ExporterOptions = serde_json::from_str( &std::env::var(ENV_VAR_OPTIONS_FRONTEND).unwrap_or_else(|_| { panic!( "Cannot find environnement variable {}", ENV_VAR_OPTIONS_FRONTEND ) }), ) .unwrap_or_else(|_| { panic!( "Invalid value for the environnement variable {}", ENV_VAR_OPTIONS_FRONTEND ) }); let mut rustc_args: Vec = std::env::args().skip(1).collect(); // add [--sysroot] if not present if !rustc_args.iter().any(|arg| arg.starts_with("--sysroot")) { rustc_args.extend(vec!["--sysroot".into(), rustc_sysroot()]) }; // When `HAX_FEATURES_DETECTION_MODE` is set, we just detect // features for the current crate, output them in JSON on stderr // and exit immediately if std::env::var("HAX_FEATURES_DETECTION_MODE").is_ok() { use std::io::BufWriter; return serde_json::to_writer( BufWriter::new(std::io::stderr()), &Features::detect(&options, &rustc_args), ) 
.unwrap(); } let (vanilla_rustc, vanilla_rustc_never) = { let vanilla_rustc = std::env::var(HAX_VANILLA_RUSTC); let vanilla_rustc_never = vanilla_rustc == Ok("never".into()); ( !vanilla_rustc_never && vanilla_rustc.is_ok(), vanilla_rustc_never, ) }; // fetch the correct callback structure given the command, and // coerce options let is_primary_package = std::env::var("CARGO_PRIMARY_PACKAGE").is_ok(); let is_build_script = std::env::var("CARGO_CRATE_NAME") == Ok("build_script_build".to_string()); // FIXME: is there a more robust way to do this? let translate_package = !vanilla_rustc && !is_build_script && (options.deps || is_primary_package); let mut callbacks: Box = if translate_package { Box::new(exporter::ExtractionCallbacks { body_kinds: options.body_kinds.clone(), experimental_full_def: options.experimental_full_def, }) } else { struct CallbacksNoop; impl Callbacks for CallbacksNoop {} Box::new(CallbacksNoop) }; if translate_package { // We want to enable certain features, but only if the crate // itself doesn't enable those let features = Features { adt_const_params: false, // not useful for now generic_const_exprs: false, // not useful for now register_tool: true, registered_tools: HashSet::from_iter(vec![hax_lib_macros_types::HAX_TOOL.into()]), auto_traits: true, negative_impls: true, } - Features::detect_forking(); rustc_args = [rustc_args[0].clone()] .into_iter() .chain([ "--cfg".into(), hax_lib_macros_types::HAX_CFG_OPTION_NAME.into(), ]) .chain(match &options.backend { Some(backend) => vec!["--cfg".into(), format!("hax_backend_{backend}")], None => vec![], }) .chain(features.into_iter().map(|s| format!("-Zcrate-attr={}", s))) .chain(rustc_args[1..].iter().cloned()) .collect(); }; let mut callbacks = CallbacksWrapper { sub: &mut *callbacks, options: { let mut options = options.clone(); options.force_cargo_build = if translate_package { options.force_cargo_build } else { hax_types::cli_options::ForceCargoBuild::default() }; options }, }; let exit_code = 
rustc_driver::catch_with_exit_code({ let rustc_args = rustc_args.clone(); move || rustc_driver::run_compiler(&rustc_args, &mut callbacks) }); std::process::exit( if !vanilla_rustc_never && translate_package && exit_code == 0 { // When the hax translation is successful, we need to re-run // rustc. Indeed, hax translation doesn't actually build a // package: no `rlib` will be written on disk. self::vanilla_rustc() } else { exit_code }, ) } /// Re-run rustc without doing any hax translation. This ensures a /// `rlib` is produced (when the crate compiles correctly). fn vanilla_rustc() -> i32 { use std::process::{Command, Stdio}; let output = Command::new(std::env::args().next().unwrap()) .args(std::env::args().skip(1)) .env(HAX_VANILLA_RUSTC, "1") .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .unwrap() .wait_with_output() .unwrap(); if output.status.success() { 0 } else { let stdout = &std::str::from_utf8(&output.stdout).unwrap(); let stderr = &std::str::from_utf8(&output.stderr).unwrap(); println!("{stdout}"); eprintln!("{stderr}"); output.status.code().unwrap_or(1) } } ================================================ FILE: cli/driver/src/exporter.rs ================================================ use hax_frontend_exporter::SInto; use hax_frontend_exporter::state::LocalContextS; use hax_types::cli_options::PathOrDash; use hax_types::driver_api::Items; use rustc_driver::{Callbacks, Compilation}; use rustc_interface::interface; use rustc_interface::interface::Compiler; use rustc_middle::middle::region::Scope; use rustc_middle::ty::TyCtxt; use rustc_middle::{ thir, thir::{Block, BlockId, Expr, ExprId, ExprKind, Pat, PatKind, Stmt, StmtId, StmtKind, Thir}, }; use rustc_span::symbol::Symbol; use serde::Serialize; use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use std::rc::Rc; /// Browse a crate and translate every item #[tracing::instrument(skip_all)] fn export_crate<'tcx, Body: hax_frontend_exporter::IsBody>( options: 
&hax_frontend_exporter_options::Options, tcx: TyCtxt<'tcx>, experimental_full_def: bool, ) -> ( Vec, Vec, Vec<( hax_frontend_exporter::DefId, hax_frontend_exporter::ImplInfos, )>, Items, hax_frontend_exporter::id_table::Table, ) { use hax_frontend_exporter::WithGlobalCacheExt; let state = hax_frontend_exporter::state::State::new(tcx, options.clone()); let result = if experimental_full_def { let owners = tcx.hir_crate_items(()).owners(); Items::FullDef( owners .map(|owner_id| { owner_id .to_def_id() .sinto(&state) .full_def(&state) .as_ref() .clone() }) .collect(), ) } else { Items::Legacy( tcx.hir_free_items() .map(|id| tcx.hir_item(id).sinto(&state)) .collect(), ) }; let impl_infos = hax_frontend_exporter::impl_def_ids_to_impled_types_and_bounds(&state) .into_iter() .collect(); let exported_spans = state.with_global_cache(|cache| cache.spans.keys().copied().collect()); let exported_def_ids = state.with_global_cache(|cache| { cache .per_item .values() .filter_map(|per_item_cache| per_item_cache.def_id.clone()) .collect() }); let cache_map = state.with_global_cache(|cache| cache.id_table_session.table().clone()); ( exported_spans, exported_def_ids, impl_infos, result, cache_map, ) } /// Callback for extraction #[derive(Debug, Clone, Serialize)] pub(crate) struct ExtractionCallbacks { pub body_kinds: Vec, pub experimental_full_def: bool, } impl From for hax_frontend_exporter_options::Options { fn from(opts: ExtractionCallbacks) -> hax_frontend_exporter_options::Options { hax_frontend_exporter_options::Options { inline_anon_consts: true, bounds_options: hax_frontend_exporter_options::BoundsOptions { resolve_destruct: false, prune_sized: true, }, item_ref_use_concrete_impl: false, } } } impl Callbacks for ExtractionCallbacks { fn config(&mut self, config: &mut rustc_interface::interface::Config) { config.override_queries = Some(|_sess, providers| { hax_frontend_exporter::override_queries_store_body(providers); }); } fn after_expansion<'tcx>(&mut self, compiler: 
&Compiler, tcx: TyCtxt<'tcx>) -> Compilation { use std::ops::{Deref, DerefMut}; use hax_frontend_exporter::ThirBody; use hax_types::cli_options::Command; use rustc_session::config::CrateType; use serde::{Deserialize, Serialize}; use std::fs::File; use std::io::BufWriter; use std::path::PathBuf; let opts = &compiler.sess.opts; let externs: Vec<_> = opts .externs .iter() .flat_map(|(_, ext)| match &ext.location { rustc_session::config::ExternLocation::ExactPaths(set) => set .iter() .map(|cp| cp.canonicalized()) .collect::>() .into_iter(), _ => vec![].into_iter(), }) .map(|path| path.with_extension("haxmeta")) .collect(); let cg_metadata = opts.cg.metadata[0].clone(); let crate_name = opts.crate_name.clone().unwrap(); let output_dir = compiler.sess.io.output_dir.clone().unwrap(); let haxmeta_path = output_dir.join(format!("{crate_name}-{cg_metadata}.haxmeta",)); let mut file = BufWriter::new(File::create(&haxmeta_path).unwrap()); use hax_types::driver_api::{HaxMeta, with_kind_type}; with_kind_type!( self.body_kinds.clone(), || { let (spans, def_ids, impl_infos, items, cache_map) = export_crate(&self.clone().into(), tcx, self.experimental_full_def); let files: HashSet = match &items { Items::Legacy(items) => HashSet::from_iter(items .iter() .flat_map(|item| item.span.filename.to_path().map(|path| path.to_path_buf()))), Items::FullDef(items) => HashSet::from_iter(items .iter() .flat_map(|item| item.span.filename.to_path().map(|path| path.to_path_buf()))), } ; let haxmeta: HaxMeta = HaxMeta { crate_name, cg_metadata, externs, impl_infos, items, comments: files.into_iter() .flat_map(|path|hax_frontend_exporter::comments::comments_of_file(path).ok()) .flatten() .collect(), def_ids, hax_version: hax_types::HAX_VERSION.into(), }; haxmeta.write(&mut file, cache_map); } ); let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let manifest_dir = std::path::Path::new(&manifest_dir); let data = hax_types::driver_api::EmitHaxMetaMessage { manifest_dir: 
Some(manifest_dir.to_path_buf()), working_dir: Some( opts.working_dir .to_path(rustc_span::FileNameDisplayPreference::Local) .to_path_buf(), ), path: haxmeta_path, }; eprintln!( "{}{}", hax_types::driver_api::HAX_DRIVER_STDERR_PREFIX, &serde_json::to_string(&hax_types::driver_api::HaxDriverMessage::EmitHaxMeta(data)) .unwrap() ); Compilation::Stop } } ================================================ FILE: cli/driver/src/features.rs ================================================ use std::collections::HashSet; use rustc_driver::{Callbacks, Compilation}; use rustc_interface::interface; use rustc_middle::ty::TyCtxt; use rustc_span::symbol::Symbol; use crate::callbacks_wrapper::CallbacksWrapper; use serde::{Deserialize, Serialize}; /// A subset of `rustc_feature::Features` that is relevant to us #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Features { pub adt_const_params: bool, pub generic_const_exprs: bool, pub register_tool: bool, pub auto_traits: bool, pub negative_impls: bool, pub registered_tools: HashSet, } impl From<&rustc_feature::Features> for Features { fn from(rfeatures: &rustc_feature::Features) -> Self { Features { adt_const_params: rfeatures.adt_const_params(), generic_const_exprs: rfeatures.generic_const_exprs(), register_tool: rfeatures.register_tool(), auto_traits: rfeatures.auto_traits(), negative_impls: rfeatures.negative_impls(), registered_tools: HashSet::new(), } } } impl core::ops::Sub for Features { type Output = Self; fn sub(self, rhs: Self) -> Self { fn sub(x: bool, y: bool) -> bool { x & !y } Features { adt_const_params: sub(self.adt_const_params, rhs.adt_const_params), generic_const_exprs: sub(self.generic_const_exprs, rhs.generic_const_exprs), register_tool: sub(self.register_tool, rhs.register_tool), auto_traits: sub(self.auto_traits, rhs.auto_traits), negative_impls: sub(self.negative_impls, rhs.negative_impls), registered_tools: self .registered_tools .difference(&rhs.registered_tools) .cloned() .collect(), } } } impl 
Default for Features { fn default() -> Self { (&rustc_feature::Features::default()).into() } } impl Features { pub fn into_iter(&self) -> impl Iterator { [ self.adt_const_params.then_some("adt_const_params"), self.generic_const_exprs.then_some("generic_const_exprs"), self.register_tool.then_some("register_tool"), ] .into_iter() .flatten() .map(|s| format!("feature({})", s)) .chain( self.registered_tools .clone() .into_iter() .map(|tool| format!("register_tool({})", tool)), ) } /// Runs Rustc with a driver that only collects which unstable /// Rustc features are enabled pub fn detect( options: &hax_types::cli_options::ExporterOptions, rustc_args: &Vec, ) -> Self { struct CollectFeatures { features: Features, } impl Callbacks for CollectFeatures { fn after_expansion<'tcx>( &mut self, compiler: &interface::Compiler, tcx: TyCtxt<'tcx>, ) -> Compilation { self.features = tcx.features().into(); self.features.registered_tools = tcx .registered_tools(()) .iter() .map(|x| x.name.to_ident_string()) .collect(); rustc_driver::Compilation::Stop } } let mut callbacks = CollectFeatures { features: Features::default(), }; let exit_code = rustc_driver::catch_with_exit_code(|| { rustc_driver::run_compiler( rustc_args, &mut CallbacksWrapper { sub: &mut callbacks, options: options.clone(), }, ) }); if exit_code != 0 { std::process::exit(exit_code); } callbacks.features.clone() } /// Just like `detect`, but wraps the call in a subprocess so that /// we can capture `stdout` and `stderr`: we don't want the use to /// see error message from Rustc twice, or Cargo to have to parse /// Rustc messages twice. 
pub fn detect_forking() -> Self { use std::process::{Command, Stdio}; let output = Command::new(std::env::args().next().unwrap()) .args(std::env::args().skip(1)) .env("HAX_FEATURES_DETECTION_MODE", "1") .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .unwrap() .wait_with_output() .unwrap(); let stderr = &std::str::from_utf8(&output.stderr).unwrap(); serde_json::from_str(stderr).unwrap_or_else(|e| { eprintln!("{}", stderr); tracing::error!("rustc emitted an error, aborting hax custom driver."); std::process::exit(1); }) } } ================================================ FILE: cli/subcommands/Cargo.toml ================================================ [package] name = "cargo-hax" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true build = "build.rs" description = "The high assurance translation toolchain" [[bin]] path = "src/cargo_hax.rs" name = "cargo-hax" [[bin]] path = "src/json_schema.rs" name = "hax-export-json-schemas" [dependencies] serde.workspace = true serde_json.workspace = true schemars.workspace = true itertools.workspace = true clap.workspace = true paste = "1.0.11" hax-frontend-exporter.workspace = true hax-frontend-exporter-options.workspace = true hax-types.workspace = true path-clean = "1.0.1" tempfile = "3.8" which.workspace = true version_check = "0.9" rustup-toolchain = "0.1" colored.workspace = true is-terminal = "0.4.9" tiny_http = "0.12" inquire = "0.6" annotate-snippets.workspace = true serde-jsonlines = "0.5.0" prettyplease = "0.2.20" syn = { version = "2.*", features = ["full"] } cargo_metadata.workspace = true extension-traits = "1.0.1" [build-dependencies] serde.workspace = true serde_json.workspace = true hax-types.workspace = true hax-rust-engine.workspace = true schemars.workspace = true hax-frontend-exporter.workspace = true hax-lib-macros-types = { workspace = true, features = ["schemars"] } 
version_check = "0.9"
toml = "0.8"

[package.metadata.release]
pre-release-hook = [
  "dune",
  "build",
  "--root",
  "../../engine",
  "hax-engine.opam",
]

[[package.metadata.release.pre-release-replacements]]
file = "../../engine/dune-project"
search = "version [a-z0-9\\.-]+"
replace = "version {{version}}"
prerelease = true



================================================
FILE: cli/subcommands/build.rs
================================================
/// Exposes the rustc version and the pinned toolchain channel to the
/// crate via the `HAX_RUSTC_VERSION` and `HAX_TOOLCHAIN` env vars.
fn rustc_version_env_var() {
    let (_version, channel, date) = version_check::triple().unwrap();
    println!("cargo:rustc-env=HAX_RUSTC_VERSION={channel}-{date}");

    // Read the channel out of the committed `rust-toolchain.toml`.
    // NOTE(review): the turbofish was lost in extraction; restored as
    // `toml::Table`, which matches the `toml = "0.8"` build-dependency
    // and the `["toolchain"]["channel"]` indexing below.
    let rust_toolchain_file = include_str!("rust-toolchain.toml")
        .parse::<toml::Table>()
        .unwrap();
    println!(
        "cargo:rustc-env=HAX_TOOLCHAIN={}",
        rust_toolchain_file["toolchain"]["channel"]
            .as_str()
            .expect("Could not find key [toolchain.channel] in [rust-toolchain.toml]")
    );
}

/// Writes the aggregated JSON schema of hax's public types to
/// `$OUT_DIR/schema.json` so it can be embedded as a static asset.
fn json_schema_static_asset() {
    // NOTE(review): generic arguments on some of these types may have
    // been lost by the extraction (e.g. `Item<…>`); verify against the
    // upstream file before relying on this list.
    let mut schema = schemars::schema_for!((
        hax_frontend_exporter::Item,
        hax_types::cli_options::Options,
        hax_types::diagnostics::Diagnostics,
        hax_types::engine_api::EngineOptions,
        hax_types::engine_api::Output,
        hax_types::engine_api::WithDefIds,
        hax_types::engine_api::protocol::FromEngine,
        hax_types::engine_api::protocol::ToEngine,
        hax_lib_macros_types::AttrPayload,
        hax_rust_engine::ocaml_engine::Query,
        hax_rust_engine::ocaml_engine::Response,
    ));
    // Tag the schema with the hax version so consumers can detect mismatches.
    schema.schema.metadata.get_or_insert_default().id = Some(hax_types::HAX_VERSION.into());
    serde_json::to_writer(
        std::fs::File::create(format!("{}/schema.json", std::env::var("OUT_DIR").unwrap()))
            .unwrap(),
        &schema,
    )
    .unwrap();
}

/// Exposes whether the git working tree is dirty via the
/// `HAX_GIT_IS_DIRTY` env var (used to taint the hax version, see
/// `get_hax_version` in cargo_hax.rs).
fn git_dirty_env_var() {
    // Fixed: this directive was misspelled `cargo:rurun-if-env-changed`,
    // which cargo silently ignores, so the build script was never re-run
    // when the override env var changed.
    println!("cargo:rerun-if-env-changed=HAX_GIT_IS_DIRTY");
    let dirty = {
        use std::process::Command;
        // Refresh the index first so `diff-index` doesn't report stale stat
        // info as changes; its own result is deliberately ignored.
        let _ = Command::new("git")
            .args(["update-index", "-q", "--refresh"])
            .status();
        // If git can't run at all we conservatively report "not dirty"
        // (`unwrap_or(true)` on "success", then negated).
        !Command::new("git")
            .args(["diff-index", "--quiet", "HEAD", "--"])
            .status()
            .map(|status| status.success())
            .unwrap_or(true)
    };
    println!("cargo:rustc-env=HAX_GIT_IS_DIRTY={}", dirty);
}

fn main() {
    rustc_version_env_var();
    json_schema_static_asset();
    git_dirty_env_var();
}



================================================
FILE: cli/subcommands/src/cargo_hax.rs
================================================
#![feature(rustc_private)]

use annotate_snippets::{Level, Renderer};
use clap::Parser;
use colored::Colorize;
use hax_types::cli_options::*;
use hax_types::driver_api::*;
use hax_types::engine_api::*;
use is_terminal::IsTerminal;
use serde_jsonlines::BufReadExt;
use std::collections::HashMap;
use std::fs;
use std::io::BufRead;
use std::io::Write;
use std::path::PathBuf;
use std::process;

mod engine_debug_webapp;
use hax_frontend_exporter::id_table;

/// Return a toolchain argument to pass to `cargo`: when the correct nightly is
/// already present, this is None, otherwise we (1) ensure `rustup` is available
/// (2) install the nightly (3) return the toolchain
fn toolchain() -> Option<&'static str> {
    let current_rustc_version = version_check::triple()
        .map(|(_, channel, date)| format!("{channel}-{date}"))
        .unwrap_or("unknown".into());
    if env!("HAX_RUSTC_VERSION") != current_rustc_version {
        const TOOLCHAIN: &str = env!("HAX_TOOLCHAIN");
        // ensure rustup is available
        which::which("rustup").ok().unwrap_or_else(|| {
            println!("Error: {} was not found, but toolchain {} is required while the current toolchain is {}\n\nExiting.", "rustup".bold(), TOOLCHAIN.bold(), current_rustc_version.bold());
            std::process::exit(1)
        });
        // make sure the toolchain is installed
        rustup_toolchain::install(TOOLCHAIN).unwrap();
        // return the correct toolchain
        Some(TOOLCHAIN)
    } else {
        None
    }
}

/// [`get_args`] is a wrapper of `std::env::args` that strips a possible
/// cargo subcommand. This allows for a binary `BINARY` to be called
/// both with `cargo BINARY args...` and `cargo-BINARY args...`.
pub fn get_args(subcommand: &str) -> Vec { let mut args: Vec<_> = std::env::args().collect(); if args.get(1) == Some(&subcommand.to_string()) { // we face a call `cargo [subcommand]`: we need to get rid of the first argument args = args.into_iter().skip(1).collect(); } args } /// Our custom rustc driver will *not* be run in an proper terminal, /// thus logs would appear uncolored. When no `RUST_LOG_STYLE` env. var. /// is set, [`rust_log_style`] checks wether the `cargo hax` command was /// run inside a terminal. If it was inside a terminal, /// [`rust_log_style`] returns `"always"`, which is the usual default /// behavior. Otherwise we return `"never"`. When [`RUST_LOG_STYLE`] is /// set, we just return its value. const RUST_LOG_STYLE: &str = "RUST_LOG_STYLE"; fn rust_log_style() -> String { std::env::var(RUST_LOG_STYLE).unwrap_or_else(|_| { if std::io::stderr().is_terminal() { "always".to_string() } else { "never".to_string() } }) } /// We set `cfg(hax)` so that client crates can include dependencies /// or cfg-gate pieces of code. const RUSTFLAGS: &str = "RUSTFLAGS"; fn rustflags() -> String { let rustflags = std::env::var(RUSTFLAGS).unwrap_or("".into()); [rustflags, "--cfg hax".into()].join(" ") } const ENGINE_BINARY_NAME: &str = "hax-engine"; const ENGINE_BINARY_NOT_FOUND: &str = "The binary [hax-engine] was not found in your [PATH]."; /// Dynamically looks for binary [ENGINE_BINARY_NAME]. First, we /// check whether [HAX_ENGINE_BINARY] is set, and use that if it /// is. Then, we try to find [ENGINE_BINARY_NAME] in PATH. If not /// found, detect whether nodejs is available, download the JS-compiled /// engine and use it. 
#[allow(unused_variables, unreachable_code)]
fn find_hax_engine(message_format: MessageFormat) -> process::Command {
    use which::which;
    // Resolution order: explicit `HAX_ENGINE_BINARY` override, then
    // `hax-engine` in PATH, then (interactively) a node.js fallback,
    // else report `EngineNotFound` and exit with code 2.
    std::env::var("HAX_ENGINE_BINARY")
        .ok()
        .map(process::Command::new)
        .or_else(|| which(ENGINE_BINARY_NAME).ok().map(process::Command::new))
        .or_else(|| {
            which("node").ok().and_then(|_| {
                if let Ok(true) = inquire::Confirm::new(&format!(
                    "{} Should I try to download it from GitHub?",
                    ENGINE_BINARY_NOT_FOUND,
                ))
                .with_default(true)
                .prompt()
                {
                    // Everything after the `panic!` below is unreachable: the
                    // GitHub download path is not implemented yet (hence the
                    // `unreachable_code` allow on this function).
                    // NOTE(review): `cmd` is mutated via `.arg(..)` below but
                    // declared without `mut` — TODO confirm against upstream
                    // (likely `let mut cmd` there).
                    let cmd = process::Command::new("node");
                    let engine_js_path: String =
                        panic!("TODO: Downloading from GitHub is not supported yet.");
                    cmd.arg(engine_js_path);
                    Some(cmd)
                } else {
                    None
                }
            })
        })
        .unwrap_or_else(|| {
            // Local helper: a set `OPAM_SWITCH_PREFIX` suggests the user's
            // opam environment is active (the OCaml engine is installed
            // through opam).
            fn is_opam_setup_correctly() -> bool {
                std::env::var("OPAM_SWITCH_PREFIX").is_ok()
            }
            HaxMessage::EngineNotFound {
                is_opam_setup_correctly: is_opam_setup_correctly(),
            }
            .report(message_format, None);
            std::process::exit(2);
        })
}

const RUST_ENGINE_BINARY_NAME: &str = "hax-rust-engine";
const RUST_ENGINE_BINARY_NOT_FOUND: &str =
    "The binary [hax-rust-engine] was not found in your [PATH].";

// Same idea as `find_hax_engine`, but for the Rust engine: env-var
// override first, then PATH lookup; panics (via `expect`) when absent —
// no interactive fallback here.
#[allow(unused_variables, unreachable_code)]
fn find_rust_hax_engine(message_format: MessageFormat) -> process::Command {
    use which::which;
    std::env::var("HAX_RUST_ENGINE_BINARY")
        .ok()
        .map(process::Command::new)
        .or_else(|| {
            which(RUST_ENGINE_BINARY_NAME)
                .ok()
                .map(process::Command::new)
        })
        .expect(RUST_ENGINE_BINARY_NOT_FOUND)
}

use hax_types::diagnostics::message::HaxMessage;
use hax_types::diagnostics::report::ReportCtx;

// Extension trait adding reporting helpers on `HaxMessage`.
#[extension_traits::extension(trait ExtHaxMessage)]
impl HaxMessage {
    // Dispatches a message either as a JSON line (machine-readable) or as a
    // styled human-readable report; deduplicates diagnostics already seen in
    // the given `ReportCtx`. (Definition continues past this chunk.)
    fn report(self, message_format: MessageFormat, mut rctx: Option<&mut ReportCtx>) {
        if let (Some(r), HaxMessage::Diagnostic {
            diagnostic,
            ..
}) = (rctx.as_mut(), &self) && r.seen_already(diagnostic.clone()) { return; } match message_format { MessageFormat::Json => println!("{}", serde_json::to_string(&self).unwrap()), MessageFormat::Human => self.report_styled(rctx), } } fn report_styled(self, rctx: Option<&mut ReportCtx>) { let renderer = Renderer::styled(); match self { Self::Diagnostic { diagnostic, working_dir, } => { let mut _rctx = None; let rctx = rctx.unwrap_or_else(|| _rctx.get_or_insert(ReportCtx::default())); diagnostic.with_message( rctx, working_dir.as_ref().map(PathBuf::as_path), Level::Error, |msg| eprintln!("{}", renderer.render(msg)), ); } Self::EngineNotFound { is_opam_setup_correctly, } => { use colored::Colorize; let message = format!("hax: {}\n{}\n\n{} {}\n", &ENGINE_BINARY_NOT_FOUND, "Please make sure the engine is installed and is in PATH!", "Hint: With OPAM, `eval $(opam env)` is necessary for OPAM binaries to be in PATH: make sure to run `eval $(opam env)` before running `cargo hax`.".bright_black(), format!("(diagnostics: {})", if is_opam_setup_correctly { "opam seems okay ✓" } else {"opam seems not okay ❌"}).bright_black() ); let message = Level::Error.title(&message); eprintln!("{}", renderer.render(message)) } Self::ProducedFile { mut path, wrote } => { // Make path relative if possible if let Ok(current_dir) = std::env::current_dir() { if let Ok(relative) = path.strip_prefix(current_dir) { path = PathBuf::from(".").join(relative).to_path_buf(); } } let title = if wrote { format!("hax: wrote file {}", path.display()) } else { format!("hax: unchanged file {}", path.display()) }; eprintln!("{}", renderer.render(Level::Info.title(&title))) } Self::HaxEngineFailure { exit_code } => { let title = format!( "hax: {} exited with non-zero code {}", ENGINE_BINARY_NAME, exit_code, ); eprintln!("{}", renderer.render(Level::Error.title(&title))); } Self::ProfilingData(data) => { fn format_with_dot(shift: u32, n: u64) -> String { let factor = 10u64.pow(shift); format!("{}.{}", n / factor, 
n % factor) } let title = format!( "hax[profiling]: {}: {}ms, memory={}, {} item{}{}", data.context, format_with_dot(6, data.time_ns), data.memory, data.quantity, if data.quantity > 1 { "s" } else { "" }, if data.errored { " (note: this failed!)" } else { "" } ); eprintln!("{}", renderer.render(Level::Info.title(&title))); } Self::Stats { errors_per_item } => { let success_items = errors_per_item.iter().filter(|(_, n)| *n == 0).count(); let total = errors_per_item.len(); let title = format!( "hax: {}/{} items were successfully translated ({}% success rate)", success_items, total, (success_items * 100) / total ); eprintln!("{}", renderer.render(Level::Info.title(&title))); } Self::CargoBuildFailure => { let title = "hax: running `cargo build` was not successful, continuing anyway.".to_string(); eprintln!("{}", renderer.render(Level::Warning.title(&title))); } Self::WarnExperimentalBackend { backend } => { let title = format!( "hax: Experimental backend \"{}\" is work in progress.", backend ); eprintln!("{}", renderer.render(Level::Warning.title(&title))); } } } } /// Runs `hax-engine` fn run_engine( haxmeta: HaxMeta, id_table: id_table::Table, working_dir: Option, manifest_dir: Option, backend: &BackendOptions<()>, message_format: MessageFormat, ) -> bool { let engine_options = EngineOptions { hax_version: haxmeta.hax_version, backend: backend.clone(), input: haxmeta.items, impl_infos: haxmeta.impl_infos, }; let mut hax_engine_command = match &engine_options.backend.backend { Backend::Coq | Backend::Ssprove | Backend::Easycrypt | Backend::ProVerif(_) => { find_hax_engine(message_format) } Backend::Fstar(_) if matches!(&engine_options.input, Items::Legacy(_)) => { find_hax_engine(message_format) } _ => find_rust_hax_engine(message_format), }; let mut engine_subprocess = hax_engine_command .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .spawn() .inspect_err(|e| { if let std::io::ErrorKind::NotFound = e.kind() { panic!( "The binary [{}] was 
not found in your [PATH].", ENGINE_BINARY_NAME ) } }) .unwrap(); let mut error = false; let mut output = Output { diagnostics: vec![], files: vec![], debug_json: vec![], }; { let mut rctx = hax_types::diagnostics::report::ReportCtx::default(); let mut stdin = std::io::BufWriter::new( engine_subprocess .stdin .as_mut() .expect("Could not write on stdin"), ); macro_rules! send { ($value:expr) => { serde_json::to_writer(&mut stdin, $value).unwrap(); stdin.write_all(b"\n").unwrap(); stdin.flush().unwrap(); }; } id_table::WithTable::run(id_table, engine_options, |with_table| { send!(with_table); }); let out_dir = backend.output_dir.clone().unwrap_or({ let relative_path: PathBuf = [ "proofs", format!("{}", backend.backend).as_str(), "extraction", ] .iter() .collect(); manifest_dir .map(|manifest_dir| manifest_dir.join(&relative_path)) .unwrap_or(relative_path) }); let stdout = std::io::BufReader::new(engine_subprocess.stdout.take().unwrap()); let mut errors_per_item: HashMap<_, usize> = HashMap::new(); for msg in stdout.json_lines() { let msg = msg.expect( "Hax engine sent an invalid json value. 
\ This might be caused by debug messages on stdout, \ which is reserved for JSON communication with cargo-hax", ); use protocol::*; match msg { FromEngine::Exit => break, FromEngine::Diagnostic(diagnostic) => { error = true; if backend.dry_run { output.diagnostics.push(diagnostic.clone()) } if let Some(owner_id) = &diagnostic.owner_id { *errors_per_item.entry(owner_id.clone()).or_default() += 1; } HaxMessage::Diagnostic { diagnostic, working_dir: working_dir.clone(), } .report(message_format, Some(&mut rctx)); } FromEngine::File(file) => { if backend.dry_run { output.files.push(file) } else { let path = out_dir.join(&file.path); std::fs::create_dir_all(path.parent().unwrap()).unwrap(); let mut wrote = false; if fs::read_to_string(&path).as_ref().ok() != Some(&file.contents) { std::fs::write(&path, file.contents).unwrap(); wrote = true; } if let Some(mut sourcemap) = file.sourcemap.clone() { sourcemap.sourcesContent = sourcemap .sources .iter() .map(PathBuf::from) .map(|path| { if let Some(working_dir) = working_dir.as_ref() && path.is_relative() { working_dir.join(path).to_path_buf() } else { path } }) .map(|path| fs::read_to_string(path).ok()) .collect(); let f = std::fs::File::create(path.with_file_name(format!( "{}.map", path.file_name().unwrap().to_string_lossy() ))) .unwrap(); serde_json::to_writer(std::io::BufWriter::new(f), &sourcemap).unwrap() } HaxMessage::ProducedFile { path, wrote }.report(message_format, None) } } FromEngine::DebugString(debug) => output.debug_json.push(debug), FromEngine::PrettyPrintDiagnostic(diag) => { send!(&ToEngine::PrettyPrintedDiagnostic(format!("{}", diag))); } FromEngine::PrettyPrintRust(code) => { let code = match syn::parse_file(&code) { Ok(file) => match std::panic::catch_unwind(|| prettyplease::unparse(&file)) { Ok(pp) => Ok(pp), Err(err) => Err(format!("prettyplease panicked with: {:#?}", err)), }, Err(err) => Err(format!("{}", err)), }; send!(&ToEngine::PrettyPrintedRust(code)); } 
FromEngine::ProfilingData(profiling_data) => { HaxMessage::ProfilingData(profiling_data).report(message_format, None) } FromEngine::ItemProcessed(items) => { for item in items { errors_per_item.insert(item, 0); } } FromEngine::Ping => { send!(&ToEngine::Pong); } } } if backend.stats { HaxMessage::Stats { errors_per_item: errors_per_item.into_iter().collect(), } .report(message_format, None) } drop(stdin); } let exit_status = engine_subprocess.wait().unwrap(); if !exit_status.success() { HaxMessage::HaxEngineFailure { exit_code: exit_status.code().unwrap_or(-1), } .report(message_format, None); std::process::exit(1); } if backend.dry_run { serde_json::to_writer(std::io::BufWriter::new(std::io::stdout()), &output).unwrap() } if !output.debug_json.is_empty() { use DebugEngineMode; let debug_json = &format!("[{}]", output.debug_json.join(",")); match &backend.debug_engine { Some(DebugEngineMode::Interactive) => { eprintln!("----------------------------------------------"); eprintln!("----------------------------------------------"); eprintln!("----------------------------------------------"); eprintln!("-- Engine debug mode. Press CTRL+C to exit. --"); eprintln!("----------------------------------------------"); eprintln!("----------------------------------------------"); eprintln!("----------------------------------------------"); engine_debug_webapp::run(|| debug_json.clone()) } Some(DebugEngineMode::File(file)) if !backend.dry_run => { let mut file = file.open_or_stdout(); write!(file, "{debug_json}").unwrap() } _ => (), } } error } /// Uses `cargo metadata` to compute a derived target directory. fn target_dir(suffix: &str) -> PathBuf { let metadata = cargo_metadata::MetadataCommand::new().exec().unwrap(); let mut dir = metadata.target_directory; dir.push(suffix); dir.into() } /// Gets hax version: if hax is being compiled from a dirty git repo, /// then this function taints the hax version with the hash of the /// current executable. 
This makes sure cargo doesn't cache across /// different versions of hax, for more information see /// https://github.com/hacspec/hax/issues/801. fn get_hax_version() -> String { let mut version = hax_types::HAX_VERSION.to_string(); if env!("HAX_GIT_IS_DIRTY") == "true" { version += &std::env::current_exe() .ok() .and_then(|exe_path| std::fs::read(exe_path).ok()) .map(|contents| { use std::hash::{DefaultHasher, Hash, Hasher}; let mut s = DefaultHasher::new(); contents.hash(&mut s); format!("hash-exe-{}", s.finish()) }) .expect("Expect read path") } version } /// Returns the path to the custom rustc driver used by cargo-hax. /// /// This function retrieves the path of the current executable (i.e. `cargo-hax`), determines its /// parent directory, and then appends the driver executable name `"driver-hax-frontend-exporter"` to it. /// This path is used to locate the custom rustc driver that computes `haxmeta` files. fn get_hax_rustc_driver_path() -> PathBuf { std::env::current_exe() .expect("Could not get the current executable path for `cargo-hax`.") .parent().expect("The executable `cargo-hax` is supposed to be a file, which is supposed to have a parent folder.") .join("driver-hax-frontend-exporter") } /// Calls `cargo` with a custom driver which computes `haxmeta` files /// in `TARGET`. One `haxmeta` file is produced by crate. Each /// `haxmeta` file contains the full AST of one crate. 
fn compute_haxmeta_files(options: &Options) -> (Vec, i32) { let frontend_options = ExporterOptions::from(options); let mut cmd = { let mut cmd = process::Command::new("cargo"); if let Some(toolchain) = toolchain() { cmd.env("RUSTUP_TOOLCHAIN", toolchain); } cmd.args(["check".into()].iter().chain(options.cargo_flags.iter())); const COLOR_FLAG: &str = "--color"; let explicit_color_flag = options.cargo_flags.iter().any(|flag| flag == COLOR_FLAG); if !explicit_color_flag && std::io::stderr().is_terminal() { cmd.args([COLOR_FLAG, "always"]); } const MSG_FMT_FLAG: &str = "--message-format"; let explicit_msg_fmt_flag = options.cargo_flags.iter().any(|flag| flag == MSG_FMT_FLAG); if !explicit_msg_fmt_flag && options.message_format == MessageFormat::Json { cmd.args([MSG_FMT_FLAG, "json"]); } cmd.stderr(std::process::Stdio::piped()); if !options.no_custom_target_directory { cmd.env("CARGO_TARGET_DIR", target_dir("hax")); }; cmd.env("RUSTC_WORKSPACE_WRAPPER", get_hax_rustc_driver_path()) .env(RUST_LOG_STYLE, rust_log_style()) .env(RUSTFLAGS, rustflags()) .env("HAX_CARGO_CACHE_KEY", get_hax_version()) .env( ENV_VAR_OPTIONS_FRONTEND, serde_json::to_string(&frontend_options) .expect("Options could not be converted to a JSON string"), ); cmd }; let mut child = cmd.spawn().unwrap(); let haxmeta_files = { let mut haxmeta_files = vec![]; let stderr = child.stderr.take().unwrap(); let stderr = std::io::BufReader::new(stderr); for line in std::io::BufReader::new(stderr).lines() { if let Ok(line) = line { if let Some(msg) = line.strip_prefix(HAX_DRIVER_STDERR_PREFIX) { use HaxDriverMessage; let msg = serde_json::from_str(msg).unwrap(); match msg { HaxDriverMessage::EmitHaxMeta(data) => haxmeta_files.push(data), } } else { eprintln!("{}", line); } } } haxmeta_files }; let status = child .wait() .expect("`driver-hax-frontend-exporter`: could not start?"); let exit_code = if !status.success() { HaxMessage::CargoBuildFailure.report(options.message_format, None); 
status.code().unwrap_or(254) } else { 0 }; (haxmeta_files, exit_code) } /// Run the command given by the user fn run_command(options: &Options, haxmeta_files: Vec) -> bool { match options.command.clone() { Command::JSON { output_file, kind, include_extra, use_ids, .. } => { with_kind_type!(kind, || { for EmitHaxMetaMessage { path, .. } in haxmeta_files { let (haxmeta, id_table): (HaxMeta, _) = HaxMeta::read(fs::File::open(&path).unwrap()); let dest = output_file.open_or_stdout(); (if include_extra { let data = WithDefIds { def_ids: haxmeta.def_ids, impl_infos: haxmeta.impl_infos, items: haxmeta.items, comments: haxmeta.comments, }; if use_ids { id_table::WithTable::run(id_table, data, |with_table| { serde_json::to_writer(dest, with_table) }) } else { serde_json::to_writer(dest, &data) } } else { if use_ids { id_table::WithTable::run(id_table, haxmeta.items, |with_table| { serde_json::to_writer(dest, with_table) }) } else { serde_json::to_writer(dest, &haxmeta.items) } }) .unwrap() } }); false } Command::Backend(backend) => { use Backend; use hax_frontend_exporter::ThirBody as Body; if matches!(backend.backend, Backend::Easycrypt | Backend::ProVerif(..)) { HaxMessage::WarnExperimentalBackend { backend: backend.backend.clone(), } .report(options.message_format, None); } let mut error = false; for EmitHaxMetaMessage { working_dir, manifest_dir, path, } in haxmeta_files { let (mut haxmeta, id_table): (HaxMeta, _) = HaxMeta::read(fs::File::open(&path).unwrap()); if let Some(root_module) = &backend.prune_haxmeta { use hax_frontend_exporter::{DefPathItem, DisambiguatedDefPathItem, IsBody}; /// Remove every item from an `HaxMeta` whose path is not `*::::**`, where `root_module` is a string. fn prune_haxmeta(haxmeta: &mut HaxMeta, root_module: &str) { match &mut haxmeta.items { Items::Legacy(items) => { items.retain(|item| match &item.owner_id.path[..] 
{ [] => true, [ DisambiguatedDefPathItem { data: DefPathItem::TypeNs(s), disambiguator: 0, }, .., ] => s == root_module, _ => false, }) } Items::FullDef(items) => { items.retain(|item| match &item.this.contents().def_id.path[..] { [] => true, [ DisambiguatedDefPathItem { data: DefPathItem::TypeNs(s), disambiguator: 0, }, .., ] => s == root_module, _ => false, }) } }; } prune_haxmeta(&mut haxmeta, root_module.as_str()) } error = error || run_engine( haxmeta, id_table, working_dir, manifest_dir, &backend, options.message_format, ); } error } Command::Serialize { .. } => { for EmitHaxMetaMessage { path, .. } in haxmeta_files { HaxMessage::ProducedFile { path, wrote: true }.report(options.message_format, None); } false } } } fn main() { let args: Vec = get_args("hax"); let mut options = match &args[..] { [_, kw] if kw == "__json" => { serde_json::from_str(&std::env::var(ENV_VAR_OPTIONS_FULL).unwrap_or_else(|_| { panic!( "Cannot find environnement variable {}", ENV_VAR_OPTIONS_FULL ) })) .unwrap_or_else(|_| { panic!( "Invalid value for the environnement variable {}", ENV_VAR_OPTIONS_FULL ) }) } _ => Options::parse_from(args.iter()), }; options.normalize_paths(); let (haxmeta_files, exit_code) = options .haxmeta .clone() .map(|path| { ( vec![EmitHaxMetaMessage { working_dir: None, manifest_dir: None, path, }], 0, ) }) .unwrap_or_else(|| compute_haxmeta_files(&options)); let error = run_command(&options, haxmeta_files); std::process::exit(if exit_code == 0 && error { 1 } else { exit_code }) } ================================================ FILE: cli/subcommands/src/engine_debug_webapp/README.md ================================================ This folder implements a small webapp designed for viewing how a rust crate is translated by the engine, step-by-step. The engine works by phases. First, it receives a tweaked version of Rust's internal typed representation. On this representation, the engine then applies sequentially a certain number of phases. 
Each phase transports your code from a representation to another, by performing some translation or rewriting. This webapp allows you to display a rust code before and after each phase. ### How to When running `cargo hax into BACKEND`, pass the option `--debug-engine` (or `-d`) to the subcommand `into`. This will spawn a small webserver with the webapp. ================================================ FILE: cli/subcommands/src/engine_debug_webapp/mod.rs ================================================ use tiny_http::{Header, Response, Server}; fn get_server() -> Server { let mut port = std::env::var_os("HAX_DEBUGGER_PORT") .and_then(|s| s.into_string().ok()) .and_then(|s| s.parse::().ok()) .unwrap_or(8000); loop { if let Ok(server) = Server::http(format!("0.0.0.0:{}", port)) { eprintln!("Hax webapp is available on http://localhost:{:?}", port); return server; } std::thread::sleep(std::time::Duration::from_millis(300)); eprintln!("Could not listen to port {:?}, trying another", port); port += 1; } } pub fn run(get_json: impl Fn() -> String) { let server = get_server(); let ct_html = Header::from_bytes(&b"Content-Type"[..], &b"text/html"[..]).unwrap(); let ct_js = Header::from_bytes(&b"Content-Type"[..], &b"text/javascript"[..]).unwrap(); let ct_utf8 = Header::from_bytes(&b"charset"[..], &b"utf-8"[..]).unwrap(); for request in server.incoming_requests() { let response = match request.url() { "/" => Response::from_string(include_str!("static/index.html")) .with_header(ct_html.clone()) .with_header(ct_utf8.clone()), "/script.js" => Response::from_string(include_str!("static/script.js")) .with_header(ct_js.clone()) .with_header(ct_utf8.clone()), path if path.starts_with("/debug-hax-engine.json") => { Response::from_string(get_json()).with_header(ct_utf8.clone()) } _ => Response::from_string("Unknown route".to_string()).with_status_code(404), }; let _ = request.respond(response); } } ================================================ FILE: 
cli/subcommands/src/engine_debug_webapp/static/index.html ================================================
?

Help

The screen is divided into two parts:
  • on the upper part, there is the list of the different phases that were applied;
  • on the lower part, the "rustish" code of the selected phase is displayed.

You can click on a phase name in the upper part, or press the left and right arrow keys (or p and n) on your keyboard, to display the Rust code of another phase. Press r to refresh.

On the lower part, you can click on any chunk of code to show a dialog displaying its AST representation. Then, you can browse the AST and open/collapse AST nodes by clicking on them.

================================================ FILE: cli/subcommands/src/engine_debug_webapp/static/script.js ================================================
/* This webapp is written in vanilla JS as two pure components: `json` and `phases_viewer`. */

// Make a DOM node
let mk = (kind, body = [], classes = []) => {
    let e = document.createElement(kind);
    classes.forEach(cl => e.classList.add(cl));
    if (typeof body == 'string') {
        e.innerText = body;
    } else if (body instanceof Array) {
        body.forEach(sub => e.appendChild(sub));
    } else if (body instanceof HTMLElement) {
        e.appendChild(body);
    } else {
        console.error('wrong type for body', body);
    }
    return e;
};

// Depth-first search: returns the first sub-object satisfying `search`.
function findNode(o, search){
    let h = o => o instanceof Object ? (search(o) ? o : Object.values(o).map(h).find(x => x)) : null;
    return h(o);
}

// A "span" is any object carrying both a `data` and an `id` key.
let is_span = o => o instanceof Object && "data" in o && "id" in o;
// Predicate: does `o` directly contain a span with the given id?
let spanned = span_id => o => Object.values(o).some(o => is_span(o) && o["id"] === span_id);
// Bottom-up structural rewrite of a JSON tree with `f`.
let rewrite = f => o => f( o instanceof Array ? o.map(rewrite(f)) : (o instanceof Object ? Object.fromEntries(Object.entries(o).map(([k, v]) => [k, rewrite(f)(v)])) : o) );

// Pretty-printers for source locations and spans.
let loc_to_string = ({col, line}) => `${line}:${col}`;
let filename_to_string = name => ((name instanceof Array && name[0] == 'Real' && name[1]?.[0] =='LocalPath') ? name?.[1]?.[1] : null) || JSON.stringify(name);
let span_data_to_string = ({filename, lo, hi}) => `<${filename_to_string(filename)} ${loc_to_string(lo)}→${loc_to_string(hi)}>`;
let span_to_string = ({id, data}) => data.length ? data.map(span_data_to_string).join('∪') : '';
// Replace every span object in a tree by its string rendering.
let clean = rewrite(o => { if(!(o instanceof Object)) return o; if (is_span(o)) return span_to_string(o); return o; });

// Pure component: renders a JSON value as a collapsible tree and returns the
// root DOM node. Open/closed state is keyed by node path in `state.open`.
function json(json) {
    let o = JSON.parse(JSON.stringify(json));
    let root = mk('div', [], ['json-viewer']);
    let state = {
        open: new Map(),
        default_open: false,
    };
    function render_all() {
        root.replaceChildren(render(o, []));
        let expand_button = mk('button', state.default_open ? '🡒🡐' : '🡘', ['expand-all']);
        expand_button.style = `
            position: absolute;
            top: 1px;
            right: 1px;
            padding: 0 3px;
            margin: 0;
            line-height: 0;
            height: 16px;
        `;
        expand_button.onclick = () => {
            state.default_open = !state.default_open;
            render_all();
        };
        root.prepend(expand_button);
    }
    let key_of_path = path => JSON.stringify(path);
    let set_open = (path, v) => state.open.set(key_of_path(path), v);
    // Nodes shallower than 6 levels are open by default.
    let is_open = (path, def = path.length < 6) => {
        let b = state.open.get(key_of_path(path));
        return b === undefined ? (state.default_open || def) : b;
    };
    // NOTE(review): `set_open` takes two arguments; the trailing `false`
    // below is silently ignored — confirm intent.
    let swap = (path, def) => {
        set_open(path, !is_open(path, def), false);
        render_all();
    };
    // An array `[Constructor]` or `[Constructor, arg]` whose head starts
    // with an uppercase letter is rendered as an enum constructor.
    let is_constructor = o => {
        if (o instanceof Array && (o.length == 2 || o.length == 1)) {
            let [constructor, arg] = o;
            if(typeof constructor == 'string' && constructor[0] == constructor[0].toUpperCase()) {
                return true;
            }
        }
        return false;
    };
    // "Simple" values render inline and are always open.
    let is_simple = o => {
        if (o instanceof Object) {
            if (is_constructor(o)) {
                return o[1] === undefined;
            }
            return false;
        }
        return true;
    };
    function render(o, path, add_comma = true) {
        function as_code(o) {
            let code = mk('code');
            code.innerHTML = Prism.highlight(JSON.stringify(o, null, 4), Prism.languages.json, 'json');
            return add_comma ? mk('span', [code, mk('span', ',')]) : code;
        }
        if (o instanceof Object) {
            if (is_constructor(o)) {
                let [constructor, arg] = o;
                let cdiv = mk('span', constructor + (arg === undefined ? '' : ' '), ['constructor']);
                // Special case: `Concrete` names render as `crate::path::…`.
                if (constructor == "Concrete" && "crate" in arg && "path" in arg) {
                    let {crate, path} = arg;
                    return mk('span', [
                        ...[crate, ...path].map((chunk, i) => [...(i > 0 ? [mk('span', '::', ['pathsep'])] : []), mk('span', chunk, ['pathchunk'])]).flat(),
                        add_comma ? [mk('span', ',')] : []
                    ].flat());
                }
                let contents = arg === undefined ? [] : render(arg, path, false);
                if(arg !== undefined && is_constructor(arg)) contents = mk('span', [mk('span', '('), contents, mk('span', ')')]);
                let self_path = [...path, []];
                let elide = mk('span', '…');
                elide.onclick = () => swap(self_path);
                let open = arg === undefined || is_open(self_path);
                cdiv.onclick = () => swap(self_path);
                return mk('span', [
                    cdiv,
                    open ? contents : elide,
                    add_comma ? [mk('span', ',')] : []
                ].flat());
            }
            if (o instanceof Array) {
                let self_path = [...path, []];
                let open = is_open(self_path);
                let bracket = mk('code', '[');
                bracket.onclick = () => swap(self_path);
                let elide = mk('span', '…');
                elide.onclick = () => swap(self_path);
                return mk('span', [
                    bracket,
                    open ? mk('ul', o.map((v, i) => {
                        let new_path = [...path, i];
                        let simple_val = is_simple(v);
                        let open = simple_val || is_open(new_path);
                        return [mk('li', render(v, new_path), ['v'])];
                    }).flat()) : elide,
                    mk('code', ']'),
                    ...(add_comma ? [mk('span', ',')] : [])
                ]);
            }
            // Plain object: `{ key: value, … }`.
            return mk('span', [
                mk('code', '{'),
                mk('ul', Object.entries(o).map(([k, v]) => {
                    let new_path = [...path, k];
                    let simple_val = is_simple(v);
                    let open = simple_val || is_open(new_path);
                    let elide = mk('span', '…');
                    elide.onclick = () => swap(new_path, open);
                    // NOTE(review): both branches of this ternary yield
                    // 'span' — likely a leftover; confirm intent.
                    let contents = mk((simple_val || !open) ? 'span' : 'span', open ? render(v, new_path) : [elide, mk('span', ',')], ['v']);
                    let key = mk('span', [
                        mk('span', k+': '),
                    ].flat(), ['k']);
                    key.onclick = () => swap(new_path);
                    return [mk('li', [
                        key,
                        contents
                    ], ['p'])];
                }).flat(), ['o']),
                mk('code', '}'),
                ...(add_comma ? [mk('span', ',')] : [])
            ]);
        } else if (typeof o == "string" && o.length > 20) {
            // Long strings start truncated; clicking toggles the full view.
            let new_path = [...path, 'v'];
            let code = as_code(is_open(new_path, false) ? o : o.slice(0, 20)+'…');
            code.onclick = () => swap(new_path, false);
            return code;
        } else {
            return as_code(o);
        }
    };
    render_all();
    return root;
}

// Cache-busting seed for the JSON fetch; refreshed on the `r` key.
const SEED = Date.now();
// Pure component: fetches the engine debug dump, renders the phase list and
// the "rustish" code of the selected phase. Re-invoked with fresh state on
// every user interaction.
async function phases_viewer(state = {index: 0, ast_focus: null, seed: SEED}) {
    let data = await (await fetch('debug-hax-engine.json?seed='+state.seed)).json();
    // Reset to the first phase if the current index no longer exists.
    if (!data[state.index] && state.index != 0) {
        return phases_viewer({...state, index: 0});
    };
    let current = null;
    let s = '';
    let header = mk('header');
    // Render one rotated label per phase.
    for(let i in data) {
        let o = data[i];
        let w = 100;
        let active = state.index == i;
        let self = mk('div', o.name.toLowerCase().replace(/reject_not_in_/g, 'rej ~').replace(/_/g, ' '), ['header', active ? 'active' : 'inactive']);
        self.style = `width: ${w}px; font-variant: small-caps; position: relative; top: ${w}px; left: 10px; transform-origin: 0% 50%; transform: rotate(-40deg); color: ${active ? 'black' : 'gray'}; user-select: none;`;
        self.onclick = () => phases_viewer({...state, index: i, ast_focus: null});
        let container = mk('div', self, []);
        container.style = `display: inline-block; width: 18px; height: ${w}px;`;
        if(active){
            current = o;
        }
        header.appendChild(container);
    }
    let last_item = null;
    // Split the highlighted code into spans following the source map, so
    // each chunk of code can be traced back to an AST node id.
    let codes = [current.rustish].map(({string, map}) => {
        let src = string;
        let code = mk('code', [], ['language-rust']);
        code.innerHTML = Prism.highlight(src, Prism.languages.rust, 'rust');
        // Wrap bare text nodes so every chunk is clickable.
        [...code.childNodes]
            .filter(o => o.nodeType === Node.TEXT_NODE)
            .forEach(o => {
                let n = mk('span');
                n.textContent = o.textContent;
                code.replaceChild(n, o);
            });
        let mappings = map.slice(0).reverse();
        let stack = [...code.childNodes].reverse();
        let highlighted = null;
        let maybe = [];
        while(stack.length) {
            let node = stack.pop();
            let [len, id, s] = mappings.pop();
            let text = node.textContent;
            if (len > text.length) {
                // Mapping spans several DOM nodes: carry the remainder over.
                mappings.push([len - text.length, id, s.slice(text.length)]);
            } else if (len < text.length) {
                // DOM node spans several mappings: split it in two.
                let after = node.cloneNode();
                let left = text.slice(0, len);
                let right = text.slice(len);
                src = right + src;
                after.textContent = right;
                node.textContent = left;
                node.after(after);
                stack.push(after);
            }
            let active = state.ast_focus === id && text.trim();
            node.onclick = ev => {
                phases_viewer({...state, ast_focus: id});
                ev.stopPropagation();
            };
            if (active) {
                highlighted = highlighted || [];
                highlighted.push(...maybe);
                maybe = [];
                active && node.classList.add('active');
                last_item = node;
            } else if (highlighted) {
                maybe.push(node);
            }
        }
        (highlighted||[]).map(o => o.classList.add('in-range'));
        return code;
    });
    let pre = mk('pre', codes);
    let main = mk('main', [header, pre]);
    if(last_item) {
        // Show the AST of the focused span in a dialog anchored after it.
        let ast = clean(findNode(current.items, spanned(state.ast_focus)));
        let dialog = mk('dialog', json(ast));
        dialog.setAttribute('open', true);
        dialog.onclick = ev => {
            ev.stopPropagation();
        };
        main.onclick = ev => phases_viewer({...state, ast_focus: null});
        last_item.after(dialog);
    }
    let app_root = document.querySelector('#app');
    app_root.childNodes.forEach(old => old.remove());
    app_root.appendChild(main);
    // Keyboard navigation: n/→ next phase, p/← previous, r re-fetch.
    document.body.onkeydown = (e) => {
        let key = ({'ArrowRight': 'n', 'ArrowLeft': 'p'})[e.key] || e.key;
        (({
            'n': () => phases_viewer({...state, index: state.index + 1, ast_focus: null}),
            'p': () => phases_viewer({...state, index: state.index ? state.index - 1 : data.length - 1, ast_focus: null}),
            'r': () => phases_viewer({...state, seed: Date.now(), ast_focus: null}),
        })[key] || Function)();
    };
}
phases_viewer();

================================================
FILE: cli/subcommands/src/json_schema.rs
================================================
const JSON_SCHEMA: &str = include_str!(concat!(env!("OUT_DIR"), "/schema.json"));

fn main() {
    println!("{}", JSON_SCHEMA);
}

================================================
FILE: deny.toml
================================================
[licenses]
unused-allowed-license = "allow"
allow = [
    "Apache-2.0",
    "MIT",
    "Unicode-DFS-2016",
    "MPL-2.0",
    # Licences used in the OCaml dependencies in the engine
    "BSD-3-Clause",
    "LGPL-2.1",
    "LGPL-2.0",
    "ISC",
]

================================================
FILE: dependabot.yml
================================================
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

================================================
FILE: docs/.test/.gitignore
================================================
test-results/
generated.json
playwright-report

================================================
FILE: docs/.test/global-setup.ts
================================================
// Global setup: writes `generated.json`, the list of pages
import { BrowserContext, chromium, expect, FullConfig } from '@playwright/test';
import fs from 'fs';

export type CrawlPage = { url: string; has_playground: boolean, links: string[] };

const DOCS_HOST = 'localhost:8000';

const skip_url = (s: string) => s.includes("/livereload");

/// Run jobs in parallel.
/// `job` runs a new job, returns true if more jobs are to be run.
async function parallel( job: () => Promise, maxJobs = 10 ): Promise { const workers: { promise?: Promise, free: boolean }[] = (new Array(maxJobs)).fill(0).map(_ => ({ free: true })); let spawn = (self: { promise?: Promise, free: boolean }) => { self.promise = (async () => { self.free = false; let cont = await job(); self.free = true; cont && control() })(); }; let control = () => workers.filter(w => w.free).forEach(spawn); control(); let active_workers: Promise[] = []; do { active_workers = workers.filter(w => !w.free).map(w => w.promise).filter(p => p !== undefined); await Promise.all(active_workers); } while (active_workers.length > 0) } /// Crawl the documentation const crawl = async (baseURL: string, context: BrowserContext): Promise => { if (!baseURL) throw new Error('Base URL not configured.'); const pages: CrawlPage[] = []; const visited = new Set(); const queue: string[] = [new URL('/', baseURL).toString()]; await parallel(async () => { const url = queue.shift(); if (url === undefined || visited.has(url)) return false; visited.add(url); const page = await context.newPage(); const res = await page.goto(url, { waitUntil: 'domcontentloaded' }); await page.waitForLoadState('networkidle').catch(() => { }); const status = res?.status() ?? 
0; expect(status, `Failed to GET ${url}`).toBeGreaterThanOrEqual(200); expect(status, `Failed to GET ${url}`).toBeLessThan(400); const has_playground = (await page.content()).includes('md-hax-playground'); const links = await page.$$eval('a[href]', as => as.map(a => (a as HTMLAnchorElement).getAttribute('href')!)); pages.push({ url, has_playground, links }); for (const href of links) { if (!href || href.startsWith('mailto:') || href.startsWith('tel:') || href.startsWith('javascript:')) continue; const absolute = new URL(href, url); const sameHost = absolute.host === DOCS_HOST; if (!sameHost) continue; absolute.hash = ''; const absStr = absolute.toString(); if (!visited.has(absStr) && !skip_url(absStr) && !queue.includes(absStr)) queue.push(absStr); } page.close(); return true; }); return pages; } async function globalSetup(config: FullConfig) { const browser = await chromium.launch(); let PAGES = await crawl('http://localhost:8000', await browser.newContext()); await browser.close(); fs.writeFileSync('generated.json', JSON.stringify(PAGES, null, 2), 'utf-8'); } export default globalSetup; ================================================ FILE: docs/.test/package.json ================================================ { "name": "docs-ci-checks", "private": true, "type": "module", "scripts": { "test": "playwright test --reporter=list" }, "devDependencies": { "@playwright/test": "^1.56.1", "http-server": "^14.1.1" } } ================================================ FILE: docs/.test/playwright.config.ts ================================================ import { defineConfig } from '@playwright/test'; export default defineConfig({ globalSetup: './global-setup.ts', timeout: 600_000, expect: { timeout: 300_000 }, reporter: [['list']], use: { baseURL: 'http://localhost:8000', serviceWorkers: 'block', trace: 'on-first-retry', }, webServer: { command: 'nix run ../..#serve-docs', port: 8000, reuseExistingServer: !process.env.CI, timeout: 5 * 60 * 1000, stderr: 'ignore', stdout: 
'ignore', } }); ================================================ FILE: docs/.test/tests/docs.spec.ts ================================================ import { test, expect, request, BrowserContext, Page } from '@playwright/test'; import { CrawlPage } from '../global-setup'; import fs from 'fs'; const cssEscape = (s: string) => s.replace(/^[0-9-]|[^a-zA-Z0-9_-]/g, (ch, idx) => { const code = ch.codePointAt(0)!.toString(16).toUpperCase(); return `\\${code} `; }); const PAGES = JSON.parse(fs.readFileSync('generated.json', 'utf-8')) as CrawlPage[]; async function tryNavigateTo(page: Page, url: string) { const response = await page.request.get(url, { headers: { accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" } }); const status = response.status(); const contentType = response.headers()['content-type'] || ''; if (!contentType.includes('text/html')) return { status, html: false }; try { await page.goto(url, { waitUntil: 'domcontentloaded' }); return { status, html: true }; } catch (e) { return { status, html: false }; } } // test.describe('Documentation consistency checks', () => { let run_tests = () => { let tried = new Set(); let links_origins: Map> = new Map(); let links: Set = new Set(); for (let page of PAGES) { for (let link of page.links) { let absolute_link = (new URL(link, page.url)).toString(); links.add(absolute_link); links_origins.has(absolute_link) || links_origins.set(absolute_link, new Set()); links_origins.get(absolute_link)!.add(page.url); } } for (let link of links) { if (link.includes("hax-playground.cryspen.com/") || link.includes("#__codelineno")) continue; test('Check if link is live: ' + link, (async ({ page, baseURL }, testInfo) => { await testInfo.attach('Parent pages', { body: [...(links_origins.get(link) || new Set())].join('\n'), contentType: 'text/plain', }); let other_page = await page.context().newPage(); let { status, html } = await tryNavigateTo(other_page, link.toString()); let anti_bot_codes = [401, 403, 429, 
451, 999].includes(status); expect(anti_bot_codes || (status >= 200 && status < 300)).toBeTruthy() let hash = (new URL(link)).hash?.replace(/^#/, ''); if (hash && !link.includes("")) await test.step("Try detection of fragment `" + hash + '`', async () => { let el = other_page.locator('[id="' + cssEscape(hash) + '"]'); if (await el.count() === 0) console.warn('⚠️ Could not find anchor in a page ', link); }); })); } for (let p of PAGES) { if (!p.has_playground) continue; test('Test playgrounds in `' + p.url + '`', async ({ page, baseURL }, testInfo) => { await page.goto(p.url, { waitUntil: 'domcontentloaded' }); const playableLocators = page.locator('.playable:has(.md-hax-playground .fa-check)'); const count = await playableLocators.count(); for (let i = 0; i < count; i++) { await test.step(`Try playground #${i}`, async () => { const playable = playableLocators.nth(i); const contents = await playable.locator(".cm-content").first().innerText(); await testInfo.attach('Code snippet contents', { body: contents, contentType: 'text/plain', }); const checkBtn = playable.locator('.md-hax-playground .fa-check'); await checkBtn.first().click(); let classes = ''; let hasSuccess = false; let hasFailure = false; for (let i = 0; i < 60; i++) { classes = (await playable.getAttribute('class')) || ''; hasSuccess = classes.includes('state-success'); hasFailure = classes.includes('state-failure'); if (hasSuccess || hasFailure) break; await new Promise(r => setTimeout(r, 1000)); } expect(hasSuccess || hasFailure, "At least class `state-success` or `state-failure` should have been attached; none detected.").toBeTruthy(); const expectFailure = classes.includes('expect-failure'); if (expectFailure) { expect(hasFailure, '`.state-failure` should be set (the snippet is tagged with a class `expect-failure`), but `.state-success` was detected').toBeTruthy(); } else { expect(hasSuccess, '`.state-success` should be set, but `.state-failure` was detected').toBeTruthy(); } }); } }); } }; 
run_tests(); ================================================ FILE: docs/RFCs/.nav.yml ================================================ hide: true ================================================ FILE: docs/RFCs/0000-template.md ================================================ --- title: 0000 template tags: - Accepted --- ``` --- tags: Draft | Proposed | Rejected | Accepted | Superseded | Deprecated --- ``` | Authors | :material-account: Franziskus Kiefer | | :------------ | :---------------------------------------------------------- | | Last update | :material-calendar: Jan 1 2025 | | Extends | :material-directions-fork: [xxxx-other](./0000-template.md) | | Superseded by | :material-cancel: [xxxx-other](./0000-template.md) | | Based on | :material-forward: [xxxx-other](./0000-template.md) | ## Context What is the issue that we're seeing that is motivating this decision or change? ## Assumptions Anything that could cause problems if untrue now or later ## Decision What is the change that we're proposing and/or doing? ### Considered Options What other options are there and why didn’t you pick them? ## Risks Anything that could cause malfunction, delay, or other negative impacts ## Consequences What becomes easier or more difficult to do because of this change? ## Additional Information Provide additional evidence/confidence for the decision outcome Links to other decisions and resources might here appear as well. 
================================================ FILE: docs/RFCs/index.md ================================================ # RFCs ================================================ FILE: docs/blog/.authors.yml ================================================ authors: franziskus: name: Franziskus Kiefer description: Creator avatar: /blog/avatars/franziskus.jpg lucas: name: Lucas Franceschino description: Creator avatar: /blog/avatars/lucas.jpg clement: name: Clement Blaudeau description: Engineer avatar: /blog/avatars/clement.jpg maxime: name: Maxime Buyse description: Engineer avatar: /blog/avatars/maxime.png alex: name: Alexander Bentkamp description: Engineer avatar: /blog/avatars/alex.jpg ================================================ FILE: docs/blog/index.md ================================================ --- weight: 4 --- # Blog The hax blog. Here you find announcement, development news, and more. ================================================ FILE: docs/blog/posts/announce-v0.1.md ================================================ --- authors: - franziskus - lucas title: "A new chapter" date: 2025-01-21 --- # Hax Takes Flight: Announcing Our First Release and New Home at Cryspen! We're thrilled to announce that hax is entering a new era of stability and growth with the launch of our new website, a fresh start at Cryspen, and our first official release, [v0.1.0](https://github.com/cryspen/hax/releases/tag/cargo-hax-v0.1.0)! After an intense period of research and development, hax is transitioning to a more stable phase. To support this evolution, we've moved the repository to its new home within the Cryspen GitHub organization. This change streamlines our processes and clarifies project ownership while maintaining hax's open-source nature. Cryspen is responsible for driving hax forward, but we enthusiastically welcome contributions from the community, and continue working closely with the team of existing contributors! 
This move also marks our shift to a release-driven development model, culminating in our first official release, v0.1.0. While we anticipate some breaking changes in the lead-up to v1.0, detailed release notes will clearly outline any backward compatibility issues. ### The state of hax Hax currently boasts three actively used backends: ([F\*](https://fstar-lang.org/), [Rocq](https://rocq-prover.org/) and [SSProve](https://github.com/SSProve/ssprove)). While Cryspen primarily focuses on the F\* backend, [Bas Spitters](https://www.au.dk/en/spitters@cs.au.dk) and his team at the University of Aarhus are actively developing and utilizing the Rocq and SSProve backends. Cryspen also supports an experimental backend for [ProVerif](https://bblanche.gitlabpages.inria.fr/proverif/). With this initial release, hax can process a significant subset of Rust code. Both the frontend, which extracts a JSON AST from the Rust compiler, and the engine, which lowers the code to the backends, have undergone major improvements and stabilization throughout 2024. Our new website provides a central hub for all things hax. Users can explore the [manual](../../manual/index.md), experiment with the interactive [hax playground](https://hax-playground.cryspen.com/), and delve into a diverse collection of [examples](https://github.com/cryspen/hax/tree/main/examples) showcasing hax's capabilities. We will work on improving the manual and developer documentation over the next few months. #### Hax in Action Over the past year, hax has proven its versatility in various projects: - [Verifying Bertie](https://cryspen.com/post/hax-pv/): A TLS 1.3 implementation, verified with the ProVerif backend - [Verifying ML-KEM](https://cryspen.com/post/ml-kem-verification): A post quantum cryptographic algorithm verified with the F\* backend - [Verifying Smart Contracts](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf): Leveraging the Rocq backend for enhanced security verification. 
#### The Road Ahead While hax can handle a substantial portion of Rust code, certain limitations remain. Features like Generic Associated Types (GATs), some Rust nightly features, specific loop and pattern structures, and a range of mutations are not yet supported. ??? hint "Detailed list of unsupported features" Here's some content. **GATs** Support for Generic Associated Types (GATs) in the frontend is under consideration ([Issue #915](https://github.com/cryspen/hax/issues/915)) **Rust nightly features** A full list of unsupported Rust nightly features can be found with the [unsupported-rust label](https://github.com/cryspen/hax/issues?q=is%3Aissue%20state%3Aopen%20nightly%20label%3Aunsupported-rust). **Pattern** Some expressive Rust patterns are not supported yet in the hax engine. For example, [range patterns](https://github.com/cryspen/hax/issues/925) such as `0..12`, [`as` patterns](https://github.com/cryspen/hax/issues/833) such as `x @ Option(_)` or [array or slice patterns](https://github.com/cryspen/hax/issues/804) such as `[head, ..tail]` are not supported. **Mutation** - Mutations inside closures are not supported ([Issue #1060](https://github.com/cryspen/hax/issues/1060)) - Re-borrowing mutable refferences is not allowed ([Issue #420](https://github.com/cryspen/hax/issues/420)) - Implicit reborrowing of mutable references is not supported ([Issue #419](https://github.com/cryspen/hax/issues/419)) - User-defined functions cannot return `&mut`s ([Issue #418](https://github.com/cryspen/hax/issues/418)) - Calling `&mut`-returning functions is not allowed in general ([Issue #418](https://github.com/cryspen/hax/issues/418), [Issue #494](https://github.com/cryspen/hax/issues/494) and [Issue #491](https://github.com/cryspen/hax/issues/491)) - Enum variants cannot be mutated ([Issue #493](https://github.com/cryspen/hax/issues/493)) **Loops** - Unconditional loops `loop {...}` ([Issue #124](https://github.com/cryspen/hax/issues/124)) - While let `while let .. 
= .. {}` ([Issue #113](https://github.com/cryspen/hax/issues/113)) - Loops without side effect ([Issue #405](https://github.com/cryspen/hax/issues/405)) **`const` inline blocks** Inline `const` blocks are not supported yet. [Issue #923](https://github.com/cryspen/hax/issues/923) ### Parting Thoughts This is an exciting time for hax! With our new home at Cryspen, a dedicated release model, and a growing community, we're confident that hax will continue to mature and empower developers to build secure and reliable software. We encourage you to explore the new hax website, dive into the documentation, and experiment with the playground. Join us on this journey! Contribute to the project, share your feedback, and help us shape the future of Rust verification. ================================================ FILE: docs/blog/posts/hax-for-everyone.md ================================================ --- authors: - maxime title: "Hax for everyone" date: 2025-02-25 --- # Trying to make hax usable in more contexts The hax toolchain has been successfully used to formally verify our cryptographic implementations for [ML-KEM](https://cryspen.com/post/ml-kem-verification/),[Bertie](https://cryspen.com/post/hax-pv/) and more. All these projects are developed with formal verification (using hax) in mind, and use a limited subset of Rust features. However, hax is under constant development and the improvements we bring are targeted at making it more usable. With these improvements we want to bring hax to a new kind of projects that don’t have restrictions on the Rust patterns they use. We want hax to be usable in this context with minimal modifications to the code (ideally no modification at all). An example of such a project is the verification of [sandwich](https://github.com/sandbox-quantum/sandwich), a high-level cryptographic library built by [SandboxAQ](https://cryspen.com/post/hax-sandbox/). 
This project revealed the weaknesses of hax in this context which brought us to implement some improvements that will be presented in this blog post. ## Challenges The projects that use hax from the beginning can limit themselves to the subset of Rust supported by hax. Applying hax to a pre-existing project means that it may use various Rust features that are probably not supported yet in hax. The challenge is then to identify which features to prioritize for support in hax (and adding support is yet another challenge), and which features have no short-term plan for support. For the latter we need to abstract out the code (if it is not relevant for proofs) or rewrite it (when possible; ideally we try to avoid this). Having external users encourages us even more to make hax an easily-usable and well-documented tool. ## Frontend improvements The hax frontend is mostly relying on rustc and cargo to extract intermediary representations of a Rust crate. It is supposed to produce a result for any Rust crate (restrictions on the available Rust features come later in the toolchain). However the information given by rustc is sometimes partial or lacks some parts that are needed for our translations. A crucial example of this is trait resolution as we need to know the trait derivation that is used by each call of a trait method. This is a part of the hax frontend that has proven tricky and still had many bugs a few months ago. At that time, launching it on a somehow complicated crate had big chances of resulting in a crash. As part of our effort to improve the usability of hax, many of these bugs have now been fixed (in collaboration with our colleagues at Inria). This is a big step forward, since even for a project that looks small and simple, we need to handle all of its dependencies which are usually more problematic. According to our tests on the top 500 crates (by number of downloads on crates.io), hax frontend succeeds without crashing or timing out on more than 99%. 
However we are still looking for a better way to measure the coverage of the Rust features, and identifying the situations where we can still improve. ## Recursive Bundles Rust code is organized in modules, where modules can be seen as a namespacing system. When translating modules to our backends (F*, Coq, ProVerif) we need to generate the corresponding module-like abstraction in the backend, which typically works quite differently. In particular our backends require the module dependency graph to be acyclic while Rust has no such restriction. It is quite common in Rust to make use of this and create cyclic dependencies between modules which means it is necessary for us to have a solution for this problem. Here is an example (you can open it in the hax playground to check the code hax generates out of it): ```rust pub struct Error(); mod private { pub(crate) fn f() -> Result<(), super::Error> { Ok(()) } } pub fn user_f() -> Result<(), Error> { private::f() } ``` [Open this code snippet in the hax playground](https://hax-playground.cryspen.com/#fstar/b7fe08cccd/gist=fcb9cb9854c69ee6e2788648a380ff79) In this example there is a dependency between the top level module and the `private` module. Our solution to break these cycles is simply to put the content of the cyclic modules in a single module (that we call bundle), and then re-exposing the items in their original modules. This solution is not perfect because it changes the architecture of the generated code compared to the original code, and it could be improved by minimizing the content of the bundles (choosing a set of definitions to break the cycle instead of the full content of the modules). But so far it has proven very useful as it removes a big limitation on the Rust we support. ## Opaque items Large projects usually contain code that we don’t support yet but we still want to reason about the rest of the project and have an abstract model (axiomatization) for the parts that we don’t support. 
We need to control which parts we want to fully extract and which parts we extract only as opaque items. The command-line options offered by the hax toolchain provide a solution to this, but they only allow to choose at the model level, which is inconvenient for large projects. To make this more practical we added another way to specify inside the source with the attribute `hax_lib::opaque` makes an item axiomatized. There is still the problem of complicated `-i` flags which will be solved in the future by having the corresponding information in configuration files. ## Control flow rewriting without monads including inside loops Translating imperative code to functional backends for verification implies some handling of side effects and transformation of control flow. A classic solution for this is to have a monadic encoding state which results in generated code that can be hard to read (and to reason about). This is the solution that was implemented (with some bugs) in hax but we decided to replace it with a solution without monads. The code we produce is simpler to read, but the main limitation is that there is code duplication which in some cases can lead to an extracted code that is exponentially bigger than the source. Here is a simple example of this: ```rust fn f() -> i32{ if true { if true { return 1 } } 3 } ``` [Open this code snippet in the hax playground](https://hax-playground.cryspen.com/#fstar/b7fe08cccd/gist=078ca6da8dad17541533bb5a0724784b) The F* code extracted from this example is the following: ```ocaml let f (_: Prims.unit) : i32 = if true then if true then mk_i32 1 else mk_i32 3 else mk_i32 3 ``` Here the semantics is preserved, but adding the `else` branches results in a duplication of the return value `3`. Our idea to improve in the future is to revive the monadic version, but use it only if the duplication is too big. Support for control flow (`return`, `break` and `continue`) in loops has been added as well. 
In hax, loops are translated as a functional fold in which the accumulator keeps track of the modification of the environment done by the effectful operations in the source. This extension relies on a monadic encoding of the loop result, that is passed in the accumulator to deal with the specific cases of `return`, `break` and `continue`. ## Items sorting A quality of life feature that we have been lacking for a long time is trying to respect, as much as possible, the same order of items in the generated code compared to the source. We need to modify the order because (as for modules), Rust allows items to be defined in any order, while our backends need items to be defined after the other items they depend on (except for mutual recursion). We rely on a graph topological sort to ensure this property, and now use a modified version of the stable topological sort provided by ocamlgraph, which produces an order that respects the dependencies, but in the absence of constraints tries respects the order of the source. ## Conclusion Bringing hax to a new kind of project revealed the gap needed for it to be usable, but thanks to our active work, we have made great progress towards this goal. Even though there is still much more to do, this has allowed us to get results in these new applications of hax (stay tuned for more details about that!). ================================================ FILE: docs/blog/posts/lucas-departure.md ================================================ --- authors: - lucas title: "My Departure from hax and Cryspen" date: 2026-01-14 --- Today, I want to share an update on both my professional path and my role in hax. I decided to leave Cryspen, and as a result, I will also be stepping away from hax. ## Looking Back Back in September 2023, while I was working at Inria, I started working with Karthikeyan on [Hacspec](https://github.com/hacspec/hacspec). 
Hacspec was a domain-specific language embedded in Rust's syntax, aimed at cryptography specification and verification. It relied on the surface AST (abstract syntax tree) of the Rust compiler (rustc). Using such an early representation in the compiler pipeline gave us very limited information: no types, no name resolution -- essentially just syntax. Both technically and in terms of intent, Hacspec had limitations. In December 2023, we decided to take a fresh start and build a new tool from the ground up: hax. Designing and implementing hax has been a fun adventure. I had the constraint to write the "compiler" part of hax in OCaml. That led me to design hax in two main parts: - **The frontend**: hooks into rustc and dumps enhanced ASTs, inlining a large amount of semantic information about Rust programs. The frontend produces a comprehensive, complete, and easy-to-consume AST that other tools can build upon. It grew a lot, notably thanks to our collaboration with Inria (for Charon and Aeneas), and especially thanks to [Nadrieril](https://github.com/Nadrieril), with whom it has been a great pleasure and a lot of fun to work. - **The engine**: an OCaml binary that reads our frontend's Rust AST, applies a sequence of translation phases, and finally outputs F*, Coq, etc. For a full year at Inria and then two years at Cryspen, I was the main developer of hax. Throughout this time, I greatly enjoyed working with Karthik; we discussed many aspects of hax countless times: its design, its applications, the workflows, and more. Those were great conversations, essential to the development of hax. Leading the development of hax was a great and intense experience. I had to engineer a pretty large piece of software, design interesting semantic compiler passes, build debugging tools, do DevOps work, build a playground, and more. I also learned how complicated human interactions can be. ## Working at Cryspen During my time at Cryspen, the proofs and tools team grew a lot. 
When I arrived, it was Karthik and me. Then [Maxime](https://cryspen.com/post/welcome_maxime/) joined towards the end of my first year (in August 2024). In May the next year [Clément](https://cryspen.com/post/welcome_clement/) arrived, and very recently, in November 2025, [Alex](https://cryspen.com/post/welcome_alex/) arrived. I really enjoyed working with everyone in the proofs and tools team at Cryspen! Beyond the proofs and tools team, it was also great to work with others at Cryspen: Jan, Jonas, Clara. ## The Future After three years working on hax, I decided it was time for me to leave. Hax is a bit my baby, so that was a very hard decision to make. That said, the rest of the proofs and tools team at Cryspen will continue maintaining, improving, and applying hax to cool real-world Rust projects! They are already working on the new Lean backend, on better libraries, and on very exciting applications! I'm proud of what hax has become, and I hope it will have a bright future! If hax speaks to you, consider following the project, trying it out, or contributing. ================================================ FILE: docs/blog/posts/reworking-names/reworking-names.md ================================================ --- authors: - lucas title: "Redesigning Global Identifiers in hax" date: 2025-04-01 --- # Redesigning Global Identifiers in hax A careful treatment of identifiers lies at the heart of all code analysis frameworks, and we hope our experience here proves useful to others. In Rust, global identifier serves to uniquely locate uniquely an item: for instance `::serde::ser::Serialize` designates the `Serialize` trait from the Serde library. In constrat, local identifiers are relative, limited to the scope in which they are declared. ## Global Identifiers from the Rust Compiler Initially, hax assumed that all identifiers originated exclusively from Rust. While this assumption held in the early stages, it was eventually challenged as the system grew[^1]. 
As hax evolved, new requirements emerged, prompting the engine to generate identifiers internally: - **Trait pre- and post-conditions:** in hax, these are explicitly represented as concrete methods within typeclasses. Conversely, in Rust, these conditions exist only as anonymous standalone functions. - **Explicit enum cast operations:** enum casts are primitive operations in Rust, but hax treats these casts as specialized operations, assigning distinct identifiers to them. - **Cross-module mutually recursive item bundles:** these bundles[^2] are internally introduced by hax, necessitating the generation of unique identifiers to prevent naming conflicts. [^1]: See [PR #935](https://github.com/cryspen/hax/pull/935), [PR #211](https://github.com/cryspen/hax/pull/211) or [PR #571](https://github.com/cryspen/hax/pull/571) for examples of such new features. [^2]: Rust supports cross-module mutual recursion without enforcing declaration order, an uncommon feature among programming languages. In contrast, most of our backends require some form of forward declaration. To bridge this gap and accommodate Rust’s permissive namespacing, we group related items into bundles and reorder them to eliminate cross-module recursion. Moreover, the previous identifier system lacked detailed metadata, such as the type of identifier (struct, function, type, etc.), complicating identifier rendering for backend tools. ## Issues with the Previous Design {#issues-with-previous-design} Initially, identifiers were represented using slightly modified Rust `DefId`s accompanied by minimal metadata indicating the identifier's kind. This approach presumed that hax would never alter these `DefId`s but merely use those directly produced by the Rust compiler. This assumption was quickly challenged. The need to prefix or suffix identifiers emerged early, but the introduction of new internal modules completely disrupted the assumption. 
Identifiers had to be relocated across modules, representing a significant departure from the original design. As the API for manipulating identifiers grew increasingly permissive and transparent, the foundational assumption—that `DefId`s were unique, consistent, and Rust-generated—was entirely undermined. In consequence, rendering names for the backends became a complicated, error-prone process. This resulted in numerous bugs in identifier rendering in backend outputs, leading to at least 16 documented issues ([#1135](https://github.com/cryspen/hax/issues/1135)). As an example, the rendering process made distinguishing the two functions `c` very difficult in the following snippet of code. This resulted in a bug (see [\#1136](https://github.com/cryspen/hax/issues/1136)) where hax would extract F\* code with two functions both named `c` in the same module `Mycrate.A.B`! ```rust mod a { mod b { fn c() { ... } } } fn a() { mod b { fn c() { ... } } } ``` ## Our New Approach The frontend has been enhanced to explicitly indicate the kind of each identifier, clarifying whether it represents a function, an associated type, a constant, etc. Additionally, it now provides detailed parent information, making the origin of identifiers more transparent. Alongside these improvements, we have redesigned our internal engine's identifier representation, introducing a layered structure where each layer addresses a distinct aspect. 1. **Raw Rust Identifiers:** using Rust's `DefId` type, generated from Rust to OCaml, with minor normalization to address potential duplicate references. These identifiers are immutable and cannot be arbitrarily created or altered. 2. **Explicit_def_id:** addresses Rust's ambiguity between a struct constructor and the type itself, explicitly distinguishing identifiers belonging to types from those belonging to values, enhancing clarity for backend translation. 3. 
**Concrete_ident:** built upon `Explicit_def_id`, this layer adds capabilities for generating fresh module names or adding hygienic suffixes. It ensures identifier uniqueness and declares constraints clearly when creating new names or namespaces. ### Simplified Identifier Views Rust's namespace structure is highly flexible, allowing various forms of nesting, such as types within functions, functions within constants, and more. Broadly, there are two kinds of nesting in Rust. Consider the following snippet: ```rust mod a { impl MyTrait for MyType { fn assoc_fn() { struct LocalStruct { field: u8, }; } } } ``` In this example, the user has intentionally placed `LocalStruct` within the method `assoc_fn`, which itself resides inside the module `a`. This is an instance of **user-driven nesting**, where the developer freely organizes elements within the code for clarity, convenience, or structural preference. At the same time, we observe another form of nesting: `field` is contained within `LocalStruct`, and `assoc_fn` is enclosed within the `impl` block implementing `MyTrait` for `MyType`. This represents **hierarchical nesting**, which is dictated by the Rust language itself. Unlike user-driven nesting, hierarchical relationships are inherent to Rust's type system: a field **must** belong to a struct or an enum variant, and a method **must** exist within an impl block. The following diagram shows how these hierarchical relationships are structured. ![](name-example.excalidraw.png) Distinguishing between these two types of nesting is crucial when rendering names. Hierarchical nesting often requires special handling in backends due to its structural constraints, whereas user-driven nesting primarily serves readability and organization. To manage this effectively, we introduced a hierarchical view for identifiers. Instead of handling Rust's deeply nested identifier paths as-is, we transform them into structured, relational representations. 
This approach simplifies backend processing, minimizes namespace conflicts, and ensures better compatibility with backend language constraints. Looking back at our [`a::b::c` example](./reworking-names.md#issues-with-previous-design), this hierarchical view makes the problem very easy, since modules and functions are user nesting. ## Conclusion: Say Goodbye to Naming Issues (Almost)! This comprehensive redesign of identifier representation and handling has resolved most previously identified naming issues and significantly enhanced the expressiveness and robustness of backend identifier rendering in hax. Check out the pull request [#1199](https://github.com/cryspen/hax/pull/1199) on the GitHub repository of hax for more details! We are confident that this enhanced representation is sufficiently robust and flexible to accommodate future developments and evolving project requirements. ================================================ FILE: docs/blog/posts/rust-gcd-1.md ================================================ --- authors: - alex title: "Verifying a real world Rust crate" date: 2025-12-08 --- # Verifying a real world Rust crate In this post, we are going to use hax and F\* to verify a small real world Rust crate. Then, we will try other verification tools (Kani, Verus, Aeneas) to the same thing. The Rust crate [gcd](https://crates.io/crates/gcd) by Corey Farwell that we are going to verify implements functions to compute the greatest common divisor of two integers. We will focus on proving termination and panic freedom for now. In a future post, we will look at functional correctness. We have forked the repository [here](https://github.com/cryspen/rust-gcd). 
The results of this tutorial can be found in different branches of this fork: * [hax_fstar](https://github.com/cryspen/rust-gcd/tree/hax_fstar) * [kani](https://github.com/cryspen/rust-gcd/tree/kani) * [aeneas](https://github.com/cryspen/rust-gcd/tree/aeneas) * [verus](https://github.com/cryspen/rust-gcd/tree/verus) ## Preparation First, install Hax and F\*: * [Install Hax](https://github.com/cryspen/hax?tab=readme-ov-file#installation) (We are using commit `0334b38`, so after `git clone git@github.com:cryspen/hax.git && cd hax`, run `git checkout 0334b38`) * [Install F\*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md) To get started, we clone the repo of the Rust crate and switch to the commit that we use in this post (`8fb3a59`): ``` git clone git@github.com:frewsxcv/rust-gcd.git && cd rust-gcd git checkout 8fb3a59 ``` We add hax-lib as a dependency, which will allow us to make annotations in the Rust code: ``` cargo add --git https://github.com/hacspec/hax hax-lib --rev 0334b38 ``` ## Extraction Now we can attempt to translate the Rust code into F\* code, which we will later verify. Our Rust crate implements two variants to compute the greatest common divisor, the euclidean algorithm and the binary algorithm, each in various variants for different integer types. To start simple, we will focus on the `u8` variant of the euclidean algorithm first. The following command instructs hax to extract only the function `gcd::euclid_u8` and its dependencies. ``` cargo hax into -i '-** +gcd::euclid_u8' fstar ``` This creates a new file `proofs/fstar/extraction/Gcd.fst`, which contains a translation of our Rust crate in F\*. To help F\* find the correct dependencies, we download [this Makefile](https://gist.githubusercontent.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3/raw/a54aec2538c625eb525281106ff73ea96f7b96dc/Makefile) and put it into `proofs/fstar/extraction/`. 
Before we instruct F\* to start proving anything, we first check that all dependencies can be found: ``` OTHERFLAGS="--lax" make -C proofs/fstar/extraction/ ``` This yields some harmless warnings and eventually: ``` All verification conditions discharged successfully ``` This means that all dependencies are available and we can start proving things. The Makefile we are using helps us to cache the results of the F\* verification, but this cache has the dangerous flaw that it does not invalidate when removing the `--lax` flag we used above. So we should delete the cache now: ``` rm -rf .fstar-cache ``` ## Panic freedom of Euclidean GCD By default, without us specifying anything, hax's F\* backend will attempt to prove that the Rust program terminates and does not panic: ``` make -C proofs/fstar/extraction/ ``` The proof attempt fails with the following error: ``` * Error 19 at Gcd.fst(26,10-26,14): - Subtyping check failed - Expected type o: (Rust_primitives.Integers.u8 & Rust_primitives.Integers.u8) { (let _, _ = o in true) /\ (let _, _ = o in Rust_primitives.Hax.Int.from_machine (Rust_primitives.Integers.mk_u32 0) <: Hax_lib.Int.t_Int) < (let _, _ = temp_0_ in Rust_primitives.Hax.Int.from_machine (Rust_primitives.Integers.mk_u32 0) <: Hax_lib.Int.t_Int) } got type Rust_primitives.Integers.u8 & Rust_primitives.Integers.u8 ``` To prove that a while-loop terminates, F\* requires a measure that decreases with every loop iteration. By default, the measure is simply the number 0, which always fails and results in errors resembling the one above. We need to find a better expression that decreases with every loop iteration. The relevant while-loop is the following: ```rust while b != 0 { let temp = a; a = b; b = temp; b %= a; } ``` In each iteration, the variables `a` and `b` get swapped and `b` is then set to `b % a`. If we focus only on what is happening to `b` here, we observe that `b` is set to `a % b` over the course of one iteration. 
Since `a % b` is always smaller than `b`, `b` is decreasing with every iteration
Please upvote or comment this issue if you see this error message. Unhandled loop kind This is discussed in issue https://github.com/hacspec/hax/issues/933. Please upvote or comment this issue if you see this error message. Note: the error was labeled with context `FunctionalizeLoops`. --> src/lib.rs:45:13 | 45 | / loop { 46 | | v >>= v.trailing_zeros(); ... | 58 | | if v == 0 { break; } 59 | | } | |_____________^ | ``` This is because the `loop`-construct cannot be translated. As a first (temporary) fix, we replace `loop` in `src/lib.rs` by `while true`. We run extraction again: ``` cargo hax into -i '-**' fstar ``` Now it succeeds. We verify: ``` make -C proofs/fstar/extraction/ ``` This yields a couple of harmless warnings and one error: ``` Error 72 at Gcd.fst(29,38-29,61): - Identifier impl_u8__trailing_zeros not found in module Core_models.Num ``` This is happening because the function `trailing_zeros` is missing in hax's F\* library. We can add it locally to our project by creating a file named `Core_models.Num.fsti` in `proofs/fstar/extraction`, and inserting the following code: ```fstar module Core_models.Num open Rust_primitives val trailing_zeros: #t:inttype -> int_t t -> (n:u32{v n >= 0 /\ v n <= bits t}) unfold let impl_u8__trailing_zeros (n:u8) = trailing_zeros n unfold let impl_u16__trailing_zeros (n:u16) = trailing_zeros n unfold let impl_u32__trailing_zeros (n:u32) = trailing_zeros n unfold let impl_u64__trailing_zeros (n:u64) = trailing_zeros n unfold let impl_u128__trailing_zeros (n:u128) = trailing_zeros n unfold let impl_usize__trailing_zeros (n:usize) = trailing_zeros n ``` This code tells F\* about the `trailing_zeros` functions and their signature for all unsigned integer types. 
Running verification again, we get the next error: ``` * Error 72 at Gcd.fst(19,28-19,41): - Identifier while_loop_cf not found in module Rust_primitives.Hax ``` This is another missing function in hax's F\* libraries ([issue #1204](https://github.com/cryspen/hax/issues/1204)). We can avoid it by refactoring the Rust code such that it avoids `break`s in while-loops. Here is the problematic while-loop: ```rust pub const fn $binary(mut u: $T, mut v: $T) -> $T { if u == 0 { return v; } if v == 0 { return u; } let shift = (u | v).trailing_zeros(); u >>= shift; v >>= shift; u >>= u.trailing_zeros(); while true { v >>= v.trailing_zeros(); if u > v { let temp = u; u = v; v = temp; } v -= u; if v == 0 { break; } } u << shift } ``` So how can we get rid of the `break`? We will have to modify the Rust code a little. We will try to move the line `if v == 0 { break; }` further up. Since `v - u == 0` if and only if `u == v`, we can check for that before the assignment `v -= u`: ```rust while true { v >>= v.trailing_zeros(); if u > v { let temp = u; u = v; v = temp; } if u == v { break; } v -= u; } ``` Moreover, for the condition `u == v` it does not matter whether `u` and `v` are swapped. 
So we can also move the check before the swapping: ```rust while true { v >>= v.trailing_zeros(); if u == v { break; } if u > v { let temp = u; u = v; v = temp; } v -= u; } ``` Since the loop-condition is always true, we can do the assignment `v >>= v.trailing_zeros();` just as well at the end of every iteration instead of the beginning of each iteration if we perform it one additional time before the loop starts: ```rust v >>= v.trailing_zeros(); while true { if u == v { break; } if u > v { let temp = u; u = v; v = temp; } v -= u; v >>= v.trailing_zeros(); } ``` Finally, we can move the line `if u == v { break; }` into the loop's condition: ```rust v >>= v.trailing_zeros(); while u != v { if u > v { let temp = u; u = v; v = temp; } v -= u; v >>= v.trailing_zeros(); } ``` Extracting and running F\* now yields: ``` * Error 19 at Gcd.fst(15,23-15,28): - Subtyping check failed - Expected type b: Rust_primitives.Integers.int_t Rust_primitives.Integers.U32 { Rust_primitives.Integers.v b >= 0 /\ Rust_primitives.Integers.v b < Rust_primitives.Integers.bits Rust_primitives.Integers.U8 } got type Rust_primitives.Integers.u32 ``` This error occurs because the F\* specification of the `>>`-function expects its right-hand argument to be smaller than the total number of bits of the employed integer type. This is already the case in our code, but F\* is not able to figure out that the value `shift` is indeed small enough. Most right-shifts in our code are by the number of trailing zeros of the given integer. That number of zeros can in principle be equal to the number of bits of the integer (which would be to large for `>>`), but only if the integer is `0`. 
So we can help F\* to figure out that everything is okay by adding the following lemma to `Core_models.Num.fsti`: ```fstar val trailing_zeros_lt_bits #t (a: int_t t): Lemma (requires (v a <> 0)) (ensures (v (trailing_zeros a) < bits t)) [SMTPat (trailing_zeros a)] ``` The lemma states that the number of trailing zeros is smaller than the total number of bits whenever the integer is nonzero. The `SMTPat`-annotation tells F\* that this lemma should be considered whenever a problem contains the `trailing_zeros` function. However, there are also two occurrences of `>>` where we shift `u` and `v` by `(u | v).trailing_zeros()`. For these, we need the following additional lemmas: ```fstar val trailing_zeros_band_le_left #t (a b : int_t t): Lemma (v (trailing_zeros (a |. b)) <= v (trailing_zeros a)) [SMTPat (trailing_zeros (a |. b))] val trailing_zeros_band_le_right #t (a b : int_t t): Lemma (v (trailing_zeros (a |. b)) <= v (trailing_zeros b)) [SMTPat (trailing_zeros (a |. b))] ``` These lemmas state that the trailing zeros of `a |. b` will always be at most as many as the trailing zeros of `a`, and similarly for `b`. Via `SMTPat`, we tell F\* to use this lemma when it encounters expressions of the form `trailing_zeros (a |. b)`. Since our first lemma applies only when the integer is nonzero, we also need to enable F\* to know that our integers do not become zero by shifting: ```fstar val shift_right_trailing_zeros_nonzero #t (a: int_t t) (b : u32): Lemma (requires (v a <> 0) && (v b <= v (trailing_zeros a))) (ensures (v (shift_right a b) <> 0)) [SMTPat (shift_right a b)] ``` This resolves the error around `>>`. Finally, we need to add a termination measure to the while-loop. This is the loop: ```rust while u != v { if u > v { let temp = u; u = v; v = temp; } v -= u; v >>= v.trailing_zeros(); } ``` Here is a summary of what the while loop is doing: It subtracts the smaller number among `u` and `v` from the larger one among them. 
Then, it removes any trailing zeros from the result. So in each iteration, as long as both numbers are nonzero, the larger one of the two numbers will definitely get smaller, and the other one will remain the same. Therefore, we will use the larger number among `u` and `v` as our termination measure: ```rust while u != v { hax_lib::loop_decreases!(if v < u { u } else { v }); if u > v { let temp = u; u = v; v = temp; } v -= u; v >>= v.trailing_zeros(); } ``` Since subtracting `0` does not decrease the number, it is cruicial that `v` and `u` do not become `0`. We annotate the loop with this invariant to make F\* aware of this: ```rust while u != v { hax_lib::loop_decreases!(if v < u { u } else { v }); hax_lib::loop_invariant!(v != 0 && u != 0); if u > v { let temp = u; u = v; v = temp; } v -= u; v >>= v.trailing_zeros(); } ``` We also need to make F\* aware that `v >>= v.trailing_zeros();` cannot increase `v` with an additional lemma: ```fstar val shift_right_trailing_zeros_le #t (a: int_t t): Lemma (requires (v a <> 0)) (ensures (v (shift_right a (trailing_zeros a)) <= v a)) [SMTPat (shift_right a (trailing_zeros a))] ``` We extract and reverify: ``` [CHECK] Gcd.fst Verified module: Gcd All verification conditions discharged successfully ``` Yay, we made it! Also the binary implementation always terminates and never panics. ## Verification using other tools For comparison, we verify panic freedom and termination using other tools as well. ### Kani First, [install Kani](https://model-checking.github.io/kani/install-guide.html). We will use version 0.66.0. To verify the function `$euclid` using Kani, we need to add another function that implements the verification. Since `$euclid` is part of a macro `gcd_impl` that duplicates it for various bit-lengths, we add a third identifier `$check_euclid:ident` to that macro, and provide the following identifiers: ```rust gcd_impl! 
{ (u8) binary_u8 euclid_u8 check_euclid_u8, (u16) binary_u16 euclid_u16 check_euclid_u16, (u32) binary_u32 euclid_u32 check_euclid_u32, (u64) binary_u64 euclid_u64 check_euclid_u64, (u128) binary_u128 euclid_u128 check_euclid_u128, (usize) binary_usize euclid_usize check_euclid_usize } ``` Now we can implement the verification function `check_gcd` inside the macro. Here is a first draft: ```rust #[kani::proof] #[cfg(kani)] fn $check_gcd() { let x: $T = kani::any(); let y: $T = kani::any(); $euclid(x, y); } ``` The first annotation tells Kani to run this verification function, and the second annotation tells the normal Rust compiler to ignore this function. The expression `kani::any()` tells Kani to test all possible integer values for `x` and `y`. Now we run Kani: ``` kani ./src/lib.rs ``` Unfortunatlely, Kani does not terminate because the loop in `$euclid` is potentially unbounded. #### Unwinding bound We need to specify an upper bound on how often we want Kani to unwind the loop, using the `kani::unwind` annotation. When setting such an upper bound, we also need to limit the variables `x` and `y` to values that will make the number of loop iterations stay below the given bound, using `kani::assume`. Here are some numbers that work okay: ```rust #[kani::proof] #[cfg(kani)] #[kani::unwind(15)] fn $check_euclid() { let limit: u128 = 200; let x: $T = kani::any(); let y: $T = kani::any(); kani::assume((x as u128) < limit); kani::assume((y as u128) < limit); let res = $euclid(x, y); } ``` Now we run `kani ./src/lib.rs` again: ``` Complete - 12 successfully verified harnesses, 0 failures, 12 total. ``` The binary version can be verified in the exact same way by adding an analogous verification function `$check_binary` to the macro. #### Loop contracts However, there is yet another option to verify loops in Kani, without the need to limit the verified input values: loop contracts. 
To use them, we first need to add the following annotations at the top of our file: ```rust #![feature(stmt_expr_attributes)] #![feature(proc_macro_hygiene)] ``` Now we can use the annotation `kani::loop_invariant` to annotate our loop with an invariant. Since we only want to show panic-freedom here, simple using `true` works as an invariant for the loop in `$euclid`: ```rust #[kani::loop_invariant(true)] while b != 0 { let temp = a; a = b; b = temp; b %= a; } ``` With this annotation, Kani will abstract over the loop instead of unwinding it. So we no longer need to limit the size of our inputs in `$check_euclid`. We can comment out the corresponding lines: ```rust // kani::assume(x < limit); // kani::assume(y < limit); ``` To activate the loop contract feature we need to add the following option when invoking Kani: ``` kani ./src/lib.rs -Z loop-contracts ``` Now verification is much faster, and it verifies all possible inputs: ``` Complete - 12 successfully verified harnesses, 0 failures, 12 total. ``` However, in contrast to the approach without loop contracts, this does not verify termination! Similarly, we can also verify the function `$binary` using loop contracts. However, using `true` as an invariant does not work here because the line ```rust v >>= v.trailing_zeros(); ``` can panic when `v` is `0`. So we add `v != 0` as an invariant: ```rust #[kani::loop_invariant(v != 0)] loop { v >>= v.trailing_zeros(); #[allow(clippy::manual_swap)] if u > v { // mem::swap(&mut u, &mut v); let temp = u; u = v; v = temp; } v -= u; // here v >= u if v == 0 { break; } } ``` *What's to love:* Kani requires amazingly little manual labor to set up! ### Verus [Install Verus](https://github.com/verus-lang/verus/blob/main/INSTALL.md). (We use version `0.2025.11.07.a99b6c7`.) To start verifying with Verus, we add the following import to `src/lib.rs`: ```rust use vstd::prelude::*; ``` And we wrap the function `$euclid` that we would like to verify into ```rust verus! 
{ } ``` Now we can try to run Verus: ``` verus src/lib.rs --crate-type=lib ``` We get: ``` error: loop must have a decreases clause ``` From our discussion above, we know that the variable `b` decreases in the loop. Let's tell Verus about that: ```rust while b != 0 decreases b { let temp = a; a = b; b = temp; b %= a; } ``` Running Verus again now yields: ``` verification results:: 12 verified, 0 errors ``` The `$euclid` function terminates and is panic-free! Next, we wrap the `$binary` function into `verus! { ... }` as well. Running Verus now results in an error: ``` error: `core::num::impl&%11::trailing_zeros` is not supported ``` The `trailing_zeros` function is present in Verus's library, but only for certain bit sizes. We could add the missing functions, but to simplify things, let's simply comment out the large bit sizes: ```rust gcd_impl! { (u8) binary_u8 euclid_u8, (u16) binary_u16 euclid_u16, (u32) binary_u32 euclid_u32, (u64) binary_u64 euclid_u64//, // (u128) binary_u128 euclid_u128, // (usize) binary_usize euclid_usize } ``` and ```rust gcd_impl_nonzero! 
{ (NonZeroU8) binary_nonzero_u8/binary_u8 euclid_nonzero_u8/euclid_u8, (NonZeroU16) binary_nonzero_u16/binary_u16 euclid_nonzero_u16/euclid_u16, (NonZeroU32) binary_nonzero_u32/binary_u32 euclid_nonzero_u32/euclid_u32, (NonZeroU64) binary_nonzero_u64/binary_u64 euclid_nonzero_u64/euclid_u64//, // (NonZeroU128) binary_nonzero_u128/binary_u128 euclid_nonzero_u128/euclid_u128, // (NonZeroUsize) binary_nonzero_usize/binary_usize euclid_nonzero_usize/euclid_usize } ``` The next error that we get is: ``` error: loop must have a decreases clause ``` Let us reuse the same measure as we have used for Hax: ```rust loop decreases if v < u { u } else { v } { v >>= v.trailing_zeros(); if u > v { let temp = u; u = v; v = temp; } v -= u; // here v >= u if v == 0 { break; } } ``` Our next error is: ``` error: possible bit shift underflow/overflow --> src/lib.rs:45:13 | 45 | u >>= shift; ``` We can make our lives easier by simply commenting out the two lines ```rust u >>= shift; v >>= shift; ``` Note that this does not change the function's behavior. The following line and the first line of the loop will shift `v` and `u` by all trailing zeros anyway. There is no need to shift them by their common trailing zeros before that. With those lines commented out, we now get: ``` error: decreases not satisfied at end of loop ``` Our measure does actually decrease, but Verus is unable to prove it. First, we need to add a loop invariant that `u` and `v` are nonzero. If one of them was zero, then subtracting one from the other would not make the measure decrease. ```rust loop invariant_except_break u != 0 && v != 0 decreases if v < u { u } else { v } { ``` Also, Verus has trouble figuring out that the line ``` v >>= v.trailing_zeros(); ``` will never make `v` larger and will never cause `v` to become `0`. 
We can add the following assumptions to fix this temporarily: ``` assume(v != 0 ==> v >> v.trailing_zeros() != 0); assume(forall |i: u8| v >> i <= v); v >>= v.trailing_zeros(); ``` The next error we get is: ``` error: invariant not satisfied before loop --> src/lib.rs:50:40 | 50 | invariant_except_break u != 0 && v != 0 ``` This is because Verus cannot see that ``` u >>= u.trailing_zeros(); ``` cannot make `u` become zero. We can fix this temporarily using another assumption: ```rust assume(u != 0 ==> u >> u.trailing_zeros() != 0); u >>= u.trailing_zeros(); ``` The only remaining error is: ``` error: possible bit shift underflow/overflow --> src/lib.rs:71:13 | 71 | u << shift ``` This error occurs because shift could in principle be equal to the full number of bits of `u` when `u | v` is zero. Adding the following assumption above the definition of `shift`, helps Verus figure out that this cannot happen: ```rust assume(u != 0 && v != 0 ==> u | v != 0); let shift = (u | v).trailing_zeros(); ``` Now verification succeeds: ``` $ verus src/lib.rs --crate-type=lib verification results:: 16 verified, 0 errors ``` However, this confirms termination and panic freedom only up to the assumptions we have inserted using `assume`. Let's try to prove them. The `bit_vector` tactic can prove some of them: ``` assert(u != 0 && v != 0 ==> u | v != 0) by (bit_vector); ``` and ``` assert(forall |i: u8| v >> i <= v) by (bit_vector); ``` We can use these lines to replace the corresponding `assume`s. The remaining two `assume`s are harder to prove. We will simply add them as an axiom by adding the following function to our `gcd_impl` macro: ``` #[verifier::external_body] proof fn $trailing_zeros_axiom(x: $T) ensures x != 0 ==> x >> #[trigger] x.trailing_zeros() != 0 {} ``` Then, we can replace the two remaining assumes by ``` proof! { $trailing_zeros_axiom(u); } ``` and ``` proof! { $trailing_zeros_axiom(v); } ``` *What's to love:* Verus allows us to work directly with the Rust code! 
### Aeneas [Install Aeneas](https://github.com/AeneasVerif/aeneas?tab=readme-ov-file#installation--build). We use commit `f2fbd655` here. [Install Lean](https://lean-lang.org/install/). We will use the following `Makefile` to make Aeneas extract Lean code from our crate: ``` CHARON_HOME ?= $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/../charon AENEAS_HOME ?= $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/../aeneas CHARON_EXE = $(CHARON_HOME)/bin/charon AENEAS_EXE = $(AENEAS_HOME)/bin/aeneas AENEAS_OPTIONS ?= .PHONY: extract extract: gcd.llbc $(AENEAS_EXE) -backend lean gcd.llbc -split-files -dest proofs/Gcd $(AENEAS_OPTIONS) gcd.llbc: $(wildcard */*.rs) RUSTFLAGS="--cfg eurydice" $(CHARON_EXE) cargo --preset=aeneas --start-from crate::euclid_u8 --start-from crate::binary_u8 ``` Save this under the name `Makefile` and run `make`. Note that we specify the options `--start-from crate::euclid_u8 --start-from crate::binary_u8`, which will extract specifically the functions `euclid_u8` and `binary_u8` into Lean. Running `make` produces a couple of Lean files in the directory `proofs/Gcd`. We create a new Lean project around these files: ``` cd proofs lake +v4.24.0 init Gcd lib ``` Add the following lines to `lakefile.toml` to add the Aeneas Lean library as a dependency, adjusting the path as needed: ```toml [[require]] name = "aeneas" path = "../../aeneas/backends/lean" ``` Then run ``` lake update ``` to update the dependencies. This will download `mathlib`, a dependeny of Aeneas, which may take a while. Aeneas created a file called `FunsExternal_Template.lean` because the `trailing_zeros` function is not part of the Aeneas library. Rename this template file to `FunsExternal.lean`. We could write a precise definition of this function here, but for now, we just define it as `sorry`, which is a placeholder for a missing definition. 
Replace the line ```lean axiom core.num.U8.trailing_zeros : U8 → Result U32 ``` by ```lean def core.num.U8.trailing_zeros : U8 → Result U32 := sorry ``` Now in the root file of our Lean project, `Gcd.lean`, add the import ``` import Gcd.Funs ``` Now we run ``` lake build ``` to ensure that our Lean code typechecks: ``` warning: Gcd/FunsExternal.lean:15:4: declaration uses 'sorry' Build completed successfully (1500 jobs). ``` Let's have a look at how the Lean translations of our Rust functions look like. Open the file `Funs.lean` in VSCode (with the Lean extension installed). You may see a lot of red in the editor, which will go away by pressing `Restart file`. The file contains four definitions: The functions `binary_u8` and `euclid_u8` themselves, and for each of them a function representing the contained loop, which has become a recursive function in Lean. The `euclid_u8` funciton for example looks as follows: ```lean /- [gcd::euclid_u8]: loop 0: Source: 'src/lib.rs', lines 75:12-82:13 -/ def euclid_u8_loop (a : U8) (b : U8) : Result U8 := do if b != 0#u8 then let b1 ← a % b euclid_u8_loop b b1 else ok a partial_fixpoint /- [gcd::euclid_u8]: Source: 'src/lib.rs', lines 65:8-85:9 -/ def euclid_u8 (a : U8) (b : U8) : Result U8 := do let (a1, b1) ← if a > b then ok (a, b) else ok (b, a) euclid_u8_loop a1 b1 ``` #### Euclidean GCD Now we can start proving. Open the file `Gcd.lean`. Let us verify termination and panic-freedom of `euclid_u8`. This can be expressed in Lean as follows: ``` theorem euclid_u8_spec (a b : U8) : ∃ y, euclid_u8 a b = ok y := by sorry ``` Here, the `sorry` stands for a missing proof. A typical Aeneas proof looks like this: ``` theorem euclid_u8_spec (a b : U8) : ∃ y, euclid_u8 a b = ok y := by unfold euclid_u8 progress* ``` Unfortunately, this proof does not quite work yet. 
We get the error: ``` unsolved goals case isTrue a b : U8 h✝ : a > b ⊢ ∃ y, euclid_u8_loop a b = ok y case isFalse a b : U8 h✝ : ¬a > b ⊢ ∃ y, euclid_u8_loop b a = ok y ``` The problem is that the `progress*` tactic does not know the specification of the `euclid_u8_loop` function. Let's create a seperate theorem about that function. Put the following code above the theorem that we just wrote: ```lean @[progress] theorem euclid_loop_u8_spec (a b : U8) : ∃ y, euclid_u8_loop a b = ok y := by sorry ``` This theorem states that `euclid_u8_loop` terminates and does not panic, for now without proof (`sorry`). Note that after adding this theorem, the error on the theorem below has disappeared. The annotation `@[progress]` informs the `progress*` tactic about this specification and it can be used in the proof of `euclid_u8_spec`. Now we need to replace the `sorry` with an actual proof. Let's try the same idea: ```lean @[progress] theorem euclid_loop_u8_spec (a b : U8) : ∃ y, euclid_u8_loop a b = ok y := by unfold euclid_u8_loop progress* ``` We get an error: ``` fail to show termination for gcd.euclid_loop_u8_spec ``` From our discussion above, we know that `b` is a variable that decreases in this recursive function. We can tell Lean about this as follows: ```lean @[progress] theorem euclid_loop_u8_spec (a b : U8) : ∃ y, euclid_u8_loop a b = ok y := by unfold euclid_u8_loop progress* termination_by b.val decreasing_by scalar_decr_tac ``` Now all errors have disappeared and there are little check marks in the margin. That means `euclid_u8` really terminates and is panic-free! #### Binary GCD Now, let's try to verify the binary version as well. 
The Lean translation looks like this: ```lean /- [gcd::binary_u8]: loop 0: Source: 'src/lib.rs', lines 45:12-59:13 -/ def binary_u8_loop (u : U8) (v : U8) : Result U8 := do let i ← core.num.U8.trailing_zeros v let v1 ← v >>> i let (u1, v2) ← if u > v1 then ok (v1, u) else ok (u, v1) let v3 ← v2 - u1 if v3 = 0#u8 then ok u1 else binary_u8_loop u1 v3 partial_fixpoint /- [gcd::binary_u8]: Source: 'src/lib.rs', lines 35:8-62:9 -/ def binary_u8 (u : U8) (v : U8) : Result U8 := do if u = 0#u8 then ok v else if v = 0#u8 then ok u else let i ← (↑(u ||| v) : Result U8) let shift ← core.num.U8.trailing_zeros i let u1 ← u >>> shift let v1 ← v >>> shift let i1 ← core.num.U8.trailing_zeros u1 let u2 ← u1 >>> i1 let u3 ← binary_u8_loop u2 v1 u3 <<< shift ``` We use the same approach as for `euclid_u8`, adding the following code to `Gdc.lean`: ```lean theorem binary_u8_spec (a b : U8) : ∃ y, binary_u8 a b = ok y := by unfold binary_u8 progress* ``` We get the following error: ``` unsolved goals a b : U8 h✝¹ : ¬a = 0#u8 h✝ : ¬b = 0#u8 i : U8 _ : [> let i ← ↑(a ||| b) <] i_post_1 : ↑i = ↑(a ||| b) i_post_2 : i.bv = a.bv ||| b.bv ⊢ ∃ y, (do let shift ← core.num.U8.trailing_zeros i let u1 ← a >>> shift let v1 ← b >>> shift let i1 ← core.num.U8.trailing_zeros u1 let u2 ← u1 >>> i1 let u3 ← binary_u8_loop u2 v1 u3 <<< shift) = ok y ``` The `progress*` tactic gets stuck at `core.num.U8.trailing_zeros` because there is no specification about this function. Let's provide one, for instance directly above `binary_u8_spec`: ```lean @[progress] theorem trailing_zeros_spec (v : U8) (hv : v ≠ 0#u8): ∃ y, core.num.U8.trailing_zeros v = .ok y ∧ y < 8#u32 := sorry ``` Here, we have added the fact that `trailing_zeros` will be less than the bit length when the input is nonzero since we have seen above that this is crucial for verification of binary GCD. 
Next, we get the error: ``` unsolved goals case hv a b : U8 h✝¹ : ¬a = 0#u8 h✝ : ¬b = 0#u8 i : U8 _ : [> let i ← ↑(a ||| b) <] i_post_1 : ↑i = ↑(a ||| b) i_post_2 : i.bv = a.bv ||| b.bv ⊢ i ≠ 0#u8 ``` The tactic gets stuck because there is no specification saying that bitwise or (`|||`) will not yield zero when the inputs are nonzero. Let's add that: ```lean @[progress] theorem bor_spec (u v : U8) (hu : u ≠ 0#u8) (hv : v ≠ 0#u8) : ∃ y, (↑(u ||| v) : Result U8) = .ok y ∧ y ≠ 0#u8 := sorry ``` The next error is: ``` unsolved goals case hv a b : U8 h✝¹ : ¬a = 0#u8 h✝ : ¬b = 0#u8 i : U8 _✝² : [> let i ← ↑(a ||| b) <] i_post : i ≠ 0#u8 shift : U32 _✝¹ : [> let shift ← core.num.U8.trailing_zeros i <] shift_post : shift < 8#u32 u1 : U8 _✝ : [> let u1 ← a >>> shift <] u1_post_1 : ↑u1 = ↑a >>> ↑shift u1_post_2 : u1.bv = a.bv >>> ↑shift v1 : U8 _ : [> let v1 ← b >>> shift <] v1_post_1 : ↑v1 = ↑b >>> ↑shift v1_post_2 : v1.bv = b.bv >>> ↑shift ⊢ u1 ≠ 0#u8 ``` Here, we need to tell Lean that right shifting by the number of trailing zeros (or less) will not turn a nonzero number into zero. Here is a first attempt to state that: ```lean @[progress] theorem shift_right_spec (u : U8) (v : U32) (hu : u ≠ 0#u8) (hv : v ≤ core.num.U8.trailing_zeros u): ∃ y, u >>> v = .ok y ∧ y ≠ 0#u8 := sorry ``` Unfortunately, this does not work because `core.num.U8.trailing_zeros` lives in the `Result` monad, i.e., it's type is `U8 → Result U32`, not `U8 → U32`. To get around this issue, we define another function `trailing_zeros`: ```lean def trailing_zeros : U8 → U32 := sorry ``` Since implementing it is beyond the scope of this blog post, we use the placeholder `sorry`. 
Now, we extend our specification of `core.num.U8.trailing_zeros` to state that it will always return the same result as prescribed by our new `trailing_zeros` function:
This still fails because we are missing two more things: First, we need to extend our specification of right-shift to state that it will make the input smaller:
## Preparation First, we need to install Hax and Lean: * [Hax](https://github.com/cryspen/hax?tab=readme-ov-file#installation) (We are using commit `d1365d4`, so after `git clone git@github.com:cryspen/hax.git && cd hax`, run `git checkout d1365d4`) * [Lean](https://lean-lang.org/install/) Again, we will use the [gcd Rust crate](https://github.com/frewsxcv/rust-gcd) as an example: ``` git clone git@github.com:frewsxcv/rust-gcd.git && cd rust-gcd git checkout 8fb3a59 ``` We add hax-lib as a dependency, which will allow us to make annotations in the Rust code: ``` cargo add --git https://github.com/hacspec/hax hax-lib --rev d1365d4 ``` ## Extraction Now we are ready to translate the Rust code into Lean code. We will limit ourselves to the `euclid_u16` function here: ``` cargo hax into -i '-** +gcd::euclid_u16' lean ``` This will create a new file `proofs/lean/extraction/Gcd.lean` containing the Lean version of the extracted function. For Lean to find the required dependencies, we must add the following two files in `proofs/lean`: `lean-toolchain`: ``` leanprover/lean4:v4.23.0 ``` `lakefile.toml`: ``` name = "Gcd" version = "0.1.0" defaultTargets = ["Gcd"] [[lean_lib]] name = "Gcd" roots = ["extraction.Gcd"] [[require]] name = "Hax" path = "../../../hax/hax-lib/proof-libs/lean" ``` Make sure that the path above points to the subdirectory `hax-lib/proof-libs/lean` of the repository that you checked out during the installation of Hax (i.e., `git@github.com:cryspen/hax.git` on commit `d1365d4`). The path can be relative to the `lakefile.toml` file or absolute. Now we can run Lean on the extracted code. ``` (cd proofs/lean && lake build) ``` It should take a moment and then say: ``` Build completed successfully (35 jobs). ``` So it this already verified? No, currently, we need to add a pre- or post-condition to a function to make Hax generate a specification that we can prove correct. (This will likely change in the near future.) 
## Verification We can add the following `hax_lib::ensures` annoation above the definition of `$euclid` to say that we want to prove termination and panic-freedom: ``` #[hax_lib::ensures(|_| true)] pub const fn $euclid(a: $T, b: $T) -> $T { ... } ``` We run Hax and Lean again: ``` cargo hax into -i '-** +gcd::euclid_u16' lean (cd proofs/lean && lake build) ``` Now, we get lots of `unsolved goals` errors. We can open the `Gcd.lean` file to get a better impression of what is going on. The file contains a definition of `Gcd.euclid_u16`, which is the Lean version of our `euclid_u16` function and which compiles without error. Below, we have a definition of `Gcd.euclid_u16.spec`, which contains the specification of the function and an attempted proof of correctness. It should have a red squiggly underline on the `contract` proof, indicating that the error occurs there. The default proof `by mvcgen[Gcd.euclid_u16] <;> try grind` fails. If we click just behind `mvcgen[Gcd.euclid_u16]`, we can see the verification conditions that Lean's `mvcgen` tactic generated in Lean's infoview. It shows a list of four goals. The second and the forth goal end in: ``` ToNat.toNat 0 < ToNat.toNat 0 ``` So this says that the `u16` value `0`, converted to a natural number, is smaller than itself. This is simply wrong and will be impossible to prove. These verification conditions are coming from the default termination measure associated with while loops, which is constant `0` by default. We will have to provide a better measure to prove termination, using the `hax_lib::loop_decreases` annotation. From our last blog post, we know that `b` is a useful measure for this loop: ``` while b != 0 { hax_lib::loop_decreases!(b); // mem::swap(&mut a, &mut b); let temp = a; a = b; b = temp; b %= a; } ``` After running Hax and Lean again, the default proof still fails, but the generated verification conditions can now be proved with some manual effort. 
After developing the proof in Lean, we can copy the proof into the Rust file so that it does not get overwritten when reextracting the code: ``` #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof("by mvcgen[Gcd.euclid_u16] · expose_names intro simp_all [a_1] · expose_names simp only [ToNat.toNat, h_4] apply Nat.mod_lt grind · expose_names intro simp_all [a_1] · expose_names simp only [ToNat.toNat, h_4] apply Nat.mod_lt grind")] pub const fn $euclid(a: $T, b: $T) -> $T { ... } ``` (Be careful with the indentation here! Lean is white-space sensitive!) After running Hax again, `lake build` now says: ``` Build completed successfully (35 jobs). ``` Yay! We are working on better automation for proofs like this one and on better coverage of the Rust core library, e.g., to be able to verify the binary gcd implementation in this crate as well. ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-01.md ================================================ --- authors: - lucas title: "This Month in Hax: January 2025" date: 2025-02-10 --- This blog post continues our ongoing series introduced in the [previous blog of hax](https://hacspec.org/blog/tags/this-month-in-hax/), a monthly collection of highlights showcasing key developments in hax and its ecosystem. This month, we merged **31 pull requests** and celebrated a major milestone by releasing the first official version of hax: [v0.1.0](https://github.com/cryspen/hax/releases/tag/cargo-hax-v0.1.0). If you haven’t already, be sure to check out [our blog post](../announce-v0.1.md) for more details on this release! We tackled a variety of bug fixes and engine improvements. One significant achievement was resolving a long-standing issue related to the inconsistent preservation of declaration orders between Rust and the extractions. This problem [was finally fixed](https://github.com/cryspen/hax/pull/1247). 
🎉 Additionally, we merged [a comprehensive overhaul](https://github.com/cryspen/hax/pull/1199) of how identifiers are treated and represented within the engine. This rework allowed us to fix nearly ten related issues, making the system more robust and efficient. In the F\* backend, we transitioned away from using [HACL\*](https://github.com/hacl-star/hacl-star) machine integers. Instead, we now rely on a [thin wrapper](https://github.com/cryspen/hax/pull/1238) over F\*'s native mathematical integers. Unlike HACL\*'s opaque machine integers, this new representation allows us to use F\*'s normalizer freely, offering a cleaner and more lightweight solution. Stay tuned for more updates in the coming months! ### Full list of PRs * \#1278: [ci(gha): drop magic-nix-cache action because of EOL](https://github.com/cryspen/hax/pull/1278) * \#1277: [fix(mkdocs): use codemirror instead of ace, re-setup on page reload](https://github.com/cryspen/hax/pull/1277) * \#1275: [Create CODEOWNERS](https://github.com/cryspen/hax/pull/1275) * \#1273: [Various F* core lib additions.](https://github.com/cryspen/hax/pull/1273) * \#1267: [fix(hax-lib/macros): handle correctly `&mut Self` arguments in `ensures`](https://github.com/cryspen/hax/pull/1267) * \#1265: [Fix announce-v0.1.md](https://github.com/cryspen/hax/pull/1265) * \#1263: [updatge readme and docs](https://github.com/cryspen/hax/pull/1263) * \#1261: [Update website landing page](https://github.com/cryspen/hax/pull/1261) * \#1260: [chore(deps): bump hashbrown from 0.15.0 to 0.15.2](https://github.com/cryspen/hax/pull/1260) * \#1259: [changelog: initialize](https://github.com/cryspen/hax/pull/1259) * \#1258: [Delete frontend/exporter/json-visualizer directory](https://github.com/cryspen/hax/pull/1258) * \#1247: [Stable topological sort using original order.](https://github.com/cryspen/hax/pull/1247) * \#1245: [Release hax v0.1.0](https://github.com/cryspen/hax/pull/1245) * \#1241: [hax v0.1 blog 
post](https://github.com/cryspen/hax/pull/1241) * \#1238: [Transparent integers](https://github.com/cryspen/hax/pull/1238) * \#1237: [Fix order of `Call` trait clauses](https://github.com/cryspen/hax/pull/1237) * \#1236: [Add more info to `ImplExprAtom::Builtin`](https://github.com/cryspen/hax/pull/1236) * \#1230: [fix(engine) Propagate return rewrite to avoid crash in side_effect_utils](https://github.com/cryspen/hax/pull/1230) * \#1229: [fix(engine) Add type arguments for associated constants.](https://github.com/cryspen/hax/pull/1229) * \#1228: [fix(engine) Use ocamlgraph fork to fix missing rec bug.](https://github.com/cryspen/hax/pull/1228) * \#1225: [Hax home page using mkdocs](https://github.com/cryspen/hax/pull/1225) * \#1223: [fix(engine) Attempt to fix double return bug.](https://github.com/cryspen/hax/pull/1223) * \#1222: [Make predicate handling a bit more consistent](https://github.com/cryspen/hax/pull/1222) * \#1220: [Visit trait goals to rename impl expr they may contain.](https://github.com/cryspen/hax/pull/1220) * \#1216: [Update README.md: `unsafe` is OK to use](https://github.com/cryspen/hax/pull/1216) * \#1215: [Fix generics handling for function calls](https://github.com/cryspen/hax/pull/1215) * \#1212: [fix(CI) Update F* version to fix mlkem CI job ](https://github.com/cryspen/hax/pull/1212) * \#1206: [fix(engine) Make sub-parts of `Quote` visited by visitors](https://github.com/cryspen/hax/pull/1206) * \#1199: [Engine: rework global name representation](https://github.com/cryspen/hax/pull/1199) * \#1075: [Move trait methods in cyclic dependencies bundling.](https://github.com/cryspen/hax/pull/1075) * \#1066: [Add EBNF for AST to book](https://github.com/cryspen/hax/pull/1066) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@app/dependabot](https://github.com/dependabot) * [@cmester0](https://github.com/cmester0) * [@franziskuskiefer](https://github.com/franziskuskiefer) * 
[@karthikbhargavan](https://github.com/karthikbhargavan) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-02.md ================================================ --- authors: - lucas title: "This Month in Hax: February 2025" date: 2025-03-05 --- In February, we merged **23 pull requests**! The MIR translation of the frontend was improved by [@Nadrieril](https://github.com/Nadrieril): some bugs were fixed, and our handling of constants have been improved and is now more robust. One of the major updates this month was the introduction of a new [`Prop` abstraction](https://github.com/cryspen/hax/pull/1301) in `hax-lib`, which enhances expressiveness in property-based reasoning within the Hax engine. With `Prop`, it is now possible to write non-computable properties that leverage universal quantifiers. We also made significant progress in the engine, including fixing issues related to [`continue` handling in loops](https://github.com/cryspen/hax/pull/1296) and ensuring proper naming and disambiguation in bundled components ([#1280](https://github.com/cryspen/hax/pull/1280), [#1286](https://github.com/cryspen/hax/pull/1286)). We also tackled improvements in the F\* backend, such as fixing trait inheritance in `rand-core` ([#1322](https://github.com/cryspen/hax/pull/1322)) and expanding the core library ([#1292](https://github.com/cryspen/hax/pull/1292)). Stay tuned for more updates in the coming months! 
### Full list of PRs * \#1325: [mkdocs: add Maxime description](https://github.com/cryspen/hax/pull/1325) * \#1322: [Proof libs (F*): fix trait inheritance in rand-core](https://github.com/cryspen/hax/pull/1322) * \#1320: ['hax for everyone' blog post.](https://github.com/cryspen/hax/pull/1320) * \#1319: [Translate less data in MIR](https://github.com/cryspen/hax/pull/1319) * \#1318: [ Not all evaluated MIR constants are byte strings](https://github.com/cryspen/hax/pull/1318) * \#1317: [Avoid an ICE by matching on type earlier](https://github.com/cryspen/hax/pull/1317) * \#1312: [full_def: no need to normalize clauses eagerly anymore](https://github.com/cryspen/hax/pull/1312) * \#1309: [full_def: group generic and predicates into a common struct](https://github.com/cryspen/hax/pull/1309) * \#1307: [update website landing page](https://github.com/cryspen/hax/pull/1307) * \#1306: [init(docs/blog): this month in hax: January](https://github.com/cryspen/hax/pull/1306) * \#1305: [fix(engine) Fix question marks simplification with deref/borrow.](https://github.com/cryspen/hax/pull/1305) * \#1304: [feat(manual): hax-playground integration: use latest `main`](https://github.com/cryspen/hax/pull/1304) * \#1303: [fix(engine) Fix return inside closure.](https://github.com/cryspen/hax/pull/1303) * \#1302: [Engine: fix implicit representation for enums](https://github.com/cryspen/hax/pull/1302) * \#1301: [`hax-lib`: introduce a `Prop` abstraction](https://github.com/cryspen/hax/pull/1301) * \#1296: [fix(engine) Fix loops with `continue` and no `return`/`break`](https://github.com/cryspen/hax/pull/1296) * \#1293: [fix(engine) Add const parameter for assoc const of parametric impl.](https://github.com/cryspen/hax/pull/1293) * \#1292: [Additions and corrections in F* core lib.](https://github.com/cryspen/hax/pull/1292) * \#1286: [fix(engine) Fix naming bundle regression](https://github.com/cryspen/hax/pull/1286) * \#1284: [fix(engine) Make sure origins are renamed in 
bundles.](https://github.com/cryspen/hax/pull/1284) * \#1282: [Update CI dependencies](https://github.com/cryspen/hax/pull/1282) * \#1281: [Library additions for ML-DSA verification](https://github.com/cryspen/hax/pull/1281) * \#1280: [fix(engine) Add default case for disambiguation of bundle element names](https://github.com/cryspen/hax/pull/1280) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@franziskuskiefer](https://github.com/franziskuskiefer) * [@karthikbhargavan](https://github.com/karthikbhargavan) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-03.md ================================================ --- authors: - lucas title: "This Month in Hax: March 2025" date: 2025-04-01 --- In March, we successfully merged **32 pull requests**! Thanks [@Nadrieril](https://github.com/Nadrieril), who helped move `hax` forward by pinning it to a more recent nightly version of the Rust compiler ([#1380](https://github.com/cryspen/hax/pull/1380)). Nadrieril also continued work on the frontend. Trait resolution is now more robust, especially in the presence of closures ([#1376](https://github.com/cryspen/hax/pull/1376)), and our handling of constants has seen significant improvements, with refinements introduced in both [#1367](https://github.com/cryspen/hax/pull/1367) and [#1337](https://github.com/cryspen/hax/pull/1337). Outside of the frontend, we also focused on enhancements and fixes within `hax-lib` and the engine. Notably, support for mathematical integers and logical propositions has been strengthened, making reasoning more precise and expressive ([#1372](https://github.com/cryspen/hax/pull/1372), [#1352](https://github.com/cryspen/hax/pull/1352), [#1351](https://github.com/cryspen/hax/pull/1351)). 
Additionally, we resolved several issues related to the use of `self` in contracts, improving overall stability and correctness in those scenarios. March also brought new capabilities to `hax-lib`. The newly introduced `decreases` attribute makes it possible to express termination arguments directly in Rust, giving users better control over termination checking. Furthermore, the addition of the `::replace_body` family of attributes allows developers to substitute the body of a Rust function with backend-specific code, offering a powerful mechanism for fine-tuned extraction when needed. Stay tuned for more updates next month! ### Full list of PRs * \#1380: [Update the rustc pin](https://github.com/cryspen/hax/pull/1380) * \#1377: [Stop depending on ocamlgraph fork.](https://github.com/cryspen/hax/pull/1377) * \#1376: [Correctly handle impl exprs for closures](https://github.com/cryspen/hax/pull/1376) * \#1373: [simd types](https://github.com/cryspen/hax/pull/1373) * \#1372: [`hax-lib`: `Int` improvements and fixes](https://github.com/cryspen/hax/pull/1372) * \#1367: [Remove `ConstantExt` and its `translate_uneval` machinery](https://github.com/cryspen/hax/pull/1367) * \#1363: [fix: update flake.lock](https://github.com/cryspen/hax/pull/1363) * \#1361: [Various fstar core additions, mostly for iterators.](https://github.com/cryspen/hax/pull/1361) * \#1357: [fix(hax-lib): allow `future(self)`](https://github.com/cryspen/hax/pull/1357) * \#1356: [feat(proof-libs): add missing definitions](https://github.com/cryspen/hax/pull/1356) * \#1355: [fix(engine/fstar-backend): drop spurious precondition on `Lemma`s](https://github.com/cryspen/hax/pull/1355) * \#1354: [fix(hax-lib/dummy): intro `int!`](https://github.com/cryspen/hax/pull/1354) * \#1353: [fix(proof-libs/F*): fix name `f_TryInto`](https://github.com/cryspen/hax/pull/1353) * \#1352: [hax-lib: prop: allow equality on every type](https://github.com/cryspen/hax/pull/1352) * \#1351: [fix(hax-lib/assume): fixes assume 
and assert_prop](https://github.com/cryspen/hax/pull/1351) * \#1350: [fix(engine) Avoid replacing 'let rec' in interfaces.](https://github.com/cryspen/hax/pull/1350) * \#1349: [fix(engine/fstar backend): subst self_ to self](https://github.com/cryspen/hax/pull/1349) * \#1348: [Hax shouldn't distinguish the `If` case in MIR](https://github.com/cryspen/hax/pull/1348) * \#1345: [Engine: import static items (but mutable ones), reject asm blocks](https://github.com/cryspen/hax/pull/1345) * \#1342: [feat(hax-lib): add support for `decreases` clauses in F*](https://github.com/cryspen/hax/pull/1342) * \#1339: [Bertie libs](https://github.com/cryspen/hax/pull/1339) * \#1338: [Don't error on built-in associated types](https://github.com/cryspen/hax/pull/1338) * \#1337: [Translate MIR constants using the const-eval interpreter](https://github.com/cryspen/hax/pull/1337) * \#1336: [F* typeclass for `core::ops::BitXor`](https://github.com/cryspen/hax/pull/1336) * \#1333: [feat(engine/names): extend name policy expressivity](https://github.com/cryspen/hax/pull/1333) * \#1332: [fix(engine/gen-printer): fixes #1294](https://github.com/cryspen/hax/pull/1332) * \#1331: [ci(nix): use F* bin cache in mlkem.yml](https://github.com/cryspen/hax/pull/1331) * \#1330: [This month in hax 02-25 + release 0.2.0](https://github.com/cryspen/hax/pull/1330) * \#1329: [fix(engine) Allow implementing arithmetic traits.](https://github.com/cryspen/hax/pull/1329) * \#1328: [fix(setup.sh): rustup 1.28](https://github.com/cryspen/hax/pull/1328) * \#1327: [fix(nix): MacOS: add rustc and libz dylib to `DYLD_LIBRARY_PATH`](https://github.com/cryspen/hax/pull/1327) * \#1323: [Add more facts to logand_lemma](https://github.com/cryspen/hax/pull/1323) * \#1321: [Introduce `hax_lib::BACKEND::replace_body` attribute](https://github.com/cryspen/hax/pull/1321) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * 
[@jschneider-bensch](https://github.com/jschneider-bensch) * [@karthikbhargavan](https://github.com/karthikbhargavan) * [@mamonet](https://github.com/mamonet) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-04.md ================================================ --- authors: - maxime title: "This Month in Hax: April 2025" date: 2025-05-05 --- In April, we successfully merged **38 pull requests**! Thanks [@Nadrieril](https://github.com/Nadrieril), for pinning a more recent nightly version of the Rust compiler ([#1391](https://github.com/cryspen/hax/pull/1391)). Nadrieril also continued making the frontend more robust and complete with work on constants [#1402](https://github.com/cryspen/hax/pull/1402), [#1420](https://github.com/cryspen/hax/pull/1420), [#1429](https://github.com/cryspen/hax/pull/1429)) and item's children ([#1412](https://github.com/cryspen/hax/pull/1412)). [@W95Psp](https://github.com/W95Psp) worked on `hax-lib` with improved support for writing f* lemmas in rust ([#1428](https://github.com/cryspen/hax/pull/1428)), and fstar post-processing with tactics ([#1437](https://github.com/cryspen/hax/pull/1437)). I worked on while loops which now support invariants and variants (to prove termination) in [#1375](https://github.com/cryspen/hax/pull/1375) We also worked on various improvements like removing deprecated dependencies used by hax-lib ([#1385](https://github.com/cryspen/hax/pull/1385) and [#1394](https://github.com/cryspen/hax/pull/1394)), some ProVerif backend workarounds by [@jschneider-bensch](https://github.com/jschneider-bensch) ([#1360](https://github.com/cryspen/hax/pull/1360), [#1401](https://github.com/cryspen/hax/pull/1401) and [#1406](https://github.com/cryspen/hax/pull/1406)), and multiple f* core lib additions. Stay tuned for more updates next month! 
### Full list of PRs * \#1437: [feat(hax_lib/macros): F*: add `postprocess_with`](https://github.com/cryspen/hax/pull/1437) * \#1436: [Silence unused inputs in lemmas](https://github.com/cryspen/hax/pull/1436) * \#1435: [Add `t_Debug` instance for `u128`](https://github.com/cryspen/hax/pull/1435) * \#1432: [Add Instances of `Core.Fmt.t_Debug` for `Prims.bool` and pairs](https://github.com/cryspen/hax/pull/1432) * \#1430: [Fix range loops for empty ranges.](https://github.com/cryspen/hax/pull/1430) * \#1429: [Translate evaluated closure constants](https://github.com/cryspen/hax/pull/1429) * \#1428: [feat(hax-lib&backend): F*: support for SMT patterns](https://github.com/cryspen/hax/pull/1428) * \#1427: [feat(proof-libs): add `impl_i32__wrapping_sub`](https://github.com/cryspen/hax/pull/1427) * \#1422: [Barrett example tutorial](https://github.com/cryspen/hax/pull/1422) * \#1420: [Add a fake `DefId` for promoted constants](https://github.com/cryspen/hax/pull/1420) * \#1417: [Add `arg_count` to MIR bodies](https://github.com/cryspen/hax/pull/1417) * \#1416: [fix(engine) Fix name clashes for functions defined in impl methods.](https://github.com/cryspen/hax/pull/1416) * \#1415: [fix(proof-libs): give a computable definition to `>>`](https://github.com/cryspen/hax/pull/1415) * \#1414: [Use `ConstantExprKind::Todo` more](https://github.com/cryspen/hax/pull/1414) * \#1413: [feat(justfile): `just expand`: always use nightly](https://github.com/cryspen/hax/pull/1413) * \#1412: [full_def: Add helper to explore an item's children](https://github.com/cryspen/hax/pull/1412) * \#1410: [Typeclass for`BitAnd`; Instantiations for `Prims.bool`](https://github.com/cryspen/hax/pull/1410) * \#1409: [Libs needed for Bertie](https://github.com/cryspen/hax/pull/1409) * \#1408: [feat(fstar/proof-libs): add a lemma for simplifying double casts](https://github.com/cryspen/hax/pull/1408) * \#1406: [[ProVerif] Match arm type error workaround](https://github.com/cryspen/hax/pull/1406) * \#1404: 
[feat(backends/fstar): make `unfold` the opaque proxy functions](https://github.com/cryspen/hax/pull/1404) * \#1402: [Improve support for getting constant bodies](https://github.com/cryspen/hax/pull/1402) * \#1401: [[ProVerif] Match arm type workaround](https://github.com/cryspen/hax/pull/1401) * \#1395: [Put macro_metavar_expr_concat feature under hax cfg.](https://github.com/cryspen/hax/pull/1395) * \#1394: [Replace `paste` by `with_builtin_macros`.](https://github.com/cryspen/hax/pull/1394) * \#1393: [Tell crane to keep references to the rust toolchain](https://github.com/cryspen/hax/pull/1393) * \#1391: [Update the rustc pin](https://github.com/cryspen/hax/pull/1391) * \#1390: [Revert #1377](https://github.com/cryspen/hax/pull/1390) * \#1389: [Cut ASTs printed in errors when they are too long.](https://github.com/cryspen/hax/pull/1389) * \#1388: [Remove AST printing in import_thir errors.](https://github.com/cryspen/hax/pull/1388) * \#1385: [Switch to proc-macro-error2 because original is unmaintained.](https://github.com/cryspen/hax/pull/1385) * \#1384: [Remove deprecated macro parsing infrastructure](https://github.com/cryspen/hax/pull/1384) * \#1381: [feat(docs/blog): this month in hax 03 2025](https://github.com/cryspen/hax/pull/1381) * \#1375: [Add invariants for while loops.](https://github.com/cryspen/hax/pull/1375) * \#1368: [feat(blog): add blog post about the rework of names](https://github.com/cryspen/hax/pull/1368) * \#1360: [[PV] Generate consistent field accessor names](https://github.com/cryspen/hax/pull/1360) * \#1340: [Add logor_disjoint to Rust_primitives.Integers](https://github.com/cryspen/hax/pull/1340) * \#808: [Fix dependencies bounded integers](https://github.com/cryspen/hax/pull/808) ### Contributors * [@N1ark](https://github.com/N1ark) * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@jschneider-bensch](https://github.com/jschneider-bensch) * [@karthikbhargavan](https://github.com/karthikbhargavan) 
* [@mamonet](https://github.com/mamonet) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-05.md ================================================ --- authors: - maxime title: "This Month in Hax: May 2025" date: 2025-05-05 --- In May, we successfully merged **19 pull requests**! [@Nadrieril](https://github.com/Nadrieril) helped making the frontend more robust and complete with work on impl exprs ([#1431](https://github.com/cryspen/hax/pull/1431)), MIR extraction ([#1444](https://github.com/cryspen/hax/pull/1444), [#1457](https://github.com/cryspen/hax/pull/1457)) and `FnOnce` ([#1477](https://github.com/cryspen/hax/pull/1477)). [@W95Psp](https://github.com/W95Psp) worked on `hax-lib` with improved support for writing F* lemmas in Rust ([#1456](https://github.com/cryspen/hax/pull/1456)). [@cmester0](https://github.com/cmester0) improved the Coq and SSProve backends ([#1426](https://github.com/cryspen/hax/pull/1426) and [#1108](https://github.com/cryspen/hax/pull/1108)) Apart from that, we contributed multiple F* [`core` library](https://doc.rust-lang.org/stable/core/) additions. Stay tuned for more updates next month! 
### Full list of PRs * \#1481: [Update owners metadata](https://github.com/cryspen/hax/pull/1481) * \#1477: [Provide the `FnOnce` shim for closures](https://github.com/cryspen/hax/pull/1477) * \#1476: [Release 0.3.1](https://github.com/cryspen/hax/pull/1476) * \#1473: [fix(proof-libs) Remove fields that shouldn't be in PartialOrd.](https://github.com/cryspen/hax/pull/1473) * \#1471: [fix(engine) Add InlineConst in concrete_idents.](https://github.com/cryspen/hax/pull/1471) * \#1465: [Release 0.3.0](https://github.com/cryspen/hax/pull/1465) * \#1458: [feat(proof-libs): add `rem_euclid` for every int types](https://github.com/cryspen/hax/pull/1458) * \#1457: [Simplify MIR place translation](https://github.com/cryspen/hax/pull/1457) * \#1456: [Fix unused in lemmas](https://github.com/cryspen/hax/pull/1456) * \#1455: [feat(proof-libs): F*: implement some wrapping operations on i64](https://github.com/cryspen/hax/pull/1455) * \#1454: [fix(engine/nix): pin ocamlgraph, waiting for https://github.com/NixOS/nixpkgs/pull/397883](https://github.com/cryspen/hax/pull/1454) * \#1451: [fix(engine): naming: items under closures](https://github.com/cryspen/hax/pull/1451) * \#1445: [Add interfaces to fstar core and rust_primitives](https://github.com/cryspen/hax/pull/1445) * \#1444: [Add missing unwind information in MIR](https://github.com/cryspen/hax/pull/1444) * \#1439: [Upstream evit changes up to Feb 21](https://github.com/cryspen/hax/pull/1439) * \#1438: [This month in hax April 2025.](https://github.com/cryspen/hax/pull/1438) * \#1431: [Consistently translate impl exprs for parent items](https://github.com/cryspen/hax/pull/1431) * \#1426: [Bertie ssprove](https://github.com/cryspen/hax/pull/1426) * \#1108: [Coq small fixes](https://github.com/cryspen/hax/pull/1108) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@clementblaudeau](https://github.com/clementblaudeau) * [@cmester0](https://github.com/cmester0) * 
[@franziskuskiefer](https://github.com/franziskuskiefer) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-06.md ================================================ --- authors: - maxime title: "This Month in Hax: June 2025" date: 2025-06-08 --- In June, we successfully merged **21 pull requests**! [@Nadrieril](https://github.com/Nadrieril) and [@N1ark](https://github.com/N1ark) continued the improvements on the frontend side with the addition of unchecked arithmetic operators ([#1513](https://github.com/cryspen/hax/pull/1513)), regrouping generic and trait arguments in a struct ([#1514](https://github.com/cryspen/hax/pull/1514)), support of trait aliases in `full_def` ([#1494](https://github.com/cryspen/hax/pull/1494)), addition of `Ty::FnDef` ([#1487](https://github.com/cryspen/hax/pull/1487)), drop calls resolution ([#1467](https://github.com/cryspen/hax/pull/1467)) and more. [@W95Psp](https://github.com/W95Psp), [@clementblaudeau](https://github.com/clementblaudeau) and myself worked on adding infrastructure for writing backends and compilation phases for hax in Rust (instead of Ocaml). We now have a Rust version of the hax AST and we can convert back and forth from the Ocaml version (which should allow to incrementally replace Ocaml phases by Rust phases). We also offer utilities for printing this AST when implementing backends. Our plan for the next months is to use this for the new backends we will add, and experiment with Rust phases. Stay tuned for more updates next month! 
### Full list of PRs * \#1517: [Update charon.yml: add `workflow_dispatch`](https://github.com/cryspen/hax/pull/1517) * \#1514: [Regroup generic and trait arguments in a struct](https://github.com/cryspen/hax/pull/1514) * \#1513: [Separate `{Add,Sub,Mul}Unchecked`](https://github.com/cryspen/hax/pull/1513) * \#1510: [Fix following merges changing the frontend AST](https://github.com/cryspen/hax/pull/1510) * \#1507: [Rust Engine: rename rust printer to rust engine](https://github.com/cryspen/hax/pull/1507) * \#1506: [Rust engine: Add spans to the Rust AST.](https://github.com/cryspen/hax/pull/1506) * \#1505: [Rust Engine: OCaml bridge for the AST (OCaml AST -> Rust AST)](https://github.com/cryspen/hax/pull/1505) * \#1504: [Rust Engine: transport the Rust AST to OCaml](https://github.com/cryspen/hax/pull/1504) * \#1502: [Upstream: Rust engine ast](https://github.com/cryspen/hax/pull/1502) * \#1501: [Upstream evit changes up to May 19th](https://github.com/cryspen/hax/pull/1501) * \#1499: [docs: Escape "*" in "F*" from Markdown](https://github.com/cryspen/hax/pull/1499) * \#1494: [full_def: support trait aliases](https://github.com/cryspen/hax/pull/1494) * \#1492: [sha256 example typecheck in f*](https://github.com/cryspen/hax/pull/1492) * \#1491: [This month in hax May 2025.](https://github.com/cryspen/hax/pull/1491) * \#1490: [proof-lib/fstar Add an actual instance for ordering of bound integers](https://github.com/cryspen/hax/pull/1490) * \#1487: [Add `Ty::FnDef`](https://github.com/cryspen/hax/pull/1487) * \#1485: [Fix detection of trait associated constants](https://github.com/cryspen/hax/pull/1485) * \#1482: [Update rustc pin](https://github.com/cryspen/hax/pull/1482) * \#1480: [Upstream evit changes up to April 25](https://github.com/cryspen/hax/pull/1480) * \#1470: [Add enum coverage test for coq](https://github.com/cryspen/hax/pull/1470) * \#1467: [Resolve Drop calls](https://github.com/cryspen/hax/pull/1467) ### Contributors * 
[@N1ark](https://github.com/N1ark) * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@chrysn](https://github.com/chrysn) * [@clementblaudeau](https://github.com/clementblaudeau) * [@cmester0](https://github.com/cmester0) * [@karthikbhargavan](https://github.com/karthikbhargavan) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-07.md ================================================ --- authors: - maxime title: "This Month in Hax: July 2025" date: 2025-07-07 --- In July, we successfully merged **32 pull requests**! [@Nadrieril](https://github.com/Nadrieril) made sure that we use a recent version of rustc (PR [\#1534](https://github.com/cryspen/hax/pull/1534)) and made new improvements to trait resolution in the frontend (PR [\#1522](https://github.com/cryspen/hax/pull/1522)). We continued efforts to improve the usability of hax with several f* core lib additions, and improvements to the CI. More importantly, [@W95Psp](https://github.com/W95Psp), [@clementblaudeau](https://github.com/clementblaudeau) and myself worked on improvements of the new hax engine implemented in Rust (PR [\#1508](https://github.com/cryspen/hax/pull/1508), [\#1518](https://github.com/cryspen/hax/pull/1518), [\#1525](https://github.com/cryspen/hax/pull/1525) and [\#1526](https://github.com/cryspen/hax/pull/1526)). Finally, let's celebrate the arrival of our new backend for lean (PR [\#1509](https://github.com/cryspen/hax/pull/1509))! [@clementblaudeau](https://github.com/clementblaudeau) is taking the lead on this project. This backend is implemented in Rust using our new infrastructure. It is still under active development and many improvements will come in the next couple of months. Stay tuned for more updates next month! 
### Full list of PRs * \#1587: [Fix inconsistent field naming in marker traits.](https://github.com/cryspen/hax/pull/1587) * \#1583: [fix(fstar-backend) Add hint for type class resolution with inheritance](https://github.com/cryspen/hax/pull/1583) * \#1581: [Regen code](https://github.com/cryspen/hax/pull/1581) * \#1576: [feat(ci/mlkem): make job work with merge queues](https://github.com/cryspen/hax/pull/1576) * \#1575: [fix(ci/mlkem): clone twice](https://github.com/cryspen/hax/pull/1575) * \#1574: [feat(ci): mlkem job: use specific libcrux revision](https://github.com/cryspen/hax/pull/1574) * \#1572: [fix(lib) Fix _super hashes for Cmp traits.](https://github.com/cryspen/hax/pull/1572) * \#1570: [Frontend: Fix regression \#1566](https://github.com/cryspen/hax/pull/1570) * \#1562: [Local hax lib for ml kem ci](https://github.com/cryspen/hax/pull/1562) * \#1561: [Check rust formatting with 'cargo fmt'.](https://github.com/cryspen/hax/pull/1561) * \#1558: [chore(ci/gh actions): `ubuntu-20.04` -> `ubuntu-22.04`](https://github.com/cryspen/hax/pull/1558) * \#1557: [Fix typo in properties.md](https://github.com/cryspen/hax/pull/1557) * \#1556: [Nix: un-pin ocamlgraph](https://github.com/cryspen/hax/pull/1556) * \#1552: [fix Core.Clone](https://github.com/cryspen/hax/pull/1552) * \#1549: [Delete .github/workflows/engine_js_build.yml](https://github.com/cryspen/hax/pull/1549) * \#1548: [Fix type of u64 rotate left](https://github.com/cryspen/hax/pull/1548) * \#1546: [This month in hax June 2025.](https://github.com/cryspen/hax/pull/1546) * \#1545: [fix(nix): make `nix run` work on darwin](https://github.com/cryspen/hax/pull/1545) * \#1544: [fix(setup.sh): install Rust engine](https://github.com/cryspen/hax/pull/1544) * \#1540: [chore(rengine/lean): fix warnings](https://github.com/cryspen/hax/pull/1540) * \#1535: [Fix typo: frontent -> frontend](https://github.com/cryspen/hax/pull/1535) * \#1534: [Update rustc to latest nightly](https://github.com/cryspen/hax/pull/1534) 
* \#1533: [feat(hax-lib): `int!`: support hex, octal and binary literals](https://github.com/cryspen/hax/pull/1533) * \#1526: [Rust Engine: expose common `DefId`s in the Rust engine](https://github.com/cryspen/hax/pull/1526) * \#1525: [Rust Engine: allow multiple backends to implement the `Pretty` trait](https://github.com/cryspen/hax/pull/1525) * \#1523: [Release 0.3.2](https://github.com/cryspen/hax/pull/1523) * \#1522: [Don't erase inner binders in trait resolution](https://github.com/cryspen/hax/pull/1522) * \#1520: [Addition of integer function implementations in Core.Num.fsti, along with generic functions in Rust_primitives.Integer.fsti to support them.](https://github.com/cryspen/hax/pull/1520) * \#1518: [Rust Engine: intro. resugared AST fragment](https://github.com/cryspen/hax/pull/1518) * \#1509: [Lean backend [Part 1/3]](https://github.com/cryspen/hax/pull/1509) * \#1508: [Rust Engine: turn it into a hax-frontend compatible engine](https://github.com/cryspen/hax/pull/1508) * \#1466: [Proof lib/fstar support more rbe](https://github.com/cryspen/hax/pull/1466) ### Contributors * [@Coda-Coda](https://github.com/Coda-Coda) * [@Nadrieril](https://github.com/Nadrieril) * [@Parrot7483](https://github.com/Parrot7483) * [@W95Psp](https://github.com/W95Psp) * [@clementblaudeau](https://github.com/clementblaudeau) * [@cmester0](https://github.com/cmester0) * [@franziskuskiefer](https://github.com/franziskuskiefer) * [@maximebuyse](https://github.com/maximebuyse) * [@satiscugcat](https://github.com/satiscugcat) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-08.md ================================================ --- authors: - lucas title: "This Month in Hax: August 2025" date: 2025-09-02 --- In August, we successfully merged **17 pull requests**! This month, we continued the effort of building out the new hax engine in Rust, with a focus on creating a robust infrastructure for backend development. 
We introduced a generic [`Backend` trait](https://github.com/cryspen/hax/pull/1603) and a new [printing infrastructure](https://github.com/cryspen/hax/pull/1600), which will simplify the process of creating new backends. We also improved how [global identifiers](https://github.com/cryspen/hax/pull/1624) are handled and added [visitors](https://github.com/cryspen/hax/pull/1585). Building on this new infrastructure, the Lean backend saw significant progress. This month, we merged [the first Lean proofs](https://github.com/cryspen/hax/pull/1590) and a set of [Lean examples](https://github.com/cryspen/hax/pull/1593). The Lean printer was also [updated](https://github.com/cryspen/hax/pull/1607) to leverage the latest improvements in the Rust engine. Stay tuned for more updates next month! ### Full list of PRs * \#1624: [Rust Engine: global identifiers: add view and rendering](https://github.com/cryspen/hax/pull/1624) * \#1613: [ci(rengine): clippy: deny lints](https://github.com/cryspen/hax/pull/1613) * \#1612: [misc(rengine): stop pinning `derive-generic-visitor` with a git branch](https://github.com/cryspen/hax/pull/1612) * \#1609: [feat(nix/ci): run examples outside of the sandbox](https://github.com/cryspen/hax/pull/1609) * \#1608: [Rust Engine: improve debug utility `show-json`](https://github.com/cryspen/hax/pull/1608) * \#1607: [Update Lean printer to new infrastructure](https://github.com/cryspen/hax/pull/1607) * \#1605: [rename explicit_panic](https://github.com/cryspen/hax/pull/1605) * \#1603: [Rust Engine: add a `Backend` trait](https://github.com/cryspen/hax/pull/1603) * \#1600: [Rust Engine: print infrastructure](https://github.com/cryspen/hax/pull/1600) * \#1597: [Fixes this month in hax](https://github.com/cryspen/hax/pull/1597) * \#1596: [This month in hax July 2025.](https://github.com/cryspen/hax/pull/1596) * \#1594: [Update publications.md](https://github.com/cryspen/hax/pull/1594) * \#1593: [Lean backend [M2] - 2/3 - 
Examples](https://github.com/cryspen/hax/pull/1593) * \#1592: [Upstream changes from evit up to June 11th 2025.](https://github.com/cryspen/hax/pull/1592) * \#1590: [Lean backend [M2] - 1/3 - First proofs](https://github.com/cryspen/hax/pull/1590) * \#1588: [fix(engine) Export traits defined in bundles.](https://github.com/cryspen/hax/pull/1588) * \#1585: [Visitors using `derive-generic-visitor`](https://github.com/cryspen/hax/pull/1585) ### Contributors * [@Parrot7483](https://github.com/Parrot7483) * [@W95Psp](https://github.com/W95Psp) * [@clementblaudeau](https://github.com/clementblaudeau) * [@franziskuskiefer](https://github.com/franziskuskiefer) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-09.md ================================================ --- authors: - clement title: "This Month in Hax: September 2025" date: 2025-10-01 --- In September, we successfully merged **44 pull requests**! After a summer slow down, we focused on improving the new rust-written engine of Hax (that progressively replaces the OCaml one), along with its flagship backend (Lean), while consolidating the documentation and tutorial. We released a [new version of hax](https://github.com/cryspen/hax/pull/1647) 🎉 In the rust-engine, a lot of work went into the treatment of identifiers ([#1648](https://github.com/cryspen/hax/pull/1648), [#1689](https://github.com/cryspen/hax/pull/1689), [#1693](https://github.com/cryspen/hax/pull/1693)). We improved the caching ([#1701](https://github.com/cryspen/hax/pull/1701), [#1719](https://github.com/cryspen/hax/pull/1719)), and the control over extraction [with attributes](https://github.com/cryspen/hax/pull/1685). The Lean backend saw a lot of new features: structs, enums, basic support for traits, support for functionalized loops. 
Along those, we improved the documentation in the [tutorial](https://hax.cryspen.com/manual/lean/tutorial/) and in the [manual](https://hax.cryspen.com/manual/lean/). The F\*-*parity* and the official launch of the Lean backend are getting closer! Stay tuned for more updates next month! ### Full list of PRs * \#1719: [Improve rustc caching with `cargo-hax`](https://github.com/cryspen/hax/pull/1719) * \#1718: [feat(CONTRIBUTING): document issue prefixes style](https://github.com/cryspen/hax/pull/1718) * \#1701: [fix(ci): cache hax entirely on Cachix](https://github.com/cryspen/hax/pull/1701) * \#1695: [(Lean Backend) Improve support for functionalized loops](https://github.com/cryspen/hax/pull/1695) * \#1694: [Show Lean backend when doing `cargo hax into --help`](https://github.com/cryspen/hax/pull/1694) * \#1693: [Rust Engine: refactor names, change tuples representation](https://github.com/cryspen/hax/pull/1693) * \#1692: [Cargo hax: improve error reports](https://github.com/cryspen/hax/pull/1692) * \#1691: [Update CHANGELOG.md](https://github.com/cryspen/hax/pull/1691) * \#1690: [Website: add tests for dead links and playground integration](https://github.com/cryspen/hax/pull/1690) * \#1689: [Rust Engine: add interning table, intern `GlobalId`s](https://github.com/cryspen/hax/pull/1689) * \#1688: [Fix names in Lean tutorial.](https://github.com/cryspen/hax/pull/1688) * \#1687: [Docs: fix build](https://github.com/cryspen/hax/pull/1687) * \#1686: [Docs: hide RFCs tab and add toolchain structure page.](https://github.com/cryspen/hax/pull/1686) * \#1685: [Allow hax_lib::include to override -i flags.](https://github.com/cryspen/hax/pull/1685) * \#1684: [Fix libcrux-ref for the merge queue.](https://github.com/cryspen/hax/pull/1684) * \#1683: [Update README.md](https://github.com/cryspen/hax/pull/1683) * \#1682: [Add documentation for the Lean backend (manual)](https://github.com/cryspen/hax/pull/1682) * \#1681: [fix(engine): add rewrite local self as a proper 
phase](https://github.com/cryspen/hax/pull/1681) * \#1679: [(Lean Backend) Add basic support for traits](https://github.com/cryspen/hax/pull/1679) * \#1678: [Merge evit Aug 21](https://github.com/cryspen/hax/pull/1678) * \#1676: [Temporarily remove ocaml doc build because of odoc issue.](https://github.com/cryspen/hax/pull/1676) * \#1669: [feat(rengine): output diagnostics in `todo!`s in printers](https://github.com/cryspen/hax/pull/1669) * \#1665: [Improve readme](https://github.com/cryspen/hax/pull/1665) * \#1662: [feat(rengine): add resugaring for tuples](https://github.com/cryspen/hax/pull/1662) * \#1661: [CONTRIBUTING.md: change and clarify the meaning of assignee](https://github.com/cryspen/hax/pull/1661) * \#1659: [feat(rengine): resugaring: add `FunctionsToConstants`](https://github.com/cryspen/hax/pull/1659) * \#1655: [fix(just): rename `show-json` into `debug-json`: that's the correct name](https://github.com/cryspen/hax/pull/1655) * \#1654: [feat(engine): import thir: add missing borrows](https://github.com/cryspen/hax/pull/1654) * \#1649: [feat(gh actions): job that creates a "this month in hax" skeleton](https://github.com/cryspen/hax/pull/1649) * \#1648: [feat(rengine): use `ExplicitDefId` instead of `DefId` for names](https://github.com/cryspen/hax/pull/1648) * \#1647: [Release hax 0.3.4](https://github.com/cryspen/hax/pull/1647) * \#1645: [fix(rengine): missed case `Static` in name rendering](https://github.com/cryspen/hax/pull/1645) * \#1644: [Hax release 0.3.3](https://github.com/cryspen/hax/pull/1644) * \#1643: [feat(frontend/hir): add visibility to items](https://github.com/cryspen/hax/pull/1643) * \#1642: [feat(blog): this month in hax](https://github.com/cryspen/hax/pull/1642) * \#1640: [Rust engine: Optimize communication with hax driver and Ocaml engine.](https://github.com/cryspen/hax/pull/1640) * \#1635: [Lean backend - Run rustc coverage tests for lean.](https://github.com/cryspen/hax/pull/1635) * \#1634: [chore(deps): bump 
tracing-subscriber from 0.3.19 to 0.3.20](https://github.com/cryspen/hax/pull/1634) * \#1633: [Rust-engine / Lean backend: pass include flag from the ocaml engine to the rust engine.](https://github.com/cryspen/hax/pull/1633) * \#1626: [Lean tutorial first version.](https://github.com/cryspen/hax/pull/1626) * \#1623: [feat(Lean backend) Add support for enums and structs](https://github.com/cryspen/hax/pull/1623) * \#1591: [Lean backend (M2) - 3/3 - Resugarings](https://github.com/cryspen/hax/pull/1591) * \#1564: [feat(ci): add an action to ensure changelog updates](https://github.com/cryspen/hax/pull/1564) * \#1559: [Merge frontend improvements](https://github.com/cryspen/hax/pull/1559) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@W95Psp](https://github.com/W95Psp) * [@alexanderlhicks](https://github.com/alexanderlhicks) * [@app/dependabot](https://github.com/dependabot) * [@clementblaudeau](https://github.com/clementblaudeau) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-10.md ================================================ --- authors: - lucas title: "This Month in Hax: October 2025" date: 2025-11-01 --- In October, we successfully merged **15 pull requests**! The Rust engine and Lean backend gained a monadic phase that wraps pure values and binds computations through `pure` and `lift` insertions, ensuring that the Lean backend faithfully uses the Lean `Result` monad. A rejection phase was added to enforce Lean's do-notation DSL, preventing interleaving of expressions and statements and providing clearer diagnostics. We also introduced a `FunctionsToConstants` resugaring: Lean can now extract values from Rust `const`s using helper functions, guaranteeing panic-free constant evaluation. 
Struct updates are now supported via base-expression syntax, and we refactored the printer traits to return static document builders, improve span handling, and simplify lifetimes. The F\* proof libraries now include a better `VecDeque` model and several fixes; loops without mutation are now accepted. The Lean backend also gained improved error messages and a refactored proof library. Stay tuned for more updates next month! ### Full list of PRs * \#1746: [feat(rust-engine/lean): monadic phase](https://github.com/cryspen/hax/pull/1746) * \#1739: [feat(rengine, lean): add rejection phase that ensures an expression is in the Lean do-notation DSL](https://github.com/cryspen/hax/pull/1739) * \#1738: [feat(lean): Use `FunctionsToConstants`](https://github.com/cryspen/hax/pull/1738) * \#1737: [This month in hax blog post 2025 09](https://github.com/cryspen/hax/pull/1737) * \#1736: [feat(lean): add support for base expression of structs](https://github.com/cryspen/hax/pull/1736) * \#1735: [refactor(rengine): revisit printer traits](https://github.com/cryspen/hax/pull/1735) * \#1733: [Fix rustc coverage tests.](https://github.com/cryspen/hax/pull/1733) * \#1732: [Accept loops without mutation.](https://github.com/cryspen/hax/pull/1732) * \#1730: [Add nightly CI job for ML-DSA lax-checking.](https://github.com/cryspen/hax/pull/1730) * \#1729: [Release 0.3.5](https://github.com/cryspen/hax/pull/1729) * \#1728: [Better VecDeque model and other F* proof lib improvements/fixes.](https://github.com/cryspen/hax/pull/1728) * \#1726: [Switch hax-lib to Rust edition 2021.](https://github.com/cryspen/hax/pull/1726) * \#1724: [fix(engine): fix owner_id](https://github.com/cryspen/hax/pull/1724) * \#1717: [[Lean] Proper error messages](https://github.com/cryspen/hax/pull/1717) * \#1696: [proof-libs/lean Library update and refactor](https://github.com/cryspen/hax/pull/1696) ### Contributors * [@W95Psp](https://github.com/W95Psp) * [@clementblaudeau](https://github.com/clementblaudeau) * 
[@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2025-11.md ================================================ --- authors: - maxime title: "This Month in Hax: November 2025" date: 2025-12-01 --- In November, we successfully merged **16 pull requests**! The frontend continues getting improvements thanks to [@Nadrieril](https://github.com/Nadrieril). So does the lean backend, with support for default methods and lib improvements including renaming the `Result` monad to `RustM`. The lib has been the focus as we now have started incorporating the new core models written in Rust. A first batch of changes already happened in November and are automatically extracted to the F* library (manually for lean). We continue the development of these models which will hopefully cover all we already have in the F* lib as manual F* models. ### Full list of PRs * \#1778: [Change impl_u64__rotate_right second parameter type to u32](https://github.com/cryspen/hax/pull/1778) * \#1777: [feat(lean): add support for default methods](https://github.com/cryspen/hax/pull/1777) * \#1775: [docs(blog): add avatar pictures locally](https://github.com/cryspen/hax/pull/1775) * \#1770: [Update author avatars in .authors.yml](https://github.com/cryspen/hax/pull/1770) * \#1769: [Fix broken links in README.](https://github.com/cryspen/hax/pull/1769) * \#1768: [refactor(lean): rename Result to RustM](https://github.com/cryspen/hax/pull/1768) * \#1767: [fix(engine/fstar): print [Fstar.Char.char] instead of [char]](https://github.com/cryspen/hax/pull/1767) * \#1765: [Incorporate charon changes to the frontend](https://github.com/cryspen/hax/pull/1765) * \#1754: [chore(examples): reserve extraction folder for auto-generated files](https://github.com/cryspen/hax/pull/1754) * \#1752: [fix(docs): fix a few typos in tutorial](https://github.com/cryspen/hax/pull/1752) * \#1751: [fix(gha): fix this month in hax template: drop 
extra `**` markdown](https://github.com/cryspen/hax/pull/1751) * \#1750: [This month in hax 2025-10](https://github.com/cryspen/hax/pull/1750) * \#1749: [Merge hax-evit approved changes](https://github.com/cryspen/hax/pull/1749) * \#1747: [feat(proof-lib/lean) core models](https://github.com/cryspen/hax/pull/1747) * \#1743: [chore(deps): bump playwright and @playwright/test in /docs/.test](https://github.com/cryspen/hax/pull/1743) * \#1742: [[F* lib] Tls codec panic freedom](https://github.com/cryspen/hax/pull/1742) ### Contributors * [@Nadrieril](https://github.com/Nadrieril) * [@Parrot7483](https://github.com/Parrot7483) * [@W95Psp](https://github.com/W95Psp) * [@abentkamp](https://github.com/abentkamp) * [@app/dependabot](https://github.com/dependabot) * [@clementblaudeau](https://github.com/clementblaudeau) * [@franziskuskiefer](https://github.com/franziskuskiefer) * [@maximebuyse](https://github.com/maximebuyse) ================================================ FILE: docs/blog/posts/this-month-in-hax/2026-01.md ================================================ --- authors: - maxime title: "This Month in Hax: January 2026" date: 2026-02-02 --- In January, we successfully merged **29 pull requests**! The Lean backend continues to get improvements, mostly thanks to [@abentkamp](https://github.com/abentkamp)! We now have more specs in the proof library (for while loops, negation and more), and improved tactics to reason with our annotations of pre/post-conditions ([\#1888](https://github.com/cryspen/hax/pull/1888)). Some naming fixes, and improvements of the handling of associated types make us closer to reaching parity with the F* backend! The other significant improvements are on the core models side. Thanks to some new models, the F* proof library is now entirely extracted from Rust core models. 
The Rust primitives models will remain hand-written in each backend, but they have been designed to be as small as possible, and are intended to be modelled in a very backend-specific way. The Lean library is now also partly extracted from core models! Some modules are excluded for now as they rely on Rust features that we don't support in the Lean backend for now. We will prioritize these missing features in the next few months to extend the Lean library using extraction from core models. ### Full list of PRs * \#1902: [Fix broken dependabot links](https://github.com/cryspen/hax/pull/1902) * \#1900: [feat: bump lean to v4.28.0-rc1](https://github.com/cryspen/hax/pull/1900) * \#1899: [Various core models fixes.](https://github.com/cryspen/hax/pull/1899) * \#1898: [Upstream Evit changes up to 24 dec 2025](https://github.com/cryspen/hax/pull/1898) * \#1896: [fix(lean): more generous timeout for bvdecide](https://github.com/cryspen/hax/pull/1896) * \#1895: [refactor(lean): rearrange lean lib file structure](https://github.com/cryspen/hax/pull/1895) * \#1891: [feat(lean): spec for negation](https://github.com/cryspen/hax/pull/1891) * \#1888: [feat(lean): hax_zify and hax_construct_pure tactics](https://github.com/cryspen/hax/pull/1888) * \#1887: [feat(lean): support for opaque impls](https://github.com/cryspen/hax/pull/1887) * \#1885: [[Lean] add Core_models.Slice.Impl.is_empty](https://github.com/cryspen/hax/pull/1885) * \#1875: [set cfg for docs.rs](https://github.com/cryspen/hax/pull/1875) * \#1872: [refactor(ci/nix): clean up a bit frontends cli src](https://github.com/cryspen/hax/pull/1872) * \#1870: [Release 0.3.6](https://github.com/cryspen/hax/pull/1870) * \#1869: [Blog post about my departure from Cryspen (and hax)](https://github.com/cryspen/hax/pull/1869) * \#1868: [OCaml engine: export namespace insensitive sort, make Lean use it](https://github.com/cryspen/hax/pull/1868) * \#1867: [fix(lean): escape keywords 
systematically](https://github.com/cryspen/hax/pull/1867) * \#1865: [feat(lean): core models](https://github.com/cryspen/hax/pull/1865) * \#1864: [docs(blog): Verifying a while loop in Hax/Lean](https://github.com/cryspen/hax/pull/1864) * \#1863: [feat(lean): add negated condition to while loop spec](https://github.com/cryspen/hax/pull/1863) * \#1860: [Lean: correctly call `fn_like_linked_expressions`, fixing `self_` -> `self`](https://github.com/cryspen/hax/pull/1860) * \#1857: [feat(lean): Add support for while loops](https://github.com/cryspen/hax/pull/1857) * \#1851: [fix(lean): associated type projections on multiple parameters](https://github.com/cryspen/hax/pull/1851) * \#1850: [fix(lean): fix rendering of impl items with constraints](https://github.com/cryspen/hax/pull/1850) * \#1849: [feat(lean): preliminary core model extraction](https://github.com/cryspen/hax/pull/1849) * \#1848: [fix: replace macro should not affect other backends](https://github.com/cryspen/hax/pull/1848) * \#1846: [feat(lean): Add support for `#[hax_lib::opaque]`](https://github.com/cryspen/hax/pull/1846) * \#1840: [feat(lean) turn rejection phase into a transformation phase](https://github.com/cryspen/hax/pull/1840) * \#1837: [[lean] add casting for all integer type pairs](https://github.com/cryspen/hax/pull/1837) * \#1822: [feat(lean): Define usize as a newtype of UInt64](https://github.com/cryspen/hax/pull/1822) ### Contributors * [@W95Psp](https://github.com/W95Psp) * [@abentkamp](https://github.com/abentkamp) * [@franziskuskiefer](https://github.com/franziskuskiefer) * [@klausnat](https://github.com/klausnat) * [@maximebuyse](https://github.com/maximebuyse) * [@rusch95](https://github.com/rusch95) ================================================ FILE: docs/blog/posts/this-month-in-hax/2026-02.md ================================================ --- authors: - alex title: "This Month in Hax: February 2026" date: 2026-03-03 --- In February, we successfully merged **32 pull 
requests**! This month, the Lean backend made headway by getting new Rust proof attributes. With the new attributes, Users can choose between two proof methods, one based on symbolic reasoning (`grind`) and one based on bit-blasting (`bv_decide`). This new setup is illustrated in our updated [Tutorial](https://hax.cryspen.com/manual/lean/tutorial/) and the [Chacha20](https://github.com/cryspen/hax/blob/main/examples/lean_chacha20/src/lib.rs) example. Moreover, the Lean backend produces prettier output by opening namespaces, handles for-loops more reliably, and its library contains more of our Rust core models. We also made great progress on our new THIR importer, implemented in Rust. Currenlty, it can be activated using `--experimental-full-def`, and we intend to make it the default soon. Special thanks to [@JuanCoRo](https://github.com/JuanCoRo), [@klausnat](https://github.com/klausnat), and [@rusch95](https://github.com/rusch95) for their contributions this month! ### Full list of PRs * \#1967: [Evit upstream january 2026](https://github.com/cryspen/hax/pull/1967) * \#1962: [feat(lean): bump to Lean v4.29.0-rc1](https://github.com/cryspen/hax/pull/1962) * \#1961: [Apply resugarings to linked items.](https://github.com/cryspen/hax/pull/1961) * \#1959: [ADC example. 
documented](https://github.com/cryspen/hax/pull/1959) * \#1956: [feat(lean_chacha20): use new attributes](https://github.com/cryspen/hax/pull/1956) * \#1955: [Lean cleanup](https://github.com/cryspen/hax/pull/1955) * \#1954: [fix(lean): print "do" in all ITE & match-branches](https://github.com/cryspen/hax/pull/1954) * \#1951: [feat(lean): for-loops for all unsigned integers](https://github.com/cryspen/hax/pull/1951) * \#1950: [Remove `BinOp` resugaring](https://github.com/cryspen/hax/pull/1950) * \#1947: [[doc] Add AI contribution guidelines](https://github.com/cryspen/hax/pull/1947) * \#1946: [feat(lean): detect recursive functions and mark them `partial_fixpoint`](https://github.com/cryspen/hax/pull/1946) * \#1943: [feat(lean): prettier proof_mode annotations](https://github.com/cryspen/hax/pull/1943) * \#1942: [feat(lean): Rust primitives for prop](https://github.com/cryspen/hax/pull/1942) * \#1941: [fix(lean): default value for associated constants are pure.](https://github.com/cryspen/hax/pull/1941) * \#1938: [New default proof for the Lean backend & proof method attribute](https://github.com/cryspen/hax/pull/1938) * \#1937: [feat: communicate specs to mvcgen](https://github.com/cryspen/hax/pull/1937) * \#1936: [doc: add `lean-toolchain` file to quick start](https://github.com/cryspen/hax/pull/1936) * \#1935: [Render suffixes in the rust engine and backends.](https://github.com/cryspen/hax/pull/1935) * \#1934: [fix(lean): Support functions without arguments in specs](https://github.com/cryspen/hax/pull/1934) * \#1933: [feat(lean): Separate symbolic and bit-blasting specs](https://github.com/cryspen/hax/pull/1933) * \#1932: [fix(lean): Extract correct `PhantomData` structure](https://github.com/cryspen/hax/pull/1932) * \#1931: [feat(lean): attributes for pureEnsures/pureRequires](https://github.com/cryspen/hax/pull/1931) * \#1929: [[Lean] Fix monadic phase bug with constants](https://github.com/cryspen/hax/pull/1929) * \#1927: [lean: keep Rust crate/module 
names unchanged](https://github.com/cryspen/hax/pull/1927) * \#1925: [feat(lean): add type annotation for cast_op](https://github.com/cryspen/hax/pull/1925) * \#1919: [feat(lean): Extract more core models](https://github.com/cryspen/hax/pull/1919) * \#1918: [fix(lean): support for opaque structs](https://github.com/cryspen/hax/pull/1918) * \#1909: [feat(lean): assoc types with constraints and inheritance](https://github.com/cryspen/hax/pull/1909) * \#1908: [This month in hax blog post 2026 01](https://github.com/cryspen/hax/pull/1908) * \#1906: [Resugaring for associated constants from associated functions without parameters](https://github.com/cryspen/hax/pull/1906) * \#1901: [feat(lean): Add support for namespaces (resurrected old PR)](https://github.com/cryspen/hax/pull/1901) * \#1834: [fix(lean): escape special characters in string literals](https://github.com/cryspen/hax/pull/1834) ### Contributors * [@JuanCoRo](https://github.com/JuanCoRo) * [@abentkamp](https://github.com/abentkamp) * [@klausnat](https://github.com/klausnat) * [@maximebuyse](https://github.com/maximebuyse) * [@rusch95](https://github.com/rusch95) ================================================ FILE: docs/blog/posts/this-month-in-hax/2026-03.md ================================================ --- authors: - maxime title: "This Month in Hax: March 2026" date: 2026-04-01 --- In March, we successfully merged **8 pull requests**! This was a rather quiet month with some of us away for Real World Crypto and HACS in Taipei. But we continued to improve the Lean library with bugfixes and new additions like support for `u128` and `i128`. Special thanks to [@JuanCoRo](https://github.com/JuanCoRo) for improving and extending the treatment of binops in the Lean backend. And thank you [@redshiftzero](https://github.com/redshiftzero) for your first PR! 
### Full list of PRs * \#1986: [fix(lean): fix bugs found by claude](https://github.com/cryspen/hax/pull/1986) * \#1982: [fix: use specs for ops only in specset int](https://github.com/cryspen/hax/pull/1982) * \#1979: [add bitwise OR trait to `core_models::ops::bit` ](https://github.com/cryspen/hax/pull/1979) * \#1974: [fix(lean): Remove unnecessary noncomputable tags](https://github.com/cryspen/hax/pull/1974) * \#1973: [This month in hax blog post 2026 02](https://github.com/cryspen/hax/pull/1973) * \#1968: [feat(lean): Int128 and UInt128](https://github.com/cryspen/hax/pull/1968) * \#1966: [[Engine] proper rejection for anonymous associated types.](https://github.com/cryspen/hax/pull/1966) * \#1963: [[Lean] Add more binops](https://github.com/cryspen/hax/pull/1963) ### Contributors * [@JuanCoRo](https://github.com/JuanCoRo) * [@abentkamp](https://github.com/abentkamp) * [@maximebuyse](https://github.com/maximebuyse) * [@redshiftzero](https://github.com/redshiftzero) ================================================ FILE: docs/blog/posts/this-month-in-hax/2026-04.md ================================================ --- authors: - alex title: "This Month in Hax: April 2026" date: 2026-05-07 --- In April, we successfully merged **11 pull requests**! Besides various bug fixes, we improved the rendering of ellipsis in pattern-matching and refactored the mvcgen registration of the RustM monad to support partial correctness lemmas. Thanks to the external contributors [@MavenRain](https://github.com/MavenRain), [@niooss-ledger](https://github.com/niooss-ledger), and [@remix7531](https://github.com/remix7531)! Towards the end of the month, we participated in the [Software Verification in Lean](https://beneficial-ai-foundation.github.io/SVIL2026/) workshop and hackathon organized by the Beneficial AI Foundation and the Lean FRO. We came away with lots of new ideas for our Lean backend. For a couple of weeks, we have been working on merging the Aeneas engine into Hax. 
We haven't made any public contributions to Hax in that direction yet, but we have created a new repo containing our Rust core models with an Aeneas-compatible extraction: https://github.com/cryspen/rust-core-models ### Full list of PRs * \#2010: [refactor: prettier barrett example](https://github.com/cryspen/hax/pull/2010) * \#2005: [fix(lean): make the proof of RustM.toBVRustM_bind compatible with Lean 4.29.0](https://github.com/cryspen/hax/pull/2005) * \#2002: [feat(lean): add resugaring for ellipsis in pattern-matching](https://github.com/cryspen/hax/pull/2002) * \#2001: [fix(exporter): support ellipsis patterns by adding wildcards](https://github.com/cryspen/hax/pull/2001) * \#2000: [feat(lean): derive Repr, BEq, DecidableEq for tuples](https://github.com/cryspen/hax/pull/2000) * \#1998: [This month in hax blog post 2026 03](https://github.com/cryspen/hax/pull/1998) * \#1996: [Rename GenericConstraint::Type to TypeClass and ::Projection to Equality](https://github.com/cryspen/hax/pull/1996) * \#1995: [fix(lean): fix core models extraction](https://github.com/cryspen/hax/pull/1995) * \#1994: [refactor: redefine RustM monad using ExceptT Error Option](https://github.com/cryspen/hax/pull/1994) * \#1991: [fix: remove monad_extract_simplify](https://github.com/cryspen/hax/pull/1991) * \#1972: [nix: fix development shell](https://github.com/cryspen/hax/pull/1972) ### Contributors * [@MavenRain](https://github.com/MavenRain) * [@abentkamp](https://github.com/abentkamp) * [@clementblaudeau](https://github.com/clementblaudeau) * [@maximebuyse](https://github.com/maximebuyse) * [@niooss-ledger](https://github.com/niooss-ledger) * [@remix7531](https://github.com/remix7531) ================================================ FILE: docs/default.nix ================================================ { stdenv, buildPythonPackage, fetchPypi, setuptools, wheel, mkdocs , mkdocs-material, fetchFromGitHub, natsort, wcmatch, hax-frontend-docs , mkdocs-awesome-nav }: let 
mkdocs-glightbox = buildPythonPackage rec { pname = "mkdocs-glightbox"; version = "0.4.0"; src = fetchPypi { inherit pname version; hash = "sha256-OSs0IHv5WZEHGhbV+JFtHS8s1dW7Wa4pl0hczXeMcNk="; }; doCheck = false; pyproject = true; build-system = [ setuptools wheel ]; }; mkdocs-nav-weight = buildPythonPackage rec { pname = "mkdocs-nav-weight"; version = "0.0.7"; src = fetchPypi { inherit pname version; hash = "sha256-gAQGD3U3/NmWW/3uUSrCjo/T+rqdIlMkKn83TjDgbp0="; }; doCheck = false; pyproject = true; build-system = [ setuptools wheel mkdocs ]; }; in stdenv.mkDerivation { name = "hax-docs"; src = ./..; buildInputs = [ mkdocs mkdocs-material mkdocs-glightbox mkdocs-nav-weight mkdocs-awesome-nav ]; buildPhase = '' mkdocs build ''; installPhase = '' mv site $out cp -rf ${hax-frontend-docs}/share/doc/ $out/frontend/docs mkdir -p $out/engine/docs/hax-engine echo 'Sorry, this page is temporarily unavailable (see issue)' > $out/engine/docs/hax-engine/index.html ''; } ================================================ FILE: docs/dev/architecture.md ================================================ # Architecture Hax is a software pipeline designed to transform Rust code into various formal verification backends such as **F\***, **Coq**, **ProVerif**, and **EasyCrypt**. It comprises two main components: 1. **The Frontend** (written in Rust) 2. **The Engine** (written in OCaml) The frontend hooks into the Rust compiler, producing an abstract syntax tree for a given crate. The engine then takes this AST as input and applies various transformations to reach, in the end, the language of the backend: F*, Coq... ## The Frontend (Rust) The frontend is responsible for extracting and exporting Rust code's abstract syntax trees (ASTs) in a format suitable for processing by the engine (or by other tools). 
### [`hax-frontend-exporter` Library](https://hax.cryspen.com/frontend/docs/hax_frontend_exporter/) This library mirrors the internal types of the Rust compiler (`rustc`) that constitute the **HIR** (High-Level Intermediate Representation), **THIR** (Typed High-Level Intermediate Representation), and **MIR** (Mid-Level Intermediate Representation) ASTs. It extends them with additional information such as attributes, trait implementations, and removes ID indirections. **`SInto` Trait:** The library defines an entry point for translating a given `rustc` value to its mirrored hax version using the [`SInto`](https://hax.cryspen.com/frontend/docs/hax_frontend_exporter/trait.SInto.html) trait (stateful `into`). For a value `x` of type `T` from `rustc`, if `T` is mirrored by hax, then `x.sinto(s)` produces an augmented and simplified "hax-ified" AST for `x`. Here, `s` represents the state holding information about the translation process. ### `hax-driver` Binary `hax-driver` is a custom Rust compiler driver that behaves like `rustc` but performs additional tasks: 1. **Item Enumeration:** Lists all items in a crate. 2. **AST Transformation:** Applies `sinto` on each item to generate the hax-ified AST. 3. **Output Generation:** Outputs the mirrored items into a `haxmeta` file within the `target` directory. ### `cargo-hax` Binary `cargo-hax` provides a `hax` subcommand for Cargo, accessible via `cargo hax --help`. It serves as the command-line interface for hax, orchestrating both the frontend and the engine. **Workflow:** 1. **Custom Build Execution:** Runs `cargo build`, instructing Cargo to use `hax-driver` instead of `rustc`. 2. **Multiple Compiler Invocations:** `cargo build` invokes `hax-driver` multiple times with various options. 3. **Inter-Process Communication:** `hax-driver` communicates with `cargo-hax` via `stderr` using JSON lines. 4. **Metadata Generation:** Produces `haxmeta` files containing the transformed ASTs. 5. 
**Engine Invocation (Optional):** If requested, runs the engine, passing options and `haxmeta` information via `stdin` serialized as JSON. 6. **Interactive Communication:** Engages in interactive communication with the engine. 7. **User Reporting:** Outputs results and diagnostics to the user. ## The Engine (OCaml - [documentation](/engine/docs/hax-engine/index.html)) The engine processes the transformed ASTs and options provided via JSON input from `stdin`. It performs several key functions to convert the hax-ified Rust code into the target backend language. ### Importing and Simplifying ASTs - **AST Importation:** Imports the hax-ified Rust THIR AST. This is module `Import_thir`. - **Internal AST Conversion:** Converts the imported AST into a simplified and opinionated internal AST designed for ease of transformation and analysis. This is mostly the functor `Ast.Make`. ### Internal AST and Features The internal AST is defined using a **functor** that takes a list of type-level booleans, referred to as **features**, and produces the AST types accordingly. Features are, for instance, mutation, loops, and unsafe code. The enumeration `Features.Enumeration` lists all those features. **Feature Witnesses:** On relevant AST nodes, feature witnesses are included to enforce constraints at the type level. For example, in the `loop` expression constructor, a witness of type `F.loop` is used, where `F` represents the current feature set. If `F.loop` is an empty type, constructing a `loop` expression is prohibited, ensuring that loops are disallowed in contexts where they are not supported. ### Transformation Phases The engine executes a sequence of **phases**, which are determined based on the target backend. Each phase: 1. **Input:** Takes a list of items from an AST with specific feature constraints. 2. **Output:** Transforms these items into a new AST type, potentially enabling or disabling features through type-level changes. The phases can be found in the `Phases` module. 
### Backend Code Generation After completing the transformation phases: 1. **Backend Printer Invocation:** Calls the printer associated with the selected backend to generate the target code. 2. **File Map Creation:** Produces a map from file names to their contents, representing the generated code. 3. **Output Serialization:** Outputs the file map and additional information (e.g., errors) as JSON to `stderr`. ### Communication Protocol The engine communicates asynchronously with the frontend using a protocol defined in [`hax_types::engine_api::protocol`](https://hax.cryspen.com/frontend/docs/hax_types/engine_api/protocol/index.html). This communication includes: - **Diagnostic Data:** Sending error messages, warnings, and other diagnostics. - **Profiling Information:** Providing performance metrics and profiling data. - **Pretty-Printing Requests:** Requesting formatted versions of Rust source code or diagnostics for better readability. ================================================ FILE: docs/dev/ast_ebnf.md ================================================ We currently take inputs from the following AST. Literals are strings, numbers and booleans. ``` ebnf char ::= [a-zA-Z] string ::= char* digit ::= [0-9] uint ::= digit+ int ::= ("-")? uint float ::= int (".")? uint bool ::= "true" | "false" local_var ::= ident global_var ::= rust-path-identifier literal ::= | '"' string '"' | "'" char "'" | int | float (* [a] *) | bool ``` We support a number of simple types characters, strings, booleans and numbers. Number types for integers (8,16,32,64,128 bit or machine sized) and floats (16,32, or 64 bit). Composite types are tuples, fixed length lists (arrays), variable length lists (vectors/slices), ptr types, and function types. Lastly we have named types defined by items, e.g. enums and structs. 
``` ebnf ty ::= | "bool" | "char" | "u8" | "u16" | "u32" | "u64" | "u128" | "usize" | "i8" | "i16" | "i32" | "i64" | "i128" | "isize" | "f16" | "f32" | "f64" (* [a] *) | "str" | (ty ",")* | "[" ty ";" int "]" | "[" ty "]" | "*const" ty | "*mut" ty (* [b] *) | "*" expr | "*mut" expr (* [b] *) | ident | (ty "->")* ty | "dyn" (goal)+ (* [c] *) ``` The patterns allowed reflect these types. Wildcard patterns, literal types, typed patterns, list patterns, record or tuple patterns. ``` ebnf pat ::= | "_" | ident "{" (ident ":" pat ";")* "}" | ident "(" (pat ",")* ")" | (pat "|")* pat | "[" (pat ",")* "]" (* [d] *) | "&" pat | literal | ("&")? ("mut")? ident ("@" pat)? (* [e] *) ``` The simple expressions are literals, local or global variables, type casts, assignments and lists. Control flow expressions, if statements, match statements, loops, return, break and continue. The rest is blocks, macro calls, lambda functions and borrowing. ``` ebnf expr ::= | "if" expr "{" expr "}" ("else" "{" expr "}")? | "if" "let" pat (":" ty)? "=" expr "{" expr "}" ("else" "{" expr "}")? | expr "(" (expr ",")* ")" | literal | "[" (expr ",")* "]" | "[" expr ";" int "]" | ident "{" (ident ":"expr ";")* "}" | ident "{" (ident ":"expr ";")* ".." expr "}" | "match" expr guard "{" (("|" pat)* "=>" (expr "," | "{" expr "}"))* "}" | "let" pat (":" ty)? "=" expr ";" expr | "let" pat (":" ty)? "=" expr "else" "{" expr "}" ";" expr | modifiers "{" expr "}" | local_var | global_var | expr "as" ty | "loop" "{" expr "}" | "while" "(" expr ")" "{" expr "}" | "for" "(" pat "in" expr ")" "{" expr "}" | "for" "(" "let" ident "in" expr ".." expr ")" "{" expr "}" | "break" expr | "continue" | pat "=" expr | "return" expr | expr "?" | "&" ("mut")? expr (* [e] *) | "&" expr "as" "&const _" (* [b] *) | "&mut" expr "as" "&mut _" | "|" pat "|" expr ``` The items supported are functions, type aliasing, enums, structs, trait definitions and implementations, and imports. 
``` ebnf item ::= | "const" ident "=" expr | "static" ident "=" expr (* [b] *) | modifiers "fn" ident ("<" (generics ",")* ">")? "(" (pat ":" ty ",")* ")" (":" ty)? "{" expr "}" | "type" ident "=" ty | "enum" ident ("<" (generics ",")* ">")? "{" (ident ("(" (ty)* ")")? ",")* "}" | "struct" ident ("<" (generics ",")* ">")? "{" (ident ":" ty ",")* "}" | "trait" ident ("<" (generics ",")* ">")? "{" (trait_item)* "}" | "impl" ("<" (generics ",")* ">")? ident "for" ty "{" (impl_item)* "}" | "mod" ident "{" (item)* "}" | "use" path ";" ``` ## Full eBNF ``` ebnf char ::= [a-zA-Z] string ::= char* digit ::= [0-9] uint ::= digit+ int ::= ("-")? uint float ::= int (".")? uint bool ::= "true" | "false" local_var ::= ident global_var ::= rust-path-identifier literal ::= | '"' string '"' | "'" char "'" | int | float [a] | bool generic_value ::= | "'" ident | ty | expr goal ::= | ident "<" (generic_value ",")* ">" ty ::= | "bool" | "char" | "u8" | "u16" | "u32" | "u64" | "u128" | "usize" | "i8" | "i16" | "i32" | "i64" | "i128" | "isize" | "f16" | "f32" | "f64" (* [a] *) | "str" | (ty ",")* | "[" ty ";" int "]" | "[" ty "]" | "*const" ty | "*mut" ty (* [b] *) | "*" expr | "*mut" expr (* [b] *) | ident | (ty "->")* ty | "dyn" (goal)+ (* [c] *) pat ::= | "_" | ident "{" (ident ":" pat ";")* "}" | ident "(" (pat ",")* ")" | (pat "|")* pat | "[" (pat ",")* "]" (* [d] *) | "&" pat | literal | ("&")? ("mut")? ident ("@" pat)? (* [e] *) modifiers ::= | "" | "unsafe" modifiers | "const" modifiers | "async" modifiers (* [b] *) guard ::= | "if" "let" pat (":" ty)? "=" expr expr ::= | "if" expr "{" expr "}" ("else" "{" expr "}")? | "if" "let" pat (":" ty)? "=" expr "{" expr "}" ("else" "{" expr "}")? | expr "(" (expr ",")* ")" | literal | "[" (expr ",")* "]" | "[" expr ";" int "]" | ident "{" (ident ":"expr ";")* "}" | ident "{" (ident ":"expr ";")* ".." expr "}" | "match" expr guard "{" (("|" pat)* "=>" (expr "," | "{" expr "}"))* "}" | "let" pat (":" ty)? 
"=" expr ";" expr | "let" pat (":" ty)? "=" expr "else" "{" expr "}" ";" expr | modifiers "{" expr "}" | local_var | global_var | expr "as" ty | "loop" "{" expr "}" | "while" "(" expr ")" "{" expr "}" | "for" "(" pat "in" expr ")" "{" expr "}" | "for" "(" "let" ident "in" expr ".." expr ")" "{" expr "}" | "break" expr | "continue" | pat "=" expr | "return" expr | expr "?" | "&" ("mut")? expr (* [e] *) | "&" expr "as" "&const _" (* [b] *) | "&mut" expr "as" "&mut _" | "|" pat "|" expr impl_item ::= | "type" ident "=" ty ";" | modifiers "fn" ident ("<" (generics ",")* ">")? "(" (pat ":" ty ",")* ")" (":" ty)? "{" expr "}" trait_item ::= | "type" ident ";" | modifiers "fn" ident ("<" (generics ",")* ">")? "(" (pat ":" ty ",")* ")" (":" ty)? ("{" expr "}" | ";") item ::= | "const" ident "=" expr | "static" ident "=" expr (* [b] *) | modifiers "fn" ident ("<" (generics ",")* ">")? "(" (pat ":" ty ",")* ")" (":" ty)? "{" expr "}" | "type" ident "=" ty | "enum" ident ("<" (generics ",")* ">")? "{" (ident ("(" (ty)* ")")? ",")* "}" | "struct" ident ("<" (generics ",")* ">")? "{" (ident ":" ty ",")* "}" | "trait" ident ("<" (generics ",")* ">")? "{" (trait_item)* "}" | "impl" ("<" (generics ",")* ">")? ident "for" ty "{" (impl_item)* "}" | "mod" ident "{" (item)* "}" | "use" path ";" ``` ## Footnotes * **[a]** no support yet for raw pointers, async/await, static, extern, or union types * **[b]** partial support for nested matching and range patterns * **[c]** partial support for mutable borrows * **[d]** most backends lack support for dynamic dispatch, floating point operations * **[e]** some backends only handle specific forms of iterators ================================================ FILE: docs/dev/docs.md ================================================ # Docs ## mkdocs material (this page) Install dependencies ```bash pip install mkdocs-glightbox mkdocs-nav-weight mkdocs-material ``` [Official docs](https://squidfunk.github.io/mkdocs-material). 
### Commands * `mkdocs new [dir-name]` - Create a new project. * `mkdocs serve` - Start the live-reloading docs server. * `mkdocs build` - Build the documentation site. * `mkdocs -h` - Print help message and exit. ### Project layout mkdocs.yml # The configuration file. docs/ index.md # The documentation homepage. ... # Other markdown pages, images and other files. blog/ # The blog posts/ # Blog posts ### Including external files ``` ;--8<-- "engine/DEV.md:3:7" ``` --8<-- "engine/DEV.md:3:7" ================================================ FILE: docs/dev/index.md ================================================ --- weight: 5 --- # Dev on hax This chapter contains information about internals of hax. Please read the [`CONTRIBUTING.md`](https://github.com/cryspen/hax/blob/main/CONTRIBUTING.md) before opening a pull request. ================================================ FILE: docs/dev/libraries_macros.md ================================================ # Libraries # Macros and attributes The hax engine understands only one attribute: `#[_hax::json(PAYLOAD)]`, where `PAYLOAD` is a JSON serialization of the Rust enum `hax_lib_macros_types::AttrPayload`. Note `#[_hax::json(PAYLOAD)]` is a [tool attribute](https://github.com/rust-lang/rust/issues/66079): an attribute that is never expanded. In the engine, the OCaml module `Attr_payloads` offers an API to query attributes easily. The types in crate `hax_lib_macros_types` and corresponding serializers/deserializers are automatically generated in OCaml, thus there is no manual parsing involved. ## User experience Asking the user to type `#[_hax::json(some_long_json)]` is not very friendly. Thus, the crate `hax-lib-macros` defines a bunch of [proc macros](https://doc.rust-lang.org/beta/reference/procedural-macros.html) that defines nice and simple-to-use macros. 
Those macros take care of cooking some `hax_lib_macros_types::AttrPayload` payload(s), then serialize those payloads to JSON and produce one or more `#[_hax::json(serialized_payload)]` attributes. ================================================ FILE: docs/engine/index.md ================================================ # Engine The hax engine. Its [odoc](https://ocaml.github.io/odoc/) documentation can be found [here](./docs/hax-engine/index.html). ================================================ FILE: docs/engine/toolchain_structure/index.md ================================================ # Toolchain structure Hax is composed of three main parts: * The frontend, which interfaces with rustc to extract Rust intermediate representation ASTs (for MIR or THIR) out of Rust code. * The engine, which imports the Rust THIR AST to the internal hax AST, and defines a set of transformation phases on this internal AST. * The backends, which make use of a set of phases from the engine, and print it to a target verification framework or language. A backend also usually needs to provide a proof library and some more utilities. ================================================ FILE: docs/frontend/evaluation.md ================================================ # Evaluation of the hax Frontend This section provides an evaluation of the hax frontend, consisting of two parts: - A **quantitative evaluation**, which measures how effectively the frontend processes Rust code. - A **qualitative analysis**, which explores the frontend’s capabilities in real-world scenarios. Together, these evaluations document the current strengths and limitations of the hax frontend. ## Quantitative Evaluation The quantitative evaluation aims to estimate how much Rust code the frontend can handle. It does **not** assess the correctness of the frontend's output. 
### Overview The hax toolchain is composed of several components (see [high-level architecture](./index.md#high-level-arch)): - **Frontend**: hooks into the Rust compiler to export rich Abstract Syntax Trees (ASTs) for specified crates. - **Engine** and **Backends**: consume those ASTs to produce code. - **Libraries**: `hax-lib` provides supporting functionality, and reference models for existing Rust libraries (e.g. the [Core library](https://doc.rust-lang.org/stable/core/) of Rust). In this quantitative evaluation, we focus on the **frontend**: the process of generating JSON-encoded ASTs from Rust code. We aim to assess: 1. **Successful Extraction**: The success rate of producing ASTs. 2. **Performance**: Ensuring the extraction process remains efficient enough for real-world usage. ### Methodology For each Rust crate in our test set, we follow these steps: 1. Clone the crate's source code. 2. Run `cargo fetch` to download its dependencies. 3. Execute `cargo hax json --use-ids`, recording any errors and the time taken. 4. Clean Cargo's cache with `cargo clean`. 5. Run `cargo check`, again recording any errors and time. Since `cargo hax json` is effectively `cargo check` with extra work, this serves as our performance baseline. We implemented this protocol in an internal Cryspen tool, which also evaluates other parts of the hax toolchain. ### Crate Selection To ensure we capture a diverse set of crates: - We include the **5,000 most downloaded** crates from crates.io. - We also include the **top 1,500 crates** in the **cryptography** category on crates.io, reflecting hax's relevance for verifying critical software like cryptographic libraries. ### Success Rate Each crate falls into one of three categories: 1. **Successful**: hax produced a valid AST. 2. **Failed**: hax could not produce an AST (despite `cargo check` succeeding). 3. **Both Failed**: Both `cargo check` and `cargo hax` failed. 
```mermaid %%{init: {'theme': 'base', 'themeVariables': { 'pie1': '#27ae60', 'pie2': '#f1c40f', 'pie3': '#e74c3c'}}}%% pie showData "`cargo check` failure" : 41 "`cargo hax` failure" : 24 "Success" : 935 ``` Out of 1000 crates, our tool failed to run `cargo check` on 41 of them due to setup issues. These problems typically involve missing system packages that Cargo cannot automatically install or unusual Cargo configurations that require manual intervention. We therefore exclude these 41 crates from further analysis. Of the remaining 959 crates, the hax frontend successfully processed a **vast majority (97.5%)**. The remaining failures fall into four distinct categories, as illustrated in the pie chart below. ```mermaid %%{init: {'theme': 'base', 'themeVariables': { 'pie1': '#c0392b', 'pie2': '#3498db', 'pie3': '#2980b9', 'pie4': '#e74c3c'}}}%% pie showData title Frontend failures "Unsupported Rust toolchain" : 4 "Rust setup issue" : 6 "Binder panic" : 10 "Stack overflow in Rustc" : 4 ``` The errors marked in **blue** on the chart indicate situations where the Rust toolchain used by the tested crate or its dependencies is incompatible with the specific version hax is pinned to, or where the crate and hax are sensitive to toolchain variations. Rust edition 2024 was updated very recently, which explains roughly half of these issues. The errors shown in **red**, however, are directly related to hax. The binder-related panics are a [known bug](https://github.com/cryspen/hax/issues/1046). Additionally, the stack overflow errors occur due to specific code paths in the Rust compiler being incorrectly triggered by hax. Ultimately, only **1.6%** of crates encounter such hax-specific bugs. ### Performance Analysis For the crates that successfully generated ASTs, we compared the time taken by `cargo hax json` against `cargo check`. Because crate size and complexity vary greatly, we normalized[^1] the times to allow fair comparisons.
| Statistic | Cargo Check | Cargo Hax | |-----------------|------------:|----------:| | **Median** | 0.147 | 0.780 | | **Mean** | 0.215 | 0.771 | | **10th Decile** | 0.425 | 0.953 |
- On average, `cargo hax` is about 4–5 times slower than `cargo check`. - At the **10th decile**, the slowdown is only about 2×, indicating better scaling for larger crates. ### Conclusion Our quantitative evaluation shows that the hax frontend successfully extracts ASTs for a large portion of the Rust ecosystem. Nevertheless, a small portion of crates reveal performance bottlenecks or outright failures that require further investigation and optimization. These results also highlight a few **limitations** of this initial study: - We only evaluated the **frontend** process. Other stages of the toolchain, such as the engine, backends or libraries, require separate assessments. - We did not assess the **correctness or completeness** of the generated JSON, highlighting the need for a qualitative analysis to verify that the extracted ASTs meet the required specifications. Overall, the hax frontend demonstrates capabilities for large-scale Rust code verification, but continued refinement is needed to handle edge cases and improve performance. [^1]: For a given crate, we normalize the times by dividing them by the total time. ## Qualitative evaluation The qualitative evaluation aims at identifying what Rust patterns the frontend can handle. It also tests whether the information extracted from the frontend describes correctly the input Rust code. ### Rustc coverage test suite The Rust compiler (rustc) has extensive test suites that describe various expectations of how it should handle Rust input. One of them is the [coverage test suite](https://rustc-dev-guide.rust-lang.org/tests/compiletest.html#coverage-tests) which contains a set of Rust inputs that is supposed to cover a wide range of Rust constructs. This test suite has been adapted to test hax. We use the following methodology: - The Rust inputs from the test suite have been copied to `rustc-coverage-tests/src/`, and can be updated using a script. 
- A Rust crate structure is built around these source files, to allow hax to handle them. The files that fail `cargo check` are excluded. There are currently 26 excluded (out of 81) tests, mostly because they contain asynchronous code, which requires a runtime file that is missing in our infrastructure. - To test hax frontend, we run `cargo hax json`. If the command succeeds, the test is considered successful. These tests aim at increasing the confidence in the ability of hax frontend to handle Rust inputs covering all of the language constructs. As of today, all tests are handled successfully by hax frontend. However we don't test any requirement on the output (see the following section for tests of hax frontend output quality). ### Rust printer testing This method aims at testing the quality of hax frontend's output. It uses the Rust hax engine. This tool (written in Rust) takes the output of hax frontend (a json file describing the content of a Rust crate), it imports it as an AST (similar to the hax engine AST), and then prints this AST in Rust syntax. If the Rust code we get out of this tool is equivalent to the Rust code it was given as input, then this means hax frontend correctly extracted the input code without losing or altering any information. There is no easy way of testing the full input/output equivalence so the methodology here is to test that the resulting code behaves the same as the input code with respect to relevant test cases. This work is available in the `hax-rust-engine` folder. In the `tests` subfolder, an input file is available with tests for all Rust constructs supported by the printer (currently functions and expressions). For now these tests pass after extracting and printing the file with hax frontend and the Rust printer. This means that for the Rust constructs covered by the printer and the test file, hax frontend's extraction is correct. However this still needs to be extended to test more Rust constructs. 
================================================ FILE: docs/frontend/index.md ================================================ # Frontend hax is a tool designed to facilitate the formal verification of Rust programs. It enables the translation of Rust crates into formal languages like F* or Coq. Once translated, these formal representations allow to write formal proofs about the behavior and correctness of their Rust code. ## User flow This document focuses on a specific user flow: extracting F\* code. The process described here applies similarly to all other backends, including F*, Rocq, SSProve, ProVerif, and EasyCrypt. The goal is for the user to prove a property on a Rust function, `f`, using the F\* formal language. The function `f` is defined in the module `mymod`, within the crate `mycrate`. To achieve this, the user follows these steps: 1. Annotate the Rust crate mycrate with the properties to be proven and, if needed, provide proof hints. 2. Run the command `cargo hax into -i '-** +mycrate::mymod::f' fstar`. 3. Execute F\*. If F\* fails to prove the specified properties, the user revisits step (1) to refine the annotations and proof hints. For a practical guide on using Hax, please refer to the [manual](../manual/index.md). ![](./user-flow.excalidraw.png) ## High-Level Architecture of Hax {#high-level-arch} Hax consists of five main components, as illustrated in the diagram below, with each numbered step directly corresponding to its labeled section in the diagram: 1. The **frontend** handles the extraction and export of given [Rust crates](https://doc.rust-lang.org/book/ch07-01-packages-and-crates.html) into abstract syntax trees (ASTs). 2. The **engine** performs a series of phases and rewrites, simplifying and transforming the Rust program as needed. 3. The **backends** --one per target language-- request the engine to simplify the Rust program for their specific target and then pretty-print the program as F*, Roq, PV, or other formats. 4. 
The Rust helper crate, **hax-lib**, provides Hax-specific helpers and macros to annotate a Rust program with properties, invariants, or proof hints. 5. The **annotated standard library** is a work in progress partial model for the Rust base libraries (`core`, `std`, `alloc`), enriched with logical properties to assist in formal verification. ![](./high-level-arch.excalidraw.png){:.center} In the diagram, the dotted lines indicate dependencies between Rust or F\* artifacts, flowing from left to right. The user begins by exporting a crate (referred to as the "Input crate" in the top left of the diagram). This crate depends on both the Rust `std` library and `hax-lib`. The `std` dependency is added automatically by Rust, while the `hax-lib` dependency is introduced by hax. The input crate passes through the hax toolchain as follows: - First, the crate is parsed and exported as JSON by the **frontend** (➊), with assistance from the Rust compiler. - Next, the **engine** processes the exported data (➋), transforming and simplifying it. - Finally, the **F\*** backend generates the corresponding F\* files (➌). Since the input crate depends on both `std` and `hax-lib`, the generated F\* modules maintain these dependencies. To handle this, Hax translates `hax-lib` to F\* (➍) and also translates our model of the `std` library (➎) into F\*. ## Ast Extraction Workflow This section delves into the technical details of hax's extraction process. The process involves the following components (as illustrated in the diagram below, with each numbered step directly corresponding to its labeled section in the diagram): 1. **`cargo-hax`**: a binary that provides a [custom command](https://doc.rust-lang.org/book/ch14-05-extending-cargo.html) `hax` to `cargo`. 2. **`cargo check`**: the `check` subcommand of `cargo`. It is sensitive to the environment variable `RUSTC_WORKSPACE_WRAPPER`: when set, `cargo` will call the program specified by `$RUSTC_WORKSPACE_WRAPPER` instead of `rustc`. 3. 
**`driver-hax-frontend-exporter`**: a [custom `rustc` driver](https://jyn.dev/rustc-driver/#paths). Instead of compiling Rust into a binary, `driver-hax-frontend-exporter` exports ASTs as JSON. 4. **`hax-frontend-exporter`**: a library that mirrors Rust's internal ASTs as its own enriched AST representation. This library also provides bridges from Rust's ASTs to its enriched ASTs. ![](./workflow-diagram.excalidraw.png) When `cargo check` compiles a crate, it invokes `rustc` multiple times, but only some of these calls are relevant to us. Additionally, `cargo check` may also build dependencies. As a result, we inject our custom export logic selectively, ensuring that `driver-hax-frontend-exporter` behaves exactly like `rustc` in all other cases. During compilation, `rustc` produces several artifacts: - **`*.rmeta` files**: metadata files containing type information, function signatures, constants, and more. - **`*.rlib` files**: static library artifacts with compiled Rust code and metadata. - **Diagnostic messages**: messages on standard output for communication with `cargo check`, including errors, warnings, and status updates. Our custom export logic extends this by generating additional artifacts: - **`*.haxmeta` files**: similar to Rust's `*.rmeta` files, but contains full enriched abstract syntax trees. The `haxmeta` files contains a binary serialization of the [`HaxMeta`](https://hax.cryspen.com/frontend/docs/hax_types/driver_api/struct.HaxMeta.html) type. - **Diagnostic messages**: sent to standard output and used to communicate specifically with `cargo hax`. Those messages are JSON serializations of the [`HaxDriverMessage`](https://hax.cryspen.com/frontend/docs/hax_types/driver_api/enum.HaxDriverMessage.html) type. After calling `cargo check`, `cargo hax` parses the `*.haxmeta` files and continues further along the hax toolchain, either by outputting JSON directly or by calling the engine to generate files for targets such as F\*, ProVerif, or Roqc. 
`cargo-hax`, `driver-hax-frontend-exporter`, and `hax-frontend-exporter` together form what we refer to as "the frontend". The engine is represented by the binary `hax-engine`, which includes the backends. The frontend is implemented in Rust, while the engine is implemented in OCaml. Communication between all components occurs through **stdout**, **stderr**, or **stdin**, using JSON messages defined in the Rust crate [`hax-types`](https://hax.cryspen.com/frontend/docs/hax_types/index.html). This section provides an overview of the workflow of the frontend of hax. ================================================ FILE: docs/index.md ================================================ # hax hax is a tool for high assurance translations of a large subset of Rust into formal languages such as [F\*](https://www.fstar-lang.org/) or [Rocq](https://rocq-prover.org/). Head over to the [Manual](./manual/index.md) or the [playground](https://hax-playground.cryspen.com) to get started! ![hax overview](static/img/overview.png) ## Playground Try out hax in the browser on the [playground](https://hax-playground.cryspen.com). [![Playground screenshot](static/img/playground.png)](https://hax-playground.cryspen.com) ## Community Join the hax community on our [Zulip chat](https://hacspec.zulipchat.com). ================================================ FILE: docs/javascripts/ansi_up.js ================================================ /** * Minified by jsDelivr using Terser v5.19.2. * Original file: /npm/ansi_up@6.0.2/ansi_up.js * * Do NOT use SRI with dynamically generated files! 
More information: https://www.jsdelivr.com/using-sri-with-dynamic-files */ "use strict";var PacketKind,templateObject_1,templateObject_2,templateObject_3,__makeTemplateObject=this&&this.__makeTemplateObject||function(e,t){return Object.defineProperty?Object.defineProperty(e,"raw",{value:t}):e.raw=t,e};!function(e){e[e.EOS=0]="EOS",e[e.Text=1]="Text",e[e.Incomplete=2]="Incomplete",e[e.ESC=3]="ESC",e[e.Unknown=4]="Unknown",e[e.SGR=5]="SGR",e[e.OSCURL=6]="OSCURL"}(PacketKind||(PacketKind={}));class AnsiUp{constructor(){this.VERSION="6.0.2",this.setup_palettes(),this._use_classes=!1,this.bold=!1,this.faint=!1,this.italic=!1,this.underline=!1,this.fg=this.bg=null,this._buffer="",this._url_allowlist={http:1,https:1},this._escape_html=!0,this.boldStyle="font-weight:bold",this.faintStyle="opacity:0.7",this.italicStyle="font-style:italic",this.underlineStyle="text-decoration:underline"}set use_classes(e){this._use_classes=e}get use_classes(){return this._use_classes}set url_allowlist(e){this._url_allowlist=e}get url_allowlist(){return this._url_allowlist}set escape_html(e){this._escape_html=e}get escape_html(){return this._escape_html}set boldStyle(e){this._boldStyle=e}get boldStyle(){return this._boldStyle}set faintStyle(e){this._faintStyle=e}get faintStyle(){return this._faintStyle}set italicStyle(e){this._italicStyle=e}get italicStyle(){return this._italicStyle}set underlineStyle(e){this._underlineStyle=e}get underlineStyle(){return 
this._underlineStyle}setup_palettes(){this.ansi_colors=[[{rgb:[0,0,0],class_name:"ansi-black"},{rgb:[187,0,0],class_name:"ansi-red"},{rgb:[0,187,0],class_name:"ansi-green"},{rgb:[187,187,0],class_name:"ansi-yellow"},{rgb:[0,0,187],class_name:"ansi-blue"},{rgb:[187,0,187],class_name:"ansi-magenta"},{rgb:[0,187,187],class_name:"ansi-cyan"},{rgb:[255,255,255],class_name:"ansi-white"}],[{rgb:[85,85,85],class_name:"ansi-bright-black"},{rgb:[255,85,85],class_name:"ansi-bright-red"},{rgb:[0,255,0],class_name:"ansi-bright-green"},{rgb:[255,255,85],class_name:"ansi-bright-yellow"},{rgb:[85,85,255],class_name:"ansi-bright-blue"},{rgb:[255,85,255],class_name:"ansi-bright-magenta"},{rgb:[85,255,255],class_name:"ansi-bright-cyan"},{rgb:[255,255,255],class_name:"ansi-bright-white"}]],this.palette_256=[],this.ansi_colors.forEach((e=>{e.forEach((e=>{this.palette_256.push(e)}))}));let e=[0,95,135,175,215,255];for(let t=0;t<6;++t)for(let n=0;n<6;++n)for(let i=0;i<6;++i){let s={rgb:[e[t],e[n],e[i]],class_name:"truecolor"};this.palette_256.push(s)}let t=8;for(let e=0;e<24;++e,t+=10){let e={rgb:[t,t,t],class_name:"truecolor"};this.palette_256.push(e)}}escape_txt_for_html(e){return this._escape_html?e.replace(/[&<>"']/gm,(e=>"&"===e?"&":"<"===e?"<":">"===e?">":'"'===e?""":"'"===e?"'":void 0)):e}append_buffer(e){var t=this._buffer+e;this._buffer=t}get_next_packet(){var e={kind:PacketKind.EOS,text:"",url:""},t=this._buffer.length;if(0==t)return e;var n=this._buffer.indexOf("");if(-1==n)return e.kind=PacketKind.Text,e.text=this._buffer,this._buffer="",e;if(n>0)return e.kind=PacketKind.Text,e.text=this._buffer.slice(0,n),this._buffer=this._buffer.slice(n),e;if(0==n){if(t<3)return e.kind=PacketKind.Incomplete,e;var i=this._buffer.charAt(1);if("["!=i&&"]"!=i&&"("!=i)return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;if("["==i){this._csi_regex||(this._csi_regex=rgx(templateObject_1||(templateObject_1=__makeTemplateObject(["\n ^ # beginning of 
line\n #\n # First attempt\n (?: # legal sequence\n [ # CSI\n ([<-?]?) # private-mode char\n ([d;]*) # any digits or semicolons\n ([ -/]? # an intermediate modifier\n [@-~]) # the command\n )\n | # alternate (second attempt)\n (?: # illegal sequence\n [ # CSI\n [ -~]* # anything legal\n ([\0-:]) # anything illegal\n )\n "],["\n ^ # beginning of line\n #\n # First attempt\n (?: # legal sequence\n \\x1b\\[ # CSI\n ([\\x3c-\\x3f]?) # private-mode char\n ([\\d;]*) # any digits or semicolons\n ([\\x20-\\x2f]? # an intermediate modifier\n [\\x40-\\x7e]) # the command\n )\n | # alternate (second attempt)\n (?: # illegal sequence\n \\x1b\\[ # CSI\n [\\x20-\\x7e]* # anything legal\n ([\\x00-\\x1f:]) # anything illegal\n )\n "]))));let t=this._buffer.match(this._csi_regex);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[4])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;""!=t[1]||"m"!=t[3]?e.kind=PacketKind.Unknown:e.kind=PacketKind.SGR,e.text=t[2];var s=t[0].length;return this._buffer=this._buffer.slice(s),e}if("]"==i){if(t<4)return e.kind=PacketKind.Incomplete,e;if("8"!=this._buffer.charAt(2)||";"!=this._buffer.charAt(3))return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;this._osc_st||(this._osc_st=rgxG(templateObject_2||(templateObject_2=__makeTemplateObject(["\n (?: # legal sequence\n (\\) # ESC | # alternate\n () # BEL (what xterm did)\n )\n | # alternate (second attempt)\n ( # illegal sequence\n [\0-] # anything illegal\n | # alternate\n [\b-] # anything illegal\n | # alternate\n [-] # anything illegal\n )\n "],["\n (?: # legal sequence\n (\\x1b\\\\) # ESC \\\n | # alternate\n (\\x07) # BEL (what xterm did)\n )\n | # alternate (second attempt)\n ( # illegal sequence\n [\\x00-\\x06] # anything illegal\n | # alternate\n [\\x08-\\x1a] # anything illegal\n | # alternate\n [\\x1c-\\x1f] # anything illegal\n )\n "])))),this._osc_st.lastIndex=0;{let 
t=this._osc_st.exec(this._buffer);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[3])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e}{let t=this._osc_st.exec(this._buffer);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[3])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e}this._osc_regex||(this._osc_regex=rgx(templateObject_3||(templateObject_3=__makeTemplateObject(["\n ^ # beginning of line\n #\n ]8; # OSC Hyperlink\n [ -:<-~]* # params (excluding ;)\n ; # end of params\n ([!-~]{0,512}) # URL capture\n (?: # ST\n (?:\\) # ESC | # alternate\n (?:) # BEL (what xterm did)\n )\n ([ -~]+) # TEXT capture\n ]8;; # OSC Hyperlink End\n (?: # ST\n (?:\\) # ESC | # alternate\n (?:) # BEL (what xterm did)\n )\n "],["\n ^ # beginning of line\n #\n \\x1b\\]8; # OSC Hyperlink\n [\\x20-\\x3a\\x3c-\\x7e]* # params (excluding ;)\n ; # end of params\n ([\\x21-\\x7e]{0,512}) # URL capture\n (?: # ST\n (?:\\x1b\\\\) # ESC \\\n | # alternate\n (?:\\x07) # BEL (what xterm did)\n )\n ([\\x20-\\x7e]+) # TEXT capture\n \\x1b\\]8;; # OSC Hyperlink End\n (?: # ST\n (?:\\x1b\\\\) # ESC \\\n | # alternate\n (?:\\x07) # BEL (what xterm did)\n )\n "]))));let n=this._buffer.match(this._osc_regex);if(null===n)return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;e.kind=PacketKind.OSCURL,e.url=n[1],e.text=n[2];s=n[0].length;return this._buffer=this._buffer.slice(s),e}if("("==i)return e.kind=PacketKind.Unknown,this._buffer=this._buffer.slice(3),e}}ansi_to_html(e){this.append_buffer(e);for(var t=[];;){var n=this.get_next_packet();if(n.kind==PacketKind.EOS||n.kind==PacketKind.Incomplete)break;n.kind!=PacketKind.ESC&&n.kind!=PacketKind.Unknown&&(n.kind==PacketKind.Text?t.push(this.transform_to_html(this.with_state(n))):n.kind==PacketKind.SGR?this.process_ansi(n):n.kind==PacketKind.OSCURL&&t.push(this.process_hyperlink(n)))}return 
t.join("")}with_state(e){return{bold:this.bold,faint:this.faint,italic:this.italic,underline:this.underline,fg:this.fg,bg:this.bg,text:e.text}}process_ansi(e){let t=e.text.split(";");for(;t.length>0;){let e=t.shift(),n=parseInt(e,10);if(isNaN(n)||0===n)this.fg=null,this.bg=null,this.bold=!1,this.faint=!1,this.italic=!1,this.underline=!1;else if(1===n)this.bold=!0;else if(2===n)this.faint=!0;else if(3===n)this.italic=!0;else if(4===n)this.underline=!0;else if(21===n)this.bold=!1;else if(22===n)this.faint=!1,this.bold=!1;else if(23===n)this.italic=!1;else if(24===n)this.underline=!1;else if(39===n)this.fg=null;else if(49===n)this.bg=null;else if(n>=30&&n<38)this.fg=this.ansi_colors[0][n-30];else if(n>=40&&n<48)this.bg=this.ansi_colors[0][n-40];else if(n>=90&&n<98)this.fg=this.ansi_colors[1][n-90];else if(n>=100&&n<108)this.bg=this.ansi_colors[1][n-100];else if((38===n||48===n)&&t.length>0){let e=38===n,i=t.shift();if("5"===i&&t.length>0){let n=parseInt(t.shift(),10);n>=0&&n<=255&&(e?this.fg=this.palette_256[n]:this.bg=this.palette_256[n])}if("2"===i&&t.length>2){let n=parseInt(t.shift(),10),i=parseInt(t.shift(),10),s=parseInt(t.shift(),10);if(n>=0&&n<=255&&i>=0&&i<=255&&s>=0&&s<=255){let t={rgb:[n,i,s],class_name:"truecolor"};e?this.fg=t:this.bg=t}}}}}transform_to_html(e){let t=e.text;if(0===t.length)return t;if(t=this.escape_txt_for_html(t),!e.bold&&!e.italic&&!e.underline&&null===e.fg&&null===e.bg)return t;let n=[],i=[],s=e.fg,l=e.bg;e.bold&&n.push(this._boldStyle),e.faint&&n.push(this._faintStyle),e.italic&&n.push(this._italicStyle),e.underline&&n.push(this._underlineStyle),this._use_classes?(s&&("truecolor"!==s.class_name?i.push(`${s.class_name}-fg`):n.push(`color:rgb(${s.rgb.join(",")})`)),l&&("truecolor"!==l.class_name?i.push(`${l.class_name}-bg`):n.push(`background-color:rgb(${l.rgb.join(",")})`))):(s&&n.push(`color:rgb(${s.rgb.join(",")})`),l&&n.push(`background-color:rgb(${l.rgb})`));let a="",r="";return i.length&&(a=` class="${i.join(" ")}"`),n.length&&(r=` 
style="${n.join(";")}"`),`${t}`}process_hyperlink(e){let t=e.url.split(":");return t.length<1?"":this._url_allowlist[t[0]]?`${this.escape_txt_for_html(e.text)}`:""}}function rgx(e,...t){let n=e.raw[0].replace(/^\s+|\s+\n|\s*#[\s\S]*?\n|\n/gm,"");return new RegExp(n)}window.AnsiUp=AnsiUp;function rgxG(e,...t){let n=e.raw[0].replace(/^\s+|\s+\n|\s*#[\s\S]*?\n|\n/gm,"");return new RegExp(n,"g");} ================================================ FILE: docs/javascripts/fstar.js ================================================ /*! `ocaml` grammar compiled for Highlight.js 11.10.0 */ (function(){ var hljsGrammar = (function () { 'use strict'; function fstar(hljs) { /* missing support for heredoc-like string (OCaml 4.0.2+) */ return { name: 'FStar', aliases: [ 'fstar', 'fst', 'fsti' ], keywords: { $pattern: '[a-z_]\\w*!?', keyword: 'attributes noeq unopteq and assert assume begin by calc class default decreases effect eliminate else end ensures exception exists false friend forall fun λ function if in include inline inline_for_extraction instance introduce irreducible let logic match returns as module new new_effect layered_effect polymonadic_bind polymonadic_subcomp noextract of open opaque private quote range_of rec reifiable reify reflectable requires set_range_of sub_effect synth then total true try type unfold unfoldable val when with string', built_in: 'unit', literal: 'true false' }, // illegal: /\/\/|>>/, contains: [ { className: 'literal', begin: '\\[(\\|\\|)?\\]|\\(\\)', relevance: 0 }, hljs.COMMENT( '\\(\\*', '\\*\\)', { contains: [ 'self' ] } ), // hljs.inherit( // hljs.COMMENT(), // { // match: [ // /(^|\s)/, // /\/\/.*$/ // ], // scope: { // 2: 'comment' // } // } // ), { /* type variable */ className: 'symbol', begin: '\'[A-Za-z_](?!\')[\\w\']*' /* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */ }, { /* module or constructor */ className: 'type', begin: '\\b[A-Z][\\w\']*', relevance: 0 }, { /* don't color identifiers, but 
safely catch all identifiers with ' */ begin: '[a-z_]\\w*\'[\\w\']*', relevance: 0 }, hljs.inherit(hljs.APOS_STRING_MODE, { className: 'string', relevance: 0 }), hljs.inherit(hljs.QUOTE_STRING_MODE, { illegal: null }), { className: 'number', begin: '\\b(0[xX][a-fA-F0-9_]+[Lln]?|' + '0[oO][0-7_]+[Lln]?|' + '0[bB][01_]+[Lln]?|' + '[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)', relevance: 0 }, { begin: /->/ // relevance booster } ] }; } return fstar; })(); hljs.registerLanguage('fstar', hljsGrammar); })(); // hljs.initHighlightingOnLoad(); ================================================ FILE: docs/javascripts/hax_playground.js ================================================ const PLAYGROUND_URL = 'https://hax-playground.cryspen.com'; // Fetches the commit hash for latest `main` of hax async function get_latest_hax_main() { let commits = await (await fetch(PLAYGROUND_URL + '/git-refs')).text(); return commits.match(/(.*);refs\/remotes\/origin\/main;/).pop(); } // This line should not be edited: it is used in the action `playwright-docs.yml`. 
const HAX_PLAYGROUND_FORCED_VERSION = false; // Call into the API of the hax playground async function call_playground(result_block, query, text, parent_node) { let raw_query = async (API_URL, hax_version, query, files, on_line_received) => { let response = await fetch(`${API_URL}/query/${hax_version}/${query}`, { method: "POST", headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' }, body: JSON.stringify(files), }); let decoder = new TextDecoder(); let leftover = ""; let reader = response.body.getReader(); while (true) { const { done, value } = await reader.read(); if (done) break; leftover += decoder.decode(value); let lines = leftover.split('\n'); let entire_lines = lines.slice(0, -1); leftover = lines.slice(-1)[0]; for (const line of entire_lines) on_line_received(line); } }; let ansi_up = new AnsiUp(); let first = true; let logs = document.createElement('div'); logs.style = 'font-size: 80%; background: #00000010; padding: 3px; white-space:pre-wrap;'; let hax_version = HAX_PLAYGROUND_FORCED_VERSION || await get_latest_hax_main(); let lean_backend = query.startsWith('lean'); raw_query( PLAYGROUND_URL, hax_version, query, [['src/lib.rs', text]], x => { if (first) { result_block.style.padding = '0.7em 1.2em'; result_block.innerText = ""; result_block.appendChild(logs); } first = false; let json = {}; try { json = JSON.parse(x); } catch (_) { } if (json.Stderr || json.Stdout) { logs.innerHTML += '
' + ansi_up.ansi_to_html(json.Stderr || json.Stdout) + "
"; } if (json.Done) { let out = []; for (let file in json.Done.files) { if (file.endsWith('.rs')) continue; let contents = json.Done.files[file]; contents = (contents.split(lean_backend ? 'set_option linter.unusedVariables false' : 'open FStar.Mul')[1] || contents).trim(); contents = contents.replace(/$/gm, ' ').trim(); out.push([file, contents]); } if (json.Done.success) result_block.innerText = ""; else result_block.innerHTML += "
"; let result = document.createElement('pre'); result.style.whiteSpace = 'pre-wrap'; if (out.length == 1) { result.textContent = out[0][1]; } else { result.textContent = out.map(([file, s]) => '(* File: ' + file + ' *) \n' + s).join('\n\n').trim(); } result_block.appendChild(result); hljs.highlightBlock(result); result_block.innerHTML += `
Open in hax playground ↗`; parent_node.classList.remove("state-success", "state-failure"); parent_node.classList.add("state-" + (json.Done.success ? "success" : "failure")); if (json.Done.success && query.includes('+tc')) { result_block.innerHTML += `
Status: ✓ ${lean_backend ? "Lean" : "F*"} successfully typechecked!
`; } } }, ); } function setup_hax_playground() { if (document.querySelector('.md-hax-playground')) return; console.log('setup'); for (let e of document.querySelectorAll('pre')) { let code = e.querySelector("code"); if (!code) continue; let lines = [ ...code.children ].map(line => line.innerText.replace(/^\n+/, '').replace(/\n+$/, '')) .join("\n").trim().split('\n'); console.log({ lines }); let contents = lines.filter(line => !line.startsWith('# ')).join('\n'); let w = e.parentElement; if (!w.classList.contains("playable")) continue; let backend = w.classList.contains("lean-backend") ? 'lean' : 'fstar'; code.innerHTML = "
";
        let inner = code.children[0];
        inner.style.backgroundColor = "transparent";
        inner.classList.add("md-hax-playground-pre");

        let editor = new codemirror.EditorView({
            doc: contents,
            extensions: [codemirror.basicSetup, codemirror.rust()],
            parent: inner,
            lineNumbers: false,
        });

        let result_block = document.createElement("pre");
        result_block.classList.add("hax-playground-pre");
        result_block.style.fontFamily = '"Monaco", "Menlo", "Ubuntu Mono", "Consolas", "Source Code Pro", "source-code-pro", monospace';
        result_block.style.fontSize = '0.85em';
        result_block.style.background = '#f3f3f3';
        w.append(result_block);

        let header = lines.filter(line => line.startsWith('# ')).map(line => line.slice(2)).join('\n');
        let getCode = () => header + '\n' + editor.state.doc.toString();


        let button_translate = document.createElement("button");
        button_translate.innerHTML = ``;
        button_translate.classList.add('md-icon');
        button_translate.classList.add('md-clipboard');
        button_translate.classList.add('md-hax-playground');
        button_translate.style.right = "2.4em";
        button_translate.onclick = () => {
            call_playground(result_block, backend, getCode(), w);
        };
        e.prepend(button_translate);

        let button_tc = document.createElement("button");
        button_tc.innerHTML = ``;
        button_tc.classList.add('md-icon');
        button_tc.classList.add('md-clipboard');
        button_tc.classList.add('md-hax-playground');
        button_tc.style.right = "4.5em";
        button_tc.onclick = () => {
            call_playground(result_block, backend + '+tc', getCode(), w);
        };

        e.prepend(button_tc);
    }
}

// Once the page has fully loaded, turn every eligible code block into a
// playground widget, then keep watching the DOM: mkdocs-material swaps page
// content in place on navigation, so whenever the body mutates and no
// playground button is present anymore, schedule a re-setup shortly after.
window.addEventListener('load', () => {
    setup_hax_playground();
    const observer = new MutationObserver(() => {
        const already_installed = document.querySelector('.md-hax-playground');
        if (!already_installed) {
            // Small delay lets the new page content settle before re-scanning.
            setTimeout(setup_hax_playground, 200);
        }
    });
    observer.observe(document.querySelector('body'), { childList: true, subtree: true });
});




================================================
FILE: docs/javascripts/lz-string.js
================================================
var LZString=function(){var r=String.fromCharCode,o="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-$",e={};function t(r,o){if(!e[r]){e[r]={};for(var n=0;n>>8,n[2*e+1]=s%256}return n},decompressFromUint8Array:function(o){if(null==o)return i.decompress(o);for(var n=new Array(o.length/2),e=0,t=n.length;e>=1}else{for(t=1,e=0;e>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e>=1;0==--l&&(l=Math.pow(2,h),h++),s[p]=f++,c=String(a)}if(""!==c){if(Object.prototype.hasOwnProperty.call(u,c)){if(c.charCodeAt(0)<256){for(e=0;e>=1}else{for(t=1,e=0;e>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e>=1;0==--l&&(l=Math.pow(2,h),h++)}for(t=2,e=0;e>=1;for(;;){if(m<<=1,v==o-1){d.push(n(m));break}v++}return d.join("")},decompress:function(r){return null==r?"":""==r?null:i._decompress(r.length,32768,function(o){return r.charCodeAt(o)})},_decompress:function(o,n,e){var t,i,s,u,a,p,c,l=[],f=4,h=4,d=3,m="",v=[],g={val:e(0),position:n,index:1};for(t=0;t<3;t+=1)l[t]=t;for(s=0,a=Math.pow(2,2),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(s){case 0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 2:return""}for(l[3]=c,i=c,v.push(c);;){if(g.index>o)return"";for(s=0,a=Math.pow(2,d),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(c=s){case 0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 
1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 2:return v.join("")}if(0==f&&(f=Math.pow(2,d),d++),l[c])m=l[c];else{if(c!==h)return null;m=i+i.charAt(0)}v.push(m),l[h++]=i+m.charAt(0),i=m,0==--f&&(f=Math.pow(2,d),d++)}}};return i}();"function"==typeof define&&define.amd?define(function(){return LZString}):"undefined"!=typeof module&&null!=module?module.exports=LZString:"undefined"!=typeof angular&&null!=angular&&angular.module("LZString",[]).factory("LZString",function(){return LZString});


================================================
FILE: docs/manual/faq/include-flags.md
================================================
# **Rust Item Extraction Using `cargo hax`**

## **Overview**
When extracting Rust items with hax, it is often necessary to include only a specific subset of items from a crate. The `cargo hax into` subcommand provides the `-i` flag to control which items are included or excluded, and how their dependencies are handled. This allows precise tailoring of the extraction process.

## **The `-i` Flag**
The `-i` flag accepts a list of patterns with modifiers to define inclusion or exclusion rules for Rust items. Patterns are processed sequentially from left to right, determining which items are extracted.

### **Basic Concepts**
- **Patterns**: Rust paths with support for `*` and `**` globs.
  - `*` matches any single segment (e.g., `mycrate::*::myfn`).
  - `**` matches any subpath, including empty segments (e.g., `**::myfn`).
- **Modifiers**:
  - `+`: Includes items and their dependencies (transitively).
  - `+~`: Includes items and their **direct dependencies only**.
  - `+!`: Includes only the item itself (no dependencies).
  - `+:`: Includes only the item's type signature (no body or dependencies).
  - `-`: Excludes items.

By default, **all items are included**, unless explicitly modified.

### **Practical Examples of the `-i` Flag Usage**

Consider the following crate (`mycrate`) with the `lib.rs` module:

```rust
fn interesting_function() { aux() }
fn aux() { foo::f() }
fn something_else() { /* ... */ }

mod foo {
    fn f() { /* ... */ }
    fn g() { /* ... */ }
    fn h() { /* ... */ }
    fn interesting_function() { something() }
    fn something() { /* ... */ }

    mod bar {
        fn interesting_function() { /* ... */ }
    }
}

fn not_that_one() { not_that_one_dependency() }
fn not_that_one_dependency() { /* ... */ }

fn not_extracting_function(_: u8) -> u8 {
    unsafe { /* ... */ }
    0
}
```

#### **1. Selectively Including Items with Dependencies**
```bash
cargo hax into -i '-** +mycrate::**::interesting_function' 
```

- **Explanation**:
  - `-**`: Excludes all items by default.
  - `+mycrate::**::interesting_function`: Includes all items matching `mycrate::**::interesting_function` and their dependencies.
- **Extracted Items**:
  1. `mycrate::interesting_function` (direct match).
  2. `mycrate::foo::interesting_function` (direct match).
  3. `mycrate::foo::bar::interesting_function` (direct match).
  4. `mycrate::aux` (dependency of `mycrate::interesting_function`).
  5. `mycrate::foo::f` (dependency of `mycrate::aux`).
  6. `mycrate::foo::something` (dependency of `mycrate::foo::interesting_function`).

#### **2. Excluding Specific Items**
```bash
cargo hax into -i '+** -*::not_that_one' 
```

- **Explanation**:
  - `+**`: Includes all items by default.
  - `-*::not_that_one`: Excludes any item named `not_that_one`, but keeps all other items, including `not_that_one_dependency`.
- **Extracted Items**: All except `mycrate::not_that_one`.

#### **3. Including Items Without Dependencies**
```bash
cargo hax into -i '-** +!mycrate::interesting_function' 
```

- **Explanation**:
  - `-**`: Excludes all items by default.
  - `+!mycrate::interesting_function`: Includes only `mycrate::interesting_function`, without dependencies.
- **Extracted Items**: Only `mycrate::interesting_function`.

#### **4. Including Items with Direct Dependencies Only**
```bash
cargo hax into -i '-** +~mycrate::interesting_function' 
```

- **Explanation**:
  - `-**`: Excludes all items by default.
  - `+~mycrate::interesting_function`: Includes `mycrate::interesting_function` and its direct dependencies (but not their transitive dependencies).
- **Extracted Items**:
  1. `mycrate::interesting_function`.
  2. `mycrate::aux` (direct dependency).
- **Excluded Items**:
  - `mycrate::foo::f` (transitive dependency of `mycrate::aux`).

#### **5. Including Items in Signature-Only Mode**
```bash
cargo hax into -i '+:mycrate::not_extracting_function' 
```

- **Explanation**:
  - `+:mycrate::not_extracting_function`: Includes only the type signature of `mycrate::not_extracting_function` (e.g., as an assumed or axiomatized symbol).
- **Extracted Items**:
  - The type signature of `mycrate::not_extracting_function`, without its body or dependencies.



#### **6. Including anonymous items using `hax_lib::include`**
Some items like [trait impls](https://doc.rust-lang.org/reference/items/implementations.html#r-items.impl.trait), or [inherent impls](https://doc.rust-lang.org/reference/items/implementations.html#r-items.impl.inherent) have no name so it is impossible to target them specifically using the `-i` flag.
In this case, one can use [`hax_lib::include`](https://docs.rs/hax-lib/latest/hax_lib/attr.include.html) to extract these items, and override the default behavior for the rest of the module.
```rust
struct S;

#[hax_lib::include]
impl S {
    fn f() {}
}

impl S {
    #[hax_lib::include]
    fn g() {}
    fn h () {}
}
```
To include only `S::f` and `S::g` in the example above, the `hax_lib::include` annotations do the trick, together with the following extraction command:
```bash
cargo hax into -i '-**' 
```

### **Summary**
The `-i` flag offers powerful control over extraction, allowing fine-grained inclusion and exclusion of items with various dependency handling strategies. Use it to:
- Extract specific items and their dependencies (`+` or `+~`).
- Exclude certain items (`-`).
- Include items without dependencies (`+!`).
- Extract type signatures only (`+:`).

For complex crates, this flexibility ensures only the necessary parts are extracted, optimizing analysis or transformation workflows.



================================================
FILE: docs/manual/faq/index.md
================================================
---
weight: 200
---

# Troubleshooting/FAQ

This chapter captures a list of common questions or issues and how to resolve them. If you happen to run into an issue that is not documented here, please consider submitting a pull request!


================================================
FILE: docs/manual/faq/into.md
================================================
# Troubleshooting/FAQ


================================================
FILE: docs/manual/fstar/.nav.yml
================================================
title: F*

================================================
FILE: docs/manual/fstar/index.md
================================================
This section introduces the F\* backend of hax. It covers how to setup a project, and the basics of how to use the hax and F\* to verify Rust code.

================================================
FILE: docs/manual/fstar/quick_start.md
================================================
---
weight: 0
---

# Quick start

Do you want to try hax out on a Rust crate of yours? This chapter is
what you are looking for!

## Setup the tools

 -  [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation).  
   🪄 Running `cargo hax --version` should print some version info.
 -  [Install F\*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md)

## Setup the crate you want to verify

*Note: the instructions below assume you are in the folder of the specific crate (**not workspace!**) you want to extract.*


 -  Create the folder `proofs/fstar/extraction`, right next to the `Cargo.toml` of the crate you want to verify.  
   🪄 `mkdir -p proofs/fstar/extraction`
 -  Copy [this makefile](https://gist.github.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3) to `proofs/fstar/extraction/Makefile`  
   🪄 `curl -o proofs/fstar/extraction/Makefile https://gist.githubusercontent.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3/raw/Makefile`
 -  Add `hax-lib` as a dependency to your crate, enabled only when using hax.  
   🪄 `cargo add --target 'cfg(hax)' --git https://github.com/hacspec/hax hax-lib`  
   🪄 *(`hax-lib` is not mandatory, but this guide assumes it is present)*

## Partial extraction

*Note: the instructions below assume you are in the folder of the
specific crate you want to extract.*

Run the command `cargo hax into fstar` to extract every item of your
crate as F\* modules in the subfolder `proofs/fstar/extraction`.

**What is critical? What is worth verifying?**  
Probably, your Rust crate contains mixed kinds of code: some parts are
critical (e.g. the library functions at the core of your crate) while
some others are not (e.g. the binary driver that wraps the
library). In this case, you likely want to extract only partially your
crate, so that you can focus on the important part.

**Partial extraction.**  
If you want to extract a function
`your_crate::some_module::my_function`, you need to tell `hax` to
extract nothing but `my_function`:

```bash
cargo hax into -i '-** +your_crate::some_module::my_function' fstar
```

Note this command will extract `my_function` but also any item
(function, type, etc.) from your crate which is used directly or
indirectly by `my_function`. If you don't want the dependency, use
`+!` instead of `+` in the `-i` flag.

**Unsupported Rust code.**  
hax [doesn't support every Rust
constructs](https://github.com/hacspec/hax?tab=readme-ov-file#supported-subset-of-the-rust-language),
`unsafe` code, or complicated mutation scheme. That is another reason
for extracting only a part of your crate. When running hax, if an item
of your crate, say a function `my_crate::f`, is not handled by hax,
you can append `-my_crate::f` to the `-i` flag. You can learn more
about the `-i` flag [in the FAQ](../faq/include-flags.md).

## Start F\* verification
After running the hax toolchain on your Rust code, you will end up
with various F\* modules in the `proofs/fstar/extraction` folder. The
`Makefile` in `proofs/fstar/extraction` will run F\*.

1. **Lax check:** the first step is to run `OTHERFLAGS="--lax" make`,
   which will run F\* in "lax" mode. The lax mode just makes sure basic
   typechecking works: it is not proving anything. This first step is
   important because there might be missing libraries. If F\* is not
   able to find a definition, it is probably a `libcore` issue: you
   probably need to edit the F\* library, which lives in the
   `proofs-libs` directory in the root of the hax repo.
2. **Typecheck:** the second step is to run `make`. This will ask F\*
   to typecheck fully your crate. This is very likely that you need to
   add preconditions and postconditions at this stage. Indeed, this
   second step is about panic freedom: if F\* can typecheck your crate,
   it means your code *never* panics, which already is an important
   property.

To go further, please read the next chapter.


================================================
FILE: docs/manual/fstar/tutorial/data-invariants.md
================================================
---
weight: 2
---

# Data invariants

In the two previous chapters we saw how to write specifications on
functions, be it with pre and post-condition or with lemmas. In this
chapter, we will see how to maintain invariants with precise types.

## Making illegal states unrepresentable
With the Barrett example, we were working on a certain field, whose
elements were represented as `i32` integers. To simplify, let's
consider `F₃`, the finite field with 3 elements (say `0`, `1` and
`2`). Every element of `F3` can be represented as a `i32` integers,
but the converse doesn't hold: the vast majority of `i32` integers are
not elements of `F₃`.

Representing `F₃` as `i32`s, every time we define a function consuming
`F₃` elements, we face the risk to consume *illegal* elements. We are
thus back to [chapter 4.1](panic-freedom.md): we should panic on
illegal elements, and add hax pre-conditions on every single
function. That's not ideal: the property of being either `0`, `1` or
`2` should be encoded directly on the type representing `F₃` elements.

### `enum`s to the rescue
Rust alone already can solve our representation issues with
[enums](https://doc.rust-lang.org/book/ch06-00-enums.html)! Below, we
define the `enum` type `F3` which has only three constructors: `F3`
represents exactly the elements of `F₃`, not more, not less.

```{.rust .playable}
enum F3 {
    E1,
    E2,
    E3,
}
```

With `F3`, there doesn't exist illegal values at all: we can now
define [*total*
functions](https://en.wikipedia.org/wiki/Partial_function) on `F₃`
elements. We dropped altogether a source of panic!

Soon you want to work with a bigger finite field: say
`F₂₃₄₇`. Representing this many (`q`) different elements with a Rust
enum would be very painful... The `enum` approach falls apart.

### Newtype and refinements
Since we don't want an `enum` with 2347 elements, we have to revert to
a type that can hold this many elements. The smallest integer type
large enough provided by Rust is `u16`.

Let's define `F` a
["newtype"](https://matklad.github.io/2018/06/04/newtype-index-pattern.html):
a [struct](https://doc.rust-lang.org/book/ch05-00-structs.html) with
one `u16` field `v`. Notice the refinement annotation on `v`: the
extraction of this type `F` via hax will result in a type enforcing
`v` small enough.

``` {.rust .playable}
pub const Q: u16 = 2347;

#[hax_lib::attributes]
pub struct F {
    #[hax_lib::refine(v < Q)]
    pub v: u16,
}
```

In Rust, we can now define functions that operate on type `F`,
assuming they are in bounds with respect to `F₂₃₄₇`: every such
assumption will be checked and enforced by the proof assistant. As an
example, below is the implementation of the addition for type `F`.

``` {.rust .playable}
# pub const Q: u16 = 2347;
# 
# #[hax_lib::attributes]
# pub struct F {
#     #[hax_lib::refine(v < Q)]
#     pub v: u16,
# }

use core::ops::Add;

impl Add for F {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self {
            v: (self.v + rhs.v) % Q,
        }
    }
}
```

Here, F\* is able to prove automatically that (1) the addition doesn't
overflow and (2) that the invariant of `F` is preserved. The
definition of type `F` in F\* (named `t_F`) very explicitly requires
the invariant as a refinement on `v`.


================================================
FILE: docs/manual/fstar/tutorial/index.md
================================================
---
weight: 1
---

# Tutorial

This tutorial is a guide for formally verifying properties about Rust
programs using the hax toolchain. hax is a tool that translates Rust
programs to various formal programming languages.

The formal programming languages we target are called *backends*. Some
of them, e.g. [F\*](https://fstar-lang.org/), [Lean](https://lean-lang.org/) or
[Coq](https://coq.inria.fr/), are general purpose formal programming
languages. Others are specialized tools:
[ProVerif](https://bblanche.gitlabpages.inria.fr/proverif/) is
dedicated to proving properties about protocols.

This tutorial focuses on proving properties with
[F\*](https://fstar-lang.org/).


================================================
FILE: docs/manual/fstar/tutorial/panic-freedom.md
================================================
---
weight: 0
---

# Panic freedom

Let's start with a simple example: a function that squares a `u8`
integer. To extract this function to F\* using hax, we simply need to
run the command `cargo hax into fstar` in the directory of the crate
in which the function `square` is defined.

*Note: throughout this tutorial, you can edit the snippets of code and
extract to F\* by clicking the play button (:material-play:), or even typecheck it with the button (:material-check:).*

```{.rust .playable .expect-failure }
fn square(x: u8) -> u8 {
    x * x
}
```

Though, if we try to verify this function, F\* is complaining about a
subtyping issue: F\* tells us that it is not able to prove that the
result of the multiplication `x * x` fits the range of `u8`. The
multiplication `x * x` might indeed be overflowing!

For instance, running `square(16)` panics: `16 * 16` is `256`, which
is just over `255`, the largest integer that fits `u8`. Rust does not
ensure that functions are *total*: a function might panic at any
point, or might never terminate.


## Rust and panicking code
Quoting the chapter [To `panic!` or Not to
`panic!`](https://doc.rust-lang.org/book/ch09-03-to-panic-or-not-to-panic.html)
from the Rust book:

> The `panic!` macro signals that your program is in a state it can't
> handle and lets you tell the process to stop instead of trying to
> proceed with invalid or incorrect values.

A Rust program should panic only in a situation where an assumption
or an invariant is broken: a panic models an *invalid* state. Formal
verification is about proving such invalid state cannot occur, at all.

From this observation emerges the urge of proving Rust programs to be
panic-free!

## Fixing our squaring function
Let's come back to our example. There is an informal assumption on the
multiplication operator in Rust: the inputs should be small enough so
that the multiplication doesn't overflow.

Note that Rust also provides `wrapping_mul`, a non-panicking variant
of the multiplication on `u8` that wraps when the result is bigger
than `255`. Replacing the common multiplication with `wrapping_mul` in
`square` would fix the panic, but then, `square(16)` returns zero.
Semantically, this is not what one would expect from `square`.

Our problem is that our function `square` is well-defined only when
its input is between `0` and `15`.

### Solution A: reflect the partialness of the function in Rust
A first solution is to make `square` return an `Option` instead of a `u8`:
``` {.rust .playable}
/// Squares `x`, returning `None` when `x >= 16` — since `16 * 16 = 256`
/// overflows `u8`. Making the function total this way lets F* prove
/// panic freedom without any precondition.
///
/// Fix: the return type was the bare `Option`, which does not compile;
/// the extracted F* (`Core.Option.t_Option u8`) shows `Option<u8>` was intended.
fn square_option(x: u8) -> Option<u8> {
    if x >= 16 {
        None
    } else {
        // Safe: x <= 15, so x * x <= 225, which fits in a u8.
        Some(x * x)
    }
}
```

Here, F\* is able to prove panic-freedom: calling `square` with any
input is safe. Though, one may argue that `square`'s input being small
enough should really be an assumption. Having to deal with the
possible integer overflowing whenever squaring is a huge burden. Can
we do better?

### Solution B: add a precondition
The type system of Rust doesn't allow the programmer to formalize the
assumption that `square` expects a small `u8`. This becomes
possible using hax: one can annotate a function with a pre-condition
on its inputs.

The pre-conditions and post-conditions on a function form a
*contract*: "if you give me some inputs that satisfies a given formula
(*the precondition*), I will produce a return value that satisfy
another formula (*the postcondition*)". Outside this contract,
anything might happen: the function might panic, might run forever,
erase your disk, or anything.

The helper crate
[hax-lib](https://github.com/cryspen/hax/tree/main/hax-lib)
provides the `requires`
[proc-macro](https://doc.rust-lang.org/reference/procedural-macros.html)
which lets users write pre-conditions directly in Rust.

```{.rust .playable}
#[hax_lib::requires(x < 16)]
fn square_requires(x: u8) -> u8 {
    x * x
}
```

With this precondition, F\* is able to prove panic freedom. From now
on, it is the responsibility of the clients of `square` to respect the
contract. The next step would thus be to verify, through hax extraction,
that `square` is used correctly at every call site.

## Common panicking situations
Multiplication is not the only panicking function provided by the Rust
library: most of the other integer arithmetic operation have such
informal assumptions.

Another source of panics is indexing. Indexing in an array, a slice or
a vector is a partial operation: the index might be out of range.

In the example folder of hax, you can find the [`chacha20`
example](https://github.com/cryspen/hax/blob/main/examples/chacha20/src/lib.rs)
that makes use of pre-conditions to prove panic freedom.

Another solution for safe indexing is to use the [newtype index
pattern](https://matklad.github.io/2018/06/04/newtype-index-pattern.html),
which is [also supported by
hax](https://github.com/cryspen/hax/blob/d668de4d17e5ddee3a613068dc30b71353a9db4f/tests/attributes/src/lib.rs#L98-L126). The [data invariants](data-invariants.md#newtype-and-refinements) chapter gives more details about this.



================================================
FILE: docs/manual/fstar/tutorial/proofs/fstar/extraction/Makefile
================================================
# This is a generically useful Makefile for F* that is self-contained
#
# It is tempting to factor this out into multiple Makefiles but that
# makes it less portable, so resist temptation, or move to a more
# sophisticated build system.
#
# We expect:
#  1. `fstar.exe` to be in PATH (alternatively, you can also set
#     $FSTAR_HOME to be set to your F* repo/install directory)
#
#  2. `cargo`, `hax` and `rustup` to be installed and in PATH.
#
#  3. the extracted Cargo crate to have "hax-lib" as a dependency:
#     `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}`
#
# Optionally, you can set `HACL_HOME`.
#
# ROOTS contains all the top-level F* files you wish to verify
# The default target `verify` verifies ROOTS and its dependencies
# To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line
#
# To make F* emacs mode use the settings in this file, you need to
# add the following lines to your .emacs
#
# (setq-default fstar-executable "/bin/fstar.exe")
# (setq-default fstar-smt-executable "/bin/z3")
#
# (defun my-fstar-compute-prover-args-using-make ()
#   "Construct arguments to pass to F* by calling make."
#   (with-demoted-errors "Error when constructing arg string: %S"
#     (let* ((fname (file-name-nondirectory buffer-file-name))
# 	   (target (concat fname "-in"))
# 	   (argstr (car (process-lines "make" "--quiet" target))))
#       (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

HACL_HOME     ?= $(HOME)/.hax/hacl_home
# Prefer fstar.exe from PATH; fall back to $FSTAR_HOME/bin/fstar.exe.
FSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe")

# Where F* stores checked modules and recorded SMT hints.
CACHE_DIR     ?= .cache
HINT_DIR      ?= .hints

.PHONY: all verify clean

# Rebuild the dependency graph from scratch, then verify everything.
all:
	rm -f .depend && $(MAKE) .depend
	$(MAKE) verify

# If $HACL_HOME doesn't exist, clone it
${HACL_HOME}:
	mkdir -p "${HACL_HOME}"
	git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}"

# By default, we process all the files in the current directory
# NOTE(review): `*fsti` has no dot — presumably `*.fsti` was intended
# (the dotless glob also matches any file merely ending in "fsti").
ROOTS = $(wildcard *.fst *fsti)

# The following is a bash script that discovers F* libraries
define FINDLIBS
    # Prints a path if and only if it exists. Takes one argument: the
    # path.
    function print_if_exists() {
        if [ -d "$$1" ]; then
            echo "$$1"
        fi
    }
    # Asks Cargo all the dependencies for the current crate or workspace,
    # and extract all "root" directories for each. Takes zero argument.
    function dependencies() {
        cargo metadata --format-version 1 |
            jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")'
    }
    # Find hax libraries *around* a given path. Takes one argument: the
    # path.
    function find_hax_libraries_at_path() {
        path="$$1"
        # if there is a `proofs/fstar/extraction` subfolder, then that's a
        # F* library
        print_if_exists "$$path/proofs/fstar/extraction"
        # Maybe the `proof-libs` folder of hax is around?
        MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar")
        if [ $$? -eq 0 ]; then
            print_if_exists "$$MAYBE_PROOF_LIBS/core"
            print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives"
        fi
    }
    { while IFS= read path; do
          find_hax_libraries_at_path "$$path"
      done < <(dependencies)
    } | sort -u
endef
export FINDLIBS

# Include paths: HACL*'s library plus every hax library found by FINDLIBS.
FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS")

FSTAR_FLAGS = --cmi \
  --warn_error -331 \
  --cache_checked_modules --cache_dir $(CACHE_DIR) \
  --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))

FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)

# Generate the dependency graph for all ROOTS; consumed via `include` below.
.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)
	$(info $(ROOTS))
	$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@

include .depend

$(HINT_DIR):
	mkdir -p $@

$(CACHE_DIR):
	mkdir -p $@

# Check a single module; OTHERFLAGS (e.g. --lax) is passed through to F*.
$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)
	$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints

# Verify every root module by requesting its .checked artifact.
verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))

# Targets for interactive mode

%.fst-in:
	$(info $(FSTAR_FLAGS) \
	  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)

%.fsti-in:
	$(info $(FSTAR_FLAGS) \
	  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)


# Clean targets

SHELL=bash

clean:
	rm -rf $(CACHE_DIR)/*
	# NOTE(review): this deletes the extracted .fst sources and fails when
	# none exist — confirm intended; `rm -f *.fst` would at least not error.
	rm *.fst



================================================
FILE: docs/manual/fstar/tutorial/proofs/fstar/extraction/Tutorial_src.Math.Lemmas.fst
================================================
// Hand-written F* support module for the tutorial: bridges the extracted
// Rust code to F*'s standard math lemmas on unbounded integers.
module Tutorial_src.Math.Lemmas
#set-options "--fuel 0 --ifuel 1 --z3rlimit 150"
open Core
open FStar.Mul


// States that (a * n) % n == 0 for machine integers, where `v` maps a
// machine integer to its mathematical value; `n` must be non-negative.
val cancel_mul_mod (a:i32) (n:i32 {v n >= 0}) : Lemma ((v a * v n) % v n == 0)
// Proved by delegating to the corresponding stdlib lemma on mathematical integers.
let cancel_mul_mod a n =
  FStar.Math.Lemmas.cancel_mul_mod (v a) (v n)


================================================
FILE: docs/manual/fstar/tutorial/proofs/fstar/extraction/Tutorial_src.fst
================================================
// F* code extracted by hax from the tutorial's Rust examples.
// hax name-mangling conventions: types get a `t_` prefix, constants a
// `v_` prefix, enum constructors a `TypeName_` prefix.
module Tutorial_src
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

// Extraction of the Rust `enum F3 { E1, E2, E3 }`: three nullary constructors.
type t_F3 =
  | F3_E1 : t_F3
  | F3_E2 : t_F3
  | F3_E3 : t_F3

// Maps each constructor to its discriminant value as an `isize`.
// NOTE(review): `F3_E3 -> isz 3` is surprising for the third variant
// (default Rust discriminants would be 0, 1, 2) — confirm against hax output.
let t_F3_cast_to_repr (x: t_F3) : isize =
  match x with
  | F3_E1  -> isz 0
  | F3_E2  -> isz 1
  | F3_E3  -> isz 3

// Rust `type FieldElement = i32;` — `unfold` makes the alias transparent.
unfold
let t_FieldElement = i32

// Constants for the Barrett-reduction example.
let v_BARRETT_MULTIPLIER: i64 = 20159L

let v_BARRETT_R: i64 = 67108864L // 2^26

let v_BARRETT_SHIFT: i64 = 26L

let v_FIELD_MODULUS: i32 = 3329l

// Modulus of the refined field type `t_F` below.
let v_Q: u16 = 2347us

// Barrett reduction. The `requires`/`ensures` clauses come straight from
// the `hax_lib::requires`/`hax_lib::ensures` attributes on the Rust source:
// for |value| <= BARRETT_R, the result is congruent to `value` modulo
// FIELD_MODULUS and bounded in absolute value by FIELD_MODULUS.
let barrett_reduce (value: i32)
    : Prims.Pure i32
      (requires
        (Core.Convert.f_from value <: i64) >=. (Core.Ops.Arith.Neg.neg v_BARRETT_R <: i64) &&
        (Core.Convert.f_from value <: i64) <=. v_BARRETT_R)
      (ensures
        fun result ->
          let result:i32 = result in
          result >. (Core.Ops.Arith.Neg.neg v_FIELD_MODULUS <: i32) && result <. v_FIELD_MODULUS &&
          (result %! v_FIELD_MODULUS <: i32) =. (value %! v_FIELD_MODULUS <: i32)) =
  let t:i64 = (Core.Convert.f_from value <: i64) *! v_BARRETT_MULTIPLIER in
  let t:i64 = t +! (v_BARRETT_R >>! 1l <: i64) in
  let quotient:i64 = t >>! v_BARRETT_SHIFT in
  let quotient:i32 = cast (quotient <: i64) <: i32 in
  let sub:i32 = quotient *! v_FIELD_MODULUS in
  // Manual lemma call (from `fstar!` in the Rust source): (quotient * 3329) % 3329 == 0.
  let _:Prims.unit = Tutorial_src.Math.Lemmas.cancel_mul_mod quotient 3329l in
  value -! sub

// XOR "cipher" from the lemmas example; `^.` is bitwise XOR.
let decrypt (ciphertext key: u32) : u32 = ciphertext ^. key

let encrypt (plaintext key: u32) : u32 = plaintext ^. key

// Extraction of the Rust `#[hax_lib::lemma]`: decrypt inverts encrypt.
// The body `()` means the SMT solver discharges the proof automatically.
let encrypt_decrypt_identity (key plaintext: u32)
    : Lemma (requires true)
      (ensures (decrypt (encrypt plaintext key <: u32) key <: u32) =. plaintext) = ()

// Unannotated squaring; `*!` is checked multiplication, so F* must prove
// the product fits in u8 (it cannot, which is the tutorial's point).
let square (x: u8) : u8 = x *! x

// Variant with both a precondition (x < 16) and a postcondition (result >= x).
let square_ensures (x: u8)
    : Prims.Pure u8
      (requires x <. 16uy)
      (ensures
        fun result ->
          let result:u8 = result in
          result >=. x) = x *! x

// Total variant: returns None instead of overflowing.
let square_option (x: u8) : Core.Option.t_Option u8 =
  if x >=. 16uy
  then Core.Option.Option_None <: Core.Option.t_Option u8
  else Core.Option.Option_Some (x *! x) <: Core.Option.t_Option u8

// Variant with only a precondition, no postcondition.
let square_requires (x: u8) : Prims.Pure u8 (requires x <. 16uy) (fun _ -> Prims.l_True) = x *! x

// Rust `struct F { v: u16 }` with the `hax_lib::refine(v < Q)` invariant
// encoded as a refinement on the field type.
type t_F = { f_v:f_v: u16{f_v <. v_Q} }

// Typeclass instance for `core::ops::Add` on `t_F`: addition modulo v_Q,
// which preserves the `f_v < v_Q` refinement.
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl: Core.Ops.Arith.t_Add t_F t_F =
  {
    f_Output = t_F;
    f_add_pre = (fun (self: t_F) (rhs: t_F) -> true);
    f_add_post = (fun (self: t_F) (rhs: t_F) (out: t_F) -> true);
    f_add = fun (self: t_F) (rhs: t_F) -> { f_v = (self.f_v +! rhs.f_v <: u16) %! v_Q } <: t_F
  }


================================================
FILE: docs/manual/fstar/tutorial/properties.md
================================================
---
weight: 1
---

# Proving properties

In the last chapter, we proved one property on the `square` function:
panic freedom. After adding a precondition, the signature of the
`square` function was `x:u8 -> Pure u8 (requires x <. 16uy) (ensures fun _ -> True)`.

This contract stipulates that, given a small input, the function will
_return a value_: it will not panic or diverge. We could enrich the
contract of `square` with a post-condition about the fact it is a
increasing function:

``` {.rust .playable}
#[hax_lib::requires(x < 16)]
#[hax_lib::ensures(|result| result >= x)]
fn square_ensures(x: u8) -> u8 {
    x * x
}
```

Such a simple post-condition is automatically proven by F\*. The
properties of our `square` function are not fascinating. Let's study a
more interesting example: [Barrett reduction](https://en.wikipedia.org/wiki/Barrett_reduction).

## A concrete example of contract: Barrett reduction

While the correctness of `square` is obvious, the Barrett reduction is
not.

Given `value` a field element (a `i32` whose absolute value is at most
`BARRET_R`), the function `barrett_reduce` defined below computes
`result` such that:

- `result ≡ value (mod FIELD_MODULUS)`;
- the absolute value of `result` is bound as follows:
  `|result| < FIELD_MODULUS`.

It is easy to write this contract directly as `hax::requires` and
`hax::ensures` annotations, as shown in the snippet below.

```{.rust .playable}
type FieldElement = i32;
const FIELD_MODULUS: i32 = 3329;
const BARRETT_SHIFT: i64 = 26;
const BARRETT_R: i64 = 0x4000000; // 2^26
const BARRETT_MULTIPLIER: i64 = 20159; // ⌊(BARRETT_R / FIELD_MODULUS) + 1/2⌋

#[hax_lib::fstar::options("--z3rlimit 500")]
#[hax_lib::requires((i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R))]
#[hax_lib::ensures(|result| result > -FIELD_MODULUS && result < FIELD_MODULUS
                     && result %  FIELD_MODULUS ==  value % FIELD_MODULUS)]
fn barrett_reduce(value: i32) -> i32 {
    let t = i64::from(value) * BARRETT_MULTIPLIER;
    let t = t + (BARRETT_R >> 1);

    let quotient = t >> BARRETT_SHIFT;
    let quotient = quotient as i32;

    let sub = quotient * FIELD_MODULUS;

    value - sub
}
```



The proof for the code above uses the Z3 SMT solver to prove the
post-condition.  Since the SMT solver needs to reason about non-linear
arithmetic (multiplication, modulus, division) it needs more
resources, hence we bump up the `rlimit` to 500 in an annotation above
the function. With this annotation F\* and Z3 are able to automatically
verify this function. However, it is worth noting that the heuristic
strategies used by Z3 for non-linear arithmetic may sometimes fail to
complete in the given `rlimit` depending on the solver version or random
number generator, so we often give Z3 a generous resource limit.

Conversely, instead of relying on the SMT solver, we can also
elaborate the proof of this function by hand to make it more
predictable.  For example, before the final line of the function,
we could call a mathematical lemma to help F\* prove
the correctness of the reduction.  The lemma call would be:
```
    fstar!("Math.Lemmas.cancel_mul_mod (v quotient) 3329");
```
This lemma establishes that `(quotient * 3329) % 3329` is zero. We often use lemmas like
these to limit our dependence on Z3. 

This Barrett reduction example is taken from
[libcrux](https://github.com/cryspen/libcrux/tree/main)'s proof of
Kyber which is using hax and F\*.

This example showcases an **intrinsic proof**: the function
`barrett_reduce` not only computes a value, but it also ships a proof
that the post-condition holds. The pre-condition and post-condition
give the function a formal specification, which is useful both for
further formal verification and for documentation purposes.

## Extrinsic properties with lemmas

Consider the `encrypt` and `decrypt` functions below. Those functions
have no precondition and don't have particularly interesting properties
individually. However, the composition of the two yields a useful
property: encrypting a plaintext and decrypting the result with the
same key produces the plaintext again. `|c| decrypt(c, key)` is the
inverse of `|p| encrypt(p, key)`.

```{.rust .playable}
fn encrypt(plaintext: u32, key: u32) -> u32 {
    plaintext ^ key
}

fn decrypt(ciphertext: u32, key: u32) -> u32 {
    ciphertext ^ key
}
```

In this situation, adding a pre- or a post-condition to either
`encrypt` or `decrypt` is not useful: we want to state our inverse
property about both of them. Better, we want this property to be
stated directly in Rust: just as with pre and post-conditions, the
Rust sources should clearly state what is to be proven.

To this end, Hax provides a macro `lemma`. Below, the Rust function
`encrypt_decrypt_identity` takes a key and a plaintext, and then
states the inverse property. The body is empty: the details of the
proof itself are not relevant, at this stage, we only care about the
statement. The proof will be completed manually in the proof
assistant.

```{.rust .playable .expect-failure}
# fn encrypt(plaintext: u32, key: u32) -> u32 {
#     plaintext ^ key
# }
# 
# fn decrypt(ciphertext: u32, key: u32) -> u32 {
#     ciphertext ^ key
# }
# 
#[hax_lib::lemma]
#[hax_lib::requires(true)]

fn encrypt_decrypt_identity(
    key: u32,
    plaintext: u32,
) -> Proof<{ decrypt(encrypt(plaintext, key), key) == plaintext }> {
}
```


================================================
FILE: docs/manual/index.md
================================================
---
weight: -5
---

# Introduction

hax is a tool for high assurance translations of a large subset of
Rust into formal languages such as [F\*](https://www.fstar-lang.org/), [Lean](https://lean-lang.org/) or [Rocq](https://rocq-prover.org/).

## Usage

Hax is a cargo subcommand. 
The command `cargo hax` accepts the following subcommands:

* **`into`** (`cargo hax into BACKEND`): translate a Rust crate to the backend `BACKEND` (e.g. `fstar`, `coq`, `lean`).
* **`json`** (`cargo hax json`): extract the typed AST of your crate as a JSON file.
 
Note:

* `BACKEND` can be `fstar`, `coq`, `lean`, `easycrypt` or `pro-verif`. `cargo hax into --help`
   gives the full list of supported backends.
* The subcommands `cargo hax`, `cargo hax into` and `cargo hax into
   BACKEND` take options. For instance, you can run `cargo hax into
   fstar --z3rlimit 100`. Use `--help` on those subcommands to list
   all options.

## Installation

### Manual installation

1. Make sure to have the following installed on your system:

      - [`opam`](https://opam.ocaml.org/) (`opam switch create 5.1.1`)
      - [`rustup`](https://rustup.rs/)
      - [`nodejs`](https://nodejs.org/)
      - [`jq`](https://jqlang.github.io/jq/)

2. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax`
3. Run the `setup.sh` script: `./setup.sh`.
4. Run `cargo-hax --help`

### Nix

This should work on [Linux](https://nixos.org/download.html#nix-install-linux), [MacOS](https://nixos.org/download.html#nix-install-macos) and [Windows](https://nixos.org/download.html#nix-install-windows).

Prerequisites: Nix package
manager (with flakes enabled)

  - Either using the [Determinate Nix Installer](https://github.com/DeterminateSystems/nix-installer), with the following bash one-liner:
    ```bash
    curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install
    ```
  - or following [those steps](https://github.com/mschwaig/howto-install-nix-with-flake-support).

+ **Run hax on a crate directly** to get F\*/Coq/... (assuming you are in the crate's folder):
   - `nix run github:hacspec/hax -- into fstar` extracts F\*.

+ **Install hax**:  `nix profile install github:hacspec/hax`, then run `cargo hax --help` anywhere
+ **Note**: in any of the Nix commands above, replace `github:hacspec/hax` by `./some-dir` to compile a local checkout of hax that lives in `./some-dir`
+ **Setup binary cache**: [using Cachix](https://app.cachix.org/cache/hax), just `cachix use hax`

### Docker

1. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax`
2. Build the docker image: `docker build -f .docker/Dockerfile . -t hax`
3. Get a shell: `docker run -it --rm -v /some/dir/with/a/crate:/work hax bash`
4. You can now run `cargo-hax --help` (notice here we use `cargo-hax` instead of `cargo hax`)

Note: Please make sure that `$HOME/.cargo/bin` is in your `$PATH`, as
that is where `setup.sh` will install hax.



================================================
FILE: docs/manual/lean/index.md
================================================
This section introduces the Lean backend of hax. It shows the basic setup to use hax and Lean on a Rust crate, and gives an introduction to the basic features that can be used to make Lean proofs about Rust code.

================================================
FILE: docs/manual/lean/internals.md
================================================
---
weight: 102
---

# Internals

The encoding of Rust in Lean has three main components:

* the *syntax* (items, functions, `if`-`else`, `match`, etc), defined by the *backend*
  ([`/rust-engine/src/backends/lean.rs`](https://github.com/cryspen/hax/blob/main/rust-engine/src/backends/lean.rs))
* the *primitives/intrinsics* (`u32`, `isize`, slices, etc) defined in the *Prelude*
  ([`hax-lib/proof-libs/lean`](https://github.com/cryspen/hax/tree/main/hax-lib/proof-libs/lean))
* the models of *core* and *std* libraries.

While mostly separated, the Backend makes some assumptions on the Prelude,
typically when it inserts notations for some symbols (e.g. `+?` for addition).

**Disclaimer**: The Lean backend is still experimental. See the list of [open
issues](https://github.com/cryspen/hax/issues?q=is%3Aissue%20state%3Aopen%20label%3Alean)
for known problems and workarounds. See also the [Hax
Zulip](https://hacspec.zulipchat.com/) for technical support.

## Backend

### Monadic encoding

All rust computations can panic or diverge, while Lean ones cannot (by
default). To account for this, Rust types are wrapped inside a
[monad](https://en.wikipedia.org/wiki/Monad_(functional_programming)) that
represents the possible results:

```lean
inductive Error where
   | assertionFailure: Error
   | integerOverflow: Error
   | divisionByZero: Error
   | arrayOutOfBounds: Error
   | maximumSizeExceeded: Error
   | panic: Error
   | undef: Error

inductive RustM.{u} (α : Type u) where
  | ok (v: α): RustM α
  | fail (e: Error): RustM α
  | div
```

This monadic encoding shows up even for simple expressions: the result of the
Lean-extracted function is not `u32` but `RustM u32`.

/// html | div[style='float: left; width: 48%;']
```rust
fn f (x: u32) -> u32 {
    x + 1
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
def f (x : u32) : RustM u32
  := do (← x +? (1 : u32))
```
///

/// html | div[style='clear: both;']
///

The backend relies on the
[do-notation](https://lean-lang.org/doc/reference/latest//Functors___-Monads-and--do--Notation/Syntax/#do-notation):
all functions start with the `do` keyword, indicating that the sequence of bindings should
actually be understood as bindings in the monad, propagating potential
errors to the top.

The `do` keyword enables the lifting `←` and the `pure` operators. Intuitively,
lifting turns a value of type `RustM T` into a value of type `T` by turning the
rest of the program into a use of `bind`. Conversely, `pure` turns a value of
type `T` into a value of type `RustM T`. This also shows up for let-bindings:


/// html | div[style='float: left; width: 48%;']
```rust
fn f (x: u32) -> u32 {
    let y = x + 1;
    let z = y + 1;
    y + z
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
def f (x : u32) : RustM u32 := do
  let y : u32 ← (pure
    (← x +? (1 : u32)));
  let z : u32 ← (pure
    (← y +? (1 : u32)));
  (← y +? z)
```
///

/// html | div[style='clear: both;']
///


Currently, the backend does not try to be parsimonious with the introduction of `pure` and `←`.

### Structs

#### Type definitions

Rust structs are encoded as [Lean
structures](https://lean-lang.org/doc/reference/latest//The-Type-System/Inductive-Types/#structures). The
special case of [tuple
structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types)
is also encoded as Lean structures, where the fields are numbered: `_0`, `_1`,
etc. See for instance:

/// html | div[style='float: left; width: 48%;']

```rust
struct S1 {
    f1: usize,
    f2: usize,
}

struct S2 {
    f1: S1,
    f2: usize,
}

// Tuple structs
struct T0();
struct T1(A);
struct T2(A, B);
struct T3(A, B, C);
struct T3p(A, T2);
```
///

/// html | div[style='float: right;width: 48%;']
```lean
structure S1 where
  f1 : usize
  f2 : usize

structure S2 where
  f1 : S1
  f2 : usize

structure T0 where
structure T1 A where
  _0 : A
structure T2 A B where
  _0 : A
  _1 : B
structure T3 A B C where
  _0 : A
  _1 : B
  _2 : C
structure T3p A B C where
  _0 : A
  _1 : (T2 B C)
```
///

/// html | div[style='clear: both;']
///

#### Expressions, accessors and pattern-matching

Building, accessing and destructuring structs:

/// html | div[style='float: left; width: 48%;']
```rust
// Building
let s1 = S1 { f1: 0, f2: 1 };

let t3 = T3(T0(), T1(1), T2(1, 2));

// Matching
let S1 { f1, f2 } = s1;

let T3(T0(), T1(_), T2(_, _)) = t3;

// Accessing
let _ = (s1.f1, s1.f2);

let _ = t3.0;
let _ = t3.1;
let _ = t3.2;
let _ = t3.2.1;

```
///

/// html | div[style='float: right;width: 48%;']

```lean
-- Building
let s1 : S1 ← (pure
  (S1.mk
    (f1 := (0 : usize))
    (f2 := (1 : usize))));

let t3 :
  (T3 T0 (T1 i32) (T2 i32 i32))
  ← (pure
      (T3.mk
        T0.mk
        (T1.mk (1 : i32))
        (T2.mk
          (1 : i32)
          (2 : i32))));

-- Matching
let ({f1 := (f1 : usize),
      f2 := (f2 : usize)} : S1) ←
  (pure s1);

let (⟨(⟨⟩ : T0),
      (⟨(_ : i32)⟩ : (T1 i32)),
      (⟨(_ : i32), (_ : i32)⟩
        : (T2 i32 i32))⟩ :
    (T3 T0 (T1 i32) (T2 i32 i32))) ←
  (pure t3);

-- Accessing
let (_ : (Tuple2 usize usize)) ←
  (pure
    (Tuple2.mk
      (S1.f1 s1)
      (S1.f2 s1)));

let (_ : i32) ← (pure
  (T2._1 t2));
let (_ : T0) ← (pure
  (T3._0 t3));
let (_ : (T1 i32)) ← (pure
  (T3._1 t3));
let (_ : (T2 i32 i32)) ← (pure
  (T3._2 t3));
let (_ : i32) ← (pure
  (T2._1 (T3._2 t3)));
```
///

/// html | div[style='clear: both;']
///

### Enums

#### Type definitions

Rust enums are encoded as [Lean inductive
types](https://lean-lang.org/doc/reference/latest/The-Type-System/Inductive-Types/#inductive-types). Variants
with record fields use *named* arguments, whereas variants with tuple fields use
normal positional arguments.


/// html | div[style='float: left; width: 48%;']
```rust
// 1. Type definition
enum E {
    // unit-like
    V1,
    V2,
    // with positional arguments
    V3(usize),
    V4(usize, usize, usize),
    // with named arguments
    V5 { f1: usize, f2: usize },
    V6 { f1: usize, f2: usize },
}
```
///

/// html | div[style='float: right;width: 48%;']

```lean
inductive E : Type
| V1  : E
| V2  : E
| V3  : usize -> E
| V4  : usize -> usize -> usize -> E
| V5 (f1 : usize) (f2 : usize) : E
| V6 (f1 : usize) (f2 : usize) : E
```
///

/// html | div[style='clear: both;']
///

#### Expressions and pattern-matching

/// html | div[style='float: left;width: 48%;']

```rust
// Building
let e_v1 = E::V1;
let e_v2 = E::V2;
let e_v3 = E::V3(23);
let e_v4 = E::V4(23, 12, 1);
let e_v5 = E::V5 { f1: 23, f2: 43 };
let e_v6 = E::V6 { f1: 12, f2: 13 };

// Matching
match e_v1 {
    E::V1 => (),
    E::V2 => (),
    E::V3(_) => (),
    E::V4(x1, x2, x3) => {
        let y1 = x1 + x2;
        let y2 = y1 - x2;
        let y3 = y2 + x3;
        ()
    }
    E::V5 { f1, f2 } => (),
    E::V6 {
        f1,
        f2: other_name_for_f2,
    } => (),
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
def enums (_ : Tuple0)
  : RustM Tuple0
  := do
  let e_v1 : E ← (pure E.V1);
  let e_v2 : E ← (pure E.V2);
  let e_v3 : E ← (pure
    (E.V3 (23 : usize)));
  let e_v4 : E ← (pure
    (E.V4
      (23 : usize)
      (12 : usize)
      (1 : usize)));
  let e_v5 : E ← (pure
    (E.V5
      (f1 := (23 : usize))
      (f2 := (43 : usize))));
  let e_v6 : E ← (pure
    (E.V6
      (f1 := (12 : usize))
      (f2 := (13 : usize))));
  (match e_v1 with
    | (E.V1 ) => do Tuple0.mk
    | (E.V2 ) => do Tuple0.mk
    | (E.V3 (_ : usize))
      => do Tuple0.mk
    | (E.V4
        (x1 : usize)
        (x2 : usize)
        (x3 : usize))
      => do
        let y1 : usize ← (pure
          (← x1 +? x2));
        let y2 : usize ← (pure
          (← y1 -? x2));
        let y3 : usize ← (pure
          (← y2 +? x3));
        Tuple0.mk
    | (E.V5
        (f1 := (f1 : usize))
        (f2 := (f2 : usize)))
      => do Tuple0.mk
    | (E.V6
        (f1 := (f1 : usize))
        (f2 :=
          (other_name_for_f2 : usize)))
      => do Tuple0.mk)
```
///

/// html | div[style='clear: both;']
///
### Traits

Rust traits are represented as Lean classes, while Rust impl are Lean
instances. The Lean code relies on the typeclass inference of Lean. Hax exposes
identifiers for rust impls (that are otherwise implicit), like
`8040238289193487104`. Lean uses them for naming fields or parameters.


/// html | div[style='float: left; width: 48%;']

```rust
trait T1 {
    fn f1(&self) -> usize;
    fn f2(&self, y: &Self) -> usize;
}

struct S;

impl T1 for S {
    fn f1(&self) -> usize {
        42
    }
 fn f2(&self, y: &Self) -> usize {
        43
    }
}

fn f<T: T1>(x: T) -> usize {
    x.f1() + x.f2(&x)
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
class T1 (Self : Type) where
  f1 : Self -> RustM usize
  f2 : Self -> Self -> RustM usize

structure S where

instance Impl : T1 S where
  f1 (self : S)
    := do (42 : usize)
  f2 (self : S) (y : S)
    := do (43 : usize)

def f (T : Type) [(T1 T)] (x : T)
  : RustM usize
  := do
  (← (← T1.f1 x) +? (← T1.f2 x x))
```
///

/// html | div[style='clear: both;']
///


#### Supertraits

Super trait bounds are represented as extra fields.


/// html | div[style='float: left; width: 48%;']
```rust
trait Test: T2 {
   fn f_test(&self, x: &T) -> usize;
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
class Test
  (Self : Type)
  (T : Type)
  where
  [_constr_8040238289193487104 :
    (T2 Self)]
  [_constr_7570495343596639253 :
    (T1 T)]
  f_test :
    Self -> T -> RustM usize
```
///

/// html | div[style='clear: both;']
///

#### Associated types

The support for associated types is currently restricted to types defined within
the current trait


/// html | div[style='float: left; width: 48%;']
```rust
mod associated_types {
    trait T1 {
        type T;
        fn f(&self, x: Self::T) -> Self::T;
    }

    trait T2 {
        type T: T1;
        fn f(&self, x: Self::T) -> usize;
    }

    trait Foo {}
    trait Bar {}

    trait T3 {
        type T: Foo<()>;
        type Tp: Foo;
        fn f(&self, x: Self::T, y: Self::Tp) -> usize;
    }
}
```
///

/// html | div[style='float: right;width: 48%;']
```lean
class Foo (Self : Type) (T : Type) where


class Bar (Self : Type) where


class T1 (Self : Type) where
  T : Type
  f : Self -> T -> RustM T

class T3 (Self : Type) where
  T : Type
  [_constr_13086648656846024831 :
    (Foo T Tuple0)]
  Tp : Type
  [_constr_15450263461214744089 : (Foo Tp T)]
  f (A : Type) [(Bar A)] :
    Self -> T -> Tp -> RustM usize

class T2 (Self : Type) where
  T : Type
  [_constr_18277713886489441014 : (T1 T)]
  f : Self -> T -> RustM usize

```
///

/// html | div[style='clear: both;']
///


## Prelude

See the [Hax Lean library](https://github.com/cryspen/hax/tree/main/hax-lib/proof-libs/lean)


================================================
FILE: docs/manual/lean/quick_start.md
================================================
---
weight: 100
---

# Quick start

## Setup the tools

 -  [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation).  
   🪄 Running `cargo hax --version` should print some version info.
 -  [Install Lean](https://lean-lang.org/install/)
  -  Add `hax-lib` as a dependency to your crate, enabled only when using hax.  
   🪄 `cargo add --target 'cfg(hax)' --git https://github.com/hacspec/hax hax-lib`  
   🪄 *(`hax-lib` is not mandatory, but this guide assumes it is present)*

## Setup the crate you want to verify

*Note: the instructions below assume you are in the folder of the specific crate (**not workspace!**) you want to extract.*


 -  Create the folder `proofs/lean/extraction`, right next to the `Cargo.toml` of the crate you want to verify.  
   🪄 `mkdir -p proofs/lean/extraction`
 -  Create `proofs/lean/extraction/lakefile.toml`, and add the following content:  
```toml
name = "your_crate_name"
version = "0.1.0"
defaultTargets = ["your_crate_name"]

[[lean_lib]]
name = "your_crate_name"

[[require]]
name = "Hax"
git.url = "https://github.com/cryspen/hax"
git.subDir = "hax-lib/proof-libs/lean"
rev = "main"
``` 
 -  Create `proofs/lean/extraction/lean-toolchain`,
 with the following content:
```
leanprover/lean4:v4.29.0-rc1 
```
This version should be the same version as in the file `hax-lib/proof-libs/lean/lean-toolchain` of
your hax installation.

## Partial extraction

*Note: the instructions below assume you are in the folder of the
specific crate you want to extract.*

Run the command `cargo hax into lean` to extract every item of your
crate as Lean modules in the subfolder `proofs/lean/extraction`.

**What is critical? What is worth verifying?**  
Probably, your Rust crate contains mixed kinds of code: some parts are
critical (e.g. the library functions at the core of your crate) while
some others are not (e.g. the binary driver that wraps the
library). In this case, you likely want to extract only partially your
crate, so that you can focus on the important part.

**Using the `-i` flag.**  
If you want to extract a function
`your_crate::some_module::my_function`, you need to tell `hax` to
extract nothing but `my_function`:

```bash
cargo hax into -i '-** +your_crate::some_module::my_function' lean
```

This command will remove all items from extraction (`-**`) and add back `my_function`, along with all its dependencies (other functions, type definitions, etc.) from your crate. If you don't want the dependencies, you can use `+!` instead of `+`. See [the FAQ](../faq/include-flags.md) or `cargo hax into --help` for more options for partial extraction.

**Unsupported Rust code.**  
hax [doesn't support every Rust
construct](https://github.com/hacspec/hax?tab=readme-ov-file#supported-subset-of-the-rust-language),
`unsafe` code, or complicated mutation schemes. That is another reason
for extracting only a part of your crate. When running hax, if an item
of your crate, say a function `my_crate::f`, is not handled by hax,
you can remove it from the extraction target by adding  `-my_crate::f` as an option to the `-i` flag. 

## Start Lean verification
After extracting your Rust code to Lean, the result is in the `proofs/lean/extraction` folder. The
`lakefile.toml` allows you to run Lean on this folder by running `lake build` (or directly in the IDE 
using the LSP). Contrary to F\*, successfully building the code doesn't prove panic freedom; this
happens only if the specification states that the code is panic-free. 

### Current limitations
The Lean backend of Hax is under active development, and extraction can *fail* even on supported Rust. This can come from a missing Rust feature (i.e. supported by the Hax engine but not yet by the Lean backend). Testing the same extraction target on the *F\** backend can be an easy way to check. If all the Rust features are supported, then the extracted code can still fail to build if it uses definitions from the Rust `core` and `std` libraries that are missing in our Lean model (in `hax-lib/proof-libs/lean`). We're actively extending it to support idiomatic code, but feel free to report issues on [zulip](https://hacspec.zulipchat.com/) or [github](https://github.com/cryspen/hax/issues).


================================================
FILE: docs/manual/lean/tutorial/index.md
================================================
---
weight: 101
---

# Tutorial

This tutorial focuses on proving properties with the hax toolchain and its
[Lean](https://lean-lang.org/) backend.


================================================
FILE: docs/manual/lean/tutorial/panic-freedom.md
================================================
---
weight: 0
---

# Panic freedom

Let's start with a simple example: a function that squares a `u8`
integer. To extract this function to Lean using hax, we simply need to
run the command `cargo hax into lean` in the directory of the crate
in which the function `square` is defined.

*Note: throughout this tutorial, you can edit the snippets of code and
extract to Lean by clicking the play button (:material-play:), or even typecheck it with the button (:material-check:).*

```{.rust .playable .lean-backend}
fn square(x: u8) -> u8 {
    x * x
}
```

If we run `lake build` on the result (or type-check using the playground), we get a success. If you followed the F\* tutorial, this might be a surprise because the function is not 
panic-free. Indeed, our encoding of Rust code in Lean wraps everything in a result monad. And 
functions that panic return an error in this monad. To try to prove panic-freedom, we have to 
specify that the result of `square` is expected not to be an error in this result type. A way
to do that is the following:
```{.rust .playable .lean-backend .expect-failure}
#[hax_lib::requires(true)]
#[hax_lib::ensures(|res| true)]
fn square(x: u8) -> u8 {
    x * x
}
```
Adding a `hax_lib::requires` and a `hax_lib::ensures` annotation will make Hax generate a specification of the function, asserting panic freedom as well as the postcondition. Here, we used the trivial postcondition `true`, so we only assert panic freedom.

If we try running `lake build`
after extracting this code, we get an error: 
`The prover found a counterexample, consider the following assignment: value = 255`. Indeed `square(255)` 
panics because the multiplication overflows.

## Rust and panicking code
Quoting the chapter [To `panic!` or Not to
`panic!`](https://doc.rust-lang.org/book/ch09-03-to-panic-or-not-to-panic.html)
from the Rust book:

> The `panic!` macro signals that your program is in a state it can't
> handle and lets you tell the process to stop instead of trying to
> proceed with invalid or incorrect values.

A Rust program should panic only in a situation where an assumption
or an invariant is broken: a panic models an *invalid* state. Formal
verification is about proving such invalid state cannot occur, at all.

From this observation emerges the urge of proving Rust programs to be
panic-free!

## Fixing our squaring function
Let's come back to our example. There is an informal assumption to the
multiplication operator in Rust: the inputs should be small enough so
that the multiplication doesn't overflow.

Note that Rust also provides `wrapping_mul`, a non-panicking variant
of the multiplication on `u8` that wraps when the result is bigger
than `255`. Replacing the common multiplication with `wrapping_mul` in
`square` would fix the panic, but then, `square(16)` returns zero.
Semantically, this is not what one would expect from `square`.

Our problem is that our function `square` is well-defined only when
its input is between `0` and `15`.

### Solution: add a precondition

We already added a pre-condition to specify panic-freedom but we can turn it into a more interesting pre-condition to restrict the inputs and stay in the domain where the multiplication fits in a `u8`. We only need to modify the Rust condition that is passed to the `hax_lib::requires` macro: 

```{.rust .playable .lean-backend}
#[hax_lib::requires(x < 16)]
#[hax_lib::ensures(|res| true)]
fn square(x: u8) -> u8 {
    x * x
}
```

With this precondition, Lean is able to prove panic freedom. From now
on, it is the responsibility of the clients of `square` to respect the
contract.

## Common panicking situations
Multiplication is not the only panicking function provided by the Rust
library: most of the other integer arithmetic operation have such
informal assumptions.

Another source of panics is indexing. Indexing in an array, a slice or
a vector is a partial operation: the index might be out of range.

In the example folder of hax, you can find the [`chacha20`
example](https://github.com/cryspen/hax/blob/main/examples/chacha20/src/lib.rs)
that makes use of pre-conditions to prove panic freedom.


================================================
FILE: docs/manual/lean/tutorial/properties.md
================================================
---
weight: 1
---

# Proving properties

In the previous chapter, we proved one property of the `square` function:
panic freedom.

This contract stipulates that, given a small input, the function will
_return a value_: it will not panic or diverge. We could enrich the
contract of `square` with a post-condition about the fact it is an
increasing function:
```{.rust .playable .lean-backend}
#[hax_lib::requires(x < 16)]
#[hax_lib::ensures(|res| res >= x)]
fn square(x: u8) -> u8 {
    x * x
}
```
This works as well.

The property that we prove above demonstrates a very simple case of a proof using hax and Lean. For a more complex example, a version of the Barrett example is available in the 
[`examples`](https://github.com/cryspen/hax/tree/main/examples/lean_barrett) 
section of hax. 




================================================
FILE: docs/overrides/main.html
================================================
{% extends "base.html" %}

{% block site_meta %}





{{ super() }}
{% endblock %}


================================================
FILE: docs/publications.md
================================================
---
weight: 5
---

To cite hax, please use
**[hax: Verifying Security-Critical Rust Software using Multiple Provers](https://eprint.iacr.org/2025/142)**.

# Publications

* [Formal Security and Functional Verification of Cryptographic Protocol Implementations in Rust](https://eprint.iacr.org/2025/980)
* [hax: Verifying Security-Critical Rust Software using Multiple Provers](https://eprint.iacr.org/2025/142)
* [hacspec Tech report](https://hal.inria.fr/hal-03176482)
* [HACSpec: A gateway to high-assurance cryptography](https://github.com/hacspec/hacspec/blob/master/rwc2023-abstract.pdf)
* [Original hacspec paper](https://www.franziskuskiefer.de/publications/hacspec-ssr18-paper.pdf)

### Secondary literature, using hacspec & hax:
* [Last yard](https://eprint.iacr.org/2023/185)
* [A Verified Pipeline from a Specification Language to Optimized, Safe Rust](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl22-final61.pdf) at [CoqPL'22](https://popl22.sigplan.org/details/CoqPL-2022-papers/5/A-Verified-Pipeline-from-a-Specification-Language-to-Optimized-Safe-Rust)
* [Hax - Enabling High Assurance Cryptographic Software](https://github.com/hacspec/hacspec.github.io/blob/master/RustVerify24.pdf) at [RustVerify24](https://sites.google.com/view/rustverify2024)
* [A formal security analysis of Blockchain voting](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper8-2.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/8/A-formal-security-analysis-of-Blockchain-voting)
* [Specifying Smart Contract with Hax and ConCert](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/9/Specifying-Smart-Contract-with-Hax-and-ConCert)


================================================
FILE: docs/stylesheets/hax_playground.css
================================================
/* Hide the decorative ::after marker on playground containers. */
.md-hax-playground::after {
    display: none;
}

/* Drop CodeMirror's default line padding for inline editors. */
textarea.code.inline+div.CodeMirror div.CodeMirror-lines {
    padding: 0px !important;
    /* added !important as padding is an inline style */
}

/* Remove the focus outline around the CodeMirror 6 editor. */
.cm-editor {
    outline: none !important;
}

/* Tighten the padding of code content that follows a playground. */
.md-hax-playground~.md-code__content {
    padding: 14px 7px 14px 7px !important;
}

/* Playground <pre> blocks must not add their own margins. */
pre.md-hax-playground-pre {
    margin: 0 !important;
}

/* Utility class: horizontally center a block element. */
.center {
    display: block;
    margin: 0 auto;
}

/* Center Markdown Tables (requires md_in_html extension) */
.center-table {
    text-align: center;
}

.md-typeset .center-table :is(td, th):not([align]) {
    /* Reset alignment for table cells */
    text-align: initial;
}

/* Center mermaid diagrams. */
.mermaid {
    text-align: center;
}


================================================
FILE: docs/stylesheets/logo.css
================================================
/* Always show the site logo button in the header. */
.md-header__button[data-md-component="logo"] {
    display: block !important;
}

/* Move the drawer (hamburger) toggle to the far left of the header. */
label.md-header__button.md-icon[for="__drawer"] {
    order: -1;
}

/* Center the logo image inside the drawer title. */
label.md-nav__title[for="__drawer"] img {
    margin: auto;
}

/* Shrink the drawer title text to (near) invisibility while keeping the
   logo image visible.
   NOTE(review): `overflow: none` is not a valid CSS value (likely meant
   `hidden`) — browsers ignore it; confirm intended clipping behavior. */
label.md-nav__title[for="__drawer"] {
    font-size: 0.0001px !important;
    color: red !important;
    text-align: center;
    height: 4rem;
    overflow: none;
    /* color: transparent !important; */
}

/* Hide the topic/title text in the header bar. */
.md-header__topic {
    display: none;
}

================================================
FILE: docs/stylesheets/tags-colors.css
================================================
/* This sets the color of the ADR status tags see https://github.com/squidfunk/mkdocs-material/discussions/5101 */

/* Draft: blue. */
.md-typeset .md-tag--draft,
.md-typeset .md-tag--draft[href] {
    background-color: #5694ca;
    color: white;
}

/* Accepted: green. */
.md-typeset .md-tag--accepted,
.md-typeset .md-tag--accepted[href] {
    background-color: #00703c;
    color: white;
}

/* Deprecated: grey. */
.md-typeset .md-tag--deprecated,
.md-typeset .md-tag--deprecated[href] {
    background-color: #b1b4b6;
    color: white;
}

/* Proposed: dark blue. */
.md-typeset .md-tag--proposed,
.md-typeset .md-tag--proposed[href] {
    background-color: #003078;
    color: white;
}

/* Rejected: orange. */
.md-typeset .md-tag--rejected,
.md-typeset .md-tag--rejected[href] {
    background-color: #f47738;
    color: white;
}

/* Superseded: dark grey. */
.md-typeset .md-tag--superseded,
.md-typeset .md-tag--superseded[href] {
    background-color: #505a5f;
    color: white;
}


================================================
FILE: engine/.ocamlformat
================================================
profile = default
version = 0.27.0


================================================
FILE: engine/DEV.md
================================================
# Notes about developing on the engine

Note that Hax uses [JSON schemas](https://json-schema.org/): an OCaml
module `types.ml` defining the types we declared on the Rust side
with JSON parser and serializer is automatically generated. Thus, when
we refer here to a Rust type in the context of the engine, we mean its
OCaml automatically derived counterpart in `Types.ml`.

The engine is designed to behave like a "pure" function.
 1. It receives a JSON string in its stdin.
 2. Parses the JSON as a
 [`EngineOptions`](../cli/options/engine/src/lib.rs): note this
 structure has everything the engine needs to know. The behavior of
 the engine should be completely deterministic given this structure.
 3. From the engine options we received on stdin, we extract the
    `input` field, that contains all the items of the Rust crate we
    want to translate.
 4. Those items are translated in [our internal AST](lib/ast.ml) by
    the module [`Import_thir`](lib/import_thir.ml).
 5. According to the engine options we just got as JSON, we choose a backend.
 6. We run the `translate` function of that backend, that applies a
    certain number of rewrite phases, transporting the items in a
    type-safe manner from an AST to another.
 7. The backend produces a list of
    [`File`](../cli/options/engine/src/lib.rs)s. Each phase might also
    (as a side effect) have produced diagnostics messages. Those are
    collected in [`Diagnostics.Core.state`](lib/diagnostics.ml).
 8. Gathering files and diagnostics, we make a
    [`Output`](../cli/options/engine/src/lib.rs) value, serialize it
    to JSON, and output that on stdout.  
    *Note that the engine doesn't write or read anything on the
    hard-drive, it is supposed to be entirely side-effect free (when
    not in debug mode): files are created by the [Rust
    driver](../cli/driver/src/exporter.rs)*

## Miscellaneous
### How to show types of `Types.ml`?
`dune build` produces that file using `utils/ocaml_of_json_schema`,
and stores it in `build/default/lib/types.ml`.

To show the file nicely formated, use: `dune describe pp lib/types.ml` (or `dune describe pp lib/types.ml | bat -l ml`, if you have [`bat`](https://github.com/sharkdp/bat))

You can also use `dune utop` and then `#show_type Hax_engine.Types.SOME_TYPE` and `#show_constructor Hax_engine.Types.SOME_CONSTRUCTOR`.

### Visitors
The module `Ast_visitors` provides a `Make` functor that takes a feature set and outputs a module that defines the `map`, `mapreduce` and `reduce` classes.

Those visitors are generated by `./utils/generate_visitors`. 
To see the implementation of the `Ast_visitors` module, run `dune describe pp lib/ast_visitors.ml`.

### Core_models extraction

Core_models extraction requires some special treatment, in particular changing the imports in the generated code. The information that the crate being extracted is core_models can be passed to the engine by setting HAX_CORE_MODELS_EXTRACTION_MODE to 'on'.

## Debugging the phases
You can enable a debug mode that prints a Rustish AST at each phase,
that you can browse interactively along with the actual AST.

Just add the flag `--debug-engine i` (or `-d i`, `i` stands for
**i**nteractive) to the `into` subcommand.  At the end of the
translation, `cargo hax` will spawn a webserver that lets you browse
the debug information. Note that you can change the port by setting the
environment variable `HAX_DEBUGGER_PORT`.


================================================
FILE: engine/backends/coq/coq/coq_backend.ml
================================================
open Hax_engine
open Utils
open Base
open Coq_ast

(* Instantiate the generic backend functor for Coq.  The first argument
   declares the feature set this backend's input language accepts:
   everything is off except slices, monadic bindings, macros and base
   constructors.  The second argument identifies this backend for
   diagnostics reporting. *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Slice
      include On.Monadic_binding
      include On.Macro
      include On.Construct_base
    end)
    (struct
      let backend = Diagnostics.Backend.Coq
    end)

(* Embedding of a caller feature set [FA] into this backend's
   [InputLanguage].  The [with type ... = Features.Off....] constraints
   enumerate every AST feature the Coq backend rejects: earlier rewrite
   phases must have eliminated them before items reach this backend.
   NOTE(review): the [mutable_reference] constraint appears twice in the
   list — harmless, but presumably a copy-paste leftover. *)
module SubtypeToInputLanguage
    (FA :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type continue = Features.Off.continue
         and type break = Features.Off.break
         and type mutable_reference = Features.Off.mutable_reference
         and type mutable_pointer = Features.Off.mutable_pointer
         and type mutable_variable = Features.Off.mutable_variable
         and type reference = Features.Off.reference
         and type raw_pointer = Features.Off.raw_pointer
         and type early_exit = Features.Off.early_exit
         and type question_mark = Features.Off.question_mark
         and type as_pattern = Features.Off.as_pattern
         and type lifetime = Features.Off.lifetime
         and type monadic_action = Features.Off.monadic_action
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type block = Features.Off.block
         and type quote = Features.Off.quote
         and type dyn = Features.Off.dyn
         and type match_guard = Features.Off.match_guard
         and type trait_item_default = Features.Off.trait_item_default
         and type unsafe = Features.Off.unsafe
         and type loop = Features.Off.loop
         and type for_loop = Features.Off.for_loop
         and type while_loop = Features.Off.while_loop
         and type for_index_loop = Features.Off.for_index_loop
         and type state_passing_loop = Features.Off.state_passing_loop
         and type fold_like_loop = Features.Off.fold_like_loop) =
struct
  module FB = InputLanguage

  (* The subtype is the identity on all shared features, and explicitly
     turns on the four features this backend supports. *)
  include
    Subtype.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Features.SUBTYPE.Id
        include Features.SUBTYPE.On.Monadic_binding
        include Features.SUBTYPE.On.Construct_base
        include Features.SUBTYPE.On.Slice
        include Features.SUBTYPE.On.Macro
      end)

  (* This embedding performs no rewriting: items that are not expressible
     in the backend language are rejected with a diagnostic. *)
  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))
end

(* Naming policy for rendering hax identifiers as Coq identifiers.
   Starts from the default policy and overrides the parts where Coq has
   its own conventions (reserved keywords, constructor prefixes, etc). *)
module CoqNamePolicy = struct
  include Concrete_ident.DefaultNamePolicy

  (** List of all words that have a special meaning in the target language, and
      that should thus be escaped. *)
  let reserved_words : string Hash_set.t =
    Hash_set.of_list
      (module String)
      [
        "Definition";
        "Inductive";
        "match";
        "if";
        "then";
        "else";
        "as";
        "into";
        "end";
        "Record";
        "Arguments";
        "Type";
      ]
  (* TODO: Make complete *)

  (** Transformation applied to indexes fields name (i.e. [x.1]) *)
  let anonymous_field_transform x = x (* identity: keep numeric names as-is *)

  (* Named struct fields are prefixed with their type name to avoid clashes;
     struct constructors follow Coq's usual [Build_t_] convention.
     NOTE(review): the [AABBCC]/[DDEEFF] enum/union prefixes look like
     placeholder values — confirm they are intentional. *)
  let named_field_prefix = Some `TypeName
  let struct_constructor_prefix = Some "Build_t_"
  let enum_constructor_prefix = Some "AABBCC"
  let union_constructor_prefix = Some "DDEEFF"
  (* NOTE(review): double underscore in [prefix__constructors_with_type]
     looks like a typo for [prefix_constructors_with_type] — confirm
     against the NamePolicy interface this module must satisfy. *)
  let prefix__constructors_with_type = true
  let prefix_struct_constructors_with_type = true
  let prefix_enum_constructors_with_type = true
  let prefix_union_constructors_with_type = true
  let prefix_associated_item_with_trait_name = true
end

(* Specialise the AST and its utility modules to this backend's feature set. *)
module AST = Ast.Make (InputLanguage)
(* The Coq backend takes no backend-specific command-line options. *)
module BackendOptions = Backend.UnitBackendOptions
open Ast
module U = Ast_utils.Make (InputLanguage)
(* Renders concrete identifiers according to the Coq naming policy above. *)
module RenderId = Concrete_ident.MakeRenderAPI (CoqNamePolicy)
open AST

(* Preamble emitted verbatim at the top of every generated Coq file:
   standard-library imports, notation scopes, the RecordUpdate library for
   record-update syntax, and the hax Core support library. *)
let hardcoded_coq_headers =
  "(* File automatically generated by Hacspec *)\n\
   From Coq Require Import ZArith.\n\
   Require Import List.\n\
   Import List.ListNotations.\n\
   Open Scope Z_scope.\n\
   Open Scope bool_scope.\n\
   Require Import Ascii.\n\
   Require Import String.\n\
   Require Import Coq.Floats.Floats.\n\
   From RecordUpdate Require Import RecordSet.\n\
   Import RecordSetNotations.\n\
   From Core Require Import Core.\n\n"

module BasePrinter = Generic_printer.Make (InputLanguage)

module Make
    (Default : sig
      val default : string -> string
    end)
    (Attrs : Attrs.WITH_ITEMS) =
struct
  open PPrint

  (* Default bodies for printer methods that are not implemented yet. *)
  let default_string_for name =
    Printf.sprintf "TODO: please implement the method `%s`" name

  let default_document_for name = string (default_string_for name)

  (* [concat_map_with ~pre ~post f l] prints [f x] for every [x] of [l],
     wrapped between the [pre] and [post] decorations. *)
  let concat_map_with ?(pre = empty) ?(post = empty) f l =
    concat_map (fun item -> pre ^^ f item ^^ post) l

  (* Same, for lists that are already documents. *)
  let concat_with ?(pre = empty) ?(post = empty) l =
    concat_map_with ~pre ~post (fun doc -> doc) l

  (* Space-prefixed concatenation of printed AST nodes. *)
  let concat_spaced_doc docs = concat_map_with ~pre:space (fun x -> x#p) docs

  (* Combinators producing the concrete syntax of Coq vernacular commands
     ([Definition], [Record], [Notation], ...). *)
  module CoqNotation = struct
    (* Common layout of "<Keyword> name generics params : typ := body.";
       [indent] controls the nesting depth of the body. *)
    let definition_struct keyword indent name generics params typ body =
      keyword ^^ space ^^ name ^^ generics
      ^^ concat_with ~pre:space params
      ^^ space ^^ colon ^^ space ^^ typ ^^ space ^^ string ":="
      ^^ nest indent (break 1 ^^ body)
      ^^ dot

    (* "<Keyword> name generics params : statement. Proof. Admitted." — all
       proof obligations are admitted. *)
    let proof_struct keyword name generics params statement =
      keyword ^^ space ^^ name ^^ generics
      ^^ concat_with ~pre:space params
      ^^ space ^^ colon
      ^^ nest 2 (break 1 ^^ statement ^^ dot)
      ^^ break 1 ^^ string "Proof" ^^ dot ^^ space ^^ string "Admitted" ^^ dot

    let definition = definition_struct (string "Definition") 2
    let fixpoint = definition_struct (string "Fixpoint") 2
    let inductive = definition_struct (string "Inductive") 0
    let record = definition_struct (string "Record") 2
    let instance = definition_struct (string "Instance") 2
    let class_ = definition_struct (string "Class") 2
    let lemma = proof_struct (string "Lemma")
    let comment body = !^"(*" ^^ space ^^ body ^^ space ^^ !^"*)"

    (* "Arguments name (_) {_} ... ." — one marker per argument: [(_)] for
       explicit, [{_}] for implicit.  Emits nothing for an empty list. *)
    let arguments name (explicitness : bool list) =
      if List.is_empty explicitness then empty
      else
        !^"Arguments" ^^ space ^^ name
        ^^ concat_map_with ~pre:space
             (fun explicit -> string (if explicit then "(_)" else "{_}"))
             explicitness
        ^^ dot

    (* "Notation "pattern" := value." *)
    let notation pattern value =
      !^"Notation" ^^ space ^^ dquotes pattern ^^ space ^^ !^":=" ^^ space
      ^^ value ^^ dot

    (* Notation whose pattern is a bare name, quoted as 'name'. *)
    let notation_name name value =
      notation (string "'" ^^ name ^^ string "'") value
  end

  (** Local alias for the generic printer's wrapped-AST-node type. *)
  type ('get_span_data, 'a) object_type =
    ('get_span_data, 'a) BasePrinter.Gen.object_type

  class printer =
    object (self)
      inherit BasePrinter.base

      (* Conventions used throughout this class:
         - [match witness with _ -> .] marks cases statically ruled out by
           [InputLanguage] ([.] is OCaml's refutation case);
         - methods returning [default_document_for "..."] are stubs that
           still need a real implementation. *)

      (** Coq spelling of hax primitive identifiers. *)
      method private primitive_to_string (id : primitive_ident) : document =
        match id with
        | Deref -> default_document_for "(TODO: Deref)"
        | Cast -> string "cast"
        | LogicalOp op -> (
            match op with And -> string "andb" | Or -> string "orb")

      method arm ~arm ~span:_ = arm#p

      (** One match arm: [pat => body] (guards are ignored). *)
      method arm' ~super:_ ~arm_pat ~body ~guard:_ =
        arm_pat#p ^^ space ^^ string "=>" ^^ nest 2 (break 1 ^^ body#p)

      method attrs x1 = default_document_for "attrs"

      method binding_mode_ByRef _x1 _x2 =
        default_document_for "binding_mode_ByRef"

      method binding_mode_ByValue = default_document_for "binding_mode_ByValue"
      method borrow_kind_Mut _x1 = default_document_for "borrow_kind_Mut"
      method borrow_kind_Shared = default_document_for "borrow_kind_Shared"
      method borrow_kind_Unique = default_document_for "borrow_kind_Unique"

      (** Array literals: [[a; b; c]]. *)
      method common_array x1 = brackets (separate (semi ^^ space) x1)

      method dyn_trait_goal ~trait:_ ~non_self_args:_ =
        default_document_for "dyn_trait_goal"

      (* Error nodes are printed inline, tagged with a Coq comment. *)
      method error_expr x1 = parens (string x1 ^^ string "(* ERROR_EXPR *)")
      method error_item x1 = parens (string x1 ^^ string "(* ERROR_ITEM *)")
      method error_pat x1 = parens (string x1 ^^ string "(* ERROR_PAT *)")
      method expr ~e ~span:_ ~typ = e#p

      method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness =
        match witness with _ -> .

      (** Function application: [f (a1) (a2) ...]. *)
      method expr'_App_application ~super:_ ~f ~args ~generics:_ =
        f#p ^^ concat_map_with ~pre:space (fun x -> parens x#p) args

      method expr'_App_constant ~super:_ ~constant ~generics:_ = constant#p

      (** Field projection printed as application: [field e]. *)
      method expr'_App_field_projection ~super:_ ~field ~e =
        field#p ^^ space ^^ e#p

      (** Tuple projection.  Tuples are encoded as left-nested pairs, so
          component [nth] of a [size]-tuple is reached with [size - 1 - nth]
          applications of [fst], under one [snd] (omitted for component 0). *)
      method expr'_App_tuple_projection ~super:_ ~size ~nth ~e =
        (* When the scrutinee is a literal constructor, trust its actual
           field count over the [size] argument. *)
        let size =
          match e#v.e with
          | Construct { constructor; is_record; is_struct; fields; base } ->
              List.length fields
          | _ -> size (* TODO: Size argument incorrect? *)
        in
        List.fold_right ~init:e#p
          ~f:(fun x y -> parens (x ^^ y))
          ((if Stdlib.(nth != 0) then [ string "snd" ] else [])
          @
          if size - 1 - nth > 0 then
            List.init (size - 1 - nth) ~f:(fun _ -> string "fst")
          else [])

      (** Type ascription: [e : typ]. *)
      method expr'_Ascription ~super:_ ~e ~typ =
        e#p ^^ space ^^ colon ^^ space ^^ typ#p

      method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness =
        match witness with _ -> .

      method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness =
        match witness with _ -> .

      method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness =
        match witness with _ -> .

      method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness =
        match witness with _ -> .

      (** Closures: [fun params => body]. *)
      method expr'_Closure ~super:_ ~params ~body ~captures:_ =
        !^"fun" ^^ concat_spaced_doc params ^^ space ^^ !^"=>" ^^ space
        ^^ nest 2 (break 1 ^^ body#p)

      (** Constructor application.  Record structs updated from a [base]
          expression use RecordUpdate syntax ([base <| f := v |> ...]);
          field-less constructors print bare; struct/tuple-like constructors
          print positionally; the remaining record (enum-variant) case uses
          Coq record syntax ([C {| C_f := v; ... |}]). *)
      method expr'_Construct_inductive ~super:_ ~constructor ~is_record
          ~is_struct ~fields ~base =
        match (is_record, is_struct, base, fields) with
        | true, true, Some x, _ ->
            x#p
            ^^ concat_map_with ~pre:space
                 (fun x ->
                   string "<|" ^^ constructor#p ^^ (fst x)#p ^^ space ^^ !^":="
                   ^^ space ^^ (snd x)#p ^^ space ^^ string "|>")
                 fields
        | true, true, None, [] | false, true, _, [] | false, false, _, [] ->
            constructor#p
        | true, true, None, _ | false, true, _, _ | false, false, _, _ ->
            constructor#p ^^ space
            ^^ separate_map space (fun x -> parens (snd x)#p) fields
        | _ ->
            constructor#p ^^ space ^^ string "{|" ^^ space
            ^^ separate_map (semi ^^ space)
                 (fun (ident, exp) ->
                   constructor#p ^^ !^"_" ^^ ident#p ^^ space ^^ string ":="
                   ^^ space ^^ parens exp#p)
                 fields
            ^^ space ^^ string "|}"

      (** Tuple expression: the empty tuple is Coq's [tt] (unit); otherwise
          the components are comma-separated inside parentheses. *)
      method expr'_Construct_tuple ~super:_ ~components =
        (* [List.is_empty] is O(1) and avoids physical equality ([==]) on the
           length, matching the idiom used elsewhere in this printer. *)
        if List.is_empty components then !^"tt"
        else parens (separate_map comma (fun x -> x#p) components)

      method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness =
        match witness with _ -> .

      method expr'_EffectAction ~super:_ ~action:_ ~argument:_ =
        default_document_for "expr'_EffectAction"

      (* TODO: prefix here? *)
      method expr'_GlobalVar_concrete ~super:_ x2 = x2#p

      method expr'_GlobalVar_primitive ~super:_ x2 = self#primitive_to_string x2

      (** [if]/[then]/[else]; a missing else-branch prints as [tt]. *)
      method expr'_If ~super:_ ~cond ~then_ ~else_ =
        string "if"
        ^^ nest 2 (break 1 ^^ cond#p)
        ^^ break 1 ^^ string "then"
        ^^ nest 2 (break 1 ^^ then_#p)
        ^^ break 1 ^^ string "else"
        ^^ nest 2
             (break 1 ^^ match else_ with Some x -> x#p | None -> string "tt")

      (** [let lhs := rhs in body]; monadic binding information is dropped. *)
      method expr'_Let ~super:_ ~monadic:_ ~lhs ~rhs ~body =
        string "let" ^^ space ^^ lhs#p ^^ space ^^ string ":=" ^^ space ^^ rhs#p
        ^^ space ^^ string "in" ^^ break 1 ^^ body#p

      (** Literals carry an explicit ascription of the expression's type. *)
      method expr'_Literal ~super x2 =
        parens
          (x2#p ^^ space ^^ colon ^^ space
         ^^ (self#_do_not_override_lazy_of_ty AstPos_expr'_Literal_x0 super.typ)
              #p)

      method expr'_LocalVar ~super:_ x2 = x2#p

      (* NOTE(review): emits a bespoke "kind [cf] state of (body)" form —
         confirm the corresponding notation exists in the Coq support
         library. *)
      method expr'_Loop ~super:_ ~body ~kind ~state ~control_flow ~label:_
          ~witness:_ =
        kind#p ^^ space
        ^^ brackets
             (Option.value ~default:(string "is_none")
                (Option.map ~f:(fun x -> x#p) control_flow))
        ^^ Option.value ~default:(string "default")
             (Option.map ~f:(fun x -> x#p) state)
        ^^ space ^^ string "of" ^^ space
        ^^ parens (nest 2 (break 1 ^^ body#p))

      method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ =
        default_document_for "expr'_MacroInvokation"

      (** [match scrutinee with | arm ... end]. *)
      method expr'_Match ~super:_ ~scrutinee ~arms =
        string "match" ^^ space ^^ scrutinee#p ^^ space ^^ string "with"
        ^^ break 1
        ^^ concat_map_with
             ~pre:(string "|" ^^ space)
             ~post:(break 1)
             (fun x -> x#p)
             arms
        ^^ string "end"

      method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness =
        match witness with _ -> .

      method expr'_Quote ~super:_ _x2 = default_document_for "expr'_Quote"
      method expr'_Return ~super:_ ~e:_ ~witness = match witness with _ -> .

      method cf_kind_BreakOrReturn =
        default_document_for "cf_kind_BreakOrReturn"

      method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly"

      (* Field name is dropped: only the sub-pattern is printed. *)
      method field_pat ~field ~pat = pat#p

      method generic_constraint_GCLifetime _x1 _x2 =
        default_document_for "generic_constraint_GCLifetime"

      (* Constraints print as Coq implicit-generalization binders: `{...}. *)
      method generic_constraint_GCProjection x1 = string "`" ^^ braces x1#p
      method generic_constraint_GCType x1 = string "`" ^^ braces x1#p

      (** Generic parameter binder: [ident : kind]. *)
      method generic_param ~ident ~span:_ ~attrs:_ ~kind =
        ident#p ^^ space ^^ colon ^^ space ^^ kind#p

      method generic_param_kind_GPConst ~typ = typ#p

      method generic_param_kind_GPLifetime ~witness =
        match witness with _ -> .

      method generic_param_kind_GPType = string "Type"
      method generic_value_GConst x1 = x1#p

      method generic_value_GLifetime ~lt:_ ~witness =
        match witness with _ -> .

      method generic_value_GType x1 = parens x1#p

      (** Generics: every parameter becomes an implicit `{...} binder,
          followed by the constraint binders. *)
      method generics ~params ~constraints =
        let params_document =
          concat_map_with ~pre:space (fun x -> string "`" ^^ braces x#p) params
        in
        let constraints_document = concat_spaced_doc constraints in
        params_document ^^ constraints_document

      method guard ~guard:_ ~span:_ = default_document_for "guard"

      method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness =
        match witness with _ -> .

      (* Impl expressions print only their goal; [kind] is ignored. *)
      method impl_expr ~kind:_ ~goal = goal#p

      method impl_expr_kind_Builtin _x1 =
        default_document_for "impl_expr_kind_Builtin"

      method impl_expr_kind_Concrete _x1 =
        default_document_for "impl_expr_kind_Concrete"

      method impl_expr_kind_Dyn = default_document_for "impl_expr_kind_Dyn"

      method impl_expr_kind_ImplApp ~impl:_ ~args:_ =
        default_document_for "impl_expr_kind_ImplApp"

      method impl_expr_kind_LocalBound ~id:_ =
        default_document_for "impl_expr_kind_LocalBound"

      method impl_expr_kind_Parent ~impl:_ ~ident:_ =
        default_document_for "impl_expr_kind_Parent"

      method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ =
        default_document_for "impl_expr_kind_Projection"

      method impl_expr_kind_Self = default_document_for "impl_expr_kind_Self"
      method impl_ident ~goal ~name:_ = goal#p

      (** One member of an [Instance] record: [ident := value;]. *)
      method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v ~ii_ident ~ii_attrs:_ =
        ii_ident#p ^^ space ^^ string ":=" ^^ space ^^ ii_v#p ^^ semi

      (** Impl-item function: with no parameters the body prints alone,
          otherwise it is wrapped in a [fun params => ...] abstraction. *)
      method impl_item'_IIFn ~body ~params =
        (* [List.is_empty] instead of [List.length ... == 0]: O(1) and no
           physical equality on ints (consistent with the rest of the file). *)
        if List.is_empty params then body#p
        else
          string "fun" ^^ space ^^ concat_spaced_doc params ^^ string "=>"
          ^^ nest 2 (break 1 ^^ body#p)

      method impl_item'_IIType ~typ ~parent_bounds:_ = typ#p

      (** Items are printed followed by a blank-line separator. *)
      method item ~v ~span:_ ~ident:_ ~attrs:_ = v#p ^^ break 1

      (** An alias item becomes a Coq [Notation]. *)
      method item'_Alias ~super:_ ~name ~item =
        CoqNotation.notation_name name#p (parens item#p)

      (** Function items.  Lemma-attributed items become [Lemma ... Proof.
          Admitted.]; recursive functions become [Fixpoint]; everything else
          becomes [Definition].  A [requires] attribute is threaded as an
          extra hypothesis binder and an [ensures] attribute yields a
          companion [<name>_ensures] lemma after the definition. *)
      method item'_Fn ~super ~name ~generics ~body ~params ~safety:_ =
        (* TODO: Why is type not available here ? *)
        (* The item is recursive iff its own identifier occurs in its body;
           Coq then requires [Fixpoint] instead of [Definition]. *)
        let is_rec =
          Set.mem
            (U.Reducers.collect_concrete_idents#visit_expr () body#v)
            name#v
        in
        let typ =
          (self#_do_not_override_lazy_of_ty AstPos_item'_Fn_body body#v.typ)#p
        in
        (* Plain binding patterns print as-is; any other pattern gets a
           leading ['] so Coq parses it as a destructuring binder. *)
        let params =
          List.map
            ~f:(fun x ->
              match x#v with
              | {
               pat =
                 {
                   p = PBinding { mut; mode; var; typ = _; subpat };
                   span : span;
                   typ = _;
                 };
               typ;
               typ_span;
               attrs;
              } ->
                  x#p
              | _ -> string "'" ^^ x#p)
            params
        in

        let get_expr_of kind f : document option =
          Attrs.associated_expr kind super.attrs
          |> Option.map ~f:(self#entrypoint_expr >> f)
        in
        let get_fn_of kind f : document option =
          Attrs.associated_fn kind super.attrs
          |> Option.map ~f:(fun (g, p, x) ->
                 f (g, List.hd_exn (List.rev p), self#entrypoint_expr x))
        in
        (* Pre/postconditions are rendered as boolean equations [e = true]. *)
        let requires =
          get_expr_of Requires (fun x ->
              x ^^ space ^^ string "=" ^^ space ^^ string "true")
        in
        let ensures =
          get_expr_of Ensures (fun x ->
              x ^^ space ^^ string "=" ^^ space ^^ string "true")
        in
        (* [ensures] as a function of the result: bind the result pattern to
           an application of the item, then state the postcondition. *)
        let ensures_fn =
          get_fn_of Ensures (fun (g, p, x) ->
              string "let" ^^ space ^^ self#entrypoint_pat p.pat ^^ space
              ^^ string ":=" ^^ space ^^ string "@" ^^ name#p ^^ space
              ^^ concat_map_with (fun x -> x) params
              ^^ Option.value ~default:empty
                   (Option.map
                      ~f:(fun r -> space ^^ string "H_requires")
                      requires)
              ^^ space ^^ string "in" ^^ break 1 ^^ x ^^ space ^^ string "="
              ^^ space ^^ string "true")
        in
        let is_lemma = Attrs.lemma super.attrs in
        (* Companion lemma [<name>_ensures]; a [requires] clause becomes a
           [forall (H_requires : ...)] premise.  (This document and the
           requires-as-binder below were previously duplicated verbatim in
           the recursive and non-recursive branches.) *)
        let ensures_lemma =
          Option.value ~default:empty
            (Option.map
               ~f:(fun ensure ->
                 break 1
                 ^^ CoqNotation.lemma
                      (name#p ^^ string "_" ^^ string "ensures")
                      generics#p params
                      (Option.value ~default:empty
                         (Option.map
                            ~f:(fun r ->
                              string "forall (H_requires : "
                              ^^ r ^^ string ")," ^^ break 1)
                            requires)
                      ^^ ensure))
               ensures_fn)
        in
        (* A [requires] clause is threaded as an extra implicit argument. *)
        let params_with_requires =
          params
          @ Option.value ~default:[]
              (Option.map ~f:(fun x -> [ string "`" ^^ braces x ]) requires)
        in
        if is_lemma then
          CoqNotation.lemma name#p generics#p params
            (Option.value ~default:empty requires
            ^^ space ^^ !^"->" ^^ break 1
            ^^ Option.value ~default:empty ensures)
        else
          (* [Fixpoint] vs [Definition] is the only difference between the
             recursive and non-recursive cases. *)
          (if is_rec then CoqNotation.fixpoint else CoqNotation.definition)
            name#p generics#p params_with_requires typ body#p
          ^^ ensures_lemma

      method item'_HaxError ~super:_ _x2 = default_document_for "item'_HaxError"

      method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_
          ~witness:_ =
        default_document_for "item'_IMacroInvokation"

      (** Trait impls become [Instance]s.  The instance name is suffixed with
          a hash of the whole item to disambiguate several impls of the same
          trait; items marked as erased print nothing.
          NOTE(review): the "implaabbcc_" member prefix looks like a
          placeholder — confirm it matches the names generated for trait
          members elsewhere. *)
      method item'_Impl ~super ~generics ~self_ty ~of_trait ~items
          ~parent_bounds:_ ~safety:_ =
        let name, args = of_trait#v in
        if Attrs.is_erased super.attrs then empty
        else
          CoqNotation.instance
            (name#p ^^ string "_"
            ^^ string (Int.to_string ([%hash: item] super)))
            generics#p []
            (name#p ^^ concat_map_with ~pre:space (fun x -> parens x#p) args)
            (braces
               (nest 2
                  (concat_map_with
                     ~pre:
                       (break 1
                       ^^ string ("implaabbcc_" ^ (RenderId.render name#v).name)
                       ^^ !^"_")
                     (fun x -> x#p)
                     items)
               ^^ break 1))

      method item'_NotImplementedYet = string "(* NotImplementedYet *)"

      method item'_Quote ~super:_ ~quote:_ ~origin:_ =
        default_document_for "item'_Quote"

      (** Traits become [Class]es; a trailing [Arguments] directive makes the
          type parameters explicit and the constraint arguments implicit. *)
      method item'_Trait ~super:_ ~name ~generics ~items ~safety:_ =
        let _, params, constraints = generics#v in
        CoqNotation.class_ name#p
          (concat_map_with ~pre:space (fun x -> parens x#p) params
          ^^ concat_map_with ~pre:space (fun x -> x#p) constraints)
          [] !^"Type"
          (braces
             (nest 2 (concat_map_with ~pre:(break 1) (fun x -> x#p) items)
             ^^ break 1))
        ^^ break 1
        ^^ CoqNotation.arguments name#p
             (List.map ~f:(fun _ -> true) params
             @ List.map ~f:(fun _ -> false) constraints)

      (** Type aliases become [Notation]s. *)
      method item'_TyAlias ~super:_ ~name ~generics:_ ~ty =
        CoqNotation.notation_name name#p ty#p

      (** Structs.  Emits, in order:
          - a [Record <Ctor>_record] whose fields are prefixed with the
            constructor name;
          - [Arguments] directives (implicit generic arguments) for the
            record constructor and each projection;
          - when the struct has fields, an exported [Settable] instance so
            RecordUpdate's [<| ... |>] syntax works;
          - for tuple structs, a [Notation] aliasing the rendered constructor
            name to [Build_<Ctor>_record].
          Fix: the [#[export]] attribute is now only printed together with
          the [Instance] it decorates — previously a field-less struct
          emitted a dangling [#[export]] with no following command. *)
      method item'_Type_struct ~super:_ ~type_name:name ~constructor_name
          ~generics ~tuple_struct ~arguments =
        (* Kept for reference: the all-params-explicit variant. *)
        let _arguments_explicity_with_ty =
          List.map ~f:(fun _ -> true) generics#v.params
          @ List.map ~f:(fun _ -> false) generics#v.constraints
        in
        let arguments_explicity_without_ty =
          List.map ~f:(fun _ -> false) generics#v.params
          @ List.map ~f:(fun _ -> false) generics#v.constraints
        in
        let base_name = (RenderId.render constructor_name#v).name ^ "_record" in
        let name_doc = constructor_name#p ^^ string "_record" in
        CoqNotation.record name_doc
          (concat_map_with ~pre:space
             (fun x -> parens (self#entrypoint_generic_param x))
             generics#v.params
          ^^ concat_map_with ~pre:space
               (fun x -> self#entrypoint_generic_constraint x)
               generics#v.constraints)
          [] (string "Type")
          (braces
             (nest 2
                (concat_map
                   (fun (ident, typ, attr) ->
                     break 1 ^^ constructor_name#p ^^ string "_" ^^ ident#p
                     ^^ space ^^ colon ^^ space ^^ typ#p ^^ semi)
                   arguments)
             ^^ break 1))
        ^^ break 1
        ^^ CoqNotation.arguments (!^"Build_" ^^ name_doc)
             arguments_explicity_without_ty
        ^^ concat_map_with ~pre:(break 1)
             (fun (ident, typ, attr) ->
               CoqNotation.arguments
                 (constructor_name#p ^^ !^"_" ^^ ident#p)
                 arguments_explicity_without_ty)
             arguments
        ^^ (if List.is_empty arguments then empty
            else
              break 1 ^^ !^"#[export]" ^^ space
              ^^ CoqNotation.instance
                   (string "settable" ^^ string "_" ^^ name_doc)
                   generics#p []
                   (!^"Settable" ^^ space ^^ !^"_")
                   (string "settable!" ^^ space
                   ^^ parens
                        (!^"Build_" ^^ name_doc
                        ^^ concat_map_with ~pre:space
                             (fun (x : generic_param) ->
                               match x with
                               | { ident; _ } ->
                                   let idx =
                                     (self#_do_not_override_lazy_of_local_ident
                                        AstPos_item'_Type_generics ident)
                                       #p
                                   in
                                   parens
                                     (idx ^^ space ^^ !^":=" ^^ space ^^ idx))
                             generics#v.params)
                   ^^ space ^^ string "<"
                   ^^ separate_map (semi ^^ space)
                        (fun (ident, typ, attr) ->
                          constructor_name#p ^^ !^"_" ^^ ident#p)
                        arguments
                   ^^ string ">"))
        ^^
        if tuple_struct then
          break 1
          ^^ CoqNotation.notation_name (string base_name)
               (string "Build_" ^^ name_doc)
        else empty

      (** Enums.  Record variants first get a standalone [_record] struct
          (via [item'_Type_struct]); then the [Inductive] itself is emitted
          with one constructor per variant, followed by one [Arguments]
          directive per variant making the generic arguments implicit. *)
      method item'_Type_enum ~super ~name ~generics ~variants =
        let arguments_explicity_without_ty =
          List.map ~f:(fun _ -> false) generics#v.params
          @ List.map ~f:(fun _ -> false) generics#v.constraints
        in

        concat_map_with ~post:(break 1)
          (fun x ->
            self#item'_Type_struct ~super
              ~constructor_name:
                (self#_do_not_override_lazy_of_concrete_ident
                   AstPos_variant__name x#v.name)
              ~type_name:name ~generics ~tuple_struct:false
              ~arguments:
                (List.map
                   ~f:(fun (ident, typ, attrs) ->
                     ( self#_do_not_override_lazy_of_concrete_ident
                         AstPos_variant__arguments ident,
                       self#_do_not_override_lazy_of_ty
                         AstPos_variant__arguments typ,
                       self#_do_not_override_lazy_of_attrs AstPos_variant__attrs
                         attrs ))
                   x#v.arguments))
          (List.filter ~f:(fun x -> x#v.is_record) variants)
        ^^ CoqNotation.inductive name#p
             (concat_map_with ~pre:space
                (fun x -> parens (self#entrypoint_generic_param x))
                generics#v.params
             ^^ concat_map_with ~pre:space
                  (fun x -> self#entrypoint_generic_constraint x)
                  generics#v.constraints)
             [] (string "Type")
             (separate_map (break 1)
                (fun x ->
                  string "|" ^^ space ^^ x#p
                  ^^
                  (* Record variants additionally take the generic parameters
                     of the enclosing type. *)
                  if x#v.is_record then
                    concat_map_with ~pre:space
                      (fun (x : generic_param) ->
                        (self#_do_not_override_lazy_of_local_ident
                           AstPos_item'_Type_generics x.ident)
                          #p)
                      generics#v.params
                    ^^ space ^^ !^"->" ^^ space ^^ !^"_"
                  else empty)
                variants)
        ^^ concat_map_with ~pre:(break 1)
             (fun v ->
               CoqNotation.arguments
                 (self#_do_not_override_lazy_of_concrete_ident
                    AstPos_variant__arguments v#v.name)
                   #p
                 arguments_explicity_without_ty)
             variants

      (** [use] items become Coq [Require Import]/[Export] directives.
          External and empty paths produce nothing; [crate]/[super] prefixes
          are resolved against [current_namespace]. *)
      method item'_Use ~super:_ ~path ~is_external ~rename:_ =
        (* [List.is_empty] instead of [List.length ... == 0]: O(1) and no
           physical equality on ints. *)
        if List.is_empty path || is_external then empty
        else
          (* Crate name, capitalized, taken from the current namespace; falls
             back to a placeholder when unavailable. *)
          let crate =
            String.capitalize
              (Option.value ~default:"(TODO CRATE)"
                 (Option.bind ~f:List.hd current_namespace))
          in
          let concat_capitalize l =
            String.concat ~sep:"_" (List.map ~f:String.capitalize l)
          in
          (* The final segment is wrapped as a module type: [Mod (t_Last)]. *)
          let concat_capitalize_include l =
            concat_capitalize (List.drop_last_exn l)
            ^ " (t_" ^ List.last_exn l ^ ")"
          in
          let path_string =
            match path with
            | "crate" :: xs -> concat_capitalize_include (crate :: xs)
            | "super" :: xs ->
                concat_capitalize
                  (crate
                   :: List.drop_last_exn
                        (Option.value ~default:[]
                           (Option.bind ~f:List.tl current_namespace))
                  @ xs)
            | [ a ] -> a
            | xs -> concat_capitalize_include xs
          in
          if String.is_empty path_string then empty
          else
            string "From" ^^ space ^^ string crate ^^ space
            ^^ string "Require Import" ^^ space ^^ string path_string ^^ dot
            ^^ break 1 ^^ string "Export" ^^ space ^^ string path_string ^^ dot

      method item_quote_origin ~item_kind:_ ~item_ident:_ ~position:_ =
        default_document_for "item_quote_origin"

      method lhs_LhsArbitraryExpr ~e:_ ~witness = match witness with _ -> .

      method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness =
        match witness with _ -> .

      method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness =
        match witness with _ -> .

      method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness =
        match witness with _ -> .

      method lhs_LhsLocalVar ~var:_ ~typ:_ =
        default_document_for "lhs_LhsLocalVar"

      method lhs_LhsVecRef ~e:_ ~typ:_ ~witness:_ =
        default_document_for "lhs_LhsVecRef"

      (* Literals are tagged with Coq notation scopes (%char/%float/%string). *)
      method literal_Bool x1 = string (if x1 then "true" else "false")

      (* NOTE(review): [Char.escaped] produces OCaml escaping, which is not
         identical to Coq's — confirm for non-printable characters. *)
      method literal_Char x1 =
        string "\"" ^^ string (Char.escaped x1) ^^ string "\"" ^^ string "%char"

      method literal_Float ~value ~negative ~kind:_ =
        (if negative then parens (!^"-" ^^ string value) else string value)
        ^^ string "%float"

      (* The integer kind (width/signedness) is not printed. *)
      method literal_Int ~value ~negative ~kind =
        (if negative then !^"-" else empty) ^^ string value

      (* NOTE(review): embedded double quotes in [x1] are not escaped. *)
      method literal_String x1 = string "\"" ^^ string x1 ^^ string "\"%string"

      method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_ ~witness
          =
        default_document_for "loop_kind_ForIndexLoop"

      (** For-loop header fragment: [{it} inP? [pat]]. *)
      method loop_kind_ForLoop ~pat ~it ~witness =
        braces it#p ^^ space ^^ string "inP?" ^^ space ^^ brackets pat#p

      method loop_kind_UnconditionalLoop =
        default_document_for "loop_kind_UnconditionalLoop"

      method loop_kind_WhileLoop ~condition:_ ~witness:_ =
        default_document_for "loop_kind_WhileLoop"

      (** Loop state: [(init state bpat)]. *)
      method loop_state ~init ~bpat ~witness:_ =
        parens (init#p ^^ space ^^ !^"state" ^^ space ^^ bpat#p)

      (** A module is its items separated by blank lines. *)
      method modul x1 = separate_map (break 1) (fun x -> x#p) x1

      (** Function parameter: [(pat : typ)]. *)
      method param ~pat ~typ ~typ_span:_ ~attrs:_ =
        parens (pat#p ^^ space ^^ colon ^^ space ^^ typ#p)

      method pat ~p ~span:_ ~typ:_ = p#p

      method pat'_PAscription ~super:_ ~typ ~typ_span:_ ~pat =
        pat#p ^^ space ^^ colon ^^ space ^^ typ#p

      (* Only the variable name is kept; mutability, mode and sub-patterns
         are dropped. *)
      method pat'_PBinding ~super:_ ~mut:_ ~mode:_ ~var ~typ:_ ~subpat:_ = var#p
      method pat'_PConstant ~super:_ ~lit = lit#p

      (** Constructor patterns, mirroring [expr'_Construct_inductive]. *)
      method pat'_PConstruct_inductive ~super:_ ~constructor ~is_record
          ~is_struct ~fields =
        match (is_record, is_struct) with
        | true, true ->
            constructor#p ^^ space
            ^^ parens
                 (separate_map (comma ^^ space)
                    (fun field_pat -> (snd field_pat)#p)
                    fields)
        | _, true ->
            string "{|"
            ^^ separate_map (semi ^^ space)
                 (fun (ident, exp) ->
                   constructor#p ^^ ident#p ^^ space ^^ string ":=" ^^ space
                   ^^ parens exp#p)
                 fields
            ^^ string "|}"
        | _, false ->
            constructor#p
            ^^ concat_map_with ~pre:space
                 (fun (ident, exp) -> parens exp#p)
                 fields

      method pat'_PConstruct_tuple ~super:_ ~components =
        (* TODO: Only add `'` if you are a top-level pattern *)
        parens (separate_map comma (fun x -> x#p) components)

      method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ =
        default_document_for "pat'_PDeref"

      method pat'_PWild = string "_"
      method printer_name = "Coq printer"

      (** Projection predicate: [_.(assoc_item) = typ]. *)
      method projection_predicate ~impl:_ ~assoc_item ~typ =
        string "_" (* TODO: name of impl#p *) ^^ dot
        ^^ parens assoc_item#p ^^ space ^^ string "=" ^^ space ^^ typ#p

      method safety_kind_Safe = default_document_for "safety_kind_Safe"
      method safety_kind_Unsafe _x1 = default_document_for "safety_kind_Unsafe"

      method supported_monads_MException _x1 =
        default_document_for "supported_monads_MException"

      method supported_monads_MOption =
        default_document_for "supported_monads_MOption"

      method supported_monads_MResult _x1 =
        default_document_for "supported_monads_MResult"

      (** Trait goal: the trait applied to its arguments. *)
      method trait_goal ~trait ~args =
        trait#p ^^ concat_map (fun x -> space ^^ x#p) args

      (** One member of a trait [Class].  Constraints are split in two by
          [filter_constraints]: those passing the predicate print inline as
          binders, the rest are emitted after the member as [_ :: c;] lines.
          Members with a default body use [:=], others are typed with [:]. *)
      method trait_item ~ti_span:_ ~ti_generics ~ti_v ~ti_ident ~ti_attrs:_ =
        let _, params, constraints = ti_generics#v in
        let generic_params = concat_map (fun x -> space ^^ x#p) params in
        (* Keeps everything except [GCType] constraints whose associated type
           belongs to a different item.  NOTE(review): the [==] in the kept
           case comes from [Concrete_ident]'s scope (physical equality if
           unshadowed) — confirm this is the intended identifier equality. *)
        let filter_constraints = function
          | GCProjection { impl = { goal = { trait; _ }; _ }; _ } -> true
          | GCType
              {
                goal = { trait; args = [ GType (TAssociatedType { item; _ }) ] };
                _;
              } ->
              Concrete_ident.(item == ti_ident#v)
          | _ -> true
        in
        let generic_constraints_other =
          concat_map
            (fun x -> space ^^ self#entrypoint_generic_constraint x)
            (List.filter ~f:filter_constraints
               (List.map ~f:(fun x -> x#v) constraints))
        in
        let generic_constraints_self =
          concat_map
            (fun x ->
              break 1 ^^ string "_" ^^ space ^^ string "::" ^^ space
              ^^ self#entrypoint_generic_constraint x
              ^^ semi)
            (List.filter
               ~f:(fun x -> not (filter_constraints x))
               (List.map ~f:(fun x -> x#v) constraints))
        in
        ti_ident#p ^^ generic_params ^^ generic_constraints_other ^^ space
        ^^ (match ti_v#v with TIDefault _ -> string ":=" | _ -> colon)
        ^^ space ^^ ti_v#p ^^ semi ^^ generic_constraints_self

      (** Default trait members print as a lambda over their parameters. *)
      method trait_item'_TIDefault ~params ~body ~witness:_ =
        (if List.is_empty params then empty
         else
           string "fun" ^^ space
           ^^ separate_map space (fun x -> x#p) params
           ^^ space ^^ string "=>")
        ^^ nest 2 (break 1 ^^ body#p)

      method trait_item'_TIFn x1 = x1#p
      method trait_item'_TIType x1 = string "Type"

      (** Type application: [typ (g1) (g2) ...]. *)
      method ty_TApp_application ~typ ~generics =
        typ#p ^^ concat_map (fun x -> space ^^ parens x#p) generics

      (** Tuple type: [unit] when empty, otherwise a parenthesized product
          [(a * b * ...)]. *)
      method ty_TApp_tuple ~types =
        (* [List.is_empty] instead of [List.length ... == 0]: O(1) and no
           physical equality on ints. *)
        if List.is_empty types then string "unit"
        else parens (separate_map star (fun x -> self#entrypoint_ty x) types)

      (* Array type: [t_Array (ty) (length)]. *)
      method ty_TArray ~typ ~length =
        string "t_Array" ^^ space ^^ parens typ#p ^^ space ^^ parens length#p

      (* Arrow type: each domain type followed by "->", then the codomain. *)
      method ty_TArrow x1 x2 =
        concat_map (fun x -> x#p ^^ space ^^ string "->" ^^ space) x1 ^^ x2#p

      (* Associated types print as the item name alone; the impl is dropped. *)
      method ty_TAssociatedType ~impl:_ ~item = item#p
      method ty_TBool = string "bool"
      (* Rust [char] is mapped to Coq's [ascii]. *)
      method ty_TChar = string "ascii"
      (* [dyn Trait] objects are not supported: emit a placeholder document. *)
      method ty_TDyn ~witness:_ ~goals:_ = default_document_for "ty_TDyn"
      method ty_TFloat _x1 = string "float"

      (* Machine integers: [t_u8] .. [t_u128], [t_i8] .. [t_i128],
         [t_usize] / [t_isize]. *)
      method ty_TInt x1 =
        let { size; signedness } = x1 in
        let sign =
          match signedness with Unsigned -> "u" | Signed -> "i"
        in
        let width =
          match size with
          | S8 -> "8"
          | S16 -> "16"
          | S32 -> "32"
          | S64 -> "64"
          | S128 -> "128"
          | SSize -> "size"
        in
        string "t_" ^^ string sign ^^ string width

      method ty_TOpaque x1 = x1#p
      method ty_TParam x1 = x1#p
      (* Raw pointers cannot be translated: emit a placeholder document. *)
      method ty_TRawPointer ~witness:_ = default_document_for "ty_TRawPointer"

      (* References should not reach the printer (the pipeline runs
         [Phases.Drop_references]); emit a placeholder if one does. *)
      method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ =
        default_document_for "ty_TRef"

      (* Slice type: [t_Slice ty]. *)
      method ty_TSlice ~witness:_ ~ty = !^"t_Slice" ^^ space ^^ ty#p
      method ty_TStr = string "string"

      (* Print one enum variant:
         - record variants render as [Name : Name_record], deferring to a
           companion record type (inline field printing is still TODO);
         - unit variants render as the bare constructor name;
         - tuple variants render as a constructor signature
           [Name : t1 -> t2 -> ... -> _]. *)
      method item'_Enum_Variant ~name ~arguments ~is_record ~attrs:_ =
        if is_record then
          name#p ^^ space ^^ colon ^^ space ^^ name#p ^^ !^"_record" ^^ space
          (* TODO: print the record fields inline instead of referring to a
             [_record] companion type, roughly:
             concat_map
               (fun (ident, typ, _attr) ->
                 ident#p ^^ space ^^ colon ^^ space ^^ typ#p)
               arguments *)
        else if List.is_empty arguments then name#p
        else
          name#p ^^ space ^^ colon ^^ space
          ^^ separate_map
               (space ^^ string "->" ^^ space)
               (fun (_ident, typ, _attr) -> typ#p)
               arguments
          ^^ space ^^ string "->" ^^ space ^^ string "_"

      (* method quote (quote : quote) : document = empty *)
      (* Separator used when rendering module paths, Coq-style ("A.B.C"). *)
      method module_path_separator = "."

      (* Map a concrete identifier to its Coq spelling.  A handful of
         well-known operator/method names are rewritten to their typeclass
         projections; everything else is printed verbatim.  [f_to_string] is
         commented out in the output because it has no Coq counterpart. *)
      method concrete_ident ~local:_ id : document =
        match id.name with
        | "f_to_string" -> CoqNotation.comment !^"f_to_string"
        | name ->
            let renamed =
              match name with
              | "not" -> "negb"
              | "eq" -> "PartialEq_f_eq"
              | "lt" -> "PartialOrd_f_lt"
              | "gt" -> "PartialOrd_f_gt"
              | "le" -> "PartialOrd_f_le"
              | "ge" -> "PartialOrd_f_ge"
              | "rem" -> "Rem_f_rem"
              | "add" -> "Add_f_add"
              | "sub" -> "Sub_f_sub"
              | "mul" -> "Mul_f_mul"
              | "div" -> "Div_f_div"
              | "index" -> "Index_f_index"
              | other -> other
            in
            !^renamed
    end

  (* Entry point of this module: a finalized printer built from the class
     above, coerced to the generic [BasePrinter.printer] interface. *)
  let new_printer : BasePrinter.finalized_printer =
    BasePrinter.finalize (fun () -> (new printer :> BasePrinter.printer))
end

(* Minimal signature through which [Make] is consumed below: only the
   finalized printer is exposed. *)
module type S = sig
  val new_printer : BasePrinter.finalized_printer
end

(* Instantiate the printer functor for a given attribute context [M].  The
   local [open ... : S] restricts the instantiation to signature [S], so
   only [new_printer] escapes this scope. *)
let make (module M : Attrs.WITH_ITEMS) =
  let open (
    Make
      (struct
        let default x = x
      end)
      (M) :
      S) in
  new_printer

(* Render all items, grouped by namespace, into one [.v] file per namespace
   plus a [_CoqProject] file listing them.  Factoring [module_name] removes
   the duplicated name-mangling logic the original had in both the per-file
   and the [_CoqProject] branches. *)
let translate m _ ~bundles:_ (items : AST.item list) : Types.file list =
  let my_printer = make m in
  (* Coq module name for a namespace path: ["foo"; "bar"] -> "Foo_Bar". *)
  let module_name ns =
    String.concat ~sep:"_" (List.map ~f:(map_first_letter String.uppercase) ns)
  in
  let grouped_items =
    U.group_items_by_namespace items
    |> Map.to_alist
    |> List.filter_map ~f:(fun (_, items) ->
           let* first_item = List.hd items in
           Some ((RenderId.render first_item.ident).path, items))
  in
  let files =
    grouped_items
    |> List.map ~f:(fun (ns, items) ->
           let mod_name = module_name ns in
           let sourcemap, contents =
             let annotated = my_printer#entrypoint_modul items in
             let open Generic_printer.AnnotatedString in
             (* Prepend the fixed Coq prelude to the rendered module. *)
             let header = pure hardcoded_coq_headers in
             let annotated = concat header annotated in
             (to_sourcemap annotated, to_string annotated)
           in
           Types.
             { path = mod_name ^ ".v"; contents; sourcemap = Some sourcemap })
  in
  let coq_project =
    Types.
      {
        path = "_CoqProject";
        contents =
          "-R ./ " ^ "TODO" ^ "\n-arg -w\n-arg all\n\n"
          ^ String.concat ~sep:"\n"
              (List.rev
                 (List.map grouped_items ~f:(fun (ns, _) ->
                      module_name ns ^ ".v")));
        sourcemap = None;
      }
  in
  files @ [ coq_project ]

open Phase_utils

(* The phase pipeline lowering full Rust down to the fragment this backend
   can print.  The order is significant: each phase's output feature set
   must match the next phase's input.
   NOTE(review): [Phases.Reject.Continue] appears twice (before
   [Drop_references] and after [Local_mutation]); presumably an intermediate
   phase can reintroduce [continue] — confirm before deduplicating. *)
module TransformToInputLanguage =
  [%functor_application
  Phases.Reject.Unsafe(Features.Rust)
  |> Phases.Rewrite_local_self
  |> Phases.Reject.RawOrMutPointer
  |> Phases.And_mut_defsite
  |> Phases.Reconstruct_asserts
  |> Phases.Reconstruct_for_loops
  |> Phases.Direct_and_mut
  |> Phases.Reject.Arbitrary_lhs
  |> Phases.Drop_blocks
  |> Phases.Drop_match_guards
  |> Phases.Reject.Continue
  |> Phases.Drop_references
  |> Phases.Trivialize_assign_lhs
  |> Phases.Reconstruct_question_marks
  |> Side_effect_utils.Hoist
  |> Phases.Local_mutation
  |> Phases.Reject.Continue
  |> Phases.Cf_into_monads
  |> Phases.Reject.EarlyExit
  |> Phases.Drop_return_break_continue
  |> Phases.Functionalize_loops
  |> Phases.Reject.As_pattern
  |> Phases.Reject.Dyn
  |> Phases.Reject.Trait_item_default
  |> Phases.Reorder_fields
  |> Phases.Bundle_cycles
  |> Phases.Sort_items_namespace_wise
  |> SubtypeToInputLanguage
  |> Identity
  ]
  [@ocamlformat "disable"]

(* Run the full phase pipeline on freshly imported Rust items.  Backend
   options are currently unused by this backend. *)
let apply_phases (_bo : BackendOptions.t) (items : Ast.Rust.item list) :
    AST.item list =
  TransformToInputLanguage.ditems items


================================================
FILE: engine/backends/coq/coq/coq_backend.mli
================================================
open Hax_engine.Backend
(* The Coq backend implements the standard backend interface [T] and takes
   no backend-specific command-line options. *)
include T with module BackendOptions = UnitBackendOptions


================================================
FILE: engine/backends/coq/coq/dune
================================================
; Dune definition of the Coq backend library.
(library
 (name coq_backend)
 (package hax-engine)
 (libraries hax_engine base hacspeclib_macro_parser coq_ast)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; Disable all compiler warnings for this directory (-w -A).
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/coq/coq_ast.ml
================================================
open Hax_engine
open Utils
open Base

(* Notation hooks a concrete Coq dialect must provide: how integer literals
   are rendered and the names of the basic sorts. *)
module type Library = sig
  module Notation : sig
    (* [int_repr size value] renders an integer literal of bit size [size]. *)
    val int_repr : string -> string -> string
    val type_str : string
    val bool_str : string
    val unit_str : string
  end
end

module Coq =
functor
  (Lib : Library)
  ->
  struct
    (* Abstract syntax of the generated Coq code.  The rendering functions
       below ([ty_to_string], [pat_to_string], [term_to_string],
       [decl_to_string]) consume these trees and produce concrete syntax. *)
    module AST = struct
      type int_size = U8 | U16 | U32 | U64 | U128 | USize
      type int_type = { size : int_size; signed : bool }

      (* Coq types. *)
      type ty =
        | WildTy (* inference hole [_] *)
        | Bool
        | Unit
        | TypeTy (* the sort [Type] *)
        | Int of int_type
        | NameTy of string
        | RecordTy of string * (string * ty) list
        | Product of ty list (* n-ary product, rendered with [×] *)
        | Coproduct of ty list (* n-ary sum, rendered with [∐] *)
        | Arrow of ty * ty
        | ArrayTy of ty * string (* int *)
        | SliceTy of ty
        | AppTy of ty * ty list
        | NatMod of string * int * string
        (* For [Forall]/[Exists]: implicit binders, explicit binders, body. *)
        | Forall of string list * string list * ty
        | Exists of string list * string list * ty

      type literal =
        | Const_string of string
        | Const_char of int
        | Const_int of string * int_type
        | Const_bool of bool

      (* Coq patterns. *)
      type pat =
        | WildPat
        | UnitPat
        | Ident of string
        | Lit of literal
        | RecordPat of string * (string * pat) list
        | ConstructorPat of string * pat list
        | TuplePat of pat list
        | AscriptionPat of pat * ty
        | DisjunctivePat of pat list

      (* Monadic context a let-binding may live in (see [let_args]). *)
      type monad_type = Option | Result of ty | Exception of ty

      (* Coq terms. *)
      type term =
        | UnitTerm
        | Let of let_args
        | If of term * term * term
        | Match of term * (pat * term) list
        | Const of literal
        | Literal of string
        (* Interleaved format fragments and holes; see [format_to_string]. *)
        | AppFormat of string list * notation_elements list
        | App of term * term list
        | Var of string
        | NameTerm of string
        | RecordConstructor of string * (string * term) list
        | RecordUpdate of string * term * (string * term) list
        | Type of ty
        | Lambda of pat list * term
        | Tuple of term list
        | Array of term list
        | TypedTerm of term * ty

      (* Holes usable inside an [AppFormat]; the [int] is an extra
         indentation depth, the [bool] requests parenthesization. *)
      and notation_elements =
        | Newline of int
        | Typing of ty * bool * int
        | Variable of pat * int
        | Value of term * bool * int

      and let_args = {
        pattern : pat;
        mut : bool;
        value : term;
        body : term;
        value_typ : ty;
        monad_typ : monad_type option;
      }

      (* TODO: I don't get why you've got InductiveCase VS BaseCase. Why not an inductive case (i.e. a variant, right?) is a name + a list of types? *)
      type inductive_case = InductiveCase of string * ty | BaseCase of string

      type argument =
        | Implicit of pat * ty
        | Explicit of pat * ty
        | Typeclass of string option * ty

      (* name, arguments, body, type *)
      type definition_type = string * argument list * term * ty
      type record_field = Named of string * ty | Coercion of string * ty

      type instance_decl =
        | InlineDef of definition_type
        | LetDef of definition_type

      type instance_decls =
        | InstanceDecls of instance_decl list
        | TermDef of term

      (* Top-level Coq declarations (vernacular commands). *)
      type decl =
        | MultipleDecls of decl list
        | Unimplemented of string
        | Comment of string
        | Definition of definition_type
        | ProgramDefinition of definition_type
        | Lemma of string * argument list * term
        | Equations of definition_type
        | EquationsQuestionmark of definition_type
        | Notation of string * term * string option
        | Record of string * argument list * record_field list
        | Inductive of string * argument list * inductive_case list
        | Class of string * argument list * record_field list
        | Instance of
            string * argument list * ty * ty list * definition_type list
        | ProgramInstance of
            string * argument list * ty * ty list * instance_decls
        (* Optional "From" prefix, module path, optional rename. *)
        | Require of string option * string list * string option
        | ModuleType of string * argument list * record_field list
        | Module of string * string * argument list * record_field list
        | Parameter of string * ty (* definition_type minus 'term' *)
        | HintUnfold of string * ty option
    end

    let __TODO_pat__ s = AST.Ident (s ^ " todo(pat)")
    let __TODO_ty__ s : AST.ty = AST.NameTy (s ^ " todo(ty)")
    let __TODO_term__ s = AST.Const (AST.Const_string (s ^ " todo(term)"))
    let __TODO_item__ s = AST.Unimplemented (s ^ " todo(item)")

    (* Bit-width suffix used when rendering an integer size.  Note that
       [USize] is deliberately rendered as a 32-bit integer. *)
    let int_size_to_string : AST.int_size -> string = function
      | AST.U8 -> "8"
      | AST.U16 -> "16"
      | AST.U32 -> "32"
      | AST.U64 -> "64"
      | AST.U128 -> "128"
      | AST.USize -> "32"

    (* Render a type.  The returned boolean says whether the string must be
       parenthesized when used in an argument position; callers pick via
       [ty_to_string_with_paren] / [ty_to_string_without_paren]. *)
    let rec ty_to_string (x : AST.ty) : string * bool =
      match x with
      | AST.WildTy -> ("_", false)
      | AST.Bool -> (Lib.Notation.bool_str, false)
      (* Empty coproduct is the empty (uninhabited) type. *)
      | AST.Coproduct [] -> ("⊥", false)
      | AST.Coproduct l ->
          let ty_str =
            String.concat
              ~sep:(" " ^ "∐" ^ " ")
              (List.map ~f:ty_to_string_without_paren l)
          in
          (ty_str, true)
      (* Empty product and [Unit] both render as the unit type. *)
      | AST.Product [] | AST.Unit ->
          (Lib.Notation.unit_str, false (* TODO: might need paren *))
      | AST.TypeTy -> (Lib.Notation.type_str, false (* TODO: might need paren *))
      | AST.Int { size = AST.USize; _ } -> ("uint_size", false)
      | AST.Int { size; _ } -> ("int" ^ int_size_to_string size, false)
      | AST.NameTy s -> (s, false)
      (* Records are referred to by name only; fields are emitted elsewhere. *)
      | AST.RecordTy (name, _fields) ->
          (* [ AST.Record (name, fields) ] *) (name, false)
      | AST.Product l ->
          let ty_str =
            String.concat
              ~sep:(" " ^ "×" ^ " ")
              (List.map ~f:ty_to_string_without_paren l)
          in
          (ty_str, true)
      | AST.Arrow (a, b) ->
          let a_ty_str = ty_to_string_without_paren a in
          let b_ty_str = ty_to_string_without_paren b in
          (a_ty_str ^ " " ^ "->" ^ " " ^ b_ty_str, true)
      (* Arrays are hacspec [nseq]; the length is kept as a raw string. *)
      | AST.ArrayTy (t, l) ->
          let ty_str = ty_to_string_with_paren t in
          ( "nseq" ^ " " ^ ty_str ^ " "
            ^
            (* Int.to_string *)
            l,
            true )
      | AST.SliceTy t ->
          let ty_str = ty_to_string_with_paren t in
          ("seq" ^ " " ^ ty_str, true)
      (* Nullary application is just the head type. *)
      | AST.AppTy (i, []) -> ty_to_string i
      (* | AST.AppTy (i, [ y ]) -> *)
      (*     let ty_defs, ty_str = ty_to_string y in *)
      (*     (ty_defs, i ^ " " ^ ty_str) *)
      | AST.AppTy (i, p) ->
          let ty_str =
            String.concat ~sep:" " (List.map ~f:ty_to_string_with_paren p)
          in
          (ty_to_string_without_paren i ^ " " ^ ty_str, true)
      (* The modulus string [s] is already hex-formatted, hence the "0x". *)
      | AST.NatMod (_t, _i, s) ->
          (* [ *)
          (*   AST.Notation *)
          (*     ( t, *)
          (*       AST.ArrayTy *)
          (*         (AST.Int { size = U8; signed = false }, Int.to_string i) ); *)
          (* ] *)
          ("nat_mod 0x" ^ s, true)
      (* [Forall]/[Exists] with no binders degenerate to the body; the four
         cases below cover implicit-only, explicit-only, and both. *)
      | AST.Forall ([], [], ty) -> ty_to_string ty
      | AST.Forall (implicit_vars, [], ty) ->
          ( "forall" ^ " " ^ "{"
            ^ String.concat ~sep:" " implicit_vars
            ^ "}" ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      | AST.Forall ([], vars, ty) ->
          ( "forall" ^ " "
            ^ String.concat ~sep:" " vars
            ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      | AST.Forall (implicit_vars, vars, ty) ->
          ( "forall" ^ " " ^ "{"
            ^ String.concat ~sep:" " implicit_vars
            ^ "}" ^ "," ^ " "
            ^ String.concat ~sep:" " vars
            ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      | AST.Exists ([], [], ty) -> ty_to_string ty
      | AST.Exists (implicit_vars, [], ty) ->
          ( "exists" ^ " " ^ "{"
            ^ String.concat ~sep:" " implicit_vars
            ^ "}" ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      | AST.Exists ([], vars, ty) ->
          ( "exists" ^ " "
            ^ String.concat ~sep:" " vars
            ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      | AST.Exists (implicit_vars, vars, ty) ->
          ( "exists" ^ " " ^ "{"
            ^ String.concat ~sep:" " implicit_vars
            ^ "}" ^ "," ^ " "
            ^ String.concat ~sep:" " vars
            ^ "," ^ " "
            ^ ty_to_string_without_paren ty,
            true )
      (* Refutation case: the match above is already exhaustive. *)
      | _ -> .

    (* Wrap in parentheses iff [ty_to_string] asked for them. *)
    and ty_to_string_with_paren (x : AST.ty) : string =
      let s, b = ty_to_string x in
      if b then "(" ^ s ^ ")" else s

    and ty_to_string_without_paren (x : AST.ty) : string =
      let s, _ = ty_to_string x in
      s

    (* Render a literal constant. *)
    let literal_to_string : AST.literal -> string = function
      | Const_string s -> s
      (* TODO: render an actual character instead of its code point. *)
      | Const_char c -> Int.to_string c
      | Const_int (i, { size; _ }) ->
          Lib.Notation.int_repr (int_size_to_string size) i
      | Const_bool b -> Bool.to_string b

    (* Render a pattern.  [is_top_expr] is true when the pattern sits in a
       position where unit/tuple patterns need Coq's leading quote (['tt],
       ['(a, b)]) — see [tick_if]; [depth] is the indentation depth. *)
    let rec pat_to_string (x : AST.pat) (is_top_expr : bool) depth : string =
      match x with
      | AST.WildPat -> "_"
      | AST.UnitPat -> tick_if is_top_expr ^ "tt"
      | AST.Ident s -> s
      | AST.Lit l -> literal_to_string l
      | AST.ConstructorPat (name, args) ->
          name ^ " " ^ String.concat ~sep:" "
          @@ List.map ~f:(fun pat -> pat_to_string pat false depth) args
      | AST.RecordPat (_name, []) -> "(* Empty Record *)" (* TODO *)
      (* Record patterns use Coq's [{| field := pat; ... |}] syntax. *)
      | AST.RecordPat (_name, args) ->
          (* name ^ " " ^ *)
          "{|"
          ^ String.concat ~sep:";"
              (List.map
                 ~f:(fun (name, pat) ->
                   newline_indent (depth + 1)
                   ^ name ^ " " ^ ":=" ^ " "
                   ^ pat_to_string pat true (depth + 1))
                 args)
          ^ newline_indent depth ^ "|}"
      | AST.TuplePat [] -> "_" (* TODO: empty tuple pattern? *)
      (* A 1-tuple is just its component. *)
      | AST.TuplePat [ v ] -> pat_to_string v is_top_expr (depth + 1)
      | AST.TuplePat vals ->
          tick_if is_top_expr ^ "("
          ^ String.concat ~sep:","
              (List.map ~f:(fun t -> pat_to_string t false (depth + 1)) vals)
          ^ ")"
      | AST.AscriptionPat (p, ty) ->
          "(" ^ pat_to_string p true depth ^ " " ^ ":" ^ " "
          ^ ty_to_string_without_paren ty
          ^ ")" (* TODO: Should this be true of false? *)
      | AST.DisjunctivePat pats ->
          let f pat = pat_to_string pat true depth in
          String.concat ~sep:" | " @@ List.map ~f pats

    (* Leading quote for pattern-matching lets on tuples/unit. *)
    and tick_if is_top_expr = if is_top_expr then "'" else ""

    (* Render a term.  Like [ty_to_string], returns the rendered string plus
       a flag saying whether it must be parenthesized in argument position;
       [depth] is the indentation depth fed to [newline_indent]. *)
    let rec term_to_string (x : AST.term) depth : string * bool =
      match x with
      | AST.UnitTerm -> ("tt", false)
      (* [mut] and [monad_typ] are ignored here. *)
      | AST.Let { pattern = pat; value = bind; value_typ = typ; body = term; _ }
        ->
          (* TODO: propagate type definition *)
          let var_str = pat_to_string pat true depth in
          let expr_str = term_to_string_without_paren bind (depth + 1) in
          let typ_str = ty_to_string_without_paren typ in
          let body_str = term_to_string_without_paren term depth in
          ( "let" ^ " " ^ var_str ^ " " ^ ":=" ^ " " ^ expr_str ^ " " ^ ":"
            ^ " " ^ typ_str ^ " " ^ "in" ^ newline_indent depth ^ body_str,
            true )
      | AST.If (cond, then_, else_) ->
          ( "if" ^ " "
            ^ term_to_string_without_paren cond (depth + 1)
            ^ newline_indent depth ^ "then" ^ " "
            ^ term_to_string_without_paren then_ (depth + 1)
            ^ newline_indent depth ^ "else" ^ " "
            ^ term_to_string_without_paren else_ (depth + 1),
            true )
      | AST.Match (match_val, arms) ->
          ( "match" ^ " "
            ^ term_to_string_without_paren match_val (depth + 1)
            ^ " " ^ "with" ^ newline_indent depth
            ^ String.concat ~sep:(newline_indent depth)
                (List.map
                   ~f:(fun (pat, body) ->
                     "|" ^ " "
                     ^ pat_to_string pat true depth
                     ^ " " ^ "=>"
                     ^ newline_indent (depth + 1)
                     ^ term_to_string_without_paren body (depth + 1))
                   arms)
            ^ newline_indent depth ^ "end",
            false )
      | AST.Const c -> (literal_to_string c, false)
      | AST.Literal s -> (s, false)
      (* Notation application: interleave format fragments with rendered
         holes (see [format_to_string]). *)
      | AST.AppFormat (format, args) ->
          ( format_to_string format
              (List.map
                 ~f:(function
                   | AST.Newline n -> newline_indent (depth + n)
                   | AST.Typing (typ, true, _n) -> ty_to_string_with_paren typ
                   | AST.Typing (typ, false, _n) ->
                       ty_to_string_without_paren typ
                   | AST.Value (x, true, n) ->
                       term_to_string_with_paren x (depth + n)
                   | AST.Value (x, false, n) ->
                       term_to_string_without_paren x (depth + n)
                   | AST.Variable (p, n) -> pat_to_string p true (depth + n))
                 args),
            true (* TODO? Notation does not always need paren *) )
      | AST.App (f, args) ->
          let f_s, f_b = term_to_string f depth in
          (f_s ^ term_list_to_string args depth, f_b || List.length args > 0)
      | AST.Var s -> (s, false)
      | AST.NameTerm s -> (s, false)
      (* Record construction via the generated [Build_<name>] constructor
         with named arguments. *)
      | AST.RecordConstructor (f, args) ->
          ( "Build_" ^ f
            ^ (if List.length args > 0 then " " else "")
            ^ String.concat ~sep:" "
                (List.map
                   ~f:(fun (n, t) ->
                     "(" ^ n ^ " " ^ ":=" ^ " "
                     ^ term_to_string_without_paren t depth
                     ^ ")")
                   args),
            true )
      | AST.RecordUpdate (f, base, args) ->
          ( "Build_" ^ f ^ "["
            ^ term_to_string_without_paren base depth
            ^ "]"
            ^ (if List.length args > 0 then " " else "")
            ^ String.concat ~sep:" "
                (List.map
                   ~f:(fun (n, t) ->
                     "(" ^ n ^ " " ^ ":=" ^ " "
                     ^ term_to_string_without_paren t depth
                     ^ ")")
                   args),
            true )
      | AST.Type t ->
          let ty_str = ty_to_string_with_paren t in
          (* TODO: Make definitions? *)
          (ty_str, false (* TODO? does this always need paren? *))
      (* One [fun p =>] binder per parameter, then the body indented. *)
      | AST.Lambda (params, body) ->
          ( String.concat ~sep:" "
              (List.map
                 ~f:(fun x ->
                   "fun" ^ " " ^ pat_to_string x true depth ^ " " ^ "=>")
                 params)
            ^ newline_indent (depth + 1)
            ^ term_to_string_without_paren body (depth + 1),
            true )
      | AST.Tuple [] -> ("tt (* Empty tuple *)", false) (* TODO: Empty tuple? *)
      | AST.Tuple vals ->
          ( "("
            ^ String.concat ~sep:","
                (List.map
                   ~f:(fun t -> term_to_string_without_paren t (depth + 1))
                   vals)
            ^ ")",
            false )
      (* Non-empty arrays render as [array_from_list [e1; e2; ...]]. *)
      | AST.Array (t :: ts) ->
          ( "array_from_list" ^ " " ^ "["
            ^ List.fold_left
                ~init:(term_to_string_without_paren t (depth + 1))
                ~f:(fun x y ->
                  x ^ ";"
                  ^ newline_indent (depth + 1)
                  ^ term_to_string_without_paren y (depth + 1))
                ts
            ^ "]",
            true )
      | AST.Array [] -> ("!TODO empty array!", false)
      | AST.TypedTerm (e, t) ->
          ( term_to_string_without_paren e depth
            ^ " " ^ ":" ^ " " ^ ty_to_string_with_paren t,
            true )
      (* Refutation case: the match above is already exhaustive. *)
      | _ -> .

    (* Wrap in parentheses iff [term_to_string] asked for them. *)
    and term_to_string_with_paren (x : AST.term) depth : string =
      let s, b = term_to_string x depth in
      if b then "(" ^ s ^ ")" else s

    and term_to_string_without_paren (x : AST.term) depth : string =
      let s, _ = term_to_string x depth in
      s

    (* Zip format fragments with rendered arguments:
       f0 ^ a0 ^ f1 ^ a1 ^ ... — format fragments must outnumber args. *)
    and format_to_string (format : string list) (args : string list) : string =
      match format with
      | f :: fs -> (
          match args with x :: xs -> f ^ x ^ format_to_string fs xs | [] -> f)
      | [] -> failwith "incorrect formatting"

    (* Render an argument list, space-separated, preceded by a space when
       non-empty so it can be appended directly after the function. *)
    and term_list_to_string (terms : AST.term list) depth : string =
      (if List.is_empty terms then "" else " ")
      ^ String.concat ~sep:" "
          (List.map ~f:(fun t -> term_to_string_with_paren t depth) terms)

    (* Render a top-level declaration as one Coq vernacular command (or
       several, for [MultipleDecls]).  Each branch produces the complete
       command text including the terminating ".". *)
    let rec decl_to_string (x : AST.decl) : string =
      match x with
      | AST.MultipleDecls decls ->
          String.concat ~sep:"\n" (List.map ~f:decl_to_string decls)
      (* Unimplemented items become plain comments in the output. *)
      | AST.Unimplemented s -> "(*" ^ s ^ "*)"
      | AST.Comment s -> "(**" ^ " " ^ s ^ " " ^ "**)"
      | AST.Definition (name, arguments, term, ty) ->
          "Definition" ^ " "
          ^ definition_value_to_string (name, arguments, term, ty)
      (* [Program Definition] leaves obligations; they are discharged by the
         boilerplate appended via [fail_next_obligation]. *)
      | AST.ProgramDefinition (name, arguments, term, ty) ->
          "Program" ^ " " ^ "Definition" ^ " "
          ^ definition_value_to_string (name, arguments, term, ty)
          ^ fail_next_obligation
      | AST.Lemma (name, arguments, term) ->
          "Lemma" ^ " " ^ name ^ " "
          ^ params_to_string_typed arguments
          ^ " " ^ ":" ^ " "
          ^ term_to_string_without_paren term 1
          ^ "."
      | AST.Equations (name, arguments, term, ty) ->
          "Equations" ^ " "
          ^ definition_value_to_equation_definition (name, arguments, term, ty)
      | AST.EquationsQuestionmark (name, arguments, term, ty) ->
          "Equations?" ^ " "
          ^ definition_value_to_equation_definition (name, arguments, term, ty)
      (* [extra] carries notation modifiers, printed in parentheses. *)
      | AST.Notation (notation, value, extra) ->
          "Notation" ^ " " ^ "\"" ^ notation ^ "\"" ^ " " ^ ":=" ^ " "
          ^ term_to_string_with_paren value 0
          ^ (match extra with None -> "" | Some x -> " " ^ "(" ^ x ^ ")")
          ^ "."
      | AST.Record (name, arguments, variants) ->
          let variants_str =
            variants_to_string variants (newline_indent 1) ";"
          in
          "Record" ^ " " ^ name
          ^ params_to_string_typed arguments
          ^ " " ^ ":" ^ " " ^ "Type" ^ " " ^ ":=" ^ " " ^ "{" ^ variants_str
          ^ newline_indent 0 ^ "}."
      | AST.Inductive (name, arguments, variants) ->
          let name_arguments = name ^ params_to_string_typed arguments in
          (* Each constructor ends in the (applied) inductive type itself. *)
          let variants_str =
            String.concat ~sep:(newline_indent 0)
              (List.map
                 ~f:(fun x ->
                   let mid_str =
                     match x with
                     | AST.BaseCase ty_name -> ty_name ^ " : "
                     | AST.InductiveCase (ty_name, ty) ->
                         let ty_str = ty_to_string_with_paren ty in
                         ty_name ^ " " ^ ":" ^ " " ^ ty_str ^ " " ^ "->" ^ " "
                   in
                   ("|" ^ " ") ^ mid_str ^ name_arguments)
                 variants)
          in
          (* For parameterized inductives, emit [Arguments] commands making
             the type parameters implicit on every constructor. *)
          let args_str =
            if List.is_empty arguments then ""
            else
              inductive_case_args_to_string variants
                (newline_indent 0 ^ "Arguments" ^ " ")
                (List.fold_left ~init:"" ~f:(fun a _ -> a ^ " {_}") arguments)
                "."
          in
          "Inductive" ^ " " ^ name_arguments ^ " " ^ ":" ^ " " ^ "Type" ^ " "
          ^ ":=" ^ newline_indent 0 ^ variants_str ^ "." ^ args_str
      (* Typeclasses always abstract over an explicit [Self : Type]. *)
      | AST.Class (name, arguments, trait_items) ->
          let field_str =
            List.fold_left ~init:""
              ~f:(fun x y ->
                let field_name, sep, field_ty =
                  match y with
                  | Named (field_name, field_ty) -> (field_name, ":", field_ty)
                  | Coercion (field_name, field_ty) ->
                      (field_name, ":>", field_ty)
                  (* Should be "::" in newer versions of coq *)
                in
                let ty_str = ty_to_string_with_paren field_ty in
                x ^ newline_indent 1 ^ field_name ^ " " ^ sep ^ " " ^ ty_str
                ^ " " ^ ";")
              trait_items
          in
          "Class" ^ " " ^ name ^ " " ^ "(Self : "
          ^ ty_to_string_with_paren AST.TypeTy
          ^ ")"
          ^ params_to_string_typed arguments
          ^ " " ^ ":=" ^ " " ^ "{" ^ field_str ^ newline_indent 0 ^ "}" ^ "."
      (* Module types: [Named] fields become [Parameter]s, [Coercion] fields
         become nested module declarations. *)
      | AST.ModuleType (name, arguments, trait_items) ->
          let field_str =
            List.fold_left ~init:""
              ~f:(fun x y ->
                x ^ newline_indent 1
                ^
                match y with
                | Named (field_name, field_ty) ->
                    decl_to_string (AST.Parameter (field_name, field_ty))
                | Coercion (field_name, field_ty) ->
                    decl_to_string
                      (AST.Module
                         (field_name, ty_to_string_with_paren field_ty, [], []))
                (* Should be "::" in newer versions of coq *))
              trait_items
          in
          let arguments_str = params_to_string_typed arguments in
          "Module Type" ^ " " ^ name ^ arguments_str ^ "." ^ newline_indent 1
          ^ field_str ^ newline_indent 0 ^ "End" ^ " " ^ name ^ "."
      | AST.Parameter (name, typ) ->
          String.concat ~sep:" " [ name; ":"; ty_to_string_without_paren typ ]
      | AST.Module (name, typ, arguments, _trait_items) ->
          let arguments_str = params_to_string_typed arguments in
          "Module" ^ " " ^ name ^ arguments_str ^ " " ^ ":" ^ " " ^ typ ^ "."
          ^ " " ^ "End" ^ " " ^ name ^ "."
      (* Instance names are prefixed with the self type to keep them unique. *)
      | AST.Instance (name, arguments, self_ty, ty_list, impl_list) ->
          let ty_list_str =
            String.concat ~sep:" " (List.map ~f:ty_to_string_with_paren ty_list)
          in
          let impl_str =
            List.fold_left ~init:""
              ~f:(fun x (name, arguments, term, _ty) ->
                x ^ newline_indent 1 ^ name
                ^ params_to_string_typed arguments
                ^ " " ^ ":=" ^ " "
                ^ term_to_string_without_paren term 1
                ^ ";")
              impl_list
          in
          let ty_str = ty_to_string_without_paren self_ty in
          "#[global] Instance" ^ " " ^ ty_str ^ "_" ^ name
          ^ params_to_string_typed arguments
          ^ " " ^ ":" ^ " " ^ name ^ " " ^ ty_list_str ^ " " ^ ":=" ^ " " ^ "{"
          ^ impl_str ^ newline_indent 0 ^ "}" ^ "."
      (* Program instances: [LetDef]s are bound as lets before the record
         literal, [InlineDef]s are inlined into the record fields. *)
      | AST.ProgramInstance
          (name, arguments, self_ty, ty_list, InstanceDecls impl_list) ->
          let ty_list_str =
            String.concat ~sep:" " (List.map ~f:ty_to_string_with_paren ty_list)
          in
          let impl_str, impl_str_empty =
            let fl =
              List.filter_map
                ~f:(function
                  | LetDef (name, arguments, term, ty) ->
                      Some
                        ("let" ^ " " ^ name ^ " " ^ ":=" ^ " "
                        ^ (if List.is_empty arguments then ""
                           else
                             "fun" ^ " "
                             ^ params_to_string_typed arguments
                             ^ " " ^ "=>" ^ " ")
                        ^ term_to_string_without_paren term 1
                        ^ " " ^ ":" ^ " "
                        ^ ty_to_string_without_paren ty
                        ^ " " ^ "in")
                  | _ -> None)
                impl_list
            in
            (String.concat ~sep:(newline_indent 1) fl, List.is_empty fl)
          in
          let arg_str =
            String.concat
              ~sep:(";" ^ newline_indent 1)
              (List.map
                 ~f:(function
                   | LetDef (name, _arguments, _term, _ty) ->
                       name ^ " " ^ ":=" ^ " " ^ "(" ^ "@" ^ name ^ ")"
                   | InlineDef (name, arguments, term, ty) ->
                       name ^ " " ^ ":=" ^ " " ^ "("
                       ^ (if List.is_empty arguments then ""
                          else
                            "fun" ^ " "
                            ^ params_to_string_typed arguments
                            ^ " " ^ "=>" ^ " ")
                       ^ term_to_string_without_paren term 1
                       ^ " " ^ ":" ^ " "
                       ^ ty_to_string_without_paren ty
                       ^ ")")
                 impl_list)
          in
          let ty_str = ty_to_string_without_paren self_ty in
          "#[global] Program Instance" ^ " " ^ ty_str ^ "_" ^ name
          ^ params_to_string_typed arguments
          ^ " " ^ ":" ^ " " ^ name ^ " " ^ ty_list_str ^ " " ^ ":="
          ^ newline_indent 1 ^ impl_str
          ^ (if impl_str_empty then "" else newline_indent 1)
          ^ (match impl_list with
            | [] -> "_"
            | _ -> "{|" (* ^ name ^ " " ^ ty_list_str *) ^ " " ^ arg_str ^ "|}")
          ^ "." ^ fail_next_obligation
      | AST.ProgramInstance (name, arguments, self_ty, ty_list, TermDef term) ->
          let ty_list_str =
            String.concat ~sep:" " (List.map ~f:ty_to_string_with_paren ty_list)
          in
          let ty_str = ty_to_string_without_paren self_ty in
          "#[global] Program Instance" ^ " " ^ ty_str ^ "_" ^ name
          ^ params_to_string_typed arguments
          ^ " " ^ ":" ^ " " ^ name ^ " " ^ ty_list_str ^ " " ^ ":="
          ^ newline_indent 1
          ^ term_to_string_without_paren term 1
          ^ "." ^ fail_next_obligation
      (* [Require] with no modules renders to nothing. *)
      | AST.Require (_, [], _rename) -> ""
      (* The imported module is both Required and re-Exported; with no
         rename, the name is the underscore-join of the capitalized path. *)
      | AST.Require (None, import :: imports, rename) ->
          (* map_first_letter String.uppercase import *)
          let import_name =
            match rename with
            | Some s -> s
            | _ ->
                List.fold_left
                  ~init:(map_first_letter String.uppercase import)
                  ~f:(fun x y -> x ^ "_" ^ map_first_letter String.uppercase y)
                  imports
          in
          "Require Import" ^ " " ^ import_name ^ "." ^ newline_indent 0
          ^ "Export" ^ " " ^ import_name ^ "."
      | AST.Require (Some x, imports, rename) ->
          "From" ^ " " ^ x ^ " "
          ^ decl_to_string (AST.Require (None, imports, rename))
      | AST.HintUnfold (n, Some typ) ->
          let ty_str = ty_to_string_without_paren typ in
          "Hint Unfold" ^ " " ^ ty_str ^ "_" ^ n ^ "."
      | AST.HintUnfold (n, None) -> "Hint Unfold" ^ " " ^ n ^ "."

    and definition_value_to_equation_definition
        ((name, arguments, term, ty) : AST.definition_type) =
      (* Render an Equations-style body — [name explicit-args := term : ty] —
         wrapped in the common definition shell, and guard the result with
         [Fail Next Obligation.]. Only explicit binders appear on the
         left-hand side of [:=]. *)
      let explicit_args =
        List.filter_map
          ~f:(function Explicit (p, t) -> Some (p, t) | _ -> None)
          arguments
      in
      let body =
        name ^ " "
        ^ params_to_string explicit_args
        ^ " " ^ ":=" ^ newline_indent 2
        ^ term_to_string_without_paren term 2
        ^ " " ^ ":" ^ " "
        ^ ty_to_string_without_paren ty
      in
      definition_value_to_shell_string (name, arguments, term, ty) body
      ^ fail_next_obligation

    and definition_value_to_shell_string
        ((name, arguments, _, ty) : AST.definition_type) (body : string) :
        string =
      (* Common shell shared by the definition printers:
         [name binders : ty :=] followed by the pre-rendered [body] on an
         indented line, terminated by a period. The term component of the
         tuple is unused here — the caller renders it into [body]. *)
      String.concat ~sep:""
        [
          name;
          params_to_string_typed arguments;
          " "; ":"; " ";
          ty_to_string_without_paren ty;
          " "; ":=";
          newline_indent 1;
          body;
          ".";
        ]

    and definition_value_to_string
        ((name, arguments, term, ty) : AST.definition_type) : string =
      (* Plain definition: the shell body is simply the printed term. *)
      let body = term_to_string_without_paren term 1 in
      definition_value_to_shell_string (name, arguments, term, ty) body

    and fail_next_obligation : string =
      (* Guard appended after obligation-producing commands: if any
         obligation remains open, the generated Coq file fails loudly. *)
      newline_indent 0
      ^ String.concat ~sep:" " [ "Fail"; "Next"; "Obligation." ]

    and params_to_string_typed params : string =
      (* Render binders together with their types: implicit binders as
         {p : t}, explicit binders as (p : t), and typeclass constraints as
         `{t} or `{n : t}. An empty list yields the empty string; otherwise
         the result carries one leading space so it can be appended directly
         after a definition name. *)
      let render_binder = function
        | Implicit (pat, ty) ->
            "{" ^ pat_to_string pat true 0 ^ " " ^ ":" ^ " "
            ^ ty_to_string_without_paren ty
            ^ "}"
        | Explicit (pat, ty) ->
            "(" ^ pat_to_string pat true 0 ^ " " ^ ":" ^ " "
            ^ ty_to_string_without_paren ty
            ^ ")"
        | Typeclass (None, ty) ->
            "`{" ^ " " ^ ty_to_string_without_paren ty ^ "}"
        | Typeclass (Some name, ty) ->
            "`{" ^ name ^ " " ^ ":" ^ " "
            ^ ty_to_string_without_paren ty
            ^ "}"
      in
      match params with
      | [] -> ""
      | _ -> " " ^ String.concat ~sep:" " (List.map ~f:render_binder params)

    and params_to_string params : string =
      (* Render binder patterns only (the types are dropped), each followed
         by a single trailing space. *)
      List.fold_left ~init:""
        ~f:(fun acc (pat, _ty) -> acc ^ pat_to_string pat true 0 ^ " ")
        params

    (* and inductive_case_to_string variants pre post : string = *)
    (*   match variants with *)
    (*   | x :: xs -> *)
    (*       let mid_str = *)
    (*         match x with *)
    (*         | AST.BaseCase ty_name -> ty_name *)
    (*         | AST.InductiveCase (ty_name, ty) -> *)
    (*             let ty_str = ty_to_string ty in *)
    (*             ty_name ^ " " ^ ":" ^ " " ^ ty_str ^ " " ^ "->" ^ " " *)
    (*       in *)
    (*       let variants_str = inductive_case_to_string xs pre post in *)
    (*       pre ^ mid_str ^ post ^ variants_str *)
    (*   | [] -> "" *)

    and inductive_case_args_to_string variants pre mid post : string =
      (* For every constructor emit [pre name mid payload post]; base cases
         carry no payload, inductive cases contribute a space followed by
         their parenthesized argument type. *)
      String.concat ~sep:""
        (List.map variants ~f:(function
          | AST.BaseCase name -> pre ^ name ^ mid ^ post
          | AST.InductiveCase (name, ty) ->
              pre ^ name ^ mid ^ " " ^ ty_to_string_with_paren ty ^ post))

    and variants_to_string variants pre post : string =
      (* Render record/variant fields as [pre name : ty post].
         NOTE(review): the separator distinguishing Named (":") from
         Coercion (":>", "::" in newer Coq) is computed but deliberately
         unused — a plain ":" is always emitted. Behavior preserved as-is;
         confirm whether coercion fields should really print ":>" here. *)
      String.concat ~sep:""
        (List.map variants ~f:(fun field ->
             let name, _sep, ty =
               match field with
               | Named (n, t) -> (n, ":", t)
               | Coercion (n, t) -> (n, ":>", t)
             in
             pre ^ name ^ " " ^ ":" ^ " "
             ^ ty_to_string_without_paren ty
             ^ post))
  end


================================================
FILE: engine/backends/coq/dune
================================================
; Pretty-printer AST library shared by the Coq-family backends
; (used by both the Coq and the SSProve backend).
(library
 (name coq_ast)
 (package hax-engine)
 (libraries hax_engine base hacspeclib_macro_parser)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; (env
;  (_
;   (flags
;    (:standard -warn-error -A -warn-error +8))))

; Enable (almost) all OCaml warnings, keeping a few noisy ones off:
; 4 (fragile pattern matching), 40/42 (constructor/label name
; disambiguation), 44 (open statement shadows an identifier).
(env
 (_
  (flags
   (:standard -w +A-4-40-42-44))))


================================================
FILE: engine/backends/coq/ssprove/dune
================================================
; SSProve backend library; reuses the coq_ast pretty-printer library.
(library
 (name ssprove_backend)
 (package hax-engine)
 (libraries hax_engine base hacspeclib_macro_parser coq_ast)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; All warnings disabled for this backend (-w -A), unlike coq_ast which
; compiles with nearly all warnings enabled.
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/coq/ssprove/ssprove_backend.ml
================================================
open Hax_engine
open Utils
open Base
open Coq_ast

(* Instantiate the generic backend machinery for SSProve. The feature
   record starts from [Off] (every language feature disabled) and then
   switches on exactly the features this backend's input language
   supports (slices, monadic bindings, macros, the various loop forms,
   etc.). *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Slice
      include On.Monadic_binding
      include On.Macro
      include On.Construct_base
      include On.Loop
      include On.For_loop
      include On.While_loop
      include On.For_index_loop
      include On.State_passing_loop
      include On.Fold_like_loop
    end)
    (struct
      let backend = Diagnostics.Backend.SSProve
    end)

(* Coercion functor from any feature set [FA] — constrained so that every
   feature unsupported by this backend is statically [Off] — into the
   backend's [InputLanguage]. The subtype mapping is the identity, plus
   explicit "On" witnesses for the features the backend keeps enabled. *)
module SubtypeToInputLanguage
    (FA :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type continue = Features.Off.continue
         and type break = Features.Off.break
         and type mutable_pointer = Features.Off.mutable_pointer
         and type mutable_variable = Features.Off.mutable_variable
         and type reference = Features.Off.reference
         and type raw_pointer = Features.Off.raw_pointer
         and type early_exit = Features.Off.early_exit
         and type question_mark = Features.Off.question_mark
         and type as_pattern = Features.Off.as_pattern
         and type lifetime = Features.Off.lifetime
         and type monadic_action = Features.Off.monadic_action
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type quote = Features.Off.quote
         and type block = Features.Off.block
         and type dyn = Features.Off.dyn
         and type match_guard = Features.Off.match_guard
         and type trait_item_default = Features.Off.trait_item_default
         and type unsafe = Features.Off.unsafe) =
struct
  module FB = InputLanguage

  include
    Subtype.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Features.SUBTYPE.Id
        include Features.SUBTYPE.On.Monadic_binding
        include Features.SUBTYPE.On.Construct_base
        include Features.SUBTYPE.On.Slice
        include Features.SUBTYPE.On.Macro
        include Features.SUBTYPE.On.Loop
        include Features.SUBTYPE.On.For_loop
        include Features.SUBTYPE.On.While_loop
        include Features.SUBTYPE.On.For_index_loop
        include Features.SUBTYPE.On.State_passing_loop
        include Features.SUBTYPE.On.Fold_like_loop
      end)

  (* Items that fall outside the backend's language are rejected with a
     diagnostic rather than silently dropped. *)
  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))
end

(* AST specialised to this backend's input language, plus the (empty)
   command-line options record for this backend. *)
module AST = Ast.Make (InputLanguage)
module BackendOptions = Backend.UnitBackendOptions
open Ast

(* Naming policy for generated Coq identifiers: the default hax policy,
   extended with a reserved-word set so that names clashing with Coq's
   [left]/[right] are not emitted verbatim (the renderer handles reserved
   words — TODO(review): confirm the exact escaping in MakeRenderAPI). *)
module CoqNamePolicy = struct
  include Concrete_ident.DefaultNamePolicy

  let reserved_words = Hash_set.of_list (module String) [ "left"; "right" ]
end

(* AST utilities and the identifier-rendering API, both instantiated for
   this backend (the latter with the Coq-specific naming policy). *)
module U = Ast_utils.Make (InputLanguage)
module RenderId = Concrete_ident.MakeRenderAPI (CoqNamePolicy)
open AST

(* Printer customisation for SSProve: integer literals are emitted
   verbatim (no wrapper notation), and the base sorts come from SSProve's
   choice universe ([choice_type], ['bool], ['unit]). *)
module SSProveLibrary : Library = struct
  module Notation = struct
    (* [int_repr ty i] returns the literal [i] unchanged; the type hint
       argument is ignored by this backend. *)
    let int_repr (_x : string) (i : string) : string = i
    let type_str : string = "choice_type"
    let bool_str : string = "'bool"
    let unit_str : string = "'unit"
  end
end

(* The shared Coq pretty-printer, instantiated with SSProve notations. *)
module SSP = Coq (SSProveLibrary)

module SSPExtraDefinitions (* : ANALYSIS *) = struct
  (* Helpers that build SSProve-flavoured Coq constructs (letb/letm,
     matchb, both-wrapped types, records as nested products, enums as
     nested coproducts) as [SSP.AST] fragments. Most helpers rely on
     [SSP.AST.AppFormat], which pairs a list of literal format pieces with
     a list of printable slots — the two lists are interleaved by the
     printer, so their relative order is significant. *)

  (* Wrap a printer type [a] into SSProve's [both] wrapper: [both a]. *)
  let wrap_type_in_both (a : SSP.AST.ty) =
    SSP.AST.AppTy (SSP.AST.NameTy "both", [ a ])

  (* The unit value, annotated with the unit type. *)
  let unit_term : SSP.AST.term =
    SSP.AST.TypedTerm (SSP.AST.UnitTerm, SSP.AST.Unit)

  (* All identifiers bound by a pattern, collected left to right.
     Wildcards, unit and literal patterns bind nothing. *)
  let rec variables_of_ssp_pat (p : SSP.AST.pat) : string list =
    match p with
    | RecordPat (_, npl) -> List.concat_map ~f:(snd >> variables_of_ssp_pat) npl
    | ConstructorPat (_, pl) -> List.concat_map ~f:variables_of_ssp_pat pl
    | TuplePat pl -> List.concat_map ~f:variables_of_ssp_pat pl
    | AscriptionPat (p, _) -> variables_of_ssp_pat p
    | Ident x -> [ x ]
    | DisjunctivePat pl -> List.concat_map ~f:variables_of_ssp_pat pl
    | WildPat | UnitPat | Lit _ -> []

  (* Emit an SSProve let-binding. The concrete notation depends on the
     monadic context of the bound value:
     - Exception/Result: letm[choice_typeMonad.result_bind_code ty] p := e in …
       NOTE(review): the Exception and Result branches below render
       identically — confirm whether Exception should differ.
     - Option: letm[choice_typeMonad.option_bind_code] p := e in …
     - mutable plain binding: letb p loc(p_locs) := e in … where one
       <var>_loc location is listed per variable bound by the pattern
       (a wildcard when the pattern binds nothing).
     - immutable plain binding: letb p := e in … *)
  let letb
      ({ pattern; mut; value; body; value_typ; monad_typ } : SSP.AST.let_args) :
      SSP.AST.term =
    match monad_typ with
    | Some (Exception _typ) ->
        SSP.AST.AppFormat
          ( [
              "letm[choice_typeMonad.result_bind_code ";
              (*typ*)
              "] ";
              (*p*)
              " := ";
              (*expr*)
              " in";
              "";
              (*body*)
              "";
            ],
            [
              SSP.AST.Typing (value_typ, true, 0);
              SSP.AST.Variable (pattern, 0);
              SSP.AST.Value (value, false, 0);
              SSP.AST.Newline 0;
              SSP.AST.Value (body, false, 0);
            ] )
    | Some (Result _typ) ->
        SSP.AST.AppFormat
          ( [
              "letm[choice_typeMonad.result_bind_code ";
              (*typ*)
              "] ";
              (*p*)
              " := ";
              (*expr*)
              " in";
              "";
              (*body*)
              "";
            ],
            [
              SSP.AST.Typing (value_typ, true, 0);
              SSP.AST.Variable (pattern, 0);
              SSP.AST.Value (value, false, 0);
              SSP.AST.Newline 0;
              SSP.AST.Value (body, false, 0);
            ] )
    | Some Option ->
        SSP.AST.AppFormat
          ( [
              "letm[choice_typeMonad.option_bind_code] ";
              (*p*)
              " := ";
              (*expr*)
              " in";
              "";
              (*body*)
              "";
            ],
            [
              SSP.AST.Variable (pattern, 0);
              SSP.AST.Value (value, false, 0);
              SSP.AST.Newline 0;
              SSP.AST.Value (body, false, 0);
            ] )
    | None ->
        if mut then
          SSP.AST.AppFormat
            ( [
                "letb ";
                (*p*)
                " loc(" (*p_loc*);
                ") := ";
                (*expr*)
                " in";
                "";
                (*body*)
                "";
              ],
              [
                SSP.AST.Variable (pattern, 0);
                (* Locations: one <var>_loc per bound variable, tupled when
                   there are several, a wildcard when there are none. *)
                SSP.AST.Variable
                  ( (match
                       List.map
                         ~f:(fun x -> SSP.AST.Ident (x ^ "_loc"))
                         (variables_of_ssp_pat pattern)
                     with
                    | [] -> SSP.AST.WildPat
                    | [ x ] -> x
                    | xs -> SSP.AST.TuplePat xs),
                    0 );
                SSP.AST.Value (value, false, 0);
                SSP.AST.Newline 0;
                SSP.AST.Value (body, false, 0);
              ] )
        else
          SSP.AST.AppFormat
            ( [ "letb "; (*p*) " := "; (*expr*) " in"; ""; (*body*) "" ],
              [
                SSP.AST.Variable (pattern, 0);
                SSP.AST.Value (value, false, 0);
                SSP.AST.Newline 0;
                SSP.AST.Value (body, false, 0);
              ] )

  (* View a pattern as a (pattern, equivalent expression) pair when
     possible, used to re-emit pattern-bound data as a value. Wildcard and
     unit patterns contribute nothing (None); record and tuple patterns
     recurse field-wise, keeping only convertible sub-patterns.
     NOTE(review): a DisjunctivePat is converted like a tuple of its
     alternatives, and a ConstructorPat drops its constructor — confirm
     these are the intended semantics. *)
  let rec pat_as_expr (p : SSP.AST.pat) : (SSP.AST.pat * SSP.AST.term) option =
    match p with
    | WildPat | UnitPat -> None
    | Ident s -> Some (SSP.AST.Ident s, SSP.AST.Var s)
    | Lit l -> Some (SSP.AST.Lit l, Const l)
    | RecordPat (s, sps) ->
        let v =
          List.filter_map
            ~f:(fun (s, ps) ->
              Option.map ~f:(fun (p, t) -> ((s, p), (s, t))) (pat_as_expr ps))
            sps
        in
        Some
          ( SSP.AST.RecordPat (s, List.map ~f:fst v),
            SSP.AST.RecordConstructor (s, List.map ~f:snd v) )
    | ConstructorPat (_, ps) | TuplePat ps ->
        let pt_list = List.filter_map ~f:pat_as_expr ps in
        Some
          ( TuplePat (List.map ~f:fst pt_list),
            SSP.AST.Tuple (List.map ~f:snd pt_list) )
    | AscriptionPat (p, _) -> pat_as_expr p (* TypedTerm (, t) *)
    | DisjunctivePat ps ->
        let pt_list = List.filter_map ~f:pat_as_expr ps in
        Some
          ( TuplePat (List.map ~f:fst pt_list),
            SSP.AST.Tuple (List.map ~f:snd pt_list) )

  (* Emit SSProve's [ifb cond then … else …] conditional, with each part
     on its own line. *)
  let ifb ((cond, then_, else_) : SSP.AST.term * SSP.AST.term * SSP.AST.term) :
      SSP.AST.term =
    SSP.AST.AppFormat
      ( [ "ifb "; (*expr*) ""; "then "; ""; "else "; "" ],
        [
          SSP.AST.Value (cond, false, 0);
          SSP.AST.Newline 0;
          SSP.AST.Value (then_, false, 0);
          SSP.AST.Newline 0;
          SSP.AST.Value (else_, false, 0);
        ] )

  (* Emit SSProve's [matchb e with | p => body … end]; each arm adds four
     format pieces and four slots, so the two lists stay aligned. *)
  let matchb ((expr, arms) : SSP.AST.term * (SSP.AST.pat * SSP.AST.term) list) :
      SSP.AST.term =
    SSP.AST.AppFormat
      ( [ "matchb "; (*expr*) " with" ]
        @ List.concat_map ~f:(fun _ -> [ "| "; " =>"; ""; "" ]) arms
        @ [ "end" ],
        [ SSP.AST.Value (expr, false, 0); SSP.AST.Newline 0 ]
        @ List.concat_map
            ~f:(fun (arm_pat, body) ->
              [
                SSP.AST.Variable (arm_pat, 0);
                SSP.AST.Newline 1;
                SSP.AST.Value (body, false, 1);
                SSP.AST.Newline 0;
              ])
            arms )

  (* Encode a struct as an SSProve-compatible "updatable record":
     - the record type itself is defined as a nested product of the field
       types;
     - one [Equations] accessor per field, which binds the both-value and
       projects the right component with a chain of fst/snd applications;
     - a [Build_<name>] function taking every field as an implicit
       both-value and re-tupling them under bind_both/ret_both;
     - one notation per field emulating record-update syntax
       (Build_<name> [ x ] ( field := y )), copying every other field
       from [x].
     Coercion fields are dropped when collecting the field list. *)
  let updatable_record
      ((name, arguments, variants) :
        string * SSP.AST.argument list * SSP.AST.record_field list) :
      SSP.AST.decl =
    let fields =
      List.concat_map
        ~f:(function
          | SSP.AST.Named (x, y) -> [ (x, y) ] | SSP.AST.Coercion _ -> [])
        variants
    in
    (* Fully applied type: "(name explicit-args…)". *)
    let ty_name =
      "("
      ^ String.concat ~sep:" "
          (name
          :: List.filter_map
               ~f:(fun x ->
                 match x with
                 | SSP.AST.Explicit (p, _t) ->
                     Some (SSP.pat_to_string p false 0)
                 | _ -> None)
               arguments)
      ^ ")"
    in
    SSP.AST.MultipleDecls
      ([
         (* The record type: a product of all field types. *)
         SSP.AST.Definition
           ( name,
             arguments,
             SSP.AST.Type (SSP.AST.Product (List.map ~f:snd fields)),
             SSP.AST.TypeTy );
       ]
      (* Field accessors: field i of n projects via (i = 0 ? id : snd)
         followed by (n - 1 - i) applications of fst. *)
      @ List.mapi
          ~f:(fun i (x, y) ->
            SSP.AST.Equations
              ( x,
                List.map
                  ~f:(function
                    | SSP.AST.Explicit (a, b) -> SSP.AST.Implicit (a, b)
                    | v -> v)
                  arguments
                @ [
                    SSP.AST.Explicit
                      ( SSP.AST.Ident "s",
                        wrap_type_in_both (SSP.AST.NameTy name) );
                  ],
                SSP.AST.App
                  ( SSP.AST.Var "bind_both",
                    [
                      SSP.AST.Var "s";
                      SSP.AST.Lambda
                        ( [ SSP.AST.Ident "x" ],
                          (* SSP.AST.App *)
                          (*   ( SSP.AST.Var "solve_lift", *)
                          (* [ *)
                          SSP.AST.App
                            ( SSP.AST.Var "ret_both",
                              [
                                SSP.AST.TypedTerm
                                  ( List.fold_right ~init:(SSP.AST.Var "x")
                                      ~f:(fun x y ->
                                        SSP.AST.App (SSP.AST.Var x, [ y ]))
                                      ((if Stdlib.(i != 0) then [ "snd" ]
                                        else [])
                                      @ List.init
                                          (List.length fields - 1 - i)
                                          ~f:(fun _ -> "fst")),
                                    y );
                              ] )
                          (* ] ) *) );
                    ] ),
                wrap_type_in_both y ))
          fields
      @ [
          (* Constructor: bind every field, then return the tuple of all
             field variables at the record type. *)
          SSP.AST.Equations
            ( "Build_" ^ name,
              List.map
                ~f:(function
                  | SSP.AST.Explicit (a, b) -> SSP.AST.Implicit (a, b) | v -> v)
                arguments
              @ List.mapi
                  ~f:(fun i (x, y) ->
                    SSP.AST.Implicit (SSP.AST.Ident x, wrap_type_in_both y))
                  fields,
              List.fold_left
                ~init:
                  ((* SSP.AST.App *)
                   (*   ( SSP.AST.Var "solve_lift", *)
                   (* [ *)
                     SSP.AST.App
                     ( SSP.AST.Var "ret_both",
                       [
                         SSP.AST.TypedTerm
                           ( SSP.AST.Tuple
                               (List.map
                                  ~f:(fst >> fun x -> SSP.AST.Var x)
                                  fields),
                             SSP.AST.NameTy ty_name );
                       ] )
                     (* ] ) *))
                ~f:(fun z (x, _y) ->
                  SSP.AST.App
                    ( SSP.AST.Var "bind_both",
                      [ SSP.AST.Var x; SSP.AST.Lambda ([ SSP.AST.Ident x ], z) ]
                    ))
                fields,
              SSP.AST.NameTy ("both" ^ " " ^ ty_name) )
          (* :: SSP.AST.Arguments ("Build_" ^ pconcrete_ident name,) *);
        ]
      (* @ [SSP.AST.ProgramInstance *)
      (*      (\* (name, arguments, self_ty, ty_list, impl_list) *\) *)
      (*      ( "Settable", *)
      (*        [], *)
      (*        SSP.AST.NameTy name, *)
      (*        [wrap_type_in_both (SSP.AST.NameTy name)], *)
      (*        SSP.AST.InstanceDecls [SSP.AST.LetDef ("mkT", [], *)
      (*          SSP.AST.App (SSP.AST.Var "fun x => ", [ *)
      (*          List.fold_left *)
      (*           ~init: *)
      (*             ((\* SSP.AST.App *\) *)
      (*              (\*   ( SSP.AST.Var "solve_lift", *\) *)
      (*                  (\* [ *\) *)
      (*                    SSP.AST.App *)
      (*                      ( SSP.AST.Var "ret_both", *)
      (*                        [ *)
      (*                          SSP.AST.TypedTerm *)
      (*                            ( SSP.AST.Tuple *)
      (*                                (List.map *)
      (*                                   ~f:(fst >> fun x -> SSP.AST.Var x) *)
      (*                                   fields), *)
      (*                              SSP.AST.NameTy ty_name ); *)
      (*                        ] ); *)
      (*                    (\* ] ) *\)) *)
      (*           ~f:(fun z (x, _y) -> *)
      (*             SSP.AST.App *)
      (*               ( SSP.AST.Var "bind_both", *)
      (*                 [ SSP.AST.App (SSP.AST.Var x , [ (SSP.AST.Var "x") ] ); SSP.AST.Lambda ([ SSP.AST.Ident x ], z) ] *)
      (*               )) *)
      (*           fields]), *)
      (*          SSP.AST.WildTy)] *)
      (*      ) *)
      (*   ] *)
      (* Record-update notations: field j is either the updated value [y]
         (when j = i) or the projection of the source record [x]. *)
      @ List.mapi
          ~f:(fun i (x, _y) ->
            SSP.AST.Notation
              ( "'Build_" ^ name ^ "'" ^ " " ^ "'['" ^ " " ^ "x" ^ " " ^ "']'"
                ^ " " ^ "'('" ^ " " ^ "'" ^ x ^ "'" ^ " " ^ "':='" ^ " " ^ "y"
                ^ " " ^ "')'",
                SSP.AST.App
                  ( SSP.AST.Var ("Build_" ^ name),
                    List.mapi
                      ~f:(fun j (x, _y) ->
                        SSP.AST.AppFormat
                          ( [ x ^ " " ^ ":=" ^ " "; (*v*) "" ],
                            [
                              SSP.AST.Value
                                ( (if Stdlib.(j == i) then SSP.AST.Var "y"
                                   else
                                     SSP.AST.App
                                       (SSP.AST.Var x, [ SSP.AST.Var "x" ])),
                                  false,
                                  0 );
                            ] ))
                      fields ),
                None ))
          fields)

  (* Encode an enum as a right-nested coproduct type plus, per
     constructor:
     - a '<name>_case' notation that builds the inl/inr injection chain
       (case i of n is (i = 0 ? id : inr) after (n - 1 - i) inl's);
     - an [Equations] smart constructor returning a both-value, binding
       the payload (if any) before injecting it. *)
  let both_enum
      ((name, arguments, cases) :
        string * SSP.AST.argument list * SSP.AST.inductive_case list) :
      SSP.AST.decl =
    SSP.AST.MultipleDecls
      ((* Type definition *)
       SSP.AST.Definition
         ( (* "t_" ^ *) name,
           arguments,
           SSP.AST.Type
             (SSP.AST.Coproduct
                (List.map
                   ~f:(function
                     | BaseCase _ -> SSP.AST.Unit
                     | InductiveCase (_, typ) -> typ)
                   cases))
           (* (SSP.AST.NameTy ("chFin (mkpos " ^ number_of_cases ^ ")")) *),
           SSP.AST.TypeTy )
      :: (* Index names and constructors *)
         List.concat_mapi cases ~f:(fun i c ->
             let v_name, curr_typ =
               match c with
               | BaseCase v_name -> (v_name, [])
               | InductiveCase (v_name, typ) -> (v_name, [ typ ])
             in
             let injections inner_val =
               List.fold_left ~init:inner_val
                 ~f:(fun y x -> SSP.AST.App (SSP.AST.Var x, [ y ]))
                 ((if Stdlib.(i != 0) then [ "inr" ] else [])
                 @ List.init (List.length cases - 1 - i) ~f:(fun _ -> "inl"))
             in
             let definition_body =
               let inject_argument inner_val =
                 (* SSP.AST.App *)
                 (*   ( SSP.AST.Var "solve_lift", *)
                 (* [ *)
                 SSP.AST.App
                   ( SSP.AST.Var "ret_both",
                     [
                       SSP.AST.TypedTerm
                         (injections inner_val, SSP.AST.NameTy name);
                     ] )
                 (* ] ) *)
               in
               match curr_typ with
               | [] -> inject_argument unit_term
               | _ ->
                   SSP.AST.App
                     ( SSP.AST.Var "bind_both",
                       [
                         SSP.AST.Var "x";
                         SSP.AST.Lambda
                           ( [ SSP.AST.Ident "x" ],
                             inject_argument (SSP.AST.Var "x") );
                       ] )
             in
             [
               (let arg, body =
                  match curr_typ with
                  | [] ->
                      ("", injections SSP.AST.UnitTerm)
                      (* TODO: Fix unit translation *)
                  | _ -> (" " ^ "x", injections (SSP.AST.Var "x"))
                in
                SSP.AST.Notation
                  ("'" ^ v_name ^ "_case" ^ "'" ^ arg, body, Some "at level 100"));
               SSP.AST.Equations
                 ( v_name,
                   List.map
                     ~f:(fun x ->
                       SSP.AST.Explicit (SSP.AST.Ident "x", wrap_type_in_both x))
                     curr_typ,
                   definition_body,
                   wrap_type_in_both (SSP.AST.NameTy name) );
             ]))
end

(* Whole-crate static analyses run once before printing items. *)
module StaticAnalysis (* : ANALYSIS *) = struct
  module FunctionDependency (* : ANALYSIS *) =
    [%functor_application
    Analyses.Function_dependency InputLanguage]

  module MutableVariables (* : ANALYSIS *) =
    [%functor_application
    Analyses.Mutable_variables InputLanguage]

  (* Only the mutable-variable results are kept around for the printer. *)
  type analysis_data = { mut_var : MutableVariables.analysis_data }

  (* Function dependencies are computed first; their result is the
     pre-data required by the mutable-variable analysis. *)
  let analyse items =
    let func_dep = FunctionDependency.analyse items in
    let mut_var =
      MutableVariables.analyse (func_dep : MutableVariables.pre_data) items
    in
    { mut_var }
end

(* Translation context threaded through the item printer. *)
module Context = struct
  type t = {
    current_namespace : string list;
        (* module path of the item currently being printed *)
    analysis_data : StaticAnalysis.analysis_data;
        (* results of the whole-crate static analyses *)
  }
end

let primitive_to_string (id : Ast.primitive_ident) : string =
  (* SSProve spellings for hax primitive identifiers. Dereferencing has no
     translation yet and renders as an inline TODO marker; casts rely on
     Coq inferring the target word size. *)
  match id with
  | Deref -> "(TODO: Deref)"
  | Cast -> "cast_int (WS2 := _)"
  | LogicalOp And -> "andb"
  | LogicalOp Or -> "orb"

open Phase_utils

(* Phase pipeline lowering the full Rust-feature AST into this backend's
   input language: unsupported features are rejected, mutation and control
   flow are rewritten into functional form (loops, local mutation, question
   marks, side effects), cycles are bundled, and the resulting feature set
   is coerced via [SubtypeToInputLanguage]. Order matters: each phase
   assumes the rewrites of the previous ones. *)
module TransformToInputLanguage =
  [%functor_application
    Phases.Reject.Unsafe(Features.Rust)
    |> Phases.Rewrite_local_self
    |> Phases.Reject.RawOrMutPointer
    |> Phases.And_mut_defsite
    |> Phases.Reconstruct_asserts
    |> Phases.Reconstruct_for_loops
    |> Phases.Direct_and_mut
    |> Phases.Reject.Arbitrary_lhs
    |> Phases.Drop_blocks
    |> Phases.Drop_match_guards
    |> Phases.Reject.Continue
    |> Phases.Drop_references
    |> Phases.Trivialize_assign_lhs
    |> Phases.Reconstruct_question_marks
    |> Side_effect_utils.Hoist
    |> Phases.Local_mutation
    (* |> Phases.State_passing_loop *)
    |> Phases.Reject.Continue
    |> Phases.Cf_into_monads
    |> Phases.Reject.EarlyExit
    (* |> Phases.Functionalize_loops *)
    |> Phases.Reject.As_pattern
    |> Phases.Reject.Dyn
    |> Phases.Reject.Trait_item_default
    |> Phases.Bundle_cycles
    |> Phases.Sort_items_namespace_wise
    |> SubtypeToInputLanguage
    |> Identity
  ]
  [@ocamlformat "disable"]

(* let token_list (tokens : string) : string list list = *)
(*   List.map ~f:(split_str ~on:"=") (split_str ~on:"," tokens) *)

(* let get_argument (s : string) (token_list : string list list) = *)
(*   List.find_map *)
(*     ~f:(function *)
(*       | [ v; a ] when String.equal (String.strip v) s -> Some a | _ -> None) *)
(*     token_list *)

(* let strip (x : string) = *)
(*   String.strip *)
(*     ?drop:(Some (function '\"' -> true | _ -> false)) *)
(*     (String.strip x) *)

(* let strip_or_error (err : string) (s : string option) span = *)
(*   match s with *)
(*   | Some x -> strip x *)
(*   | None -> Error.unimplemented ~details:err span *)

let pconcrete_ident (id : Ast.concrete_ident) : string =
  (* Final, policy-renamed name of a concrete identifier. *)
  let rendered = RenderId.render id in
  rendered.name

let plocal_ident (e : Local_ident.t) : string =
  (* Synthetic binders named "impl <something>" are not valid identifiers;
     replace them with a stable hash-based name (impl_<hash>) before
     rendering. All other locals render unchanged. *)
  let ident =
    match String.chop_prefix ~prefix:"impl " e.name with
    | None -> e
    | Some tail ->
        { e with name = "impl_" ^ Int.to_string ([%hash: string] tail) }
  in
  RenderId.local_ident ident

module Make
    (Attrs : Attrs.WITH_ITEMS)
    (Ctx : sig
      val ctx : Context.t
    end) =
struct
  open Ctx

  let pglobal_ident (id : Ast.global_ident) : string =
    (* Printed name of a global identifier. Projectors of concrete idents
       print like the ident itself; tuple types, constructors and fields
       are not implemented yet and render as TODO markers. *)
    match id with
    | `Projector (`Concrete cid) | `Concrete cid -> pconcrete_ident cid
    | `Primitive p_id -> primitive_to_string p_id
    | `TupleType _i -> "TODO (global ident) tuple type"
    | `TupleCons _i -> "TODO (global ident) tuple cons"
    | `Projector (`TupleField (_i, _j)) | `TupleField (_i, _j) ->
        "TODO (global ident) tuple field"
    (* refutation case: the remaining variants are statically impossible *)
    | _ -> .

  (* module TODOs_debug = struct *)
  (*   let __TODO_pat__ _ s = SSP.AST.Ident (s ^ " todo(pat)") *)
  (*   let __TODO_ty__ _ s : SSP.AST.ty = SSP.AST.NameTy (s ^ " todo(ty)") *)
  (*   let __TODO_item__ _ s = SSP.AST.Unimplemented (s ^ " todo(item)") *)

  (*   let __TODO_term__ _ s = *)
  (*     SSP.AST.Const (SSP.AST.Const_string (s ^ " todo(term)")) *)
  (* end *)

  module TODOs = struct
    (* Failure helpers for AST nodes this backend does not support yet:
       unsupported types, patterns and expressions abort with an
       unimplemented diagnostic at the given span, while unsupported items
       degrade gracefully to an [Unimplemented] placeholder node. *)
    let __TODO_ty__ span msg : SSP.AST.ty =
      Error.unimplemented ~details:("[ty] node " ^ msg) span

    let __TODO_pat__ span msg =
      Error.unimplemented ~details:("[pat] node " ^ msg) span

    let __TODO_term__ span msg =
      Error.unimplemented ~details:("[expr] node " ^ msg) span

    let __TODO_item__ _span descr = SSP.AST.Unimplemented (descr ^ " todo(item)")
  end

  open TODOs

  let pint_kind (k : Ast.int_kind) : SSP.AST.int_type =
    (* Translate hax integer kinds to the printer's representation. The
       printer's size constructors are named after the unsigned widths
       (U8..USize) regardless of signedness; the actual signedness is
       carried separately in [signed]. *)
    {
      size =
        (match k.size with
        | S8 -> U8
        | S16 -> U16
        | S32 -> U32
        | S64 -> U64
        | S128 -> U128
        | SSize -> USize);
      (* [==] here compares variant constant constructors, which are
         immediates, so this is a plain signedness-tag equality test. *)
      signed = Stdlib.(k.signedness == Signed);
    }

  let pliteral (e : Ast.literal) =
    (* Translate literal constants to printer constants; characters are
       emitted by code point, floats are not supported by this backend. *)
    match e with
    | Bool b -> SSP.AST.Const_bool b
    | Char c -> SSP.AST.Const_char (Char.to_int c)
    | Int { value; kind; _ } -> SSP.AST.Const_int (value, pint_kind kind)
    | String s -> SSP.AST.Const_string s
    | Float _ -> failwith "Float: todo"

  (* Operators rendered with special SSProve notation. Each entry maps a
     concrete ident to (arity, format pieces); the pieces are interleaved
     with the printed arguments, e.g. [ ""; " .+ "; "" ] prints the two
     operands around " .+ ". *)
  let operators =
    let c = Ast.Global_ident.of_name ~value:true in
    [
      (c Rust_primitives__hax__array_of_list, (3, [ ""; ".a["; "]<-"; "" ]));
      (c Core__ops__index__Index__index, (2, [ ""; ".a["; "]" ]));
      (c Core__ops__bit__BitXor__bitxor, (2, [ ""; " .^ "; "" ]));
      (c Core__ops__bit__BitAnd__bitand, (2, [ ""; " .& "; "" ]));
      (c Core__ops__bit__BitOr__bitor, (2, [ ""; " .| "; "" ]));
      (c Core__ops__arith__Add__add, (2, [ ""; " .+ "; "" ]));
      (c Core__ops__arith__Sub__sub, (2, [ ""; " .- "; "" ]));
      (c Core__ops__arith__Mul__mul, (2, [ ""; " .* "; "" ]));
      (c Core__ops__arith__Div__div, (2, [ ""; " ./ "; "" ]));
      (c Core__cmp__PartialEq__eq, (2, [ ""; " =.? "; "" ]));
      (c Core__cmp__PartialOrd__lt, (2, [ ""; " <.? "; "" ]));
      (c Core__cmp__PartialOrd__le, (2, [ ""; " <=.? "; "" ]));
      (c Core__cmp__PartialOrd__ge, (2, [ ""; " >=.? "; "" ]));
      (c Core__cmp__PartialOrd__gt, (2, [ ""; " >.? "; "" ]));
      (c Core__cmp__PartialEq__ne, (2, [ ""; " <> "; "" ]));
      (c Core__ops__arith__Rem__rem, (2, [ ""; " .% "; "" ]));
      (c Core__ops__bit__Shl__shl, (2, [ ""; " shift_left "; "" ]));
      (c Core__ops__bit__Shr__shr, (2, [ ""; " shift_right "; "" ]));
    ]
    |> Map.of_alist_exn (module Ast.Global_ident)

  (* Data attached to local identifiers by the mutable-variable analysis;
     the payload is a pair of string lists — presumably the "Lis"/"Iis"
     lists the name refers to. TODO(review): confirm their exact meaning
     against Analyses.Mutable_variables. *)
  module LocalIdentOrLisIis =
  StaticAnalysis.MutableVariables.LocalIdentOrData (struct
    type ty = string list * string list [@@deriving compare, sexp]
  end)

  (* Translate a hax type into a printer type. Tuples map to unit /
     identity / products depending on arity; arrows are curried right to
     left; a unary arrow from unit collapses to its codomain. *)
  let rec pty span (t : ty) : SSP.AST.ty =
    match t with
    | TBool -> SSP.AST.Bool
    | TChar -> __TODO_ty__ span "char"
    | TInt k -> SSP.AST.Int (pint_kind k)
    | TStr -> SSP.AST.NameTy "chString"
    | TApp { ident = `TupleType 0; args = []; _ } -> SSP.AST.Unit
    | TApp { ident = `TupleType 1; args = [ GType ty ]; _ } -> pty span ty
    | TApp { ident = `TupleType n; args; _ } when n >= 2 ->
        SSP.AST.Product (args_ty span args)
    | TApp { ident; args; _ } ->
        SSP.AST.AppTy (SSP.AST.NameTy (pglobal_ident ident), args_ty span args)
    | TArrow ([ TApp { ident = `TupleType 0; args = []; _ } ], output) ->
        pty span output
    | TArrow (inputs, output) ->
        List.fold_right ~init:(pty span output)
          ~f:(fun x y -> SSP.AST.Arrow (x, y))
          (List.map ~f:(pty span) inputs)
    | TFloat _ -> __TODO_ty__ span "pty: Float"
    (* Literal-length arrays carry the length verbatim; other lengths are
       printed as an is_pure-projected expression (no locals in scope). *)
    | TArray { typ; length = { e = Literal (Int { value; _ }); _ }; _ } ->
        SSP.AST.ArrayTy (pty span typ, value)
    | TArray { typ; length } ->
        SSP.AST.ArrayTy
          ( pty span typ,
            "(" ^ "is_pure" ^ " " ^ "("
            ^ SSP.term_to_string_with_paren
                (pexpr (Map.empty (module Local_ident)) false length)
                0
            ^ ")" ^ ")" )
        (* TODO: check int.to_string is correct! *)
    | TSlice { ty; _ } -> SSP.AST.SliceTy (pty span ty)
    | TParam i -> SSP.AST.NameTy (plocal_ident i)
    | TAssociatedType { item; _ } -> SSP.AST.NameTy (pconcrete_ident item)
    | TOpaque _ -> __TODO_ty__ span "pty: TAssociatedType/TOpaque"
    (* refutation case: the remaining variants are statically impossible *)
    | _ -> .

  (* Translates a list of generic arguments into SSProve types: type
     arguments go through [pty], const arguments are additionally wrapped
     in [both]; lifetimes are unsupported. *)
  and args_ty span (args : generic_value list) : SSP.AST.ty list =
    let convert_arg = function
      | GLifetime _ -> __TODO_ty__ span "lifetime"
      | GType typ -> pty span typ
      | GConst { typ; _ } ->
          SSPExtraDefinitions.wrap_type_in_both (pty span typ)
    in
    List.map args ~f:convert_arg

  (* Translates a hax pattern into an SSProve pattern. Arm order matters:
     the tuple special-cases of [PConstruct] must precede the record and
     generic constructor arms. *)
  and ppat (p : pat) : SSP.AST.pat =
    match p.p with
    | PWild -> SSP.AST.WildPat
    | PAscription { typ; pat; _ } ->
        SSP.AST.AscriptionPat (ppat pat, pty p.span typ)
    (* Bindings: mutability is currently ignored — both arms produce a plain
       identifier pattern (see the TODO below). *)
    | PBinding
        {
          mut = Immutable;
          mode = _;
          var;
          typ = _ (* we skip type annot here *);
          _;
        } ->
        SSP.AST.Ident (plocal_ident var)
    | PBinding
        {
          mut = Mutable _;
          mode = _;
          var;
          typ = _ (* we skip type annot here *);
          _;
        } ->
        SSP.AST.Ident (plocal_ident var) (* TODO Mutable binding ! *)
    | POr { subpats } -> SSP.AST.DisjunctivePat (List.map ~f:ppat subpats)
    | PArray _ -> __TODO_pat__ p.span "Parray?"
    (* Unit patterns are turned into wildcards; 1-tuples are unsupported. *)
    | PConstruct { constructor = `TupleCons 0; fields = []; _ } ->
        SSP.AST.WildPat (* UnitPat *)
    | PConstruct { constructor = `TupleCons 1; fields = [ _ ]; _ } ->
        __TODO_pat__ p.span "tuple 1"
    | PConstruct { constructor = `TupleCons _n; fields = args; _ } ->
        SSP.AST.TuplePat (List.map ~f:(fun { pat; _ } -> ppat pat) args)
    (* Record *)
    | PConstruct { is_record = true; _ } -> __TODO_pat__ p.span "record pattern"
    (* (\* SSP.AST.Ident (pglobal_ident name) *\) *)
    (* SSP.AST.RecordPat (pglobal_ident name, List.map ~f:(fun {field; pat} -> (pglobal_ident field, ppat pat)) args) *)
    (*       (\* SSP.AST.ConstructorPat (pglobal_ident name ^ "_case", [SSP.AST.Ident "temp"]) *\) *)
    (*       (\* List.map ~f:(fun {field; pat} -> (pat, SSP.AST.App (SSP.AST.Var (pglobal_ident field), [SSP.AST.Var "temp"]))) args *\) *)
    (* Enum *)
    (* Non-record constructors: all payload fields are grouped into one
       tuple sub-pattern (or none for nullary constructors). *)
    | PConstruct { constructor = name; fields = args; is_record = false; _ } ->
        SSP.AST.ConstructorPat
          ( pglobal_ident name,
            match args with
            | [] -> []
            | _ -> [ SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) args) ]
          )
    | PConstant { lit } -> SSP.AST.Lit (pliteral lit)
    | _ -> .

  (* and analyse_fset (data : StaticAnalysis.MutableVariables.analysis_data) items = *)
  (*   (object *)
  (*      inherit [_] expr_reduce as super *)
  (*      inherit [_] U.Reducers.expr_list_monoid as m (\* TODO: Raname into list monoid *\) *)
  (*      method visit_t _ _ = m#zero *)
  (*      (\* method visit_mutability (_f : string -> _ -> _) (ctx : string) _ = m#zero *\) *)
  (*      method visit_mutability (f : string -> _ -> _) (ctx : string) mu = *)
  (*        match mu with Mutable wit -> f ctx wit | _ -> m#zero *)

  (*      method! visit_PBinding env mut _ var _typ subpat = *)
  (*        m#plus *)
  (*          (match mut with *)
  (*           | Mutable _ -> *)
  (*             var.name *)
  (*           | Immutable -> *)
  (*             (\* Set.singleton (module U.TypedLocalIdent) (var, typ) *\) *)
  (*             "") *)
  (*          (Option.value_map subpat ~default:m#zero *)
  (*             ~f:(fst >> super#visit_pat env)) *)

  (*      method! visit_global_ident (env : string) (x : Global_ident.t) = *)
  (*        match x with *)
  (*        | `Projector (`Concrete cid) | `Concrete cid -> *)
  (*          (match Map.find data (Uprint.Concrete_ident_view.to_definition_name cid) with *)
  (*           | Some (x,_) -> Set.of_list (module LocalIdent) x *)
  (*           | _ -> m#zero) *)
  (*        | _ -> m#zero *)

  (*      method visit_expr (env : string) e = [(e, env)] (\* :: super#visit_expr f e *\) *)
  (*   end) *)
  (*   #visit_expr *)
  (*     "" *)

  (* Translates a hax expression into an SSProve term.

     [env] maps each local identifier to the analysis witnesses collected for
     it (used when typing loop/let bodies via [both_type_expr]).
     [add_solve] is threaded into sub-translations; from the code it is set to
     [true] for bodies of lets, match arms and lambdas, and [false] for most
     sub-expressions — presumably controlling insertion of a `solve`-style
     wrapper downstream (see the commented-out block below); confirm against
     [SSPExtraDefinitions]. *)
  and pexpr (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)
      (add_solve : bool) (e : expr) : SSP.AST.term =
    let span = e.span in
    (* (match (add_solve, e.e) with *)
    (* | ( true, *)
    (*     ( Construct { is_record = true; _ } *)
    (*     | If _ (\* | Match _ *\) | Literal _ *)
    (*     | Construct { constructor = `TupleCons _; _ } *)
    (*     | App _ | GlobalVar _ | LocalVar _ ) ) -> *)
    (*     fun x -> x (\* SSP.AST.App (SSP.AST.Var "solve_lift", [ x ]) *\) *)
    (* | _ -> fun x -> x) *)
    match e.e with
    (* Literals are lifted with [ret_both] and annotated with their type. *)
    | Literal lit ->
        SSP.AST.App
          ( SSP.AST.Var "ret_both",
            [ SSP.AST.TypedTerm (SSP.AST.Const (pliteral lit), pty span e.typ) ]
          )
    | LocalVar local_ident -> SSP.AST.NameTerm (plocal_ident local_ident)
    (* The unit value, whether written as a variable or a construct. *)
    | GlobalVar (`TupleCons 0)
    | Construct { constructor = `TupleCons 0; fields = []; _ } ->
        SSP.AST.App (SSP.AST.Var "ret_both", [ SSPExtraDefinitions.unit_term ])
    | GlobalVar global_ident -> SSP.AST.Var (pglobal_ident global_ident)
    (* Tuple-field projection applications are not supported yet. *)
    | App
        {
          f = { e = GlobalVar (`Projector (`TupleField (i, j))); _ };
          args = [ _ ];
          _;
        } ->
        (* SSP.AST.App (SSP.AST.Var (Int.to_string i), [ SSP.AST.Var (Int.to_string j) ]) *)
        __TODO_term__ span "app global vcar projector tuple"
    (* Application to the unit value is translated as the function alone. *)
    | App
        {
          f;
          args =
            [
              {
                e =
                  ( GlobalVar (`TupleCons 0)
                  | Construct { constructor = `TupleCons 0; fields = []; _ } );
              };
            ];
          _;
        } ->
        (pexpr env false) f
    (* Known operators are rendered with the format strings from the
       [operators] table; arity is checked against the argument count. *)
    | App { f = { e = GlobalVar x; _ }; args; _ } when Map.mem operators x ->
        let arity, op = Map.find_exn operators x in
        if List.length args <> arity then failwith "Bad arity";
        let args =
          List.map
            ~f:(fun x -> SSP.AST.Value ((pexpr env false) x, true, 0))
            args
        in
        SSP.AST.AppFormat (op, args)
    (* | App { f = { e = GlobalVar x }; args } -> *)
    (*    __TODO_term__ span "GLOBAL APP?" *)
    | App { f; args; _ } ->
        let base = (pexpr env false) f in
        let args = List.map ~f:(pexpr env false) args in
        SSP.AST.App (base, args)
    (* A missing else-branch is rendered as the unit literal. *)
    | If { cond; then_; else_ } ->
        SSPExtraDefinitions.ifb
          ( (pexpr env false) cond,
            (pexpr env false) then_,
            Option.value_map else_ ~default:(SSP.AST.Literal "()")
              ~f:(pexpr env false) )
    | Array l -> SSP.AST.Array (List.map ~f:(pexpr env add_solve) l)
    (* Let: analyse the bound expression, extend the environment with its
       witnesses for every variable of the pattern (plus the identifiers
       themselves for non-monadic mutable bindings), then emit [letb]. *)
    | Let { lhs; rhs; body; monadic } ->
        let extra_set, _extra_env =
          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env rhs
        in
        let new_env =
          extend_env env
            (Map.of_alist_exn
               (module Local_ident)
               (List.map
                  ~f:(fun v -> (v, extra_set))
                  (Set.to_list (U.Reducers.variables_of_pat lhs))))
        in
        let new_env =
          match (monadic, is_mutable_pat lhs) with
          | None, true ->
              extend_env new_env
                (Map.of_alist_exn
                   (module Local_ident)
                   (List.map
                      ~f:(fun v -> (v, [ LocalIdentOrLisIis.W.Identifier v ]))
                      (Set.to_list (U.Reducers.variables_of_pat lhs))))
          | _, _ -> new_env
        in
        SSPExtraDefinitions.letb
          {
            pattern = ppat lhs;
            mut = is_mutable_pat lhs;
            value = (pexpr env false) rhs;
            body = (pexpr new_env add_solve) body;
            value_typ =
              (match monadic with
              | Some (MException typ, _) -> pty span typ
              | Some (MResult typ, _) -> pty span typ
              | _ ->
                  SSP.AST.WildTy
                  (* TODO : What should the correct type be here? `lhs.span lhs.typ` *));
            monad_typ =
              Option.map
                ~f:(fun (m, _) ->
                  match m with
                  | MException typ -> SSP.AST.Exception (pty span typ)
                  | MResult typ -> SSP.AST.Result (pty span typ)
                  | MOption -> SSP.AST.Option)
                monadic;
          }
    | EffectAction _ -> . (* __TODO_term__ span "monadic action" *)
    (* A single-arm match on a struct constructor is desugared into a let. *)
    | Match
        {
          scrutinee;
          arms =
            [
              {
                arm =
                  {
                    arm_pat =
                      {
                        p =
                          PConstruct
                            {
                              fields = [ { pat; _ } ];
                              is_record = false;
                              is_struct = true;
                              _;
                            };
                        _;
                      };
                    body;
                  };
                _;
              };
            ];
        } ->
        (* Record match expressions *)
        (* (pexpr env true) body *)
        SSPExtraDefinitions.letb
          {
            pattern = ppat pat;
            mut = false;
            value = (pexpr env false) scrutinee;
            body = (pexpr env true) body;
            value_typ = pty pat.span pat.typ;
            monad_typ = None;
          }
    (* General match: enum arms are matched via their "<name>_case" pattern;
       when the constructor has a payload, the payload tuple is re-bound with
       a [letb] so the arm body sees properly typed components. *)
    | Match { scrutinee; arms } ->
        SSPExtraDefinitions.matchb
          ( (pexpr env false) scrutinee,
            List.map
              ~f:(fun { arm = { arm_pat; body }; _ } ->
                match arm_pat.p with
                | PConstruct
                    {
                      constructor = name;
                      fields = args;
                      is_record = false;
                      is_struct = false;
                    } -> (
                    let arg_tuple =
                      SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) args)
                    in
                    ( SSP.AST.ConstructorPat
                        ( pglobal_ident name ^ "_case",
                          match args with [] -> [] | _ -> [ arg_tuple ] ),
                      match
                        (args, SSPExtraDefinitions.pat_as_expr arg_tuple)
                      with
                      | _ :: _, Some (redefine_pat, redefine_expr) ->
                          SSPExtraDefinitions.letb
                            {
                              pattern = redefine_pat (* TODO *);
                              mut = false;
                              value =
                                SSP.AST.App
                                  ( SSP.AST.Var "ret_both",
                                    [
                                      SSP.AST.TypedTerm
                                        ( redefine_expr,
                                          SSP.AST.Product
                                            (List.map
                                               ~f:(fun x ->
                                                 pty arm_pat.span x.pat.typ)
                                               args) );
                                    ] );
                              body = (pexpr env true) body;
                              value_typ =
                                SSP.AST.Product
                                  (List.map
                                     ~f:(fun x -> pty arm_pat.span x.pat.typ)
                                     args);
                              monad_typ = None;
                            }
                      | _, _ -> (pexpr env true) body ))
                | _ -> (ppat arm_pat, (pexpr env true) body))
              arms )
    | Ascription _ -> __TODO_term__ span "asciption"
    (* Tuples: 1-tuples unwrap; n-tuples become [prod_b (…)]. *)
    | Construct { constructor = `TupleCons 1; fields = [ (_, e) ]; _ } ->
        (pexpr env false) e
    | Construct { constructor = `TupleCons _n; fields; _ } ->
        SSP.AST.App
          ( SSP.AST.Var "prod_b",
            [ SSP.AST.Tuple (List.map ~f:(snd >> pexpr env false) fields) ] )
    (* Record construction, without and with a functional-update base. *)
    | Construct { is_record = true; constructor; fields; base = None; _ } ->
        SSP.AST.RecordConstructor
          ( "t_" ^ pglobal_ident constructor,
            List.map
              ~f:(fun (f, e) -> (pglobal_ident f, (pexpr env false) e))
              fields )
    | Construct { is_record = true; constructor; fields; base = Some (x, _); _ }
      ->
        SSP.AST.RecordUpdate
          ( pglobal_ident constructor,
            (pexpr env false) x,
            List.map
              ~f:(fun (f, e) -> (pglobal_ident f, (pexpr env false) e))
              fields )
    (* TODO: Is there only 1 field? *)
    | Construct { constructor; fields = [ (_f, e) ]; _ } ->
        SSP.AST.App
          (SSP.AST.Var (pglobal_ident constructor), [ (pexpr env add_solve) e ])
    | Construct { constructor; fields; _ } ->
        (* __TODO_term__ span "constructor" *)
        SSP.AST.App
          ( SSP.AST.Var (pglobal_ident constructor),
            List.map ~f:(snd >> pexpr env add_solve) fields )
    | Closure { params; body; _ } ->
        SSP.AST.Lambda
          ( List.map ~f:ppat params,
            (pexpr (extend_env_with_params env params) add_solve) body )
    | MacroInvokation { macro; _ } ->
        Error.raise
        @@ {
             kind = UnsupportedMacro { id = [%show: Ast.global_ident] macro };
             span = e.span;
           }
    | Assign _ ->
        SSP.AST.Const (SSP.AST.Const_string ("assign" ^ " todo(term)"))
    (* __TODO_term__ span "assign" *)
    (* A stateless loop is desugared into the same loop with an explicit
       unit state (init and binding pattern are both unit), then re-translated
       through the stateful arms below. *)
    | Loop { body; kind; state = None; label; witness } ->
        (pexpr env false)
          {
            e =
              Loop
                {
                  body;
                  kind;
                  state =
                    Some
                      {
                        init =
                          {
                            e =
                              Construct
                                {
                                  is_record = false;
                                  is_struct = false;
                                  base = None;
                                  constructor = `TupleCons 0;
                                  fields = [];
                                };
                            span = Span.dummy ();
                            typ = TApp { ident = `TupleType 0; args = [] };
                          };
                        bpat =
                          {
                            p =
                              PConstruct
                                {
                                  constructor = `TupleCons 0;
                                  fields = [];
                                  is_record = false;
                                  is_struct = false;
                                };
                            span = Span.dummy ();
                            typ = TApp { ident = `TupleType 0; args = [] };
                          };
                        witness =
                          Features.On.state_passing_loop
                          (* state_passing_loop *);
                      };
                  label;
                  witness;
                  control_flow = None;
                  (* TODO? *)
                };
            typ = e.typ;
            span = e.span;
          }
    (* Index loop: becomes [foldi_both start end (fun var => ssp (fun state
       => body)) init], with loop variable and state variables registered in
       the environment with placeholder witnesses. *)
    | Loop
        {
          body;
          kind = ForIndexLoop { start; end_; var; _ };
          state = Some { init; bpat; _ };
          _;
        } ->
        SSP.AST.App
          ( SSP.AST.Var "foldi_both",
            [
              (pexpr env false) start;
              (pexpr env false) end_;
              SSP.AST.Lambda
                ( [
                    (* SSP.AST.Ident "{L I _ _}";  *)
                    SSP.AST.Ident (plocal_ident var);
                  ],
                  SSP.AST.App
                    ( SSP.AST.Var "ssp",
                      [
                        SSP.AST.Lambda
                          ( [ ppat bpat ],
                            both_type_expr
                              (extend_env env
                                 (Map.of_alist_exn
                                    (module Local_ident)
                                    ([
                                       ( var,
                                         [
                                           LocalIdentOrLisIis.W.Data
                                             ( [ plocal_ident var ^ "?" ],
                                               [ plocal_ident var ^ "?" ] );
                                         ] );
                                     ]
                                    @ List.map
                                        ~f:(fun v ->
                                          ( v,
                                            [
                                              LocalIdentOrLisIis.W.Data
                                                ( [ plocal_ident v ^ "!" ],
                                                  [ plocal_ident v ^ "!" ] );
                                            ] ))
                                        (vars_from_pat bpat))))
                              true [] body );
                      ] ) );
              (pexpr env false) init;
            ] )
    (* Iterator loop: becomes [foldi_both_list it (fun pat => ssp (fun state
       => body)) init]; the state pattern's variables pick up the witnesses
       of both the initial state and the iterated expression. *)
    | Loop
        {
          body;
          kind = ForLoop { pat; it; _ };
          state = Some { init; bpat; _ };
          _;
        } ->
        let extra_set_init, _extra_env =
          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env init
        in
        let new_env =
          extend_env env
            (Map.of_alist_exn
               (module Local_ident)
               (List.map
                  ~f:(fun v -> (v, extra_set_init))
                  (Set.to_list (U.Reducers.variables_of_pat bpat))))
        in
        let extra_set_iter, _extra_env =
          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env it
        in
        let new_env =
          extend_env new_env
            (Map.of_alist_exn
               (module Local_ident)
               (List.map
                  ~f:(fun v -> (v, extra_set_iter))
                  (Set.to_list (U.Reducers.variables_of_pat bpat))))
        in
        SSP.AST.App
          ( SSP.AST.Var "foldi_both_list",
            [
              (pexpr env false) it;
              SSP.AST.Lambda
                ( [ (* SSP.AST.Ident "{L I _ _}";  *) ppat pat ],
                  SSP.AST.App
                    ( SSP.AST.Var "ssp",
                      [
                        SSP.AST.Lambda
                          ( [ ppat bpat ],
                            both_type_expr new_env true
                              (extra_set_iter @ extra_set_init)
                              body );
                      ] ) );
              (pexpr env false) init;
            ] )
    | Loop _ ->
        SSP.AST.Const (SSP.AST.Const_string ("other loop" ^ " todo(term)"))
    (* __TODO_term__ span "other loop" *)
    (* | Break { e; _ } -> *)
    (*     SSP.AST.Const (SSP.AST.Const_string ("break" ^ " todo(term)")) *)
    (*     (* __TODO_term__ span "break" *) *)
    | _ -> .

  (* All local identifiers bound by a pattern, as a list. *)
  and vars_from_pat (p : pat) : Local_ident.t list =
    Set.to_list (U.Reducers.variables_of_pat p)

  (* Builds the initial witness environment for function parameters: every
     variable bound by the [i]-th parameter pattern is mapped to the
     placeholder witness ([L<i+1>], [I<i+1>]). *)
  and env_from_param (params : pat list) :
      LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =
    let witness_for position var =
      let index = Int.to_string (position + 1) in
      (var, [ LocalIdentOrLisIis.W.Data ([ "L" ^ index ], [ "I" ^ index ]) ])
    in
    params
    |> List.concat_mapi ~f:(fun i pat ->
           List.map ~f:(witness_for i) (vars_from_pat pat))
    |> Map.of_alist_exn (module Local_ident)

  (* Merges two witness environments; when a variable occurs in both, its
     witness lists are concatenated (left map first). *)
  and extend_env (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)
      (env_ext : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t) :
      LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =
    let combine ~key:_ existing incoming = existing @ incoming in
    Map.merge_skewed env env_ext ~combine
  (* TODO: Just combine values? Should do this as sets! *)

  (* Extends [env] with placeholder witnesses for every variable bound by
     the given parameter patterns. *)
  and extend_env_with_params
      (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)
      (params : pat list) : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =
    env_from_param params |> extend_env env

  (* Runs the mutable-variable analysis on [e] and splits the witnesses
     (analysis results followed by [extra_set]) into plain identifiers and
     the flattened first/second components of the [Data] payloads. Returns
     [(identifiers, lis, iis, new_env)]. *)
  and analyse_env_of_expr
      (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t) (e : expr)
      extra_set =
    let expr_env, new_env =
      LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env e
    in
    let witnesses = expr_env @ extra_set in
    let identifiers =
      List.filter_map witnesses ~f:(function
        | Identifier x -> Some x
        | Data _ -> None)
    in
    let data =
      List.filter_map witnesses ~f:(function
        | Data d -> Some d
        | Identifier _ -> None)
    in
    let lis = List.concat_map data ~f:fst in
    let iis = List.concat_map data ~f:snd in
    (identifiers, lis, iis, new_env)

  (* Translates [e] and annotates the result with its type wrapped in [both].

     The environment analysis is still run for its consistency with the other
     call sites, but its results are currently discarded — bind them with
     underscores so the compiler does not warn about unused variables and the
     intent is explicit. *)
  and both_type_expr (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)
      (add_solve : bool) (extra_set : LocalIdentOrLisIis.W.t list) (e : expr) =
    let _identifiers, _lis, _iis, _new_env =
      analyse_env_of_expr env e extra_set
    in
    SSP.AST.TypedTerm
      ( (pexpr env add_solve) e,
        SSPExtraDefinitions.wrap_type_in_both (pty e.span e.typ) )

  (* Whether a pattern binds (possibly nested inside a tuple) at least one
     mutable variable. Disjunctive and array patterns are conservatively
     reported as immutable. *)
  and is_mutable_pat (pat : pat) =
    match pat.p with
    | PBinding { mut = Mutable _; _ } -> true
    | PBinding _ -> false
    | PAscription { pat; _ } -> is_mutable_pat pat
    (* A tuple is mutable as soon as one of its components is. *)
    | PConstruct { constructor = `TupleCons _; fields = args; _ } ->
        List.exists args ~f:(fun field -> is_mutable_pat field.pat)
    | PConstruct _ | PWild | PArray _ | PConstant _ -> false
    | POr _ -> false (* TODO? consider the sub-patterns *)
    | _ -> .

  (* Translates a generic parameter into an implicit SSProve argument: type
     parameters are referred to by their own name, const parameters by their
     [both]-wrapped type. *)
  let pgeneric_param_as_argument span
      ({ ident; kind; _ } : AST.generic_param) : SSP.AST.argument =
    let argument_type =
      match kind with
      | GPType -> SSP.AST.NameTy (plocal_ident ident)
      | GPConst { typ = t } -> SSPExtraDefinitions.wrap_type_in_both (pty span t)
      | _ -> .
    in
    SSP.AST.Implicit (SSP.AST.Ident (plocal_ident ident), argument_type)

  (* Translates a generic constraint into SSProve arguments: a trait bound
     becomes an anonymous typeclass argument; associated-type projections are
     not supported yet and produce no argument.

     The [GCProjection] arm previously destructured all three record fields
     without using them, which triggers unused-variable warnings; a wildcard
     makes the intentional discard explicit. *)
  let pgeneric_constraints_as_argument span :
      generic_constraint -> SSP.AST.argument list = function
    | GCType { goal = { trait; args }; _ } ->
        [
          SSP.AST.Typeclass
            ( None,
              SSP.AST.AppTy
                ( SSP.AST.NameTy (pconcrete_ident trait),
                  List.map
                    ~f:(function
                      | GType typ -> pty span typ
                      | GConst { typ; _ } ->
                          SSPExtraDefinitions.wrap_type_in_both (pty span typ)
                      | _ -> .)
                    args ) );
        ]
    | GCProjection _ ->
        (* Projections of an associated type are not yet supported
           (issue #549); silently emit no constraint for now. *)
        []
    | _ -> .

  (* Translates a full generics clause: first the parameters themselves,
     then the arguments induced by their constraints. *)
  let pgeneric (span : Ast.span) (generics : AST.generics) :
      SSP.AST.argument list =
    let params = List.map generics.params ~f:(pgeneric_param_as_argument span) in
    let constraints =
      List.concat_map generics.constraints
        ~f:(pgeneric_constraints_as_argument span)
    in
    params @ constraints

  (* Uncurries an arrow type: returns the list of argument types (in order)
     and the final result type. A non-arrow yields no arguments. *)
  let rec split_arrow_in_args (a : SSP.AST.ty) : SSP.AST.ty list * SSP.AST.ty =
    match a with
    | SSP.AST.Arrow (domain, codomain) ->
        let rest, result = split_arrow_in_args codomain in
        (domain :: rest, result)
    | non_arrow -> ([], non_arrow)

  (* Recursively wraps every result position of an (uncurried) arrow type in
     [both], threading a counter [i] through the recursion.

     The fold's accumulator is [(counter, partial_arrow)]: each argument type
     is processed with the counter produced by the previous one, and the
     processed arguments are re-folded into a left-nested arrow. When the
     type has no arguments ([t = None]) the counter is incremented —
     presumably counting the leaf types that get wrapped; confirm against
     [wrap_type_in_enumerator] below, which consumes the count. *)
  let rec wrap_type_in_enumerator_helper (i : int) (a : SSP.AST.ty) =
    let l, r = split_arrow_in_args a in
    let size, t =
      List.fold_left
        ~f:(fun (yi, ys) x ->
          let size, x_val = wrap_type_in_enumerator_helper yi x in
          ( size,
            match ys with
            | Some v -> Some (SSP.AST.Arrow (v, x_val))
            | None -> Some x_val ))
        ~init:(i, None) l
    in
    match t with
    | Some v ->
        (size, SSP.AST.Arrow (v, SSPExtraDefinitions.wrap_type_in_both r))
    | None -> (size + 1, SSPExtraDefinitions.wrap_type_in_both r)

  (* Wraps a (possibly arrow) type in [both] annotations, returning the leaf
     count from the helper together with the rebuilt type. Only arities 0 and
     1 are supported: the annotation of the last type is discarded and
     replaced by a fresh [both] wrapper (see comment below); anything else is
     reported as unimplemented. *)
  let wrap_type_in_enumerator (a : SSP.AST.ty) =
    let size, v = wrap_type_in_enumerator_helper 0 a in
    (* Throw away anotation of last type, and replace with accumulation of all locations and imports *)
    let xs, a =
      match v with
      | SSP.AST.Arrow (x, SSP.AST.AppTy (SSP.AST.NameTy _, [ a ])) -> ([ x ], a)
      | SSP.AST.AppTy (SSP.AST.NameTy _, [ a ]) -> ([], a)
      | _ ->
          Error.unimplemented
            ~details:
              "SSProve: TODO: wrap_type_in_enumerator encountered an \
               unexpected type"
            (Span.dummy ())
    in
    (* Re-curry the collected argument types around the re-wrapped result. *)
    let ret_ty =
      List.fold
        ~init:(SSPExtraDefinitions.wrap_type_in_both a)
        ~f:(fun y x -> SSP.AST.Arrow (x, y))
        xs
    in
    (size, ret_ty)

  (* Translates an item, degrading to an [Unimplemented] placeholder instead
     of aborting when the backend raises a span-free diagnostic error. *)
  let rec pitem (e : AST.item) : SSP.AST.decl list =
    match pitem_unwrapped e with
    | decls -> decls
    | exception Diagnostics.SpanFreeError.Exn _kind ->
        [ SSP.AST.Unimplemented "item error backend" ]

  and pitem_unwrapped (e : AST.item) : SSP.AST.decl list =
    let span = e.span in
    let decls_from_item =
      match e.v with
      | Fn { name = f_name; generics; body; params } ->
          [
            (let args, ret_typ =
               lift_definition_type_to_both f_name
                 (pgeneric span generics
                 @ List.map
                     ~f:(fun { pat; typ; _ } ->
                       SSP.AST.Explicit (ppat pat, pty span typ))
                     params)
                 (pty span body.typ)
             in
             if Attrs.lemma e.attrs then
               SSP.AST.Lemma
                 ( pconcrete_ident f_name,
                   args,
                   (pexpr
                      (extend_env_with_params
                         (Map.empty (module Local_ident))
                         (List.map ~f:(fun { pat; _ } -> pat) params))
                      true)
                     (Option.value ~default:body
                        (Attrs.associated_expr Ensures e.attrs)) )
             else
               SSP.AST.Equations
                 ( pconcrete_ident f_name,
                   args,
                   (pexpr
                      (extend_env_with_params
                         (Map.empty (module Local_ident))
                         (List.map ~f:(fun { pat; _ } -> pat) params))
                      true)
                     body,
                   ret_typ ));
          ]
      | TyAlias { name; generics; ty } ->
          let g = pgeneric span generics in
          [
            (if List.is_empty g then
               SSP.AST.Notation
                 ( "'" ^ pconcrete_ident name ^ "'",
                   SSP.AST.Type (pty span ty),
                   None )
             else
               SSP.AST.Definition
                 ( pconcrete_ident name,
                   g,
                   SSP.AST.Type (pty span ty),
                   SSP.AST.TypeTy ));
          ]
      (* record *)
      | Type
          {
            name;
            generics;
            variants = [ { name = _record_name; arguments; _ } ];
            is_struct = true;
          } ->
          [
            SSPExtraDefinitions.updatable_record
              ( pconcrete_ident name,
                pgeneric span generics,
                List.map
                  ~f:(fun (x, y) -> SSP.AST.Named (x, y))
                  (p_record_record span arguments) );
          ]
      (* enum *)
      | Type { name; generics; variants; _ } ->
          (* Define all record types in enums (no anonymous records) *)
          List.filter_map variants
            ~f:(fun { name = v_name; arguments; is_record; _ } ->
              if is_record then
                Some
                  (SSPExtraDefinitions.updatable_record
                     ( (match
                          String.chop_prefix ~prefix:"C_"
                            (pconcrete_ident v_name)
                        with
                       | Some name -> "t_" ^ name
                       | _ -> failwith "Incorrect prefix of record name in enum"),
                       pgeneric span generics,
                       List.map
                         ~f:(fun (x, y) -> SSP.AST.Named (x, y))
                         (p_record_record span arguments) ))
              else None)
          @ [
              SSPExtraDefinitions.both_enum
                ( pconcrete_ident name,
                  pgeneric span generics,
                  List.map variants
                    ~f:(fun { name = v_name; arguments; is_record; _ } ->
                      if is_record then
                        SSP.AST.InductiveCase
                          ( pconcrete_ident v_name,
                            SSP.AST.RecordTy
                              ( (match
                                   String.chop_prefix ~prefix:"C_"
                                     (pconcrete_ident v_name)
                                 with
                                | Some name -> "t_" ^ name
                                | _ ->
                                    failwith
                                      "Incorrect prefix of record name in enum"),
                                p_record_record span arguments ) )
                      else
                        match arguments with
                        | [] -> SSP.AST.BaseCase (pconcrete_ident v_name)
                        | [ (_arg_name, arg_ty, _attr) ] ->
                            SSP.AST.InductiveCase
                              (* arg_name = ?? *)
                              (pconcrete_ident v_name, pty span arg_ty)
                        | _ ->
                            SSP.AST.InductiveCase
                              ( pconcrete_ident v_name,
                                SSP.AST.Product
                                  (List.map
                                     ~f:((fun (_x, y, _z) -> y) >> pty span)
                                     arguments) )) );
            ]
      | IMacroInvokation { macro; argument; _ } -> (
          let unsupported () =
            let id = [%show: concrete_ident] macro in
            Error.raise { kind = UnsupportedMacro { id }; span = e.span }
          in
          match RenderId.render macro with
          | { path = "hacspec_lib" :: _; name } -> (
              match name with
              | "public_nat_mod" ->
                  let open Hacspeclib_macro_parser in
                  let o : PublicNatMod.t =
                    PublicNatMod.parse argument |> Result.ok_or_failwith
                  in
                  [
                    SSP.AST.Notation
                      ( "'" ^ "t_" ^ o.type_name ^ "'",
                        SSP.AST.Type
                          (SSP.AST.NatMod
                             ( o.type_of_canvas,
                               o.bit_size_of_field,
                               o.modulo_value )),
                        None );
                    SSP.AST.Definition
                      ( o.type_name,
                        [],
                        SSP.AST.Var "id",
                        SSP.AST.Arrow
                          ( SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.type_name)),
                            SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.type_name)) ) );
                  ]
              | "bytes" ->
                  let open Hacspeclib_macro_parser in
                  let o : Bytes.t =
                    Bytes.parse argument |> Result.ok_or_failwith
                  in
                  [
                    SSP.AST.Notation
                      ( "'" ^ "t_" ^ o.bytes_name ^ "'",
                        SSP.AST.Type
                          (SSP.AST.ArrayTy
                             ( SSP.AST.Int { size = SSP.AST.U8; signed = false },
                               (* int_of_string *) o.size )),
                        None );
                    SSP.AST.Definition
                      ( o.bytes_name,
                        [],
                        SSP.AST.Var "id",
                        SSP.AST.Arrow
                          ( SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.bytes_name)),
                            SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.bytes_name)) ) );
                  ]
              | "unsigned_public_integer" ->
                  let open Hacspeclib_macro_parser in
                  let o =
                    UnsignedPublicInteger.parse argument
                    |> Result.ok_or_failwith
                  in
                  [
                    SSP.AST.Notation
                      ( "'" ^ "t_" ^ o.integer_name ^ "'",
                        SSP.AST.Type
                          (SSP.AST.ArrayTy
                             ( SSP.AST.Int { size = SSP.AST.U8; signed = false },
                               Int.to_string ((o.bits + 7) / 8) )),
                        None );
                    SSP.AST.Definition
                      ( o.integer_name,
                        [],
                        SSP.AST.Var "id",
                        SSP.AST.Arrow
                          ( SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.integer_name)),
                            SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.integer_name)) ) );
                  ]
              | "public_bytes" ->
                  let open Hacspeclib_macro_parser in
                  let o : Bytes.t =
                    Bytes.parse argument |> Result.ok_or_failwith
                  in
                  let typ =
                    SSP.AST.ArrayTy
                      ( SSP.AST.Int { size = SSP.AST.U8; signed = false },
                        (* int_of_string *) o.size )
                  in
                  [
                    SSP.AST.Notation
                      ("'" ^ "t_" ^ o.bytes_name ^ "'", SSP.AST.Type typ, None);
                    SSP.AST.Definition
                      ( o.bytes_name,
                        [],
                        SSP.AST.Var "id",
                        SSP.AST.Arrow
                          ( SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.bytes_name)),
                            SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.bytes_name)) ) );
                  ]
              | "array" ->
                  let open Hacspeclib_macro_parser in
                  let o : Array.t =
                    Array.parse argument |> Result.ok_or_failwith
                  in
                  let typ =
                    match o.typ with
                    | "U128" -> SSP.AST.U128
                    | "U64" -> SSP.AST.U64
                    | "U32" -> SSP.AST.U32
                    | "U16" -> SSP.AST.U16
                    | "U8" -> SSP.AST.U8
                    | _usize -> SSP.AST.U32 (* TODO: usize? *)
                  in
                  [
                    SSP.AST.Notation
                      ( "'" ^ "t_" ^ o.array_name ^ "'",
                        SSP.AST.Type
                          (SSP.AST.ArrayTy
                             ( SSP.AST.Int { size = typ; signed = false },
                               (* int_of_string *) o.size )),
                        None );
                    SSP.AST.Definition
                      ( o.array_name,
                        [],
                        SSP.AST.Var "id",
                        SSP.AST.Arrow
                          ( SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.array_name)),
                            SSPExtraDefinitions.wrap_type_in_both
                              (SSP.AST.NameTy ("t_" ^ o.array_name)) ) );
                  ]
              | _ -> unsupported ())
          | _ -> unsupported ())
      | Use { path; is_external; rename } ->
          let _ns_path = ctx.current_namespace in
          if is_external then []
          else
            [ SSP.AST.Require (None, (* ns_crate:: ns_path @ *) path, rename) ]
      | HaxError s -> [ __TODO_item__ span s ]
      | NotImplementedYet -> [ __TODO_item__ span "Not implemented yet?" ]
      | Alias _ -> [ __TODO_item__ span "Not implemented yet? alias" ]
      | Trait { name; items; generics } ->
          [
            SSP.AST.Class
              ( pconcrete_ident name,
                (match pgeneric span generics with
                | SSP.AST.Implicit (x, y) :: xs -> SSP.AST.Explicit (x, y) :: xs
                | x -> x),
                List.concat_map
                  ~f:(fun x ->
                    match x.ti_v with
                    | TIFn fn_ty ->
                        let size, value =
                          wrap_type_in_enumerator (pty x.ti_span fn_ty)
                        in
                        [
                          SSP.AST.Named
                            ( pconcrete_ident x.ti_ident,
                              SSP.AST.Forall ([], [], value) );
                        ]
                    | TIType impl_idents ->
                        SSP.AST.Named
                          (pconcrete_ident x.ti_ident, SSP.AST.TypeTy)
                        :: List.map
                             ~f:(fun { goal = tr; _ } ->
                               SSP.AST.Coercion
                                 ( pconcrete_ident x.ti_ident ^ "_"
                                   ^ pconcrete_ident tr.trait,
                                   SSP.AST.AppTy
                                     ( SSP.AST.NameTy (pconcrete_ident tr.trait),
                                       [
                                         SSP.AST.NameTy
                                           (pconcrete_ident x.ti_ident);
                                       ] ) ))
                             impl_idents
                    | _ -> .)
                  items );
          ]
      | Impl { generics; self_ty; of_trait = name, gen_vals; items } ->
          [
            SSP.AST.ProgramInstance
              ( pconcrete_ident name,
                pgeneric span generics,
                pty span self_ty,
                args_ty span gen_vals,
                SSP.AST.InstanceDecls
                  (List.concat_map
                     ~f:(fun x ->
                       match x.ii_v with
                       | IIFn { body; params } ->
                           [
                             (let args, ret_typ =
                                lift_definition_type_to_both x.ii_ident
                                  (List.map
                                     ~f:(fun { pat; typ; _ } ->
                                       SSP.AST.Explicit (ppat pat, pty span typ))
                                     params)
                                  (pty span body.typ)
                              in
                              SSP.AST.LetDef
                                ( pconcrete_ident x.ii_ident,
                                  args,
                                  (pexpr
                                     (extend_env_with_params
                                        (Map.empty (module Local_ident))
                                        (List.map
                                           ~f:(fun { pat; _ } -> pat)
                                           params))
                                     true)
                                    body,
                                  ret_typ ));
                           ]
                       | IIType { typ; _ } ->
                           [
                             SSP.AST.LetDef
                               ( pconcrete_ident x.ii_ident,
                                 [],
                                 SSP.AST.Type (pty span typ),
                                 SSP.AST.TypeTy );
                           ])
                     items) );
          ]
          @ [
              SSP.AST.HintUnfold (pconcrete_ident name, Some (pty span self_ty));
            ]
    in
    decls_from_item

  and new_arguments (arguments : SSP.AST.argument list) =
    (* Wrap the type of every explicit argument in [both]; implicit and
       typeclass arguments pass through unchanged.  The previous fold also
       threaded a count of explicit arguments, but it was discarded by
       [snd], and each step appended with [y @ [_]] (quadratic); a plain
       [List.map] is behaviourally identical and linear. *)
    List.map
      ~f:(fun arg ->
        match arg with
        | Implicit (p, t) -> SSP.AST.Implicit (p, t)
        | Explicit (p, t) ->
            SSP.AST.Explicit (p, SSPExtraDefinitions.wrap_type_in_both t)
        | Typeclass (so, t) -> SSP.AST.Typeclass (so, t))
      arguments

  and lift_definition_type_to_both (name : concrete_ident)
      (arguments : SSP.AST.argument list) (typ : SSP.AST.ty) :
      SSP.AST.argument list * SSP.AST.ty =
    (* Lift a definition's signature into [both]: the explicit argument
       types are wrapped by [new_arguments] and the return type by
       [both_return_type_from_name]. *)
    (new_arguments arguments, both_return_type_from_name name typ)

  and both_return_type_from_name _name typ =
    (* Return type of a lifted definition: simply the type wrapped in
       [both].  The definition's identifier is accepted so callers can
       pass it uniformly, but it is currently unused — renamed to [_name]
       to make the non-use explicit. *)
    SSPExtraDefinitions.wrap_type_in_both typ

  and p_record_record span arguments : (string * SSP.AST.ty) list =
    (* Print the fields of a record: each (name, type, attrs) triple is
       mapped to its printed name and printed type; attributes are
       dropped. *)
    List.map arguments ~f:(fun (field_name, field_ty, _field_attrs) ->
        (pconcrete_ident field_name, pty span field_ty))
end

(* Minimal interface exposed by the printer functor [Make]: translate one
   hax item into a list of Coq/SSProve declarations. *)
module type S = sig
  val pitem : AST.item -> SSP.AST.decl list
  (* val pgeneric : Ast.span -> AST.generics -> SSP.AST.argument list *)
end

(* Instantiate the printer functor for the attribute provider [M] and the
   printing context [ctx], packaged as a first-class module of type [S]. *)
let make (module M : Attrs.WITH_ITEMS) ctx =
  let module Context = struct
    let ctx = ctx
  end in
  (module Make (M) (Context) : S)

(* Render a list of declarations as newline-separated Coq source text. *)
let decls_to_string (decls : SSP.AST.decl list) : string =
  decls |> List.map ~f:SSP.decl_to_string |> String.concat ~sep:"\n"

(* Print a single item to SSProve declarations, instantiating a printer
   whose current namespace is derived from the item's own identifier. *)
let print_item m (analysis_data : StaticAnalysis.analysis_data)
    (item : AST.item) : SSP.AST.decl list =
  let ctx =
    { current_namespace = (RenderId.render item.ident).path; analysis_data }
  in
  let (module Print) = make m ctx in
  Print.pitem item

(* Strip each rendered item, drop the ones that came out empty, and join
   the remainder with blank lines. *)
let cleanup_item_strings strings =
  List.map ~f:String.strip strings
  |> List.filter ~f:(fun s -> not (String.is_empty s))
  |> String.concat ~sep:"\n\n"

(* module ConCert = struct *)
(*   let translate_concert_annotations *)
(*       (analysis_data : StaticAnalysis.analysis_data) (e : item) : *)
(*       SSP.AST.decl list = *)
(*     let (module Print) = *)
(*       make *)
(*         { *)
(*           current_namespace = U.Concrete_ident_view.to_namespace e.ident; *)
(*           analysis_data; *)
(*         } *)
(*     in *)
(*     match e.v with *)
(*     | Fn { name = f_name; generics; _ } -> *)
(*         List.concat_map *)
(*           ~f:(fun { kind; span } -> *)
(*             match kind with *)
(*             | Tool { path; tokens } -> ( *)
(*                 let token_list = token_list tokens in *)
(*                 match path with *)
(*                 | "hax::init" -> *)
(*                     let contract = *)
(*                       strip_or_error "contract argument missing" *)
(*                         (get_argument "contract" token_list) *)
(*                         e.span *)
(*                     in *)
(*                     [ *)
(*                       SSP.AST.Definition *)
(*                         ( "init_" ^ contract, *)
(*                           [ *)
(*                             SSP.AST.Explicit *)
(*                               (SSP.AST.Ident "chain", SSP.AST.NameTy "Chain"); *)
(*                             SSP.AST.Explicit *)
(*                               ( SSP.AST.Ident "ctx", *)
(*                                 SSP.AST.NameTy "ContractCallContext" ); *)
(*                             SSP.AST.Explicit *)
(*                               ( SSP.AST.Ident "st", *)
(*                                 SSP.AST.NameTy ("state_" ^ contract) ); *)
(*                           ], *)
(*                           SSP.AST.App *)
(*                             (SSP.AST.Var "ResultMonad.Ok", [ SSP.AST.Var "st" ]), *)
(*                           SSP.AST.AppTy *)
(*                             ( SSP.AST.NameTy "ResultMonad.result", *)
(*                               [ *)
(*                                 SSP.AST.NameTy ("state_" ^ contract); *)
(*                                 SSP.AST.NameTy "t_ParseError"; *)
(*                               ] ) ); *)
(*                     ] *)
(*                 | "hax::receive" -> *)
(*                     let contract = *)
(*                       strip_or_error "contract argument missing" *)
(*                         (get_argument "contract" token_list) *)
(*                         e.span *)
(*                     in *)
(*                     let name = *)
(*                       strip_or_error "name argument missing" *)
(*                         (get_argument "name" token_list) *)
(*                         e.span *)
(*                     in *)
(*                     let parameter = get_argument "parameter" token_list in *)
(*                     (\* let logger = get_argument "logger" token_list in *\) *)
(*                     (\* let payable = get_argument "payable" token_list in *\) *)
(*                     let param_instances, param_list, count, param_vars = *)
(*                       match parameter with *)
(*                       | Some x -> *)
(*                           ( [ *)
(*                               SSP.AST.ProgramInstance *)
(*                                 ( "t_HasReceiveContext", *)
(*                                   [], *)
(*                                   SSP.AST.NameTy ("t_" ^ strip x), *)
(*                                   [ *)
(*                                     SSP.AST.NameTy ("t_" ^ strip x); *)
(*                                     SSP.AST.Unit; *)
(*                                   ], *)
(*                                   SSP.AST.InstanceDecls *)
(*                                     [ *)
(*                                       SSP.AST.InlineDef *)
(*                                         ( "f_get", *)
(*                                           [ *)
(*                                             SSP.AST.Implicit *)
(*                                               ( SSP.AST.Ident "Ctx", *)
(*                                                 SSP.AST.WildTy ); *)
(*                                             SSP.AST.Implicit *)
(*                                               ( SSP.AST.Ident "L", *)
(*                                                 (SSP.AST.NameTy *)
(*                                                    "{fset Location}" *)
(*                                                   : SSP.AST.ty) ); *)
(*                                             SSP.AST.Implicit *)
(*                                               ( SSP.AST.Ident "I", *)
(*                                                 (SSP.AST.NameTy "Interface" *)
(*                                                   : SSP.AST.ty) ); *)
(*                                           ], *)
(*                                           SSP.AST.Var *)
(* "(solve_lift (@ret_both \ *)
   (*                                              (t_ParamType × t_Result Ctx \ *)
   (*                                              t_ParseError)) (tt, inr tt))", *)
(*                                           SSP.AST.WildTy ); *)
(*                                     ] ); *)
(*                               SSP.AST.ProgramInstance *)
(*                                 ( "t_Sized", *)
(*                                   [], *)
(*                                   SSP.AST.NameTy ("t_" ^ strip x), *)
(*                                   [ SSP.AST.NameTy ("t_" ^ strip x) ], *)
(*                                   SSP.AST.TermDef *)
(*                                     (SSP.AST.Lambda *)
(*                                        ([ SSP.AST.Ident "x" ], SSP.AST.Var "x")) *)
(*                                 ); *)
(*                             ], *)
(*                             [ *)
(*                               SSP.AST.Explicit *)
(*                                 ( SSP.AST.Ident "ctx", *)
(*                                   SSPExtraDefinitions.wrap_type_in_both "L0" *)
(*                                     "I0" *)
(*                                     (SSP.AST.NameTy ("t_" ^ strip x)) ); *)
(*                             ], *)
(*                             1, *)
(*                             [ SSP.AST.Var "ctx" ] ) *)
(*                       | _ -> ([], [], 0, []) *)
(*                     in *)
(*                     param_instances *)
(*                     @ [ *)
(*                         SSP.AST.Definition *)
(*                           ( "receive_" ^ contract ^ "_" ^ name, *)
(*                             Print.pgeneric span generics *)
(*                             @ List.map *)
(*                                 ~f:(fun x -> *)
(*                                   SSP.AST.Implicit *)
(*                                     ( SSP.AST.Ident x, *)
(*                                       (SSP.AST.NameTy "{fset Location}" *)
(*                                         : SSP.AST.ty) )) *)
(*                                 (List.map *)
(*                                    ~f:(fun i -> "L" ^ Int.to_string i) *)
(*                                    (List.range 0 (count + 1))) *)
(*                             @ List.map *)
(*                                 ~f:(fun x -> *)
(*                                   SSP.AST.Implicit *)
(*                                     ( SSP.AST.Ident x, *)
(*                                       (SSP.AST.NameTy "Interface" : SSP.AST.ty) *)
(*                                     )) *)
(*                                 (List.map *)
(*                                    ~f:(fun i -> "I" ^ Int.to_string i) *)
(*                                    (List.range 0 (count + 1))) *)
(*                             @ param_list *)
(*                             @ [ *)
(*                                 SSP.AST.Explicit *)
(*                                   ( SSP.AST.Ident "st", *)
(*                                     SSPExtraDefinitions.wrap_type_in_both *)
(*                                       ("L" ^ Int.to_string count) *)
(*                                       ("I" ^ Int.to_string count) *)
(*                                       (SSP.AST.NameTy ("state_" ^ contract)) ); *)
(*                                 (\* TODO: L, I *\) *)
(*                               ], *)
(*                             (\* Arguments *\) *)
(*                             SSP.AST.App *)
(*                               ( SSP.AST.Var (pconcrete_ident f_name) *)
(*                                 (\* contract *\), *)
(*                                 param_vars @ [ SSP.AST.Var "st" ] ), *)
(*                             SSPExtraDefinitions.wrap_type_in_both "_" "_" *)
(*                               (SSP.AST.NameTy *)
(*                                  ("t_Result ((v_A × state_" ^ contract *)
(*                                 ^ ")) (t_ParseError)")) ); *)
(*                         (\* TODO: L , I *\) *)
(*                       ] *)
(*                 | _ -> []) *)
(*             | _ -> []) *)
(*           e.attrs *)
(*     | Type { name; variants = [ _ ]; is_struct = true; _ } -> *)
(*         List.concat_map *)
(*           ~f:(fun { kind; _ } -> *)
(*             match kind with *)
(*             | Tool { path; tokens } when String.equal path "hax::contract_state" *)
(*               -> *)
(*                 let token_list = token_list tokens in *)
(*                 let contract = *)
(*                   strip_or_error "contract argument missing" *)
(*                     (get_argument "contract" token_list) *)
(*                     e.span *)
(*                 in *)
(*                 [ *)
(*                   SSP.AST.Definition *)
(*                     ( "state_" ^ contract, *)
(*                       [], *)
(*                       SSP.AST.Var (pconcrete_ident name), *)
(*                       SSP.AST.TypeTy ); *)
(*                 ] *)
(*             | _ -> []) *)
(*           e.attrs *)
(*     | _ -> [] *)

(*   let concert_contract_type_decls (items : item list) : SSP.AST.decl list list = *)
(*     let contract_items = *)
(*       List.filter_map *)
(*         ~f:(function *)
(*           | { kind = Tool { path; tokens }; _ } *)
(*             when String.equal path "hax::receive" -> *)
(*               let token_list = token_list tokens in *)
(*               let contract = *)
(*                 strip_or_error "contract argument missing" *)
(*                   (get_argument "contract" token_list) *)
(*                   (Span.dummy ()) *)
(*                 (\* TODO: carry span information *\) *)
(*               in *)
(*               let name = *)
(*                 strip_or_error "name argument missing" *)
(*                   (get_argument "name" token_list) *)
(*                   (Span.dummy ()) *)
(*                 (\* TODO: carry span information *\) *)
(*               in *)
(*               let parameter = get_argument "parameter" token_list in *)
(*               Some (contract, parameter, name) *)
(*           | _ -> None) *)
(*         (List.concat_map ~f:(fun x -> x.attrs) items) *)
(*     in *)
(*     if List.is_empty contract_items then [] *)
(*     else *)
(*       let contract_map = *)
(*         List.fold_left *)
(*           ~init:(Map.empty (module String)) *)
(*           ~f:(fun y (x_name, x_parameter, x_item) -> *)
(*             Map.set y ~key:x_name *)
(*               ~data: *)
(*                 (Option.value ~default:[] (Map.find y x_name) *)
(*                 @ [ (x_parameter, x_item) ])) *)
(*           contract_items *)
(*       in *)
(*       List.map *)
(*         ~f:(fun contract -> *)
(*           let receive_functions : (_ * string) list = *)
(*             Option.value ~default:[] (Map.find contract_map contract) *)
(*           in *)
(*           [ *)
(*             SSP.AST.Inductive *)
(*               ( "Msg_" ^ contract, *)
(*                 [], *)
(*                 List.map *)
(*                   ~f:(function *)
(*                     | Some param, x_item -> *)
(*                         SSP.AST.InductiveCase *)
(*                           ( "msg_" ^ contract ^ "_" ^ x_item, *)
(*                             SSP.AST.NameTy ("t_" ^ strip param) ) *)
(*                     | None, x_item -> *)
(*                         SSP.AST.BaseCase ("msg_" ^ contract ^ "_" ^ x_item)) *)
(*                   receive_functions ); *)
(*             SSP.AST.ProgramInstance *)
(*               ( "t_HasReceiveContext", *)
(*                 [], *)
(*                 SSP.AST.NameTy ("state_" ^ contract), *)
(*                 [ SSP.AST.NameTy ("state_" ^ contract); SSP.AST.Unit ], *)
(*                 SSP.AST.InstanceDecls *)
(*                   [ *)
(*                     SSP.AST.InlineDef *)
(*                       ( "f_get", *)
(*                         [ *)
(*                           SSP.AST.Explicit (SSP.AST.Ident "Ctx", SSP.AST.WildTy); *)
(*                           SSP.AST.Implicit *)
(*                             ( SSP.AST.Ident "L", *)
(*                               (SSP.AST.NameTy "{fset Location}" : SSP.AST.ty) ); *)
(*                           SSP.AST.Implicit *)
(*                             ( SSP.AST.Ident "I", *)
(*                               (SSP.AST.NameTy "Interface" : SSP.AST.ty) ); *)
(*                         ], *)
(*                         SSP.AST.Var *)
(* "(solve_lift (@ret_both (t_ParamType × t_Result Ctx \ *)
   (*                            t_ParseError)) (tt, inr tt))", *)
(*                         SSP.AST.WildTy ); *)
(*                   ] ); *)
(*             SSP.AST.ProgramInstance *)
(*               ( "t_Sized", *)
(*                 [], *)
(*                 SSP.AST.NameTy ("state_" ^ contract), *)
(*                 [ SSP.AST.NameTy ("state_" ^ contract) ], *)
(*                 SSP.AST.TermDef *)
(*                   (SSP.AST.Lambda ([ SSP.AST.Ident "x" ], SSP.AST.Var "x")) ); *)
(*             SSP.AST.ProgramInstance *)
(*               ( "t_HasActions", *)
(*                 [], *)
(*                 SSP.AST.NameTy ("state_" ^ contract), *)
(*                 [ SSP.AST.NameTy ("state_" ^ contract) ], *)
(*                 SSP.AST.TermDef (SSP.AST.Var "Admitted") ); *)
(*             SSP.AST.Equations *)
(*               ( "receive_" ^ contract, *)
(*                 [ *)
(*                   SSP.AST.Explicit *)
(*                     (SSP.AST.Ident "chain", SSP.AST.NameTy "Chain"); *)
(*                   SSP.AST.Explicit *)
(*                     (SSP.AST.Ident "ctx", SSP.AST.NameTy "ContractCallContext"); *)
(*                   SSP.AST.Explicit *)
(*                     (SSP.AST.Ident "st", SSP.AST.NameTy ("state_" ^ contract)); *)
(*                   SSP.AST.Explicit *)
(*                     ( SSP.AST.Ident "msg", *)
(*                       SSP.AST.NameTy ("Datatypes.option Msg_" ^ contract) ); *)
(*                 ], *)
(*                 SSP.AST.Match *)
(*                   ( SSP.AST.Var "msg", *)
(*                     List.map *)
(*                       ~f:(function *)
(*                         | Some _param, x_item -> *)
(*                             ( SSP.AST.Ident *)
(*                                 ("Some" ^ " " ^ "(" ^ "msg_" ^ contract ^ "_" *)
(*                                ^ x_item ^ " " ^ "val" ^ ")"), *)
(*                               SSP.AST.Var *)
(*                                 ("match (is_pure (both_prog (receive_" *)
(*                                ^ contract ^ "_" ^ x_item *)
(* ^ " (ret_both val) (ret_both st)))) with\n\ *)
   (*                                  \         | inl x => ResultMonad.Ok ((fst x), \ *)
   (*                                   [])\n\ *)
   (*                                  \         | inr x => ResultMonad.Err x\n\ *)
   (*                                  \         end") ) *)
(*                         | None, x_item -> *)
(*                             ( SSP.AST.Ident *)
(*                                 ("Some" ^ " " ^ "msg_" ^ contract ^ "_" ^ x_item), *)
(*                               SSP.AST.Var *)
(*                                 ("match (is_pure (both_prog (receive_" *)
(*                                ^ contract ^ "_" ^ x_item *)
(* ^ " (ret_both st)))) with\n\ *)
   (*                                  \         | inl x => ResultMonad.Ok ((fst x), \ *)
   (*                                   [])\n\ *)
   (*                                  \         | inr x => ResultMonad.Err x\n\ *)
   (*                                  \         end") )) *)
(*                       receive_functions *)
(*                     @ [ (SSP.AST.WildPat, SSP.AST.Var "ResultMonad.Err tt") ] ), *)
(*                 SSP.AST.NameTy *)
(*                   ("ResultMonad.result (state_" ^ contract *)
(*                  ^ " * list ActionBody) t_ParseError") ); *)
(*             SSP.AST.ProgramInstance *)
(*               ( "Serializable", *)
(*                 [], *)
(*                 SSP.AST.NameTy ("state_" ^ contract), *)
(*                 [ SSP.AST.NameTy ("state_" ^ contract) ], *)
(*                 SSP.AST.InstanceDecls [] ); *)
(*             SSP.AST.ProgramInstance *)
(*               ( "Serializable", *)
(*                 [], *)
(*                 SSP.AST.NameTy ("Msg_" ^ contract), *)
(*                 [ SSP.AST.NameTy ("Msg_" ^ contract) ], *)
(*                 SSP.AST.TermDef *)
(*                   (SSP.AST.Var *)
(*                      ("Derive Serializable Msg_OVN_rect<" *)
(*                      ^ String.concat ~sep:"," *)
(*                          (List.map *)
(*                             ~f:(fun x -> "msg_" ^ contract ^ "_" ^ snd x) *)
(*                             receive_functions) *)
(*                      ^ ">")) ); *)
(*             SSP.AST.Definition *)
(*               ( "contract_" ^ contract, *)
(*                 [], *)
(*                 SSP.AST.App *)
(*                   ( SSP.AST.Var "build_contract", *)
(*                     [ *)
(*                       SSP.AST.Var ("init_" ^ contract); *)
(*                       SSP.AST.Var ("receive_" ^ contract); *)
(*                     ] ), *)
(*                 SSP.AST.AppTy *)
(*                   ( SSP.AST.NameTy "Contract", *)
(*                     [ *)
(*                       SSP.AST.NameTy ("state_" ^ contract); *)
(*                       SSP.AST.NameTy ("Msg_" ^ contract); *)
(*                       SSP.AST.NameTy ("state_" ^ contract); *)
(*                       SSP.AST.NameTy "t_ParseError"; *)
(*                     ] ) ); *)
(*           ]) *)
(*         (Map.keys contract_map) *)

(*   let concert_header = *)
(*     [ *)
(*       SSP.AST.Comment "Concert lib part"; *)
(*       SSP.AST.Require (Some "ConCert.Utils", [ "Extras" ], None); *)
(*       SSP.AST.Require (Some "ConCert.Utils", [ "Automation" ], None); *)
(*       SSP.AST.Require (Some "ConCert.Execution", [ "Serializable" ], None); *)
(*       SSP.AST.Require (Some "ConCert.Execution", [ "Blockchain" ], None); *)
(*       SSP.AST.Require (Some "ConCert.Execution", [ "ContractCommon" ], None); *)
(*       SSP.AST.Require (Some "ConCert.Execution", [ "Serializable" ], None); *)
(*       SSP.AST.Require (None, [ "ConCertLib" ], None); *)
(*     ] *)
(* end *)

(* For each (header, translate) pair in [f2], translate every element of
   [x]; when the translations produced anything at all, prepend the header
   declaration [d].  Checking emptiness with [for_all ~f:List.is_empty]
   avoids materialising the concatenated list just to test whether it is
   empty. *)
let process_annotation (x : 'a list) (f2 : ('b * ('a -> 'b)) list) : 'b list =
  List.concat_map
    ~f:(fun (d, f) ->
      let temp = List.map ~f x in
      if List.for_all ~f:List.is_empty temp then [] else d :: temp)
    f2

(* Render a namespace's items ([x]) to a single source string, using the
   analysis data [y].  Disabled ConCert translations are kept as comments
   for future reactivation. *)
let string_of_items m (x, y) =
  let decls =
    process_annotation x
      [
        ([], print_item m y);
        (* ConCert.(concert_header, translate_concert_annotations y); *)
      ]
    (* @ ConCert.concert_contract_type_decls x *)
  in
  decls |> List.map ~f:decls_to_string |> cleanup_item_strings

(* TODO move into string_of_items, as SSP.AST decl *)
(* Preamble prepended to every generated .v file: SSProve/Crypt and
   Hacspec library imports, notation scopes, and the default obligation
   tactic.  NOTE(review): emitted verbatim — keep byte-identical unless
   the generated Coq is meant to change. *)
let hardcoded_coq_headers =
  "(* File automatically generated by Hacspec *)\n\
   Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\
   From Crypt Require Import choice_type Package Prelude.\n\
   Import PackageNotation.\n\
   From extructures Require Import ord fset.\n\
   From mathcomp Require Import word_ssrZ word.\n\
   (* From Jasmin Require Import word. *)\n\n\
   From Coq Require Import ZArith.\n\
   From Coq Require Import Strings.String.\n\
   Import List.ListNotations.\n\
   Open Scope list_scope.\n\
   Open Scope Z_scope.\n\
   Open Scope bool_scope.\n\n\
   From Hacspec Require Import ChoiceEquality.\n\
   From Hacspec Require Import LocationUtility.\n\
   From Hacspec Require Import Hacspec_Lib_Comparable.\n\
   From Hacspec Require Import Hacspec_Lib_Pre.\n\
   From Hacspec Require Import Hacspec_Lib.\n\n\
   Open Scope hacspec_scope.\n\
   Import choice.Choice.Exports.\n\n\
   From RecordUpdate Require Import RecordUpdate.\n\n\
   Import RecordSetNotations.\n\n\
   Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n"

(* Turn the phase-processed items into SSProve/Coq files: items are grouped
   by namespace; each namespace becomes one [.v] file whose name is the
   '_'-joined, first-letter-capitalised namespace path, and whose contents
   are [hardcoded_coq_headers] followed by the printed items.
   [bundles] is part of the common backend interface but unused here. *)
let translate m (_bo : BackendOptions.t) ~(bundles : AST.item list list)
    (items : AST.item list) : Types.file list =
  (* Per-item static analysis consumed by the printers. *)
  let analysis_data = StaticAnalysis.analyse items in
  U.group_items_by_namespace items
  |> Map.to_alist
  |> List.filter_map ~f:(fun (_, items) ->
         (* Namespaces with no items are dropped ([List.hd] is [None]). *)
         let* first_item = List.hd items in
         Some ((RenderId.render first_item.ident).path, items))
  |> List.map ~f:(fun (ns, items) ->
         let mod_name =
           String.concat ~sep:"_"
             (List.map ~f:(map_first_letter String.uppercase) ns)
         in
         let file_content =
           hardcoded_coq_headers ^ "\n"
           ^ string_of_items m (items, analysis_data)
           ^ "\n"
         in
         Types.
           { path = mod_name ^ ".v"; contents = file_content; sourcemap = None })

(* Run the phase pipeline (defined earlier in this file) on the Rust-level
   items to obtain items in this backend's input language. *)
let apply_phases (_bo : BackendOptions.t) (i : Ast.Rust.item list) :
    AST.item list =
  TransformToInputLanguage.ditems i


================================================
FILE: engine/backends/coq/ssprove/ssprove_backend.mli
================================================
(* Public interface of the SSProve backend: the standard backend signature
   [T], with no backend-specific command-line options. *)
open Hax_engine.Backend
include T with module BackendOptions = UnitBackendOptions


================================================
FILE: engine/backends/easycrypt/dune
================================================
; Build rules for the EasyCrypt backend library of the hax engine.
(library
 (name easycrypt_backend)
 (package hax-engine)
 (libraries hax_engine)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; NOTE(review): `-w -A` silences *all* OCaml warnings in this directory;
; consider narrowing it once the backend stabilises.
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/easycrypt/easycrypt_backend.ml
================================================
(* -------------------------------------------------------------------- *)
open Hax_engine
open Base

(* -------------------------------------------------------------------- *)

(* Instantiate the generic backend machinery for EasyCrypt.  The feature
   record starts from [Off] and switches on only what this backend can
   print: loops, for-index loops, mutable variables, macros and
   [Construct] with a base expression. *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Loop
      include On.For_index_loop
      include On.Mutable_variable
      include On.Macro
      include On.Construct_base
    end)
    (struct
      let backend = Diagnostics.Backend.EasyCrypt
    end)

(* No EasyCrypt-specific command-line options. *)
module BackendOptions = Backend.UnitBackendOptions
module AST = Ast.Make (InputLanguage)
module ECNamePolicy = Concrete_ident.DefaultNamePolicy
module U = Ast_utils.Make (InputLanguage)
module RenderId = Concrete_ident.MakeRenderAPI (ECNamePolicy)
open AST

(* Feature gate from an arbitrary feature set [FA] into the EasyCrypt input
   language: the features this backend supports are mapped through, and
   every other feature is rejected with a [NotInBackendLang EasyCrypt]
   diagnostic. *)
module RejectNotEC (FA : Features.T) = struct
  module FB = InputLanguage

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        (* Supported features: carried over unchanged. *)
        let mutable_variable _ _ = Features.On.mutable_variable
        let loop _ _ = Features.On.loop
        let continue = reject
        let mutable_reference = reject
        let mutable_pointer = reject
        let reference = reject
        let slice = reject
        let raw_pointer = reject
        let early_exit = reject
        let question_mark = reject
        let break = reject
        let macro _ _ = Features.On.macro
        let as_pattern = reject
        let lifetime = reject
        let monadic_action = reject
        let monadic_binding = reject
        let arbitrary_lhs = reject
        let state_passing_loop = reject
        let fold_like_loop = reject
        let nontrivial_lhs = reject
        let block = reject
        let for_loop = reject
        let while_loop = reject
        let quote = reject
        let dyn = reject
        let match_guard = reject
        let trait_item_default = reject
        let unsafe = reject
        let construct_base _ _ = Features.On.construct_base
        let for_index_loop _ _ = Features.On.for_index_loop

        let metadata =
          Phase_utils.Metadata.make (Reject (NotInBackendLang EasyCrypt))
      end)
end

(* A namespace tree: [subnms] maps a namespace component to its subtree,
   [items] holds the items attached at this node. *)
type nmtree = { subnms : (string, nmtree) Map.Poly.t; items : AST.item list }

module NM = struct
  let empty : nmtree = { subnms = Map.Poly.empty; items = [] }

  (* Insert [item] at the node addressed by the component list [nm],
     creating intermediate nodes on the way. *)
  let rec push_using_longname (the : nmtree) (nm : string list)
      (item : AST.item) =
    match nm with
    | [] -> { the with items = the.items @ [ item ] }
    | name :: nm ->
        let update (subnm : nmtree option) =
          let subnm = Option.value ~default:empty subnm in
          push_using_longname subnm nm item
        in

        { the with subnms = Map.Poly.update ~f:update the.subnms name }

  (* NOTE(review): the path is reversed before insertion, so the tree is
     keyed innermost-component-first — confirm this matches the nesting
     order expected by the theory printer. *)
  let push_using_namespace (the : nmtree) (nm : string list) (item : AST.item) =
    push_using_longname the (List.rev nm) item

  (* Insert an item under its rendered module path. *)
  let push (the : nmtree) (item : AST.item) =
    push_using_namespace the (RenderId.render item.ident).path item
end

(* Bit-width suffix used to select the EasyCrypt word module
   ("P" stands for the pointer/platform size). *)
let suffix_of_size : Ast.size -> string = function
  | Ast.S8 -> "8"
  | Ast.S16 -> "16"
  | Ast.S32 -> "32"
  | Ast.S64 -> "64"
  | Ast.S128 -> "128"
  | Ast.SSize -> "P"

(* "S"igned / "U"nsigned marker. *)
let suffix_of_signedness (s : Ast.signedness) =
  match s with
  | Signed -> "S"
  | Unsigned -> "U"

(* Name of the EasyCrypt word module for a machine-integer kind,
   e.g. [WU32] for [u32]. *)
let intmodule_of_kind (Ast.{ size; signedness } : Ast.int_kind) =
  Stdlib.Format.sprintf "W%s%s"
    (suffix_of_signedness signedness)
    (suffix_of_size size)

(* Prototype EasyCrypt pretty-printer.  Items are first arranged into a
   namespace tree; each namespace is printed as an EasyCrypt [theory],
   with its functions grouped as procedures of a [Procs] module.  The
   output goes to [err_formatter] (see the bottom of this function) and
   no [Types.file] is produced yet, hence the empty return value.
   Unsupported constructs fail with [assert false], which the caller
   [translate] converts into a diagnostic. *)
let translate' (_bo : BackendOptions.t) (items : AST.item list) :
    Types.file list =
  let items = List.fold_left ~init:NM.empty ~f:NM.push items in

  (* Print a namespace node: sub-namespaces become nested theories, then
     the items of this level are emitted inside a [Procs] module. *)
  let rec doit (fmt : Formatter.t) (the : nmtree) =
    the.subnms
    |> Map.Poly.iteri ~f:(fun ~key ~data ->
           Stdlib.Format.fprintf fmt "theory %s.@." key;
           doit fmt data;
           Stdlib.Format.fprintf fmt "end.@.");

    let doitems (fmt : Formatter.t) =
      the.items
      |> List.iter ~f:(fun item ->
             match item.v with
             (* Only monomorphic functions are supported. *)
             | Fn { name; generics; body; params }
               when List.is_empty generics.params ->
                 let name = (RenderId.render name).name in

                 doit_fn fmt (name, params, body)
             | Fn _ -> assert false
             | TyAlias _ -> assert false
             | Type _ -> assert false
             | Trait _ -> assert false
             | Impl _ -> assert false
             | HaxError _ -> ()
             | IMacroInvokation _ -> ()
             | Use _ -> ()
             | Alias _ -> ()
             | NotImplementedYet -> ()
             | _ -> .)
    in

    if not (List.is_empty the.items) then
      Stdlib.Format.fprintf fmt "@[<v>module Procs = {@,  @[<v>%t@]@,}@]@,"
        doitems
  (* One function as an EasyCrypt [proc]; only plain immutable by-value
     bindings are accepted as parameters. *)
  and doit_fn (fmt : Formatter.t) (name, params, body) =
    let pp_param (fmt : Formatter.t) (p : param) =
      match p.pat.p with
      | PBinding { var; typ; mode = ByValue; mut = Immutable; subpat = None } ->
          Stdlib.Format.fprintf fmt "%s : %a" var.name doit_type typ
      | _ -> assert false
    in

    Stdlib.Format.fprintf fmt "@[<v>proc %s(%a) = {@,  @[<v>%a@]@,}@]@\n@\n"
      name
      (Stdlib.Format.pp_print_list
         ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt ", ")
         pp_param)
      params doit_stmt body
  and doit_concrete_ident (fmt : Formatter.t) (p : Concrete_ident.t) =
    Stdlib.Format.fprintf fmt "%s" (RenderId.render p).name
  (* Types: machine integers map to word modules; type applications are
     printed in EasyCrypt's postfix style, e.g. [(int) list]. *)
  and doit_type (fmt : Formatter.t) (typ : ty) =
    match typ with
    | TBool -> assert false
    | TChar -> assert false
    | TInt kind -> Stdlib.Format.fprintf fmt "%s.t" (intmodule_of_kind kind)
    | TFloat _ -> assert false
    | TStr -> assert false
    | TApp { ident = `Concrete ident; args = [] } ->
        doit_concrete_ident fmt ident
    | TApp { ident = `Concrete ident; args } ->
        Stdlib.Format.fprintf fmt "(%a) %a"
          (Stdlib.Format.pp_print_list
             ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt ", ")
             doit_type_arg)
          args doit_concrete_ident ident
    | TApp _ -> assert false
    | TArray _ -> assert false
    | TParam _ -> assert false
    | TArrow (_, _) -> assert false
    | TAssociatedType _ -> assert false
    | TOpaque _ -> assert false
    | _ -> .
  and doit_type_arg (fmt : Formatter.t) (tyarg : generic_value) =
    match tyarg with GType ty -> doit_type fmt ty | _ -> assert false
  (* Statement-position expressions (lets become assignments, loops become
     [while], trailing expressions become [return]). *)
  and doit_stmt (fmt : Formatter.t) (expr : expr) =
    (* Debug helper: dump the offending expression before failing. *)
    let foo () =
      Stdlib.Format.eprintf "%a@.@." pp_expr expr;
      assert false
    in

    match expr.e with
    | If { cond; then_; else_ = None } ->
        Stdlib.Format.fprintf fmt "@[<v>if (%a) {@,  @[<v>%a@]@,}@]" doit_expr
          cond doit_stmt then_
    | If _ -> assert false
    | Let
        {
          lhs =
            {
              p =
                PBinding
                  {
                    mut = _;
                    mode = ByValue;
                    var = { name; _ };
                    subpat = None;
                    _;
                  };
              _;
            };
          rhs;
          body;
          monadic = None;
        } ->
        Stdlib.Format.fprintf fmt "%s <- %a;@," name doit_expr rhs;
        Stdlib.Format.fprintf fmt "%a" doit_stmt body
    (* [let () = e in body]: sequence the two statements. *)
    | Let
        {
          lhs = { p = PWild; typ = TApp { ident = `TupleType 0; args = [] }; _ };
          rhs;
          body;
          monadic = None;
        } ->
        Stdlib.Format.fprintf fmt "%a@," doit_stmt rhs;
        Stdlib.Format.fprintf fmt "%a" doit_stmt body
    | Let _ -> foo ()
    | Assign { lhs; e; _ } ->
        Stdlib.Format.fprintf fmt "%a <- %a;" doit_lhs lhs doit_expr e
    | Match _ -> foo ()
    (* [for var in start..end_]: printed as an initialised [while] with an
       explicit increment appended to the body. *)
    | Loop
        {
          body;
          kind = ForIndexLoop { start; end_; var = { name; _ }; _ };
          state = None;
          _;
        } ->
        let _ = match start.typ with TInt kind -> kind | _ -> assert false in

        Stdlib.Format.fprintf fmt "%s <- %a;@," name doit_expr start;
        Stdlib.Format.fprintf fmt "@[<v>while (%s < %a) {@,  @[<v>%a%t@]@,}@]"
          name doit_expr end_ doit_stmt body (fun fmt ->
            Stdlib.Format.fprintf fmt "%s <- %s + 1;@," name name)
    | Loop _ -> foo ()
    | MacroInvokation _ -> foo ()
    | GlobalVar (`TupleCons 0) -> ()
    | Ascription _ | Array _ | Closure _ -> assert false
    | App _ | Literal _ | Construct _ | LocalVar _ | GlobalVar _ ->
        Stdlib.Format.fprintf fmt "return %a;" doit_expr expr
    | _ -> .
  and doit_lhs (fmt : Formatter.t) (lhs : lhs) =
    match lhs with
    | LhsFieldAccessor _ | LhsVecRef _ | LhsArrayAccessor { e = LhsVecRef _; _ }
      ->
        assert false
    | LhsArrayAccessor
        { e = LhsLocalVar { var = { name; _ }; _ }; index; typ = _; _ } ->
        Stdlib.Format.fprintf fmt "%s.[%a]" name doit_expr index
    | LhsLocalVar { var = { name; _ }; _ } ->
        Stdlib.Format.fprintf fmt "%s" name
    | _ -> .
  (* Expression-position printing. *)
  and doit_expr (fmt : Formatter.t) (expr : expr) =
    match expr.e with
    | If _ -> assert false
    (* Indexing via [Index::index] becomes [a.[i]]. *)
    | App { f = { e = GlobalVar ident; _ }; args = [ a; i ]; _ }
      when Ast.Global_ident.eq_name Core__ops__index__Index__index ident ->
        Stdlib.Format.fprintf fmt "(%a).[%a]" doit_expr a doit_expr i
    (* Binary operators from core traits, mapped to EasyCrypt syntax. *)
    | App { f = { e = GlobalVar (`Concrete op); _ }; args = [ e1; e2 ]; _ }
      when Concrete_ident.(
             eq_name Core__ops__bit__BitXor__bitxor op
             || eq_name Core__ops__bit__BitAnd__bitand op
             || eq_name Core__ops__bit__BitOr__bitor op
             || eq_name Core__ops__arith__Add__add op
             || eq_name Core__ops__arith__Mul__mul op
             || eq_name Core__cmp__PartialEq__ne op
             || eq_name Core__cmp__PartialEq__eq op) ->
        Stdlib.Format.fprintf fmt "(%a) %s (%a)" doit_expr e1
          (match (RenderId.render op).name with
          | "bitxor" -> "^"
          | "bitand" -> "&"
          | "bitor" -> "|"
          | "add" -> "+"
          | "mul" -> "*"
          | "eq" -> "="
          | "ne" -> "<>"
          | _ -> assert false)
          doit_expr e2
    | App { f = { e = GlobalVar (`Concrete ident); _ }; args = []; _ } ->
        Stdlib.Format.fprintf fmt "%a" doit_concrete_ident ident
    | App { f = { e = GlobalVar (`Concrete ident); _ }; args; _ } ->
        Stdlib.Format.fprintf fmt "%a %a" doit_concrete_ident ident
          (Stdlib.Format.pp_print_list
             ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt " ")
             (fun fmt e -> Stdlib.Format.fprintf fmt "(%a)" doit_expr e))
          args
    | App _ ->
        Stdlib.Format.eprintf "%a@.@." pp_expr expr;
        assert false
    | Literal (Int { value; kind; _ }) ->
        Stdlib.Format.fprintf fmt "%s.ofint %a" (intmodule_of_kind kind)
          String.pp value
    | Literal _ -> assert false
    | Array _ -> assert false
    | Construct
        {
          constructor = `Concrete ident;
          is_record = false;
          is_struct = false;
          base = None;
          fields = _;
        } ->
        (* Fix: print into [fmt] like every other success branch; this
           previously used [eprintf], bypassing the target formatter. *)
        Stdlib.Format.fprintf fmt "%a." doit_concrete_ident ident
    | Construct _ -> assert false
    | Match _ -> assert false
    | Let _ -> assert false
    | LocalVar { name; _ } -> Stdlib.Format.fprintf fmt "%s" name
    | GlobalVar _ -> assert false
    | Ascription _ -> assert false
    | MacroInvokation _ -> assert false
    | Assign _ -> assert false
    | Loop _ -> assert false
    (* | ForLoop _ -> assert false *)
    | Closure _ -> assert false
    | _ -> .
  in

  doit Stdlib.Format.err_formatter items;
  []

(* Backend entry point: run the prototype printer and turn any internal
   [assert false] into a proper hax diagnostic instead of crashing.
   [bundles] is part of the common backend interface but unused here. *)
let translate _ (bo : BackendOptions.t) ~(bundles : AST.item list list)
    (items : AST.item list) : Types.file list =
  try translate' bo items
  with Assert_failure (file, line, col) ->
    (* Fix: report under the EasyCrypt backend — this previously said
       [Backend FStar], mislabelling EasyCrypt failures as F* ones. *)
    Diagnostics.failure ~context:(Backend EasyCrypt) ~span:(Span.dummy ())
      (AssertionFailure
         {
           details =
             "Assertion failed in " ^ file ^ ":" ^ Int.to_string line ^ ":"
             ^ Int.to_string col;
         })

open Phase_utils

(* Pipeline of rewriting phases bringing full Rust down to the subset the
   EasyCrypt printer accepts; the final [RejectNotEC] gate turns any
   remaining unsupported feature into a diagnostic. *)
module TransformToInputLanguage =
  [%functor_application
  Phases.Reject.RawOrMutPointer Features.Rust |> Phases.Reject.Unsafe
  |> Phases.And_mut_defsite |> Phases.Reconstruct_asserts
  |> Phases.Reconstruct_for_loops |> Phases.Direct_and_mut |> Phases.Drop_blocks
  |> Phases.Reject.Continue |> Phases.Drop_references |> Phases.Bundle_cycles
  |> Phases.Sort_items_namespace_wise |> RejectNotEC]

(* Apply the pipeline above to the Rust-level items. *)
let apply_phases (_bo : BackendOptions.t) (items : Ast.Rust.item list) :
    AST.item list =
  TransformToInputLanguage.ditems items


================================================
FILE: engine/backends/easycrypt/easycrypt_backend.mli
================================================
(* Public interface of the EasyCrypt backend: the standard backend signature
   [T], with no backend-specific command-line options. *)
open Hax_engine.Backend
include T with module BackendOptions = UnitBackendOptions


================================================
FILE: engine/backends/fstar/dune
================================================
; Build rules for the F* backend library of the hax engine.  [wrapped false]
; exposes the modules at the top level (the vendored F* surface AST expects
; unprefixed module names).
(library
 (name fstar_backend)
 (package hax-engine)
 (wrapped false)
 (libraries hax_engine base fstar_surface_ast hacspeclib_macro_parser)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; NOTE(review): `-w -A` silences *all* OCaml warnings in this directory
; (vendored/generated code below relies on this).
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/fstar/fstar-surface-ast/.gitignore
================================================
_build
result


================================================
FILE: engine/backends/fstar/fstar-surface-ast/.ocamlformat-ignore
================================================
*


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_BaseTypes.ml
================================================
(* OCaml realisations of F*'s primitive base types.  Note that [float] and
   [double] both map to OCaml's 64-bit float, and [byte] is a plain int. *)
type char   = FStar_Char.char[@@deriving yojson,show]
type float  = Base.Float.t
type double = Base.Float.t
type byte   = Base.Int.t
type int8   = Stdint.Int8.t
type uint8  = Stdint.Uint8.t
type int16   = Stdint.Int16.t
type uint16  = Stdint.Uint16.t
type int32  = Stdint.Int32.t
type int64  = Stdint.Int64.t


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Char.ml
================================================
(* F* characters are represented as Unicode code points (plain ints).
   The [UChar] alias is not referenced in this file — presumably kept for
   compatibility with the upstream F* sources; verify before removing. *)
module UChar = BatUChar
type char = int[@@deriving yojson,show]


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Effect.ml
================================================
(* Runtime support for F*'s effectful primitives: pipes, ML-style refs,
   exceptions and process exit. *)
(* Forward ([|>]) and backward ([<|]) pipe application. *)
let op_Bar_Greater (x : 'a) (f : ('a -> 'b)) : 'b = f x
let op_Less_Bar  (f : ('a -> 'b)) (x : 'a) : 'b = f x

(* Re-expose OCaml's refs under a local type name so the yojson/show
   derivers can be attached without touching the primitive type. *)
type 'a ref' = 'a ref[@@deriving yojson,show]
type 'a ref = 'a ref'[@@deriving yojson,show]

let op_Bang (r:'a ref) = !r
let op_Colon_Equals x y = x := y
let alloc x = ref x
let raise = raise
(* F* integers are arbitrary precision, hence the Zarith conversion. *)
let exit i = exit (Z.to_int i)
let try_with f1 f2 = try f1 () with | e -> f2 e
exception Failure = Failure
let failwith x = raise (Failure x)


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Compiler_List.ml
================================================
(* We give an implementation here using OCaml's BatList,
   which provides tail-recursive versions of most functions *)
(* NOTE(review): despite the comment above, this module actually delegates
   to [FStar_List]; confirm [FStar_List] is the Batteries-backed variant. *)
include FStar_List


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Range.ml
================================================
(* Source positions and ranges, extracted from F*'s FStar.Compiler.Range.
   A [range] carries a definition range plus a use range; the [__proj__*]
   functions are machine-generated record projectors. *)
open Prims
type file_name = Prims.string[@@deriving yojson,show]
(* NOTE(review): the [yojson,show] deriving list is duplicated on the
   record types below — harmless only if the deriver tolerates it. *)
type pos = {
  line: Prims.int ;
  col: Prims.int }[@@deriving yojson,show,yojson,show]
let (__proj__Mkpos__item__line : pos -> Prims.int) =
  fun projectee -> match projectee with | { line; col;_} -> line
let (__proj__Mkpos__item__col : pos -> Prims.int) =
  fun projectee -> match projectee with | { line; col;_} -> col
let (max : Prims.int -> Prims.int -> Prims.int) =
  fun i -> fun j -> if i < j then j else i
(* Lexicographic >= on (line, col). *)
let (pos_geq : pos -> pos -> Prims.bool) =
  fun p1 ->
    fun p2 ->
      (p1.line > p2.line) || ((p1.line = p2.line) && (p1.col >= p2.col))
type rng = {
  file_name: file_name ;
  start_pos: pos ;
  end_pos: pos }[@@deriving yojson,show,yojson,show]
let (__proj__Mkrng__item__file_name : rng -> file_name) =
  fun projectee ->
    match projectee with
    | { file_name = file_name1; start_pos; end_pos;_} -> file_name1
let (__proj__Mkrng__item__start_pos : rng -> pos) =
  fun projectee ->
    match projectee with
    | { file_name = file_name1; start_pos; end_pos;_} -> start_pos
let (__proj__Mkrng__item__end_pos : rng -> pos) =
  fun projectee ->
    match projectee with
    | { file_name = file_name1; start_pos; end_pos;_} -> end_pos
type range = {
  def_range: rng ;
  use_range: rng }[@@deriving yojson,show,yojson,show]
let (__proj__Mkrange__item__def_range : range -> rng) =
  fun projectee ->
    match projectee with | { def_range; use_range;_} -> def_range
let (__proj__Mkrange__item__use_range : range -> rng) =
  fun projectee ->
    match projectee with | { def_range; use_range;_} -> use_range
let (dummy_pos : pos) = { line = Prims.int_zero; col = Prims.int_zero }
(* The leading space in " dummy" matches upstream F* — do not "fix" it. *)
let (dummy_rng : rng) =
  { file_name = " dummy"; start_pos = dummy_pos; end_pos = dummy_pos }
let (dummyRange : range) = { def_range = dummy_rng; use_range = dummy_rng }
(* Accessors, constructors and the union/inclusion operations on ranges. *)
let (use_range : range -> rng) = fun r -> r.use_range
let (def_range : range -> rng) = fun r -> r.def_range
let (range_of_rng : rng -> rng -> range) =
  fun d -> fun u -> { def_range = d; use_range = u }
(* Setters are no-ops when given the dummy range. *)
let (set_use_range : range -> rng -> range) =
  fun r2 ->
    fun use_rng ->
      if use_rng <> dummy_rng
      then { def_range = (r2.def_range); use_range = use_rng }
      else r2
let (set_def_range : range -> rng -> range) =
  fun r2 ->
    fun def_rng ->
      if def_rng <> dummy_rng
      then { def_range = def_rng; use_range = (r2.use_range) }
      else r2
(* Positions are clamped to be non-negative. *)
let (mk_pos : Prims.int -> Prims.int -> pos) =
  fun l ->
    fun c -> { line = (max Prims.int_zero l); col = (max Prims.int_zero c) }
let (mk_rng : file_name -> pos -> pos -> rng) =
  fun file_name1 ->
    fun start_pos ->
      fun end_pos -> { file_name = file_name1; start_pos; end_pos }
let (mk_range : Prims.string -> pos -> pos -> range) =
  fun f -> fun b -> fun e -> let r = mk_rng f b e in range_of_rng r r
(* Smallest span covering both; ranges from different files yield [r2]. *)
let (union_rng : rng -> rng -> rng) =
  fun r1 ->
    fun r2 ->
      if r1.file_name <> r2.file_name
      then r2
      else
        (let start_pos =
           if pos_geq r1.start_pos r2.start_pos
           then r2.start_pos
           else r1.start_pos in
         let end_pos =
           if pos_geq r1.end_pos r2.end_pos then r1.end_pos else r2.end_pos in
         mk_rng r1.file_name start_pos end_pos)
let (union_ranges : range -> range -> range) =
  fun r1 ->
    fun r2 ->
      let uu___ = union_rng r1.def_range r2.def_range in
      let uu___1 = union_rng r1.use_range r2.use_range in
      { def_range = uu___; use_range = uu___1 }
(* Is [r1] contained in [r2]?  Always false across files. *)
let (rng_included : rng -> rng -> Prims.bool) =
  fun r1 ->
    fun r2 ->
      if r1.file_name <> r2.file_name
      then false
      else
        (pos_geq r1.start_pos r2.start_pos) &&
          (pos_geq r2.end_pos r1.end_pos)
(* Stringification ("file(line,col-line,col)") and comparison of ranges.
   The [uu___] names are machine-generated temporaries from extraction. *)
let (string_of_pos : pos -> Prims.string) =
  fun pos1 ->
    let uu___ = FStar_Compiler_Util.string_of_int pos1.line in
    let uu___1 = FStar_Compiler_Util.string_of_int pos1.col in
    FStar_Compiler_Util.format2 "%s,%s" uu___ uu___1
let (string_of_file_name : Prims.string -> Prims.string) =
  fun f ->
    f
let (file_of_range : range -> Prims.string) =
  fun r -> let f = (r.def_range).file_name in string_of_file_name f
let (set_file_of_range : range -> Prims.string -> range) =
  fun r ->
    fun f ->
      {
        def_range =
          (let uu___ = r.def_range in
           {
             file_name = f;
             start_pos = (uu___.start_pos);
             end_pos = (uu___.end_pos)
           });
        use_range = (r.use_range)
      }
let (string_of_rng : rng -> Prims.string) =
  fun r ->
    let uu___ = string_of_file_name r.file_name in
    let uu___1 = string_of_pos r.start_pos in
    let uu___2 = string_of_pos r.end_pos in
    FStar_Compiler_Util.format3 "%s(%s-%s)" uu___ uu___1 uu___2
let (string_of_def_range : range -> Prims.string) =
  fun r -> string_of_rng r.def_range
let (string_of_use_range : range -> Prims.string) =
  fun r -> string_of_rng r.use_range
(* The default rendering uses the definition range. *)
let (string_of_range : range -> Prims.string) =
  fun r -> string_of_def_range r
let (start_of_range : range -> pos) = fun r -> (r.def_range).start_pos
let (end_of_range : range -> pos) = fun r -> (r.def_range).end_pos
let (file_of_use_range : range -> Prims.string) =
  fun r -> (r.use_range).file_name
let (start_of_use_range : range -> pos) = fun r -> (r.use_range).start_pos
let (end_of_use_range : range -> pos) = fun r -> (r.use_range).end_pos
let (line_of_pos : pos -> Prims.int) = fun p -> p.line
let (col_of_pos : pos -> Prims.int) = fun p -> p.col
(* Collapse a range to its end position. *)
let (end_range : range -> range) =
  fun r ->
    mk_range (r.def_range).file_name (r.def_range).end_pos
      (r.def_range).end_pos
(* Order by file name, then start line, then start column. *)
let (compare_rng : rng -> rng -> Prims.int) =
  fun r1 ->
    fun r2 ->
      let fcomp = FStar_String.compare r1.file_name r2.file_name in
      if fcomp = Prims.int_zero
      then
        let start1 = r1.start_pos in
        let start2 = r2.start_pos in
        let lcomp = start1.line - start2.line in
        (if lcomp = Prims.int_zero then start1.col - start2.col else lcomp)
      else fcomp
let (compare : range -> range -> Prims.int) =
  fun r1 -> fun r2 -> compare_rng r1.def_range r2.def_range
let (compare_use_range : range -> range -> Prims.int) =
  fun r1 -> fun r2 -> compare_rng r1.use_range r2.use_range
let (range_before_pos : range -> pos -> Prims.bool) =
  fun m1 -> fun p -> let uu___ = end_of_range m1 in pos_geq p uu___
let (end_of_line : pos -> pos) =
  fun p -> { line = (p.line); col = FStar_Compiler_Util.max_int }
let (extend_to_end_of_line : range -> range) =
  fun r ->
    let uu___ = file_of_range r in
    let uu___1 = start_of_range r in
    let uu___2 = let uu___3 = end_of_range r in end_of_line uu___3 in
    mk_range uu___ uu___1 uu___2


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Util.ml
================================================
(* Misc utilities backing F*'s FStar.Compiler.Util interface. *)

(* Round-trip a decimal string through Zarith (normalises the textual
   form; Z.of_string fails on non-numeric input). *)
let ensure_decimal s = Z.to_string (Z.of_string s)


(* Shadows Stdlib.max_int with its Zarith equivalent. *)
let max_int = Z.of_int max_int
(* Character classes on Unicode code points; anything outside the
   Latin-1 range (> 255) is classified as false. *)
let is_letter c = if c > 255 then false else BatChar.is_letter (BatChar.chr c)
let is_digit  c = if c > 255 then false else BatChar.is_digit  (BatChar.chr c)
let is_letter_or_digit c = is_letter c || is_digit c
let is_symbol c = if c > 255 then false else BatChar.is_symbol (BatChar.chr c)
(* Modeled after: Char.IsPunctuation in .NET
   (http://www.dotnetperls.com/char-ispunctuation)
*)
(* True exactly for the ASCII punctuation code points listed below. *)
let is_punctuation c =
  match c with
  | 33 | 34 | 35 | 37 | 38 | 39 | 40 | 41 | 42 | 44 | 45 | 46 | 47 | 58 | 59
  | 63 | 64 | 91 | 92 | 93 | 95 | 123 | 125 ->
      true
  | _ -> false
(*'!','"','#','%','&','\'','(',')','*',',','-','.','/',':',';','?','@','[','\\',']','_','{','}'*)

(* Identity, kept to satisfy the F* interface. *)
let return_all x = x

(* File mtime in seconds since the Unix epoch, compared structurally. *)
let get_file_last_modification_time f = (BatUnix.stat f).BatUnix.st_mtime
let is_before t1 t2 = compare t1 t2 < 0
let string_of_time = string_of_float

exception Impos

(* SIGINT handling: a process-global current handler plus a delay
   mechanism so critical sections can postpone the interrupt. *)
let cur_sigint_handler : Sys.signal_behavior ref =
  ref Sys.Signal_default

exception SigInt
type sigint_handler = Sys.signal_behavior

let sigint_ignore: sigint_handler =
  Sys.Signal_ignore

(* When [sigint_delay] > 0, an incoming SIGINT is only recorded in
   [sigint_pending] instead of raising immediately. *)
let sigint_delay = ref 0
let sigint_pending = ref false

let raise_sigint _ =
  sigint_pending := false;
  raise SigInt

let raise_sigint_maybe_delay _ =
  (* This function should not do anything complicated, lest it cause deadlocks.
   * Calling print_string, for example, can cause a deadlock (print_string →
   * caml_flush → process_pending_signals → caml_execute_signal → raise_sigint →
   * print_string → caml_io_mutex_lock ⇒ deadlock) *)
  if !sigint_delay = 0
  then raise_sigint ()
  else sigint_pending := true

let sigint_raise: sigint_handler =
  Sys.Signal_handle raise_sigint_maybe_delay

let set_sigint_handler sigint_handler =
  cur_sigint_handler := sigint_handler;
  Sys.set_signal Sys.sigint !cur_sigint_handler

(* Install [handler] for the duration of [f ()], restoring the previous
   handler afterwards (even on exception, via [finally]). *)
let with_sigint_handler handler f =
  let original_handler = !cur_sigint_handler in
  BatPervasives.finally
    (fun () -> Sys.set_signal Sys.sigint original_handler)
    (fun () -> set_sigint_handler handler; f ())
    ()

(* NOTE(review): [BatString.rsplit] raises [Not_found] when [fn] contains
   no '.' — callers appear to rely on the extension being present. *)
let get_file_extension (fn:string) : string = snd (BatString.rsplit fn ".")
let is_path_absolute path_str =
  let open Batteries.Incubator in
  let open BatPathGen.OfString in
  let path_str' = of_string path_str in
  is_absolute path_str'
let join_paths path_str0 path_str1 =
  let open Batteries.Incubator in
  let open BatPathGen.OfString in
  let open BatPathGen.OfString.Operators in
  to_string ((of_string path_str0) //@ (of_string path_str1))

(* Absolutise (against the current working directory) and normalise a
   path, resolving "." and ".." components in-tree. *)
let normalize_file_path (path_str:string) =
  let open Batteries.Incubator in
  let open BatPathGen.OfString in
  let open BatPathGen.OfString.Operators in
  to_string
    (normalize_in_tree
       (let path = of_string path_str in
         if is_absolute path then
           path
         else
           let pwd = of_string (BatSys.getcwd ()) in
           pwd //@ path))

(* Input streams: reads swallow all exceptions and return [None]. *)
type stream_reader = BatIO.input
let open_stdin () = BatIO.stdin
let read_line s =
  try
    Some (BatIO.read_line s)
  with
    _ -> None
let nread (s:stream_reader) (n:Z.t) =
  try
    Some (BatIO.nread s (Z.to_int n))
  with
    _ -> None

(* Wait up to [f] seconds for stdin to become readable. *)
let poll_stdin (f:float) =
    try 
      let ready_fds, _, _ = Unix.select [Unix.stdin] [] [] f in
      match ready_fds with
      | [] -> false
      | _ -> true
    with
    | _ -> false

(* Mutable string builders, backed by Buffer. *)
type string_builder = BatBuffer.t
let new_string_builder () = BatBuffer.create 256
let clear_string_builder b = BatBuffer.clear b
let string_of_string_builder b = BatBuffer.contents b
let string_builder_append b s = BatBuffer.add_string b s

let message_of_exn (e:exn) = Printexc.to_string e
(* NOTE(review): ignores [e] — returns the most recently recorded
   backtrace, which is only that of [e] if called right after the catch. *)
let trace_of_exn (e:exn) = Printexc.get_backtrace ()

(* A "set" is a plain element list paired with an equality predicate;
   all operations are linear (or quadratic) scans.  The list may contain
   duplicates: [set_add] appends unconditionally and deduplication only
   happens on read in [set_elements]. *)
type 'a set = ('a list) * ('a -> 'a -> bool)
[@@deriving show]
(* Sets are not serialisable: yojson round-tripping is disabled. *)
let set_to_yojson _ _ = `Null
let set_of_yojson _ _ = failwith "cannot readback"

let set_is_empty ((s, _):'a set) =
  match s with
  | [] -> true
  | _ -> false

(* Build a set from a list and a three-way comparison (converted to an
   equality test). *)
let as_set (l:'a list) (cmp:('a -> 'a -> Z.t)) = (l, fun x y -> cmp x y = Z.zero)
let new_set (cmp:'a -> 'a -> Z.t) : 'a set = as_set [] cmp

(* Deduplicated elements, preserving first-occurrence order. *)
let set_elements ((s1, eq):'a set) : 'a list =
  let rec aux out = function
    | [] -> BatList.rev_append out []
    | hd::tl ->
       if BatList.exists (eq hd) out then
         aux out tl
       else
         aux (hd::out) tl in
  aux [] s1

let set_add a ((s, b):'a set) = (s@[a], b)
let set_remove x ((s1, eq):'a set) =
  (BatList.filter (fun y -> not (eq x y)) s1, eq)
let set_mem a ((s, b):'a set) = BatList.exists (b a) s
let set_union ((s1, b):'a set) ((s2, _):'a set) = (s1@s2, b)
let set_intersect ((s1, eq):'a set) ((s2, _):'a set) =
  (BatList.filter (fun y -> BatList.exists (eq y) s2) s1, eq)
let set_is_subset_of ((s1, eq):'a set) ((s2, _):'a set) =
  BatList.for_all (fun y -> BatList.exists (eq y) s2) s1
(* NOTE(review): counts the raw list, so duplicates inflate the count
   even though [set_elements] would deduplicate them. *)
let set_count ((s1, _):'a set) = Z.of_int (BatList.length s1)
let set_difference ((s1, eq):'a set) ((s2, _):'a set) : 'a set =
  (BatList.filter (fun y -> not (BatList.exists (eq y) s2)) s1, eq)
let set_symmetric_difference ((s1, eq):'a set) ((s2, _):'a set) : 'a set =
  set_union (set_difference (s1, eq) (s2, eq))
            (set_difference (s2, eq) (s1, eq))
let set_eq ((s1, eq):'a set) ((s2, _):'a set) : bool =
  set_is_empty (set_symmetric_difference (s1, eq) (s2, eq))

(* module StringOps = *)
(*   struct *)
(*     type t = string *)
(*     let equal (x:t) (y:t) = x=y *)
(*     let compare (x:t) (y:t) = BatString.compare x y *)
(*     let hash (x:t) = BatHashtbl.hash x *)
(*   end *)

(* module StringHashtbl = BatHashtbl.Make(StringOps) *)
(* module StringMap = BatMap.Make(StringOps) *)

(* type 'value smap = 'value StringHashtbl.t *)
(* let smap_create (i:Z.t) : 'value smap = StringHashtbl.create (Z.to_int i) *)
(* let smap_clear (s:('value smap)) = StringHashtbl.clear s *)
(* let smap_add (m:'value smap) k (v:'value) = StringHashtbl.replace m k v *)
(* let smap_of_list (l: (string * 'value) list) = *)
(*   let s = StringHashtbl.create (BatList.length l) in *)
(*   FStar_List.iter (fun (x,y) -> smap_add s x y) l; *)
(*   s *)
(* let smap_try_find (m:'value smap) k = StringHashtbl.find_option m k *)
(* let smap_fold (m:'value smap) f a = StringHashtbl.fold f m a *)
(* let smap_remove (m:'value smap) k = StringHashtbl.remove m k *)
(* let smap_keys (m:'value smap) = smap_fold m (fun k _ acc -> k::acc) [] *)
(* let smap_copy (m:'value smap) = StringHashtbl.copy m *)
(* let smap_size (m:'value smap) = StringHashtbl.length m *)
(* let smap_iter (m:'value smap) f = StringHashtbl.iter f m *)

(* exception PSMap_Found *)
(* type 'value psmap = 'value StringMap.t *)
(* let psmap_empty (_: unit) : 'value psmap = StringMap.empty *)
(* let psmap_add (map: 'value psmap) (key: string) (value: 'value) = StringMap.add key value map *)
(* let psmap_find_default (map: 'value psmap) (key: string) (dflt: 'value) = *)
(*   StringMap.find_default dflt key map *)
(* let psmap_try_find (map: 'value psmap) (key: string) = *)
(*   StringMap.Exceptionless.find key map *)
(* let psmap_fold (m:'value psmap) f a = StringMap.fold f m a *)
(* let psmap_find_map (m:'value psmap) f = *)
(*   let res = ref None in *)
(*   let upd k v = *)
(*     let r = f k v in *)
(*     if r <> None then (res := r; raise PSMap_Found) in *)
(*   (try StringMap.iter upd m with PSMap_Found -> ()); *)
(*   !res *)
(* let psmap_modify (m: 'value psmap) (k: string) (upd: 'value option -> 'value) = *)
(*   StringMap.modify_opt k (fun vopt -> Some (upd vopt)) m *)

(* let psmap_merge (m1: 'value psmap) (m2: 'value psmap) : 'value psmap = *)
(*   psmap_fold m1 (fun k v m -> psmap_add m k v) m2 *)

(* module ZHashtbl = BatHashtbl.Make(Z) *)
(* module ZMap = BatMap.Make(Z) *)

(* type 'value imap = 'value ZHashtbl.t *)
(* let imap_create (i:Z.t) : 'value imap = ZHashtbl.create (Z.to_int i) *)
(* let imap_clear (s:('value imap)) = ZHashtbl.clear s *)
(* let imap_add (m:'value imap) k (v:'value) = ZHashtbl.replace m k v *)
(* let imap_of_list (l: (Z.t * 'value) list) = *)
(*   let s = ZHashtbl.create (BatList.length l) in *)
(*   FStar_List.iter (fun (x,y) -> imap_add s x y) l; *)
(*   s *)
(* let imap_try_find (m:'value imap) k = ZHashtbl.find_option m k *)
(* let imap_fold (m:'value imap) f a = ZHashtbl.fold f m a *)
(* let imap_remove (m:'value imap) k = ZHashtbl.remove m k *)
(* let imap_keys (m:'value imap) = imap_fold m (fun k _ acc -> k::acc) [] *)
(* let imap_copy (m:'value imap) = ZHashtbl.copy m *)

(* type 'value pimap = 'value ZMap.t *)
(* let pimap_empty (_: unit) : 'value pimap = ZMap.empty *)
(* let pimap_add (map: 'value pimap) (key: Z.t) (value: 'value) = ZMap.add key value map *)
(* let pimap_find_default (map: 'value pimap) (key: Z.t) (dflt: 'value) = *)
(*   ZMap.find_default dflt key map *)
(* let pimap_try_find (map: 'value pimap) (key: Z.t) = *)
(*   ZMap.Exceptionless.find key map *)
(* let pimap_fold (m:'value pimap) f a = ZMap.fold f m a *)

(* restore pre-2.11 BatString.nsplit behavior,
   see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *)
let batstring_nsplit s t =
  (* Pre-2.11 nsplit returned no fragments at all for the empty string. *)
  match s with
  | "" -> []
  | _ -> BatString.split_on_string t s

(* Substitute [args], in order, for the "%s" placeholders of [fmt].
   Fails if the number of arguments does not match the number of
   placeholders.  (The previous message claimed "Not enough arguments"
   even when too many were supplied, and reported the fragment count
   rather than the expected argument count.) *)
let format (fmt:string) (args:string list) =
  let frags = batstring_nsplit fmt "%s" in
  (* n fragments are separated by n-1 placeholders. *)
  let expected = BatList.length frags - 1 in
  if expected <> BatList.length args then
    failwith ("Wrong number of arguments to format string " ^fmt^ " : expected " ^ (Stdlib.string_of_int expected) ^ " got [" ^ (BatString.concat ", " args) ^ "] frags are [" ^ (BatString.concat ", " frags) ^ "]")
  else
    (* Pad with a trailing "" so args and frags have equal length. *)
    let args = args@[""] in
    BatList.fold_left2 (fun out frag arg -> out ^ frag ^ arg) "" frags args

(* Arity-specific conveniences over [format]. *)
let format1 fmt a1 = format fmt [a1]
let format2 fmt a1 a2 = format fmt [a1; a2]
let format3 fmt a1 a2 a3 = format fmt [a1; a2; a3]
let format4 fmt a1 a2 a3 a4 = format fmt [a1; a2; a3; a4]
let format5 fmt a1 a2 a3 a4 a5 = format fmt [a1; a2; a3; a4; a5]
let format6 fmt a1 a2 a3 a4 a5 a6 = format fmt [a1; a2; a3; a4; a5; a6]

(* Flush any buffered output on stdout. *)
let flush_stdout () = flush stdout

(* Whether stdout is attached to a terminal; always wrapped in [Some]
   to match the F* interface this implements. *)
let stdout_isatty () = Some (Unix.isatty Unix.stdout)

(* Wrap [s] in the escape-code pair [colors] when stdout is a terminal;
   return [s] unchanged otherwise. *)
let colorize s colors =
  match colors with
  | (c1,c2) ->
     match stdout_isatty () with
     | Some true -> format3 "%s%s%s" c1 s c2
     | _ -> s

(* The specific color variants share [colorize] instead of each
   duplicating the TTY test and escape-sequence plumbing. *)
let colorize_bold s = colorize s ("\x1b[39;1m", "\x1b[0m")

let colorize_red s = colorize s ("\x1b[31;1m", "\x1b[0m")

let colorize_cyan s = colorize s ("\x1b[36;1m", "\x1b[0m")

(* Short aliases for the Printf family. *)
let pr  = Printf.printf
let spr = Printf.sprintf
let fpr = Printf.fprintf

(* Minimal JSON document representation (integers are unbounded [Z.t]). *)
type json =
| JsonNull
| JsonBool of bool
| JsonInt of Z.t
| JsonStr of string
| JsonList of json list
| JsonAssoc of (string * json) list

(* Pluggable output sinks for info / warning / error / structured
   messages; hosts can redirect all output by installing their own. *)
type printer = {
  printer_prinfo: string -> unit;
  printer_prwarning: string -> unit;
  printer_prerror: string -> unit;
  printer_prgeneric: string -> (unit -> string) -> (unit -> json) -> unit
}

(* Console printer: info on stdout, warnings (cyan) and errors (red) on
   stderr.  The JSON thunk of [printer_prgeneric] is ignored here. *)
let default_printer =
  { printer_prinfo = (fun s -> pr "%s" s; flush stdout);
    printer_prwarning = (fun s -> fpr stderr "%s" (colorize_cyan s); flush stdout; flush stderr);
    printer_prerror = (fun s -> fpr stderr "%s" (colorize_red s); flush stdout; flush stderr);
    printer_prgeneric = fun label get_string get_json -> pr "%s: %s" label (get_string ())}

(* The printer currently in effect. *)
let current_printer = ref default_printer
let set_printer printer = current_printer := printer

(* Write [s] to stdout verbatim, switching the channel to binary mode
   first so no newline translation occurs. *)
let print_raw s = set_binary_mode_out stdout true; pr "%s" s; flush stdout
let print_string s = (!current_printer).printer_prinfo s
let print_generic label to_string to_json a = (!current_printer).printer_prgeneric label (fun () -> to_string a) (fun () -> to_json a)
(* Debug aid: dump an arbitrary value via its Marshal representation. *)
let print_any s = (!current_printer).printer_prinfo (Marshal.to_string s [])
let strcat s1 s2 = s1 ^ s2
let concat_l sep (l:string list) = BatString.concat sep l

(* Encode an array of Unicode code points as a UTF-8 string.
   Builds into a Buffer; the previous fold with [^] re-copied the
   accumulator on every element, which was accidentally quadratic. *)
let string_of_unicode (bytes:int array) =
  let buf = Buffer.create (Array.length bytes) in
  BatArray.iter
    (fun b -> Buffer.add_string buf (BatUTF8.init 1 (fun _ -> BatUChar.of_int b)))
    bytes;
  Buffer.contents buf

(* Inverse of [string_of_unicode]: the code points of a UTF-8 string. *)
let unicode_of_string (string:string) =
  let n = BatUTF8.length string in
  let t = Array.make n 0 in
  let i = ref 0 in
  BatUTF8.iter (fun c -> t.(!i) <- BatUChar.code c; incr i) string;
  t
let base64_encode s = BatBase64.str_encode s
let base64_decode s = BatBase64.str_decode s
(* F* represents integers as unbounded [Z.t]; the conversions below
   mediate between that and native OCaml ints / chars. *)
let char_of_int i = Z.to_int i
let int_of_string = Z.of_string
(* [Z.of_string] raises [Invalid_argument] on malformed input. *)
let safe_int_of_string x = try Some (int_of_string x) with Invalid_argument _ -> None
let int_of_char x = Z.of_int x
let int_of_byte x = x
let int_of_uint8 x = Z.of_int (Char.code x)
let uint16_of_int i = Z.to_int i
let byte_of_char c = c

let float_of_string s = float_of_string s
let float_of_byte b = float_of_int (Char.code b)
let float_of_int32 = float_of_int
let float_of_int64 = BatInt64.to_float

(* "int" here is the native OCaml int used for int32 on the F* side. *)
let int_of_int32 i = i
let int32_of_int i = BatInt32.of_int i

let string_of_int = Z.to_string
let string_of_bool = string_of_bool
let string_of_int32 = BatInt32.to_string
let string_of_int64 = BatInt64.to_string
let string_of_float = string_of_float
(* Render a single Unicode code point as a UTF-8 string. *)
let string_of_char i = BatUTF8.init 1 (fun _ -> BatUChar.chr i)
(* Two-character, zero-padded lowercase hex rendering of a byte value. *)
let hex_string_of_byte (i:int) =
  let hs = spr "%x" i in
  if (String.length hs = 1) then "0" ^ hs
  else hs
let string_of_bytes = string_of_unicode
let bytes_of_string = unicode_of_string
let starts_with = BatString.starts_with
let trim_string = BatString.trim
let ends_with = BatString.ends_with
(* Code point at UTF-8 character position [index] (not a byte offset). *)
let char_at s index = BatUChar.code (BatUTF8.get s (Z.to_int index))
(* ASCII-only uppercase test on a code point. *)
let is_upper c = 65 <= c && c <= 90
(* Substring containment: does [s1] contain [s2]? *)
let contains (s1:string) (s2:string) = BatString.exists s1 s2
let substring_from s index = BatString.tail s (Z.to_int index)
(* Length-[j] byte slice of [s] starting at byte offset [i]. *)
let substring s i j = BatString.sub s (Z.to_int i) (Z.to_int j)
(* Replace every occurrence of code point [c1] by [c2] (Unicode-aware). *)
let replace_char (s:string) c1 c2 =
  let c1, c2 = BatUChar.chr c1, BatUChar.chr c2 in
  BatUTF8.map (fun x -> if x = c1 then c2 else x) s
(* Replace every occurrence of the byte [c] by the string [by]. *)
let replace_chars (s:string) c (by:string) =
  BatString.replace_chars (fun x -> if x = Char.chr c then by else BatString.of_char x) s
(* let hashcode s = Z.of_int (StringOps.hash s) *)
let compare s1 s2 = Z.of_int (BatString.compare s1 s2)
(* Split on every occurrence of the separator string [sep]. *)
let split s sep = BatString.split_on_string sep s
let splitlines s = split s "\n"

(* Abbreviations: int-of-float and float-of-int. *)
let iof = int_of_float
let foi = float_of_int

(* Formatted printing through the current printer's info sink. *)
let print1 a b = print_string (format1 a b)
let print2 a b c = print_string (format2 a b c)
let print3 a b c d = print_string (format3 a b c d)
let print4 a b c d e = print_string (format4 a b c d e)
let print5 a b c d e f = print_string (format5 a b c d e f)
let print6 a b c d e f g = print_string (format6 a b c d e f g)
let print fmt args = print_string (format fmt args)

(* Same, routed to the current printer's error sink. *)
let print_error s = (!current_printer).printer_prerror s
let print1_error a b = print_error (format1 a b)
let print2_error a b c = print_error (format2 a b c)
let print3_error a b c d = print_error (format3 a b c d)

(* Same, routed to the current printer's warning sink. *)
let print_warning s = (!current_printer).printer_prwarning s
let print1_warning a b = print_warning (format1 a b)
let print2_warning a b c = print_warning (format2 a b c)
let print3_warning a b c d = print_warning (format3 a b c d)

(* Re-export the standard channels under these names. *)
let stderr = stderr
let stdout = stdout

(* Render with [format] and write to channel [oc]. *)
let fprint oc fmt args = Printf.fprintf oc "%s" (format fmt args)

[@@deriving yojson,show]

(* Discriminators and (partial) projections for [FStar_Pervasives.either]. *)
let is_left x =
  match x with
  | FStar_Pervasives.Inl _ -> true
  | _ -> false

let is_right x =
  match x with
  | FStar_Pervasives.Inr _ -> true
  | _ -> false

(* Fail with an informative message when the wrong side is projected. *)
let left x =
  match x with
  | FStar_Pervasives.Inl v -> v
  | _ -> failwith "Not in left"
let right x =
  match x with
  | FStar_Pervasives.Inr v -> v
  | _ -> failwith "Not in right"

(* Function composition: [(f -<- g) x = f (g x)]. *)
let (-<-) f g x = f (g x)

(* First element of [l] that is related by [f] to some later element,
   i.e. the first element with a duplicate under [f]; [None] otherwise. *)
let find_dup f l =
  let rec aux = function
    | hd::tl ->
       let hds, tl' = BatList.partition (f hd) tl in
       (match hds with
        | [] -> aux tl'
        | _ -> Some hd)
    | _ -> None in
  aux l

(* True when no two elements of [l] are related by [f]. *)
let nodups f l = match find_dup f l with | None -> true | _ -> false

(* Keep the first occurrence of each [f]-equivalence class.
   NOTE: the result is in reverse order relative to [l]. *)
let remove_dups f l =
  let rec aux out = function
    | hd::tl -> let _, tl' = BatList.partition (f hd) tl in aux (hd::out) tl'
    | _ -> out in
  aux [] l

(* Extract the payload of an option, failing hard on [None]. *)
let must = function
  | None -> failwith "Empty option"
  | Some v -> v

(* [dflt d o]: contents of [o], or [d] when [o] is [None]. *)
let dflt d = function
  | None   -> d
  | Some v -> v

(* Monadic bind on options. *)
let bind_opt opt f =
  match opt with
  | Some v -> f v
  | None -> None

(* Functorial map on options. *)
let map_opt opt f =
  match opt with
  | Some v -> Some (f v)
  | None -> None

(* Thin wrappers over the BatList search / quantifier functions. *)
let try_find f l = BatList.find_opt f l

let for_all f l = BatList.for_all f l
let for_some f l = BatList.exists f l

(* Split [l] after its first [n] elements, returning (prefix, rest).
   Fails if [l] has fewer than [n] elements. *)
let first_N n l =
  let n = Z.to_int n in
  let rec go acc remaining rest =
    if remaining = n then (BatList.rev acc, rest)
    else
      match rest with
      | x :: xs -> go (x :: acc) (remaining + 1) xs
      | [] -> failwith "firstN"
  in
  go [] 0 l

(* Drop the first [n] elements of [l]; raises if [l] is too short. *)
let nth_tail n l =
  let rec drop k rest =
    if k = 0 then rest
    else drop (k - 1) (BatList.tl rest)
  in
  drop (Z.to_int n) l

(* Split a non-empty list into (all-but-last, last). *)
let prefix l =
  match BatList.rev l with
  | last :: rev_init -> (BatList.rev rev_init, last)
  | [] -> failwith "impossible"

(* Allocate a fresh mutable reference holding [v]. *)
let mk_ref v = ref v


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Const.ml
================================================
open Prims
(* Signedness of a machine-integer literal. *)
type signedness =
  | Unsigned 
  | Signed [@@deriving yojson,show]
(* Bit width of a machine-integer literal ([Sizet] for usize-like ints). *)
type width =
  | Int8 
  | Int16 
  | Int32 
  | Int64 
  | Sizet [@@deriving yojson,show]
(* Constants of the F* surface syntax. *)
type sconst =
  | Const_effect 
  | Const_unit 
  | Const_bool of Prims.bool 
  (* Textual form of the literal plus an optional machine type;
     [None] means an unbounded mathematical integer. *)
  | Const_int of (Prims.string * (signedness * width)
  FStar_Pervasives_Native.option) 
  | Const_char of FStar_BaseTypes.char 
  | Const_real of Prims.string 
  (* String literal together with the source range it came from. *)
  | Const_string of (Prims.string * FStar_Compiler_Range.range) 
  | Const_range_of 
  | Const_set_range_of 
  | Const_range of FStar_Compiler_Range.range 
  | Const_reify of FStar_Ident.lid FStar_Pervasives_Native.option 
  | Const_reflect of FStar_Ident.lid [@@deriving yojson,show]
(* Equality on constants modulo insignificant detail: integer literals
   are compared after decimal normalization, string literals ignore
   their source range, and [Const_reify] nodes compare equal regardless
   of their effect annotation.  Everything else falls back to
   structural equality. *)
let (eq_const : sconst -> sconst -> Prims.bool) =
  fun c1 ->
    fun c2 ->
      match (c1, c2) with
      | (Const_int (s1, o1), Const_int (s2, o2)) ->
          (let uu___ = FStar_Compiler_Util.ensure_decimal s1 in
           let uu___1 = FStar_Compiler_Util.ensure_decimal s2 in
           uu___ = uu___1) && (o1 = o2)
      | (Const_string (a, uu___), Const_string (b, uu___1)) -> a = b
      | (Const_reflect l1, Const_reflect l2) -> FStar_Ident.lid_equals l1 l2
      | (Const_reify uu___, Const_reify uu___1) -> true
      | uu___ -> c1 = c2


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Errors.ml
================================================
open Prims
(* Raised when a --warn_error specification cannot be honored. *)
exception Invalid_warn_error_setting of Prims.string 
(* Generated discriminator / projector for the exception payload. *)
let (uu___is_Invalid_warn_error_setting : Prims.exn -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Invalid_warn_error_setting uu___ -> true
    | uu___ -> false
let (__proj__Invalid_warn_error_setting__item__uu___ :
  Prims.exn -> Prims.string) =
  fun projectee ->
    match projectee with | Invalid_warn_error_setting uu___ -> uu___
(* Find the settings triple whose first component equals [e];
   fails hard if [e] is not present in [settings]. *)
let lookup_error :
  'uuuuu 'uuuuu1 'uuuuu2 .
    ('uuuuu * 'uuuuu1 * 'uuuuu2) Prims.list ->
      'uuuuu -> ('uuuuu * 'uuuuu1 * 'uuuuu2)
  =
  fun settings ->
    fun e ->
      let uu___ =
        FStar_Compiler_Util.try_find
          (fun uu___1 -> match uu___1 with | (v, uu___2, i) -> e = v)
          settings in
      match uu___ with
      | FStar_Pervasives_Native.Some i -> i
      | FStar_Pervasives_Native.None ->
          failwith "Impossible: unrecognized error"
(* All settings whose error number lies in the inclusive range [l..h]. *)
let lookup_error_range :
  'uuuuu 'uuuuu1 .
    ('uuuuu * 'uuuuu1 * Prims.int) Prims.list ->
      (Prims.int * Prims.int) -> ('uuuuu * 'uuuuu1 * Prims.int) Prims.list
  =
  fun settings ->
    fun uu___ ->
      match uu___ with
      | (l, h) ->
          let uu___1 =
            FStar_Compiler_List.partition
              (fun uu___2 ->
                 match uu___2 with
                 | (uu___3, uu___4, i) -> (l <= i) && (i <= h)) settings in
          (match uu___1 with | (matches, uu___2) -> matches)
(* Third component of an error setting: its numeric code. *)
let (error_number : FStar_Errors_Codes.error_setting -> Prims.int) =
  fun uu___ -> match uu___ with | (uu___1, uu___2, i) -> i
(* Numeric code of a raw error under the default settings. *)
let (errno : FStar_Errors_Codes.raw_error -> Prims.int) =
  fun e ->
    let uu___ = lookup_error FStar_Errors_Codes.default_settings e in
    error_number uu___
(* Frequently consulted error numbers, resolved once at module load. *)
let (warn_on_use_errno : Prims.int) =
  errno FStar_Errors_Codes.Warning_WarnOnUse
let (defensive_errno : Prims.int) =
  errno FStar_Errors_Codes.Warning_Defensive
let (call_to_erased_errno : Prims.int) =
  errno FStar_Errors_Codes.Error_CallToErased
(* Apply user-supplied warn-error flag changes (each paired with an
   "l..h" error-number range string) on top of the default settings.
   Weakening a CAlwaysError or changing a CFatal error raises
   [Invalid_warn_error_setting]; a malformed range string does too. *)
let (update_flags :
  (FStar_Errors_Codes.error_flag * Prims.string) Prims.list ->
    FStar_Errors_Codes.error_setting Prims.list)
  =
  fun l ->
    (* Validate a requested flag against an error's default flag,
       returning the flag to use or raising on illegal combinations. *)
    let set_one_flag i flag default_flag =
      match (flag, default_flag) with
      | (FStar_Errors_Codes.CWarning, FStar_Errors_Codes.CAlwaysError) ->
          let uu___ =
            let uu___1 =
              let uu___2 = FStar_Compiler_Util.string_of_int i in
              FStar_Compiler_Util.format1 "cannot turn error %s into warning"
                uu___2 in
            Invalid_warn_error_setting uu___1 in
          FStar_Compiler_Effect.raise uu___
      | (FStar_Errors_Codes.CError, FStar_Errors_Codes.CAlwaysError) ->
          let uu___ =
            let uu___1 =
              let uu___2 = FStar_Compiler_Util.string_of_int i in
              FStar_Compiler_Util.format1 "cannot turn error %s into warning"
                uu___2 in
            Invalid_warn_error_setting uu___1 in
          FStar_Compiler_Effect.raise uu___
      | (FStar_Errors_Codes.CSilent, FStar_Errors_Codes.CAlwaysError) ->
          let uu___ =
            let uu___1 =
              let uu___2 = FStar_Compiler_Util.string_of_int i in
              FStar_Compiler_Util.format1 "cannot silence error %s" uu___2 in
            Invalid_warn_error_setting uu___1 in
          FStar_Compiler_Effect.raise uu___
      | (uu___, FStar_Errors_Codes.CFatal) ->
          let uu___1 =
            let uu___2 =
              let uu___3 = FStar_Compiler_Util.string_of_int i in
              FStar_Compiler_Util.format1
                "cannot change the error level of fatal error %s" uu___3 in
            Invalid_warn_error_setting uu___2 in
          FStar_Compiler_Effect.raise uu___1
      | uu___ -> flag in
    (* Re-flag every default setting whose number falls in [range]. *)
    let set_flag_for_range uu___ =
      match uu___ with
      | (flag, range) ->
          let errs =
            lookup_error_range FStar_Errors_Codes.default_settings range in
          FStar_Compiler_List.map
            (fun uu___1 ->
               match uu___1 with
               | (v, default_flag, i) ->
                   let uu___2 = set_one_flag i flag default_flag in
                   (v, uu___2, i)) errs in
    (* Parse an "l..h" range string into a numeric pair. *)
    let compute_range uu___ =
      match uu___ with
      | (flag, s) ->
          let r = FStar_Compiler_Util.split s ".." in
          let uu___1 =
            match r with
            | r1::r2::[] ->
                let uu___2 = FStar_Compiler_Util.int_of_string r1 in
                let uu___3 = FStar_Compiler_Util.int_of_string r2 in
                (uu___2, uu___3)
            | uu___2 ->
                let uu___3 =
                  let uu___4 =
                    FStar_Compiler_Util.format1
                      "Malformed warn-error range %s" s in
                  Invalid_warn_error_setting uu___4 in
                FStar_Compiler_Effect.raise uu___3 in
          (match uu___1 with | (l1, h) -> (flag, (l1, h))) in
    let error_range_settings = FStar_Compiler_List.map compute_range l in
    (* Overrides are prepended, so [lookup_error] finds them first. *)
    let uu___ =
      FStar_Compiler_List.collect set_flag_for_range error_range_settings in
    FStar_Compiler_List.op_At uu___ FStar_Errors_Codes.default_settings
(* A raised error: code, message, location, and context strings. *)
type error =
  (FStar_Errors_Codes.raw_error * Prims.string * FStar_Compiler_Range.range *
    Prims.string Prims.list)
(* Severity of a reported issue. *)
type issue_level =
  | ENotImplemented 
  | EInfo 
  | EWarning 
  | EError 
(* Generated discriminators for [issue_level]. *)
let (uu___is_ENotImplemented : issue_level -> Prims.bool) =
  fun projectee ->
    match projectee with | ENotImplemented -> true | uu___ -> false
let (uu___is_EInfo : issue_level -> Prims.bool) =
  fun projectee -> match projectee with | EInfo -> true | uu___ -> false
let (uu___is_EWarning : issue_level -> Prims.bool) =
  fun projectee -> match projectee with | EWarning -> true | uu___ -> false
let (uu___is_EError : issue_level -> Prims.bool) =
  fun projectee -> match projectee with | EError -> true | uu___ -> false
(* A diagnostic as reported to the user: message, severity, optional
   source range, optional error number, and surrounding context. *)
type issue =
  {
  issue_msg: Prims.string ;
  issue_level: issue_level ;
  issue_range: FStar_Compiler_Range.range FStar_Pervasives_Native.option ;
  issue_number: Prims.int FStar_Pervasives_Native.option ;
  issue_ctx: Prims.string Prims.list }
(* Generated field projectors for [issue]. *)
let (__proj__Mkissue__item__issue_msg : issue -> Prims.string) =
  fun projectee ->
    match projectee with
    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;
        issue_ctx;_} -> issue_msg
let (__proj__Mkissue__item__issue_level : issue -> issue_level) =
  fun projectee ->
    match projectee with
    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;
        issue_ctx;_} -> issue_level1
let (__proj__Mkissue__item__issue_range :
  issue -> FStar_Compiler_Range.range FStar_Pervasives_Native.option) =
  fun projectee ->
    match projectee with
    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;
        issue_ctx;_} -> issue_range
let (__proj__Mkissue__item__issue_number :
  issue -> Prims.int FStar_Pervasives_Native.option) =
  fun projectee ->
    match projectee with
    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;
        issue_ctx;_} -> issue_number
let (__proj__Mkissue__item__issue_ctx : issue -> Prims.string Prims.list) =
  fun projectee ->
    match projectee with
    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;
        issue_ctx;_} -> issue_ctx
(* Callback table through which issues are collected, counted,
   reported, and discarded. *)
type error_handler =
  {
  eh_add_one: issue -> unit ;
  eh_count_errors: unit -> Prims.int ;
  eh_report: unit -> issue Prims.list ;
  eh_clear: unit -> unit }
(* Generated field projectors for [error_handler]. *)
let (__proj__Mkerror_handler__item__eh_add_one :
  error_handler -> issue -> unit) =
  fun projectee ->
    match projectee with
    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_add_one
let (__proj__Mkerror_handler__item__eh_count_errors :
  error_handler -> unit -> Prims.int) =
  fun projectee ->
    match projectee with
    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} ->
        eh_count_errors
let (__proj__Mkerror_handler__item__eh_report :
  error_handler -> unit -> issue Prims.list) =
  fun projectee ->
    match projectee with
    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_report
let (__proj__Mkerror_handler__item__eh_clear : error_handler -> unit -> unit)
  =
  fun projectee ->
    match projectee with
    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_clear
(* Fatal error carrying a full [error] payload. *)
exception Error of error 
let (uu___is_Error : Prims.exn -> Prims.bool) =
  fun projectee ->
    match projectee with | Error uu___ -> true | uu___ -> false
let (__proj__Error__item__uu___ : Prims.exn -> error) =
  fun projectee -> match projectee with | Error uu___ -> uu___
(* Rangeless error: code, message, and context only. *)
exception Err of (FStar_Errors_Codes.raw_error * Prims.string * Prims.string
  Prims.list) 
let (uu___is_Err : Prims.exn -> Prims.bool) =
  fun projectee -> match projectee with | Err uu___ -> true | uu___ -> false
let (__proj__Err__item__uu___ :
  Prims.exn ->
    (FStar_Errors_Codes.raw_error * Prims.string * Prims.string Prims.list))
  = fun projectee -> match projectee with | Err uu___ -> uu___
(* Warning with the same payload shape as [Error]. *)
exception Warning of error 
let (uu___is_Warning : Prims.exn -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning uu___ -> true | uu___ -> false
let (__proj__Warning__item__uu___ : Prims.exn -> error) =
  fun projectee -> match projectee with | Warning uu___ -> uu___
(* Control-flow exceptions used by the front end. *)
exception Stop 
let (uu___is_Stop : Prims.exn -> Prims.bool) =
  fun projectee -> match projectee with | Stop -> true | uu___ -> false
exception Empty_frag 
let (uu___is_Empty_frag : Prims.exn -> Prims.bool) =
  fun projectee -> match projectee with | Empty_frag -> true | uu___ -> false
(* Context rendering is disabled in this build: always the empty string. *)
let (ctx_string : Prims.string Prims.list -> Prims.string) =
  fun ctx -> ""
(* Message of an issue followed by its (currently empty) context. *)
let (issue_message : issue -> Prims.string) =
  fun i ->
    let uu___ = ctx_string i.issue_ctx in
    FStar_String.op_Hat i.issue_msg uu___
(* Render an issue as "<range>(<level><number>) <message><see-also>".
   The range prefix is omitted for range-less issues and for the dummy
   range; a "(see also ...)" suffix is added when the definition range
   differs from the use range. *)
let (format_issue : issue -> Prims.string) =
  fun issue1 ->
    let level_header =
      match issue1.issue_level with
      | EInfo -> "Info"
      | EWarning -> "Warning"
      | EError -> "Error"
      | ENotImplemented -> "Feature not yet implemented: " in
    (* Compute the (range-prefix, see-also-suffix) pair. *)
    let uu___ =
      match issue1.issue_range with
      | FStar_Pervasives_Native.None -> ("", "")
      | FStar_Pervasives_Native.Some r when
          r = FStar_Compiler_Range.dummyRange ->
          let uu___1 =
            let uu___2 =
              let uu___3 = FStar_Compiler_Range.def_range r in
              let uu___4 =
                FStar_Compiler_Range.def_range
                  FStar_Compiler_Range.dummyRange in
              uu___3 = uu___4 in
            if uu___2
            then ""
            else
              (let uu___4 = FStar_Compiler_Range.string_of_range r in
               FStar_Compiler_Util.format1 " (see also %s)" uu___4) in
          ("", uu___1)
      | FStar_Pervasives_Native.Some r ->
          let uu___1 =
            let uu___2 = FStar_Compiler_Range.string_of_use_range r in
            FStar_Compiler_Util.format1 "%s: " uu___2 in
          let uu___2 =
            let uu___3 =
              (let uu___4 = FStar_Compiler_Range.use_range r in
               let uu___5 = FStar_Compiler_Range.def_range r in
               uu___4 = uu___5) ||
                (let uu___4 = FStar_Compiler_Range.def_range r in
                 let uu___5 =
                   FStar_Compiler_Range.def_range
                     FStar_Compiler_Range.dummyRange in
                 uu___4 = uu___5) in
            if uu___3
            then ""
            else
              (let uu___5 = FStar_Compiler_Range.string_of_range r in
               FStar_Compiler_Util.format1 " (see also %s)" uu___5) in
          (uu___1, uu___2) in
    match uu___ with
    | (range_str, see_also_str) ->
        let issue_number =
          match issue1.issue_number with
          | FStar_Pervasives_Native.None -> ""
          | FStar_Pervasives_Native.Some n ->
              let uu___1 = FStar_Compiler_Util.string_of_int n in
              FStar_Compiler_Util.format1 " %s" uu___1 in
        let uu___1 = issue_message issue1 in
        FStar_Compiler_Util.format5 "%s(%s%s) %s%s" range_str level_header
          issue_number uu___1 see_also_str
(* Print an issue, newline-terminated, on the sink matching its
   severity (info goes to the info sink, everything else to errors or
   warnings). *)
let (print_issue : issue -> unit) =
  fun issue1 ->
    let printer =
      match issue1.issue_level with
      | EInfo -> FStar_Compiler_Util.print_string
      | EWarning -> FStar_Compiler_Util.print_warning
      | EError -> FStar_Compiler_Util.print_error
      | ENotImplemented -> FStar_Compiler_Util.print_error in
    let uu___ =
      let uu___1 = format_issue issue1 in FStar_String.op_Hat uu___1 "\n" in
    printer uu___
(* Order issues by use-range; range-less issues sort first. *)
let (compare_issues : issue -> issue -> Prims.int) =
  fun i1 ->
    fun i2 ->
      match ((i1.issue_range), (i2.issue_range)) with
      | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.None) ->
          Prims.int_zero
      | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.Some uu___) ->
          ~- Prims.int_one
      | (FStar_Pervasives_Native.Some uu___, FStar_Pervasives_Native.None) ->
          Prims.int_one
      | (FStar_Pervasives_Native.Some r1, FStar_Pervasives_Native.Some r2) ->
          FStar_Compiler_Range.compare_use_range r1 r2
(* Default handler: buffers non-info issues (info prints immediately),
   counts errors, and on [eh_report] returns the de-duplicated, sorted
   issue list, printing it first when [print] is set. *)
let (mk_default_handler : Prims.bool -> error_handler) =
  fun print ->
    let issues = FStar_Compiler_Util.mk_ref [] in
    let err_count = FStar_Compiler_Util.mk_ref Prims.int_zero in
    let add_one e =
      if e.issue_level = EError
      then
        (let uu___1 =
           let uu___2 = FStar_Compiler_Effect.op_Bang err_count in
           Prims.int_one + uu___2 in
         FStar_Compiler_Effect.op_Colon_Equals err_count uu___1)
      else ();
      (match e.issue_level with
       | EInfo -> print_issue e
       | uu___2 ->
           let uu___3 =
             let uu___4 = FStar_Compiler_Effect.op_Bang issues in e :: uu___4 in
           FStar_Compiler_Effect.op_Colon_Equals issues uu___3);
      (* The --defensive abort is compiled out: [false && ...] below. *)
      (let uu___3 =
         (false) &&
           (e.issue_number = (FStar_Pervasives_Native.Some defensive_errno)) in
       if uu___3 then failwith "Aborting due to --defensive abort" else ()) in
    let count_errors uu___ = FStar_Compiler_Effect.op_Bang err_count in
    let report uu___ =
      let unique_issues =
        let uu___1 = FStar_Compiler_Effect.op_Bang issues in
        FStar_Compiler_Util.remove_dups (fun i0 -> fun i1 -> i0 = i1) uu___1 in
      let sorted_unique_issues =
        FStar_Compiler_List.sortWith compare_issues unique_issues in
      if print
      then FStar_Compiler_List.iter print_issue sorted_unique_issues
      else ();
      sorted_unique_issues in
    let clear uu___ =
      FStar_Compiler_Effect.op_Colon_Equals issues [];
      FStar_Compiler_Effect.op_Colon_Equals err_count Prims.int_zero in
    {
      eh_add_one = add_one;
      eh_count_errors = count_errors;
      eh_report = report;
      eh_clear = clear
    }
(* A printing default handler, installed as the initial current one. *)
let (default_handler : error_handler) = mk_default_handler true
let (current_handler : error_handler FStar_Compiler_Effect.ref) =
  FStar_Compiler_Util.mk_ref default_handler
(* Assemble an [issue] record from its five components. *)
let (mk_issue :
  issue_level ->
    FStar_Compiler_Range.range FStar_Pervasives_Native.option ->
      Prims.string ->
        Prims.int FStar_Pervasives_Native.option ->
          Prims.string Prims.list -> issue)
  =
  fun level range msg n ctx ->
    {
      issue_msg = msg;
      issue_level = level;
      issue_range = range;
      issue_number = n;
      issue_ctx = ctx
    }
(* Number of errors recorded by the current handler. *)
let (get_err_count : unit -> Prims.int) =
  fun uu___ ->
    let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in
    uu___1.eh_count_errors ()
(* Forward one issue to a handler (kept as a separate step so extra
   processing can be hooked in here). *)
let (wrapped_eh_add_one : error_handler -> issue -> unit) =
  fun h ->
    fun issue1 ->
      h.eh_add_one issue1;
      ()
(* Record one issue / many issues with the current handler. *)
let (add_one : issue -> unit) =
  fun issue1 ->
      (
         let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in
         wrapped_eh_add_one uu___1 issue1)
let (add_many : issue Prims.list -> unit) =
  fun issues ->
      (
         let uu___1 =
           let uu___2 = FStar_Compiler_Effect.op_Bang current_handler in
           wrapped_eh_add_one uu___2 in
         FStar_Compiler_List.iter uu___1 issues)
let (report_all : unit -> issue Prims.list) =
  fun uu___ ->
    let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in
    uu___1.eh_report ()
let (clear : unit -> unit) =
  fun uu___ ->
    let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in
    uu___1.eh_clear ()
(* Install [handler], replaying into it any issues the previous
   handler had buffered. *)
let (set_handler : error_handler -> unit) =
  fun handler ->
    let issues = report_all () in
    clear ();
    FStar_Compiler_Effect.op_Colon_Equals current_handler handler;
    add_many issues
(* Mutable stack of context strings attached to raised errors. *)
type error_context_t =
  {
  push: Prims.string -> unit ;
  pop: unit -> Prims.string ;
  clear: unit -> unit ;
  get: unit -> Prims.string Prims.list ;
  set: Prims.string Prims.list -> unit }
(* Generated field projectors for [error_context_t]. *)
let (__proj__Mkerror_context_t__item__push :
  error_context_t -> Prims.string -> unit) =
  fun projectee ->
    match projectee with | { push; pop; clear = clear1; get; set;_} -> push
let (__proj__Mkerror_context_t__item__pop :
  error_context_t -> unit -> Prims.string) =
  fun projectee ->
    match projectee with | { push; pop; clear = clear1; get; set;_} -> pop
let (__proj__Mkerror_context_t__item__clear :
  error_context_t -> unit -> unit) =
  fun projectee ->
    match projectee with | { push; pop; clear = clear1; get; set;_} -> clear1
let (__proj__Mkerror_context_t__item__get :
  error_context_t -> unit -> Prims.string Prims.list) =
  fun projectee ->
    match projectee with | { push; pop; clear = clear1; get; set;_} -> get
let (__proj__Mkerror_context_t__item__set :
  error_context_t -> Prims.string Prims.list -> unit) =
  fun projectee ->
    match projectee with | { push; pop; clear = clear1; get; set;_} -> set
(* The single, global error-context stack (head = innermost context). *)
let (error_context : error_context_t) =
  let ctxs = FStar_Compiler_Util.mk_ref [] in
  let push s =
    let uu___ =
      let uu___1 = FStar_Compiler_Effect.op_Bang ctxs in s :: uu___1 in
    FStar_Compiler_Effect.op_Colon_Equals ctxs uu___ in
  let pop s =
    let uu___ = FStar_Compiler_Effect.op_Bang ctxs in
    match uu___ with
    | h::t -> (FStar_Compiler_Effect.op_Colon_Equals ctxs t; h)
    | uu___1 -> failwith "cannot pop error prefix..." in
  let clear1 uu___ = FStar_Compiler_Effect.op_Colon_Equals ctxs [] in
  let get uu___ = FStar_Compiler_Effect.op_Bang ctxs in
  let set c = FStar_Compiler_Effect.op_Colon_Equals ctxs c in
  { push; pop; clear = clear1; get; set }
(* Current error-context stack. *)
let (get_ctx : unit -> Prims.string Prims.list) =
  fun uu___ -> error_context.get ()
(* Diagnostics are compiled out: the [if false] makes this a no-op. *)
let (diag : FStar_Compiler_Range.range -> Prims.string -> unit) =
  fun r ->
    fun msg ->
      if false
      then
        add_one
          (mk_issue EInfo (FStar_Pervasives_Native.Some r) msg
             FStar_Pervasives_Native.None [])
      else ()
(* Stubbed out in this build: unsafe-option warnings are dropped. *)
let (warn_unsafe_options :
  FStar_Compiler_Range.range FStar_Pervasives_Native.option ->
    Prims.string -> unit)
  =
  fun rng_opt ->
    fun msg -> ()
(* Stub; see the commented-out FStar_Options call below. *)
let (set_option_warning_callback_range :
  FStar_Compiler_Range.range FStar_Pervasives_Native.option -> unit) =
  fun ropt ->
    ()
    (* FStar_Options.set_option_warning_callback (warn_unsafe_options ropt) *)
(* Pair of callbacks: install the warn-error parser and read the
   current error flags.  Flag storage is stubbed in this build: the
   setter only remembers the parser and the getter always returns the
   default settings. *)
let (uu___279 :
  (((Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) *
    (unit -> FStar_Errors_Codes.error_setting Prims.list)))
  =
  let parser_callback =
    FStar_Compiler_Util.mk_ref FStar_Pervasives_Native.None in
  (* let error_flags = FStar_Compiler_Util.smap_create (Prims.of_int (10)) in *)
  let set_error_flags uu___ = () in
  let get_error_flags uu___ =
     FStar_Errors_Codes.default_settings in
  let set_callbacks f =
    FStar_Compiler_Effect.op_Colon_Equals parser_callback
      (FStar_Pervasives_Native.Some f)
    (* FStar_Options.set_option_warning_callback *)
    (*   (warn_unsafe_options FStar_Pervasives_Native.None) *)
  in
  (set_callbacks, get_error_flags)
(* Projections of the pair above under their public names. *)
let (t_set_parse_warn_error :
  (Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) =
  match uu___279 with
  | (t_set_parse_warn_error1, error_flags) -> t_set_parse_warn_error1
let (error_flags : unit -> FStar_Errors_Codes.error_setting Prims.list) =
  match uu___279 with
  | (t_set_parse_warn_error1, error_flags1) -> error_flags1
let (set_parse_warn_error :
  (Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) =
  t_set_parse_warn_error
(* Current setting for a raw error under the active flags. *)
let (lookup :
  FStar_Errors_Codes.raw_error -> FStar_Errors_Codes.error_setting) =
  fun err ->
    let flags = error_flags () in
    let uu___ = lookup_error flags err in
    match uu___ with
    | (v, level, i) ->
        let with_level level1 = (v, level1, i) in
        (match v with
         | uu___1 -> with_level level)

(* Raise a fatal [Error] at range [r], capturing the current context. *)
let raise_error :
  'a .
    (FStar_Errors_Codes.raw_error * Prims.string) ->
      FStar_Compiler_Range.range -> 'a
  =
  fun uu___ ->
    fun r ->
      match uu___ with
      | (e, msg) ->
          let uu___1 =
            let uu___2 =
              let uu___3 = error_context.get () in (e, msg, r, uu___3) in
            Error uu___2 in
          FStar_Compiler_Effect.raise uu___1
(* Raise a rangeless [Err], capturing the current context. *)
let raise_err : 'a . (FStar_Errors_Codes.raw_error * Prims.string) -> 'a =
  fun uu___ ->
    match uu___ with
    | (e, msg) ->
        let uu___1 =
          let uu___2 = let uu___3 = error_context.get () in (e, msg, uu___3) in
          Err uu___2 in
        FStar_Compiler_Effect.raise uu___1

let (log_issue_ctx :
  FStar_Compiler_Range.range ->
    (FStar_Errors_Codes.raw_error * Prims.string) ->
      Prims.string Prims.list -> unit)
  =
  fun r ->
    fun uu___ ->
      fun ctx ->
        match uu___ with
        | (e, msg) ->
            let uu___1 = lookup e in
            (match uu___1 with
             | (uu___2, FStar_Errors_Codes.CAlwaysError, errno1) ->
                 add_one
                   (mk_issue EError (FStar_Pervasives_Native.Some r) msg
                      (FStar_Pervasives_Native.Some errno1) ctx)
             | (uu___2, FStar_Errors_Codes.CError, errno1) ->
                 add_one
                   (mk_issue EError (FStar_Pervasives_Native.Some r) msg
                      (FStar_Pervasives_Native.Some errno1) ctx)
             | (uu___2, FStar_Errors_Codes.CWarning, errno1) ->
                 add_one
                   (mk_issue EWarning (FStar_Pervasives_Native.Some r) msg
                      (FStar_Pervasives_Native.Some errno1) ctx)
             | (uu___2, FStar_Errors_Codes.CSilent, uu___3) -> ()
             | (uu___2, FStar_Errors_Codes.CFatal, errno1) ->
                 let i =
                   mk_issue EError (FStar_Pervasives_Native.Some r) msg
                     (FStar_Pervasives_Native.Some errno1) ctx in
                 let uu___3 = false in
                 if uu___3
                 then add_one i
                 else
                   (let uu___5 =
                      let uu___6 = format_issue i in
                      FStar_String.op_Hat
                        "don't use log_issue to report fatal error, should use raise_error: "
                        uu___6 in
                    failwith uu___5))
(* Record one diagnostic at range [r], supplying the ambient error
   context; thin wrapper over [log_issue_ctx]. *)
let (log_issue :
  FStar_Compiler_Range.range ->
    (FStar_Errors_Codes.raw_error * Prims.string) -> unit)
  =
  fun r ->
    fun err ->
      let ctx = error_context.get () in
      log_issue_ctx r err ctx


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Errors_Codes.ml
================================================
open Prims
(* Severity classification attached to every diagnostic code.  As used
   by the logging path in FStar_Errors: [CAlwaysError] and [CError]
   are reported as errors, [CWarning] as a warning, [CSilent] is
   dropped, and [CFatal] must be raised (not logged).  NOTE: the
   constructor order fixes the runtime tags and polymorphic-compare
   ordering — do not reorder. *)
type error_flag =
  | CFatal 
  | CAlwaysError 
  | CError 
  | CWarning 
  | CSilent 
(* Generated discriminators for [error_flag]: each one answers whether
   its argument is the named constructor. *)
let (uu___is_CFatal : error_flag -> Prims.bool) =
  function CFatal -> true | _ -> false
let (uu___is_CAlwaysError : error_flag -> Prims.bool) =
  function CAlwaysError -> true | _ -> false
let (uu___is_CError : error_flag -> Prims.bool) =
  function CError -> true | _ -> false
let (uu___is_CWarning : error_flag -> Prims.bool) =
  function CWarning -> true | _ -> false
let (uu___is_CSilent : error_flag -> Prims.bool) =
  function CSilent -> true | _ -> false
(* Every diagnostic code known to the F* surface AST, mirrored verbatim
   from upstream F*'s FStar.Errors.Codes.  The constructor ORDER is
   significant: it fixes the OCaml runtime tags (and presumably the
   numeric error codes used with --warn_error — confirm against the
   upstream table), so entries must never be reordered, renamed, or
   removed.  Several names carry historical typos inherited from F*
   (e.g. [Fatal_DataContructorNotFound], [Fatal_ConstsructorBuildWrongType],
   [Fatal_UnepxectedOrUnboundOperator], [Fatal_QulifierListNotPermitted]);
   they are kept as-is to stay in sync with upstream.  Note that the
   [Error_]/[Fatal_]/[Warning_] prefixes are only naming conventions;
   actual severity comes from the [error_flag] looked up per code. *)
type raw_error =
  | Error_DependencyAnalysisFailed 
  | Error_IDETooManyPops 
  | Error_IDEUnrecognized 
  | Error_InductiveTypeNotSatisfyPositivityCondition 
  | Error_InvalidUniverseVar 
  | Error_MissingFileName 
  | Error_ModuleFileNameMismatch 
  | Error_OpPlusInUniverse 
  | Error_OutOfRange 
  | Error_ProofObligationFailed 
  | Error_TooManyFiles 
  | Error_TypeCheckerFailToProve 
  | Error_TypeError 
  | Error_UncontrainedUnificationVar 
  | Error_UnexpectedGTotComputation 
  | Error_UnexpectedInstance 
  | Error_UnknownFatal_AssertionFailure 
  | Error_Z3InvocationError 
  | Error_IDEAssertionFailure 
  | Error_Z3SolverError 
  | Fatal_AbstractTypeDeclarationInInterface 
  | Fatal_ActionMustHaveFunctionType 
  | Fatal_AlreadyDefinedTopLevelDeclaration 
  | Fatal_ArgumentLengthMismatch 
  | Fatal_AssertionFailure 
  | Fatal_AssignToImmutableValues 
  | Fatal_AssumeValInInterface 
  | Fatal_BadlyInstantiatedSynthByTactic 
  | Fatal_BadSignatureShape 
  | Fatal_BinderAndArgsLengthMismatch 
  | Fatal_BothValAndLetInInterface 
  | Fatal_CardinalityConstraintViolated 
  | Fatal_ComputationNotTotal 
  | Fatal_ComputationTypeNotAllowed 
  | Fatal_ComputedTypeNotMatchAnnotation 
  | Fatal_ConstructorArgLengthMismatch 
  | Fatal_ConstructorFailedCheck 
  | Fatal_ConstructorNotFound 
  | Fatal_ConstsructorBuildWrongType 
  | Fatal_CycleInRecTypeAbbreviation 
  | Fatal_DataContructorNotFound 
  | Fatal_DefaultQualifierNotAllowedOnEffects 
  | Fatal_DefinitionNotFound 
  | Fatal_DisjuctivePatternVarsMismatch 
  | Fatal_DivergentComputationCannotBeIncludedInTotal 
  | Fatal_DuplicateInImplementation 
  | Fatal_DuplicateModuleOrInterface 
  | Fatal_DuplicateTopLevelNames 
  | Fatal_DuplicateTypeAnnotationAndValDecl 
  | Fatal_EffectCannotBeReified 
  | Fatal_EffectConstructorNotFullyApplied 
  | Fatal_EffectfulAndPureComputationMismatch 
  | Fatal_EffectNotFound 
  | Fatal_EffectsCannotBeComposed 
  | Fatal_ErrorInSolveDeferredConstraints 
  | Fatal_ErrorsReported 
  | Fatal_EscapedBoundVar 
  | Fatal_ExpectedArrowAnnotatedType 
  | Fatal_ExpectedGhostExpression 
  | Fatal_ExpectedPureExpression 
  | Fatal_ExpectNormalizedEffect 
  | Fatal_ExpectTermGotFunction 
  | Fatal_ExpectTrivialPreCondition 
  | Fatal_FailToCompileNativeTactic 
  | Fatal_FailToExtractNativeTactic 
  | Fatal_FailToProcessPragma 
  | Fatal_FailToResolveImplicitArgument 
  | Fatal_FailToSolveUniverseInEquality 
  | Fatal_FieldsNotBelongToSameRecordType 
  | Fatal_ForbiddenReferenceToCurrentModule 
  | Fatal_FreeVariables 
  | Fatal_FunctionTypeExpected 
  | Fatal_IdentifierNotFound 
  | Fatal_IllAppliedConstant 
  | Fatal_IllegalCharInByteArray 
  | Fatal_IllegalCharInOperatorName 
  | Fatal_IllTyped 
  | Fatal_ImpossibleAbbrevLidBundle 
  | Fatal_ImpossibleAbbrevRenameBundle 
  | Fatal_ImpossibleInductiveWithAbbrev 
  | Fatal_ImpossiblePrePostAbs 
  | Fatal_ImpossiblePrePostArrow 
  | Fatal_ImpossibleToGenerateDMEffect 
  | Fatal_ImpossibleTypeAbbrevBundle 
  | Fatal_ImpossibleTypeAbbrevSigeltBundle 
  | Fatal_IncludeModuleNotPrepared 
  | Fatal_IncoherentInlineUniverse 
  | Fatal_IncompatibleKinds 
  | Fatal_IncompatibleNumberOfTypes 
  | Fatal_IncompatibleSetOfUniverse 
  | Fatal_IncompatibleUniverse 
  | Fatal_InconsistentImplicitArgumentAnnotation 
  | Fatal_InconsistentImplicitQualifier 
  | Fatal_InconsistentQualifierAnnotation 
  | Fatal_InferredTypeCauseVarEscape 
  | Fatal_InlineRenamedAsUnfold 
  | Fatal_InsufficientPatternArguments 
  | Fatal_InterfaceAlreadyProcessed 
  | Fatal_InterfaceNotImplementedByModule 
  | Fatal_InterfaceWithTypeImplementation 
  | Fatal_InvalidFloatingPointNumber 
  | Fatal_InvalidFSDocKeyword 
  | Fatal_InvalidIdentifier 
  | Fatal_InvalidLemmaArgument 
  | Fatal_InvalidNumericLiteral 
  | Fatal_InvalidRedefinitionOfLexT 
  | Fatal_InvalidUnicodeInStringLiteral 
  | Fatal_InvalidUTF8Encoding 
  | Fatal_InvalidWarnErrorSetting 
  | Fatal_LetBoundMonadicMismatch 
  | Fatal_LetMutableForVariablesOnly 
  | Fatal_LetOpenModuleOnly 
  | Fatal_LetRecArgumentMismatch 
  | Fatal_MalformedActionDeclaration 
  | Fatal_MismatchedPatternType 
  | Fatal_MismatchUniversePolymorphic 
  | Fatal_MissingDataConstructor 
  | Fatal_MissingExposeInterfacesOption 
  | Fatal_MissingFieldInRecord 
  | Fatal_MissingImplementation 
  | Fatal_MissingImplicitArguments 
  | Fatal_MissingInterface 
  | Fatal_MissingNameInBinder 
  | Fatal_MissingPrimsModule 
  | Fatal_MissingQuantifierBinder 
  | Fatal_ModuleExpected 
  | Fatal_ModuleFileNotFound 
  | Fatal_ModuleFirstStatement 
  | Fatal_ModuleNotFound 
  | Fatal_ModuleOrFileNotFound 
  | Fatal_MonadAlreadyDefined 
  | Fatal_MoreThanOneDeclaration 
  | Fatal_MultipleLetBinding 
  | Fatal_NameNotFound 
  | Fatal_NameSpaceNotFound 
  | Fatal_NegativeUniverseConstFatal_NotSupported 
  | Fatal_NoFileProvided 
  | Fatal_NonInductiveInMutuallyDefinedType 
  | Fatal_NonLinearPatternNotPermitted 
  | Fatal_NonLinearPatternVars 
  | Fatal_NonSingletonTopLevel 
  | Fatal_NonSingletonTopLevelModule 
  | Error_NonTopRecFunctionNotFullyEncoded 
  | Fatal_NonTrivialPreConditionInPrims 
  | Fatal_NonVariableInductiveTypeParameter 
  | Fatal_NotApplicationOrFv 
  | Fatal_NotEnoughArgsToEffect 
  | Fatal_NotEnoughArgumentsForEffect 
  | Fatal_NotFunctionType 
  | Fatal_NotSupported 
  | Fatal_NotTopLevelModule 
  | Fatal_NotValidFStarFile 
  | Fatal_NotValidIncludeDirectory 
  | Fatal_OneModulePerFile 
  | Fatal_OpenGoalsInSynthesis 
  | Fatal_OptionsNotCompatible 
  | Fatal_OutOfOrder 
  | Fatal_ParseErrors 
  | Fatal_ParseItError 
  | Fatal_PolyTypeExpected 
  | Fatal_PossibleInfiniteTyp 
  | Fatal_PreModuleMismatch 
  | Fatal_QulifierListNotPermitted 
  | Fatal_RecursiveFunctionLiteral 
  | Fatal_ReflectOnlySupportedOnEffects 
  | Fatal_ReservedPrefix 
  | Fatal_SMTOutputParseError 
  | Fatal_SMTSolverError 
  | Fatal_SyntaxError 
  | Fatal_SynthByTacticError 
  | Fatal_TacticGotStuck 
  | Fatal_TcOneFragmentFailed 
  | Fatal_TermOutsideOfDefLanguage 
  | Fatal_ToManyArgumentToFunction 
  | Fatal_TooManyOrTooFewFileMatch 
  | Fatal_TooManyPatternArguments 
  | Fatal_TooManyUniverse 
  | Fatal_TypeMismatch 
  | Fatal_TypeWithinPatternsAllowedOnVariablesOnly 
  | Fatal_UnableToReadFile 
  | Fatal_UnepxectedOrUnboundOperator 
  | Fatal_UnexpectedBinder 
  | Fatal_UnexpectedBindShape 
  | Fatal_UnexpectedChar 
  | Fatal_UnexpectedComputationTypeForLetRec 
  | Fatal_UnexpectedConstructorType 
  | Fatal_UnexpectedDataConstructor 
  | Fatal_UnexpectedEffect 
  | Fatal_UnexpectedEmptyRecord 
  | Fatal_UnexpectedExpressionType 
  | Fatal_UnexpectedFunctionParameterType 
  | Fatal_UnexpectedGeneralizedUniverse 
  | Fatal_UnexpectedGTotForLetRec 
  | Fatal_UnexpectedGuard 
  | Fatal_UnexpectedIdentifier 
  | Fatal_UnexpectedImplicitArgument 
  | Fatal_UnexpectedImplictArgument 
  | Fatal_UnexpectedInductivetype 
  | Fatal_UnexpectedLetBinding 
  | Fatal_UnexpectedModuleDeclaration 
  | Fatal_UnexpectedNumberOfUniverse 
  | Fatal_UnexpectedNumericLiteral 
  | Fatal_UnexpectedPattern 
  | Fatal_UnexpectedPosition 
  | Fatal_UnExpectedPreCondition 
  | Fatal_UnexpectedReturnShape 
  | Fatal_UnexpectedSignatureForMonad 
  | Fatal_UnexpectedTerm 
  | Fatal_UnexpectedTermInUniverse 
  | Fatal_UnexpectedTermType 
  | Fatal_UnexpectedTermVQuote 
  | Fatal_UnexpectedUniversePolymorphicReturn 
  | Fatal_UnexpectedUniverseVariable 
  | Fatal_UnfoldableDeprecated 
  | Fatal_UnificationNotWellFormed 
  | Fatal_Uninstantiated 
  | Error_UninstantiatedUnificationVarInTactic 
  | Fatal_UninstantiatedVarInTactic 
  | Fatal_UniverseMightContainSumOfTwoUnivVars 
  | Fatal_UniversePolymorphicInnerLetBound 
  | Fatal_UnknownAttribute 
  | Fatal_UnknownToolForDep 
  | Fatal_UnrecognizedExtension 
  | Fatal_UnresolvedPatternVar 
  | Fatal_UnsupportedConstant 
  | Fatal_UnsupportedDisjuctivePatterns 
  | Fatal_UnsupportedQualifier 
  | Fatal_UserTacticFailure 
  | Fatal_ValueRestriction 
  | Fatal_VariableNotFound 
  | Fatal_WrongBodyTypeForReturnWP 
  | Fatal_WrongDataAppHeadFormat 
  | Fatal_WrongDefinitionOrder 
  | Fatal_WrongResultTypeAfterConstrutor 
  | Fatal_WrongTerm 
  | Fatal_WhenClauseNotSupported 
  | Unused01 
  | Warning_AddImplicitAssumeNewQualifier 
  | Warning_AdmitWithoutDefinition 
  | Warning_CachedFile 
  | Warning_DefinitionNotTranslated 
  | Warning_DependencyFound 
  | Warning_DeprecatedEqualityOnBinder 
  | Warning_DeprecatedOpaqueQualifier 
  | Warning_DocOverwrite 
  | Warning_FileNotWritten 
  | Warning_Filtered 
  | Warning_FunctionLiteralPrecisionLoss 
  | Warning_FunctionNotExtacted 
  | Warning_HintFailedToReplayProof 
  | Warning_HitReplayFailed 
  | Warning_IDEIgnoreCodeGen 
  | Warning_IllFormedGoal 
  | Warning_InaccessibleArgument 
  | Warning_IncoherentImplicitQualifier 
  | Warning_IrrelevantQualifierOnArgumentToReflect 
  | Warning_IrrelevantQualifierOnArgumentToReify 
  | Warning_MalformedWarnErrorList 
  | Warning_MetaAlienNotATmUnknown 
  | Warning_MultipleAscriptions 
  | Warning_NondependentUserDefinedDataType 
  | Warning_NonListLiteralSMTPattern 
  | Warning_NormalizationFailure 
  | Warning_NotDependentArrow 
  | Warning_NotEmbedded 
  | Warning_PatternMissingBoundVar 
  | Warning_RecursiveDependency 
  | Warning_RedundantExplicitCurrying 
  | Warning_SMTPatTDeprecated 
  | Warning_SMTPatternIllFormed 
  | Warning_TopLevelEffect 
  | Warning_UnboundModuleReference 
  | Warning_UnexpectedFile 
  | Warning_UnexpectedFsTypApp 
  | Warning_UnexpectedZ3Output 
  | Warning_UnprotectedTerm 
  | Warning_UnrecognizedAttribute 
  | Warning_UpperBoundCandidateAlreadyVisited 
  | Warning_UseDefaultEffect 
  | Warning_WrongErrorLocation 
  | Warning_Z3InvocationWarning 
  | Warning_PluginNotImplemented 
  | Warning_MissingInterfaceOrImplementation 
  | Warning_ConstructorBuildsUnexpectedType 
  | Warning_ModuleOrFileNotFoundWarning 
  | Error_NoLetMutable 
  | Error_BadImplicit 
  | Warning_DeprecatedDefinition 
  | Fatal_SMTEncodingArityMismatch 
  | Warning_Defensive 
  | Warning_CantInspect 
  | Warning_NilGivenExplicitArgs 
  | Warning_ConsAppliedExplicitArgs 
  | Warning_UnembedBinderKnot 
  | Fatal_TacticProofRelevantGoal 
  | Warning_TacAdmit 
  | Fatal_IncoherentPatterns 
  | Error_NoSMTButNeeded 
  | Fatal_UnexpectedAntiquotation 
  | Fatal_SplicedUndef 
  | Fatal_SpliceUnembedFail 
  | Warning_ExtractionUnexpectedEffect 
  | Error_DidNotFail 
  | Warning_UnappliedFail 
  | Warning_QuantifierWithoutPattern 
  | Error_EmptyFailErrs 
  | Warning_logicqualifier 
  | Fatal_CyclicDependence 
  | Error_InductiveAnnotNotAType 
  | Fatal_FriendInterface 
  | Error_CannotRedefineConst 
  | Error_BadClassDecl 
  | Error_BadInductiveParam 
  | Error_FieldShadow 
  | Error_UnexpectedDM4FType 
  | Fatal_EffectAbbreviationResultTypeMismatch 
  | Error_AlreadyCachedAssertionFailure 
  | Error_MustEraseMissing 
  | Warning_EffectfulArgumentToErasedFunction 
  | Fatal_EmptySurfaceLet 
  | Warning_UnexpectedCheckedFile 
  | Fatal_ExtractionUnsupported 
  | Warning_SMTErrorReason 
  | Warning_CoercionNotFound 
  | Error_QuakeFailed 
  | Error_IllSMTPat 
  | Error_IllScopedTerm 
  | Warning_UnusedLetRec 
  | Fatal_Effects_Ordering_Coherence 
  | Warning_BleedingEdge_Feature 
  | Warning_IgnoredBinding 
  | Warning_CouldNotReadHints 
  | Fatal_BadUvar 
  | Warning_WarnOnUse 
  | Warning_DeprecatedAttributeSyntax 
  | Warning_DeprecatedGeneric 
  | Error_BadSplice 
  | Error_UnexpectedUnresolvedUvar 
  | Warning_UnfoldPlugin 
  | Error_LayeredMissingAnnot 
  | Error_CallToErased 
  | Error_ErasedCtor 
  | Error_RemoveUnusedTypeParameter 
  | Warning_NoMagicInFSharp 
  | Error_BadLetOpenRecord 
  | Error_UnexpectedTypeclassInstance 
  | Warning_AmbiguousResolveImplicitsHook 
  | Warning_SplitAndRetryQueries 
  | Warning_DeprecatedLightDoNotation 
  | Warning_FailedToCheckInitialTacticGoal 
  | Warning_Adhoc_IndexedEffect_Combinator 
  | Error_PluginDynlink 
  | Error_InternalQualifier 
  | Warning_NameEscape 
(* Generated one-per-constructor discriminators for [raw_error],
   rewritten in the compact [function] form: each predicate returns
   [true] exactly when its argument is the named constructor.  The
   definitions appear in the same order as the original generated
   code; spellings (typos included) match the constructors exactly. *)
let (uu___is_Error_DependencyAnalysisFailed : raw_error -> Prims.bool) =
  function Error_DependencyAnalysisFailed -> true | _ -> false
let (uu___is_Error_IDETooManyPops : raw_error -> Prims.bool) =
  function Error_IDETooManyPops -> true | _ -> false
let (uu___is_Error_IDEUnrecognized : raw_error -> Prims.bool) =
  function Error_IDEUnrecognized -> true | _ -> false
let (uu___is_Error_InductiveTypeNotSatisfyPositivityCondition :
  raw_error -> Prims.bool) =
  function Error_InductiveTypeNotSatisfyPositivityCondition -> true | _ -> false
let (uu___is_Error_InvalidUniverseVar : raw_error -> Prims.bool) =
  function Error_InvalidUniverseVar -> true | _ -> false
let (uu___is_Error_MissingFileName : raw_error -> Prims.bool) =
  function Error_MissingFileName -> true | _ -> false
let (uu___is_Error_ModuleFileNameMismatch : raw_error -> Prims.bool) =
  function Error_ModuleFileNameMismatch -> true | _ -> false
let (uu___is_Error_OpPlusInUniverse : raw_error -> Prims.bool) =
  function Error_OpPlusInUniverse -> true | _ -> false
let (uu___is_Error_OutOfRange : raw_error -> Prims.bool) =
  function Error_OutOfRange -> true | _ -> false
let (uu___is_Error_ProofObligationFailed : raw_error -> Prims.bool) =
  function Error_ProofObligationFailed -> true | _ -> false
let (uu___is_Error_TooManyFiles : raw_error -> Prims.bool) =
  function Error_TooManyFiles -> true | _ -> false
let (uu___is_Error_TypeCheckerFailToProve : raw_error -> Prims.bool) =
  function Error_TypeCheckerFailToProve -> true | _ -> false
let (uu___is_Error_TypeError : raw_error -> Prims.bool) =
  function Error_TypeError -> true | _ -> false
let (uu___is_Error_UncontrainedUnificationVar : raw_error -> Prims.bool) =
  function Error_UncontrainedUnificationVar -> true | _ -> false
let (uu___is_Error_UnexpectedGTotComputation : raw_error -> Prims.bool) =
  function Error_UnexpectedGTotComputation -> true | _ -> false
let (uu___is_Error_UnexpectedInstance : raw_error -> Prims.bool) =
  function Error_UnexpectedInstance -> true | _ -> false
let (uu___is_Error_UnknownFatal_AssertionFailure : raw_error -> Prims.bool) =
  function Error_UnknownFatal_AssertionFailure -> true | _ -> false
let (uu___is_Error_Z3InvocationError : raw_error -> Prims.bool) =
  function Error_Z3InvocationError -> true | _ -> false
let (uu___is_Error_IDEAssertionFailure : raw_error -> Prims.bool) =
  function Error_IDEAssertionFailure -> true | _ -> false
let (uu___is_Error_Z3SolverError : raw_error -> Prims.bool) =
  function Error_Z3SolverError -> true | _ -> false
let (uu___is_Fatal_AbstractTypeDeclarationInInterface :
  raw_error -> Prims.bool) =
  function Fatal_AbstractTypeDeclarationInInterface -> true | _ -> false
let (uu___is_Fatal_ActionMustHaveFunctionType : raw_error -> Prims.bool) =
  function Fatal_ActionMustHaveFunctionType -> true | _ -> false
let (uu___is_Fatal_AlreadyDefinedTopLevelDeclaration :
  raw_error -> Prims.bool) =
  function Fatal_AlreadyDefinedTopLevelDeclaration -> true | _ -> false
let (uu___is_Fatal_ArgumentLengthMismatch : raw_error -> Prims.bool) =
  function Fatal_ArgumentLengthMismatch -> true | _ -> false
let (uu___is_Fatal_AssertionFailure : raw_error -> Prims.bool) =
  function Fatal_AssertionFailure -> true | _ -> false
let (uu___is_Fatal_AssignToImmutableValues : raw_error -> Prims.bool) =
  function Fatal_AssignToImmutableValues -> true | _ -> false
let (uu___is_Fatal_AssumeValInInterface : raw_error -> Prims.bool) =
  function Fatal_AssumeValInInterface -> true | _ -> false
let (uu___is_Fatal_BadlyInstantiatedSynthByTactic : raw_error -> Prims.bool) =
  function Fatal_BadlyInstantiatedSynthByTactic -> true | _ -> false
let (uu___is_Fatal_BadSignatureShape : raw_error -> Prims.bool) =
  function Fatal_BadSignatureShape -> true | _ -> false
let (uu___is_Fatal_BinderAndArgsLengthMismatch : raw_error -> Prims.bool) =
  function Fatal_BinderAndArgsLengthMismatch -> true | _ -> false
let (uu___is_Fatal_BothValAndLetInInterface : raw_error -> Prims.bool) =
  function Fatal_BothValAndLetInInterface -> true | _ -> false
let (uu___is_Fatal_CardinalityConstraintViolated : raw_error -> Prims.bool) =
  function Fatal_CardinalityConstraintViolated -> true | _ -> false
let (uu___is_Fatal_ComputationNotTotal : raw_error -> Prims.bool) =
  function Fatal_ComputationNotTotal -> true | _ -> false
let (uu___is_Fatal_ComputationTypeNotAllowed : raw_error -> Prims.bool) =
  function Fatal_ComputationTypeNotAllowed -> true | _ -> false
let (uu___is_Fatal_ComputedTypeNotMatchAnnotation : raw_error -> Prims.bool) =
  function Fatal_ComputedTypeNotMatchAnnotation -> true | _ -> false
let (uu___is_Fatal_ConstructorArgLengthMismatch : raw_error -> Prims.bool) =
  function Fatal_ConstructorArgLengthMismatch -> true | _ -> false
let (uu___is_Fatal_ConstructorFailedCheck : raw_error -> Prims.bool) =
  function Fatal_ConstructorFailedCheck -> true | _ -> false
let (uu___is_Fatal_ConstructorNotFound : raw_error -> Prims.bool) =
  function Fatal_ConstructorNotFound -> true | _ -> false
let (uu___is_Fatal_ConstsructorBuildWrongType : raw_error -> Prims.bool) =
  function Fatal_ConstsructorBuildWrongType -> true | _ -> false
let (uu___is_Fatal_CycleInRecTypeAbbreviation : raw_error -> Prims.bool) =
  function Fatal_CycleInRecTypeAbbreviation -> true | _ -> false
let (uu___is_Fatal_DataContructorNotFound : raw_error -> Prims.bool) =
  function Fatal_DataContructorNotFound -> true | _ -> false
let (uu___is_Fatal_DefaultQualifierNotAllowedOnEffects :
  raw_error -> Prims.bool) =
  function Fatal_DefaultQualifierNotAllowedOnEffects -> true | _ -> false
let (uu___is_Fatal_DefinitionNotFound : raw_error -> Prims.bool) =
  function Fatal_DefinitionNotFound -> true | _ -> false
let (uu___is_Fatal_DisjuctivePatternVarsMismatch : raw_error -> Prims.bool) =
  function Fatal_DisjuctivePatternVarsMismatch -> true | _ -> false
let (uu___is_Fatal_DivergentComputationCannotBeIncludedInTotal :
  raw_error -> Prims.bool) =
  function Fatal_DivergentComputationCannotBeIncludedInTotal -> true | _ -> false
let (uu___is_Fatal_DuplicateInImplementation : raw_error -> Prims.bool) =
  function Fatal_DuplicateInImplementation -> true | _ -> false
let (uu___is_Fatal_DuplicateModuleOrInterface : raw_error -> Prims.bool) =
  function Fatal_DuplicateModuleOrInterface -> true | _ -> false
let (uu___is_Fatal_DuplicateTopLevelNames : raw_error -> Prims.bool) =
  function Fatal_DuplicateTopLevelNames -> true | _ -> false
let (uu___is_Fatal_DuplicateTypeAnnotationAndValDecl :
  raw_error -> Prims.bool) =
  function Fatal_DuplicateTypeAnnotationAndValDecl -> true | _ -> false
let (uu___is_Fatal_EffectCannotBeReified : raw_error -> Prims.bool) =
  function Fatal_EffectCannotBeReified -> true | _ -> false
let (uu___is_Fatal_EffectConstructorNotFullyApplied :
  raw_error -> Prims.bool) =
  function Fatal_EffectConstructorNotFullyApplied -> true | _ -> false
let (uu___is_Fatal_EffectfulAndPureComputationMismatch :
  raw_error -> Prims.bool) =
  function Fatal_EffectfulAndPureComputationMismatch -> true | _ -> false
let (uu___is_Fatal_EffectNotFound : raw_error -> Prims.bool) =
  function Fatal_EffectNotFound -> true | _ -> false
let (uu___is_Fatal_EffectsCannotBeComposed : raw_error -> Prims.bool) =
  function Fatal_EffectsCannotBeComposed -> true | _ -> false
let (uu___is_Fatal_ErrorInSolveDeferredConstraints : raw_error -> Prims.bool) =
  function Fatal_ErrorInSolveDeferredConstraints -> true | _ -> false
let (uu___is_Fatal_ErrorsReported : raw_error -> Prims.bool) =
  function Fatal_ErrorsReported -> true | _ -> false
let (uu___is_Fatal_EscapedBoundVar : raw_error -> Prims.bool) =
  function Fatal_EscapedBoundVar -> true | _ -> false
let (uu___is_Fatal_ExpectedArrowAnnotatedType : raw_error -> Prims.bool) =
  function Fatal_ExpectedArrowAnnotatedType -> true | _ -> false
let (uu___is_Fatal_ExpectedGhostExpression : raw_error -> Prims.bool) =
  function Fatal_ExpectedGhostExpression -> true | _ -> false
let (uu___is_Fatal_ExpectedPureExpression : raw_error -> Prims.bool) =
  function Fatal_ExpectedPureExpression -> true | _ -> false
let (uu___is_Fatal_ExpectNormalizedEffect : raw_error -> Prims.bool) =
  function Fatal_ExpectNormalizedEffect -> true | _ -> false
let (uu___is_Fatal_ExpectTermGotFunction : raw_error -> Prims.bool) =
  function Fatal_ExpectTermGotFunction -> true | _ -> false
let (uu___is_Fatal_ExpectTrivialPreCondition : raw_error -> Prims.bool) =
  function Fatal_ExpectTrivialPreCondition -> true | _ -> false
let (uu___is_Fatal_FailToCompileNativeTactic : raw_error -> Prims.bool) =
  function Fatal_FailToCompileNativeTactic -> true | _ -> false
let (uu___is_Fatal_FailToExtractNativeTactic : raw_error -> Prims.bool) =
  function Fatal_FailToExtractNativeTactic -> true | _ -> false
let (uu___is_Fatal_FailToProcessPragma : raw_error -> Prims.bool) =
  function Fatal_FailToProcessPragma -> true | _ -> false
let (uu___is_Fatal_FailToResolveImplicitArgument : raw_error -> Prims.bool) =
  function Fatal_FailToResolveImplicitArgument -> true | _ -> false
let (uu___is_Fatal_FailToSolveUniverseInEquality : raw_error -> Prims.bool) =
  function Fatal_FailToSolveUniverseInEquality -> true | _ -> false
let (uu___is_Fatal_FieldsNotBelongToSameRecordType : raw_error -> Prims.bool) =
  function Fatal_FieldsNotBelongToSameRecordType -> true | _ -> false
let (uu___is_Fatal_ForbiddenReferenceToCurrentModule :
  raw_error -> Prims.bool) =
  function Fatal_ForbiddenReferenceToCurrentModule -> true | _ -> false
let (uu___is_Fatal_FreeVariables : raw_error -> Prims.bool) =
  function Fatal_FreeVariables -> true | _ -> false
let (uu___is_Fatal_FunctionTypeExpected : raw_error -> Prims.bool) =
  function Fatal_FunctionTypeExpected -> true | _ -> false
let (uu___is_Fatal_IdentifierNotFound : raw_error -> Prims.bool) =
  function Fatal_IdentifierNotFound -> true | _ -> false
let (uu___is_Fatal_IllAppliedConstant : raw_error -> Prims.bool) =
  function Fatal_IllAppliedConstant -> true | _ -> false
let (uu___is_Fatal_IllegalCharInByteArray : raw_error -> Prims.bool) =
  function Fatal_IllegalCharInByteArray -> true | _ -> false
let (uu___is_Fatal_IllegalCharInOperatorName : raw_error -> Prims.bool) =
  function Fatal_IllegalCharInOperatorName -> true | _ -> false
let (uu___is_Fatal_IllTyped : raw_error -> Prims.bool) =
  function Fatal_IllTyped -> true | _ -> false
let (uu___is_Fatal_ImpossibleAbbrevLidBundle : raw_error -> Prims.bool) =
  function Fatal_ImpossibleAbbrevLidBundle -> true | _ -> false
let (uu___is_Fatal_ImpossibleAbbrevRenameBundle : raw_error -> Prims.bool) =
  function Fatal_ImpossibleAbbrevRenameBundle -> true | _ -> false
let (uu___is_Fatal_ImpossibleInductiveWithAbbrev : raw_error -> Prims.bool) =
  function Fatal_ImpossibleInductiveWithAbbrev -> true | _ -> false
let (uu___is_Fatal_ImpossiblePrePostAbs : raw_error -> Prims.bool) =
  function Fatal_ImpossiblePrePostAbs -> true | _ -> false
let (uu___is_Fatal_ImpossiblePrePostArrow : raw_error -> Prims.bool) =
  function Fatal_ImpossiblePrePostArrow -> true | _ -> false
let (uu___is_Fatal_ImpossibleToGenerateDMEffect : raw_error -> Prims.bool) =
  function Fatal_ImpossibleToGenerateDMEffect -> true | _ -> false
let (uu___is_Fatal_ImpossibleTypeAbbrevBundle : raw_error -> Prims.bool) =
  function Fatal_ImpossibleTypeAbbrevBundle -> true | _ -> false
let (uu___is_Fatal_ImpossibleTypeAbbrevSigeltBundle :
  raw_error -> Prims.bool) =
  function Fatal_ImpossibleTypeAbbrevSigeltBundle -> true | _ -> false
let (uu___is_Fatal_IncludeModuleNotPrepared : raw_error -> Prims.bool) =
  function Fatal_IncludeModuleNotPrepared -> true | _ -> false
let (uu___is_Fatal_IncoherentInlineUniverse : raw_error -> Prims.bool) =
  function Fatal_IncoherentInlineUniverse -> true | _ -> false
let (uu___is_Fatal_IncompatibleKinds : raw_error -> Prims.bool) =
  function Fatal_IncompatibleKinds -> true | _ -> false
let (uu___is_Fatal_IncompatibleNumberOfTypes : raw_error -> Prims.bool) =
  function Fatal_IncompatibleNumberOfTypes -> true | _ -> false
let (uu___is_Fatal_IncompatibleSetOfUniverse : raw_error -> Prims.bool) =
  function Fatal_IncompatibleSetOfUniverse -> true | _ -> false
let (uu___is_Fatal_IncompatibleUniverse : raw_error -> Prims.bool) =
  function Fatal_IncompatibleUniverse -> true | _ -> false
let (uu___is_Fatal_InconsistentImplicitArgumentAnnotation :
  raw_error -> Prims.bool) =
  function Fatal_InconsistentImplicitArgumentAnnotation -> true | _ -> false
let (uu___is_Fatal_InconsistentImplicitQualifier : raw_error -> Prims.bool) =
  function Fatal_InconsistentImplicitQualifier -> true | _ -> false
let (uu___is_Fatal_InconsistentQualifierAnnotation : raw_error -> Prims.bool) =
  function Fatal_InconsistentQualifierAnnotation -> true | _ -> false
let (uu___is_Fatal_InferredTypeCauseVarEscape : raw_error -> Prims.bool) =
  function Fatal_InferredTypeCauseVarEscape -> true | _ -> false
let (uu___is_Fatal_InlineRenamedAsUnfold : raw_error -> Prims.bool) =
  function Fatal_InlineRenamedAsUnfold -> true | _ -> false
let (uu___is_Fatal_InsufficientPatternArguments : raw_error -> Prims.bool) =
  function Fatal_InsufficientPatternArguments -> true | _ -> false
let (uu___is_Fatal_InterfaceAlreadyProcessed : raw_error -> Prims.bool) =
  function Fatal_InterfaceAlreadyProcessed -> true | _ -> false
let (uu___is_Fatal_InterfaceNotImplementedByModule : raw_error -> Prims.bool) =
  function Fatal_InterfaceNotImplementedByModule -> true | _ -> false
let (uu___is_Fatal_InterfaceWithTypeImplementation : raw_error -> Prims.bool) =
  function Fatal_InterfaceWithTypeImplementation -> true | _ -> false
let (uu___is_Fatal_InvalidFloatingPointNumber : raw_error -> Prims.bool) =
  function Fatal_InvalidFloatingPointNumber -> true | _ -> false
let (uu___is_Fatal_InvalidFSDocKeyword : raw_error -> Prims.bool) =
  function Fatal_InvalidFSDocKeyword -> true | _ -> false
let (uu___is_Fatal_InvalidIdentifier : raw_error -> Prims.bool) =
  function Fatal_InvalidIdentifier -> true | _ -> false
let (uu___is_Fatal_InvalidLemmaArgument : raw_error -> Prims.bool) =
  function Fatal_InvalidLemmaArgument -> true | _ -> false
let (uu___is_Fatal_InvalidNumericLiteral : raw_error -> Prims.bool) =
  function Fatal_InvalidNumericLiteral -> true | _ -> false
let (uu___is_Fatal_InvalidRedefinitionOfLexT : raw_error -> Prims.bool) =
  function Fatal_InvalidRedefinitionOfLexT -> true | _ -> false
let (uu___is_Fatal_InvalidUnicodeInStringLiteral : raw_error -> Prims.bool) =
  function Fatal_InvalidUnicodeInStringLiteral -> true | _ -> false
let (uu___is_Fatal_InvalidUTF8Encoding : raw_error -> Prims.bool) =
  function Fatal_InvalidUTF8Encoding -> true | _ -> false
let (uu___is_Fatal_InvalidWarnErrorSetting : raw_error -> Prims.bool) =
  function Fatal_InvalidWarnErrorSetting -> true | _ -> false
let (uu___is_Fatal_LetBoundMonadicMismatch : raw_error -> Prims.bool) =
  function Fatal_LetBoundMonadicMismatch -> true | _ -> false
let (uu___is_Fatal_LetMutableForVariablesOnly : raw_error -> Prims.bool) =
  function Fatal_LetMutableForVariablesOnly -> true | _ -> false
let (uu___is_Fatal_LetOpenModuleOnly : raw_error -> Prims.bool) =
  function Fatal_LetOpenModuleOnly -> true | _ -> false
let (uu___is_Fatal_LetRecArgumentMismatch : raw_error -> Prims.bool) =
  function Fatal_LetRecArgumentMismatch -> true | _ -> false
let (uu___is_Fatal_MalformedActionDeclaration : raw_error -> Prims.bool) =
  function Fatal_MalformedActionDeclaration -> true | _ -> false
let (uu___is_Fatal_MismatchedPatternType : raw_error -> Prims.bool) =
  function Fatal_MismatchedPatternType -> true | _ -> false
let (uu___is_Fatal_MismatchUniversePolymorphic : raw_error -> Prims.bool) =
  function Fatal_MismatchUniversePolymorphic -> true | _ -> false
let (uu___is_Fatal_MissingDataConstructor : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingDataConstructor -> true
    | uu___ -> false
let (uu___is_Fatal_MissingExposeInterfacesOption : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingExposeInterfacesOption -> true
    | uu___ -> false
let (uu___is_Fatal_MissingFieldInRecord : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingFieldInRecord -> true
    | uu___ -> false
let (uu___is_Fatal_MissingImplementation : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingImplementation -> true
    | uu___ -> false
let (uu___is_Fatal_MissingImplicitArguments : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingImplicitArguments -> true
    | uu___ -> false
let (uu___is_Fatal_MissingInterface : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_MissingInterface -> true | uu___ -> false
let (uu___is_Fatal_MissingNameInBinder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_MissingNameInBinder -> true | uu___ -> false
let (uu___is_Fatal_MissingPrimsModule : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_MissingPrimsModule -> true | uu___ -> false
let (uu___is_Fatal_MissingQuantifierBinder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MissingQuantifierBinder -> true
    | uu___ -> false
let (uu___is_Fatal_ModuleExpected : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ModuleExpected -> true | uu___ -> false
let (uu___is_Fatal_ModuleFileNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ModuleFileNotFound -> true | uu___ -> false
let (uu___is_Fatal_ModuleFirstStatement : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_ModuleFirstStatement -> true
    | uu___ -> false
let (uu___is_Fatal_ModuleNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ModuleNotFound -> true | uu___ -> false
let (uu___is_Fatal_ModuleOrFileNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_ModuleOrFileNotFound -> true
    | uu___ -> false
let (uu___is_Fatal_MonadAlreadyDefined : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_MonadAlreadyDefined -> true | uu___ -> false
let (uu___is_Fatal_MoreThanOneDeclaration : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_MoreThanOneDeclaration -> true
    | uu___ -> false
let (uu___is_Fatal_MultipleLetBinding : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_MultipleLetBinding -> true | uu___ -> false
let (uu___is_Fatal_NameNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NameNotFound -> true | uu___ -> false
let (uu___is_Fatal_NameSpaceNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NameSpaceNotFound -> true | uu___ -> false
let (uu___is_Fatal_NegativeUniverseConstFatal_NotSupported :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NegativeUniverseConstFatal_NotSupported -> true
    | uu___ -> false
let (uu___is_Fatal_NoFileProvided : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NoFileProvided -> true | uu___ -> false
let (uu___is_Fatal_NonInductiveInMutuallyDefinedType :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonInductiveInMutuallyDefinedType -> true
    | uu___ -> false
let (uu___is_Fatal_NonLinearPatternNotPermitted : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonLinearPatternNotPermitted -> true
    | uu___ -> false
let (uu___is_Fatal_NonLinearPatternVars : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonLinearPatternVars -> true
    | uu___ -> false
let (uu___is_Fatal_NonSingletonTopLevel : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonSingletonTopLevel -> true
    | uu___ -> false
let (uu___is_Fatal_NonSingletonTopLevelModule : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonSingletonTopLevelModule -> true
    | uu___ -> false
let (uu___is_Error_NonTopRecFunctionNotFullyEncoded :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_NonTopRecFunctionNotFullyEncoded -> true
    | uu___ -> false
(* Continuation of the generated [raw_error] discriminators: each predicate
   is [true] only for its own nullary constructor.
   NOTE(review): several constructor names in this region carry upstream F*
   misspellings ([Fatal_QulifierListNotPermitted],
   [Fatal_UnepxectedOrUnboundOperator], [Fatal_ToManyArgumentToFunction],
   [Fatal_UnexpectedImplictArgument], [Fatal_PossibleInfiniteTyp]).  They
   must match the type declaration exactly, so leave them as-is. *)
let (uu___is_Fatal_NonTrivialPreConditionInPrims : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonTrivialPreConditionInPrims -> true
    | uu___ -> false
let (uu___is_Fatal_NonVariableInductiveTypeParameter :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NonVariableInductiveTypeParameter -> true
    | uu___ -> false
let (uu___is_Fatal_NotApplicationOrFv : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NotApplicationOrFv -> true | uu___ -> false
let (uu___is_Fatal_NotEnoughArgsToEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NotEnoughArgsToEffect -> true
    | uu___ -> false
let (uu___is_Fatal_NotEnoughArgumentsForEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NotEnoughArgumentsForEffect -> true
    | uu___ -> false
let (uu___is_Fatal_NotFunctionType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NotFunctionType -> true | uu___ -> false
let (uu___is_Fatal_NotSupported : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NotSupported -> true | uu___ -> false
let (uu___is_Fatal_NotTopLevelModule : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NotTopLevelModule -> true | uu___ -> false
let (uu___is_Fatal_NotValidFStarFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_NotValidFStarFile -> true | uu___ -> false
let (uu___is_Fatal_NotValidIncludeDirectory : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_NotValidIncludeDirectory -> true
    | uu___ -> false
let (uu___is_Fatal_OneModulePerFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_OneModulePerFile -> true | uu___ -> false
let (uu___is_Fatal_OpenGoalsInSynthesis : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_OpenGoalsInSynthesis -> true
    | uu___ -> false
let (uu___is_Fatal_OptionsNotCompatible : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_OptionsNotCompatible -> true
    | uu___ -> false
let (uu___is_Fatal_OutOfOrder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_OutOfOrder -> true | uu___ -> false
let (uu___is_Fatal_ParseErrors : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ParseErrors -> true | uu___ -> false
let (uu___is_Fatal_ParseItError : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ParseItError -> true | uu___ -> false
let (uu___is_Fatal_PolyTypeExpected : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_PolyTypeExpected -> true | uu___ -> false
let (uu___is_Fatal_PossibleInfiniteTyp : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_PossibleInfiniteTyp -> true | uu___ -> false
let (uu___is_Fatal_PreModuleMismatch : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_PreModuleMismatch -> true | uu___ -> false
let (uu___is_Fatal_QulifierListNotPermitted : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_QulifierListNotPermitted -> true
    | uu___ -> false
let (uu___is_Fatal_RecursiveFunctionLiteral : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_RecursiveFunctionLiteral -> true
    | uu___ -> false
let (uu___is_Fatal_ReflectOnlySupportedOnEffects : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_ReflectOnlySupportedOnEffects -> true
    | uu___ -> false
let (uu___is_Fatal_ReservedPrefix : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ReservedPrefix -> true | uu___ -> false
let (uu___is_Fatal_SMTOutputParseError : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SMTOutputParseError -> true | uu___ -> false
let (uu___is_Fatal_SMTSolverError : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SMTSolverError -> true | uu___ -> false
let (uu___is_Fatal_SyntaxError : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SyntaxError -> true | uu___ -> false
let (uu___is_Fatal_SynthByTacticError : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SynthByTacticError -> true | uu___ -> false
let (uu___is_Fatal_TacticGotStuck : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_TacticGotStuck -> true | uu___ -> false
let (uu___is_Fatal_TcOneFragmentFailed : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_TcOneFragmentFailed -> true | uu___ -> false
let (uu___is_Fatal_TermOutsideOfDefLanguage : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_TermOutsideOfDefLanguage -> true
    | uu___ -> false
let (uu___is_Fatal_ToManyArgumentToFunction : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_ToManyArgumentToFunction -> true
    | uu___ -> false
let (uu___is_Fatal_TooManyOrTooFewFileMatch : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_TooManyOrTooFewFileMatch -> true
    | uu___ -> false
let (uu___is_Fatal_TooManyPatternArguments : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_TooManyPatternArguments -> true
    | uu___ -> false
let (uu___is_Fatal_TooManyUniverse : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_TooManyUniverse -> true | uu___ -> false
let (uu___is_Fatal_TypeMismatch : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_TypeMismatch -> true | uu___ -> false
let (uu___is_Fatal_TypeWithinPatternsAllowedOnVariablesOnly :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_TypeWithinPatternsAllowedOnVariablesOnly -> true
    | uu___ -> false
let (uu___is_Fatal_UnableToReadFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnableToReadFile -> true | uu___ -> false
let (uu___is_Fatal_UnepxectedOrUnboundOperator : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnepxectedOrUnboundOperator -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedBinder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedBinder -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedBindShape : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedBindShape -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedChar : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedChar -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedComputationTypeForLetRec :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedComputationTypeForLetRec -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedConstructorType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedConstructorType -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedDataConstructor : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedDataConstructor -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedEffect -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedEmptyRecord : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedEmptyRecord -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedExpressionType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedExpressionType -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedFunctionParameterType : raw_error -> Prims.bool)
  =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedFunctionParameterType -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedGeneralizedUniverse : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedGeneralizedUniverse -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedGTotForLetRec : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedGTotForLetRec -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedGuard : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedGuard -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedIdentifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedIdentifier -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedImplicitArgument : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedImplicitArgument -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedImplictArgument : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedImplictArgument -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedInductivetype : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedInductivetype -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedLetBinding : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedLetBinding -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedModuleDeclaration : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedModuleDeclaration -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedNumberOfUniverse : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedNumberOfUniverse -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedNumericLiteral : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedNumericLiteral -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedPattern : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedPattern -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedPosition : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedPosition -> true | uu___ -> false
let (uu___is_Fatal_UnExpectedPreCondition : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnExpectedPreCondition -> true
    | uu___ -> false
(* Continuation of the generated [raw_error] discriminators: one predicate
   per constructor, [true] only on an exact match.
   NOTE(review): upstream-misspelled constructor names also appear in this
   region ([Warning_HitReplayFailed] vs. [Warning_HintFailedToReplayProof],
   and others further below) — they are part of the extracted type and must
   not be renamed in generated output. *)
let (uu___is_Fatal_UnexpectedReturnShape : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedReturnShape -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedSignatureForMonad : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedSignatureForMonad -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedTerm : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedTerm -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedTermInUniverse : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedTermInUniverse -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedTermType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnexpectedTermType -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedTermVQuote : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedTermVQuote -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedUniversePolymorphicReturn :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedUniversePolymorphicReturn -> true
    | uu___ -> false
let (uu___is_Fatal_UnexpectedUniverseVariable : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedUniverseVariable -> true
    | uu___ -> false
let (uu___is_Fatal_UnfoldableDeprecated : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnfoldableDeprecated -> true
    | uu___ -> false
let (uu___is_Fatal_UnificationNotWellFormed : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnificationNotWellFormed -> true
    | uu___ -> false
let (uu___is_Fatal_Uninstantiated : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_Uninstantiated -> true | uu___ -> false
let (uu___is_Error_UninstantiatedUnificationVarInTactic :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_UninstantiatedUnificationVarInTactic -> true
    | uu___ -> false
let (uu___is_Fatal_UninstantiatedVarInTactic : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UninstantiatedVarInTactic -> true
    | uu___ -> false
let (uu___is_Fatal_UniverseMightContainSumOfTwoUnivVars :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UniverseMightContainSumOfTwoUnivVars -> true
    | uu___ -> false
let (uu___is_Fatal_UniversePolymorphicInnerLetBound :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UniversePolymorphicInnerLetBound -> true
    | uu___ -> false
let (uu___is_Fatal_UnknownAttribute : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnknownAttribute -> true | uu___ -> false
let (uu___is_Fatal_UnknownToolForDep : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnknownToolForDep -> true | uu___ -> false
let (uu___is_Fatal_UnrecognizedExtension : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnrecognizedExtension -> true
    | uu___ -> false
let (uu___is_Fatal_UnresolvedPatternVar : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnresolvedPatternVar -> true
    | uu___ -> false
let (uu___is_Fatal_UnsupportedConstant : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UnsupportedConstant -> true | uu___ -> false
let (uu___is_Fatal_UnsupportedDisjuctivePatterns : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnsupportedDisjuctivePatterns -> true
    | uu___ -> false
let (uu___is_Fatal_UnsupportedQualifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnsupportedQualifier -> true
    | uu___ -> false
let (uu___is_Fatal_UserTacticFailure : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_UserTacticFailure -> true | uu___ -> false
let (uu___is_Fatal_ValueRestriction : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_ValueRestriction -> true | uu___ -> false
let (uu___is_Fatal_VariableNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_VariableNotFound -> true | uu___ -> false
let (uu___is_Fatal_WrongBodyTypeForReturnWP : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_WrongBodyTypeForReturnWP -> true
    | uu___ -> false
let (uu___is_Fatal_WrongDataAppHeadFormat : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_WrongDataAppHeadFormat -> true
    | uu___ -> false
let (uu___is_Fatal_WrongDefinitionOrder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_WrongDefinitionOrder -> true
    | uu___ -> false
let (uu___is_Fatal_WrongResultTypeAfterConstrutor : raw_error -> Prims.bool)
  =
  fun projectee ->
    match projectee with
    | Fatal_WrongResultTypeAfterConstrutor -> true
    | uu___ -> false
let (uu___is_Fatal_WrongTerm : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_WrongTerm -> true | uu___ -> false
let (uu___is_Fatal_WhenClauseNotSupported : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_WhenClauseNotSupported -> true
    | uu___ -> false
(* [Unused01] appears to be a retired/placeholder error code kept so the
   numeric error-code table stays aligned — TODO confirm against upstream. *)
let (uu___is_Unused01 : raw_error -> Prims.bool) =
  fun projectee -> match projectee with | Unused01 -> true | uu___ -> false
let (uu___is_Warning_AddImplicitAssumeNewQualifier : raw_error -> Prims.bool)
  =
  fun projectee ->
    match projectee with
    | Warning_AddImplicitAssumeNewQualifier -> true
    | uu___ -> false
let (uu___is_Warning_AdmitWithoutDefinition : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_AdmitWithoutDefinition -> true
    | uu___ -> false
let (uu___is_Warning_CachedFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_CachedFile -> true | uu___ -> false
let (uu___is_Warning_DefinitionNotTranslated : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DefinitionNotTranslated -> true
    | uu___ -> false
let (uu___is_Warning_DependencyFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_DependencyFound -> true | uu___ -> false
let (uu___is_Warning_DeprecatedEqualityOnBinder : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DeprecatedEqualityOnBinder -> true
    | uu___ -> false
let (uu___is_Warning_DeprecatedOpaqueQualifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DeprecatedOpaqueQualifier -> true
    | uu___ -> false
let (uu___is_Warning_DocOverwrite : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_DocOverwrite -> true | uu___ -> false
let (uu___is_Warning_FileNotWritten : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_FileNotWritten -> true | uu___ -> false
let (uu___is_Warning_Filtered : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_Filtered -> true | uu___ -> false
let (uu___is_Warning_FunctionLiteralPrecisionLoss : raw_error -> Prims.bool)
  =
  fun projectee ->
    match projectee with
    | Warning_FunctionLiteralPrecisionLoss -> true
    | uu___ -> false
let (uu___is_Warning_FunctionNotExtacted : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_FunctionNotExtacted -> true
    | uu___ -> false
let (uu___is_Warning_HintFailedToReplayProof : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_HintFailedToReplayProof -> true
    | uu___ -> false
let (uu___is_Warning_HitReplayFailed : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_HitReplayFailed -> true | uu___ -> false
let (uu___is_Warning_IDEIgnoreCodeGen : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_IDEIgnoreCodeGen -> true | uu___ -> false
let (uu___is_Warning_IllFormedGoal : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_IllFormedGoal -> true | uu___ -> false
let (uu___is_Warning_InaccessibleArgument : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_InaccessibleArgument -> true
    | uu___ -> false
let (uu___is_Warning_IncoherentImplicitQualifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_IncoherentImplicitQualifier -> true
    | uu___ -> false
let (uu___is_Warning_IrrelevantQualifierOnArgumentToReflect :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_IrrelevantQualifierOnArgumentToReflect -> true
    | uu___ -> false
let (uu___is_Warning_IrrelevantQualifierOnArgumentToReify :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_IrrelevantQualifierOnArgumentToReify -> true
    | uu___ -> false
let (uu___is_Warning_MalformedWarnErrorList : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_MalformedWarnErrorList -> true
    | uu___ -> false
(* Final stretch of the generated [raw_error] discriminators visible in this
   region.  Same shape throughout: [true] iff the scrutinee is the named
   nullary constructor, [false] otherwise.  Being extractor output, the
   whole family should only ever change by regeneration. *)
let (uu___is_Warning_MetaAlienNotATmUnknown : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_MetaAlienNotATmUnknown -> true
    | uu___ -> false
let (uu___is_Warning_MultipleAscriptions : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_MultipleAscriptions -> true
    | uu___ -> false
let (uu___is_Warning_NondependentUserDefinedDataType :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_NondependentUserDefinedDataType -> true
    | uu___ -> false
let (uu___is_Warning_NonListLiteralSMTPattern : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_NonListLiteralSMTPattern -> true
    | uu___ -> false
let (uu___is_Warning_NormalizationFailure : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_NormalizationFailure -> true
    | uu___ -> false
let (uu___is_Warning_NotDependentArrow : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_NotDependentArrow -> true | uu___ -> false
let (uu___is_Warning_NotEmbedded : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_NotEmbedded -> true | uu___ -> false
let (uu___is_Warning_PatternMissingBoundVar : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_PatternMissingBoundVar -> true
    | uu___ -> false
let (uu___is_Warning_RecursiveDependency : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_RecursiveDependency -> true
    | uu___ -> false
let (uu___is_Warning_RedundantExplicitCurrying : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_RedundantExplicitCurrying -> true
    | uu___ -> false
let (uu___is_Warning_SMTPatTDeprecated : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_SMTPatTDeprecated -> true | uu___ -> false
let (uu___is_Warning_SMTPatternIllFormed : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_SMTPatternIllFormed -> true
    | uu___ -> false
let (uu___is_Warning_TopLevelEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_TopLevelEffect -> true | uu___ -> false
let (uu___is_Warning_UnboundModuleReference : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UnboundModuleReference -> true
    | uu___ -> false
let (uu___is_Warning_UnexpectedFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnexpectedFile -> true | uu___ -> false
let (uu___is_Warning_UnexpectedFsTypApp : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UnexpectedFsTypApp -> true
    | uu___ -> false
let (uu___is_Warning_UnexpectedZ3Output : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UnexpectedZ3Output -> true
    | uu___ -> false
let (uu___is_Warning_UnprotectedTerm : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnprotectedTerm -> true | uu___ -> false
let (uu___is_Warning_UnrecognizedAttribute : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UnrecognizedAttribute -> true
    | uu___ -> false
let (uu___is_Warning_UpperBoundCandidateAlreadyVisited :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UpperBoundCandidateAlreadyVisited -> true
    | uu___ -> false
let (uu___is_Warning_UseDefaultEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UseDefaultEffect -> true | uu___ -> false
let (uu___is_Warning_WrongErrorLocation : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_WrongErrorLocation -> true
    | uu___ -> false
let (uu___is_Warning_Z3InvocationWarning : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_Z3InvocationWarning -> true
    | uu___ -> false
let (uu___is_Warning_PluginNotImplemented : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_PluginNotImplemented -> true
    | uu___ -> false
let (uu___is_Warning_MissingInterfaceOrImplementation :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_MissingInterfaceOrImplementation -> true
    | uu___ -> false
let (uu___is_Warning_ConstructorBuildsUnexpectedType :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_ConstructorBuildsUnexpectedType -> true
    | uu___ -> false
let (uu___is_Warning_ModuleOrFileNotFoundWarning : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_ModuleOrFileNotFoundWarning -> true
    | uu___ -> false
let (uu___is_Error_NoLetMutable : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_NoLetMutable -> true | uu___ -> false
let (uu___is_Error_BadImplicit : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_BadImplicit -> true | uu___ -> false
let (uu___is_Warning_DeprecatedDefinition : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DeprecatedDefinition -> true
    | uu___ -> false
let (uu___is_Fatal_SMTEncodingArityMismatch : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_SMTEncodingArityMismatch -> true
    | uu___ -> false
let (uu___is_Warning_Defensive : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_Defensive -> true | uu___ -> false
let (uu___is_Warning_CantInspect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_CantInspect -> true | uu___ -> false
let (uu___is_Warning_NilGivenExplicitArgs : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_NilGivenExplicitArgs -> true
    | uu___ -> false
let (uu___is_Warning_ConsAppliedExplicitArgs : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_ConsAppliedExplicitArgs -> true
    | uu___ -> false
let (uu___is_Warning_UnembedBinderKnot : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnembedBinderKnot -> true | uu___ -> false
let (uu___is_Fatal_TacticProofRelevantGoal : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_TacticProofRelevantGoal -> true
    | uu___ -> false
let (uu___is_Warning_TacAdmit : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_TacAdmit -> true | uu___ -> false
let (uu___is_Fatal_IncoherentPatterns : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_IncoherentPatterns -> true | uu___ -> false
let (uu___is_Error_NoSMTButNeeded : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_NoSMTButNeeded -> true | uu___ -> false
let (uu___is_Fatal_UnexpectedAntiquotation : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_UnexpectedAntiquotation -> true
    | uu___ -> false
let (uu___is_Fatal_SplicedUndef : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SplicedUndef -> true | uu___ -> false
let (uu___is_Fatal_SpliceUnembedFail : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_SpliceUnembedFail -> true | uu___ -> false
let (uu___is_Warning_ExtractionUnexpectedEffect : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_ExtractionUnexpectedEffect -> true
    | uu___ -> false
let (uu___is_Error_DidNotFail : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_DidNotFail -> true | uu___ -> false
let (uu___is_Warning_UnappliedFail : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnappliedFail -> true | uu___ -> false
let (uu___is_Warning_QuantifierWithoutPattern : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_QuantifierWithoutPattern -> true
    | uu___ -> false
let (uu___is_Error_EmptyFailErrs : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_EmptyFailErrs -> true | uu___ -> false
let (uu___is_Warning_logicqualifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_logicqualifier -> true | uu___ -> false
let (uu___is_Fatal_CyclicDependence : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_CyclicDependence -> true | uu___ -> false
let (uu___is_Error_InductiveAnnotNotAType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_InductiveAnnotNotAType -> true
    | uu___ -> false
let (uu___is_Fatal_FriendInterface : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_FriendInterface -> true | uu___ -> false
let (uu___is_Error_CannotRedefineConst : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_CannotRedefineConst -> true | uu___ -> false
let (uu___is_Error_BadClassDecl : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_BadClassDecl -> true | uu___ -> false
let (uu___is_Error_BadInductiveParam : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_BadInductiveParam -> true | uu___ -> false
let (uu___is_Error_FieldShadow : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_FieldShadow -> true | uu___ -> false
let (uu___is_Error_UnexpectedDM4FType : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_UnexpectedDM4FType -> true | uu___ -> false
let (uu___is_Fatal_EffectAbbreviationResultTypeMismatch :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_EffectAbbreviationResultTypeMismatch -> true
    | uu___ -> false
let (uu___is_Error_AlreadyCachedAssertionFailure : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_AlreadyCachedAssertionFailure -> true
    | uu___ -> false
let (uu___is_Error_MustEraseMissing : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_MustEraseMissing -> true | uu___ -> false
let (uu___is_Warning_EffectfulArgumentToErasedFunction :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_EffectfulArgumentToErasedFunction -> true
    | uu___ -> false
let (uu___is_Fatal_EmptySurfaceLet : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_EmptySurfaceLet -> true | uu___ -> false
let (uu___is_Warning_UnexpectedCheckedFile : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_UnexpectedCheckedFile -> true
    | uu___ -> false
let (uu___is_Fatal_ExtractionUnsupported : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_ExtractionUnsupported -> true
    | uu___ -> false
let (uu___is_Warning_SMTErrorReason : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_SMTErrorReason -> true | uu___ -> false
let (uu___is_Warning_CoercionNotFound : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_CoercionNotFound -> true | uu___ -> false
let (uu___is_Error_QuakeFailed : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_QuakeFailed -> true | uu___ -> false
let (uu___is_Error_IllSMTPat : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_IllSMTPat -> true | uu___ -> false
let (uu___is_Error_IllScopedTerm : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_IllScopedTerm -> true | uu___ -> false
let (uu___is_Warning_UnusedLetRec : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnusedLetRec -> true | uu___ -> false
let (uu___is_Fatal_Effects_Ordering_Coherence : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Fatal_Effects_Ordering_Coherence -> true
    | uu___ -> false
let (uu___is_Warning_BleedingEdge_Feature : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_BleedingEdge_Feature -> true
    | uu___ -> false
let (uu___is_Warning_IgnoredBinding : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_IgnoredBinding -> true | uu___ -> false
let (uu___is_Warning_CouldNotReadHints : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_CouldNotReadHints -> true | uu___ -> false
let (uu___is_Fatal_BadUvar : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Fatal_BadUvar -> true | uu___ -> false
let (uu___is_Warning_WarnOnUse : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_WarnOnUse -> true | uu___ -> false
let (uu___is_Warning_DeprecatedAttributeSyntax : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DeprecatedAttributeSyntax -> true
    | uu___ -> false
let (uu___is_Warning_DeprecatedGeneric : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_DeprecatedGeneric -> true | uu___ -> false
let (uu___is_Error_BadSplice : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_BadSplice -> true | uu___ -> false
let (uu___is_Error_UnexpectedUnresolvedUvar : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_UnexpectedUnresolvedUvar -> true
    | uu___ -> false
let (uu___is_Warning_UnfoldPlugin : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_UnfoldPlugin -> true | uu___ -> false
let (uu___is_Error_LayeredMissingAnnot : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_LayeredMissingAnnot -> true | uu___ -> false
let (uu___is_Error_CallToErased : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_CallToErased -> true | uu___ -> false
let (uu___is_Error_ErasedCtor : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_ErasedCtor -> true | uu___ -> false
let (uu___is_Error_RemoveUnusedTypeParameter : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_RemoveUnusedTypeParameter -> true
    | uu___ -> false
let (uu___is_Warning_NoMagicInFSharp : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_NoMagicInFSharp -> true | uu___ -> false
let (uu___is_Error_BadLetOpenRecord : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_BadLetOpenRecord -> true | uu___ -> false
let (uu___is_Error_UnexpectedTypeclassInstance : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Error_UnexpectedTypeclassInstance -> true
    | uu___ -> false
let (uu___is_Warning_AmbiguousResolveImplicitsHook : raw_error -> Prims.bool)
  =
  fun projectee ->
    match projectee with
    | Warning_AmbiguousResolveImplicitsHook -> true
    | uu___ -> false
let (uu___is_Warning_SplitAndRetryQueries : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_SplitAndRetryQueries -> true
    | uu___ -> false
let (uu___is_Warning_DeprecatedLightDoNotation : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_DeprecatedLightDoNotation -> true
    | uu___ -> false
let (uu___is_Warning_FailedToCheckInitialTacticGoal :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_FailedToCheckInitialTacticGoal -> true
    | uu___ -> false
let (uu___is_Warning_Adhoc_IndexedEffect_Combinator :
  raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Warning_Adhoc_IndexedEffect_Combinator -> true
    | uu___ -> false
let (uu___is_Error_PluginDynlink : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_PluginDynlink -> true | uu___ -> false
let (uu___is_Error_InternalQualifier : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Error_InternalQualifier -> true | uu___ -> false
let (uu___is_Warning_NameEscape : raw_error -> Prims.bool) =
  fun projectee ->
    match projectee with | Warning_NameEscape -> true | uu___ -> false
(* An [error_setting] ties a raw error constructor to its default
   severity ([error_flag]) and a stable numeric code.  The numeric codes
   are presumably the user-facing warn/error numbers (e.g. as accepted
   by F*'s --warn_error option — TODO confirm), so existing entries must
   never be renumbered; new entries are appended with fresh codes. *)
type error_setting = (raw_error * error_flag * Prims.int)
(* Default severity/code table.  Machine-maintained F* extraction
   output: do not hand-edit the numbers; keep in sync with the F*
   sources this file was extracted from. *)
let (default_settings : error_setting Prims.list) =
  [(Error_DependencyAnalysisFailed, CAlwaysError, Prims.int_zero);
  (Error_IDETooManyPops, CAlwaysError, Prims.int_one);
  (Error_IDEUnrecognized, CAlwaysError, (Prims.of_int (2)));
  (Error_InductiveTypeNotSatisfyPositivityCondition, CAlwaysError,
    (Prims.of_int (3)));
  (Error_InvalidUniverseVar, CAlwaysError, (Prims.of_int (4)));
  (Error_MissingFileName, CAlwaysError, (Prims.of_int (5)));
  (Error_ModuleFileNameMismatch, CAlwaysError, (Prims.of_int (6)));
  (Error_OpPlusInUniverse, CAlwaysError, (Prims.of_int (7)));
  (Error_OutOfRange, CAlwaysError, (Prims.of_int (8)));
  (Error_ProofObligationFailed, CError, (Prims.of_int (9)));
  (Error_TooManyFiles, CAlwaysError, (Prims.of_int (10)));
  (Error_TypeCheckerFailToProve, CAlwaysError, (Prims.of_int (11)));
  (Error_TypeError, CAlwaysError, (Prims.of_int (12)));
  (Error_UncontrainedUnificationVar, CAlwaysError, (Prims.of_int (13)));
  (Error_UnexpectedGTotComputation, CAlwaysError, (Prims.of_int (14)));
  (Error_UnexpectedInstance, CAlwaysError, (Prims.of_int (15)));
  (Error_UnknownFatal_AssertionFailure, CError, (Prims.of_int (16)));
  (Error_Z3InvocationError, CAlwaysError, (Prims.of_int (17)));
  (Error_IDEAssertionFailure, CAlwaysError, (Prims.of_int (18)));
  (Error_Z3SolverError, CError, (Prims.of_int (19)));
  (Fatal_AbstractTypeDeclarationInInterface, CFatal, (Prims.of_int (20)));
  (Fatal_ActionMustHaveFunctionType, CFatal, (Prims.of_int (21)));
  (Fatal_AlreadyDefinedTopLevelDeclaration, CFatal, (Prims.of_int (22)));
  (Fatal_ArgumentLengthMismatch, CFatal, (Prims.of_int (23)));
  (Fatal_AssertionFailure, CFatal, (Prims.of_int (24)));
  (Fatal_AssignToImmutableValues, CFatal, (Prims.of_int (25)));
  (Fatal_AssumeValInInterface, CFatal, (Prims.of_int (26)));
  (Fatal_BadlyInstantiatedSynthByTactic, CFatal, (Prims.of_int (27)));
  (Fatal_BadSignatureShape, CFatal, (Prims.of_int (28)));
  (Fatal_BinderAndArgsLengthMismatch, CFatal, (Prims.of_int (29)));
  (Fatal_BothValAndLetInInterface, CFatal, (Prims.of_int (30)));
  (Fatal_CardinalityConstraintViolated, CFatal, (Prims.of_int (31)));
  (Fatal_ComputationNotTotal, CFatal, (Prims.of_int (32)));
  (Fatal_ComputationTypeNotAllowed, CFatal, (Prims.of_int (33)));
  (Fatal_ComputedTypeNotMatchAnnotation, CFatal, (Prims.of_int (34)));
  (Fatal_ConstructorArgLengthMismatch, CFatal, (Prims.of_int (35)));
  (Fatal_ConstructorFailedCheck, CFatal, (Prims.of_int (36)));
  (Fatal_ConstructorNotFound, CFatal, (Prims.of_int (37)));
  (Fatal_ConstsructorBuildWrongType, CFatal, (Prims.of_int (38)));
  (Fatal_CycleInRecTypeAbbreviation, CFatal, (Prims.of_int (39)));
  (Fatal_DataContructorNotFound, CFatal, (Prims.of_int (40)));
  (Fatal_DefaultQualifierNotAllowedOnEffects, CFatal, (Prims.of_int (41)));
  (Fatal_DefinitionNotFound, CFatal, (Prims.of_int (42)));
  (Fatal_DisjuctivePatternVarsMismatch, CFatal, (Prims.of_int (43)));
  (Fatal_DivergentComputationCannotBeIncludedInTotal, CFatal,
    (Prims.of_int (44)));
  (Fatal_DuplicateInImplementation, CFatal, (Prims.of_int (45)));
  (Fatal_DuplicateModuleOrInterface, CFatal, (Prims.of_int (46)));
  (Fatal_DuplicateTopLevelNames, CFatal, (Prims.of_int (47)));
  (Fatal_DuplicateTypeAnnotationAndValDecl, CFatal, (Prims.of_int (48)));
  (Fatal_EffectCannotBeReified, CFatal, (Prims.of_int (49)));
  (Fatal_EffectConstructorNotFullyApplied, CFatal, (Prims.of_int (50)));
  (Fatal_EffectfulAndPureComputationMismatch, CFatal, (Prims.of_int (51)));
  (Fatal_EffectNotFound, CFatal, (Prims.of_int (52)));
  (Fatal_EffectsCannotBeComposed, CFatal, (Prims.of_int (53)));
  (Fatal_ErrorInSolveDeferredConstraints, CFatal, (Prims.of_int (54)));
  (Fatal_ErrorsReported, CFatal, (Prims.of_int (55)));
  (Fatal_EscapedBoundVar, CFatal, (Prims.of_int (56)));
  (Fatal_ExpectedArrowAnnotatedType, CFatal, (Prims.of_int (57)));
  (Fatal_ExpectedGhostExpression, CFatal, (Prims.of_int (58)));
  (Fatal_ExpectedPureExpression, CFatal, (Prims.of_int (59)));
  (Fatal_ExpectNormalizedEffect, CFatal, (Prims.of_int (60)));
  (Fatal_ExpectTermGotFunction, CFatal, (Prims.of_int (61)));
  (Fatal_ExpectTrivialPreCondition, CFatal, (Prims.of_int (62)));
  (Fatal_FailToExtractNativeTactic, CFatal, (Prims.of_int (63)));
  (Fatal_FailToCompileNativeTactic, CFatal, (Prims.of_int (64)));
  (Fatal_FailToProcessPragma, CFatal, (Prims.of_int (65)));
  (Fatal_FailToResolveImplicitArgument, CFatal, (Prims.of_int (66)));
  (Fatal_FailToSolveUniverseInEquality, CFatal, (Prims.of_int (67)));
  (Fatal_FieldsNotBelongToSameRecordType, CFatal, (Prims.of_int (68)));
  (Fatal_ForbiddenReferenceToCurrentModule, CFatal, (Prims.of_int (69)));
  (Fatal_FreeVariables, CFatal, (Prims.of_int (70)));
  (Fatal_FunctionTypeExpected, CFatal, (Prims.of_int (71)));
  (Fatal_IdentifierNotFound, CFatal, (Prims.of_int (72)));
  (Fatal_IllAppliedConstant, CFatal, (Prims.of_int (73)));
  (Fatal_IllegalCharInByteArray, CFatal, (Prims.of_int (74)));
  (Fatal_IllegalCharInOperatorName, CFatal, (Prims.of_int (75)));
  (Fatal_IllTyped, CFatal, (Prims.of_int (76)));
  (Fatal_ImpossibleAbbrevLidBundle, CFatal, (Prims.of_int (77)));
  (Fatal_ImpossibleAbbrevRenameBundle, CFatal, (Prims.of_int (78)));
  (Fatal_ImpossibleInductiveWithAbbrev, CFatal, (Prims.of_int (79)));
  (Fatal_ImpossiblePrePostAbs, CFatal, (Prims.of_int (80)));
  (Fatal_ImpossiblePrePostArrow, CFatal, (Prims.of_int (81)));
  (Fatal_ImpossibleToGenerateDMEffect, CFatal, (Prims.of_int (82)));
  (Fatal_ImpossibleTypeAbbrevBundle, CFatal, (Prims.of_int (83)));
  (Fatal_ImpossibleTypeAbbrevSigeltBundle, CFatal, (Prims.of_int (84)));
  (Fatal_IncludeModuleNotPrepared, CFatal, (Prims.of_int (85)));
  (Fatal_IncoherentInlineUniverse, CFatal, (Prims.of_int (86)));
  (Fatal_IncompatibleKinds, CFatal, (Prims.of_int (87)));
  (Fatal_IncompatibleNumberOfTypes, CFatal, (Prims.of_int (88)));
  (Fatal_IncompatibleSetOfUniverse, CFatal, (Prims.of_int (89)));
  (Fatal_IncompatibleUniverse, CFatal, (Prims.of_int (90)));
  (Fatal_InconsistentImplicitArgumentAnnotation, CFatal, (Prims.of_int (91)));
  (Fatal_InconsistentImplicitQualifier, CFatal, (Prims.of_int (92)));
  (Fatal_InconsistentQualifierAnnotation, CFatal, (Prims.of_int (93)));
  (Fatal_InferredTypeCauseVarEscape, CFatal, (Prims.of_int (94)));
  (Fatal_InlineRenamedAsUnfold, CFatal, (Prims.of_int (95)));
  (Fatal_InsufficientPatternArguments, CFatal, (Prims.of_int (96)));
  (Fatal_InterfaceAlreadyProcessed, CFatal, (Prims.of_int (97)));
  (Fatal_InterfaceNotImplementedByModule, CFatal, (Prims.of_int (98)));
  (Fatal_InterfaceWithTypeImplementation, CFatal, (Prims.of_int (99)));
  (Fatal_InvalidFloatingPointNumber, CFatal, (Prims.of_int (100)));
  (Fatal_InvalidFSDocKeyword, CFatal, (Prims.of_int (101)));
  (Fatal_InvalidIdentifier, CFatal, (Prims.of_int (102)));
  (Fatal_InvalidLemmaArgument, CFatal, (Prims.of_int (103)));
  (Fatal_InvalidNumericLiteral, CFatal, (Prims.of_int (104)));
  (Fatal_InvalidRedefinitionOfLexT, CFatal, (Prims.of_int (105)));
  (Fatal_InvalidUnicodeInStringLiteral, CFatal, (Prims.of_int (106)));
  (Fatal_InvalidUTF8Encoding, CFatal, (Prims.of_int (107)));
  (Fatal_InvalidWarnErrorSetting, CFatal, (Prims.of_int (108)));
  (Fatal_LetBoundMonadicMismatch, CFatal, (Prims.of_int (109)));
  (Fatal_LetMutableForVariablesOnly, CFatal, (Prims.of_int (110)));
  (Fatal_LetOpenModuleOnly, CFatal, (Prims.of_int (111)));
  (Fatal_LetRecArgumentMismatch, CFatal, (Prims.of_int (112)));
  (Fatal_MalformedActionDeclaration, CFatal, (Prims.of_int (113)));
  (Fatal_MismatchedPatternType, CFatal, (Prims.of_int (114)));
  (Fatal_MismatchUniversePolymorphic, CFatal, (Prims.of_int (115)));
  (Fatal_MissingDataConstructor, CFatal, (Prims.of_int (116)));
  (Fatal_MissingExposeInterfacesOption, CFatal, (Prims.of_int (117)));
  (Fatal_MissingFieldInRecord, CFatal, (Prims.of_int (118)));
  (Fatal_MissingImplementation, CFatal, (Prims.of_int (119)));
  (Fatal_MissingImplicitArguments, CFatal, (Prims.of_int (120)));
  (Fatal_MissingInterface, CFatal, (Prims.of_int (121)));
  (Fatal_MissingNameInBinder, CFatal, (Prims.of_int (122)));
  (Fatal_MissingPrimsModule, CFatal, (Prims.of_int (123)));
  (Fatal_MissingQuantifierBinder, CFatal, (Prims.of_int (124)));
  (Fatal_ModuleExpected, CFatal, (Prims.of_int (125)));
  (Fatal_ModuleFileNotFound, CFatal, (Prims.of_int (126)));
  (Fatal_ModuleFirstStatement, CFatal, (Prims.of_int (127)));
  (Fatal_ModuleNotFound, CFatal, (Prims.of_int (128)));
  (Fatal_ModuleOrFileNotFound, CFatal, (Prims.of_int (129)));
  (Fatal_MonadAlreadyDefined, CFatal, (Prims.of_int (130)));
  (Fatal_MoreThanOneDeclaration, CFatal, (Prims.of_int (131)));
  (Fatal_MultipleLetBinding, CFatal, (Prims.of_int (132)));
  (Fatal_NameNotFound, CFatal, (Prims.of_int (133)));
  (Fatal_NameSpaceNotFound, CFatal, (Prims.of_int (134)));
  (Fatal_NegativeUniverseConstFatal_NotSupported, CFatal,
    (Prims.of_int (135)));
  (Fatal_NoFileProvided, CFatal, (Prims.of_int (136)));
  (Fatal_NonInductiveInMutuallyDefinedType, CFatal, (Prims.of_int (137)));
  (Fatal_NonLinearPatternNotPermitted, CFatal, (Prims.of_int (138)));
  (Fatal_NonLinearPatternVars, CFatal, (Prims.of_int (139)));
  (Fatal_NonSingletonTopLevel, CFatal, (Prims.of_int (140)));
  (Fatal_NonSingletonTopLevelModule, CFatal, (Prims.of_int (141)));
  (Error_NonTopRecFunctionNotFullyEncoded, CError, (Prims.of_int (142)));
  (Fatal_NonTrivialPreConditionInPrims, CFatal, (Prims.of_int (143)));
  (Fatal_NonVariableInductiveTypeParameter, CFatal, (Prims.of_int (144)));
  (Fatal_NotApplicationOrFv, CFatal, (Prims.of_int (145)));
  (Fatal_NotEnoughArgsToEffect, CFatal, (Prims.of_int (146)));
  (Fatal_NotEnoughArgumentsForEffect, CFatal, (Prims.of_int (147)));
  (Fatal_NotFunctionType, CFatal, (Prims.of_int (148)));
  (Fatal_NotSupported, CFatal, (Prims.of_int (149)));
  (Fatal_NotTopLevelModule, CFatal, (Prims.of_int (150)));
  (Fatal_NotValidFStarFile, CFatal, (Prims.of_int (151)));
  (Fatal_NotValidIncludeDirectory, CFatal, (Prims.of_int (152)));
  (Fatal_OneModulePerFile, CFatal, (Prims.of_int (153)));
  (Fatal_OpenGoalsInSynthesis, CFatal, (Prims.of_int (154)));
  (Fatal_OptionsNotCompatible, CFatal, (Prims.of_int (155)));
  (Fatal_OutOfOrder, CFatal, (Prims.of_int (156)));
  (Fatal_ParseErrors, CFatal, (Prims.of_int (157)));
  (Fatal_ParseItError, CFatal, (Prims.of_int (158)));
  (Fatal_PolyTypeExpected, CFatal, (Prims.of_int (159)));
  (Fatal_PossibleInfiniteTyp, CFatal, (Prims.of_int (160)));
  (Fatal_PreModuleMismatch, CFatal, (Prims.of_int (161)));
  (Fatal_QulifierListNotPermitted, CFatal, (Prims.of_int (162)));
  (Fatal_RecursiveFunctionLiteral, CFatal, (Prims.of_int (163)));
  (Fatal_ReflectOnlySupportedOnEffects, CFatal, (Prims.of_int (164)));
  (Fatal_ReservedPrefix, CFatal, (Prims.of_int (165)));
  (Fatal_SMTOutputParseError, CFatal, (Prims.of_int (166)));
  (Fatal_SMTSolverError, CFatal, (Prims.of_int (167)));
  (Fatal_SyntaxError, CFatal, (Prims.of_int (168)));
  (Fatal_SynthByTacticError, CFatal, (Prims.of_int (169)));
  (Fatal_TacticGotStuck, CFatal, (Prims.of_int (170)));
  (Fatal_TcOneFragmentFailed, CFatal, (Prims.of_int (171)));
  (Fatal_TermOutsideOfDefLanguage, CFatal, (Prims.of_int (172)));
  (Fatal_ToManyArgumentToFunction, CFatal, (Prims.of_int (173)));
  (Fatal_TooManyOrTooFewFileMatch, CFatal, (Prims.of_int (174)));
  (Fatal_TooManyPatternArguments, CFatal, (Prims.of_int (175)));
  (Fatal_TooManyUniverse, CFatal, (Prims.of_int (176)));
  (Fatal_TypeMismatch, CFatal, (Prims.of_int (177)));
  (Fatal_TypeWithinPatternsAllowedOnVariablesOnly, CFatal,
    (Prims.of_int (178)));
  (Fatal_UnableToReadFile, CFatal, (Prims.of_int (179)));
  (Fatal_UnepxectedOrUnboundOperator, CFatal, (Prims.of_int (180)));
  (Fatal_UnexpectedBinder, CFatal, (Prims.of_int (181)));
  (Fatal_UnexpectedBindShape, CFatal, (Prims.of_int (182)));
  (Fatal_UnexpectedChar, CFatal, (Prims.of_int (183)));
  (Fatal_UnexpectedComputationTypeForLetRec, CFatal, (Prims.of_int (184)));
  (Fatal_UnexpectedConstructorType, CFatal, (Prims.of_int (185)));
  (Fatal_UnexpectedDataConstructor, CFatal, (Prims.of_int (186)));
  (Fatal_UnexpectedEffect, CFatal, (Prims.of_int (187)));
  (Fatal_UnexpectedEmptyRecord, CFatal, (Prims.of_int (188)));
  (Fatal_UnexpectedExpressionType, CFatal, (Prims.of_int (189)));
  (Fatal_UnexpectedFunctionParameterType, CFatal, (Prims.of_int (190)));
  (Fatal_UnexpectedGeneralizedUniverse, CFatal, (Prims.of_int (191)));
  (Fatal_UnexpectedGTotForLetRec, CFatal, (Prims.of_int (192)));
  (Fatal_UnexpectedGuard, CFatal, (Prims.of_int (193)));
  (Fatal_UnexpectedIdentifier, CFatal, (Prims.of_int (194)));
  (Fatal_UnexpectedImplicitArgument, CFatal, (Prims.of_int (195)));
  (Fatal_UnexpectedImplictArgument, CFatal, (Prims.of_int (196)));
  (Fatal_UnexpectedInductivetype, CFatal, (Prims.of_int (197)));
  (Fatal_UnexpectedLetBinding, CFatal, (Prims.of_int (198)));
  (Fatal_UnexpectedModuleDeclaration, CFatal, (Prims.of_int (199)));
  (Fatal_UnexpectedNumberOfUniverse, CFatal, (Prims.of_int (200)));
  (Fatal_UnexpectedNumericLiteral, CFatal, (Prims.of_int (201)));
  (* note: numeric code 202 is deliberately absent (no entry between
     201 and 203) — presumably a retired code; do not reuse it *)
  (Fatal_UnexpectedPattern, CFatal, (Prims.of_int (203)));
  (Fatal_UnexpectedPosition, CFatal, (Prims.of_int (204)));
  (Fatal_UnExpectedPreCondition, CFatal, (Prims.of_int (205)));
  (Fatal_UnexpectedReturnShape, CFatal, (Prims.of_int (206)));
  (Fatal_UnexpectedSignatureForMonad, CFatal, (Prims.of_int (207)));
  (Fatal_UnexpectedTerm, CFatal, (Prims.of_int (208)));
  (Fatal_UnexpectedTermInUniverse, CFatal, (Prims.of_int (209)));
  (Fatal_UnexpectedTermType, CFatal, (Prims.of_int (210)));
  (Fatal_UnexpectedTermVQuote, CFatal, (Prims.of_int (211)));
  (Fatal_UnexpectedUniversePolymorphicReturn, CFatal, (Prims.of_int (212)));
  (Fatal_UnexpectedUniverseVariable, CFatal, (Prims.of_int (213)));
  (Fatal_UnfoldableDeprecated, CFatal, (Prims.of_int (214)));
  (Fatal_UnificationNotWellFormed, CFatal, (Prims.of_int (215)));
  (Fatal_Uninstantiated, CFatal, (Prims.of_int (216)));
  (Error_UninstantiatedUnificationVarInTactic, CError, (Prims.of_int (217)));
  (Fatal_UninstantiatedVarInTactic, CFatal, (Prims.of_int (218)));
  (Fatal_UniverseMightContainSumOfTwoUnivVars, CFatal, (Prims.of_int (219)));
  (Fatal_UniversePolymorphicInnerLetBound, CFatal, (Prims.of_int (220)));
  (Fatal_UnknownAttribute, CFatal, (Prims.of_int (221)));
  (Fatal_UnknownToolForDep, CFatal, (Prims.of_int (222)));
  (Fatal_UnrecognizedExtension, CFatal, (Prims.of_int (223)));
  (Fatal_UnresolvedPatternVar, CFatal, (Prims.of_int (224)));
  (Fatal_UnsupportedConstant, CFatal, (Prims.of_int (225)));
  (Fatal_UnsupportedDisjuctivePatterns, CFatal, (Prims.of_int (226)));
  (Fatal_UnsupportedQualifier, CFatal, (Prims.of_int (227)));
  (Fatal_UserTacticFailure, CFatal, (Prims.of_int (228)));
  (Fatal_ValueRestriction, CFatal, (Prims.of_int (229)));
  (Fatal_VariableNotFound, CFatal, (Prims.of_int (230)));
  (Fatal_WrongBodyTypeForReturnWP, CFatal, (Prims.of_int (231)));
  (Fatal_WrongDataAppHeadFormat, CFatal, (Prims.of_int (232)));
  (Fatal_WrongDefinitionOrder, CFatal, (Prims.of_int (233)));
  (Fatal_WrongResultTypeAfterConstrutor, CFatal, (Prims.of_int (234)));
  (Fatal_WrongTerm, CFatal, (Prims.of_int (235)));
  (Fatal_WhenClauseNotSupported, CFatal, (Prims.of_int (236)));
  (Unused01, CFatal, (Prims.of_int (237)));
  (Warning_PluginNotImplemented, CError, (Prims.of_int (238)));
  (Warning_AddImplicitAssumeNewQualifier, CWarning, (Prims.of_int (239)));
  (Warning_AdmitWithoutDefinition, CWarning, (Prims.of_int (240)));
  (Warning_CachedFile, CWarning, (Prims.of_int (241)));
  (Warning_DefinitionNotTranslated, CWarning, (Prims.of_int (242)));
  (Warning_DependencyFound, CWarning, (Prims.of_int (243)));
  (Warning_DeprecatedEqualityOnBinder, CWarning, (Prims.of_int (244)));
  (Warning_DeprecatedOpaqueQualifier, CWarning, (Prims.of_int (245)));
  (Warning_DocOverwrite, CWarning, (Prims.of_int (246)));
  (Warning_FileNotWritten, CWarning, (Prims.of_int (247)));
  (Warning_Filtered, CWarning, (Prims.of_int (248)));
  (Warning_FunctionLiteralPrecisionLoss, CWarning, (Prims.of_int (249)));
  (Warning_FunctionNotExtacted, CWarning, (Prims.of_int (250)));
  (Warning_HintFailedToReplayProof, CWarning, (Prims.of_int (251)));
  (Warning_HitReplayFailed, CWarning, (Prims.of_int (252)));
  (Warning_IDEIgnoreCodeGen, CWarning, (Prims.of_int (253)));
  (Warning_IllFormedGoal, CWarning, (Prims.of_int (254)));
  (Warning_InaccessibleArgument, CWarning, (Prims.of_int (255)));
  (Warning_IncoherentImplicitQualifier, CWarning, (Prims.of_int (256)));
  (Warning_IrrelevantQualifierOnArgumentToReflect, CWarning,
    (Prims.of_int (257)));
  (Warning_IrrelevantQualifierOnArgumentToReify, CWarning,
    (Prims.of_int (258)));
  (Warning_MalformedWarnErrorList, CWarning, (Prims.of_int (259)));
  (Warning_MetaAlienNotATmUnknown, CWarning, (Prims.of_int (260)));
  (Warning_MultipleAscriptions, CWarning, (Prims.of_int (261)));
  (Warning_NondependentUserDefinedDataType, CWarning, (Prims.of_int (262)));
  (Warning_NonListLiteralSMTPattern, CWarning, (Prims.of_int (263)));
  (Warning_NormalizationFailure, CWarning, (Prims.of_int (264)));
  (Warning_NotDependentArrow, CWarning, (Prims.of_int (265)));
  (Warning_NotEmbedded, CWarning, (Prims.of_int (266)));
  (Warning_PatternMissingBoundVar, CWarning, (Prims.of_int (267)));
  (Warning_RecursiveDependency, CWarning, (Prims.of_int (268)));
  (Warning_RedundantExplicitCurrying, CWarning, (Prims.of_int (269)));
  (Warning_SMTPatTDeprecated, CWarning, (Prims.of_int (270)));
  (Warning_SMTPatternIllFormed, CWarning, (Prims.of_int (271)));
  (Warning_TopLevelEffect, CWarning, (Prims.of_int (272)));
  (Warning_UnboundModuleReference, CWarning, (Prims.of_int (273)));
  (Warning_UnexpectedFile, CWarning, (Prims.of_int (274)));
  (Warning_UnexpectedFsTypApp, CWarning, (Prims.of_int (275)));
  (Warning_UnexpectedZ3Output, CError, (Prims.of_int (276)));
  (Warning_UnprotectedTerm, CWarning, (Prims.of_int (277)));
  (Warning_UnrecognizedAttribute, CWarning, (Prims.of_int (278)));
  (Warning_UpperBoundCandidateAlreadyVisited, CWarning, (Prims.of_int (279)));
  (Warning_UseDefaultEffect, CWarning, (Prims.of_int (280)));
  (Warning_WrongErrorLocation, CWarning, (Prims.of_int (281)));
  (Warning_Z3InvocationWarning, CWarning, (Prims.of_int (282)));
  (Warning_MissingInterfaceOrImplementation, CWarning, (Prims.of_int (283)));
  (Warning_ConstructorBuildsUnexpectedType, CWarning, (Prims.of_int (284)));
  (Warning_ModuleOrFileNotFoundWarning, CWarning, (Prims.of_int (285)));
  (Error_NoLetMutable, CAlwaysError, (Prims.of_int (286)));
  (Error_BadImplicit, CAlwaysError, (Prims.of_int (287)));
  (Warning_DeprecatedDefinition, CWarning, (Prims.of_int (288)));
  (Fatal_SMTEncodingArityMismatch, CFatal, (Prims.of_int (289)));
  (Warning_Defensive, CWarning, (Prims.of_int (290)));
  (Warning_CantInspect, CWarning, (Prims.of_int (291)));
  (Warning_NilGivenExplicitArgs, CWarning, (Prims.of_int (292)));
  (Warning_ConsAppliedExplicitArgs, CWarning, (Prims.of_int (293)));
  (Warning_UnembedBinderKnot, CWarning, (Prims.of_int (294)));
  (Fatal_TacticProofRelevantGoal, CFatal, (Prims.of_int (295)));
  (Warning_TacAdmit, CWarning, (Prims.of_int (296)));
  (Fatal_IncoherentPatterns, CFatal, (Prims.of_int (297)));
  (Error_NoSMTButNeeded, CAlwaysError, (Prims.of_int (298)));
  (Fatal_UnexpectedAntiquotation, CFatal, (Prims.of_int (299)));
  (Fatal_SplicedUndef, CFatal, (Prims.of_int (300)));
  (Fatal_SpliceUnembedFail, CFatal, (Prims.of_int (301)));
  (Warning_ExtractionUnexpectedEffect, CWarning, (Prims.of_int (302)));
  (Error_DidNotFail, CError, (Prims.of_int (303)));
  (Warning_UnappliedFail, CWarning, (Prims.of_int (304)));
  (Warning_QuantifierWithoutPattern, CSilent, (Prims.of_int (305)));
  (Error_EmptyFailErrs, CAlwaysError, (Prims.of_int (306)));
  (Warning_logicqualifier, CWarning, (Prims.of_int (307)));
  (Fatal_CyclicDependence, CFatal, (Prims.of_int (308)));
  (Error_InductiveAnnotNotAType, CError, (Prims.of_int (309)));
  (Fatal_FriendInterface, CFatal, (Prims.of_int (310)));
  (Error_CannotRedefineConst, CError, (Prims.of_int (311)));
  (Error_BadClassDecl, CError, (Prims.of_int (312)));
  (Error_BadInductiveParam, CFatal, (Prims.of_int (313)));
  (Error_FieldShadow, CFatal, (Prims.of_int (314)));
  (Error_UnexpectedDM4FType, CFatal, (Prims.of_int (315)));
  (Fatal_EffectAbbreviationResultTypeMismatch, CFatal, (Prims.of_int (316)));
  (Error_AlreadyCachedAssertionFailure, CFatal, (Prims.of_int (317)));
  (Error_MustEraseMissing, CWarning, (Prims.of_int (318)));
  (Warning_EffectfulArgumentToErasedFunction, CWarning, (Prims.of_int (319)));
  (Fatal_EmptySurfaceLet, CFatal, (Prims.of_int (320)));
  (Warning_UnexpectedCheckedFile, CWarning, (Prims.of_int (321)));
  (Fatal_ExtractionUnsupported, CFatal, (Prims.of_int (322)));
  (Warning_SMTErrorReason, CWarning, (Prims.of_int (323)));
  (Warning_CoercionNotFound, CWarning, (Prims.of_int (324)));
  (Error_QuakeFailed, CError, (Prims.of_int (325)));
  (Error_IllSMTPat, CError, (Prims.of_int (326)));
  (Error_IllScopedTerm, CError, (Prims.of_int (327)));
  (Warning_UnusedLetRec, CWarning, (Prims.of_int (328)));
  (Fatal_Effects_Ordering_Coherence, CError, (Prims.of_int (329)));
  (Warning_BleedingEdge_Feature, CWarning, (Prims.of_int (330)));
  (Warning_IgnoredBinding, CWarning, (Prims.of_int (331)));
  (* note: numeric code 332 is deliberately absent (no entry between
     331 and 333) — presumably a retired code; do not reuse it *)
  (Warning_CouldNotReadHints, CWarning, (Prims.of_int (333)));
  (Fatal_BadUvar, CFatal, (Prims.of_int (334)));
  (Warning_WarnOnUse, CSilent, (Prims.of_int (335)));
  (Warning_DeprecatedAttributeSyntax, CSilent, (Prims.of_int (336)));
  (Warning_DeprecatedGeneric, CWarning, (Prims.of_int (337)));
  (Error_BadSplice, CError, (Prims.of_int (338)));
  (Error_UnexpectedUnresolvedUvar, CAlwaysError, (Prims.of_int (339)));
  (Warning_UnfoldPlugin, CWarning, (Prims.of_int (340)));
  (Error_LayeredMissingAnnot, CAlwaysError, (Prims.of_int (341)));
  (Error_CallToErased, CError, (Prims.of_int (342)));
  (Error_ErasedCtor, CError, (Prims.of_int (343)));
  (Error_RemoveUnusedTypeParameter, CWarning, (Prims.of_int (344)));
  (Warning_NoMagicInFSharp, CWarning, (Prims.of_int (345)));
  (Error_BadLetOpenRecord, CAlwaysError, (Prims.of_int (346)));
  (Error_UnexpectedTypeclassInstance, CAlwaysError, (Prims.of_int (347)));
  (Warning_AmbiguousResolveImplicitsHook, CWarning, (Prims.of_int (348)));
  (Warning_SplitAndRetryQueries, CWarning, (Prims.of_int (349)));
  (Warning_DeprecatedLightDoNotation, CWarning, (Prims.of_int (350)));
  (Warning_FailedToCheckInitialTacticGoal, CSilent, (Prims.of_int (351)));
  (Warning_Adhoc_IndexedEffect_Combinator, CWarning, (Prims.of_int (352)));
  (Error_PluginDynlink, CError, (Prims.of_int (353)));
  (Error_InternalQualifier, CAlwaysError, (Prims.of_int (354)));
  (Warning_NameEscape, CWarning, (Prims.of_int (355)))]

================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Getopt.ml
================================================
(* Minimal getopt-style command-line option descriptions for the F* CLI. *)
(* Sentinel short-flag value meaning "this option has no short form" —
   presumably compared against the FStar_Char.char field of opt'; confirm
   with callers. *)
let noshort = 0
(* An option either takes no argument (ZeroArgs handler) or exactly one
   (OneArg handler, plus a display name for the expected argument). *)
type 'a opt_variant =
  | ZeroArgs of (unit -> 'a)
  | OneArg of (string -> 'a) * string
(* (short flag, long name, behavior, documentation string) *)
type 'a opt' = FStar_Char.char * string * 'a opt_variant * string
type opt = unit opt'
(* Overall outcome of a command-line parse. *)
type parse_cmdline_res =
  | Empty
  | Help
  | Error of string
  | Success

(* Monadic sequencing over parse results: run [f] only after a successful
   parse, and propagate any other result unchanged.
   The previous match omitted [Empty] ("does not occur internally"), which
   made [bind] partial and raised Match_failure if an Empty ever reached
   it; Empty is now passed through like Help/Error, so the match is total
   while behavior on all previously-handled inputs is unchanged. *)
let bind l f =
    match l with
    | Empty
    | Help
    | Error _ -> l
    | Success -> f ()

(* Classify a command-line token [s].
   Returns None when [s] is not an option (does not start with "-").
   Otherwise returns Some (o, strim): [strim] is [s] with its dash prefix
   removed, and [o] is the matching entry from [specs] (None when the
   option is unknown — the caller should report an error). *)
let find_matching_opt specs s : (opt option * string) option =
  let len = String.length s in
  let prefix n = String.sub s 0 n in
  let rest n = String.sub s n (len - n) in
  if len < 2 then None
  else if prefix 2 = "--" then
    (* long option: match on the spec's long-name field *)
    let strim = rest 2 in
    Some (FStar_List.tryFind (fun (_, long, _, _) -> long = strim) specs, strim)
  else if prefix 1 = "-" then
    (* short option: compare against the one-character short flag *)
    let strim = rest 1 in
    Some
      (FStar_List.tryFind
         (fun (short, _, _, _) -> FStar_String.make Z.one short = strim)
         specs,
       strim)
  else None

(* remark: doesn't work with files starting with -- *)
(* Walk the argument array [ar] from index [ix] up to [max] (inclusive).
   Words that are not options are handed to the default handler [def];
   recognized options dispatch on their opt_variant, consuming one extra
   array slot for OneArg options.  NOTE(review): the counter [i] is
   threaded through every recursive call but never read — looks like a
   leftover; confirm before removing. *)
let rec parse (opts:opt list) def ar ix max i : parse_cmdline_res =
  if ix > max then Success
  else
    let arg = ar.(ix) in
    (* Treat [arg] as a plain (non-option) word and continue. *)
    let go_on () = bind (def arg) (fun _ -> parse opts def ar (ix + 1) max (i + 1)) in
    match find_matching_opt opts arg with
    | None -> go_on ()
    | Some (None, _) -> Error ("unrecognized option '" ^ arg ^ "'\n")
    | Some (Some (_, _, p, _), argtrim) ->
      begin match p with
      | ZeroArgs f -> f (); parse opts def ar (ix + 1) max (i + 1)
      | OneArg (f, _) ->
         (* The option needs a value from the next slot. *)
         if ix + 1 > max
         then Error ("last option '" ^ argtrim ^ "' takes an argument but has none\n")
         else
           let r =
               (* Any exception from the handler is reported as a bad argument. *)
               try (f (ar.(ix + 1)); Success)
               with _ -> Error ("wrong argument given to option `" ^ argtrim ^ "`\n")
           in bind r (fun () -> parse opts def ar (ix + 2) max (i + 1))
      end

(* Parse [args] starting at [offset], scanning through the final element. *)
let parse_array specs others args offset =
  let last = Array.length args - 1 in
  parse specs others args offset last 0

(* Parse the process command line, skipping argv.(0) (the program name);
   a bare invocation (argv of length 1) yields Empty. *)
let parse_cmdline specs others =
  match Array.length Sys.argv with
  | 1 -> Empty
  | _ -> parse_array specs others Sys.argv 1

(* Parse options out of a single string: [str] is split on spaces and tabs
   outside quotes, while each single-quoted '...' fragment is kept as one
   argument.  Returns Error when a quote is left unmatched. *)
let parse_string specs others (str:string) =
    let split_spaces (str:string) =
      let seps = [int_of_char ' '; int_of_char '\t'] in
      (* Use structural inequality (<>) here: the previous physical test
         (!=) compared string identity, so the freshly-allocated empty
         fragments produced by split were never filtered out. *)
      FStar_List.filter (fun s -> s <> "") (FStar_String.split seps str)
    in
    (* to match the style of the F# code in FStar.GetOpt.fs *)
    (* Index of [c] in [str], or -1 when absent. *)
    let index_of str c =
      try
        String.index str c
      with Not_found -> -1
    in
    let substring_from s j =
        let len = String.length s - j in
        String.sub s j len
    in
    (* Split into words, treating each '...' fragment as a single word;
       None signals an unmatched opening quote. *)
    let rec split_quoted_fragments (str:string) =
        let i = index_of str '\'' in
        if i < 0 then Some (split_spaces str)
        else let prefix = String.sub str 0 i in
             let suffix = substring_from str (i + 1) in
             let j = index_of suffix '\'' in
             if j < 0 then None
             else let quoted_frag = String.sub suffix 0 j in
                  let rest = split_quoted_fragments (substring_from suffix (j + 1)) in
                  match rest with
                  | None -> None
                  | Some rest -> Some (split_spaces prefix @ quoted_frag::rest)

    in
    match split_quoted_fragments str with
    | None -> Error("Failed to parse options; unmatched quote \"'\"")
    | Some args ->
      parse_array specs others (Array.of_list args) 0

(* Parse an explicit list of argument words from its start. *)
let parse_list specs others lst =
  let arr = Array.of_list lst in
  parse_array specs others arr 0

(* The full process command line (including argv.(0)) as a list. *)
let cmdline () = Array.to_list Sys.argv


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Ident.ml
================================================
open Prims
(* Identifiers and (qualified) long identifiers for the F* surface syntax.
   This module is extracted/generated code: the uu___/__proj__ bindings are
   the mechanical discriminators and projectors produced by F* extraction. *)
(* A single identifier: its text plus the source range it came from. *)
type ident = {
  idText: Prims.string ;
  idRange: FStar_Compiler_Range.range }[@@deriving yojson,show,yojson,show]
let (__proj__Mkident__item__idText : ident -> Prims.string) =
  fun projectee -> match projectee with | { idText; idRange;_} -> idText
let (__proj__Mkident__item__idRange : ident -> FStar_Compiler_Range.range) =
  fun projectee -> match projectee with | { idText; idRange;_} -> idRange
(* A module path as raw strings / as idents. *)
type path = Prims.string Prims.list[@@deriving yojson,show]
type ipath = ident Prims.list[@@deriving yojson,show]
(* A fully qualified identifier: namespace, final ident, and the cached
   string forms of the namespace ([nsstr]) and of the whole lid ([str]). *)
type lident =
  {
  ns: ipath ;
  ident: ident ;
  nsstr: Prims.string ;
  str: Prims.string }[@@deriving yojson,show,yojson,show]
let (__proj__Mklident__item__ns : lident -> ipath) =
  fun projectee ->
    match projectee with | { ns; ident = ident1; nsstr; str;_} -> ns
let (__proj__Mklident__item__ident : lident -> ident) =
  fun projectee ->
    match projectee with | { ns; ident = ident1; nsstr; str;_} -> ident1
let (__proj__Mklident__item__nsstr : lident -> Prims.string) =
  fun projectee ->
    match projectee with | { ns; ident = ident1; nsstr; str;_} -> nsstr
let (__proj__Mklident__item__str : lident -> Prims.string) =
  fun projectee ->
    match projectee with | { ns; ident = ident1; nsstr; str;_} -> str
(* Build an ident from its text and source range. *)
let (mk_ident : (Prims.string * FStar_Compiler_Range.range) -> ident) =
  fun (text, range) -> { idText = text; idRange = range }
(* Replace the source range of an ident, keeping its text. *)
let (set_id_range : FStar_Compiler_Range.range -> ident -> ident) =
  fun r i -> { i with idRange = r }
(* Prefix used for machine-generated identifiers. *)
let (reserved_prefix : Prims.string) = "uu___"
(* Gensym state: a shared mutable counter together with its increment
   (next_id) and reset operations.  Generated code — the triple is built
   once and destructured by the bindings below. *)
let (uu___32 :
  (((unit -> Prims.int) * (unit -> unit)) * Prims.int
    FStar_Compiler_Effect.ref))
  =
  let x = ref Prims.int_zero in
  let next_id uu___ =
    (* post-increment: returns the value before bumping *)
    let v = FStar_Compiler_Effect.op_Bang x in
    FStar_Compiler_Effect.op_Colon_Equals x (v + Prims.int_one); v in
  let reset uu___ = FStar_Compiler_Effect.op_Colon_Equals x Prims.int_zero in
  ((next_id, reset), x)
let (_gen : ((unit -> Prims.int) * (unit -> unit))) =
  match uu___32 with | (_gen1, _secret_ref) -> _gen1
let (_secret_ref : Prims.int FStar_Compiler_Effect.ref) =
  match uu___32 with | (_gen1, _secret_ref1) -> _secret_ref1
let (next_id : unit -> Prims.int) =
  fun uu___ -> FStar_Pervasives_Native.fst _gen ()
let (reset_gensym : unit -> unit) =
  fun uu___ -> FStar_Pervasives_Native.snd _gen ()
(* Run [f] and restore the gensym counter afterwards, whether [f] returns
   normally or raises (the exception is re-raised after restoring). *)
let with_frozen_gensym : 'a . (unit -> 'a) -> 'a =
  fun f ->
    let v = FStar_Compiler_Effect.op_Bang _secret_ref in
    let r =
      try (fun uu___ -> match () with | () -> f ()) ()
      with
      | uu___ ->
          (FStar_Compiler_Effect.op_Colon_Equals _secret_ref v;
           FStar_Compiler_Effect.raise uu___) in
    FStar_Compiler_Effect.op_Colon_Equals _secret_ref v; r
(* Fresh identifier "<s><n>" at range [r], where n comes from the gensym. *)
let (gen' : Prims.string -> FStar_Compiler_Range.range -> ident) =
  fun s r ->
    let i = next_id () in
    let text = Prims.op_Hat s (Prims.string_of_int i) in
    mk_ident (text, r)
(* Fresh reserved ("uu___N") identifier at range [r]. *)
let (gen : FStar_Compiler_Range.range -> ident) = gen' reserved_prefix
(* Simple accessors and conversions between idents, paths and lids. *)
let (ident_of_lid : lident -> ident) = fun l -> l.ident
let (range_of_id : ident -> FStar_Compiler_Range.range) =
  fun id -> id.idRange
(* Ident with a dummy range, for synthesized names. *)
let (id_of_text : Prims.string -> ident) =
  fun str -> mk_ident (str, FStar_Compiler_Range.dummyRange)
let (string_of_id : ident -> Prims.string) = fun id -> id.idText
let (text_of_path : path -> Prims.string) =
  fun path1 -> FStar_Compiler_Util.concat_l "." path1
(* 46 is ASCII '.': split a dotted name into path components. *)
let (path_of_text : Prims.string -> path) =
  fun text -> FStar_String.split [46] text
let (path_of_ns : ipath -> path) =
  fun ns -> FStar_Compiler_List.map string_of_id ns
(* Full path of a lid: namespace components followed by the final ident. *)
let (path_of_lid : lident -> path) =
  fun lid ->
    FStar_Compiler_List.map string_of_id
      (FStar_Compiler_List.op_At lid.ns [lid.ident])
let (ns_of_lid : lident -> ipath) = fun lid -> lid.ns
let (ids_of_lid : lident -> ipath) =
  fun lid -> FStar_Compiler_List.op_At lid.ns [lid.ident]
(* Assemble a lid from a namespace and a final ident, caching the string
   form of the namespace and of the full dotted name. *)
let (lid_of_ns_and_id : ipath -> ident -> lident) =
  fun ns id ->
    let nsstr = text_of_path (FStar_Compiler_List.map string_of_id ns) in
    let str =
      if nsstr = ""
      then id.idText
      else Prims.op_Hat nsstr (Prims.op_Hat "." id.idText) in
    { ns; ident = id; nsstr; str }
(* Constructors, comparisons and pretty-printers over lids.  Generated
   code: the uu___ temporaries come from F* extraction. *)
(* All-but-last idents become the namespace; the last is the final ident. *)
let (lid_of_ids : ipath -> lident) =
  fun ids ->
    let uu___ = FStar_Compiler_Util.prefix ids in
    match uu___ with | (ns, id) -> lid_of_ns_and_id ns id
let (lid_of_str : Prims.string -> lident) =
  fun str ->
    let uu___ =
      FStar_Compiler_List.map id_of_text (FStar_Compiler_Util.split str ".") in
    lid_of_ids uu___
(* Every path component gets the same position [pos]. *)
let (lid_of_path : path -> FStar_Compiler_Range.range -> lident) =
  fun path1 ->
    fun pos ->
      let ids = FStar_Compiler_List.map (fun s -> mk_ident (s, pos)) path1 in
      lid_of_ids ids
let (text_of_lid : lident -> Prims.string) = fun lid -> lid.str
(* Equality is on the cached full-string forms, ignoring ranges. *)
let (lid_equals : lident -> lident -> Prims.bool) =
  fun l1 -> fun l2 -> l1.str = l2.str
let (ident_equals : ident -> ident -> Prims.bool) =
  fun id1 -> fun id2 -> id1.idText = id2.idText
type lid = lident[@@deriving yojson,show]
(* The range of a lid is the range of its final ident. *)
let (range_of_lid : lident -> FStar_Compiler_Range.range) =
  fun lid1 -> range_of_id lid1.ident
let (set_lid_range : lident -> FStar_Compiler_Range.range -> lident) =
  fun l ->
    fun r ->
      {
        ns = (l.ns);
        ident =
          (let uu___ = l.ident in { idText = (uu___.idText); idRange = r });
        nsstr = (l.nsstr);
        str = (l.str)
      }
(* Append one more component to a lid, keeping its range. *)
let (lid_add_suffix : lident -> Prims.string -> lident) =
  fun l ->
    fun s ->
      let path1 = path_of_lid l in
      let uu___ = range_of_lid l in
      lid_of_path (FStar_Compiler_List.op_At path1 [s]) uu___
(* Underscore-joined form, e.g. for OCaml module file names. *)
let (ml_path_of_lid : lident -> Prims.string) =
  fun lid1 ->
    let uu___ =
      let uu___1 = path_of_ns lid1.ns in
      let uu___2 = let uu___3 = string_of_id lid1.ident in [uu___3] in
      FStar_Compiler_List.op_At uu___1 uu___2 in
    FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat "_") uu___
let (string_of_lid : lident -> Prims.string) = fun lid1 -> lid1.str
(* lid1.ident becomes part of the namespace; [id] is the new final ident. *)
let (qual_id : lident -> ident -> lident) =
  fun lid1 ->
    fun id ->
      let uu___ =
        lid_of_ids (FStar_Compiler_List.op_At lid1.ns [lid1.ident; id]) in
      let uu___1 = range_of_id id in set_lid_range uu___ uu___1
let (nsstr : lident -> Prims.string) = fun l -> l.nsstr


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_ImmutableArray_Base.ml
================================================
(* Backing representation for F*'s immutable arrays: a plain OCaml array
   that this module never mutates after construction. *)
type 'a t = 'a array

let of_list (l:'a list) = Array.of_list l

(* Lengths and indices cross the F*/OCaml boundary as zarith integers. *)
let length (a: 'a t) = Z.of_int (Array.length a)

(* Array.get raises Invalid_argument when [i] is out of bounds. *)
let index (a: 'a t) (i:Z.t) = Array.get a (Z.to_int i)


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_List.ml
================================================
(* We give an implementation here using OCaml's BatList,
   which provides tail-recursive versions of most functions *)
let isEmpty l = l = []
let hd = BatList.hd
let tail = BatList.tl
let tl = BatList.tl

(* Partial: raises Match_failure on the empty list. *)
let rec last = function
  | x :: [] -> x
  | _ :: tl -> last tl
(* Lengths, indices and comparator results are converted to/from zarith
   integers at this FFI boundary. *)
let length l = Z.of_int (BatList.length l)
let rev = BatList.rev
let append = BatList.append
let op_At = append
let flatten = BatList.flatten
let map = BatList.map
let mapi f l = BatList.mapi (fun i x -> f (Z.of_int i) x) l
let fold_left = BatList.fold_left
let fold_right = BatList.fold_right
let fold_left2 = BatList.fold_left2
let existsb f l = BatList.exists f l
(* Total variant of find: None instead of Not_found. *)
let find f l = try Some (BatList.find f l) with | Not_found -> None
let filter = BatList.filter
let for_all = BatList.for_all
let collect f l = BatList.flatten (BatList.map f l)
let tryFind = find
let choose = BatList.filter_map
let partition = BatList.partition
let sortWith f l = BatList.sort (fun x y -> Z.to_int (f x y)) l

(* [isEmpty], [hd], [tl] and [tail] were re-bound here with definitions
   identical to the ones earlier in this file; the redundant shadowing
   bindings are removed (the module's exported interface is unchanged). *)
let singleton x = [x]
let mem = BatList.mem
let memT = mem
let iter = BatList.iter
let forall2 = BatList.for_all2


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_AST.ml
================================================
open Prims
(* F* surface-syntax AST.  Generated/extracted code: the uu___is_* and
   __proj__* bindings below are the mechanical discriminators and
   projectors produced by F* extraction — kept byte-identical. *)
(* Parsing level of a term (term / type / kind / formula). *)
type level =
  | Un 
  | Expr 
  | Type_level 
  | Kind 
  | Formula 
let (uu___is_Un : level -> Prims.bool) =
  fun projectee -> match projectee with | Un -> true | uu___ -> false
let (uu___is_Expr : level -> Prims.bool) =
  fun projectee -> match projectee with | Expr -> true | uu___ -> false
let (uu___is_Type_level : level -> Prims.bool) =
  fun projectee -> match projectee with | Type_level -> true | uu___ -> false
let (uu___is_Kind : level -> Prims.bool) =
  fun projectee -> match projectee with | Kind -> true | uu___ -> false
let (uu___is_Formula : level -> Prims.bool) =
  fun projectee -> match projectee with | Formula -> true | uu___ -> false
(* Whether a let-binding is recursive. *)
type let_qualifier =
  | NoLetQualifier 
  | Rec 
let (uu___is_NoLetQualifier : let_qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | NoLetQualifier -> true | uu___ -> false
let (uu___is_Rec : let_qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Rec -> true | uu___ -> false
(* Static vs dynamic quotation. *)
type quote_kind =
  | Static 
  | Dynamic 
let (uu___is_Static : quote_kind -> Prims.bool) =
  fun projectee -> match projectee with | Static -> true | uu___ -> false
let (uu___is_Dynamic : quote_kind -> Prims.bool) =
  fun projectee -> match projectee with | Dynamic -> true | uu___ -> false
(* The mutually recursive surface-AST type family: raw terms (term'),
   located terms (term), calc-proof steps, binders, patterns, argument
   qualifiers and application flavors (imp).  Generated from the F*
   sources — kept byte-identical. *)
type term' =
  | Wild 
  | Const of FStar_Const.sconst 
  | Op of (FStar_Ident.ident * term Prims.list) 
  | Tvar of FStar_Ident.ident 
  | Uvar of FStar_Ident.ident 
  | Var of FStar_Ident.lid 
  | Name of FStar_Ident.lid 
  | Projector of (FStar_Ident.lid * FStar_Ident.ident) 
  | Construct of (FStar_Ident.lid * (term * imp) Prims.list) 
  | Abs of (pattern Prims.list * term) 
  | App of (term * term * imp) 
  | Let of (let_qualifier * (term Prims.list FStar_Pervasives_Native.option *
  (pattern * term)) Prims.list * term) 
  | LetOperator of ((FStar_Ident.ident * pattern * term) Prims.list * term) 
  | LetOpen of (FStar_Ident.lid * term) 
  | LetOpenRecord of (term * term * term) 
  | Seq of (term * term) 
  | Bind of (FStar_Ident.ident * term * term) 
  | If of (term * FStar_Ident.ident FStar_Pervasives_Native.option *
  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)
  FStar_Pervasives_Native.option * term * term) 
  | Match of (term * FStar_Ident.ident FStar_Pervasives_Native.option *
  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)
  FStar_Pervasives_Native.option * (pattern * term
  FStar_Pervasives_Native.option * term) Prims.list) 
  | TryWith of (term * (pattern * term FStar_Pervasives_Native.option * term)
  Prims.list) 
  | Ascribed of (term * term * term FStar_Pervasives_Native.option *
  Prims.bool) 
  | Record of (term FStar_Pervasives_Native.option * (FStar_Ident.lid * term)
  Prims.list) 
  | Project of (term * FStar_Ident.lid) 
  | Product of (binder Prims.list * term) 
  | Sum of ((binder, term) FStar_Pervasives.either Prims.list * term) 
  | QForall of (binder Prims.list * (FStar_Ident.ident Prims.list * term
  Prims.list Prims.list) * term) 
  | QExists of (binder Prims.list * (FStar_Ident.ident Prims.list * term
  Prims.list Prims.list) * term) 
  | Refine of (binder * term) 
  | NamedTyp of (FStar_Ident.ident * term) 
  | Paren of term 
  | Requires of (term * Prims.string FStar_Pervasives_Native.option) 
  | Ensures of (term * Prims.string FStar_Pervasives_Native.option) 
  | LexList of term Prims.list 
  | WFOrder of (term * term) 
  | Decreases of (term * Prims.string FStar_Pervasives_Native.option) 
  | Labeled of (term * Prims.string * Prims.bool) 
  | Discrim of FStar_Ident.lid 
  | Attributes of term Prims.list 
  | Antiquote of term 
  | Quote of (term * quote_kind) 
  | VQuote of term 
  | CalcProof of (term * term * calc_step Prims.list) 
  | IntroForall of (binder Prims.list * term * term) 
  | IntroExists of (binder Prims.list * term * term Prims.list * term) 
  | IntroImplies of (term * term * binder * term) 
  | IntroOr of (Prims.bool * term * term * term) 
  | IntroAnd of (term * term * term * term) 
  | ElimForall of (binder Prims.list * term * term Prims.list) 
  | ElimExists of (binder Prims.list * term * term * binder * term) 
  | ElimImplies of (term * term * term) 
  | ElimOr of (term * term * term * binder * term * binder * term) 
  | ElimAnd of (term * term * term * binder * binder * term) 
(* A raw term together with its source range and parsing level. *)
and term = {
  tm: term' ;
  range: FStar_Compiler_Range.range ;
  level: level }
and calc_step =
  | CalcStep of (term * term * term) 
and binder' =
  | Variable of FStar_Ident.ident 
  | TVariable of FStar_Ident.ident 
  | Annotated of (FStar_Ident.ident * term) 
  | TAnnotated of (FStar_Ident.ident * term) 
  | NoName of term 
and binder =
  {
  b: binder' ;
  brange: FStar_Compiler_Range.range ;
  blevel: level ;
  aqual: arg_qualifier FStar_Pervasives_Native.option ;
  battributes: term Prims.list }
and pattern' =
  | PatWild of (arg_qualifier FStar_Pervasives_Native.option * term
  Prims.list) 
  | PatConst of FStar_Const.sconst 
  | PatApp of (pattern * pattern Prims.list) 
  | PatVar of (FStar_Ident.ident * arg_qualifier
  FStar_Pervasives_Native.option * term Prims.list) 
  | PatName of FStar_Ident.lid 
  | PatTvar of (FStar_Ident.ident * arg_qualifier
  FStar_Pervasives_Native.option * term Prims.list) 
  | PatList of pattern Prims.list 
  | PatTuple of (pattern Prims.list * Prims.bool) 
  | PatRecord of (FStar_Ident.lid * pattern) Prims.list 
  | PatAscribed of (pattern * (term * term FStar_Pervasives_Native.option)) 
  | PatOr of pattern Prims.list 
  | PatOp of FStar_Ident.ident 
  | PatVQuote of term 
and pattern = {
  pat: pattern' ;
  prange: FStar_Compiler_Range.range }
and arg_qualifier =
  | Implicit 
  | Equality 
  | Meta of term 
  | TypeClassArg 
and imp =
  | FsTypApp 
  | Hash 
  | UnivApp 
  | HashBrace of term 
  | Infix 
  | Nothing 
(* Generated discriminators/projectors for term' (Wild .. QExists) —
   kept byte-identical. *)
let (uu___is_Wild : term' -> Prims.bool) =
  fun projectee -> match projectee with | Wild -> true | uu___ -> false
let (uu___is_Const : term' -> Prims.bool) =
  fun projectee -> match projectee with | Const _0 -> true | uu___ -> false
let (__proj__Const__item___0 : term' -> FStar_Const.sconst) =
  fun projectee -> match projectee with | Const _0 -> _0
let (uu___is_Op : term' -> Prims.bool) =
  fun projectee -> match projectee with | Op _0 -> true | uu___ -> false
let (__proj__Op__item___0 : term' -> (FStar_Ident.ident * term Prims.list)) =
  fun projectee -> match projectee with | Op _0 -> _0
let (uu___is_Tvar : term' -> Prims.bool) =
  fun projectee -> match projectee with | Tvar _0 -> true | uu___ -> false
let (__proj__Tvar__item___0 : term' -> FStar_Ident.ident) =
  fun projectee -> match projectee with | Tvar _0 -> _0
let (uu___is_Uvar : term' -> Prims.bool) =
  fun projectee -> match projectee with | Uvar _0 -> true | uu___ -> false
let (__proj__Uvar__item___0 : term' -> FStar_Ident.ident) =
  fun projectee -> match projectee with | Uvar _0 -> _0
let (uu___is_Var : term' -> Prims.bool) =
  fun projectee -> match projectee with | Var _0 -> true | uu___ -> false
let (__proj__Var__item___0 : term' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Var _0 -> _0
let (uu___is_Name : term' -> Prims.bool) =
  fun projectee -> match projectee with | Name _0 -> true | uu___ -> false
let (__proj__Name__item___0 : term' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Name _0 -> _0
let (uu___is_Projector : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Projector _0 -> true | uu___ -> false
let (__proj__Projector__item___0 :
  term' -> (FStar_Ident.lid * FStar_Ident.ident)) =
  fun projectee -> match projectee with | Projector _0 -> _0
let (uu___is_Construct : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Construct _0 -> true | uu___ -> false
let (__proj__Construct__item___0 :
  term' -> (FStar_Ident.lid * (term * imp) Prims.list)) =
  fun projectee -> match projectee with | Construct _0 -> _0
let (uu___is_Abs : term' -> Prims.bool) =
  fun projectee -> match projectee with | Abs _0 -> true | uu___ -> false
let (__proj__Abs__item___0 : term' -> (pattern Prims.list * term)) =
  fun projectee -> match projectee with | Abs _0 -> _0
let (uu___is_App : term' -> Prims.bool) =
  fun projectee -> match projectee with | App _0 -> true | uu___ -> false
let (__proj__App__item___0 : term' -> (term * term * imp)) =
  fun projectee -> match projectee with | App _0 -> _0
let (uu___is_Let : term' -> Prims.bool) =
  fun projectee -> match projectee with | Let _0 -> true | uu___ -> false
let (__proj__Let__item___0 :
  term' ->
    (let_qualifier * (term Prims.list FStar_Pervasives_Native.option *
      (pattern * term)) Prims.list * term))
  = fun projectee -> match projectee with | Let _0 -> _0
let (uu___is_LetOperator : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | LetOperator _0 -> true | uu___ -> false
let (__proj__LetOperator__item___0 :
  term' -> ((FStar_Ident.ident * pattern * term) Prims.list * term)) =
  fun projectee -> match projectee with | LetOperator _0 -> _0
let (uu___is_LetOpen : term' -> Prims.bool) =
  fun projectee -> match projectee with | LetOpen _0 -> true | uu___ -> false
let (__proj__LetOpen__item___0 : term' -> (FStar_Ident.lid * term)) =
  fun projectee -> match projectee with | LetOpen _0 -> _0
let (uu___is_LetOpenRecord : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | LetOpenRecord _0 -> true | uu___ -> false
let (__proj__LetOpenRecord__item___0 : term' -> (term * term * term)) =
  fun projectee -> match projectee with | LetOpenRecord _0 -> _0
let (uu___is_Seq : term' -> Prims.bool) =
  fun projectee -> match projectee with | Seq _0 -> true | uu___ -> false
let (__proj__Seq__item___0 : term' -> (term * term)) =
  fun projectee -> match projectee with | Seq _0 -> _0
let (uu___is_Bind : term' -> Prims.bool) =
  fun projectee -> match projectee with | Bind _0 -> true | uu___ -> false
let (__proj__Bind__item___0 : term' -> (FStar_Ident.ident * term * term)) =
  fun projectee -> match projectee with | Bind _0 -> _0
let (uu___is_If : term' -> Prims.bool) =
  fun projectee -> match projectee with | If _0 -> true | uu___ -> false
let (__proj__If__item___0 :
  term' ->
    (term * FStar_Ident.ident FStar_Pervasives_Native.option *
      (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)
      FStar_Pervasives_Native.option * term * term))
  = fun projectee -> match projectee with | If _0 -> _0
let (uu___is_Match : term' -> Prims.bool) =
  fun projectee -> match projectee with | Match _0 -> true | uu___ -> false
let (__proj__Match__item___0 :
  term' ->
    (term * FStar_Ident.ident FStar_Pervasives_Native.option *
      (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)
      FStar_Pervasives_Native.option * (pattern * term
      FStar_Pervasives_Native.option * term) Prims.list))
  = fun projectee -> match projectee with | Match _0 -> _0
let (uu___is_TryWith : term' -> Prims.bool) =
  fun projectee -> match projectee with | TryWith _0 -> true | uu___ -> false
let (__proj__TryWith__item___0 :
  term' ->
    (term * (pattern * term FStar_Pervasives_Native.option * term)
      Prims.list))
  = fun projectee -> match projectee with | TryWith _0 -> _0
let (uu___is_Ascribed : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Ascribed _0 -> true | uu___ -> false
let (__proj__Ascribed__item___0 :
  term' -> (term * term * term FStar_Pervasives_Native.option * Prims.bool))
  = fun projectee -> match projectee with | Ascribed _0 -> _0
let (uu___is_Record : term' -> Prims.bool) =
  fun projectee -> match projectee with | Record _0 -> true | uu___ -> false
let (__proj__Record__item___0 :
  term' ->
    (term FStar_Pervasives_Native.option * (FStar_Ident.lid * term)
      Prims.list))
  = fun projectee -> match projectee with | Record _0 -> _0
let (uu___is_Project : term' -> Prims.bool) =
  fun projectee -> match projectee with | Project _0 -> true | uu___ -> false
let (__proj__Project__item___0 : term' -> (term * FStar_Ident.lid)) =
  fun projectee -> match projectee with | Project _0 -> _0
let (uu___is_Product : term' -> Prims.bool) =
  fun projectee -> match projectee with | Product _0 -> true | uu___ -> false
let (__proj__Product__item___0 : term' -> (binder Prims.list * term)) =
  fun projectee -> match projectee with | Product _0 -> _0
let (uu___is_Sum : term' -> Prims.bool) =
  fun projectee -> match projectee with | Sum _0 -> true | uu___ -> false
let (__proj__Sum__item___0 :
  term' -> ((binder, term) FStar_Pervasives.either Prims.list * term)) =
  fun projectee -> match projectee with | Sum _0 -> _0
let (uu___is_QForall : term' -> Prims.bool) =
  fun projectee -> match projectee with | QForall _0 -> true | uu___ -> false
let (__proj__QForall__item___0 :
  term' ->
    (binder Prims.list * (FStar_Ident.ident Prims.list * term Prims.list
      Prims.list) * term))
  = fun projectee -> match projectee with | QForall _0 -> _0
let (uu___is_QExists : term' -> Prims.bool) =
  fun projectee -> match projectee with | QExists _0 -> true | uu___ -> false
let (__proj__QExists__item___0 :
  term' ->
    (binder Prims.list * (FStar_Ident.ident Prims.list * term Prims.list
      Prims.list) * term))
  = fun projectee -> match projectee with | QExists _0 -> _0
(* Generated discriminators/projectors for term' (Refine .. ElimAnd) —
   kept byte-identical. *)
let (uu___is_Refine : term' -> Prims.bool) =
  fun projectee -> match projectee with | Refine _0 -> true | uu___ -> false
let (__proj__Refine__item___0 : term' -> (binder * term)) =
  fun projectee -> match projectee with | Refine _0 -> _0
let (uu___is_NamedTyp : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | NamedTyp _0 -> true | uu___ -> false
let (__proj__NamedTyp__item___0 : term' -> (FStar_Ident.ident * term)) =
  fun projectee -> match projectee with | NamedTyp _0 -> _0
let (uu___is_Paren : term' -> Prims.bool) =
  fun projectee -> match projectee with | Paren _0 -> true | uu___ -> false
let (__proj__Paren__item___0 : term' -> term) =
  fun projectee -> match projectee with | Paren _0 -> _0
let (uu___is_Requires : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Requires _0 -> true | uu___ -> false
let (__proj__Requires__item___0 :
  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =
  fun projectee -> match projectee with | Requires _0 -> _0
let (uu___is_Ensures : term' -> Prims.bool) =
  fun projectee -> match projectee with | Ensures _0 -> true | uu___ -> false
let (__proj__Ensures__item___0 :
  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =
  fun projectee -> match projectee with | Ensures _0 -> _0
let (uu___is_LexList : term' -> Prims.bool) =
  fun projectee -> match projectee with | LexList _0 -> true | uu___ -> false
let (__proj__LexList__item___0 : term' -> term Prims.list) =
  fun projectee -> match projectee with | LexList _0 -> _0
let (uu___is_WFOrder : term' -> Prims.bool) =
  fun projectee -> match projectee with | WFOrder _0 -> true | uu___ -> false
let (__proj__WFOrder__item___0 : term' -> (term * term)) =
  fun projectee -> match projectee with | WFOrder _0 -> _0
let (uu___is_Decreases : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Decreases _0 -> true | uu___ -> false
let (__proj__Decreases__item___0 :
  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =
  fun projectee -> match projectee with | Decreases _0 -> _0
let (uu___is_Labeled : term' -> Prims.bool) =
  fun projectee -> match projectee with | Labeled _0 -> true | uu___ -> false
let (__proj__Labeled__item___0 : term' -> (term * Prims.string * Prims.bool))
  = fun projectee -> match projectee with | Labeled _0 -> _0
let (uu___is_Discrim : term' -> Prims.bool) =
  fun projectee -> match projectee with | Discrim _0 -> true | uu___ -> false
let (__proj__Discrim__item___0 : term' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Discrim _0 -> _0
let (uu___is_Attributes : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Attributes _0 -> true | uu___ -> false
let (__proj__Attributes__item___0 : term' -> term Prims.list) =
  fun projectee -> match projectee with | Attributes _0 -> _0
let (uu___is_Antiquote : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | Antiquote _0 -> true | uu___ -> false
let (__proj__Antiquote__item___0 : term' -> term) =
  fun projectee -> match projectee with | Antiquote _0 -> _0
let (uu___is_Quote : term' -> Prims.bool) =
  fun projectee -> match projectee with | Quote _0 -> true | uu___ -> false
let (__proj__Quote__item___0 : term' -> (term * quote_kind)) =
  fun projectee -> match projectee with | Quote _0 -> _0
let (uu___is_VQuote : term' -> Prims.bool) =
  fun projectee -> match projectee with | VQuote _0 -> true | uu___ -> false
let (__proj__VQuote__item___0 : term' -> term) =
  fun projectee -> match projectee with | VQuote _0 -> _0
let (uu___is_CalcProof : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | CalcProof _0 -> true | uu___ -> false
let (__proj__CalcProof__item___0 :
  term' -> (term * term * calc_step Prims.list)) =
  fun projectee -> match projectee with | CalcProof _0 -> _0
let (uu___is_IntroForall : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | IntroForall _0 -> true | uu___ -> false
let (__proj__IntroForall__item___0 :
  term' -> (binder Prims.list * term * term)) =
  fun projectee -> match projectee with | IntroForall _0 -> _0
let (uu___is_IntroExists : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | IntroExists _0 -> true | uu___ -> false
let (__proj__IntroExists__item___0 :
  term' -> (binder Prims.list * term * term Prims.list * term)) =
  fun projectee -> match projectee with | IntroExists _0 -> _0
let (uu___is_IntroImplies : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | IntroImplies _0 -> true | uu___ -> false
let (__proj__IntroImplies__item___0 : term' -> (term * term * binder * term))
  = fun projectee -> match projectee with | IntroImplies _0 -> _0
let (uu___is_IntroOr : term' -> Prims.bool) =
  fun projectee -> match projectee with | IntroOr _0 -> true | uu___ -> false
let (__proj__IntroOr__item___0 : term' -> (Prims.bool * term * term * term))
  = fun projectee -> match projectee with | IntroOr _0 -> _0
let (uu___is_IntroAnd : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | IntroAnd _0 -> true | uu___ -> false
let (__proj__IntroAnd__item___0 : term' -> (term * term * term * term)) =
  fun projectee -> match projectee with | IntroAnd _0 -> _0
let (uu___is_ElimForall : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | ElimForall _0 -> true | uu___ -> false
let (__proj__ElimForall__item___0 :
  term' -> (binder Prims.list * term * term Prims.list)) =
  fun projectee -> match projectee with | ElimForall _0 -> _0
let (uu___is_ElimExists : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | ElimExists _0 -> true | uu___ -> false
let (__proj__ElimExists__item___0 :
  term' -> (binder Prims.list * term * term * binder * term)) =
  fun projectee -> match projectee with | ElimExists _0 -> _0
let (uu___is_ElimImplies : term' -> Prims.bool) =
  fun projectee ->
    match projectee with | ElimImplies _0 -> true | uu___ -> false
let (__proj__ElimImplies__item___0 : term' -> (term * term * term)) =
  fun projectee -> match projectee with | ElimImplies _0 -> _0
let (uu___is_ElimOr : term' -> Prims.bool) =
  fun projectee -> match projectee with | ElimOr _0 -> true | uu___ -> false
let (__proj__ElimOr__item___0 :
  term' -> (term * term * term * binder * term * binder * term)) =
  fun projectee -> match projectee with | ElimOr _0 -> _0
let (uu___is_ElimAnd : term' -> Prims.bool) =
  fun projectee -> match projectee with | ElimAnd _0 -> true | uu___ -> false
let (__proj__ElimAnd__item___0 :
  term' -> (term * term * term * binder * binder * term)) =
  fun projectee -> match projectee with | ElimAnd _0 -> _0
let (__proj__Mkterm__item__tm : term -> term') =
  fun projectee ->
    match projectee with | { tm; range; level = level1;_} -> tm
let (__proj__Mkterm__item__range : term -> FStar_Compiler_Range.range) =
  fun projectee ->
    match projectee with | { tm; range; level = level1;_} -> range
let (__proj__Mkterm__item__level : term -> level) =
  fun projectee ->
    match projectee with | { tm; range; level = level1;_} -> level1
let (uu___is_CalcStep : calc_step -> Prims.bool) = fun projectee -> true
let (__proj__CalcStep__item___0 : calc_step -> (term * term * term)) =
  fun projectee -> match projectee with | CalcStep _0 -> _0
(* Generated discriminators/projectors for [binder'] constructors and
   record projectors for the [binder] record
   ({ b; brange; blevel; aqual; battributes }). *)
let (uu___is_Variable : binder' -> Prims.bool) =
  fun projectee ->
    match projectee with | Variable _0 -> true | uu___ -> false
let (__proj__Variable__item___0 : binder' -> FStar_Ident.ident) =
  fun projectee -> match projectee with | Variable _0 -> _0
let (uu___is_TVariable : binder' -> Prims.bool) =
  fun projectee ->
    match projectee with | TVariable _0 -> true | uu___ -> false
let (__proj__TVariable__item___0 : binder' -> FStar_Ident.ident) =
  fun projectee -> match projectee with | TVariable _0 -> _0
let (uu___is_Annotated : binder' -> Prims.bool) =
  fun projectee ->
    match projectee with | Annotated _0 -> true | uu___ -> false
let (__proj__Annotated__item___0 : binder' -> (FStar_Ident.ident * term)) =
  fun projectee -> match projectee with | Annotated _0 -> _0
let (uu___is_TAnnotated : binder' -> Prims.bool) =
  fun projectee ->
    match projectee with | TAnnotated _0 -> true | uu___ -> false
let (__proj__TAnnotated__item___0 : binder' -> (FStar_Ident.ident * term)) =
  fun projectee -> match projectee with | TAnnotated _0 -> _0
let (uu___is_NoName : binder' -> Prims.bool) =
  fun projectee -> match projectee with | NoName _0 -> true | uu___ -> false
let (__proj__NoName__item___0 : binder' -> term) =
  fun projectee -> match projectee with | NoName _0 -> _0
let (__proj__Mkbinder__item__b : binder -> binder') =
  fun projectee ->
    match projectee with | { b; brange; blevel; aqual; battributes;_} -> b
let (__proj__Mkbinder__item__brange : binder -> FStar_Compiler_Range.range) =
  fun projectee ->
    match projectee with
    | { b; brange; blevel; aqual; battributes;_} -> brange
let (__proj__Mkbinder__item__blevel : binder -> level) =
  fun projectee ->
    match projectee with
    | { b; brange; blevel; aqual; battributes;_} -> blevel
let (__proj__Mkbinder__item__aqual :
  binder -> arg_qualifier FStar_Pervasives_Native.option) =
  fun projectee ->
    match projectee with
    | { b; brange; blevel; aqual; battributes;_} -> aqual
let (__proj__Mkbinder__item__battributes : binder -> term Prims.list) =
  fun projectee ->
    match projectee with
    | { b; brange; blevel; aqual; battributes;_} -> battributes
(* Generated discriminators/projectors for surface-syntax patterns
   ([pattern']) and record projectors for [pattern = { pat; prange }]. *)
let (uu___is_PatWild : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatWild _0 -> true | uu___ -> false
let (__proj__PatWild__item___0 :
  pattern' ->
    (arg_qualifier FStar_Pervasives_Native.option * term Prims.list))
  = fun projectee -> match projectee with | PatWild _0 -> _0
let (uu___is_PatConst : pattern' -> Prims.bool) =
  fun projectee ->
    match projectee with | PatConst _0 -> true | uu___ -> false
let (__proj__PatConst__item___0 : pattern' -> FStar_Const.sconst) =
  fun projectee -> match projectee with | PatConst _0 -> _0
let (uu___is_PatApp : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatApp _0 -> true | uu___ -> false
let (__proj__PatApp__item___0 : pattern' -> (pattern * pattern Prims.list)) =
  fun projectee -> match projectee with | PatApp _0 -> _0
let (uu___is_PatVar : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatVar _0 -> true | uu___ -> false
let (__proj__PatVar__item___0 :
  pattern' ->
    (FStar_Ident.ident * arg_qualifier FStar_Pervasives_Native.option * term
      Prims.list))
  = fun projectee -> match projectee with | PatVar _0 -> _0
let (uu___is_PatName : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatName _0 -> true | uu___ -> false
let (__proj__PatName__item___0 : pattern' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | PatName _0 -> _0
let (uu___is_PatTvar : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatTvar _0 -> true | uu___ -> false
let (__proj__PatTvar__item___0 :
  pattern' ->
    (FStar_Ident.ident * arg_qualifier FStar_Pervasives_Native.option * term
      Prims.list))
  = fun projectee -> match projectee with | PatTvar _0 -> _0
let (uu___is_PatList : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatList _0 -> true | uu___ -> false
let (__proj__PatList__item___0 : pattern' -> pattern Prims.list) =
  fun projectee -> match projectee with | PatList _0 -> _0
let (uu___is_PatTuple : pattern' -> Prims.bool) =
  fun projectee ->
    match projectee with | PatTuple _0 -> true | uu___ -> false
let (__proj__PatTuple__item___0 :
  pattern' -> (pattern Prims.list * Prims.bool)) =
  fun projectee -> match projectee with | PatTuple _0 -> _0
let (uu___is_PatRecord : pattern' -> Prims.bool) =
  fun projectee ->
    match projectee with | PatRecord _0 -> true | uu___ -> false
let (__proj__PatRecord__item___0 :
  pattern' -> (FStar_Ident.lid * pattern) Prims.list) =
  fun projectee -> match projectee with | PatRecord _0 -> _0
let (uu___is_PatAscribed : pattern' -> Prims.bool) =
  fun projectee ->
    match projectee with | PatAscribed _0 -> true | uu___ -> false
let (__proj__PatAscribed__item___0 :
  pattern' -> (pattern * (term * term FStar_Pervasives_Native.option))) =
  fun projectee -> match projectee with | PatAscribed _0 -> _0
let (uu___is_PatOr : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatOr _0 -> true | uu___ -> false
let (__proj__PatOr__item___0 : pattern' -> pattern Prims.list) =
  fun projectee -> match projectee with | PatOr _0 -> _0
let (uu___is_PatOp : pattern' -> Prims.bool) =
  fun projectee -> match projectee with | PatOp _0 -> true | uu___ -> false
let (__proj__PatOp__item___0 : pattern' -> FStar_Ident.ident) =
  fun projectee -> match projectee with | PatOp _0 -> _0
let (uu___is_PatVQuote : pattern' -> Prims.bool) =
  fun projectee ->
    match projectee with | PatVQuote _0 -> true | uu___ -> false
let (__proj__PatVQuote__item___0 : pattern' -> term) =
  fun projectee -> match projectee with | PatVQuote _0 -> _0
let (__proj__Mkpattern__item__pat : pattern -> pattern') =
  fun projectee -> match projectee with | { pat; prange;_} -> pat
let (__proj__Mkpattern__item__prange : pattern -> FStar_Compiler_Range.range)
  = fun projectee -> match projectee with | { pat; prange;_} -> prange
(* Generated discriminators for argument qualifiers ([arg_qualifier])
   and application/implicitness markers ([imp]). *)
let (uu___is_Implicit : arg_qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Implicit -> true | uu___ -> false
let (uu___is_Equality : arg_qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Equality -> true | uu___ -> false
let (uu___is_Meta : arg_qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Meta _0 -> true | uu___ -> false
let (__proj__Meta__item___0 : arg_qualifier -> term) =
  fun projectee -> match projectee with | Meta _0 -> _0
let (uu___is_TypeClassArg : arg_qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | TypeClassArg -> true | uu___ -> false
let (uu___is_FsTypApp : imp -> Prims.bool) =
  fun projectee -> match projectee with | FsTypApp -> true | uu___ -> false
let (uu___is_Hash : imp -> Prims.bool) =
  fun projectee -> match projectee with | Hash -> true | uu___ -> false
let (uu___is_UnivApp : imp -> Prims.bool) =
  fun projectee -> match projectee with | UnivApp -> true | uu___ -> false
let (uu___is_HashBrace : imp -> Prims.bool) =
  fun projectee ->
    match projectee with | HashBrace _0 -> true | uu___ -> false
let (__proj__HashBrace__item___0 : imp -> term) =
  fun projectee -> match projectee with | HashBrace _0 -> _0
let (uu___is_Infix : imp -> Prims.bool) =
  fun projectee -> match projectee with | Infix -> true | uu___ -> false
let (uu___is_Nothing : imp -> Prims.bool) =
  fun projectee -> match projectee with | Nothing -> true | uu___ -> false
(* Convenience type abbreviations used throughout the surface AST,
   followed by the payload carried by a datatype constructor
   declaration and its generated discriminators/projectors. *)
type match_returns_annotation =
  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)
type patterns = (FStar_Ident.ident Prims.list * term Prims.list Prims.list)
type attributes_ = term Prims.list
type branch = (pattern * term FStar_Pervasives_Native.option * term)
type aqual = arg_qualifier FStar_Pervasives_Native.option
(* [knd], [typ] and [expr] are all just [term]; the aliases document
   intent at use sites. *)
type knd = term
type typ = term
type expr = term
type tycon_record =
  (FStar_Ident.ident * aqual * attributes_ * term) Prims.list
type constructor_payload =
  | VpOfNotation of typ 
  | VpArbitrary of typ 
  | VpRecord of (tycon_record * typ FStar_Pervasives_Native.option) 
let (uu___is_VpOfNotation : constructor_payload -> Prims.bool) =
  fun projectee ->
    match projectee with | VpOfNotation _0 -> true | uu___ -> false
let (__proj__VpOfNotation__item___0 : constructor_payload -> typ) =
  fun projectee -> match projectee with | VpOfNotation _0 -> _0
let (uu___is_VpArbitrary : constructor_payload -> Prims.bool) =
  fun projectee ->
    match projectee with | VpArbitrary _0 -> true | uu___ -> false
let (__proj__VpArbitrary__item___0 : constructor_payload -> typ) =
  fun projectee -> match projectee with | VpArbitrary _0 -> _0
let (uu___is_VpRecord : constructor_payload -> Prims.bool) =
  fun projectee ->
    match projectee with | VpRecord _0 -> true | uu___ -> false
let (__proj__VpRecord__item___0 :
  constructor_payload -> (tycon_record * typ FStar_Pervasives_Native.option))
  = fun projectee -> match projectee with | VpRecord _0 -> _0
(* Surface type declarations: abstract, abbreviation, record, or
   variant — each carries the type's name, binders and optional kind;
   plus generated discriminators/projectors. *)
type tycon =
  | TyconAbstract of (FStar_Ident.ident * binder Prims.list * knd
  FStar_Pervasives_Native.option) 
  | TyconAbbrev of (FStar_Ident.ident * binder Prims.list * knd
  FStar_Pervasives_Native.option * term) 
  | TyconRecord of (FStar_Ident.ident * binder Prims.list * knd
  FStar_Pervasives_Native.option * attributes_ * tycon_record) 
  | TyconVariant of (FStar_Ident.ident * binder Prims.list * knd
  FStar_Pervasives_Native.option * (FStar_Ident.ident * constructor_payload
  FStar_Pervasives_Native.option * attributes_) Prims.list) 
let (uu___is_TyconAbstract : tycon -> Prims.bool) =
  fun projectee ->
    match projectee with | TyconAbstract _0 -> true | uu___ -> false
let (__proj__TyconAbstract__item___0 :
  tycon ->
    (FStar_Ident.ident * binder Prims.list * knd
      FStar_Pervasives_Native.option))
  = fun projectee -> match projectee with | TyconAbstract _0 -> _0
let (uu___is_TyconAbbrev : tycon -> Prims.bool) =
  fun projectee ->
    match projectee with | TyconAbbrev _0 -> true | uu___ -> false
let (__proj__TyconAbbrev__item___0 :
  tycon ->
    (FStar_Ident.ident * binder Prims.list * knd
      FStar_Pervasives_Native.option * term))
  = fun projectee -> match projectee with | TyconAbbrev _0 -> _0
let (uu___is_TyconRecord : tycon -> Prims.bool) =
  fun projectee ->
    match projectee with | TyconRecord _0 -> true | uu___ -> false
let (__proj__TyconRecord__item___0 :
  tycon ->
    (FStar_Ident.ident * binder Prims.list * knd
      FStar_Pervasives_Native.option * attributes_ * tycon_record))
  = fun projectee -> match projectee with | TyconRecord _0 -> _0
let (uu___is_TyconVariant : tycon -> Prims.bool) =
  fun projectee ->
    match projectee with | TyconVariant _0 -> true | uu___ -> false
let (__proj__TyconVariant__item___0 :
  tycon ->
    (FStar_Ident.ident * binder Prims.list * knd
      FStar_Pervasives_Native.option * (FStar_Ident.ident *
      constructor_payload FStar_Pervasives_Native.option * attributes_)
      Prims.list))
  = fun projectee -> match projectee with | TyconVariant _0 -> _0
(* Declaration qualifiers (all nullary constructors) and their
   generated discriminators. *)
type qualifier =
  | Private 
  | Noeq 
  | Unopteq 
  | Assumption 
  | DefaultEffect 
  | TotalEffect 
  | Effect_qual 
  | New 
  | Inline 
  | Visible 
  | Unfold_for_unification_and_vcgen 
  | Inline_for_extraction 
  | Irreducible 
  | NoExtract 
  | Reifiable 
  | Reflectable 
  | Opaque 
  | Logic 
let (uu___is_Private : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Private -> true | uu___ -> false
let (uu___is_Noeq : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Noeq -> true | uu___ -> false
let (uu___is_Unopteq : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Unopteq -> true | uu___ -> false
let (uu___is_Assumption : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Assumption -> true | uu___ -> false
let (uu___is_DefaultEffect : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | DefaultEffect -> true | uu___ -> false
let (uu___is_TotalEffect : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | TotalEffect -> true | uu___ -> false
let (uu___is_Effect_qual : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | Effect_qual -> true | uu___ -> false
let (uu___is_New : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | New -> true | uu___ -> false
let (uu___is_Inline : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Inline -> true | uu___ -> false
let (uu___is_Visible : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Visible -> true | uu___ -> false
let (uu___is_Unfold_for_unification_and_vcgen : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with
    | Unfold_for_unification_and_vcgen -> true
    | uu___ -> false
let (uu___is_Inline_for_extraction : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | Inline_for_extraction -> true | uu___ -> false
let (uu___is_Irreducible : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | Irreducible -> true | uu___ -> false
let (uu___is_NoExtract : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | NoExtract -> true | uu___ -> false
let (uu___is_Reifiable : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Reifiable -> true | uu___ -> false
let (uu___is_Reflectable : qualifier -> Prims.bool) =
  fun projectee ->
    match projectee with | Reflectable -> true | uu___ -> false
let (uu___is_Opaque : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Opaque -> true | uu___ -> false
let (uu___is_Logic : qualifier -> Prims.bool) =
  fun projectee -> match projectee with | Logic -> true | uu___ -> false
(* A decoration is either a qualifier or an attribute set attached to
   a declaration; [mk_decl] below sorts decorations into the two
   fields of the [decl] record. *)
type qualifiers = qualifier Prims.list
type decoration =
  | Qualifier of qualifier 
  | DeclAttributes of term Prims.list 
let (uu___is_Qualifier : decoration -> Prims.bool) =
  fun projectee ->
    match projectee with | Qualifier _0 -> true | uu___ -> false
let (__proj__Qualifier__item___0 : decoration -> qualifier) =
  fun projectee -> match projectee with | Qualifier _0 -> _0
let (uu___is_DeclAttributes : decoration -> Prims.bool) =
  fun projectee ->
    match projectee with | DeclAttributes _0 -> true | uu___ -> false
let (__proj__DeclAttributes__item___0 : decoration -> term Prims.list) =
  fun projectee -> match projectee with | DeclAttributes _0 -> _0
(* Effect lifts (sub-effect edges): the lift operation variants, the
   [lift] record giving source/destination effect names, and their
   generated discriminators/projectors. *)
type lift_op =
  | NonReifiableLift of term 
  | ReifiableLift of (term * term) 
  | LiftForFree of term 
let (uu___is_NonReifiableLift : lift_op -> Prims.bool) =
  fun projectee ->
    match projectee with | NonReifiableLift _0 -> true | uu___ -> false
let (__proj__NonReifiableLift__item___0 : lift_op -> term) =
  fun projectee -> match projectee with | NonReifiableLift _0 -> _0
let (uu___is_ReifiableLift : lift_op -> Prims.bool) =
  fun projectee ->
    match projectee with | ReifiableLift _0 -> true | uu___ -> false
let (__proj__ReifiableLift__item___0 : lift_op -> (term * term)) =
  fun projectee -> match projectee with | ReifiableLift _0 -> _0
let (uu___is_LiftForFree : lift_op -> Prims.bool) =
  fun projectee ->
    match projectee with | LiftForFree _0 -> true | uu___ -> false
let (__proj__LiftForFree__item___0 : lift_op -> term) =
  fun projectee -> match projectee with | LiftForFree _0 -> _0
type lift =
  {
  msource: FStar_Ident.lid ;
  mdest: FStar_Ident.lid ;
  lift_op: lift_op ;
  braced: Prims.bool }
let (__proj__Mklift__item__msource : lift -> FStar_Ident.lid) =
  fun projectee ->
    match projectee with
    | { msource; mdest; lift_op = lift_op1; braced;_} -> msource
let (__proj__Mklift__item__mdest : lift -> FStar_Ident.lid) =
  fun projectee ->
    match projectee with
    | { msource; mdest; lift_op = lift_op1; braced;_} -> mdest
let (__proj__Mklift__item__lift_op : lift -> lift_op) =
  fun projectee ->
    match projectee with
    | { msource; mdest; lift_op = lift_op1; braced;_} -> lift_op1
let (__proj__Mklift__item__braced : lift -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { msource; mdest; lift_op = lift_op1; braced;_} -> braced
(* Compiler pragmas (e.g. #set-options / #reset-options) and their
   generated discriminators/projectors. *)
type pragma =
  | SetOptions of Prims.string 
  | ResetOptions of Prims.string FStar_Pervasives_Native.option 
  | PushOptions of Prims.string FStar_Pervasives_Native.option 
  | PopOptions 
  | RestartSolver 
  | PrintEffectsGraph 
let (uu___is_SetOptions : pragma -> Prims.bool) =
  fun projectee ->
    match projectee with | SetOptions _0 -> true | uu___ -> false
let (__proj__SetOptions__item___0 : pragma -> Prims.string) =
  fun projectee -> match projectee with | SetOptions _0 -> _0
let (uu___is_ResetOptions : pragma -> Prims.bool) =
  fun projectee ->
    match projectee with | ResetOptions _0 -> true | uu___ -> false
let (__proj__ResetOptions__item___0 :
  pragma -> Prims.string FStar_Pervasives_Native.option) =
  fun projectee -> match projectee with | ResetOptions _0 -> _0
let (uu___is_PushOptions : pragma -> Prims.bool) =
  fun projectee ->
    match projectee with | PushOptions _0 -> true | uu___ -> false
let (__proj__PushOptions__item___0 :
  pragma -> Prims.string FStar_Pervasives_Native.option) =
  fun projectee -> match projectee with | PushOptions _0 -> _0
let (uu___is_PopOptions : pragma -> Prims.bool) =
  fun projectee -> match projectee with | PopOptions -> true | uu___ -> false
let (uu___is_RestartSolver : pragma -> Prims.bool) =
  fun projectee ->
    match projectee with | RestartSolver -> true | uu___ -> false
let (uu___is_PrintEffectsGraph : pragma -> Prims.bool) =
  fun projectee ->
    match projectee with | PrintEffectsGraph -> true | uu___ -> false
(* Top-level declarations: the raw declaration variants ([decl']),
   the decorated declaration record ([decl], adding range, qualifiers
   and attributes), and effect declarations (mutually recursive with
   [decl] because DefineEffect embeds a list of declarations). *)
type decl' =
  | TopLevelModule of FStar_Ident.lid 
  | Open of FStar_Ident.lid 
  | Friend of FStar_Ident.lid 
  | Include of FStar_Ident.lid 
  | ModuleAbbrev of (FStar_Ident.ident * FStar_Ident.lid) 
  | TopLevelLet of (let_qualifier * (pattern * term) Prims.list) 
  | Tycon of (Prims.bool * Prims.bool * tycon Prims.list) 
  | Val of (FStar_Ident.ident * term) 
  | Exception of (FStar_Ident.ident * term FStar_Pervasives_Native.option) 
  | NewEffect of effect_decl 
  | LayeredEffect of effect_decl 
  | SubEffect of lift 
  | Polymonadic_bind of (FStar_Ident.lid * FStar_Ident.lid * FStar_Ident.lid
  * term) 
  | Polymonadic_subcomp of (FStar_Ident.lid * FStar_Ident.lid * term) 
  | Pragma of pragma 
  | Assume of (FStar_Ident.ident * term) 
  | Splice of (FStar_Ident.ident Prims.list * term) 
and decl =
  {
  d: decl' ;
  drange: FStar_Compiler_Range.range ;
  quals: qualifiers ;
  attrs: attributes_ }
and effect_decl =
  | DefineEffect of (FStar_Ident.ident * binder Prims.list * term * decl
  Prims.list) 
  | RedefineEffect of (FStar_Ident.ident * binder Prims.list * term) 
(* Generated discriminators/projectors for each [decl'] constructor. *)
let (uu___is_TopLevelModule : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | TopLevelModule _0 -> true | uu___ -> false
let (__proj__TopLevelModule__item___0 : decl' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | TopLevelModule _0 -> _0
let (uu___is_Open : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Open _0 -> true | uu___ -> false
let (__proj__Open__item___0 : decl' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Open _0 -> _0
let (uu___is_Friend : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Friend _0 -> true | uu___ -> false
let (__proj__Friend__item___0 : decl' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Friend _0 -> _0
let (uu___is_Include : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Include _0 -> true | uu___ -> false
let (__proj__Include__item___0 : decl' -> FStar_Ident.lid) =
  fun projectee -> match projectee with | Include _0 -> _0
let (uu___is_ModuleAbbrev : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | ModuleAbbrev _0 -> true | uu___ -> false
let (__proj__ModuleAbbrev__item___0 :
  decl' -> (FStar_Ident.ident * FStar_Ident.lid)) =
  fun projectee -> match projectee with | ModuleAbbrev _0 -> _0
let (uu___is_TopLevelLet : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | TopLevelLet _0 -> true | uu___ -> false
let (__proj__TopLevelLet__item___0 :
  decl' -> (let_qualifier * (pattern * term) Prims.list)) =
  fun projectee -> match projectee with | TopLevelLet _0 -> _0
let (uu___is_Tycon : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Tycon _0 -> true | uu___ -> false
let (__proj__Tycon__item___0 :
  decl' -> (Prims.bool * Prims.bool * tycon Prims.list)) =
  fun projectee -> match projectee with | Tycon _0 -> _0
let (uu___is_Val : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Val _0 -> true | uu___ -> false
let (__proj__Val__item___0 : decl' -> (FStar_Ident.ident * term)) =
  fun projectee -> match projectee with | Val _0 -> _0
let (uu___is_Exception : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | Exception _0 -> true | uu___ -> false
let (__proj__Exception__item___0 :
  decl' -> (FStar_Ident.ident * term FStar_Pervasives_Native.option)) =
  fun projectee -> match projectee with | Exception _0 -> _0
let (uu___is_NewEffect : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | NewEffect _0 -> true | uu___ -> false
let (__proj__NewEffect__item___0 : decl' -> effect_decl) =
  fun projectee -> match projectee with | NewEffect _0 -> _0
let (uu___is_LayeredEffect : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | LayeredEffect _0 -> true | uu___ -> false
let (__proj__LayeredEffect__item___0 : decl' -> effect_decl) =
  fun projectee -> match projectee with | LayeredEffect _0 -> _0
let (uu___is_SubEffect : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | SubEffect _0 -> true | uu___ -> false
let (__proj__SubEffect__item___0 : decl' -> lift) =
  fun projectee -> match projectee with | SubEffect _0 -> _0
let (uu___is_Polymonadic_bind : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | Polymonadic_bind _0 -> true | uu___ -> false
let (__proj__Polymonadic_bind__item___0 :
  decl' -> (FStar_Ident.lid * FStar_Ident.lid * FStar_Ident.lid * term)) =
  fun projectee -> match projectee with | Polymonadic_bind _0 -> _0
let (uu___is_Polymonadic_subcomp : decl' -> Prims.bool) =
  fun projectee ->
    match projectee with | Polymonadic_subcomp _0 -> true | uu___ -> false
let (__proj__Polymonadic_subcomp__item___0 :
  decl' -> (FStar_Ident.lid * FStar_Ident.lid * term)) =
  fun projectee -> match projectee with | Polymonadic_subcomp _0 -> _0
let (uu___is_Pragma : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Pragma _0 -> true | uu___ -> false
let (__proj__Pragma__item___0 : decl' -> pragma) =
  fun projectee -> match projectee with | Pragma _0 -> _0
let (uu___is_Assume : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Assume _0 -> true | uu___ -> false
let (__proj__Assume__item___0 : decl' -> (FStar_Ident.ident * term)) =
  fun projectee -> match projectee with | Assume _0 -> _0
let (uu___is_Splice : decl' -> Prims.bool) =
  fun projectee -> match projectee with | Splice _0 -> true | uu___ -> false
let (__proj__Splice__item___0 :
  decl' -> (FStar_Ident.ident Prims.list * term)) =
  fun projectee -> match projectee with | Splice _0 -> _0
(* Record projectors for [decl] and generated
   discriminators/projectors for [effect_decl]. *)
let (__proj__Mkdecl__item__d : decl -> decl') =
  fun projectee -> match projectee with | { d; drange; quals; attrs;_} -> d
let (__proj__Mkdecl__item__drange : decl -> FStar_Compiler_Range.range) =
  fun projectee ->
    match projectee with | { d; drange; quals; attrs;_} -> drange
let (__proj__Mkdecl__item__quals : decl -> qualifiers) =
  fun projectee ->
    match projectee with | { d; drange; quals; attrs;_} -> quals
let (__proj__Mkdecl__item__attrs : decl -> attributes_) =
  fun projectee ->
    match projectee with | { d; drange; quals; attrs;_} -> attrs
let (uu___is_DefineEffect : effect_decl -> Prims.bool) =
  fun projectee ->
    match projectee with | DefineEffect _0 -> true | uu___ -> false
let (__proj__DefineEffect__item___0 :
  effect_decl ->
    (FStar_Ident.ident * binder Prims.list * term * decl Prims.list))
  = fun projectee -> match projectee with | DefineEffect _0 -> _0
let (uu___is_RedefineEffect : effect_decl -> Prims.bool) =
  fun projectee ->
    match projectee with | RedefineEffect _0 -> true | uu___ -> false
let (__proj__RedefineEffect__item___0 :
  effect_decl -> (FStar_Ident.ident * binder Prims.list * term)) =
  fun projectee -> match projectee with | RedefineEffect _0 -> _0
(* A compilation unit is either a module (implementation) or an
   interface; [inputFragment] is what the parser produces for a file
   or an interactive fragment. *)
type modul =
  | Module of (FStar_Ident.lid * decl Prims.list) 
  | Interface of (FStar_Ident.lid * decl Prims.list * Prims.bool) 
let (uu___is_Module : modul -> Prims.bool) =
  fun projectee -> match projectee with | Module _0 -> true | uu___ -> false
let (__proj__Module__item___0 : modul -> (FStar_Ident.lid * decl Prims.list))
  = fun projectee -> match projectee with | Module _0 -> _0
let (uu___is_Interface : modul -> Prims.bool) =
  fun projectee ->
    match projectee with | Interface _0 -> true | uu___ -> false
let (__proj__Interface__item___0 :
  modul -> (FStar_Ident.lid * decl Prims.list * Prims.bool)) =
  fun projectee -> match projectee with | Interface _0 -> _0
type file = modul
type inputFragment = (file, decl Prims.list) FStar_Pervasives.either
(* Accessor: the source range attached to a declaration. *)
let (decl_drange : decl -> FStar_Compiler_Range.range) = fun d -> d.drange
(* Validate that [id] begins with a lower-case character; raises a
   fatal [Fatal_InvalidIdentifier] error at the identifier's range
   otherwise. Used for names that must be lowercase (e.g. values). *)
let (check_id : FStar_Ident.ident -> unit) =
  fun id ->
    (* Take the first character of the identifier's string form. *)
    let first_char =
      let uu___ = FStar_Ident.string_of_id id in
      FStar_String.substring uu___ Prims.int_zero Prims.int_one in
    (* Lower-casing is a no-op exactly when the char is already
       lower-case (or caseless). *)
    if (FStar_String.lowercase first_char) = first_char
    then ()
    else
      (let uu___1 =
         let uu___2 =
           let uu___3 = FStar_Ident.string_of_id id in
           FStar_Compiler_Util.format1
             "Invalid identifer '%s'; expected a symbol that begins with a lower-case character"
             uu___3 in
         (FStar_Errors_Codes.Fatal_InvalidIdentifier, uu___2) in
       let uu___2 = FStar_Ident.range_of_id id in
       FStar_Errors.raise_error uu___1 uu___2)
(* Return the single element of [l] (if any); raise a fatal error at
   range [r] when [l] contains more than one element. [s] names the
   kind of item, used only in the error message. *)
let at_most_one :
  'uuuuu .
    Prims.string ->
      FStar_Compiler_Range.range ->
        'uuuuu Prims.list -> 'uuuuu FStar_Pervasives_Native.option
  =
  fun s r l ->
    match l with
    | [] -> FStar_Pervasives_Native.None
    | [only] -> FStar_Pervasives_Native.Some only
    | _ ->
        let msg =
          FStar_Compiler_Util.format1
            "At most one %s is allowed on declarations" s in
        FStar_Errors.raise_error
          (FStar_Errors_Codes.Fatal_MoreThanOneDeclaration, msg) r
(* Assemble a [decl] record from a raw declaration [d], its range [r]
   and the decorations attached by the parser: at most one attribute
   set is allowed (enforced via [at_most_one], defaulting to []);
   every [Qualifier] decoration is collected into [quals]. *)
let (mk_decl :
  decl' -> FStar_Compiler_Range.range -> decoration Prims.list -> decl) =
  fun d ->
    fun r ->
      fun decorations ->
        let attributes_1 =
          let uu___ =
            FStar_Compiler_List.choose
              (fun uu___1 ->
                 match uu___1 with
                 | DeclAttributes a -> FStar_Pervasives_Native.Some a
                 | uu___2 -> FStar_Pervasives_Native.None) decorations in
          at_most_one "attribute set" r uu___ in
        let attributes_2 = FStar_Compiler_Util.dflt [] attributes_1 in
        let qualifiers1 =
          FStar_Compiler_List.choose
            (fun uu___ ->
               match uu___ with
               | Qualifier q -> FStar_Pervasives_Native.Some q
               | uu___1 -> FStar_Pervasives_Native.None) decorations in
        { d; drange = r; quals = qualifiers1; attrs = attributes_2 }
(* Build a [binder] record from its components, attaching the given
   attribute terms. *)
let (mk_binder_with_attrs :
  binder' ->
    FStar_Compiler_Range.range ->
      level ->
        arg_qualifier FStar_Pervasives_Native.option ->
          term Prims.list -> binder)
  =
  fun b r l i attrs ->
    { b; brange = r; blevel = l; aqual = i; battributes = attrs }
(* Build a [binder] with no attributes. *)
let (mk_binder :
  binder' ->
    FStar_Compiler_Range.range ->
      level -> arg_qualifier FStar_Pervasives_Native.option -> binder)
  = fun b r l i -> mk_binder_with_attrs b r l i []
(* Wrap a raw [term'] with its range and parsing level. *)
let (mk_term : term' -> FStar_Compiler_Range.range -> level -> term) =
  fun t r l -> { tm = t; range = r; level = l }
(* Build a unary minus applied to [t]. When [t] is a signed integer
   literal, the minus is folded directly into the literal by
   prefixing "-" to its digit string; otherwise an [Op] node for "-"
   (with the minus sign's own range [rminus]) is constructed. *)
let (mk_uminus :
  term ->
    FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> level -> term)
  =
  fun t ->
    fun rminus ->
      fun r ->
        fun l ->
          let t1 =
            match t.tm with
            | Const (FStar_Const.Const_int
                (s, FStar_Pervasives_Native.Some (FStar_Const.Signed, width)))
                ->
                Const
                  (FStar_Const.Const_int
                     ((Prims.op_Hat "-" s),
                       (FStar_Pervasives_Native.Some
                          (FStar_Const.Signed, width))))
            | uu___ ->
                let uu___1 =
                  let uu___2 = FStar_Ident.mk_ident ("-", rminus) in
                  (uu___2, [t]) in
                Op uu___1 in
          mk_term t1 r l
(* Wrap a raw [pattern'] with its source range. *)
let (mk_pattern : pattern' -> FStar_Compiler_Range.range -> pattern) =
  fun p r -> { pat = p; prange = r }
(* Flatten nested lambdas: if [body] is itself an [Abs], prepend [ps]
   to its patterns; otherwise build a fresh [Abs] over [ps]. *)
let (un_curry_abs : pattern Prims.list -> term -> term') =
  fun ps body ->
    match body.tm with
    | Abs (inner_ps, inner_body) ->
        Abs (FStar_Compiler_List.op_At ps inner_ps, inner_body)
    | _ -> Abs (ps, body)
(* Desugar [function branches] into [fun x -> match x with branches],
   where [x] is a fresh identifier generated at [r1]. [r1] is used
   for the generated binder and scrutinee, [r2] for the resulting
   match and abstraction. *)
let (mk_function :
  (pattern * term FStar_Pervasives_Native.option * term) Prims.list ->
    FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> term)
  =
  fun branches ->
    fun r1 ->
      fun r2 ->
        let x = FStar_Ident.gen r1 in
        let uu___ =
          let uu___1 =
            let uu___2 =
              let uu___3 =
                let uu___4 =
                  let uu___5 =
                    let uu___6 =
                      let uu___7 = FStar_Ident.lid_of_ids [x] in Var uu___7 in
                    mk_term uu___6 r1 Expr in
                  (uu___5, FStar_Pervasives_Native.None,
                    FStar_Pervasives_Native.None, branches) in
                Match uu___4 in
              mk_term uu___3 r2 Expr in
            ([mk_pattern (PatVar (x, FStar_Pervasives_Native.None, [])) r1],
              uu___2) in
          Abs uu___1 in
        mk_term uu___ r2 Expr
(* If [p] is a variable pattern bound to a lambda [tm = Abs (pats,
   body)], return the application pattern [p pats] paired with the
   lambda's body; [None] for any other shape. *)
let (un_function :
  pattern -> term -> (pattern * term) FStar_Pervasives_Native.option) =
  fun p tm ->
    match p.pat, tm.tm with
    | PatVar _, Abs (pats, body) ->
        let applied = mk_pattern (PatApp (p, pats)) p.prange in
        FStar_Pervasives_Native.Some (applied, body)
    | _ -> FStar_Pervasives_Native.None
(* Rebuild [lid] from its path so it carries the range [r]. *)
let (lid_with_range :
  FStar_Ident.lident -> FStar_Compiler_Range.range -> FStar_Ident.lident) =
  fun lid r ->
    let path = FStar_Ident.path_of_lid lid in
    FStar_Ident.lid_of_path path r
(* Build the list-cons pattern [Cons hd tl] at range [r]. *)
let (consPat : FStar_Compiler_Range.range -> pattern -> pattern -> pattern')
  =
  fun r hd tl ->
    let cons_head = mk_pattern (PatName FStar_Parser_Const.cons_lid) r in
    PatApp (cons_head, [hd; tl])
(* Build the list-cons term [Cons hd tl] (both arguments explicit)
   at range [r]. *)
let (consTerm : FStar_Compiler_Range.range -> term -> term -> term) =
  fun r hd tl ->
    let args = [(hd, Nothing); (tl, Nothing)] in
    mk_term (Construct (FStar_Parser_Const.cons_lid, args)) r Expr
(* Build the literal list term [e1 :: e2 :: ... :: []] from [elts]. *)
let (mkConsList : FStar_Compiler_Range.range -> term Prims.list -> term) =
  fun r elts ->
    let nil = mk_term (Construct (FStar_Parser_Const.nil_lid, [])) r Expr in
    FStar_Compiler_List.fold_right (consTerm r) elts nil
(* The unit literal [()] as an expression-level term at range [r]. *)
let (unit_const : FStar_Compiler_Range.range -> term) =
  fun r ->
    let u = Const FStar_Const.Const_unit in
    mk_term u r Expr
(* Wrap [t] in the [ML] effect: builds the application [ML t]. *)
let (ml_comp : term -> term) =
  fun t ->
    let lid = FStar_Parser_Const.effect_ML_lid () in
    let ml = mk_term (Name lid) t.range Expr in
    mk_term (App (ml, t, Nothing)) t.range Expr
(* Wrap [t] in the [Tot] effect: builds the application [Tot t]. *)
let (tot_comp : term -> term) =
  fun t ->
    let tot = mk_term (Name FStar_Parser_Const.effect_Tot_lid) t.range Expr in
    mk_term (App (tot, t, Nothing)) t.range Expr
(* Apply [t] to [args].  A bare constructor name becomes a [Construct]
   node; anything else is curried via left-nested [App] nodes.  With no
   arguments, [t] is returned untouched. *)
let (mkApp :
  term -> (term * imp) Prims.list -> FStar_Compiler_Range.range -> term) =
  fun t args r ->
    match args with
    | [] -> t
    | _ ->
        (match t.tm with
         | Name s -> mk_term (Construct (s, args)) r Un
         | _ ->
             let apply acc (a, i) = mk_term (App (acc, a, i)) r Un in
             FStar_Compiler_List.fold_left apply t args)
(* Build the reference-set term
   [union (singleton (addr_of e1)) (union ... empty)] over [elts],
   using the heap/set combinators from [FStar_Parser_Const]. *)
let (mkRefSet : FStar_Compiler_Range.range -> term Prims.list -> term) =
  fun r elts ->
    (* A [Var] term for [lid], with its range rewritten to [r]. *)
    let var_of lid =
      mk_term (Var (FStar_Ident.set_lid_range lid r)) r Expr in
    let empty = var_of FStar_Parser_Const.set_empty in
    let singleton = var_of FStar_Parser_Const.set_singleton in
    let union = var_of FStar_Parser_Const.set_union in
    let addr_of = var_of FStar_Parser_Const.heap_addr_of_lid in
    let add_one e acc =
      let addr = mkApp addr_of [(e, Nothing)] r in
      let single = mkApp singleton [(addr, Nothing)] r in
      mkApp union [(single, Nothing); (acc, Nothing)] r in
    FStar_Compiler_List.fold_right add_one elts empty
(* Like [mkApp], but every argument is explicit ([Nothing] implicitness). *)
let (mkExplicitApp :
  term -> term Prims.list -> FStar_Compiler_Range.range -> term) =
  fun t args r ->
    match args with
    | [] -> t
    | _ ->
        (match t.tm with
         | Name s ->
             let iargs =
               FStar_Compiler_List.map (fun a -> (a, Nothing)) args in
             mk_term (Construct (s, iargs)) r Un
         | _ ->
             FStar_Compiler_List.fold_left
               (fun acc a -> mk_term (App (acc, a, Nothing)) r Un) t args)
(* The term [admit (); magic ()] at range [r] — used to stub out branches
   and definitions that were filtered away. *)
let (mkAdmitMagic : FStar_Compiler_Range.range -> term) =
  fun r ->
    (* Application [f ()] where [f] names [lid] at range [r]. *)
    let call lid =
      let f = mk_term (Var (FStar_Ident.set_lid_range lid r)) r Expr in
      mkExplicitApp f [unit_const r] r in
    let admit = call FStar_Parser_Const.admit_lid in
    let magic = call FStar_Parser_Const.magic_lid in
    mk_term (Seq (admit, magic)) r Expr
(* A catch-all match branch [_ -> admit (); magic ()] at range [r]. *)
let mkWildAdmitMagic :
  'uuuuu .
    FStar_Compiler_Range.range ->
      (pattern * 'uuuuu FStar_Pervasives_Native.option * term)
  =
  fun r ->
    let wild = mk_pattern (PatWild (FStar_Pervasives_Native.None, [])) r in
    (wild, FStar_Pervasives_Native.None, (mkAdmitMagic r))
(* When some branches are marked focused ([true]), keep only those and
   append a wildcard [admit (); magic ()] branch, warning the user;
   otherwise keep all branches.  The focus flags are stripped either way. *)
let focusBranches :
  'uuuuu .
    (Prims.bool * (pattern * 'uuuuu FStar_Pervasives_Native.option * term))
      Prims.list ->
      FStar_Compiler_Range.range ->
        (pattern * 'uuuuu FStar_Pervasives_Native.option * term) Prims.list
  =
  fun branches r ->
    let should_filter =
      FStar_Compiler_Util.for_some FStar_Pervasives_Native.fst branches in
    if not should_filter
    then FStar_Compiler_List.map FStar_Pervasives_Native.snd branches
    else
      (FStar_Errors.log_issue r
         (FStar_Errors_Codes.Warning_Filtered,
           "Focusing on only some cases");
       let focussed =
         FStar_Compiler_List.map FStar_Pervasives_Native.snd
           (FStar_Compiler_List.filter FStar_Pervasives_Native.fst branches) in
       (* The trailing wildcard branch keeps the match total. *)
       FStar_Compiler_List.op_At focussed [mkWildAdmitMagic r])
(* When some let bindings are marked focused, keep those bodies and stub
   the others out with [admit (); magic ()], warning the user; otherwise
   keep every binding.  The focus flags are stripped either way. *)
let focusLetBindings :
  'uuuuu .
    (Prims.bool * ('uuuuu * term)) Prims.list ->
      FStar_Compiler_Range.range -> ('uuuuu * term) Prims.list
  =
  fun lbs r ->
    let should_filter =
      FStar_Compiler_Util.for_some FStar_Pervasives_Native.fst lbs in
    if not should_filter
    then FStar_Compiler_List.map FStar_Pervasives_Native.snd lbs
    else
      (FStar_Errors.log_issue r
         (FStar_Errors_Codes.Warning_Filtered,
           "Focusing on only some cases in this (mutually) recursive definition");
       FStar_Compiler_List.map
         (fun (focused, lb) ->
            if focused
            then lb
            else ((FStar_Pervasives_Native.fst lb), (mkAdmitMagic r))) lbs)
(* Same as [focusLetBindings], but each binding also carries an attribute
   payload which is preserved unchanged. *)
let focusAttrLetBindings :
  'uuuuu 'uuuuu1 .
    ('uuuuu * (Prims.bool * ('uuuuu1 * term))) Prims.list ->
      FStar_Compiler_Range.range -> ('uuuuu * ('uuuuu1 * term)) Prims.list
  =
  fun lbs r ->
    let should_filter =
      FStar_Compiler_Util.for_some
        (fun (_, (focus, _)) -> focus) lbs in
    if not should_filter
    then FStar_Compiler_List.map (fun (attr, (_, lb)) -> (attr, lb)) lbs
    else
      (FStar_Errors.log_issue r
         (FStar_Errors_Codes.Warning_Filtered,
           "Focusing on only some cases in this (mutually) recursive definition");
       FStar_Compiler_List.map
         (fun (attr, (focused, lb)) ->
            if focused
            then (attr, lb)
            else
              (attr, ((FStar_Pervasives_Native.fst lb), (mkAdmitMagic r))))
         lbs)
(* Apply [t] to [args] as F#-style type applications ([FsTypApp]). *)
let (mkFsTypApp :
  term -> term Prims.list -> FStar_Compiler_Range.range -> term) =
  fun t args r ->
    mkApp t (FStar_Compiler_List.map (fun a -> (a, FsTypApp)) args) r
(* The tuple term [(a1, ..., an)], via the n-ary tuple data constructor. *)
let (mkTuple : term Prims.list -> FStar_Compiler_Range.range -> term) =
  fun args r ->
    let n = FStar_Compiler_List.length args in
    let cons = FStar_Parser_Const.mk_tuple_data_lid n r in
    let head = mk_term (Name cons) r Expr in
    mkApp head (FStar_Compiler_List.map (fun x -> (x, Nothing)) args) r
(* The dependent-tuple term [(| a1, ..., an |)]. *)
let (mkDTuple : term Prims.list -> FStar_Compiler_Range.range -> term) =
  fun args r ->
    let n = FStar_Compiler_List.length args in
    let cons = FStar_Parser_Const.mk_dtuple_data_lid n r in
    let head = mk_term (Name cons) r Expr in
    mkApp head (FStar_Compiler_List.map (fun x -> (x, Nothing)) args) r
(* [mkRefinedBinder id t should_bind_var refopt m implicit attrs] builds
   the annotated binder [id : t], refined by [phi] when
   [refopt = Some phi].  When [should_bind_var] is true the refinement
   binds [id] itself; otherwise it is attached to a fresh generated
   variable, so [phi] does not capture [id]. *)
let (mkRefinedBinder :
  FStar_Ident.ident ->
    term ->
      Prims.bool ->
        term FStar_Pervasives_Native.option ->
          FStar_Compiler_Range.range ->
            arg_qualifier FStar_Pervasives_Native.option ->
              term Prims.list -> binder)
  =
  fun id ->
    fun t ->
      fun should_bind_var ->
        fun refopt ->
          fun m ->
            fun implicit ->
              fun attrs ->
                (* The plain annotated binder [id : t]; reused as the
                   refined binder's inner binder when [id] is bound. *)
                let b =
                  mk_binder_with_attrs (Annotated (id, t)) m Type_level
                    implicit attrs in
                match refopt with
                | FStar_Pervasives_Native.None -> b
                | FStar_Pervasives_Native.Some phi ->
                    if should_bind_var
                    then
                      (* [id : (id : t { phi })] — phi may mention id. *)
                      mk_binder_with_attrs
                        (Annotated
                           (id, (mk_term (Refine (b, phi)) m Type_level))) m
                        Type_level implicit attrs
                    else
                      (* [id : (x : t { phi })] for fresh x — phi is
                         closed with respect to id. *)
                      (let x = FStar_Ident.gen t.range in
                       let b1 =
                         mk_binder_with_attrs (Annotated (x, t)) m Type_level
                           implicit attrs in
                       mk_binder_with_attrs
                         (Annotated
                            (id, (mk_term (Refine (b1, phi)) m Type_level)))
                         m Type_level implicit attrs)
(* [mkRefinedPattern pat t should_bind_pat phi_opt t_range range] ascribes
   [pat] with the type [t], refined by [phi] when [phi_opt = Some phi].
   If the pattern is a simple variable the refinement binds it directly;
   for a non-variable pattern the refinement is rephrased as a match on a
   fresh variable ([pat -> phi | _ -> False]) so that [phi] can still
   refer to the pattern's bound names. *)
let (mkRefinedPattern :
  pattern ->
    term ->
      Prims.bool ->
        term FStar_Pervasives_Native.option ->
          FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> pattern)
  =
  fun pat ->
    fun t ->
      fun should_bind_pat ->
        fun phi_opt ->
          fun t_range ->
            fun range ->
              (* [t1] is the (possibly refined) type used in the final
                 ascription. *)
              let t1 =
                match phi_opt with
                | FStar_Pervasives_Native.None -> t
                | FStar_Pervasives_Native.Some phi ->
                    if should_bind_pat
                    then
                      (match pat.pat with
                       | PatVar (x, uu___, attrs) ->
                           (* Variable pattern: [x : t { phi }]. *)
                           mk_term
                             (Refine
                                ((mk_binder_with_attrs (Annotated (x, t))
                                    t_range Type_level
                                    FStar_Pervasives_Native.None attrs), phi))
                             range Type_level
                       | uu___ ->
                           (* Non-variable pattern: refine a fresh [x] with
                              [match x with pat -> phi | _ -> False]. *)
                           let x = FStar_Ident.gen t_range in
                           let phi1 =
                             let x_var =
                               let uu___1 =
                                 let uu___2 = FStar_Ident.lid_of_ids [x] in
                                 Var uu___2 in
                               mk_term uu___1 phi.range Formula in
                             let pat_branch =
                               (pat, FStar_Pervasives_Native.None, phi) in
                             (* Catch-all branch: refinement is [False]
                                when the scrutinee does not match [pat]. *)
                             let otherwise_branch =
                               let uu___1 =
                                 let uu___2 =
                                   let uu___3 =
                                     FStar_Ident.lid_of_path ["False"]
                                       phi.range in
                                   Name uu___3 in
                                 mk_term uu___2 phi.range Formula in
                               ((mk_pattern
                                   (PatWild
                                      (FStar_Pervasives_Native.None, []))
                                   phi.range), FStar_Pervasives_Native.None,
                                 uu___1) in
                             mk_term
                               (Match
                                  (x_var, FStar_Pervasives_Native.None,
                                    FStar_Pervasives_Native.None,
                                    [pat_branch; otherwise_branch]))
                               phi.range Formula in
                           mk_term
                             (Refine
                                ((mk_binder (Annotated (x, t)) t_range
                                    Type_level FStar_Pervasives_Native.None),
                                  phi1)) range Type_level)
                    else
                      (* Refinement over a fresh variable: [phi] does not
                         mention the pattern's names. *)
                      (let x = FStar_Ident.gen t.range in
                       mk_term
                         (Refine
                            ((mk_binder (Annotated (x, t)) t_range Type_level
                                FStar_Pervasives_Native.None), phi)) range
                         Type_level) in
              mk_pattern
                (PatAscribed (pat, (t1, FStar_Pervasives_Native.None))) range
(* Decompose a (possibly parenthesised) named type: [x:t] yields
   [(x, t, None)], [x:t{phi}] yields [(x, t, Some phi)]. *)
let rec (extract_named_refinement :
  term ->
    (FStar_Ident.ident * term * term FStar_Pervasives_Native.option)
      FStar_Pervasives_Native.option)
  =
  fun t1 ->
    match t1.tm with
    | NamedTyp (x, t) ->
        FStar_Pervasives_Native.Some (x, t, FStar_Pervasives_Native.None)
    | Refine ({ b = Annotated (x, t);_}, t') ->
        FStar_Pervasives_Native.Some (x, t, (FStar_Pervasives_Native.Some t'))
    | Paren t -> extract_named_refinement t
    | _ -> FStar_Pervasives_Native.None
(* Accumulate the declarations [ds] into the current module, rejecting a
   second [module] header.  [cur] holds the module name, its declaration,
   and the declarations collected so far (in reverse order). *)
let rec (as_mlist :
  ((FStar_Ident.lid * decl) * decl Prims.list) -> decl Prims.list -> modul) =
  fun cur ds ->
    let ((m_name, m_decl), acc) = cur in
    match ds with
    | [] -> Module (m_name, (m_decl :: (FStar_Compiler_List.rev acc)))
    | d::ds1 ->
        (match d.d with
         | TopLevelModule _ ->
             FStar_Errors.raise_error
               (FStar_Errors_Codes.Fatal_UnexpectedModuleDeclaration,
                 "Unexpected module declaration") d.drange
         | _ -> as_mlist ((m_name, m_decl), (d :: acc)) ds1)
(* Turn a non-empty declaration list into an input fragment: either a
   whole module (when it opens with a [module] header) or a bare list of
   declarations, which then must contain no [module] header at all. *)
let (as_frag : decl Prims.list -> inputFragment) =
  fun ds ->
    let (d, ds1) =
      match ds with
      | d::ds1 -> (d, ds1)
      | [] -> FStar_Compiler_Effect.raise FStar_Errors.Empty_frag in
    match d.d with
    | TopLevelModule m ->
        FStar_Pervasives.Inl (as_mlist ((m, d), []) ds1)
    | _ ->
        let ds2 = d :: ds1 in
        let reject_module dd =
          match dd with
          | { d = TopLevelModule _; drange = r;_} ->
              FStar_Errors.raise_error
                (FStar_Errors_Codes.Fatal_UnexpectedModuleDeclaration,
                  "Unexpected module declaration") r
          | _ -> () in
        (FStar_Compiler_List.iter reject_module ds2;
         FStar_Pervasives.Inr ds2)
(* [Some rest] when [s = prefix ^ rest]; [None] otherwise. *)
let (strip_prefix :
  Prims.string -> Prims.string -> Prims.string FStar_Pervasives_Native.option)
  =
  fun prefix s ->
    if FStar_Compiler_Util.starts_with s prefix
    then
      let rest =
        FStar_Compiler_Util.substring_from s (FStar_String.length prefix) in
      FStar_Pervasives_Native.Some rest
    else FStar_Pervasives_Native.None
(* [compile_op arity s r] turns the operator symbol [s] into a legal
   identifier of the form ["op_..."], spelling out each character by
   name.  [arity] only matters for '-': unary minus is "Minus", anything
   else is "Subtraction".  Special lens/array accessors get fixed names,
   and "let"/"and"-prefixed operators keep their keyword prefix.  The
   [r] argument is unused here (kept for interface compatibility,
   presumably a range in callers). *)
let compile_op : 'uuuuu . Prims.int -> Prims.string -> 'uuuuu -> Prims.string
  =
  fun arity ->
    fun s ->
      fun r ->
        (* Spell one character (given as its code point) by name; unknown
           characters become "u<codepoint>". *)
        let name_of_char uu___ =
          match uu___ with
          | 38 -> "Amp"
          | 64 -> "At"
          | 43 -> "Plus"
          | 45 when arity = Prims.int_one -> "Minus"
          | 45 -> "Subtraction"
          | 126 -> "Tilde"
          | 47 -> "Slash"
          | 92 -> "Backslash"
          | 60 -> "Less"
          | 61 -> "Equals"
          | 62 -> "Greater"
          | 95 -> "Underscore"
          | 124 -> "Bar"
          | 33 -> "Bang"
          | 94 -> "Hat"
          | 37 -> "Percent"
          | 42 -> "Star"
          | 63 -> "Question"
          | 58 -> "Colon"
          | 36 -> "Dollar"
          | 46 -> "Dot"
          | c ->
              let uu___1 =
                FStar_Compiler_Util.string_of_int
                  (FStar_Compiler_Util.int_of_char c) in
              Prims.op_Hat "u" uu___1 in
        (* Fixed spellings for the string/array/lens access operators. *)
        match s with
        | ".[]<-" -> "op_String_Assignment"
        | ".()<-" -> "op_Array_Assignment"
        | ".[||]<-" -> "op_Brack_Lens_Assignment"
        | ".(||)<-" -> "op_Lens_Assignment"
        | ".[]" -> "op_String_Access"
        | ".()" -> "op_Array_Access"
        | ".[||]" -> "op_Brack_Lens_Access"
        | ".(||)" -> "op_Lens_Access"
        | uu___ ->
            (* Split off a "let"/"and" keyword prefix (e.g. let+) so it
               survives as "let_"/"and_" in the compiled name. *)
            let uu___1 =
              if
                (FStar_Compiler_Util.starts_with s "let") ||
                  (FStar_Compiler_Util.starts_with s "and")
              then
                let uu___2 =
                  let uu___3 =
                    FStar_Compiler_Util.substring s Prims.int_zero
                      (Prims.of_int (3)) in
                  Prims.op_Hat uu___3 "_" in
                let uu___3 =
                  FStar_Compiler_Util.substring_from s (Prims.of_int (3)) in
                (uu___2, uu___3)
              else ("", s) in
            (match uu___1 with
             | (prefix, s1) ->
                 (* "op_" ^ prefix ^ names of the remaining characters,
                    joined with '_'. *)
                 let uu___2 =
                   let uu___3 =
                     let uu___4 =
                       let uu___5 = FStar_String.list_of_string s1 in
                       FStar_Compiler_List.map name_of_char uu___5 in
                     FStar_String.concat "_" uu___4 in
                   Prims.op_Hat prefix uu___3 in
                 Prims.op_Hat "op_" uu___2)
(* [compile_op] with an unknown arity (-1), so '-' compiles to
   "Subtraction" rather than "Minus". *)
let compile_op' : 'uuuuu . Prims.string -> 'uuuuu -> Prims.string =
  fun s r ->
    compile_op (~- Prims.int_one) s r
(* Inverse of [compile_op]: map a compiled identifier such as
   "op_Plus_Bang" back to the operator symbol ("+!"), together with an
   optional explicit arity (only "Subtraction" carries one, arity 2).
   Returns [None] when [s] is not a recognised compiled operator name. *)
let (string_to_op :
  Prims.string ->
    (Prims.string * Prims.int FStar_Pervasives_Native.option)
      FStar_Pervasives_Native.option)
  =
  fun s ->
    (* Map one character-name segment back to its symbol. *)
    let name_of_op uu___ =
      match uu___ with
      | "Amp" ->
          FStar_Pervasives_Native.Some ("&", FStar_Pervasives_Native.None)
      | "At" ->
          FStar_Pervasives_Native.Some ("@", FStar_Pervasives_Native.None)
      | "Plus" ->
          FStar_Pervasives_Native.Some ("+", FStar_Pervasives_Native.None)
      | "Minus" ->
          FStar_Pervasives_Native.Some ("-", FStar_Pervasives_Native.None)
      | "Subtraction" ->
          FStar_Pervasives_Native.Some
            ("-", (FStar_Pervasives_Native.Some (Prims.of_int (2))))
      | "Tilde" ->
          FStar_Pervasives_Native.Some ("~", FStar_Pervasives_Native.None)
      | "Slash" ->
          FStar_Pervasives_Native.Some ("/", FStar_Pervasives_Native.None)
      | "Backslash" ->
          FStar_Pervasives_Native.Some ("\\", FStar_Pervasives_Native.None)
      | "Less" ->
          FStar_Pervasives_Native.Some ("<", FStar_Pervasives_Native.None)
      | "Equals" ->
          FStar_Pervasives_Native.Some ("=", FStar_Pervasives_Native.None)
      | "Greater" ->
          FStar_Pervasives_Native.Some (">", FStar_Pervasives_Native.None)
      | "Underscore" ->
          FStar_Pervasives_Native.Some ("_", FStar_Pervasives_Native.None)
      | "Bar" ->
          FStar_Pervasives_Native.Some ("|", FStar_Pervasives_Native.None)
      | "Bang" ->
          FStar_Pervasives_Native.Some ("!", FStar_Pervasives_Native.None)
      | "Hat" ->
          FStar_Pervasives_Native.Some ("^", FStar_Pervasives_Native.None)
      | "Percent" ->
          FStar_Pervasives_Native.Some ("%", FStar_Pervasives_Native.None)
      | "Star" ->
          FStar_Pervasives_Native.Some ("*", FStar_Pervasives_Native.None)
      | "Question" ->
          FStar_Pervasives_Native.Some ("?", FStar_Pervasives_Native.None)
      | "Colon" ->
          FStar_Pervasives_Native.Some (":", FStar_Pervasives_Native.None)
      | "Dollar" ->
          FStar_Pervasives_Native.Some ("$", FStar_Pervasives_Native.None)
      | "Dot" ->
          FStar_Pervasives_Native.Some (".", FStar_Pervasives_Native.None)
      | "let" ->
          FStar_Pervasives_Native.Some (s, FStar_Pervasives_Native.None)
      | "and" ->
          FStar_Pervasives_Native.Some (s, FStar_Pervasives_Native.None)
      | uu___1 -> FStar_Pervasives_Native.None in
    (* Fixed names for the string/array/lens access operators first. *)
    match s with
    | "op_String_Assignment" ->
        FStar_Pervasives_Native.Some (".[]<-", FStar_Pervasives_Native.None)
    | "op_Array_Assignment" ->
        FStar_Pervasives_Native.Some (".()<-", FStar_Pervasives_Native.None)
    | "op_Brack_Lens_Assignment" ->
        FStar_Pervasives_Native.Some
          (".[||]<-", FStar_Pervasives_Native.None)
    | "op_Lens_Assignment" ->
        FStar_Pervasives_Native.Some
          (".(||)<-", FStar_Pervasives_Native.None)
    | "op_String_Access" ->
        FStar_Pervasives_Native.Some (".[]", FStar_Pervasives_Native.None)
    | "op_Array_Access" ->
        FStar_Pervasives_Native.Some (".()", FStar_Pervasives_Native.None)
    | "op_Brack_Lens_Access" ->
        FStar_Pervasives_Native.Some (".[||]", FStar_Pervasives_Native.None)
    | "op_Lens_Access" ->
        FStar_Pervasives_Native.Some (".(||)", FStar_Pervasives_Native.None)
    | uu___ ->
        if FStar_Compiler_Util.starts_with s "op_"
        then
          (* Drop "op_" and split the remainder into '_'-separated
             character-name segments. *)
          let s1 =
            let uu___1 =
              FStar_Compiler_Util.substring_from s
                (FStar_String.length "op_") in
            FStar_Compiler_Util.split uu___1 "_" in
          (match s1 with
           | op::[] ->
               if FStar_Compiler_Util.starts_with op "u"
               then
                 (* "u<codepoint>" segment: decode the code point back to
                    its character. *)
                 let uu___1 =
                   let uu___2 =
                     FStar_Compiler_Util.substring_from op Prims.int_one in
                   FStar_Compiler_Util.safe_int_of_string uu___2 in
                 FStar_Compiler_Util.map_opt uu___1
                   (fun op1 ->
                      ((FStar_Compiler_Util.string_of_char
                          (FStar_Compiler_Util.char_of_int op1)),
                        FStar_Pervasives_Native.None))
               else name_of_op op
           | uu___1 ->
               (* Several segments: decode each and concatenate; any
                  unknown segment makes the whole result [None]. *)
               let maybeop =
                 let uu___2 = FStar_Compiler_List.map name_of_op s1 in
                 FStar_Compiler_List.fold_left
                   (fun acc ->
                      fun x ->
                        match acc with
                        | FStar_Pervasives_Native.None ->
                            FStar_Pervasives_Native.None
                        | FStar_Pervasives_Native.Some acc1 ->
                            (match x with
                             | FStar_Pervasives_Native.Some (op, uu___3) ->
                                 FStar_Pervasives_Native.Some
                                   (Prims.op_Hat acc1 op)
                             | FStar_Pervasives_Native.None ->
                                 FStar_Pervasives_Native.None))
                   (FStar_Pervasives_Native.Some "") uu___2 in
               FStar_Compiler_Util.map_opt maybeop
                 (fun o -> (o, FStar_Pervasives_Native.None)))
        else FStar_Pervasives_Native.None
(* Render an fsdoc comment: the comment text followed by its keyword
   pairs printed as "k->v" and joined with commas. *)
let (string_of_fsdoc :
  (Prims.string * (Prims.string * Prims.string) Prims.list) -> Prims.string)
  =
  fun (comment, keywords) ->
    let kv_to_string (k, v) = Prims.op_Hat k (Prims.op_Hat "->" v) in
    let kws =
      FStar_String.concat ","
        (FStar_Compiler_List.map kv_to_string keywords) in
    Prims.op_Hat comment kws
(* Surface syntax for a let qualifier: empty or "rec". *)
let (string_of_let_qualifier : let_qualifier -> Prims.string) =
  function
  | NoLetQualifier -> ""
  | Rec -> "rec"
(* Render each element of [l] with [f] and join the results with [sep]. *)
let to_string_l :
  'uuuuu .
    Prims.string ->
      ('uuuuu -> Prims.string) -> 'uuuuu Prims.list -> Prims.string
  =
  fun sep f l ->
    FStar_String.concat sep (FStar_Compiler_List.map f l)
(* Implicitness marker: "#" for [Hash], nothing otherwise. *)
let (imp_to_string : imp -> Prims.string) =
  function
  | Hash -> "#"
  | _ -> ""
let rec (term_to_string : term -> Prims.string) =
  fun x ->
    match x.tm with
    | Wild -> "_"
    | LexList l ->
        let uu___ =
          match l with
          | [] -> " "
          | hd::tl ->
              let uu___1 =
                let uu___2 = term_to_string hd in
                FStar_Compiler_List.fold_left
                  (fun s ->
                     fun t ->
                       let uu___3 =
                         let uu___4 = term_to_string t in
                         Prims.op_Hat "; " uu___4 in
                       Prims.op_Hat s uu___3) uu___2 in
              FStar_Compiler_Effect.op_Bar_Greater tl uu___1 in
        FStar_Compiler_Util.format1 "%[%s]" uu___
    | Decreases (t, uu___) ->
        let uu___1 = term_to_string t in
        FStar_Compiler_Util.format1 "(decreases %s)" uu___1
    | Requires (t, uu___) ->
        let uu___1 = term_to_string t in
        FStar_Compiler_Util.format1 "(requires %s)" uu___1
    | Ensures (t, uu___) ->
        let uu___1 = term_to_string t in
        FStar_Compiler_Util.format1 "(ensures %s)" uu___1
    | Labeled (t, l, uu___) ->
        let uu___1 = term_to_string t in
        FStar_Compiler_Util.format2 "(labeled %s %s)" l uu___1
    | Const c -> FStar_Parser_Const.const_to_string c
    | Op (s, xs) ->
        let uu___ = FStar_Ident.string_of_id s in
        let uu___1 =
          let uu___2 =
            FStar_Compiler_List.map
              (fun x1 ->
                 FStar_Compiler_Effect.op_Bar_Greater x1 term_to_string) xs in
          FStar_String.concat ", " uu___2 in
        FStar_Compiler_Util.format2 "%s(%s)" uu___ uu___1
    | Tvar id -> FStar_Ident.string_of_id id
    | Uvar id -> FStar_Ident.string_of_id id
    | Var l -> FStar_Ident.string_of_lid l
    | Name l -> FStar_Ident.string_of_lid l
    | Projector (rec_lid, field_id) ->
        let uu___ = FStar_Ident.string_of_lid rec_lid in
        let uu___1 = FStar_Ident.string_of_id field_id in
        FStar_Compiler_Util.format2 "%s?.%s" uu___ uu___1
    | Construct (l, args) ->
        let uu___ = FStar_Ident.string_of_lid l in
        let uu___1 =
          to_string_l " "
            (fun uu___2 ->
               match uu___2 with
               | (a, imp1) ->
                   let uu___3 = term_to_string a in
                   FStar_Compiler_Util.format2 "%s%s" (imp_to_string imp1)
                     uu___3) args in
        FStar_Compiler_Util.format2 "(%s %s)" uu___ uu___1
    | Abs (pats, t) ->
        let uu___ = to_string_l " " pat_to_string pats in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "(fun %s -> %s)" uu___ uu___1
    | App (t1, t2, imp1) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in
        FStar_Compiler_Util.format3 "%s %s%s" uu___ (imp_to_string imp1)
          uu___1
    | Let (Rec, (a, (p, b))::lbs, body) ->
        let uu___ = attrs_opt_to_string a in
        let uu___1 =
          let uu___2 = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in
          let uu___3 = FStar_Compiler_Effect.op_Bar_Greater b term_to_string in
          FStar_Compiler_Util.format2 "%s=%s" uu___2 uu___3 in
        let uu___2 =
          to_string_l " "
            (fun uu___3 ->
               match uu___3 with
               | (a1, (p1, b1)) ->
                   let uu___4 = attrs_opt_to_string a1 in
                   let uu___5 =
                     FStar_Compiler_Effect.op_Bar_Greater p1 pat_to_string in
                   let uu___6 =
                     FStar_Compiler_Effect.op_Bar_Greater b1 term_to_string in
                   FStar_Compiler_Util.format3 "%sand %s=%s" uu___4 uu___5
                     uu___6) lbs in
        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater body term_to_string in
        FStar_Compiler_Util.format4 "%slet rec %s%s in %s" uu___ uu___1
          uu___2 uu___3
    | Let (q, (attrs, (pat, tm))::[], body) ->
        let uu___ = attrs_opt_to_string attrs in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater pat pat_to_string in
        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tm term_to_string in
        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater body term_to_string in
        FStar_Compiler_Util.format5 "%slet %s %s = %s in %s" uu___
          (string_of_let_qualifier q) uu___1 uu___2 uu___3
    | Let (uu___, uu___1, uu___2) ->
        FStar_Errors.raise_error
          (FStar_Errors_Codes.Fatal_EmptySurfaceLet,
            "Internal error: found an invalid surface Let") x.range
    | LetOpen (lid, t) ->
        let uu___ = FStar_Ident.string_of_lid lid in
        let uu___1 = term_to_string t in
        FStar_Compiler_Util.format2 "let open %s in %s" uu___ uu___1
    | Seq (t1, t2) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in
        FStar_Compiler_Util.format2 "%s; %s" uu___ uu___1
    | Bind (id, t1, t2) ->
        let uu___ = FStar_Ident.string_of_id id in
        let uu___1 = term_to_string t1 in
        let uu___2 = term_to_string t2 in
        FStar_Compiler_Util.format3 "%s <- %s; %s" uu___ uu___1 uu___2
    | If (t1, op_opt, ret_opt, t2, t3) ->
        let uu___ =
          match op_opt with
          | FStar_Pervasives_Native.Some op -> FStar_Ident.string_of_id op
          | FStar_Pervasives_Native.None -> "" in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in
        let uu___2 =
          match ret_opt with
          | FStar_Pervasives_Native.None -> ""
          | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->
              let s = if use_eq then "returns$" else "returns" in
              let uu___3 =
                match as_opt with
                | FStar_Pervasives_Native.None -> ""
                | FStar_Pervasives_Native.Some as_ident ->
                    let uu___4 = FStar_Ident.string_of_id as_ident in
                    FStar_Compiler_Util.format1 " as %s " uu___4 in
              let uu___4 = term_to_string ret in
              FStar_Compiler_Util.format3 "%s%s %s " uu___3 s uu___4 in
        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in
        let uu___4 = FStar_Compiler_Effect.op_Bar_Greater t3 term_to_string in
        FStar_Compiler_Util.format5 "if%s %s %sthen %s else %s" uu___ uu___1
          uu___2 uu___3 uu___4
    | Match (t, op_opt, ret_opt, branches) ->
        try_or_match_to_string x t branches op_opt ret_opt
    | TryWith (t, branches) ->
        try_or_match_to_string x t branches FStar_Pervasives_Native.None
          FStar_Pervasives_Native.None
    | Ascribed (t1, t2, FStar_Pervasives_Native.None, flag) ->
        let s = if flag then "$:" else "<:" in
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in
        FStar_Compiler_Util.format3 "(%s %s %s)" uu___ s uu___1
    | Ascribed (t1, t2, FStar_Pervasives_Native.Some tac, flag) ->
        let s = if flag then "$:" else "<:" in
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in
        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tac term_to_string in
        FStar_Compiler_Util.format4 "(%s %s %s by %s)" uu___ s uu___1 uu___2
    | Record (FStar_Pervasives_Native.Some e, fields) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater e term_to_string in
        let uu___1 =
          to_string_l " "
            (fun uu___2 ->
               match uu___2 with
               | (l, e1) ->
                   let uu___3 = FStar_Ident.string_of_lid l in
                   let uu___4 =
                     FStar_Compiler_Effect.op_Bar_Greater e1 term_to_string in
                   FStar_Compiler_Util.format2 "%s=%s" uu___3 uu___4) fields in
        FStar_Compiler_Util.format2 "{%s with %s}" uu___ uu___1
    | Record (FStar_Pervasives_Native.None, fields) ->
        let uu___ =
          to_string_l " "
            (fun uu___1 ->
               match uu___1 with
               | (l, e) ->
                   let uu___2 = FStar_Ident.string_of_lid l in
                   let uu___3 =
                     FStar_Compiler_Effect.op_Bar_Greater e term_to_string in
                   FStar_Compiler_Util.format2 "%s=%s" uu___2 uu___3) fields in
        FStar_Compiler_Util.format1 "{%s}" uu___
    | Project (e, l) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater e term_to_string in
        let uu___1 = FStar_Ident.string_of_lid l in
        FStar_Compiler_Util.format2 "%s.%s" uu___ uu___1
    | Product ([], t) -> term_to_string t
    | Product (b::hd::tl, t) ->
        term_to_string
          (mk_term
             (Product
                ([b], (mk_term (Product ((hd :: tl), t)) x.range x.level)))
             x.range x.level)
    | Product (b::[], t) when x.level = Type_level ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "%s -> %s" uu___ uu___1
    | Product (b::[], t) when x.level = Kind ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "%s => %s" uu___ uu___1
    | Sum (binders, t) ->
        let uu___ =
          FStar_Compiler_Effect.op_Bar_Greater
            (FStar_Compiler_List.op_At binders [FStar_Pervasives.Inr t])
            (FStar_Compiler_List.map
               (fun uu___1 ->
                  match uu___1 with
                  | FStar_Pervasives.Inl b -> binder_to_string b
                  | FStar_Pervasives.Inr t1 -> term_to_string t1)) in
        FStar_Compiler_Effect.op_Bar_Greater uu___
          (FStar_String.concat " & ")
    | QForall (bs, (uu___, pats), t) ->
        let uu___1 = to_string_l " " binder_to_string bs in
        let uu___2 =
          to_string_l " \\/ " (to_string_l "; " term_to_string) pats in
        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format3 "forall %s.{:pattern %s} %s" uu___1
          uu___2 uu___3
    | QExists (bs, (uu___, pats), t) ->
        let uu___1 = to_string_l " " binder_to_string bs in
        let uu___2 =
          to_string_l " \\/ " (to_string_l "; " term_to_string) pats in
        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format3 "exists %s.{:pattern %s} %s" uu___1
          uu___2 uu___3
    | Refine (b, t) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "%s:{%s}" uu___ uu___1
    | NamedTyp (x1, t) ->
        let uu___ = FStar_Ident.string_of_id x1 in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "%s:%s" uu___ uu___1
    | Paren t ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format1 "(%s)" uu___
    | Product (bs, t) ->
        let uu___ =
          let uu___1 =
            FStar_Compiler_Effect.op_Bar_Greater bs
              (FStar_Compiler_List.map binder_to_string) in
          FStar_Compiler_Effect.op_Bar_Greater uu___1
            (FStar_String.concat ",") in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "Unidentified product: [%s] %s" uu___
          uu___1
    | Discrim lid ->
        let uu___ = FStar_Ident.string_of_lid lid in
        FStar_Compiler_Util.format1 "%s?" uu___
    | Attributes ts ->
        let uu___ =
          let uu___1 = FStar_Compiler_List.map term_to_string ts in
          FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat " ") uu___1 in
        FStar_Compiler_Util.format1 "(attributes %s)" uu___
    | Antiquote t ->
        let uu___ = term_to_string t in
        FStar_Compiler_Util.format1 "(`#%s)" uu___
    | Quote (t, Static) ->
        let uu___ = term_to_string t in
        FStar_Compiler_Util.format1 "(`(%s))" uu___
    | Quote (t, Dynamic) ->
        let uu___ = term_to_string t in
        FStar_Compiler_Util.format1 "quote (%s)" uu___
    | VQuote t ->
        let uu___ = term_to_string t in
        FStar_Compiler_Util.format1 "`%%%s" uu___
    | CalcProof (rel, init, steps) ->
        let uu___ = term_to_string rel in
        let uu___1 = term_to_string init in
        let uu___2 =
          let uu___3 = FStar_Compiler_List.map calc_step_to_string steps in
          FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat " ") uu___3 in
        FStar_Compiler_Util.format3 "calc (%s) { %s %s }" uu___ uu___1 uu___2
    | ElimForall (bs, t, vs) ->
        let uu___ = binders_to_string " " bs in
        let uu___1 = term_to_string t in
        let uu___2 =
          let uu___3 = FStar_Compiler_List.map term_to_string vs in
          FStar_String.concat " " uu___3 in
        FStar_Compiler_Util.format3 "_elim_ forall %s. %s using %s" uu___
          uu___1 uu___2
    | ElimExists (bs, p, q, b, e) ->
        let uu___ = binders_to_string " " bs in
        let uu___1 = term_to_string p in
        let uu___2 = term_to_string q in
        let uu___3 = binder_to_string b in
        let uu___4 = term_to_string e in
        FStar_Compiler_Util.format5
          "_elim_ exists %s. %s _to_ %s\n\\with %s. %s" uu___ uu___1 uu___2
          uu___3 uu___4
    | ElimImplies (p, q, e) ->
        let uu___ = term_to_string p in
        let uu___1 = term_to_string q in
        let uu___2 = term_to_string e in
        FStar_Compiler_Util.format3 "_elim_ %s ==> %s with %s" uu___ uu___1
          uu___2
    | ElimOr (p, q, r, x1, e, y, e') ->
        let uu___ =
          let uu___1 = term_to_string p in
          let uu___2 =
            let uu___3 = term_to_string q in
            let uu___4 =
              let uu___5 = term_to_string r in
              let uu___6 =
                let uu___7 = binder_to_string x1 in
                let uu___8 =
                  let uu___9 = term_to_string e in
                  let uu___10 =
                    let uu___11 = binder_to_string y in
                    let uu___12 =
                      let uu___13 = term_to_string e' in [uu___13] in
                    uu___11 :: uu___12 in
                  uu___9 :: uu___10 in
                uu___7 :: uu___8 in
              uu___5 :: uu___6 in
            uu___3 :: uu___4 in
          uu___1 :: uu___2 in
        FStar_Compiler_Util.format
          "_elim_ %s \\/ %s _to_ %s\n\\with %s. %s\n\\and %s.%s" uu___
    | ElimAnd (p, q, r, x1, y, e) ->
        let uu___ =
          let uu___1 = term_to_string p in
          let uu___2 =
            let uu___3 = term_to_string q in
            let uu___4 =
              let uu___5 = term_to_string r in
              let uu___6 =
                let uu___7 = binder_to_string x1 in
                let uu___8 =
                  let uu___9 = binder_to_string y in
                  let uu___10 = let uu___11 = term_to_string e in [uu___11] in
                  uu___9 :: uu___10 in
                uu___7 :: uu___8 in
              uu___5 :: uu___6 in
            uu___3 :: uu___4 in
          uu___1 :: uu___2 in
        FStar_Compiler_Util.format
          "_elim_ %s /\\ %s _to_ %s\n\\with %s %s. %s" uu___
    | IntroForall (xs, p, e) ->
        let uu___ = binders_to_string " " xs in
        let uu___1 = term_to_string p in
        let uu___2 = term_to_string e in
        FStar_Compiler_Util.format3 "_intro_ forall %s. %s with %s" uu___
          uu___1 uu___2
    | IntroExists (xs, t, vs, e) ->
        let uu___ = binders_to_string " " xs in
        let uu___1 = term_to_string t in
        let uu___2 =
          let uu___3 = FStar_Compiler_List.map term_to_string vs in
          FStar_String.concat " " uu___3 in
        let uu___3 = term_to_string e in
        FStar_Compiler_Util.format4 "_intro_ exists %s. %s using %s with %s"
          uu___ uu___1 uu___2 uu___3
    | IntroImplies (p, q, x1, e) ->
        let uu___ = term_to_string p in
        let uu___1 = term_to_string q in
        let uu___2 = binder_to_string x1 in
        let uu___3 = term_to_string p in
        FStar_Compiler_Util.format4 "_intro_ %s ==> %s with %s. %s" uu___
          uu___1 uu___2 uu___3
    | IntroOr (b, p, q, r) ->
        let uu___ = term_to_string p in
        let uu___1 = term_to_string q in
        let uu___2 = term_to_string r in
        FStar_Compiler_Util.format4 "_intro_ %s \\/ %s using %s with %s"
          uu___ uu___1 (if b then "Left" else "Right") uu___2
    | IntroAnd (p, q, e1, e2) ->
        let uu___ = term_to_string p in
        let uu___1 = term_to_string q in
        let uu___2 = term_to_string e1 in
        let uu___3 = term_to_string e2 in
        FStar_Compiler_Util.format4 "_intro_ %s /\\ %s with %s and %s" uu___
          uu___1 uu___2 uu___3
and (binders_to_string : Prims.string -> binder Prims.list -> Prims.string) =
  (* Render every binder in [bs] and join the pieces with [sep]. *)
  fun sep ->
    fun bs ->
      let pieces = FStar_Compiler_List.map binder_to_string bs in
      FStar_String.concat sep pieces
(* Shared printer for `match` and `try .. with` terms.
   [x] is the whole term and is consulted only to choose the keyword;
   [scrutinee] is the matched expression; [branches] holds
   (pattern, optional when-guard, body) triples; [op_opt] is an optional
   operator suffix appended to the keyword; [ret_opt] is an optional
   returns-ascription: (optional as-identifier, returns-term, use-$ flag). *)
and (try_or_match_to_string :
  term ->
    term ->
      (pattern * term FStar_Pervasives_Native.option * term) Prims.list ->
        FStar_Ident.ident FStar_Pervasives_Native.option ->
          (FStar_Ident.ident FStar_Pervasives_Native.option * term *
            Prims.bool) FStar_Pervasives_Native.option -> Prims.string)
  =
  fun x ->
    fun scrutinee ->
      fun branches ->
        fun op_opt ->
          fun ret_opt ->
            (* The keyword is recovered from the term constructor; any other
               constructor is a caller error. *)
            let s =
              match x.tm with
              | Match uu___ -> "match"
              | TryWith uu___ -> "try"
              | uu___ -> failwith "impossible" in
            let uu___ =
              match op_opt with
              | FStar_Pervasives_Native.Some op ->
                  FStar_Ident.string_of_id op
              | FStar_Pervasives_Native.None -> "" in
            let uu___1 =
              FStar_Compiler_Effect.op_Bar_Greater scrutinee term_to_string in
            let uu___2 =
              match ret_opt with
              | FStar_Pervasives_Native.None -> ""
              | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->
                  (* "returns$" marks the equality-flavored ascription. *)
                  let s1 = if use_eq then "returns$" else "returns" in
                  let uu___3 =
                    match as_opt with
                    | FStar_Pervasives_Native.None -> ""
                    | FStar_Pervasives_Native.Some as_ident ->
                        let uu___4 = FStar_Ident.string_of_id as_ident in
                        FStar_Compiler_Util.format1 "as %s " uu___4 in
                  let uu___4 = term_to_string ret in
                  (* NOTE(review): this prints "returns" before the "as x"
                     part, while the If-case printer earlier in this file
                     emits "as x" before "returns" — confirm which order is
                     the intended concrete syntax. *)
                  FStar_Compiler_Util.format3 "%s%s %s " s1 uu___3 uu___4 in
            (* One "pat [when g] -> body" per branch, separated by " | ". *)
            let uu___3 =
              to_string_l " | "
                (fun uu___4 ->
                   match uu___4 with
                   | (p, w, e) ->
                       let uu___5 =
                         FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in
                       let uu___6 =
                         match w with
                         | FStar_Pervasives_Native.None -> ""
                         | FStar_Pervasives_Native.Some e1 ->
                             let uu___7 = term_to_string e1 in
                             FStar_Compiler_Util.format1 "when %s" uu___7 in
                       let uu___7 =
                         FStar_Compiler_Effect.op_Bar_Greater e
                           term_to_string in
                       FStar_Compiler_Util.format3 "%s %s -> %s" uu___5
                         uu___6 uu___7) branches in
            FStar_Compiler_Util.format5 "%s%s %s %swith %s" s uu___ uu___1
              uu___2 uu___3
and (calc_step_to_string : calc_step -> Prims.string) =
  (* A calc step renders as "rel{ justification } next". *)
  fun step ->
    match step with
    | CalcStep (rel, just, next) ->
        let rel_s = term_to_string rel in
        let just_s = term_to_string just in
        let next_s = term_to_string next in
        FStar_Compiler_Util.format3 "%s{ %s } %s" rel_s just_s next_s
(* Pretty-print a binder: qualifier prefix, attributes, then the binder
   itself ("x", "x:_", "x:t", or a bare type).  Typeclass binders get an
   extra "{| ... |}" wrapping. *)
and (binder_to_string : binder -> Prims.string) =
  fun x ->
    let pr x1 =
      let s =
        match x1.b with
        | Variable i -> FStar_Ident.string_of_id i
        | TVariable i ->
            let uu___ = FStar_Ident.string_of_id i in
            FStar_Compiler_Util.format1 "%s:_" uu___
        | TAnnotated (i, t) ->
            let uu___ = FStar_Ident.string_of_id i in
            let uu___1 =
              FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
            FStar_Compiler_Util.format2 "%s:%s" uu___ uu___1
        | Annotated (i, t) ->
            let uu___ = FStar_Ident.string_of_id i in
            let uu___1 =
              FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
            FStar_Compiler_Util.format2 "%s:%s" uu___ uu___1
        | NoName t -> FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
      (* Prefix with aqual ("$", "#", ...) and any binder attributes. *)
      let uu___ = aqual_to_string x1.aqual in
      let uu___1 = attr_list_to_string x1.battributes in
      FStar_Compiler_Util.format3 "%s%s%s" uu___ uu___1 s in
    match x.aqual with
    | FStar_Pervasives_Native.Some (TypeClassArg) ->
        (* NOTE(review): [pr] re-renders x.aqual through aqual_to_string —
           make sure that function accepts TypeClassArg, otherwise this
           branch raises instead of printing "{| ... |}". *)
        let uu___ = let uu___1 = pr x in Prims.op_Hat uu___1 " |}" in
        Prims.op_Hat "{| " uu___
    | uu___ -> pr x
(* Prefix printed in front of a binder/pattern for its argument qualifier:
   "$" for equality, "#" for implicit, "" for none.  TypeClassArg renders
   as "" because binder_to_string already wraps typeclass binders in
   "{| ... |}"; previously this case raised with a copy-pasted
   "meta arg qualifier?" message, which made binder_to_string's
   TypeClassArg branch always fail. *)
and (aqual_to_string :
  arg_qualifier FStar_Pervasives_Native.option -> Prims.string) =
  fun uu___ ->
    match uu___ with
    | FStar_Pervasives_Native.Some (Equality) -> "$"
    | FStar_Pervasives_Native.Some (Implicit) -> "#"
    | FStar_Pervasives_Native.None -> ""
    | FStar_Pervasives_Native.Some (Meta uu___1) ->
        failwith "aqual_to_strings: meta arg qualifier?"
    | FStar_Pervasives_Native.Some (TypeClassArg) -> ""
and (attr_list_to_string : term Prims.list -> Prims.string) =
  (* An empty attribute list prints as nothing; otherwise defer to the
     option-based attribute printer. *)
  fun attrs ->
    match attrs with
    | [] -> ""
    | _ -> attrs_opt_to_string (FStar_Pervasives_Native.Some attrs)
(* Pretty-print a surface pattern.  Wildcards carry optional attributes and
   an implicit marker; tuples distinguish dependent "(|..|)" from plain
   "(..)" via their boolean flag. *)
and (pat_to_string : pattern -> Prims.string) =
  fun x ->
    match x.pat with
    | PatWild (FStar_Pervasives_Native.None, attrs) ->
        let uu___ = attr_list_to_string attrs in Prims.op_Hat uu___ "_"
    | PatWild (uu___, attrs) ->
        (* Qualified wildcard: prefix with "#". *)
        let uu___1 =
          let uu___2 = attr_list_to_string attrs in Prims.op_Hat uu___2 "_" in
        Prims.op_Hat "#" uu___1
    | PatConst c -> FStar_Parser_Const.const_to_string c
    | PatVQuote t ->
        let uu___ = term_to_string t in
        FStar_Compiler_Util.format1 "`%%%s" uu___
    | PatApp (p, ps) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in
        let uu___1 = to_string_l " " pat_to_string ps in
        FStar_Compiler_Util.format2 "(%s %s)" uu___ uu___1
    | PatTvar (i, aq, attrs) ->
        (* Type variable: qualifier, attributes, then the identifier. *)
        let uu___ = aqual_to_string aq in
        let uu___1 = attr_list_to_string attrs in
        let uu___2 = FStar_Ident.string_of_id i in
        FStar_Compiler_Util.format3 "%s%s%s" uu___ uu___1 uu___2
    | PatVar (i, aq, attrs) ->
        let uu___ = aqual_to_string aq in
        let uu___1 = attr_list_to_string attrs in
        let uu___2 = FStar_Ident.string_of_id i in
        FStar_Compiler_Util.format3 "%s%s%s" uu___ uu___1 uu___2
    | PatName l -> FStar_Ident.string_of_lid l
    | PatList l ->
        let uu___ = to_string_l "; " pat_to_string l in
        FStar_Compiler_Util.format1 "[%s]" uu___
    | PatTuple (l, false) ->
        let uu___ = to_string_l ", " pat_to_string l in
        FStar_Compiler_Util.format1 "(%s)" uu___
    | PatTuple (l, true) ->
        (* true = dependent tuple. *)
        let uu___ = to_string_l ", " pat_to_string l in
        FStar_Compiler_Util.format1 "(|%s|)" uu___
    | PatRecord l ->
        let uu___ =
          to_string_l "; "
            (fun uu___1 ->
               match uu___1 with
               | (f, e) ->
                   let uu___2 = FStar_Ident.string_of_lid f in
                   let uu___3 =
                     FStar_Compiler_Effect.op_Bar_Greater e pat_to_string in
                   FStar_Compiler_Util.format2 "%s=%s" uu___2 uu___3) l in
        FStar_Compiler_Util.format1 "{%s}" uu___
    | PatOr l -> to_string_l "|\n " pat_to_string l
    | PatOp op ->
        let uu___ = FStar_Ident.string_of_id op in
        FStar_Compiler_Util.format1 "(%s)" uu___
    | PatAscribed (p, (t, FStar_Pervasives_Native.None)) ->
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        FStar_Compiler_Util.format2 "(%s:%s)" uu___ uu___1
    | PatAscribed (p, (t, FStar_Pervasives_Native.Some tac)) ->
        (* Ascription with a "by tactic" annotation. *)
        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in
        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in
        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tac term_to_string in
        FStar_Compiler_Util.format3 "(%s:%s by %s)" uu___ uu___1 uu___2
and (attrs_opt_to_string :
  term Prims.list FStar_Pervasives_Native.option -> Prims.string) =
  (* None prints as nothing; Some attrs prints as "[@ a1; a2; ...]". *)
  fun attrs_opt ->
    match attrs_opt with
    | FStar_Pervasives_Native.None -> ""
    | FStar_Pervasives_Native.Some attrs ->
        let rendered = FStar_Compiler_List.map term_to_string attrs in
        let joined = FStar_String.concat "; " rendered in
        FStar_Compiler_Util.format1 "[@ %s]" joined
let rec (head_id_of_pat : pattern -> FStar_Ident.lident Prims.list) =
  (* Identifiers naming the head of a pattern: a constructor name or a
     variable (lifted to a lident); application and ascription are
     descended through; every other shape contributes nothing. *)
  fun p ->
    match p.pat with
    | PatName l -> [l]
    | PatVar (i, _, _) -> [FStar_Ident.lid_of_ids [i]]
    | PatApp (inner, _) -> head_id_of_pat inner
    | PatAscribed (inner, _) -> head_id_of_pat inner
    | _ -> []
let lids_of_let :
  'uuuuu . (pattern * 'uuuuu) Prims.list -> FStar_Ident.lident Prims.list =
  (* All identifiers bound by a let: the head ids of each binding pattern. *)
  fun defs ->
    FStar_Compiler_List.collect (fun (p, _) -> head_id_of_pat p) defs
let (id_of_tycon : tycon -> Prims.string) =
  (* Name of the type constructor being declared, regardless of flavor. *)
  fun t ->
    match t with
    | TyconAbstract (i, _, _)
    | TyconAbbrev (i, _, _, _)
    | TyconRecord (i, _, _, _, _)
    | TyconVariant (i, _, _, _) -> FStar_Ident.string_of_id i
let (string_of_pragma : pragma -> Prims.string) =
  (* Concrete syntax of a pragma; optional payloads default to "". *)
  fun p ->
    let with_default fmt sopt =
      FStar_Compiler_Util.format1 fmt (FStar_Compiler_Util.dflt "" sopt) in
    match p with
    | SetOptions s -> FStar_Compiler_Util.format1 "set-options \"%s\"" s
    | ResetOptions sopt -> with_default "reset-options \"%s\"" sopt
    | PushOptions sopt -> with_default "push-options \"%s\"" sopt
    | PopOptions -> "pop-options"
    | RestartSolver -> "restart-solver"
    | PrintEffectsGraph -> "print-effects-graph"
(* One-line summary of a top-level declaration: a keyword followed by the
   declared name(s).  This is a diagnostic rendering, not re-parsable
   concrete syntax. *)
let (decl_to_string : decl -> Prims.string) =
  fun d ->
    match d.d with
    | TopLevelModule l ->
        let uu___ = FStar_Ident.string_of_lid l in
        Prims.op_Hat "module " uu___
    | Open l ->
        let uu___ = FStar_Ident.string_of_lid l in Prims.op_Hat "open " uu___
    | Friend l ->
        let uu___ = FStar_Ident.string_of_lid l in
        Prims.op_Hat "friend " uu___
    | Include l ->
        let uu___ = FStar_Ident.string_of_lid l in
        Prims.op_Hat "include " uu___
    | ModuleAbbrev (i, l) ->
        let uu___ = FStar_Ident.string_of_id i in
        let uu___1 = FStar_Ident.string_of_lid l in
        FStar_Compiler_Util.format2 "module %s = %s" uu___ uu___1
    | TopLevelLet (uu___, pats) ->
        (* "let a, b, ..." — names come from the binding patterns. *)
        let uu___1 =
          let uu___2 =
            let uu___3 = lids_of_let pats in
            FStar_Compiler_Effect.op_Bar_Greater uu___3
              (FStar_Compiler_List.map (fun l -> FStar_Ident.string_of_lid l)) in
          FStar_Compiler_Effect.op_Bar_Greater uu___2
            (FStar_String.concat ", ") in
        Prims.op_Hat "let " uu___1
    | Assume (i, uu___) ->
        let uu___1 = FStar_Ident.string_of_id i in
        Prims.op_Hat "assume " uu___1
    | Tycon (uu___, uu___1, tys) ->
        (* "type t1, t2, ..." for a (possibly mutual) type declaration. *)
        let uu___2 =
          let uu___3 =
            FStar_Compiler_Effect.op_Bar_Greater tys
              (FStar_Compiler_List.map id_of_tycon) in
          FStar_Compiler_Effect.op_Bar_Greater uu___3
            (FStar_String.concat ", ") in
        Prims.op_Hat "type " uu___2
    | Val (i, uu___) ->
        let uu___1 = FStar_Ident.string_of_id i in Prims.op_Hat "val " uu___1
    | Exception (i, uu___) ->
        let uu___1 = FStar_Ident.string_of_id i in
        Prims.op_Hat "exception " uu___1
    | NewEffect (DefineEffect (i, uu___, uu___1, uu___2)) ->
        let uu___3 = FStar_Ident.string_of_id i in
        Prims.op_Hat "new_effect " uu___3
    | NewEffect (RedefineEffect (i, uu___, uu___1)) ->
        let uu___2 = FStar_Ident.string_of_id i in
        Prims.op_Hat "new_effect " uu___2
    | LayeredEffect (DefineEffect (i, uu___, uu___1, uu___2)) ->
        let uu___3 = FStar_Ident.string_of_id i in
        Prims.op_Hat "layered_effect " uu___3
    | LayeredEffect (RedefineEffect (i, uu___, uu___1)) ->
        let uu___2 = FStar_Ident.string_of_id i in
        Prims.op_Hat "layered_effect " uu___2
    | Polymonadic_bind (l1, l2, l3, uu___) ->
        let uu___1 = FStar_Ident.string_of_lid l1 in
        let uu___2 = FStar_Ident.string_of_lid l2 in
        let uu___3 = FStar_Ident.string_of_lid l3 in
        FStar_Compiler_Util.format3 "polymonadic_bind (%s, %s) |> %s" uu___1
          uu___2 uu___3
    | Polymonadic_subcomp (l1, l2, uu___) ->
        let uu___1 = FStar_Ident.string_of_lid l1 in
        let uu___2 = FStar_Ident.string_of_lid l2 in
        FStar_Compiler_Util.format2 "polymonadic_subcomp %s <: %s" uu___1
          uu___2
    | Splice (ids, t) ->
        (* "splice[i1;i2] (tactic-term)" *)
        let uu___ =
          let uu___1 =
            let uu___2 =
              FStar_Compiler_List.map (fun i -> FStar_Ident.string_of_id i)
                ids in
            FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat ";")
              uu___2 in
          let uu___2 =
            let uu___3 =
              let uu___4 = term_to_string t in Prims.op_Hat uu___4 ")" in
            Prims.op_Hat "] (" uu___3 in
          Prims.op_Hat uu___1 uu___2 in
        Prims.op_Hat "splice[" uu___
    | SubEffect uu___ -> "sub_effect"
    | Pragma p ->
        let uu___ = string_of_pragma p in Prims.op_Hat "pragma #" uu___
let (modul_to_string : modul -> Prims.string) =
  (* Modules and interfaces both print as their declarations, one per
     line. *)
  fun m ->
    let render decls =
      let rendered = FStar_Compiler_List.map decl_to_string decls in
      FStar_String.concat "\n" rendered in
    match m with
    | Module (uu___, decls) -> render decls
    | Interface (uu___, decls, uu___1) -> render decls
let (decl_is_val : FStar_Ident.ident -> decl -> Prims.bool) =
  (* True iff [decl1] is a `val` declaration for exactly [id]. *)
  fun id ->
    fun decl1 ->
      match decl1.d with
      | Val (declared, _) -> FStar_Ident.ident_equals id declared
      | _ -> false
let (thunk : term -> term) =
  (* Wrap [ens] as the abstraction `fun _ -> ens`, reusing its range. *)
  fun ens ->
    let wild =
      mk_pattern (PatWild (FStar_Pervasives_Native.None, [])) ens.range in
    mk_term (Abs ([wild], ens)) ens.range Expr
let (ident_of_binder :
  FStar_Compiler_Range.range -> binder -> FStar_Ident.ident) =
  (* The identifier bound by [b]; wildcard (NoName) binders are rejected
     with a fatal error at range [r]. *)
  fun r ->
    fun b ->
      match b.b with
      | Variable i | TVariable i -> i
      | Annotated (i, _) | TAnnotated (i, _) -> i
      | NoName _ ->
          FStar_Errors.raise_error
            (FStar_Errors_Codes.Fatal_MissingQuantifierBinder,
              "Wildcard binders in quantifiers are not allowed") r
let (idents_of_binders :
  binder Prims.list ->
    FStar_Compiler_Range.range -> FStar_Ident.ident Prims.list)
  =
  (* Identifier of every binder in [bs]; fails (via ident_of_binder) on
     wildcard binders, reporting at range [r]. *)
  fun bs -> fun r -> FStar_Compiler_List.map (ident_of_binder r) bs
let (decl_syntax_is_delimited : decl -> Prims.bool) =
  (* Whether the declaration's concrete syntax is self-delimited:
     pragmas (except bare reset/push-options), braced effect definitions,
     braced sub_effects, and Tycons carrying their own flag. *)
  fun d ->
    match d.d with
    | Pragma (ResetOptions (FStar_Pervasives_Native.None))
    | Pragma (PushOptions (FStar_Pervasives_Native.None)) -> false
    | Pragma _ -> true
    | NewEffect (DefineEffect _) -> true
    | LayeredEffect (DefineEffect _) -> true
    | SubEffect { braced = true;_} -> true
    | Tycon (_, b, _) -> b
    | _ -> false

================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_AST_Util.ml
================================================
open Prims
(* Identifier equality, named to fit the eq_* family of comparators. *)
let (eq_ident : FStar_Ident.ident -> FStar_Ident.ident -> Prims.bool) =
  FStar_Ident.ident_equals
let eq_list :
  'a .
    ('a -> 'a -> Prims.bool) -> 'a Prims.list -> 'a Prims.list -> Prims.bool
  =
  (* Pointwise list equality: lengths must agree, then every pair of
     corresponding elements must satisfy [f]. *)
  fun f ->
    fun t1 ->
      fun t2 ->
        if (FStar_Compiler_List.length t1) = (FStar_Compiler_List.length t2)
        then FStar_Compiler_List.forall2 f t1 t2
        else false
let eq_option :
  'a .
    ('a -> 'a -> Prims.bool) ->
      'a FStar_Pervasives_Native.option ->
        'a FStar_Pervasives_Native.option -> Prims.bool
  =
  (* Option equality lifted from an element comparator: two Nones are
     equal, two Somes compare their payloads, mixed shapes differ. *)
  fun f ->
    fun o1 ->
      fun o2 ->
        match (o1, o2) with
        | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.None) ->
            true
        | (FStar_Pervasives_Native.Some x1, FStar_Pervasives_Native.Some x2)
            -> f x1 x2
        | _ -> false
let (eq_sconst : FStar_Const.sconst -> FStar_Const.sconst -> Prims.bool) =
  (* Structural equality on surface constants.  String constants compare
     by content only (their second component is ignored) and reify
     markers compare equal regardless of payload. *)
  fun c1 ->
    fun c2 ->
      match (c1, c2) with
      | (FStar_Const.Const_effect, FStar_Const.Const_effect) -> true
      | (FStar_Const.Const_unit, FStar_Const.Const_unit) -> true
      | (FStar_Const.Const_bool b1, FStar_Const.Const_bool b2) -> b1 = b2
      | (FStar_Const.Const_int (n1, w1), FStar_Const.Const_int (n2, w2)) ->
          (n1 = n2) && (w1 = w2)
      | (FStar_Const.Const_char x1, FStar_Const.Const_char x2) -> x1 = x2
      | (FStar_Const.Const_string (s1, _), FStar_Const.Const_string (s2, _))
          -> s1 = s2
      | (FStar_Const.Const_real r1, FStar_Const.Const_real r2) -> r1 = r2
      | (FStar_Const.Const_range r1, FStar_Const.Const_range r2) -> r1 = r2
      | (FStar_Const.Const_reify _, FStar_Const.Const_reify _) -> true
      | (FStar_Const.Const_reflect l1, FStar_Const.Const_reflect l2) ->
          FStar_Ident.lid_equals l1 l2
      | _ -> false
let rec (eq_term :
  FStar_Parser_AST.term -> FStar_Parser_AST.term -> Prims.bool) =
  (* Terms are equal iff their underlying constructors are; ranges and
     levels are not compared. *)
  fun t1 ->
    fun t2 -> eq_term' t1.FStar_Parser_AST.tm t2.FStar_Parser_AST.tm
and (eq_terms :
  FStar_Parser_AST.term Prims.list ->
    FStar_Parser_AST.term Prims.list -> Prims.bool)
  =
  (* Pointwise equality on term lists. *)
  fun ts1 -> fun ts2 -> eq_list eq_term ts1 ts2
and (eq_arg :
  (FStar_Parser_AST.term * FStar_Parser_AST.imp) ->
    (FStar_Parser_AST.term * FStar_Parser_AST.imp) -> Prims.bool)
  =
  (* Two application arguments agree when both the term and the
     implicit-argument marker agree. *)
  fun a1 ->
    fun a2 ->
      let (t1, i1) = a1 in
      let (t2, i2) = a2 in
      (eq_term t1 t2) && (eq_imp i1 i2)
and (eq_imp : FStar_Parser_AST.imp -> FStar_Parser_AST.imp -> Prims.bool) =
  (* Equality of implicit-argument markers; only HashBrace carries a
     payload, which is compared as a term. *)
  fun i1 ->
    fun i2 ->
      match (i1, i2) with
      | (FStar_Parser_AST.HashBrace t1, FStar_Parser_AST.HashBrace t2) ->
          eq_term t1 t2
      | (FStar_Parser_AST.FsTypApp, FStar_Parser_AST.FsTypApp) -> true
      | (FStar_Parser_AST.Hash, FStar_Parser_AST.Hash) -> true
      | (FStar_Parser_AST.UnivApp, FStar_Parser_AST.UnivApp) -> true
      | (FStar_Parser_AST.Infix, FStar_Parser_AST.Infix) -> true
      | (FStar_Parser_AST.Nothing, FStar_Parser_AST.Nothing) -> true
      | _ -> false
and (eq_args :
  (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list ->
    (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list -> Prims.bool)
  =
  (* Pointwise equality on argument lists. *)
  fun args1 -> fun args2 -> eq_list eq_arg args1 args2
and (eq_arg_qualifier :
  FStar_Parser_AST.arg_qualifier ->
    FStar_Parser_AST.arg_qualifier -> Prims.bool)
  =
  (* Equality of argument qualifiers; Meta payloads compare as terms. *)
  fun q1 ->
    fun q2 ->
      match (q1, q2) with
      | (FStar_Parser_AST.Implicit, FStar_Parser_AST.Implicit) -> true
      | (FStar_Parser_AST.Equality, FStar_Parser_AST.Equality) -> true
      | (FStar_Parser_AST.TypeClassArg, FStar_Parser_AST.TypeClassArg) ->
          true
      | (FStar_Parser_AST.Meta t1, FStar_Parser_AST.Meta t2) -> eq_term t1 t2
      | _ -> false
and (eq_pattern :
  FStar_Parser_AST.pattern -> FStar_Parser_AST.pattern -> Prims.bool) =
  (* Patterns are equal iff their underlying constructors are; ranges are
     not compared. *)
  fun p1 ->
    fun p2 -> eq_pattern' p1.FStar_Parser_AST.pat p2.FStar_Parser_AST.pat
and (eq_aqual :
  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->
    FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->
      Prims.bool)
  =
  (* Equality of optional argument qualifiers. *)
  fun q1 -> fun q2 -> eq_option eq_arg_qualifier q1 q2
(* Structural equality on pattern constructors.  Constructor-wise
   comparison: payloads are compared with the matching eq_* helper, and
   any mixed pair of constructors is unequal. *)
and (eq_pattern' :
  FStar_Parser_AST.pattern' -> FStar_Parser_AST.pattern' -> Prims.bool) =
  fun p1 ->
    fun p2 ->
      match (p1, p2) with
      | (FStar_Parser_AST.PatWild (q1, a1), FStar_Parser_AST.PatWild
         (q2, a2)) -> (eq_aqual q1 q2) && (eq_terms a1 a2)
      | (FStar_Parser_AST.PatConst s1, FStar_Parser_AST.PatConst s2) ->
          eq_sconst s1 s2
      | (FStar_Parser_AST.PatApp (p11, ps1), FStar_Parser_AST.PatApp
         (p21, ps2)) -> (eq_pattern p11 p21) && (eq_list eq_pattern ps1 ps2)
      | (FStar_Parser_AST.PatTvar (i1, aq1, as1), FStar_Parser_AST.PatTvar
         (i2, aq2, as2)) ->
          ((FStar_Ident.ident_equals i1 i2) && (eq_aqual aq1 aq2)) &&
            (eq_terms as1 as2)
      | (FStar_Parser_AST.PatVar (i1, aq1, as1), FStar_Parser_AST.PatVar
         (i2, aq2, as2)) ->
          ((FStar_Ident.ident_equals i1 i2) && (eq_aqual aq1 aq2)) &&
            (eq_terms as1 as2)
      | (FStar_Parser_AST.PatName l1, FStar_Parser_AST.PatName l2) ->
          FStar_Ident.lid_equals l1 l2
      | (FStar_Parser_AST.PatOr ps1, FStar_Parser_AST.PatOr ps2) ->
          eq_list eq_pattern ps1 ps2
      | (FStar_Parser_AST.PatList ps1, FStar_Parser_AST.PatList ps2) ->
          eq_list eq_pattern ps1 ps2
      | (FStar_Parser_AST.PatTuple (ps1, b1), FStar_Parser_AST.PatTuple
         (ps2, b2)) -> (eq_list eq_pattern ps1 ps2) && (b1 = b2)
      | (FStar_Parser_AST.PatRecord ps1, FStar_Parser_AST.PatRecord ps2) ->
          (* Field-by-field: both the field lid and its sub-pattern must
             agree (field order is significant here). *)
          eq_list
            (fun uu___ ->
               fun uu___1 ->
                 match (uu___, uu___1) with
                 | ((l1, p11), (l2, p21)) ->
                     (FStar_Ident.lid_equals l1 l2) && (eq_pattern p11 p21))
            ps1 ps2
      | (FStar_Parser_AST.PatAscribed (p11, (t1, topt1)),
         FStar_Parser_AST.PatAscribed (p21, (t2, topt2))) ->
          ((eq_pattern p11 p21) && (eq_term t1 t2)) &&
            (eq_option eq_term topt1 topt2)
      | (FStar_Parser_AST.PatOp i1, FStar_Parser_AST.PatOp i2) ->
          eq_ident i1 i2
      | (FStar_Parser_AST.PatVQuote t1, FStar_Parser_AST.PatVQuote t2) ->
          eq_term t1 t2
      | uu___ -> false
(* Structural equality on surface-syntax term bodies (term'): two values are
   equal iff they are built from the same head constructor and every
   corresponding sub-component is equal.  Sub-terms, patterns and binders
   recurse through the sibling eq_* functions of this mutually recursive
   group; primitive payloads (bools, strings, quote kinds, let qualifiers)
   are compared with OCaml's polymorphic (=).  Any pair of distinct head
   constructors falls through to the final catch-all and yields false. *)
and (eq_term' :
  FStar_Parser_AST.term' -> FStar_Parser_AST.term' -> Prims.bool) =
  fun t1 ->
    fun t2 ->
      match (t1, t2) with
      | (FStar_Parser_AST.Wild, FStar_Parser_AST.Wild) -> true
      | (FStar_Parser_AST.Const s1, FStar_Parser_AST.Const s2) ->
          FStar_Const.eq_const s1 s2
      | (FStar_Parser_AST.Op (i1, ts1), FStar_Parser_AST.Op (i2, ts2)) ->
          (eq_ident i1 i2) && (eq_terms ts1 ts2)
      | (FStar_Parser_AST.Tvar i1, FStar_Parser_AST.Tvar i2) ->
          eq_ident i1 i2
      | (FStar_Parser_AST.Uvar i1, FStar_Parser_AST.Uvar i2) ->
          eq_ident i1 i2
      | (FStar_Parser_AST.Var l1, FStar_Parser_AST.Var l2) ->
          FStar_Ident.lid_equals l1 l2
      | (FStar_Parser_AST.Name l1, FStar_Parser_AST.Name l2) ->
          FStar_Ident.lid_equals l1 l2
      | (FStar_Parser_AST.Projector (l1, i1), FStar_Parser_AST.Projector
         (l2, i2)) ->
          (FStar_Ident.lid_equals l1 l2) && (FStar_Ident.ident_equals i1 i2)
      | (FStar_Parser_AST.Construct (l1, args1), FStar_Parser_AST.Construct
         (l2, args2)) ->
          (FStar_Ident.lid_equals l1 l2) && (eq_args args1 args2)
      | (FStar_Parser_AST.Abs (ps1, t11), FStar_Parser_AST.Abs (ps2, t21)) ->
          (eq_list eq_pattern ps1 ps2) && (eq_term t11 t21)
      | (FStar_Parser_AST.App (h1, t11, i1), FStar_Parser_AST.App
         (h2, t21, i2)) ->
          ((eq_term h1 h2) && (eq_term t11 t21)) && (eq_imp i1 i2)
      (* Let bindings: qualifier first, then each (attrs-option, pattern,
         definiens) triple pairwise, then the continuation term. *)
      | (FStar_Parser_AST.Let (lq1, defs1, t11), FStar_Parser_AST.Let
         (lq2, defs2, t21)) ->
          ((lq1 = lq2) &&
             (eq_list
                (fun uu___ ->
                   fun uu___1 ->
                     match (uu___, uu___1) with
                     | ((o1, (p1, t12)), (o2, (p2, t22))) ->
                         ((eq_option eq_terms o1 o2) && (eq_pattern p1 p2))
                           && (eq_term t12 t22)) defs1 defs2))
            && (eq_term t11 t21)
      | (FStar_Parser_AST.LetOperator (defs1, t11),
         FStar_Parser_AST.LetOperator (defs2, t21)) ->
          (eq_list
             (fun uu___ ->
                fun uu___1 ->
                  match (uu___, uu___1) with
                  | ((i1, ps1, t12), (i2, ps2, t22)) ->
                      ((eq_ident i1 i2) && (eq_pattern ps1 ps2)) &&
                        (eq_term t12 t22)) defs1 defs2)
            && (eq_term t11 t21)
      | (FStar_Parser_AST.LetOpen (l1, t11), FStar_Parser_AST.LetOpen
         (l2, t21)) -> (FStar_Ident.lid_equals l1 l2) && (eq_term t11 t21)
      | (FStar_Parser_AST.LetOpenRecord (t11, t21, t3),
         FStar_Parser_AST.LetOpenRecord (t4, t5, t6)) ->
          ((eq_term t11 t4) && (eq_term t21 t5)) && (eq_term t3 t6)
      | (FStar_Parser_AST.Seq (t11, t21), FStar_Parser_AST.Seq (t3, t4)) ->
          (eq_term t11 t3) && (eq_term t21 t4)
      | (FStar_Parser_AST.Bind (i1, t11, t21), FStar_Parser_AST.Bind
         (i2, t3, t4)) ->
          ((FStar_Ident.ident_equals i1 i2) && (eq_term t11 t3)) &&
            (eq_term t21 t4)
      | (FStar_Parser_AST.If (t11, i1, mra1, t21, t3), FStar_Parser_AST.If
         (t4, i2, mra2, t5, t6)) ->
          ((((eq_term t11 t4) && (eq_option eq_ident i1 i2)) &&
              (eq_option eq_match_returns_annotation mra1 mra2))
             && (eq_term t21 t5))
            && (eq_term t3 t6)
      | (FStar_Parser_AST.Match (t11, i1, mra1, bs1), FStar_Parser_AST.Match
         (t21, i2, mra2, bs2)) ->
          (((eq_term t11 t21) && (eq_option eq_ident i1 i2)) &&
             (eq_option eq_match_returns_annotation mra1 mra2))
            && (eq_list eq_branch bs1 bs2)
      | (FStar_Parser_AST.TryWith (t11, bs1), FStar_Parser_AST.TryWith
         (t21, bs2)) -> (eq_term t11 t21) && (eq_list eq_branch bs1 bs2)
      | (FStar_Parser_AST.Ascribed (t11, t21, topt1, b1),
         FStar_Parser_AST.Ascribed (t3, t4, topt2, b2)) ->
          (((eq_term t11 t3) && (eq_term t21 t4)) &&
             (eq_option eq_term topt1 topt2))
            && (b1 = b2)
      | (FStar_Parser_AST.Record (topt1, fs1), FStar_Parser_AST.Record
         (topt2, fs2)) ->
          (eq_option eq_term topt1 topt2) &&
            (eq_list
               (fun uu___ ->
                  fun uu___1 ->
                    match (uu___, uu___1) with
                    | ((l1, t11), (l2, t21)) ->
                        (FStar_Ident.lid_equals l1 l2) && (eq_term t11 t21))
               fs1 fs2)
      | (FStar_Parser_AST.Project (t11, l1), FStar_Parser_AST.Project
         (t21, l2)) -> (eq_term t11 t21) && (FStar_Ident.lid_equals l1 l2)
      | (FStar_Parser_AST.Product (bs1, t11), FStar_Parser_AST.Product
         (bs2, t21)) -> (eq_list eq_binder bs1 bs2) && (eq_term t11 t21)
      (* Sum components are either binders (Inl) or bare terms (Inr);
         mixed Inl/Inr positions compare unequal. *)
      | (FStar_Parser_AST.Sum (bs1, t11), FStar_Parser_AST.Sum (bs2, t21)) ->
          (eq_list
             (fun b1 ->
                fun b2 ->
                  match (b1, b2) with
                  | (FStar_Pervasives.Inl b11, FStar_Pervasives.Inl b21) ->
                      eq_binder b11 b21
                  | (FStar_Pervasives.Inr t12, FStar_Pervasives.Inr t22) ->
                      eq_term t12 t22
                  | (FStar_Pervasives.Inl uu___, FStar_Pervasives.Inr uu___1)
                      -> false
                  | (FStar_Pervasives.Inr uu___, FStar_Pervasives.Inl uu___1)
                      -> false) bs1 bs2)
            && (eq_term t11 t21)
      (* Quantifiers: trigger patterns are (ident list, term list list)
         pairs and are compared component-wise. *)
      | (FStar_Parser_AST.QForall (bs1, ps1, t11), FStar_Parser_AST.QForall
         (bs2, ps2, t21)) ->
          let eq_ps uu___ uu___1 =
            match (uu___, uu___1) with
            | ((is1, ts1), (is2, ts2)) ->
                (eq_list eq_ident is1 is2) &&
                  (eq_list (eq_list eq_term) ts1 ts2) in
          ((eq_list eq_binder bs1 bs2) && (eq_ps ps1 ps2)) &&
            (eq_term t11 t21)
      | (FStar_Parser_AST.QExists (bs1, ps1, t11), FStar_Parser_AST.QExists
         (bs2, ps2, t21)) ->
          let eq_ps uu___ uu___1 =
            match (uu___, uu___1) with
            | ((is1, ts1), (is2, ts2)) ->
                (eq_list eq_ident is1 is2) &&
                  (eq_list (eq_list eq_term) ts1 ts2) in
          ((eq_list eq_binder bs1 bs2) && (eq_ps ps1 ps2)) &&
            (eq_term t11 t21)
      | (FStar_Parser_AST.Refine (t11, t21), FStar_Parser_AST.Refine
         (t3, t4)) -> (eq_binder t11 t3) && (eq_term t21 t4)
      | (FStar_Parser_AST.NamedTyp (i1, t11), FStar_Parser_AST.NamedTyp
         (i2, t21)) -> (eq_ident i1 i2) && (eq_term t11 t21)
      | (FStar_Parser_AST.Paren t11, FStar_Parser_AST.Paren t21) ->
          eq_term t11 t21
      | (FStar_Parser_AST.Requires (t11, s1), FStar_Parser_AST.Requires
         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)
      | (FStar_Parser_AST.Ensures (t11, s1), FStar_Parser_AST.Ensures
         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)
      | (FStar_Parser_AST.LexList ts1, FStar_Parser_AST.LexList ts2) ->
          eq_list eq_term ts1 ts2
      | (FStar_Parser_AST.WFOrder (t11, t21), FStar_Parser_AST.WFOrder
         (t3, t4)) -> (eq_term t11 t3) && (eq_term t21 t4)
      | (FStar_Parser_AST.Decreases (t11, s1), FStar_Parser_AST.Decreases
         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)
      | (FStar_Parser_AST.Labeled (t11, s1, b1), FStar_Parser_AST.Labeled
         (t21, s2, b2)) -> ((eq_term t11 t21) && (s1 = s2)) && (b1 = b2)
      | (FStar_Parser_AST.Discrim l1, FStar_Parser_AST.Discrim l2) ->
          FStar_Ident.lid_equals l1 l2
      | (FStar_Parser_AST.Attributes ts1, FStar_Parser_AST.Attributes ts2) ->
          eq_list eq_term ts1 ts2
      | (FStar_Parser_AST.Antiquote t11, FStar_Parser_AST.Antiquote t21) ->
          eq_term t11 t21
      | (FStar_Parser_AST.Quote (t11, k1), FStar_Parser_AST.Quote (t21, k2))
          -> (eq_term t11 t21) && (k1 = k2)
      | (FStar_Parser_AST.VQuote t11, FStar_Parser_AST.VQuote t21) ->
          eq_term t11 t21
      | (FStar_Parser_AST.CalcProof (t11, t21, cs1),
         FStar_Parser_AST.CalcProof (t3, t4, cs2)) ->
          ((eq_term t11 t3) && (eq_term t21 t4)) &&
            (eq_list eq_calc_step cs1 cs2)
      | (FStar_Parser_AST.IntroForall (bs1, t11, t21),
         FStar_Parser_AST.IntroForall (bs2, t3, t4)) ->
          ((eq_list eq_binder bs1 bs2) && (eq_term t11 t3)) &&
            (eq_term t21 t4)
      | (FStar_Parser_AST.IntroExists (bs1, t11, ts1, t21),
         FStar_Parser_AST.IntroExists (bs2, t3, ts2, t4)) ->
          (((eq_list eq_binder bs1 bs2) && (eq_term t11 t3)) &&
             (eq_list eq_term ts1 ts2))
            && (eq_term t21 t4)
      | (FStar_Parser_AST.IntroImplies (t11, t21, b1, t3),
         FStar_Parser_AST.IntroImplies (t4, t5, b2, t6)) ->
          (((eq_term t11 t4) && (eq_term t21 t5)) && (eq_binder b1 b2)) &&
            (eq_term t3 t6)
      | (FStar_Parser_AST.IntroOr (b1, t11, t21, t3),
         FStar_Parser_AST.IntroOr (b2, t4, t5, t6)) ->
          (((b1 = b2) && (eq_term t11 t4)) && (eq_term t21 t5)) &&
            (eq_term t3 t6)
      | (FStar_Parser_AST.IntroAnd (t11, t21, t3, t4),
         FStar_Parser_AST.IntroAnd (t5, t6, t7, t8)) ->
          (((eq_term t11 t5) && (eq_term t21 t6)) && (eq_term t3 t7)) &&
            (eq_term t4 t8)
      | (FStar_Parser_AST.ElimForall (bs1, t11, ts1),
         FStar_Parser_AST.ElimForall (bs2, t21, ts2)) ->
          ((eq_list eq_binder bs1 bs2) && (eq_term t11 t21)) &&
            (eq_list eq_term ts1 ts2)
      | (FStar_Parser_AST.ElimExists (bs1, t11, t21, b1, t3),
         FStar_Parser_AST.ElimExists (bs2, t4, t5, b2, t6)) ->
          ((((eq_list eq_binder bs1 bs2) && (eq_term t11 t4)) &&
              (eq_term t21 t5))
             && (eq_binder b1 b2))
            && (eq_term t3 t6)
      | (FStar_Parser_AST.ElimImplies (t11, t21, t3),
         FStar_Parser_AST.ElimImplies (t4, t5, t6)) ->
          ((eq_term t11 t4) && (eq_term t21 t5)) && (eq_term t3 t6)
      | (FStar_Parser_AST.ElimOr (t11, t21, t3, b1, t4, b2, t5),
         FStar_Parser_AST.ElimOr (t6, t7, t8, b3, t9, b4, t10)) ->
          ((((((eq_term t11 t6) && (eq_term t21 t7)) && (eq_term t3 t8)) &&
               (eq_binder b1 b3))
              && (eq_term t4 t9))
             && (eq_binder b2 b4))
            && (eq_term t5 t10)
      | (FStar_Parser_AST.ElimAnd (t11, t21, t3, b1, b2, t4),
         FStar_Parser_AST.ElimAnd (t5, t6, t7, b3, b4, t8)) ->
          (((((eq_term t11 t5) && (eq_term t21 t6)) && (eq_term t3 t7)) &&
              (eq_binder b1 b3))
             && (eq_binder b2 b4))
            && (eq_term t4 t8)
      | uu___ -> false
(* Equality of calc-proof steps: the three component terms of a CalcStep
   (relation, justification, next term) must be pairwise equal. *)
and (eq_calc_step :
  FStar_Parser_AST.calc_step -> FStar_Parser_AST.calc_step -> Prims.bool) =
  fun s1 s2 ->
    match (s1, s2) with
    | (FStar_Parser_AST.CalcStep (a1, b1, c1),
       FStar_Parser_AST.CalcStep (a2, b2, c2)) ->
        (eq_term a1 a2 && eq_term b1 b2) && eq_term c1 c2
(* Two binders are equal when their shapes (binder'), implicit-argument
   qualifiers and attribute lists all agree. *)
and (eq_binder :
  FStar_Parser_AST.binder -> FStar_Parser_AST.binder -> Prims.bool) =
  fun x y ->
    let same_shape = eq_binder' x.FStar_Parser_AST.b y.FStar_Parser_AST.b in
    let same_qual =
      eq_aqual x.FStar_Parser_AST.aqual y.FStar_Parser_AST.aqual in
    let same_attrs =
      eq_list eq_term x.FStar_Parser_AST.battributes
        y.FStar_Parser_AST.battributes in
    (same_shape && same_qual) && same_attrs
(* Structural equality on binder shapes.  Variable/TVariable compare their
   ident, Annotated/TAnnotated compare ident and type, NoName compares the
   type alone; differing constructors are unequal. *)
and (eq_binder' :
  FStar_Parser_AST.binder' -> FStar_Parser_AST.binder' -> Prims.bool) =
  fun x y ->
    match (x, y) with
    | (FStar_Parser_AST.Variable a, FStar_Parser_AST.Variable b)
    | (FStar_Parser_AST.TVariable a, FStar_Parser_AST.TVariable b) ->
        eq_ident a b
    | (FStar_Parser_AST.Annotated (a, ta), FStar_Parser_AST.Annotated
       (b, tb))
    | (FStar_Parser_AST.TAnnotated (a, ta), FStar_Parser_AST.TAnnotated
       (b, tb)) -> eq_ident a b && eq_term ta tb
    | (FStar_Parser_AST.NoName ta, FStar_Parser_AST.NoName tb) ->
        eq_term ta tb
    | _ -> false
(* Equality of match-returns annotations: optional as-ident, returned type
   term, and the use-equality flag are compared component-wise. *)
and (eq_match_returns_annotation :
  (FStar_Ident.ident FStar_Pervasives_Native.option * FStar_Parser_AST.term *
    Prims.bool) ->
    (FStar_Ident.ident FStar_Pervasives_Native.option * FStar_Parser_AST.term
      * Prims.bool) -> Prims.bool)
  =
  fun (iopt1, ty1, flag1) (iopt2, ty2, flag2) ->
    (eq_option eq_ident iopt1 iopt2 && eq_term ty1 ty2) && (flag1 = flag2)
(* Equality of match branches: pattern, optional when-guard and branch body
   must each be equal. *)
and (eq_branch :
  (FStar_Parser_AST.pattern * FStar_Parser_AST.term
    FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->
    (FStar_Parser_AST.pattern * FStar_Parser_AST.term
      FStar_Pervasives_Native.option * FStar_Parser_AST.term) -> Prims.bool)
  =
  fun (pat1, guard1, body1) (pat2, guard2, body2) ->
    (eq_pattern pat1 pat2 && eq_option eq_term guard1 guard2) &&
      eq_term body1 body2
(* Equality of record type declarations: field lists must match pairwise on
   name, qualifier, attributes and field type. *)
let (eq_tycon_record :
  FStar_Parser_AST.tycon_record ->
    FStar_Parser_AST.tycon_record -> Prims.bool)
  =
  fun r1 r2 ->
    let eq_field (n1, q1, attrs1, ty1) (n2, q2, attrs2, ty2) =
      ((eq_ident n1 n2 && eq_aqual q1 q2) && eq_list eq_term attrs1 attrs2)
        && eq_term ty1 ty2 in
    eq_list eq_field r1 r2
(* Equality of variant-constructor payloads.  VpOfNotation and VpArbitrary
   both wrap a single term; VpRecord compares the record fields plus an
   optional kind term.  Different payload kinds are unequal. *)
let (eq_constructor_payload :
  FStar_Parser_AST.constructor_payload ->
    FStar_Parser_AST.constructor_payload -> Prims.bool)
  =
  fun p1 p2 ->
    match (p1, p2) with
    | (FStar_Parser_AST.VpOfNotation ty1, FStar_Parser_AST.VpOfNotation ty2)
    | (FStar_Parser_AST.VpArbitrary ty1, FStar_Parser_AST.VpArbitrary ty2)
        -> eq_term ty1 ty2
    | (FStar_Parser_AST.VpRecord (r1, k1), FStar_Parser_AST.VpRecord
       (r2, k2)) -> eq_tycon_record r1 r2 && eq_option eq_term k1 k2
    | _ -> false
(* Structural equality on type declarations (abstract, abbreviation, record
   and variant forms).  Every form compares its name, binder list and
   optional kind; the record form additionally compares attributes and
   fields, the variant form its constructor cases. *)
let (eq_tycon :
  FStar_Parser_AST.tycon -> FStar_Parser_AST.tycon -> Prims.bool) =
  fun x y ->
    let eq_params = eq_list eq_binder in
    let eq_kind = eq_option eq_term in
    match (x, y) with
    | (FStar_Parser_AST.TyconAbstract (n1, ps1, k1),
       FStar_Parser_AST.TyconAbstract (n2, ps2, k2)) ->
        (eq_ident n1 n2 && eq_params ps1 ps2) && eq_kind k1 k2
    | (FStar_Parser_AST.TyconAbbrev (n1, ps1, k1, d1),
       FStar_Parser_AST.TyconAbbrev (n2, ps2, k2, d2)) ->
        ((eq_ident n1 n2 && eq_params ps1 ps2) && eq_kind k1 k2) &&
          eq_term d1 d2
    | (FStar_Parser_AST.TyconRecord (n1, ps1, k1, attrs1, r1),
       FStar_Parser_AST.TyconRecord (n2, ps2, k2, attrs2, r2)) ->
        (((eq_ident n1 n2 && eq_params ps1 ps2) && eq_kind k1 k2) &&
           eq_list eq_term attrs1 attrs2)
          && eq_tycon_record r1 r2
    | (FStar_Parser_AST.TyconVariant (n1, ps1, k1, cs1),
       FStar_Parser_AST.TyconVariant (n2, ps2, k2, cs2)) ->
        (* One variant case is (constructor name, optional payload,
           attribute terms). *)
        let eq_case (cn1, pl1, cattrs1) (cn2, pl2, cattrs2) =
          (eq_ident cn1 cn2 && eq_option eq_constructor_payload pl1 pl2) &&
            eq_list eq_term cattrs1 cattrs2 in
        ((eq_ident n1 n2 && eq_params ps1 ps2) && eq_kind k1 k2) &&
          eq_list eq_case cs1 cs2
    | _ -> false
(* Long-identifier equality; a direct alias for FStar_Ident.lid_equals. *)
let (eq_lid : FStar_Ident.lident -> FStar_Ident.lident -> Prims.bool) =
  fun l1 l2 -> FStar_Ident.lid_equals l1 l2
(* Equality of effect lifts: source and destination effect names must
   coincide, and the lift operation payloads must match constructor-wise. *)
let (eq_lift : FStar_Parser_AST.lift -> FStar_Parser_AST.lift -> Prims.bool)
  =
  fun l1 l2 ->
    let eq_op op1 op2 =
      match (op1, op2) with
      | (FStar_Parser_AST.NonReifiableLift d1,
         FStar_Parser_AST.NonReifiableLift d2)
      | (FStar_Parser_AST.LiftForFree d1, FStar_Parser_AST.LiftForFree d2)
          -> eq_term d1 d2
      | (FStar_Parser_AST.ReifiableLift (w1, d1),
         FStar_Parser_AST.ReifiableLift (w2, d2)) ->
          eq_term w1 w2 && eq_term d1 d2
      | _ -> false in
    (eq_lid l1.FStar_Parser_AST.msource l2.FStar_Parser_AST.msource &&
       eq_lid l1.FStar_Parser_AST.mdest l2.FStar_Parser_AST.mdest)
      && eq_op l1.FStar_Parser_AST.lift_op l2.FStar_Parser_AST.lift_op
(* Equality of pragmas.  Option-string payloads use polymorphic string
   equality; the nullary pragmas are equal exactly when both sides are the
   same constructor. *)
let (eq_pragma :
  FStar_Parser_AST.pragma -> FStar_Parser_AST.pragma -> Prims.bool) =
  fun p1 p2 ->
    let eq_str_opt = eq_option (fun a b -> a = b) in
    match (p1, p2) with
    | (FStar_Parser_AST.SetOptions o1, FStar_Parser_AST.SetOptions o2) ->
        o1 = o2
    | (FStar_Parser_AST.ResetOptions o1, FStar_Parser_AST.ResetOptions o2)
    | (FStar_Parser_AST.PushOptions o1, FStar_Parser_AST.PushOptions o2) ->
        eq_str_opt o1 o2
    | (FStar_Parser_AST.PopOptions, FStar_Parser_AST.PopOptions)
    | (FStar_Parser_AST.RestartSolver, FStar_Parser_AST.RestartSolver)
    | (FStar_Parser_AST.PrintEffectsGraph,
       FStar_Parser_AST.PrintEffectsGraph) -> true
    | _ -> false
(* Qualifier equality: every qualifier constructor is nullary, so two
   qualifiers are equal iff they are the same constructor.  Spelled as one
   or-pattern chain rather than one arm per constructor. *)
let (eq_qualifier :
  FStar_Parser_AST.qualifier -> FStar_Parser_AST.qualifier -> Prims.bool) =
  fun q1 q2 ->
    match (q1, q2) with
    | (FStar_Parser_AST.Private, FStar_Parser_AST.Private)
    | (FStar_Parser_AST.Noeq, FStar_Parser_AST.Noeq)
    | (FStar_Parser_AST.Unopteq, FStar_Parser_AST.Unopteq)
    | (FStar_Parser_AST.Assumption, FStar_Parser_AST.Assumption)
    | (FStar_Parser_AST.DefaultEffect, FStar_Parser_AST.DefaultEffect)
    | (FStar_Parser_AST.TotalEffect, FStar_Parser_AST.TotalEffect)
    | (FStar_Parser_AST.Effect_qual, FStar_Parser_AST.Effect_qual)
    | (FStar_Parser_AST.New, FStar_Parser_AST.New)
    | (FStar_Parser_AST.Inline, FStar_Parser_AST.Inline)
    | (FStar_Parser_AST.Visible, FStar_Parser_AST.Visible)
    | (FStar_Parser_AST.Unfold_for_unification_and_vcgen,
       FStar_Parser_AST.Unfold_for_unification_and_vcgen)
    | (FStar_Parser_AST.Inline_for_extraction,
       FStar_Parser_AST.Inline_for_extraction)
    | (FStar_Parser_AST.Irreducible, FStar_Parser_AST.Irreducible)
    | (FStar_Parser_AST.NoExtract, FStar_Parser_AST.NoExtract)
    | (FStar_Parser_AST.Reifiable, FStar_Parser_AST.Reifiable)
    | (FStar_Parser_AST.Reflectable, FStar_Parser_AST.Reflectable)
    | (FStar_Parser_AST.Opaque, FStar_Parser_AST.Opaque)
    | (FStar_Parser_AST.Logic, FStar_Parser_AST.Logic) -> true
    | _ -> false
(* Lists of qualifiers compare element-wise with eq_qualifier. *)
let (eq_qualifiers :
  FStar_Parser_AST.qualifiers -> FStar_Parser_AST.qualifiers -> Prims.bool) =
  eq_list eq_qualifier
(* Structural equality on declaration bodies (decl').  Declarations built
   from different constructors are unequal; otherwise all components are
   compared pairwise via the eq_* helpers above.  Constructors that carry a
   single lident are grouped in one or-pattern arm. *)
let rec (eq_decl' :
  FStar_Parser_AST.decl' -> FStar_Parser_AST.decl' -> Prims.bool) =
  fun x y ->
    match (x, y) with
    | (FStar_Parser_AST.TopLevelModule a, FStar_Parser_AST.TopLevelModule b)
    | (FStar_Parser_AST.Open a, FStar_Parser_AST.Open b)
    | (FStar_Parser_AST.Friend a, FStar_Parser_AST.Friend b)
    | (FStar_Parser_AST.Include a, FStar_Parser_AST.Include b) -> eq_lid a b
    | (FStar_Parser_AST.ModuleAbbrev (n1, l1),
       FStar_Parser_AST.ModuleAbbrev (n2, l2)) ->
        eq_ident n1 n2 && eq_lid l1 l2
    | (FStar_Parser_AST.TopLevelLet (q1, lbs1),
       FStar_Parser_AST.TopLevelLet (q2, lbs2)) ->
        let eq_lb (p1, d1) (p2, d2) = eq_pattern p1 p2 && eq_term d1 d2 in
        (q1 = q2) && eq_list eq_lb lbs1 lbs2
    | (FStar_Parser_AST.Tycon (b1, b2, tcs1),
       FStar_Parser_AST.Tycon (b3, b4, tcs2)) ->
        ((b1 = b3) && (b2 = b4)) && eq_list eq_tycon tcs1 tcs2
    | (FStar_Parser_AST.Val (n1, ty1), FStar_Parser_AST.Val (n2, ty2))
    | (FStar_Parser_AST.Assume (n1, ty1), FStar_Parser_AST.Assume (n2, ty2))
        -> eq_ident n1 n2 && eq_term ty1 ty2
    | (FStar_Parser_AST.Exception (n1, p1),
       FStar_Parser_AST.Exception (n2, p2)) ->
        eq_ident n1 n2 && eq_option eq_term p1 p2
    | (FStar_Parser_AST.NewEffect e1, FStar_Parser_AST.NewEffect e2)
    | (FStar_Parser_AST.LayeredEffect e1, FStar_Parser_AST.LayeredEffect e2)
        -> eq_effect_decl e1 e2
    | (FStar_Parser_AST.SubEffect s1, FStar_Parser_AST.SubEffect s2) ->
        eq_lift s1 s2
    | (FStar_Parser_AST.Polymonadic_bind (m1, n1, p1, tm1),
       FStar_Parser_AST.Polymonadic_bind (m2, n2, p2, tm2)) ->
        ((eq_lid m1 m2 && eq_lid n1 n2) && eq_lid p1 p2) && eq_term tm1 tm2
    | (FStar_Parser_AST.Polymonadic_subcomp (m1, n1, tm1),
       FStar_Parser_AST.Polymonadic_subcomp (m2, n2, tm2)) ->
        (eq_lid m1 m2 && eq_lid n1 n2) && eq_term tm1 tm2
    | (FStar_Parser_AST.Pragma a, FStar_Parser_AST.Pragma b) ->
        eq_pragma a b
    | (FStar_Parser_AST.Splice (ids1, tm1),
       FStar_Parser_AST.Splice (ids2, tm2)) ->
        eq_list eq_ident ids1 ids2 && eq_term tm1 tm2
    | _ -> false
(* Equality of effect declarations: DefineEffect compares name, binders,
   signature term and nested declarations; RedefineEffect compares name,
   binders and the defining term.  Mixed constructors are unequal. *)
and (eq_effect_decl :
  FStar_Parser_AST.effect_decl -> FStar_Parser_AST.effect_decl -> Prims.bool)
  =
  fun e1 e2 ->
    match (e1, e2) with
    | (FStar_Parser_AST.DefineEffect (n1, bs1, sig1, ds1),
       FStar_Parser_AST.DefineEffect (n2, bs2, sig2, ds2)) ->
        ((eq_ident n1 n2 && eq_list eq_binder bs1 bs2) && eq_term sig1 sig2)
          && eq_list eq_decl ds1 ds2
    | (FStar_Parser_AST.RedefineEffect (n1, bs1, d1),
       FStar_Parser_AST.RedefineEffect (n2, bs2, d2)) ->
        (eq_ident n1 n2 && eq_list eq_binder bs1 bs2) && eq_term d1 d2
    | _ -> false
(* Full declaration equality: body (decl'), qualifiers and attributes must
   all agree.  Source ranges are deliberately not part of the comparison. *)
and (eq_decl : FStar_Parser_AST.decl -> FStar_Parser_AST.decl -> Prims.bool)
  =
  fun x y ->
    let same_body = eq_decl' x.FStar_Parser_AST.d y.FStar_Parser_AST.d in
    let same_quals =
      eq_list eq_qualifier x.FStar_Parser_AST.quals
        y.FStar_Parser_AST.quals in
    let same_attrs =
      eq_list eq_term x.FStar_Parser_AST.attrs y.FStar_Parser_AST.attrs in
    (same_body && same_quals) && same_attrs
(* Alias for FStar_Compiler_List.collect.  The leading unit argument keeps
   the binding a syntactic value so it stays fully polymorphic (avoids the
   value restriction on a bare partial application). *)
let concat_map :
  'uuuuu 'uuuuu1 .
    unit ->
      ('uuuuu -> 'uuuuu1 Prims.list) ->
        'uuuuu Prims.list -> 'uuuuu1 Prims.list
  = fun _ f l -> FStar_Compiler_List.collect f l
(* Map an option to a list: Some x yields f x, None yields the empty
   list. *)
let opt_map :
  'uuuuu 'a .
    ('a -> 'uuuuu Prims.list) ->
      'a FStar_Pervasives_Native.option -> 'uuuuu Prims.list
  =
  fun f o ->
    match o with
    | FStar_Pervasives_Native.Some v -> f v
    | FStar_Pervasives_Native.None -> []
(* Collect the lidents referenced by a term by unwrapping it to its body
   (the tm field) and delegating to lidents_of_term'. *)
let rec (lidents_of_term :
  FStar_Parser_AST.term -> FStar_Ident.lident Prims.list) =
  fun t ->
    let body = t.FStar_Parser_AST.tm in lidents_of_term' body
(* Collects, in left-to-right syntactic order, the lidents referenced by a
   term body: Var/Name/Projector/Construct/Discrim (and LetOpen) contribute
   an lident directly; all other constructors concatenate the collections of
   their sub-terms (and, where applicable, sub-patterns or binders) via
   FStar_Compiler_List.op_At.  Duplicates are kept — no deduplication is
   performed. *)
and (lidents_of_term' :
  FStar_Parser_AST.term' -> FStar_Ident.lident Prims.list) =
  fun t ->
    match t with
    | FStar_Parser_AST.Wild -> []
    | FStar_Parser_AST.Const uu___ -> []
    (* The operator head is an ident, not an lident, so it is not
       collected. *)
    | FStar_Parser_AST.Op (s, ts) -> (concat_map ()) lidents_of_term ts
    | FStar_Parser_AST.Tvar uu___ -> []
    | FStar_Parser_AST.Uvar uu___ -> []
    | FStar_Parser_AST.Var lid -> [lid]
    | FStar_Parser_AST.Name lid -> [lid]
    | FStar_Parser_AST.Projector (lid, uu___) -> [lid]
    | FStar_Parser_AST.Construct (lid, ts) ->
        let uu___ =
          (concat_map ())
            (fun uu___1 ->
               match uu___1 with | (t1, uu___2) -> lidents_of_term t1) ts in
        lid :: uu___
    | FStar_Parser_AST.Abs (ps, t1) ->
        let uu___ = (concat_map ()) lidents_of_pattern ps in
        let uu___1 = lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.App (t1, t2, uu___) ->
        let uu___1 = lidents_of_term t1 in
        let uu___2 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___1 uu___2
    | FStar_Parser_AST.Let (uu___, lbs, t1) ->
        let uu___1 =
          (concat_map ())
            (fun uu___2 ->
               match uu___2 with
               | (uu___3, (p, t2)) ->
                   let uu___4 = lidents_of_pattern p in
                   let uu___5 = lidents_of_term t2 in
                   FStar_Compiler_List.op_At uu___4 uu___5) lbs in
        let uu___2 = lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___1 uu___2
    | FStar_Parser_AST.LetOperator (lbs, t1) ->
        let uu___ =
          (concat_map ())
            (fun uu___1 ->
               match uu___1 with
               | (uu___2, p, t2) ->
                   let uu___3 = lidents_of_pattern p in
                   let uu___4 = lidents_of_term t2 in
                   FStar_Compiler_List.op_At uu___3 uu___4) lbs in
        let uu___1 = lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.LetOpen (lid, t1) ->
        let uu___ = lidents_of_term t1 in lid :: uu___
    | FStar_Parser_AST.LetOpenRecord (t1, t2, t3) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 = lidents_of_term t3 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Seq (t1, t2) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Bind (uu___, t1, t2) ->
        let uu___1 = lidents_of_term t1 in
        let uu___2 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___1 uu___2
    | FStar_Parser_AST.If (t1, uu___, uu___1, t2, t3) ->
        let uu___2 = lidents_of_term t1 in
        let uu___3 =
          let uu___4 = lidents_of_term t2 in
          let uu___5 = lidents_of_term t3 in
          FStar_Compiler_List.op_At uu___4 uu___5 in
        FStar_Compiler_List.op_At uu___2 uu___3
    | FStar_Parser_AST.Match (t1, uu___, uu___1, bs) ->
        let uu___2 = lidents_of_term t1 in
        let uu___3 = (concat_map ()) lidents_of_branch bs in
        FStar_Compiler_List.op_At uu___2 uu___3
    | FStar_Parser_AST.TryWith (t1, bs) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 = (concat_map ()) lidents_of_branch bs in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Ascribed (t1, t2, uu___, uu___1) ->
        let uu___2 = lidents_of_term t1 in
        let uu___3 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___2 uu___3
    (* Record literal: field values first, then the optional with-record;
       the field lids themselves are not collected. *)
    | FStar_Parser_AST.Record (t1, ts) ->
        let uu___ =
          (concat_map ())
            (fun uu___1 ->
               match uu___1 with | (uu___2, t2) -> lidents_of_term t2) ts in
        let uu___1 = opt_map lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Project (t1, uu___) -> lidents_of_term t1
    | FStar_Parser_AST.Product (ts, t1) ->
        let uu___ = (concat_map ()) lidents_of_binder ts in
        let uu___1 = lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Sum (ts, t1) ->
        let uu___ =
          (concat_map ())
            (fun uu___1 ->
               match uu___1 with
               | FStar_Pervasives.Inl b -> lidents_of_binder b
               | FStar_Pervasives.Inr t2 -> lidents_of_term t2) ts in
        let uu___1 = lidents_of_term t1 in
        FStar_Compiler_List.op_At uu___ uu___1
    (* NOTE(review): the quantifier cases only walk the body; binders and
       trigger patterns are skipped here (unlike ElimForall below, which
       does walk its binders) — presumably intentional, worth confirming
       against upstream F*. *)
    | FStar_Parser_AST.QForall (bs, _pats, t1) -> lidents_of_term t1
    | FStar_Parser_AST.QExists (bs, _pats, t1) -> lidents_of_term t1
    | FStar_Parser_AST.Refine (b, t1) -> lidents_of_term t1
    | FStar_Parser_AST.NamedTyp (i, t1) -> lidents_of_term t1
    | FStar_Parser_AST.Paren t1 -> lidents_of_term t1
    | FStar_Parser_AST.Requires (t1, uu___) -> lidents_of_term t1
    | FStar_Parser_AST.Ensures (t1, uu___) -> lidents_of_term t1
    | FStar_Parser_AST.LexList ts -> (concat_map ()) lidents_of_term ts
    | FStar_Parser_AST.WFOrder (t1, t2) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.Decreases (t1, uu___) -> lidents_of_term t1
    | FStar_Parser_AST.Labeled (t1, uu___, uu___1) -> lidents_of_term t1
    | FStar_Parser_AST.Discrim lid -> [lid]
    | FStar_Parser_AST.Attributes ts -> (concat_map ()) lidents_of_term ts
    | FStar_Parser_AST.Antiquote t1 -> lidents_of_term t1
    | FStar_Parser_AST.Quote (t1, uu___) -> lidents_of_term t1
    | FStar_Parser_AST.VQuote t1 -> lidents_of_term t1
    | FStar_Parser_AST.CalcProof (t1, t2, ts) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 = (concat_map ()) lidents_of_calc_step ts in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.IntroForall (bs, t1, t2) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 = lidents_of_term t2 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.IntroExists (bs, t1, ts, t2) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = (concat_map ()) lidents_of_term ts in
          let uu___3 = lidents_of_term t2 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.IntroImplies (t1, t2, b, t3) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 = lidents_of_term t3 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.IntroOr (b, t1, t2, t3) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 = lidents_of_term t3 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.IntroAnd (t1, t2, t3, t4) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 =
            let uu___4 = lidents_of_term t3 in
            let uu___5 = lidents_of_term t4 in
            FStar_Compiler_List.op_At uu___4 uu___5 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.ElimForall (bs, t1, ts) ->
        let uu___ = (concat_map ()) lidents_of_binder bs in
        let uu___1 =
          let uu___2 = lidents_of_term t1 in
          let uu___3 = (concat_map ()) lidents_of_term ts in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.ElimExists (bs, t1, t2, b, t3) ->
        let uu___ = (concat_map ()) lidents_of_binder bs in
        let uu___1 =
          let uu___2 = lidents_of_term t1 in
          let uu___3 =
            let uu___4 = lidents_of_term t2 in
            let uu___5 = lidents_of_term t3 in
            FStar_Compiler_List.op_At uu___4 uu___5 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.ElimImplies (t1, t2, t3) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 = lidents_of_term t3 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.ElimOr (t1, t2, t3, b1, t4, b2, t5) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 =
            let uu___4 = lidents_of_term t3 in
            let uu___5 =
              let uu___6 = lidents_of_term t4 in
              let uu___7 = lidents_of_term t5 in
              FStar_Compiler_List.op_At uu___6 uu___7 in
            FStar_Compiler_List.op_At uu___4 uu___5 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
    | FStar_Parser_AST.ElimAnd (t1, t2, t3, b1, b2, t4) ->
        let uu___ = lidents_of_term t1 in
        let uu___1 =
          let uu___2 = lidents_of_term t2 in
          let uu___3 =
            let uu___4 = lidents_of_term t3 in
            let uu___5 = lidents_of_term t4 in
            FStar_Compiler_List.op_At uu___4 uu___5 in
          FStar_Compiler_List.op_At uu___2 uu___3 in
        FStar_Compiler_List.op_At uu___ uu___1
and (lidents_of_branch :
  (FStar_Parser_AST.pattern * FStar_Parser_AST.term
    FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->
    FStar_Ident.lident Prims.list)
  =
  (* Lidents of a match branch: those of the pattern followed by those
     of the branch body.  The optional middle component (presumably a
     when-clause; confirm against FStar_Parser_AST) is ignored, exactly
     as in the original code. *)
  fun (pat, _opt, body) ->
    FStar_Compiler_List.op_At (lidents_of_pattern pat)
      (lidents_of_term body)
and (lidents_of_calc_step :
  FStar_Parser_AST.calc_step -> FStar_Ident.lident Prims.list) =
  (* A calc step carries three terms; collect lidents from each,
     concatenated left to right (t1 @ t2 @ t3). *)
  fun step ->
    match step with
    | FStar_Parser_AST.CalcStep (t1, t2, t3) ->
        FStar_Compiler_List.op_At (lidents_of_term t1)
          (FStar_Compiler_List.op_At (lidents_of_term t2)
             (lidents_of_term t3))
and (lidents_of_pattern :
  FStar_Parser_AST.pattern -> FStar_Ident.lident Prims.list) =
  (* Collect every lident syntactically reachable from a pattern.
     Wildcards, constants, type variables and operator patterns
     contribute nothing; a variable pattern contributes its own name
     promoted to a (single-segment) lident. *)
  fun p ->
    match p.FStar_Parser_AST.pat with
    | FStar_Parser_AST.PatWild _ -> []
    | FStar_Parser_AST.PatConst _ -> []
    | FStar_Parser_AST.PatApp (hd, args) ->
        FStar_Compiler_List.op_At (lidents_of_pattern hd)
          ((concat_map ()) lidents_of_pattern args)
    | FStar_Parser_AST.PatVar (i, _, _) -> [FStar_Ident.lid_of_ids [i]]
    | FStar_Parser_AST.PatName lid -> [lid]
    | FStar_Parser_AST.PatTvar (_, _, _) -> []
    | FStar_Parser_AST.PatList ps -> (concat_map ()) lidents_of_pattern ps
    | FStar_Parser_AST.PatTuple (ps, _) ->
        (concat_map ()) lidents_of_pattern ps
    | FStar_Parser_AST.PatRecord fields ->
        (* Field names themselves are skipped; only sub-patterns count. *)
        (concat_map ()) (fun (_, sub) -> lidents_of_pattern sub) fields
    | FStar_Parser_AST.PatAscribed (inner, (ty, tac_opt)) ->
        FStar_Compiler_List.op_At (lidents_of_pattern inner)
          (FStar_Compiler_List.op_At (lidents_of_term ty)
             (opt_map lidents_of_term tac_opt))
    | FStar_Parser_AST.PatOr ps -> (concat_map ()) lidents_of_pattern ps
    | FStar_Parser_AST.PatOp _ -> []
    | FStar_Parser_AST.PatVQuote t -> lidents_of_term t
and (lidents_of_binder :
  FStar_Parser_AST.binder -> FStar_Ident.lident Prims.list) =
  (* Only binder forms that carry a term (an annotation or a bare type)
     can mention lidents; every other binder form yields nothing. *)
  fun b ->
    match b.FStar_Parser_AST.b with
    | FStar_Parser_AST.Annotated (_, t)
    | FStar_Parser_AST.TAnnotated (_, t)
    | FStar_Parser_AST.NoName t -> lidents_of_term t
    | _ -> []
let lidents_of_tycon_record :
  'uuuuu 'uuuuu1 'uuuuu2 .
    ('uuuuu * 'uuuuu1 * 'uuuuu2 * FStar_Parser_AST.term) ->
      FStar_Ident.lident Prims.list
  =
  (* A record field is a 4-tuple whose last component is the field's
     type term; only that term can mention lidents. *)
  fun (_, _, _, t) -> lidents_of_term t
let (lidents_of_constructor_payload :
  FStar_Parser_AST.constructor_payload -> FStar_Ident.lident Prims.list) =
  (* Lidents mentioned by a data-constructor payload: either a single
     term, or a record of fields optionally followed by a result term. *)
  fun payload ->
    match payload with
    | FStar_Parser_AST.VpOfNotation t
    | FStar_Parser_AST.VpArbitrary t -> lidents_of_term t
    | FStar_Parser_AST.VpRecord (fields, FStar_Pervasives_Native.None) ->
        (concat_map ()) lidents_of_tycon_record fields
    | FStar_Parser_AST.VpRecord (fields, FStar_Pervasives_Native.Some t) ->
        FStar_Compiler_List.op_At
          ((concat_map ()) lidents_of_tycon_record fields)
          (lidents_of_term t)
let (lidents_of_tycon_variant :
  (FStar_Ident.ident * FStar_Parser_AST.constructor_payload
    FStar_Pervasives_Native.option * FStar_Parser_AST.attributes_) ->
    FStar_Ident.lident Prims.list)
  =
  (* Only the optional payload of a variant constructor can mention
     lidents; the constructor name and attributes are skipped. *)
  fun (_, payload_opt, _) ->
    match payload_opt with
    | FStar_Pervasives_Native.None -> []
    | FStar_Pervasives_Native.Some payload ->
        lidents_of_constructor_payload payload
let (lidents_of_tycon :
  FStar_Parser_AST.tycon -> FStar_Ident.lident Prims.list) =
  (* Lidents mentioned by a type declaration: binders first, then the
     optional kind annotation, then the definition body (abbreviation
     term, record fields, or variant constructors). *)
  fun tc ->
    match tc with
    | FStar_Parser_AST.TyconAbstract (_, bs, k) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (opt_map lidents_of_term k)
    | FStar_Parser_AST.TyconAbbrev (_, bs, k, t) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (FStar_Compiler_List.op_At (opt_map lidents_of_term k)
             (lidents_of_term t))
    | FStar_Parser_AST.TyconRecord (_, bs, k, _, fields) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (FStar_Compiler_List.op_At (opt_map lidents_of_term k)
             ((concat_map ()) lidents_of_tycon_record fields))
    | FStar_Parser_AST.TyconVariant (_, bs, k, ctors) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (FStar_Compiler_List.op_At (opt_map lidents_of_term k)
             ((concat_map ()) lidents_of_tycon_variant ctors))
let (lidents_of_lift :
  FStar_Parser_AST.lift -> FStar_Ident.lident Prims.list) =
  (* The lift's source and destination effect names come first,
     followed by whatever the lift definition itself mentions. *)
  fun l ->
    let from_def =
      match l.FStar_Parser_AST.lift_op with
      | FStar_Parser_AST.NonReifiableLift t -> lidents_of_term t
      | FStar_Parser_AST.ReifiableLift (t1, t2) ->
          FStar_Compiler_List.op_At (lidents_of_term t1)
            (lidents_of_term t2)
      | FStar_Parser_AST.LiftForFree t -> lidents_of_term t in
    FStar_Compiler_List.op_At
      [l.FStar_Parser_AST.msource; l.FStar_Parser_AST.mdest] from_def
let rec (lidents_of_decl :
  FStar_Parser_AST.decl -> FStar_Ident.lident Prims.list) =
  (* Lidents mentioned by a top-level declaration.  Module-level
     directives (open/friend/include/abbrev) contribute the named
     module itself; definitional forms recurse into their terms,
     patterns, type declarations or nested effect declarations. *)
  fun d ->
    match d.FStar_Parser_AST.d with
    | FStar_Parser_AST.TopLevelModule _ -> []
    | FStar_Parser_AST.Open l -> [l]
    | FStar_Parser_AST.Friend l -> [l]
    | FStar_Parser_AST.Include l -> [l]
    | FStar_Parser_AST.ModuleAbbrev (_, l) -> [l]
    | FStar_Parser_AST.TopLevelLet (_q, lbs) ->
        (* Each let-binding contributes its pattern's lidents, then its
           body's. *)
        (concat_map ())
          (fun (p, t) ->
             FStar_Compiler_List.op_At (lidents_of_pattern p)
               (lidents_of_term t)) lbs
    | FStar_Parser_AST.Tycon (_, _, tcs) ->
        (concat_map ()) lidents_of_tycon tcs
    | FStar_Parser_AST.Val (_, t) -> lidents_of_term t
    | FStar_Parser_AST.Exception (_, FStar_Pervasives_Native.None) -> []
    | FStar_Parser_AST.Exception (_, FStar_Pervasives_Native.Some t) ->
        lidents_of_term t
    | FStar_Parser_AST.NewEffect ed -> lidents_of_effect_decl ed
    | FStar_Parser_AST.LayeredEffect ed -> lidents_of_effect_decl ed
    | FStar_Parser_AST.SubEffect lift -> lidents_of_lift lift
    | FStar_Parser_AST.Polymonadic_bind (l0, l1, l2, t) ->
        l0 :: l1 :: l2 :: (lidents_of_term t)
    | FStar_Parser_AST.Polymonadic_subcomp (l0, l1, t) ->
        l0 :: l1 :: (lidents_of_term t)
    | FStar_Parser_AST.Pragma _ -> []
    | FStar_Parser_AST.Assume (_, t) -> lidents_of_term t
    | FStar_Parser_AST.Splice (_, t) -> lidents_of_term t
and (lidents_of_effect_decl :
  FStar_Parser_AST.effect_decl -> FStar_Ident.lident Prims.list) =
  (* Effect declarations: binders, the signature term, and — for a
     full definition — the nested declarations' lidents as well. *)
  fun ed ->
    match ed with
    | FStar_Parser_AST.DefineEffect (_, bs, t, ds) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (FStar_Compiler_List.op_At (lidents_of_term t)
             ((concat_map ()) lidents_of_decl ds))
    | FStar_Parser_AST.RedefineEffect (_, bs, t) ->
        FStar_Compiler_List.op_At ((concat_map ()) lidents_of_binder bs)
          (lidents_of_term t)

================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_Const.ml
================================================
open Prims
(* Build a fully-qualified lident from a path, positioned at the dummy
   range (these constants carry no real source location). *)
let (p2l : FStar_Ident.path -> FStar_Ident.lident) =
  fun l -> FStar_Ident.lid_of_path l FStar_Compiler_Range.dummyRange
(* Name in the Prims module. *)
let (pconst : Prims.string -> FStar_Ident.lident) = fun s -> p2l ["Prims"; s]
(* Name in FStar.Pervasives. *)
let (psconst : Prims.string -> FStar_Ident.lident) =
  fun s -> p2l ["FStar"; "Pervasives"; s]
(* Name in FStar.Pervasives.Native. *)
let (psnconst : Prims.string -> FStar_Ident.lident) =
  fun s -> p2l ["FStar"; "Pervasives"; "Native"; s]
(* Fully-qualified names of the core modules. *)
let (prims_lid : FStar_Ident.lident) = p2l ["Prims"]
let (pervasives_native_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "Native"]
let (pervasives_lid : FStar_Ident.lident) = p2l ["FStar"; "Pervasives"]
let (fstar_ns_lid : FStar_Ident.lident) = p2l ["FStar"]
(* Primitive and basic types. *)
let (bool_lid : FStar_Ident.lident) = pconst "bool"
let (unit_lid : FStar_Ident.lident) = pconst "unit"
let (squash_lid : FStar_Ident.lident) = pconst "squash"
let (auto_squash_lid : FStar_Ident.lident) = pconst "auto_squash"
let (string_lid : FStar_Ident.lident) = pconst "string"
let (bytes_lid : FStar_Ident.lident) = pconst "bytes"
let (int_lid : FStar_Ident.lident) = pconst "int"
let (exn_lid : FStar_Ident.lident) = pconst "exn"
let (list_lid : FStar_Ident.lident) = pconst "list"
(* FStar.ImmutableArray.Base primitives. *)
let (immutable_array_t_lid : FStar_Ident.lident) =
  p2l ["FStar"; "ImmutableArray"; "Base"; "t"]
let (immutable_array_of_list_lid : FStar_Ident.lident) =
  p2l ["FStar"; "ImmutableArray"; "Base"; "of_list"]
let (immutable_array_length_lid : FStar_Ident.lident) =
  p2l ["FStar"; "ImmutableArray"; "Base"; "length"]
let (immutable_array_index_lid : FStar_Ident.lident) =
  p2l ["FStar"; "ImmutableArray"; "Base"; "index"]
let (eqtype_lid : FStar_Ident.lident) = pconst "eqtype"
let (option_lid : FStar_Ident.lident) = psnconst "option"
let (either_lid : FStar_Ident.lident) = psconst "either"
let (pattern_lid : FStar_Ident.lident) = psconst "pattern"
let (lex_t_lid : FStar_Ident.lident) = pconst "lex_t"
let (precedes_lid : FStar_Ident.lident) = pconst "precedes"
let (smtpat_lid : FStar_Ident.lident) = psconst "smt_pat"
let (smtpatOr_lid : FStar_Ident.lident) = psconst "smt_pat_or"
let (monadic_lid : FStar_Ident.lident) = pconst "M"
let (spinoff_lid : FStar_Ident.lident) = psconst "spinoff"
let (inl_lid : FStar_Ident.lident) = psconst "Inl"
let (inr_lid : FStar_Ident.lident) = psconst "Inr"
(* Fixed-width machine integer types. *)
let (int8_lid : FStar_Ident.lident) = p2l ["FStar"; "Int8"; "t"]
let (uint8_lid : FStar_Ident.lident) = p2l ["FStar"; "UInt8"; "t"]
let (int16_lid : FStar_Ident.lident) = p2l ["FStar"; "Int16"; "t"]
let (uint16_lid : FStar_Ident.lident) = p2l ["FStar"; "UInt16"; "t"]
let (int32_lid : FStar_Ident.lident) = p2l ["FStar"; "Int32"; "t"]
let (uint32_lid : FStar_Ident.lident) = p2l ["FStar"; "UInt32"; "t"]
let (int64_lid : FStar_Ident.lident) = p2l ["FStar"; "Int64"; "t"]
let (uint64_lid : FStar_Ident.lident) = p2l ["FStar"; "UInt64"; "t"]
(* FStar.ST stateful primitives and miscellaneous base types. *)
let (salloc_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"; "salloc"]
let (swrite_lid : FStar_Ident.lident) =
  p2l ["FStar"; "ST"; "op_Colon_Equals"]
let (sread_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"; "op_Bang"]
let (max_lid : FStar_Ident.lident) = p2l ["max"]
let (real_lid : FStar_Ident.lident) = p2l ["FStar"; "Real"; "real"]
let (float_lid : FStar_Ident.lident) = p2l ["FStar"; "Float"; "float"]
let (char_lid : FStar_Ident.lident) = p2l ["FStar"; "Char"; "char"]
let (heap_lid : FStar_Ident.lident) = p2l ["FStar"; "Heap"; "heap"]
let (logical_lid : FStar_Ident.lident) = pconst "logical"
let (smt_theory_symbol_attr_lid : FStar_Ident.lident) =
  pconst "smt_theory_symbol"
(* Logical connectives (the l_-prefixed squashed forms). *)
let (true_lid : FStar_Ident.lident) = pconst "l_True"
let (false_lid : FStar_Ident.lident) = pconst "l_False"
let (and_lid : FStar_Ident.lident) = pconst "l_and"
let (or_lid : FStar_Ident.lident) = pconst "l_or"
let (not_lid : FStar_Ident.lident) = pconst "l_not"
let (imp_lid : FStar_Ident.lident) = pconst "l_imp"
let (iff_lid : FStar_Ident.lident) = pconst "l_iff"
let (ite_lid : FStar_Ident.lident) = pconst "l_ITE"
let (exists_lid : FStar_Ident.lident) = pconst "l_Exists"
let (forall_lid : FStar_Ident.lident) = pconst "l_Forall"
let (haseq_lid : FStar_Ident.lident) = pconst "hasEq"
let (b2t_lid : FStar_Ident.lident) = pconst "b2t"
let (admit_lid : FStar_Ident.lident) = pconst "admit"
let (magic_lid : FStar_Ident.lident) = pconst "magic"
let (has_type_lid : FStar_Ident.lident) = pconst "has_type"
(* Constructive counterparts of the connectives. *)
let (c_true_lid : FStar_Ident.lident) = pconst "trivial"
let (empty_type_lid : FStar_Ident.lident) = pconst "empty"
let (c_and_lid : FStar_Ident.lident) = pconst "pair"
let (c_or_lid : FStar_Ident.lident) = pconst "sum"
let (dtuple2_lid : FStar_Ident.lident) = pconst "dtuple2"
let (eq2_lid : FStar_Ident.lident) = pconst "eq2"
let (eq3_lid : FStar_Ident.lident) = pconst "op_Equals_Equals_Equals"
let (c_eq2_lid : FStar_Ident.lident) = pconst "equals"
(* Data constructors for lists and options, plus assume/assert. *)
let (cons_lid : FStar_Ident.lident) = pconst "Cons"
let (nil_lid : FStar_Ident.lident) = pconst "Nil"
let (some_lid : FStar_Ident.lident) = psnconst "Some"
let (none_lid : FStar_Ident.lident) = psnconst "None"
let (assume_lid : FStar_Ident.lident) = pconst "_assume"
let (assert_lid : FStar_Ident.lident) = pconst "_assert"
(* Pure weakest-precondition machinery. *)
let (pure_wp_lid : FStar_Ident.lident) = pconst "pure_wp"
let (pure_wp_monotonic_lid : FStar_Ident.lident) = pconst "pure_wp_monotonic"
let (pure_wp_monotonic0_lid : FStar_Ident.lident) =
  pconst "pure_wp_monotonic0"
let (trivial_pure_post_lid : FStar_Ident.lident) =
  psconst "trivial_pure_post"
let (pure_assert_wp_lid : FStar_Ident.lident) = pconst "pure_assert_wp0"
let (pure_assume_wp_lid : FStar_Ident.lident) = pconst "pure_assume_wp0"
let (assert_norm_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "assert_norm"]
let (list_append_lid : FStar_Ident.lident) = p2l ["FStar"; "List"; "append"]
let (list_tot_append_lid : FStar_Ident.lident) =
  p2l ["FStar"; "List"; "Tot"; "Base"; "append"]
let (id_lid : FStar_Ident.lident) = psconst "id"
(* Helper: name in FStar.Char. *)
let (c2l : Prims.string -> FStar_Ident.lident) =
  fun s -> p2l ["FStar"; "Char"; s]
let (char_u32_of_char : FStar_Ident.lident) = c2l "u32_of_char"
(* Helper: name in FStar.String, plus the string primitives. *)
let (s2l : Prims.string -> FStar_Ident.lident) =
  fun n -> p2l ["FStar"; "String"; n]
let (string_list_of_string_lid : FStar_Ident.lident) = s2l "list_of_string"
let (string_string_of_list_lid : FStar_Ident.lident) = s2l "string_of_list"
let (string_make_lid : FStar_Ident.lident) = s2l "make"
let (string_split_lid : FStar_Ident.lident) = s2l "split"
let (string_concat_lid : FStar_Ident.lident) = s2l "concat"
let (string_compare_lid : FStar_Ident.lident) = s2l "compare"
let (string_lowercase_lid : FStar_Ident.lident) = s2l "lowercase"
let (string_uppercase_lid : FStar_Ident.lident) = s2l "uppercase"
let (string_index_lid : FStar_Ident.lident) = s2l "index"
let (string_index_of_lid : FStar_Ident.lident) = s2l "index_of"
let (string_sub_lid : FStar_Ident.lident) = s2l "sub"
let (prims_strcat_lid : FStar_Ident.lident) = pconst "strcat"
let (prims_op_Hat_lid : FStar_Ident.lident) = pconst "op_Hat"
let (let_in_typ : FStar_Ident.lident) = p2l ["Prims"; "Let"]
let (string_of_int_lid : FStar_Ident.lident) = p2l ["Prims"; "string_of_int"]
let (string_of_bool_lid : FStar_Ident.lident) =
  p2l ["Prims"; "string_of_bool"]
let (string_compare : FStar_Ident.lident) =
  p2l ["FStar"; "String"; "compare"]
let (order_lid : FStar_Ident.lident) = p2l ["FStar"; "Order"; "order"]
let (vconfig_lid : FStar_Ident.lident) = p2l ["FStar"; "VConfig"; "vconfig"]
let (mkvconfig_lid : FStar_Ident.lident) =
  p2l ["FStar"; "VConfig"; "Mkvconfig"]
(* Boolean-valued operators on Prims integers/booleans. *)
let (op_Eq : FStar_Ident.lident) = pconst "op_Equality"
let (op_notEq : FStar_Ident.lident) = pconst "op_disEquality"
let (op_LT : FStar_Ident.lident) = pconst "op_LessThan"
let (op_LTE : FStar_Ident.lident) = pconst "op_LessThanOrEqual"
let (op_GT : FStar_Ident.lident) = pconst "op_GreaterThan"
let (op_GTE : FStar_Ident.lident) = pconst "op_GreaterThanOrEqual"
let (op_Subtraction : FStar_Ident.lident) = pconst "op_Subtraction"
let (op_Minus : FStar_Ident.lident) = pconst "op_Minus"
let (op_Addition : FStar_Ident.lident) = pconst "op_Addition"
let (op_Multiply : FStar_Ident.lident) = pconst "op_Multiply"
let (op_Division : FStar_Ident.lident) = pconst "op_Division"
let (op_Modulus : FStar_Ident.lident) = pconst "op_Modulus"
let (op_And : FStar_Ident.lident) = pconst "op_AmpAmp"
let (op_Or : FStar_Ident.lident) = pconst "op_BarBar"
let (op_Negation : FStar_Ident.lident) = pconst "op_Negation"
(* Helper: name in FStar.Real, plus the dotted real-number operators. *)
let (real_const : Prims.string -> FStar_Ident.lident) =
  fun s -> p2l ["FStar"; "Real"; s]
let (real_op_LT : FStar_Ident.lident) = real_const "op_Less_Dot"
let (real_op_LTE : FStar_Ident.lident) = real_const "op_Less_Equals_Dot"
let (real_op_GT : FStar_Ident.lident) = real_const "op_Greater_Dot"
let (real_op_GTE : FStar_Ident.lident) = real_const "op_Greater_Equals_Dot"
let (real_op_Subtraction : FStar_Ident.lident) =
  real_const "op_Subtraction_Dot"
let (real_op_Addition : FStar_Ident.lident) = real_const "op_Plus_Dot"
let (real_op_Multiply : FStar_Ident.lident) = real_const "op_Star_Dot"
let (real_op_Division : FStar_Ident.lident) = real_const "op_Slash_Dot"
let (real_of_int : FStar_Ident.lident) = real_const "of_int"
(* Helper: name in FStar.BV, plus the bitvector primitives. *)
let (bvconst : Prims.string -> FStar_Ident.lident) =
  fun s -> p2l ["FStar"; "BV"; s]
let (bv_t_lid : FStar_Ident.lident) = bvconst "bv_t"
let (nat_to_bv_lid : FStar_Ident.lident) = bvconst "int2bv"
let (bv_to_nat_lid : FStar_Ident.lident) = bvconst "bv2int"
let (bv_and_lid : FStar_Ident.lident) = bvconst "bvand"
let (bv_xor_lid : FStar_Ident.lident) = bvconst "bvxor"
let (bv_or_lid : FStar_Ident.lident) = bvconst "bvor"
let (bv_add_lid : FStar_Ident.lident) = bvconst "bvadd"
let (bv_sub_lid : FStar_Ident.lident) = bvconst "bvsub"
let (bv_shift_left_lid : FStar_Ident.lident) = bvconst "bvshl"
let (bv_shift_right_lid : FStar_Ident.lident) = bvconst "bvshr"
let (bv_udiv_lid : FStar_Ident.lident) = bvconst "bvdiv"
let (bv_mod_lid : FStar_Ident.lident) = bvconst "bvmod"
let (bv_mul_lid : FStar_Ident.lident) = bvconst "bvmul"
let (bv_ult_lid : FStar_Ident.lident) = bvconst "bvult"
let (bv_uext_lid : FStar_Ident.lident) = bvconst "bv_uext"
(* Mutable arrays, the FStar.ST/Heap interface and sets. *)
let (array_lid : FStar_Ident.lident) = p2l ["FStar"; "Array"; "array"]
let (array_of_list_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Array"; "of_list"]
let (st_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"]
let (write_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"; "write"]
let (read_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"; "read"]
let (alloc_lid : FStar_Ident.lident) = p2l ["FStar"; "ST"; "alloc"]
let (op_ColonEq : FStar_Ident.lident) =
  p2l ["FStar"; "ST"; "op_Colon_Equals"]
let (ref_lid : FStar_Ident.lident) = p2l ["FStar"; "Heap"; "ref"]
let (heap_addr_of_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Heap"; "addr_of"]
let (set_empty : FStar_Ident.lident) = p2l ["FStar"; "Set"; "empty"]
let (set_singleton : FStar_Ident.lident) = p2l ["FStar"; "Set"; "singleton"]
let (set_union : FStar_Ident.lident) = p2l ["FStar"; "Set"; "union"]
let (fstar_hyperheap_lid : FStar_Ident.lident) = p2l ["FStar"; "HyperHeap"]
let (rref_lid : FStar_Ident.lident) = p2l ["FStar"; "HyperHeap"; "rref"]
let (erased_lid : FStar_Ident.lident) = p2l ["FStar"; "Ghost"; "erased"]
(* Effect names. *)
let (effect_PURE_lid : FStar_Ident.lident) = pconst "PURE"
let (effect_Pure_lid : FStar_Ident.lident) = pconst "Pure"
let (effect_Tot_lid : FStar_Ident.lident) = pconst "Tot"
let (effect_Lemma_lid : FStar_Ident.lident) = psconst "Lemma"
let (effect_GTot_lid : FStar_Ident.lident) = pconst "GTot"
let (effect_GHOST_lid : FStar_Ident.lident) = pconst "GHOST"
let (effect_Ghost_lid : FStar_Ident.lident) = pconst "Ghost"
let (effect_DIV_lid : FStar_Ident.lident) = psconst "DIV"
let (effect_Div_lid : FStar_Ident.lident) = psconst "Div"
let (effect_Dv_lid : FStar_Ident.lident) = psconst "Dv"
(* Two homes for the ML-style effects: FStar.Compiler.Effect and
   FStar.All (the effect_*_lid selectors below pick between them). *)
let (compiler_effect_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Compiler"; "Effect"]
let (compiler_effect_ALL_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Compiler"; "Effect"; "ALL"]
let (compiler_effect_ML_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Compiler"; "Effect"; "ML"]
let (compiler_effect_failwith_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Compiler"; "Effect"; "failwith"]
let (compiler_effect_try_with_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Compiler"; "Effect"; "try_with"]
let (all_lid : FStar_Ident.lident) = p2l ["FStar"; "All"]
let (all_ALL_lid : FStar_Ident.lident) = p2l ["FStar"; "All"; "All"]
let (all_ML_lid : FStar_Ident.lident) = p2l ["FStar"; "All"; "ML"]
let (all_failwith_lid : FStar_Ident.lident) =
  p2l ["FStar"; "All"; "failwith"]
let (all_try_with_lid : FStar_Ident.lident) =
  p2l ["FStar"; "All"; "try_with"]
(* These selectors originally chose between the FStar.Compiler.Effect
   and FStar.All variants with `let uu___1 = false in if uu___1 …`.
   The condition is a literal [false] in this extracted build (the
   option it was extracted from is hard-coded), so the then-branch was
   dead code: the FStar.All variant is always returned.  The dead
   conditional is removed; the unit-function interfaces are kept so
   all call sites remain valid. *)
let (effect_ALL_lid : unit -> FStar_Ident.lident) =
  fun uu___ -> all_lid
let (effect_ML_lid : unit -> FStar_Ident.lident) =
  fun uu___ -> all_ML_lid
let (failwith_lid : unit -> FStar_Ident.lident) =
  fun uu___ -> all_failwith_lid
let (try_with_lid : unit -> FStar_Ident.lident) =
  fun uu___ -> all_try_with_lid
let (as_requires : FStar_Ident.lident) = pconst "as_requires"
let (as_ensures : FStar_Ident.lident) = pconst "as_ensures"
let (decreases_lid : FStar_Ident.lident) = pconst "decreases"
(* Tactics / reflection primitives. *)
let (inspect : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Builtins"; "inspect"]
let (pack : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Builtins"; "pack"]
let (binder_to_term : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Derived"; "binder_to_term"]
let (reveal : FStar_Ident.lident) = p2l ["FStar"; "Ghost"; "reveal"]
let (hide : FStar_Ident.lident) = p2l ["FStar"; "Ghost"; "hide"]
let (term_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Reflection"; "Types"; "term"]
let (term_view_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Reflection"; "Data"; "term_view"]
let (decls_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Reflection"; "Data"; "decls"]
let (ctx_uvar_and_subst_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Reflection"; "Types"; "ctx_uvar_and_subst"]
let (universe_uvar_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Reflection"; "Types"; "universe_uvar"]
let (range_lid : FStar_Ident.lident) = pconst "range"
let (range_of_lid : FStar_Ident.lident) = pconst "range_of"
let (labeled_lid : FStar_Ident.lident) = pconst "labeled"
let (range_0 : FStar_Ident.lident) = pconst "range_0"
let (guard_free : FStar_Ident.lident) = pconst "guard_free"
let (inversion_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "inversion"]
(* Normalization requests and the `steps_*` step names. *)
let (normalize : FStar_Ident.lident) = psconst "normalize"
let (normalize_term : FStar_Ident.lident) = psconst "normalize_term"
let (norm : FStar_Ident.lident) = psconst "norm"
let (steps_simpl : FStar_Ident.lident) = psconst "simplify"
let (steps_weak : FStar_Ident.lident) = psconst "weak"
let (steps_hnf : FStar_Ident.lident) = psconst "hnf"
let (steps_primops : FStar_Ident.lident) = psconst "primops"
let (steps_zeta : FStar_Ident.lident) = psconst "zeta"
let (steps_zeta_full : FStar_Ident.lident) = psconst "zeta_full"
let (steps_iota : FStar_Ident.lident) = psconst "iota"
let (steps_delta : FStar_Ident.lident) = psconst "delta"
let (steps_reify : FStar_Ident.lident) = psconst "reify_"
let (steps_unfoldonly : FStar_Ident.lident) = psconst "delta_only"
let (steps_unfoldfully : FStar_Ident.lident) = psconst "delta_fully"
let (steps_unfoldattr : FStar_Ident.lident) = psconst "delta_attr"
let (steps_unfoldqual : FStar_Ident.lident) = psconst "delta_qualifier"
let (steps_unfoldnamespace : FStar_Ident.lident) = psconst "delta_namespace"
let (steps_unascribe : FStar_Ident.lident) = psconst "unascribe"
let (steps_nbe : FStar_Ident.lident) = psconst "nbe"
let (steps_unmeta : FStar_Ident.lident) = psconst "unmeta"
(* Attribute names. *)
let (deprecated_attr : FStar_Ident.lident) = pconst "deprecated"
let (warn_on_use_attr : FStar_Ident.lident) = pconst "warn_on_use"
let (inline_let_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "inline_let"]
let (rename_let_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "rename_let"]
let (plugin_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "plugin"]
let (tcnorm_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "tcnorm"]
let (dm4f_bind_range_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "dm4f_bind_range"]
let (must_erase_for_extraction_attr : FStar_Ident.lident) =
  psconst "must_erase_for_extraction"
let (strict_on_arguments_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "strict_on_arguments"]
let (resolve_implicits_attr_string : Prims.string) =
  "FStar.Pervasives.resolve_implicits"
let (override_resolve_implicits_handler_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "override_resolve_implicits_handler"]
let (handle_smt_goals_attr : FStar_Ident.lident) = psconst "handle_smt_goals"
let (handle_smt_goals_attr_string : Prims.string) =
  "FStar.Pervasives.handle_smt_goals"
let (erasable_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "erasable"]
let (comment_attr : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "Comment"]
let (fail_attr : FStar_Ident.lident) = psconst "expect_failure"
let (fail_lax_attr : FStar_Ident.lident) = psconst "expect_lax_failure"
let (tcdecltime_attr : FStar_Ident.lident) = psconst "tcdecltime"
let (noextract_to_attr : FStar_Ident.lident) = psconst "noextract_to"
let (unifier_hint_injective_lid : FStar_Ident.lident) =
  psconst "unifier_hint_injective"
let (normalize_for_extraction_lid : FStar_Ident.lident) =
  psconst "normalize_for_extraction"
let (postprocess_with : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Effect"; "postprocess_with"]
let (preprocess_with : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Effect"; "preprocess_with"]
let (postprocess_extr_with : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Effect"; "postprocess_for_extraction_with"]
let (check_with_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["FStar"; "Reflection"; "Builtins"; "check_with"]
    FStar_Compiler_Range.dummyRange
let (commute_nested_matches_lid : FStar_Ident.lident) =
  psconst "commute_nested_matches"
let (remove_unused_type_parameters_lid : FStar_Ident.lident) =
  psconst "remove_unused_type_parameters"
let (ite_soundness_by_attr : FStar_Ident.lident) = psconst "ite_soundness_by"
let (default_effect_attr : FStar_Ident.lident) = psconst "default_effect"
let (top_level_effect_attr : FStar_Ident.lident) = psconst "top_level_effect"
let (effect_parameter_attr : FStar_Ident.lident) = psconst "effect_param"
let (bind_has_range_args_attr : FStar_Ident.lident) =
  psconst "bind_has_range_args"
let (primitive_extraction_attr : FStar_Ident.lident) =
  psconst "primitive_extraction"
let (binder_strictly_positive_attr : FStar_Ident.lident) =
  psconst "strictly_positive"
let (no_auto_projectors_attr : FStar_Ident.lident) =
  psconst "no_auto_projectors"
let (no_subtping_attr_lid : FStar_Ident.lident) = psconst "no_subtyping"
let (attr_substitute_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Pervasives"; "Substitute"]
let (well_founded_relation_lid : FStar_Ident.lident) =
  p2l ["FStar"; "WellFounded"; "well_founded_relation"]

let (sli : FStar_Ident.lident -> Prims.string) =
  (* Short rendering of an lident: only the final identifier segment.
     The original guarded a fully-qualified rendering behind a literal
     [false] (an option hard-coded away during extraction), so that
     branch was unreachable; the dead conditional is removed. *)
  fun l ->
    let uu___ = FStar_Ident.ident_of_lid l in
    FStar_Ident.string_of_id uu___
let (const_to_string : FStar_Const.sconst -> Prims.string) =
  (* Render a surface-syntax constant for display. *)
  fun x ->
    match x with
    | FStar_Const.Const_effect -> "Effect"
    | FStar_Const.Const_unit -> "()"
    | FStar_Const.Const_bool b -> if b then "true" else "false"
    | FStar_Const.Const_real r -> FStar_String.op_Hat r "R"
    | FStar_Const.Const_string (s, _) ->
        FStar_Compiler_Util.format1 "\"%s\"" s
    (* Integer constants carry their textual representation directly. *)
    | FStar_Const.Const_int (repr, _) -> repr
    | FStar_Const.Const_char c ->
        (* Wrap the character in single quotes. *)
        FStar_String.op_Hat "'"
          (FStar_String.op_Hat (FStar_Compiler_Util.string_of_char c) "'")
    | FStar_Const.Const_range r -> FStar_Compiler_Range.string_of_range r
    | FStar_Const.Const_range_of -> "range_of"
    | FStar_Const.Const_set_range_of -> "set_range_of"
    | FStar_Const.Const_reify lopt ->
        (* reify, optionally annotated with the effect it reifies. *)
        let eff =
          match lopt with
          | FStar_Pervasives_Native.None -> ""
          | FStar_Pervasives_Native.Some l ->
              FStar_Compiler_Util.format1 "<%s>"
                (FStar_Ident.string_of_lid l) in
        FStar_Compiler_Util.format1 "reify%s" eff
    | FStar_Const.Const_reflect l ->
        FStar_Compiler_Util.format1 "[[%s.reflect]]" (sli l)
let (mk_tuple_lid :
  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =
  (* Build FStar.Pervasives.Native.tupleN, positioned at [r]. *)
  fun n ->
    fun r ->
      let name =
        FStar_Compiler_Util.format1 "tuple%s"
          (FStar_Compiler_Util.string_of_int n) in
      FStar_Ident.set_lid_range (psnconst name) r
(* Commonly used arities, pre-built at the dummy range. *)
let (lid_tuple2 : FStar_Ident.lident) =
  mk_tuple_lid (Prims.of_int (2)) FStar_Compiler_Range.dummyRange
let (lid_tuple3 : FStar_Ident.lident) =
  mk_tuple_lid (Prims.of_int (3)) FStar_Compiler_Range.dummyRange
(* Recognizers for tuple type constructors, all funnelled through the
   string-prefix test below. *)
let (is_tuple_constructor_string : Prims.string -> Prims.bool) =
  fun s -> FStar_Compiler_Util.starts_with s "FStar.Pervasives.Native.tuple"
let (is_tuple_constructor_id : FStar_Ident.ident -> Prims.bool) =
  fun id -> is_tuple_constructor_string (FStar_Ident.string_of_id id)
let (is_tuple_constructor_lid : FStar_Ident.lident -> Prims.bool) =
  fun lid -> is_tuple_constructor_string (FStar_Ident.string_of_lid lid)
let (mk_tuple_data_lid :
  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =
  (* Build FStar.Pervasives.Native.MktupleN, positioned at [r]. *)
  fun n ->
    fun r ->
      let name =
        FStar_Compiler_Util.format1 "Mktuple%s"
          (FStar_Compiler_Util.string_of_int n) in
      FStar_Ident.set_lid_range (psnconst name) r
(* Commonly used arities, pre-built at the dummy range. *)
let (lid_Mktuple2 : FStar_Ident.lident) =
  mk_tuple_data_lid (Prims.of_int (2)) FStar_Compiler_Range.dummyRange
let (lid_Mktuple3 : FStar_Ident.lident) =
  mk_tuple_data_lid (Prims.of_int (3)) FStar_Compiler_Range.dummyRange
(* Recognizers for tuple data constructors (MktupleN). *)
let (is_tuple_datacon_string : Prims.string -> Prims.bool) =
  fun s ->
    FStar_Compiler_Util.starts_with s "FStar.Pervasives.Native.Mktuple"
let (is_tuple_datacon_id : FStar_Ident.ident -> Prims.bool) =
  fun id -> is_tuple_datacon_string (FStar_Ident.string_of_id id)
let (is_tuple_datacon_lid : FStar_Ident.lident -> Prims.bool) =
  fun lid -> is_tuple_datacon_string (FStar_Ident.string_of_lid lid)
(* Exact-arity check: is [f] precisely Mktuple[n]? *)
let (is_tuple_data_lid : FStar_Ident.lident -> Prims.int -> Prims.bool) =
  fun f ->
    fun n ->
      FStar_Ident.lid_equals f
        (mk_tuple_data_lid n FStar_Compiler_Range.dummyRange)
(* Arity-agnostic variant of the check above. *)
let (is_tuple_data_lid' : FStar_Ident.lident -> Prims.bool) =
  fun f -> is_tuple_datacon_string (FStar_Ident.string_of_lid f)
(* dtuple2 lives in Prims (pconst) while dtuple3 and above live in
   FStar.Pervasives (psconst); choose the right qualifier for arity [n]. *)
let (mod_prefix_dtuple : Prims.int -> Prims.string -> FStar_Ident.lident) =
  fun n -> if n = (Prims.of_int (2)) then pconst else psconst
(* Build the lident of the dependent-tuple type constructor "dtuple<n>",
   located at range [r].  The module prefix depends on the arity:
   Prims for n = 2, FStar.Pervasives otherwise (see mod_prefix_dtuple). *)
let (mk_dtuple_lid :
  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =
  fun n ->
    fun r ->
      let name =
        FStar_Compiler_Util.format1 "dtuple%s"
          (FStar_Compiler_Util.string_of_int n) in
      let qualify = mod_prefix_dtuple n in
      FStar_Ident.set_lid_range (qualify name) r
(* Dependent-tuple type constructors: dtuple2 is special-cased in Prims,
   higher arities live in FStar.Pervasives. *)
let (is_dtuple_constructor_string : Prims.string -> Prims.bool) =
  fun s ->
    (s = "Prims.dtuple2") ||
      (FStar_Compiler_Util.starts_with s "FStar.Pervasives.dtuple")
let (is_dtuple_constructor_lid : FStar_Ident.lident -> Prims.bool) =
  fun lid ->
    let uu___ = FStar_Ident.string_of_lid lid in
    is_dtuple_constructor_string uu___
(* Build the lident of the dependent-tuple data constructor "Mkdtuple<n>",
   located at range [r]; the module prefix is arity-dependent
   (Prims for n = 2, FStar.Pervasives otherwise). *)
let (mk_dtuple_data_lid :
  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =
  fun n ->
    fun r ->
      let name =
        FStar_Compiler_Util.format1 "Mkdtuple%s"
          (FStar_Compiler_Util.string_of_int n) in
      let qualify = mod_prefix_dtuple n in
      FStar_Ident.set_lid_range (qualify name) r
(* Dependent-tuple data constructors: Mkdtuple2 is in Prims, higher
   arities in FStar.Pervasives. *)
let (is_dtuple_datacon_string : Prims.string -> Prims.bool) =
  fun s ->
    (s = "Prims.Mkdtuple2") ||
      (FStar_Compiler_Util.starts_with s "FStar.Pervasives.Mkdtuple")
(* Exact-arity check against the constructor of arity [n]. *)
let (is_dtuple_data_lid : FStar_Ident.lident -> Prims.int -> Prims.bool) =
  fun f ->
    fun n ->
      let uu___ = mk_dtuple_data_lid n FStar_Compiler_Range.dummyRange in
      FStar_Ident.lid_equals f uu___
(* Arity-agnostic variant, implemented by string matching. *)
let (is_dtuple_data_lid' : FStar_Ident.lident -> Prims.bool) =
  fun f ->
    let uu___ = FStar_Ident.string_of_lid f in is_dtuple_datacon_string uu___
(* A lident is a "name" (constructor-like) when the first character of its
   last component is upper-case. *)
let (is_name : FStar_Ident.lident -> Prims.bool) =
  fun lid ->
    let first_char =
      let last_component =
        FStar_Ident.string_of_id (FStar_Ident.ident_of_lid lid) in
      FStar_Compiler_Util.char_at last_component Prims.int_zero in
    FStar_Compiler_Util.is_upper first_char
(* Qualify a path under the FStar.Tactics namespace. *)
let (fstar_tactics_lid' : Prims.string Prims.list -> FStar_Ident.lid) =
  fun s ->
    FStar_Ident.lid_of_path
      (FStar_Compiler_List.op_At ["FStar"; "Tactics"] s)
      FStar_Compiler_Range.dummyRange
(* Single-component convenience wrapper: FStar.Tactics.<s>. *)
let (fstar_tactics_lid : Prims.string -> FStar_Ident.lid) =
  fun s -> fstar_tactics_lid' [s]
(* Well-known names from FStar.Tactics.Effect and FStar.Tactics.Typeclasses,
   used to recognize tactic effects and typeclass machinery. *)
let (tac_lid : FStar_Ident.lid) = fstar_tactics_lid' ["Effect"; "tac"]
let (tactic_lid : FStar_Ident.lid) = fstar_tactics_lid' ["Effect"; "tactic"]
let (mk_class_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "mk_class"]
let (tcresolve_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "tcresolve"]
let (solve_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "solve"]
let (tcclass_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "tcclass"]
let (tcinstance_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "tcinstance"]
let (no_method_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Typeclasses"; "no_method"]
let (effect_TAC_lid : FStar_Ident.lid) = fstar_tactics_lid' ["Effect"; "TAC"]
let (effect_Tac_lid : FStar_Ident.lid) = fstar_tactics_lid' ["Effect"; "Tac"]
(* Entry points that splice tactics into terms/proofs. *)
let (by_tactic_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Effect"; "with_tactic"]
let (rewrite_by_tactic_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Effect"; "rewrite_with_tactic"]
let (synth_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Effect"; "synth_by_tactic"]
let (assert_by_tactic_lid : FStar_Ident.lid) =
  fstar_tactics_lid' ["Effect"; "assert_by_tactic"]
(* Names of the reflected syntax types exposed to metaprograms. *)
let (fstar_syntax_syntax_term : FStar_Ident.lident) =
  FStar_Ident.lid_of_str "FStar.Syntax.Syntax.term"
let (binder_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["FStar"; "Reflection"; "Types"; "binder"]
    FStar_Compiler_Range.dummyRange
let (binders_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["FStar"; "Reflection"; "Types"; "binders"]
    FStar_Compiler_Range.dummyRange
let (bv_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["FStar"; "Reflection"; "Types"; "bv"]
    FStar_Compiler_Range.dummyRange
let (fv_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["FStar"; "Reflection"; "Types"; "fv"]
    FStar_Compiler_Range.dummyRange
(* FStar.Pervasives.norm_step: argument type of normalization requests. *)
let (norm_step_lid : FStar_Ident.lident) = psconst "norm_step"
(* Combinators from FStar.Calc used to elaborate calc-blocks. *)
let (calc_lid : Prims.string -> FStar_Ident.lid) =
  fun i ->
    FStar_Ident.lid_of_path ["FStar"; "Calc"; i]
      FStar_Compiler_Range.dummyRange
let (calc_init_lid : FStar_Ident.lid) = calc_lid "calc_init"
let (calc_step_lid : FStar_Ident.lid) = calc_lid "calc_step"
let (calc_finish_lid : FStar_Ident.lid) = calc_lid "calc_finish"
let (calc_push_impl_lid : FStar_Ident.lid) = calc_lid "calc_push_impl"
(* Combinators from FStar.Classical.Sugar, targets of the surface-syntax
   sugar for classical introduction/elimination forms. *)
let (classical_sugar_lid : Prims.string -> FStar_Ident.lid) =
  fun i ->
    FStar_Ident.lid_of_path ["FStar"; "Classical"; "Sugar"; i]
      FStar_Compiler_Range.dummyRange
let (forall_intro_lid : FStar_Ident.lid) = classical_sugar_lid "forall_intro"
let (exists_intro_lid : FStar_Ident.lid) = classical_sugar_lid "exists_intro"
let (implies_intro_lid : FStar_Ident.lid) =
  classical_sugar_lid "implies_intro"
let (or_intro_left_lid : FStar_Ident.lid) =
  classical_sugar_lid "or_intro_left"
let (or_intro_right_lid : FStar_Ident.lid) =
  classical_sugar_lid "or_intro_right"
let (and_intro_lid : FStar_Ident.lid) = classical_sugar_lid "and_intro"
let (forall_elim_lid : FStar_Ident.lid) = classical_sugar_lid "forall_elim"
let (exists_elim_lid : FStar_Ident.lid) = classical_sugar_lid "exists_elim"
let (implies_elim_lid : FStar_Ident.lid) = classical_sugar_lid "implies_elim"
let (or_elim_lid : FStar_Ident.lid) = classical_sugar_lid "or_elim"
let (and_elim_lid : FStar_Ident.lid) = classical_sugar_lid "and_elim"
(* Reserved identifier used when desugaring match-returns annotations;
   the reserved prefix guarantees no clash with user names. *)
let (match_returns_def_name : Prims.string) =
  FStar_String.op_Hat FStar_Ident.reserved_prefix "_ret_"
(* Names from the Steel separation-logic libraries that the compiler
   treats specially (invariants and the with_invariant combinators,
   in both the Steel.Effect.Atomic and Steel.ST flavors). *)
let (steel_memory_inv_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "Memory"; "inv"]
    FStar_Compiler_Range.dummyRange
let (steel_new_invariant_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "Effect"; "Atomic"; "new_invariant"]
    FStar_Compiler_Range.dummyRange
let (steel_st_new_invariant_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "ST"; "Util"; "new_invariant"]
    FStar_Compiler_Range.dummyRange
let (steel_with_invariant_g_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "Effect"; "Atomic"; "with_invariant_g"]
    FStar_Compiler_Range.dummyRange
let (steel_st_with_invariant_g_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "ST"; "Util"; "with_invariant_g"]
    FStar_Compiler_Range.dummyRange
let (steel_with_invariant_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "Effect"; "Atomic"; "with_invariant"]
    FStar_Compiler_Range.dummyRange
let (steel_st_with_invariant_lid : FStar_Ident.lident) =
  FStar_Ident.lid_of_path ["Steel"; "ST"; "Util"; "with_invariant"]
    FStar_Compiler_Range.dummyRange
(* FStar.FunctionalExtensionality helpers. *)
let (fext_lid : Prims.string -> FStar_Ident.lident) =
  fun s ->
    FStar_Ident.lid_of_path ["FStar"; "FunctionalExtensionality"; s]
      FStar_Compiler_Range.dummyRange
let (fext_on_domain_lid : FStar_Ident.lident) = fext_lid "on_domain"
let (fext_on_dom_lid : FStar_Ident.lident) = fext_lid "on_dom"
let (fext_on_domain_g_lid : FStar_Ident.lident) = fext_lid "on_domain_g"
let (fext_on_dom_g_lid : FStar_Ident.lident) = fext_lid "on_dom_g"
(* Sealed values (FStar.Sealed) and the tactic primitive that unseals them. *)
let (sealed_lid : FStar_Ident.lident) = p2l ["FStar"; "Sealed"; "sealed"]
let (seal_lid : FStar_Ident.lident) = p2l ["FStar"; "Sealed"; "seal"]
let (unseal_lid : FStar_Ident.lident) =
  p2l ["FStar"; "Tactics"; "Builtins"; "unseal"]


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_Driver.ml
================================================
open Prims
(* A file is a checked-cache file iff its extension is ".cache". *)
let (is_cache_file : Prims.string -> Prims.bool) =
  fun fn ->
    let ext = FStar_Compiler_Util.get_file_extension fn in
    ext = ".cache"
(* Result of parsing one toplevel input fragment:
   no content, a whole module, bare declarations, or declarations paired
   with their raw source chunks (incremental mode). *)
type fragment =
  | Empty 
  | Modul of FStar_Parser_AST.modul 
  | Decls of FStar_Parser_AST.decl Prims.list 
  | DeclsWithContent of (FStar_Parser_AST.decl *
  FStar_Parser_ParseIt.code_fragment) Prims.list 
(* Auto-generated discriminators (uu___is_*) and projectors (__proj__*)
   for [fragment].  The projectors are intentionally partial: applying
   one to the wrong variant raises Match_failure. *)
let (uu___is_Empty : fragment -> Prims.bool) =
  fun projectee -> match projectee with | Empty -> true | uu___ -> false
let (uu___is_Modul : fragment -> Prims.bool) =
  fun projectee -> match projectee with | Modul _0 -> true | uu___ -> false
let (__proj__Modul__item___0 : fragment -> FStar_Parser_AST.modul) =
  fun projectee -> match projectee with | Modul _0 -> _0
let (uu___is_Decls : fragment -> Prims.bool) =
  fun projectee -> match projectee with | Decls _0 -> true | uu___ -> false
let (__proj__Decls__item___0 : fragment -> FStar_Parser_AST.decl Prims.list)
  = fun projectee -> match projectee with | Decls _0 -> _0
let (uu___is_DeclsWithContent : fragment -> Prims.bool) =
  fun projectee ->
    match projectee with | DeclsWithContent _0 -> true | uu___ -> false
let (__proj__DeclsWithContent__item___0 :
  fragment ->
    (FStar_Parser_AST.decl * FStar_Parser_ParseIt.code_fragment) Prims.list)
  = fun projectee -> match projectee with | DeclsWithContent _0 -> _0
(* Parse a toplevel input fragment and classify the result:
   a module, an empty declaration list, bare declarations, or (in
   incremental mode) declarations paired with their source chunks.
   Parser errors are re-raised; a [Term] result is impossible for a
   [Toplevel] request. *)
let (parse_fragment : FStar_Parser_ParseIt.input_frag -> fragment) =
  fun frag ->
    let parsed =
      FStar_Parser_ParseIt.parse (FStar_Parser_ParseIt.Toplevel frag) in
    match parsed with
    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inl modul, _) ->
        Modul modul
    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr [], _) ->
        Empty
    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr decls, _) ->
        Decls decls
    | FStar_Parser_ParseIt.IncrementalFragment (decls, _, _) ->
        DeclsWithContent decls
    | FStar_Parser_ParseIt.ParseError (e, msg, r) ->
        FStar_Errors.raise_error (e, msg) r
    | FStar_Parser_ParseIt.Term _ ->
        failwith
          "Impossible: parsing a Toplevel always results in an ASTFragment"
(* Parse a whole source file [fn] into its module AST plus the comments
   collected during lexing (each paired with its source range).
   Raises Fatal_ModuleExpected when the file contains bare declarations
   instead of a module, and re-raises parser errors. *)
let (parse_file :
  Prims.string ->
    (FStar_Parser_AST.file * (Prims.string * FStar_Compiler_Range.range)
      Prims.list))
  =
  fun fn ->
    let uu___ = FStar_Parser_ParseIt.parse (FStar_Parser_ParseIt.Filename fn) in
    match uu___ with
    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inl ast, comments)
        -> (ast, comments)
    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr uu___1, uu___2)
        ->
        let msg = FStar_Compiler_Util.format1 "%s: expected a module\n" fn in
        let r = FStar_Compiler_Range.dummyRange in
        FStar_Errors.raise_error
          (FStar_Errors_Codes.Fatal_ModuleExpected, msg) r
    | FStar_Parser_ParseIt.ParseError (e, msg, r) ->
        FStar_Errors.raise_error (e, msg) r
    | FStar_Parser_ParseIt.Term uu___1 ->
        failwith
          "Impossible: parsing a Filename always results in an ASTFragment"

================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_LexFStar.ml
================================================
open FStar_Parser_Parse
open FStar_Parser_Util

module Option  = BatOption
module String  = BatString
module Hashtbl = BatHashtbl
module Sedlexing = FStar_Sedlexing
module L = Sedlexing
module E = FStar_Errors
module Codes = FStar_Errors_Codes

(* Convert a string into the array of its byte values. *)
let ba_of_string s = Array.init (String.length s) (fun i -> Char.code (String.get s i))
(* Drop [n] elements from the front and [m] from the back. *)
let array_trim_both a n m = Array.sub a n (Array.length a - n - m)
let string_trim_both s n m = BatString.sub s n (String.length s - (n+m))
(* Same trimming, applied to the current lexeme (string / code-point views). *)
let trim_both   lexbuf n m = string_trim_both (L.lexeme lexbuf) n m
let utrim_both  lexbuf n m = array_trim_both (L.ulexeme lexbuf) n m
let trim_right  lexbuf n = trim_both lexbuf 0 n
let trim_left   lexbuf n = trim_both lexbuf n 0

(* Decode one (possibly escaped) character literal, given as an array of
   Unicode code points.  A leading backslash (92) selects an escape:
   single-letter escapes map to the usual ASCII control codes, while
   "u"/"x" escapes parse 4 (resp. 2) hex digits.  Anything else decodes
   to itself. *)
let unescape (a:int array) : int =
  match a.(0) with
  | 92 (* \ *) ->
    (match a.(1) with
    | 48  (*0*) -> 0
    | 98  (*b*) -> 8
    | 116 (*t*) -> 9
    | 110 (*n*) -> 10
    | 118 (*v*) -> 11
    | 102 (*f*) -> 12
    | 114 (*r*) -> 13
    | 117 (*u*) ->
      let s = FStar_Parser_Utf8.from_int_array a 2 4 in
      int_of_string ("0x"^s)
    | 120 (*x*) ->
      let s = FStar_Parser_Utf8.from_int_array a 2 2 in
      int_of_string ("0x"^s)
    | c -> c)
  | c -> c

(* Lookup tables mapping lexemes to tokens; populated by the
   initializer below. *)
let keywords = Hashtbl.create 0
let constructors = Hashtbl.create 0
let operators = Hashtbl.create 0

(* Populate the keyword, constructor and operator tables used by the
   tokenizer.  This data must stay in sync with the grammar's token set. *)
let () =
  Hashtbl.add keywords "attributes"    ATTRIBUTES  ;
  Hashtbl.add keywords "noeq"          NOEQUALITY  ;
  Hashtbl.add keywords "unopteq"       UNOPTEQUALITY  ;
  Hashtbl.add keywords "and"           AND         ;
  Hashtbl.add keywords "assert"        ASSERT      ;
  Hashtbl.add keywords "assume"        ASSUME      ;
  Hashtbl.add keywords "begin"         BEGIN       ;
  Hashtbl.add keywords "by"            BY          ;
  Hashtbl.add keywords "calc"          CALC        ;
  Hashtbl.add keywords "class"         CLASS       ;
  Hashtbl.add keywords "default"       DEFAULT     ;
  Hashtbl.add keywords "decreases"     DECREASES   ;
  Hashtbl.add keywords "effect"        EFFECT      ;
  Hashtbl.add keywords "eliminate"     ELIM;
  Hashtbl.add keywords "else"          ELSE        ;
  Hashtbl.add keywords "end"           END         ;
  Hashtbl.add keywords "ensures"       ENSURES     ;
  Hashtbl.add keywords "exception"     EXCEPTION   ;
  Hashtbl.add keywords "exists"        EXISTS      ;
  Hashtbl.add keywords "false"         FALSE       ;
  Hashtbl.add keywords "friend"        FRIEND      ;
  Hashtbl.add keywords "forall"        FORALL      ;
  Hashtbl.add keywords "fun"           FUN         ;
  Hashtbl.add keywords "λ"             FUN         ;
  Hashtbl.add keywords "function"      FUNCTION    ;
  Hashtbl.add keywords "if"            IF          ;
  Hashtbl.add keywords "in"            IN          ;
  Hashtbl.add keywords "include"       INCLUDE     ;
  Hashtbl.add keywords "inline"        INLINE      ;
  Hashtbl.add keywords "inline_for_extraction"        INLINE_FOR_EXTRACTION      ;
  Hashtbl.add keywords "instance"      INSTANCE    ;
  Hashtbl.add keywords "introduce"     INTRO ;
  Hashtbl.add keywords "irreducible"   IRREDUCIBLE ;
  Hashtbl.add keywords "let"           (LET false) ;
  Hashtbl.add keywords "logic"         LOGIC       ;
  Hashtbl.add keywords "match"         MATCH       ;
  Hashtbl.add keywords "returns"       RETURNS     ;
  Hashtbl.add keywords "as"            AS          ;
  Hashtbl.add keywords "module"        MODULE      ;
  Hashtbl.add keywords "new"           NEW         ;
  Hashtbl.add keywords "new_effect"    NEW_EFFECT  ;
  Hashtbl.add keywords "layered_effect"               LAYERED_EFFECT             ;
  Hashtbl.add keywords "polymonadic_bind"             POLYMONADIC_BIND           ;
  Hashtbl.add keywords "polymonadic_subcomp"          POLYMONADIC_SUBCOMP        ;
  Hashtbl.add keywords "noextract"     NOEXTRACT   ;
  Hashtbl.add keywords "of"            OF          ;
  Hashtbl.add keywords "open"          OPEN        ;
  Hashtbl.add keywords "opaque"        OPAQUE      ;
  Hashtbl.add keywords "private"       PRIVATE     ;
  Hashtbl.add keywords "quote"         QUOTE       ;
  Hashtbl.add keywords "range_of"      RANGE_OF    ;
  Hashtbl.add keywords "rec"           REC         ;
  Hashtbl.add keywords "reifiable"     REIFIABLE   ;
  Hashtbl.add keywords "reify"         REIFY       ;
  Hashtbl.add keywords "reflectable"   REFLECTABLE ;
  Hashtbl.add keywords "requires"      REQUIRES    ;
  Hashtbl.add keywords "set_range_of"  SET_RANGE_OF;
  Hashtbl.add keywords "sub_effect"    SUB_EFFECT  ;
  Hashtbl.add keywords "synth"         SYNTH       ;
  Hashtbl.add keywords "then"          THEN        ;
  Hashtbl.add keywords "total"         TOTAL       ;
  Hashtbl.add keywords "true"          TRUE        ;
  Hashtbl.add keywords "try"           TRY         ;
  Hashtbl.add keywords "type"          TYPE        ;
  Hashtbl.add keywords "unfold"        UNFOLD      ;
  Hashtbl.add keywords "unfoldable"    UNFOLDABLE  ;
  Hashtbl.add keywords "val"           VAL         ;
  Hashtbl.add keywords "when"          WHEN        ;
  Hashtbl.add keywords "with"          WITH        ;
  Hashtbl.add keywords "_"             UNDERSCORE  ;
  (* Greek letters lex directly as pre-named type variables. *)
  Hashtbl.add keywords "α"             (TVAR "a")  ;
  Hashtbl.add keywords "β"             (TVAR "b")  ;
  Hashtbl.add keywords "γ"             (TVAR "c")  ;
  Hashtbl.add keywords "δ"             (TVAR "d")  ;
  Hashtbl.add keywords "ε"             (TVAR "e")  ;
  Hashtbl.add keywords "φ"             (TVAR "f")  ;
  Hashtbl.add keywords "χ"             (TVAR "g")  ;
  Hashtbl.add keywords "η"             (TVAR "h")  ;
  Hashtbl.add keywords "ι"             (TVAR "i")  ;
  Hashtbl.add keywords "κ"             (TVAR "k")  ;
  Hashtbl.add keywords "μ"             (TVAR "m")  ;
  Hashtbl.add keywords "ν"             (TVAR "n")  ;
  Hashtbl.add keywords "π"             (TVAR "p")  ;
  Hashtbl.add keywords "θ"             (TVAR "q")  ;
  Hashtbl.add keywords "ρ"             (TVAR "r")  ;
  Hashtbl.add keywords "σ"             (TVAR "s")  ;
  Hashtbl.add keywords "τ"             (TVAR "t")  ;
  Hashtbl.add keywords "ψ"             (TVAR "u")  ;
  Hashtbl.add keywords "ω"             (TVAR "w")  ;
  Hashtbl.add keywords "ξ"             (TVAR "x")  ;
  Hashtbl.add keywords "ζ"             (TVAR "z")  ;
  (* Blackboard-bold letters lex as the corresponding base types. *)
  Hashtbl.add constructors "ℕ"         (IDENT "nat");
  Hashtbl.add constructors "ℤ"         (IDENT "int");
  Hashtbl.add constructors "𝔹"         (IDENT "bool");
  (* Fixed operator spellings, including Unicode aliases of the ASCII
     operators (listed under "New Unicode equivalents"). *)
  let l =
  ["~", TILDE "~";
   "-", MINUS;
   "/\\", CONJUNCTION;
   "\\/", DISJUNCTION;
   "<:", SUBTYPE;
   "$:", EQUALTYPE;
   "<@", SUBKIND;
   "(|", LENS_PAREN_LEFT;
   "|)", LENS_PAREN_RIGHT;
   "#", HASH;
   "u#", UNIV_HASH;
   "&", AMP;
   "()", LPAREN_RPAREN;
   "(", LPAREN;
   ")", RPAREN;
   ",", COMMA;
   "~>", SQUIGGLY_RARROW;
   "->", RARROW;
   "<--", LONG_LEFT_ARROW;
   "<-", LARROW;
   "<==>", IFF;
   "==>", IMPLIES;
   ".", DOT;
   "?.", QMARK_DOT;
   "?", QMARK;
   ".[", DOT_LBRACK;
   ".(|", DOT_LENS_PAREN_LEFT;
   ".(", DOT_LPAREN;
   ".[|", DOT_LBRACK_BAR;
   "{:pattern", LBRACE_COLON_PATTERN;
   "{:well-founded", LBRACE_COLON_WELL_FOUNDED;
   "returns$", RETURNS_EQ;
   ":", COLON;
   "::", COLON_COLON;
   ":=", COLON_EQUALS;
   ";", SEMICOLON;
   "=", EQUALS;
   "%[", PERCENT_LBRACK;
   "!{", BANG_LBRACE;
   "[@@@", LBRACK_AT_AT_AT;
   "[@@", LBRACK_AT_AT;
   "[@", LBRACK_AT;
   "[", LBRACK;
   "[|", LBRACK_BAR;
   "{|", LBRACE_BAR;
   "|>", PIPE_RIGHT;
   "]", RBRACK;
   "|]", BAR_RBRACK;
   "|}", BAR_RBRACE;
   "{", LBRACE;
   "|", BAR;
   "}", RBRACE;
   "$", DOLLAR;
     (* New Unicode equivalents *)
   "∀", FORALL;
   "∃", EXISTS;
   "⊤", NAME "True";
   "⊥", NAME "False";
   "⟹", IMPLIES;
   "⟺", IFF;
   "→", RARROW;
   "←", LARROW;
   "⟵", LONG_LEFT_ARROW;
   "↝", SQUIGGLY_RARROW;
   "≔", COLON_EQUALS;
   "∧", CONJUNCTION;
   "∨", DISJUNCTION;
   "¬", TILDE "~";
   "⸬", COLON_COLON;
   "▹", PIPE_RIGHT;
   "÷", OPINFIX3 "÷";
   "‖", OPINFIX0a "||";
   "×", IDENT "op_Multiply";
   "∗", OPINFIX3 "*";
   "⇒", OPINFIX0c "=>";
   "≥", OPINFIX0c ">=";
   "≤", OPINFIX0c "<=";
   "≠", OPINFIX0c "<>";
   "≪", OPINFIX0c "<<";
   "◃", OPINFIX0c "<|";
   "±", OPPREFIX "±";
   "∁", OPPREFIX "∁";
   "∂", OPPREFIX "∂";
   "√", OPPREFIX "√";
    ] in
   List.iter (fun (k,v) -> Hashtbl.add operators k v) l

(* Source range of the current lexeme, as a syntax-level range. *)
let current_range lexbuf =
    let startpos, endpos = L.range lexbuf in
    FStar_Parser_Util.mksyn_range startpos endpos

(* Raise lexing error [e] with [msg], located at the current lexeme. *)
let fail lexbuf (e, msg) =
     let range = current_range lexbuf in
     E.raise_error (e, msg) range

(* Nesting counters for delimiter-sensitive lexing. *)
type delimiters = { angle:int ref; paren:int ref; }
(* Number of pending type-application '<' openers awaiting a '>' closer. *)
let n_typ_apps = ref 0

(* If a type application is open, consume one pending '>' and report it;
   otherwise '>' should lex as an ordinary operator. *)
let is_typ_app_gt () =
  match !n_typ_apps with
  | n when n > 0 -> decr n_typ_apps; true
  | _ -> false

(* Register [n] newlines with the lexer's position tracking. *)
let rec mknewline n lexbuf =
  if n <> 0 then begin
    L.new_line lexbuf;
    mknewline (n - 1) lexbuf
  end

(* Strip machine-integer literal suffixes (width/signedness markers such
   as "uy", "us", "ul", "uL", "sz") from a numeric lexeme. *)
let clean_number x = String.strip ~chars:"uzyslLUnIN" x

(* Try to trim each line of [comment] by the amount of space
    on the first line of the comment if possible *)
(* TODO : apply this to FSDOC too *)
let maybe_trim_lines start_column comment =
  if start_column = 0 then comment
  else
    let comment_lines = String.split_on_char '\n' comment in
    (* [ensures_empty_prefix k s] caps [k] at the length of [s]'s leading
       run of spaces; folding over all lines yields the widest trim that
       is safe for every line. *)
    let ensures_empty_prefix k s =
      let j = min k (String.length s - 1) in
      let rec aux i = if i > j then k else if s.[i] <> ' ' then i else aux (i+1) in
      aux 0 in
    let trim_width = List.fold_left ensures_empty_prefix start_column comment_lines in
    String.concat "\n" (List.map (fun s -> String.tail s trim_width) comment_lines)

(* Shared buffer accumulating the text of the block comment being lexed. *)
let comment_buffer = Buffer.create 128

(* Begin lexing a block comment: record its opening delimiter and return
   the triple (inside_inner_comment, buffer, start_position). *)
let start_comment lexbuf =
  Buffer.add_string comment_buffer "(*" ;
  (false, comment_buffer, fst (L.range lexbuf))

(* Finish a block comment: append its closing delimiter, de-indent the
   text by the comment's starting column, reset the shared buffer, and
   record the comment together with its source range. *)
let terminate_comment buffer startpos lexbuf =
  let endpos = snd (L.range lexbuf) in
  Buffer.add_string buffer "*)" ;
  let comment = Buffer.contents buffer in
  let comment = maybe_trim_lines (startpos.Lexing.pos_cnum - startpos.Lexing.pos_bol) comment in
  Buffer.clear buffer;
  add_comment (comment, FStar_Parser_Util.mksyn_range startpos endpos)

(* Record a one-line comment.  [pre] is text already consumed before the
   comment body (e.g. an operator that turned out to start a comment);
   a one-line comment never spans lines, which the assert checks. *)
let push_one_line_comment pre lexbuf =
  let startpos, endpos = L.range lexbuf in
  assert (startpos.Lexing.pos_lnum = endpos.Lexing.pos_lnum);
  add_comment (pre ^ L.lexeme lexbuf, FStar_Parser_Util.mksyn_range startpos endpos)

(** Unicode class definitions — sedlex regexps, one per Unicode general
  category, used to build the character classes below.
  Auto-generated from http:/ /www.unicode.org/Public/8.0.0/ucd/UnicodeData.txt **)
(** Ll **)
let u_lower = [%sedlex.regexp? ll]
(** Lu *)
let u_upper = [%sedlex.regexp? lu]
(** Lo *)
let u_other = [%sedlex.regexp? lo]
(** Lm *)
let u_modifier = [%sedlex.regexp? lm]
(** Lt *)
let u_title = [%sedlex.regexp? lt]
(** Zs *)
let u_space = [%sedlex.regexp? zs]
(** These are not unicode spaces but we accept as whitespace in F* source (e.g. tab and BOM) *)
let u_space_extra = [%sedlex.regexp? '\t' | '\x0B' | '\x0C' | '\xA0' | 0xfeff]
(** Zl and Zp *)
let u_line_sep = [%sedlex.regexp? zl]
let u_par_sep = [%sedlex.regexp? zp]
(** Sm math symbols *)
let u_math = [%sedlex.regexp? sm]
let u_math_ascii = [%sedlex.regexp? 0x002b | 0x003c .. 0x003e | 0x007c | 0x007e]
let u_math_nonascii = [%sedlex.regexp? Sub(u_math, u_math_ascii)]
(** Sc currency *)
let u_currency = [%sedlex.regexp? sc]
(** Sk *)
let u_modifier_symbol = [%sedlex.regexp? sk]
(** So *)
let u_other_symbol = [%sedlex.regexp? so]
(** Nd *)
let u_decimal_digit = [%sedlex.regexp? nd]
(** Nl *)
let u_digit_letter = [%sedlex.regexp? nl]
(** No *)
let u_other_digit = [%sedlex.regexp? no]
(** Pd *)
let u_punct_hyphen = [%sedlex.regexp? pd]
(** Ps *)
let u_punct_obra = [%sedlex.regexp? ps]
(** Pe *)
let u_punct_cbra = [%sedlex.regexp? pe]
(** Pi *)
let u_punct_oquot = [%sedlex.regexp? pi]
(** Pf *)
let u_punct_cquot = [%sedlex.regexp? pf]
(** Pc *)
let u_punct_connect = [%sedlex.regexp? pc]
(** Po *)
let u_punct_other = [%sedlex.regexp? po]
(** Mn *)
let u_mod_nospace = [%sedlex.regexp? mn]
(** Mc *)
let u_mod = [%sedlex.regexp? mc]
(** Me *)
let u_mod_enclose = [%sedlex.regexp? me]
(** Cc *)
let u_ascii_control = [%sedlex.regexp? cc]
(** Cf *)
let u_format_control = [%sedlex.regexp? cf]
(** Co *)
let u_private_use = [%sedlex.regexp? co]
(** Cs *)
let u_surrogate = [%sedlex.regexp? cs]

(* -------------------------------------------------------------------- *)
(* Character classes for identifiers and numerals. *)
let lower  = [%sedlex.regexp? u_lower]
let upper  = [%sedlex.regexp? u_upper | u_title]
let letter = [%sedlex.regexp? u_lower | u_upper | u_other | u_modifier]
let digit  = [%sedlex.regexp? '0'..'9']
let hex    = [%sedlex.regexp? '0'..'9' | 'A'..'F' | 'a'..'f']

(* -------------------------------------------------------------------- *)
let anywhite  = [%sedlex.regexp? u_space | u_space_extra]
let newline   = [%sedlex.regexp? "\r\n" | 10 | 13 | 0x2028 | 0x2029]

(* -------------------------------------------------------------------- *)
let op_char = [%sedlex.regexp? Chars "!$%&*+-.<>=?^|~:@#\\/"]

(* op_token must be split into separate regular expressions to prevent
   compilation from hanging *)
let op_token_1 = [%sedlex.regexp? "~" | "-" | "/\\" | "\\/" | "<:" | "$:" | "<@" | "(|" | "|)" | "#" ]
let op_token_2 = [%sedlex.regexp? "u#" | "&" | "()" | "(" | ")" | "," | "~>" | "->" | "<--" ]
let op_token_3 = [%sedlex.regexp? "<-" | "<==>" | "==>" | "." | "?." | "?" | ".[|" | ".[" | ".(|" | ".(" ]
let op_token_4 = [%sedlex.regexp? "$" | "{:pattern" | "{:well-founded" | ":" | "::" | ":=" | ";;" | ";" | "=" | "%[" | "returns$" ]
let op_token_5 = [%sedlex.regexp? "!{" | "[@@@" | "[@@" | "[@" | "[|" | "{|" | "[" | "|>" | "]" | "|]" | "|}" | "{" | "|" | "}" ]

(* -------------------------------------------------------------------- *)
(* Numeric literals: hex/octal/binary prefixes, machine-integer width
   suffixes (y/s/l/L with optional unsigned marker), char8 'z' and
   size_t "sz" suffixes, floats and range literals. *)
let xinteger =
  [%sedlex.regexp?
  (  '0', ('x'| 'X'), Plus hex
   | '0', ('o'| 'O'), Plus ('0' .. '7')
   | '0', ('b'| 'B'), Plus ('0' .. '1') )]
let integer = [%sedlex.regexp? Plus digit]
let any_integer = [%sedlex.regexp? xinteger | integer]
let unsigned = [%sedlex.regexp? Chars "uU"]
let int8 = [%sedlex.regexp? any_integer, 'y']
let uint8 = [%sedlex.regexp? any_integer, unsigned, 'y']
let int16 = [%sedlex.regexp? any_integer, 's']
let uint16 = [%sedlex.regexp? any_integer, unsigned, 's']
let int32 = [%sedlex.regexp? any_integer, 'l']
let uint32 = [%sedlex.regexp? any_integer, unsigned, 'l']
let int64 = [%sedlex.regexp? any_integer, 'L']
let uint64 = [%sedlex.regexp? any_integer, unsigned, 'L']
let char8 = [%sedlex.regexp? any_integer, 'z']
let sizet = [%sedlex.regexp? any_integer, "sz"]

let floatp     = [%sedlex.regexp? Plus digit, '.', Star digit]
let floate     = [%sedlex.regexp? Plus digit, Opt ('.', Star digit), Chars "eE", Opt (Chars "+-"), Plus digit]
let real       = [%sedlex.regexp? floatp, 'R']
let ieee64     = [%sedlex.regexp? floatp | floate]
let xieee64    = [%sedlex.regexp? xinteger, 'L', 'F']
let range      = [%sedlex.regexp? Plus digit, '.', '.', Plus digit]

(* First-character classes that determine an operator's fixity and
   precedence level (OPINFIX0a..OPINFIX3, OPPREFIX). *)
let op_prefix  = [%sedlex.regexp? Chars "!~?"]
let op_infix0a = [%sedlex.regexp? Chars "|"] (* left *)
let op_infix0b = [%sedlex.regexp? Chars "&"] (* left *)
let op_infix0c = [%sedlex.regexp? Chars "=<>"] (* left *)
let op_infix0c_nogt = [%sedlex.regexp? Chars "=<"] (* left *)
let op_infix0d = [%sedlex.regexp? Chars "$"] (* left *)

let op_infix0  = [%sedlex.regexp? op_infix0a | op_infix0b | op_infix0c | op_infix0d]
let op_infix1  = [%sedlex.regexp? Chars "@^"] (* right *)
let op_infix2  = [%sedlex.regexp? Chars "+-"] (* left *)
let op_infix3  = [%sedlex.regexp? Chars "*/%"] (* left *)
let symbolchar = [%sedlex.regexp? op_prefix | op_infix0 | op_infix1 | op_infix2 | op_infix3 | Chars ".:"]
let uoperator  = [%sedlex.regexp? u_math_nonascii]

(* -------------------------------------------------------------------- *)
(* Character-literal escapes and identifier shapes: constructors start
   upper-case, identifiers start lower-case or underscore, type variables
   start with a quote. *)
let escape_char = [%sedlex.regexp? '\\', (Chars "\\\"'bfntrv0" | "x", hex, hex | "u", hex, hex, hex, hex)]
let char        = [%sedlex.regexp? Compl '\\' | escape_char]

(* -------------------------------------------------------------------- *)
let constructor_start_char = [%sedlex.regexp? upper]
let ident_start_char       = [%sedlex.regexp? lower  | '_']
let ident_char             = [%sedlex.regexp? letter | digit | '\'' | '_']
let tvar_char              = [%sedlex.regexp? letter | digit | '\'' | '_']

let constructor = [%sedlex.regexp? constructor_start_char, Star ident_char]
let ident       = [%sedlex.regexp? ident_start_char, Star ident_char]
let tvar        = [%sedlex.regexp? '\'', (ident_start_char | constructor_start_char), Star tvar_char]

(* [ensure_no_comment lexbuf next] takes a [lexbuf] and [next], a
   continuation. It is to be called after a regexp was matched, to
   ensure match text does not contain any comment start.

   If the match [s] contains a comment start (an occurrence of [//])
   then we place the lexer at that comment start.  We continue with
   [next s], [s] being either the whole match, or the chunk before
   [//].
*)
let ensure_no_comment lexbuf (next: string -> token): token =
  let s = L.lexeme lexbuf in
  next (try let before, _after = BatString.split s "//" in
            (* rollback to the beginning of the match *)
            L.rollback lexbuf;
            (* skip [n] characters in the lexer, with [n] being [hd]'s len *)
            BatString.iter (fun _ -> let _ = L.next lexbuf in ()) before;
            before with | Not_found -> s)

(* Main tokenizer: maps the next lexeme to a parser token.  Whitespace,
   newlines and comments are consumed by recursing; comment text is
   recorded via the comment helpers above.  Rule order matters: sedlex
   applies longest-match first, then textual order. *)
let rec token lexbuf =
match%sedlex lexbuf with
 | "%splice" -> SPLICE
 | "`%" -> BACKTICK_PERC
 | "`#" -> BACKTICK_HASH
 | "`@" -> BACKTICK_AT
 (* Compiler pragmas and source-location macros. *)
 | "#set-options" -> PRAGMA_SET_OPTIONS
 | "#reset-options" -> PRAGMA_RESET_OPTIONS
 | "#push-options" -> PRAGMA_PUSH_OPTIONS
 | "#pop-options" -> PRAGMA_POP_OPTIONS
 | "#restart-solver" -> PRAGMA_RESTART_SOLVER
 | "#print-effects-graph" -> PRAGMA_PRINT_EFFECTS_GRAPH
 | "__SOURCE_FILE__" -> STRING (L.source_file lexbuf)
 | "__LINE__" -> INT (string_of_int (L.current_line lexbuf), false)

 | Plus anywhite -> token lexbuf
 | newline -> L.new_line lexbuf; token lexbuf

 (* Must appear before tvar to avoid 'a <-> 'a' conflict *)
 | ('\'', char, '\'') -> CHAR (unescape (utrim_both lexbuf 1 1))
 | ('\'', char, '\'', 'B') -> CHAR (unescape (utrim_both lexbuf 1 2))
 | '`' -> BACKTICK

 (* Keywords immediately followed by operator characters lex as the
    corresponding "<keyword>-op" tokens (match!, let*, and&, ...). *)
 | "match", Plus op_char ->
    ensure_no_comment lexbuf (fun s ->
        match BatString.lchop ~n:5 s with
        | "" -> MATCH
        | s  -> MATCH_OP s
      )

 | "if", Plus op_char ->
    ensure_no_comment lexbuf (fun s ->
        match BatString.lchop ~n:2 s with
        | "" -> IF
        | s  -> IF_OP s
      )

 | "let", Plus op_char ->
    ensure_no_comment lexbuf (fun s ->
        match BatString.lchop ~n:3 s with
        | "" -> LET false
        | s  -> LET_OP s
      )

 | "and", Plus op_char ->
    ensure_no_comment lexbuf (fun s ->
        match BatString.lchop ~n:3 s with
        | "" -> AND
        | s  -> AND_OP s
      )

 | ";", Plus op_char ->
    ensure_no_comment lexbuf (fun s ->
        match BatString.lchop ~n:1 s with
        | "" -> SEMICOLON
        | s  -> SEMICOLON_OP (Some s)
      )

 | ";;" -> SEMICOLON_OP None

 (* Identifiers: keywords take precedence; the reserved prefix is
    rejected for user identifiers. *)
 | ident -> let id = L.lexeme lexbuf in
   if FStar_Compiler_Util.starts_with id FStar_Ident.reserved_prefix
   then FStar_Errors.raise_error
                    (Codes.Fatal_ReservedPrefix,
                     FStar_Ident.reserved_prefix  ^ " is a reserved prefix for an identifier")
                    (current_range lexbuf);
   Hashtbl.find_option keywords id |> Option.default (IDENT id)
 | constructor -> let id = L.lexeme lexbuf in
   Hashtbl.find_option constructors id |> Option.default (NAME id)

 | tvar -> TVAR (L.lexeme lexbuf)
 (* Numeric literals; suffixes are stripped by clean_number. *)
 | (integer | xinteger) -> INT (clean_number (L.lexeme lexbuf), false)
 | (uint8 | char8) ->
   let c = clean_number (L.lexeme lexbuf) in
   let cv = int_of_string c in
   if cv < 0 || cv > 255 then fail lexbuf (Codes.Fatal_SyntaxError, "Out-of-range character literal")
   else UINT8 (c)
 | int8 -> INT8 (clean_number (L.lexeme lexbuf), false)
 | uint16 -> UINT16 (clean_number (L.lexeme lexbuf))
 | int16 -> INT16 (clean_number (L.lexeme lexbuf), false)
 | uint32 -> UINT32 (clean_number (L.lexeme lexbuf))
 | int32 -> INT32 (clean_number (L.lexeme lexbuf), false)
 | uint64 -> UINT64 (clean_number (L.lexeme lexbuf))
 | int64 -> INT64 (clean_number (L.lexeme lexbuf), false)
 | sizet -> SIZET (clean_number (L.lexeme lexbuf))
 | range -> RANGE (L.lexeme lexbuf)
 | real -> REAL(trim_right lexbuf 1)
 | (integer | xinteger | ieee64 | xieee64), Plus ident_char ->
   fail lexbuf (Codes.Fatal_SyntaxError, "This is not a valid numeric literal: " ^ L.lexeme lexbuf)

 | "(*" ->
   let inner, buffer, startpos = start_comment lexbuf in
   comment inner buffer startpos lexbuf

 | "// IN F*:" -> token lexbuf
 | "//" ->
     (* Only match on "//" to allow the longest-match rule to catch IN F*. This
      * creates a lexing conflict with op_infix3 which is caught below. *)
     one_line_comment (L.lexeme lexbuf) lexbuf

 | '"' -> string (Buffer.create 0) lexbuf.Sedlexing.start_p lexbuf

 (* Double-backquoted identifiers: ``...`` *)
 | '`', '`', (Plus (Compl ('`' | 10 | 13 | 0x2028 | 0x2029) | '`', Compl ('`' | 10 | 13 | 0x2028 | 0x2029))), '`', '`' ->
   IDENT (trim_both lexbuf 2 2)

 | op_token_1
 | op_token_2
 | op_token_3
 | op_token_4
 | op_token_5 -> L.lexeme lexbuf |> Hashtbl.find operators

 (* '>' closes a pending type application when one is open. *)
 | "<" -> OPINFIX0c("<")
 | ">" -> if is_typ_app_gt ()
          then TYP_APP_GREATER
          else begin match%sedlex lexbuf with
               | Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c (">" ^ s))
               | _ -> assert false end

 (* Operators. *)
 | op_prefix,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPPREFIX  s)
 | op_infix0a, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0a s)
 | op_infix0b, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0b s)
 | op_infix0c_nogt, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c s)
 | op_infix0d, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0d s)
 | op_infix1,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX1  s)
 | op_infix2,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX2  s)
 | op_infix3,  Star symbolchar -> ensure_no_comment lexbuf (function
                                      | "" -> one_line_comment "" lexbuf
                                      | s  -> OPINFIX3 s
                                    )
 | "**"     ,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX4  s)

 (* Unicode Operators *)
 | uoperator -> let id = L.lexeme lexbuf in
   Hashtbl.find_option operators id |> Option.default (OPINFIX4 id)

 (* Mixfix array/sequence access and assignment operators. *)
 | ".[]<-"                 -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)
 | ".()<-"                 -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)
 | ".(||)<-"                -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)
 | ".[||]<-"                 -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)
 | ".[]"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)
 | ".()"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)
 | ".(||)"                 -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)
 | ".[||]"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)

 | eof -> EOF
 | _ -> fail lexbuf (Codes.Fatal_SyntaxError, "unexpected char")

(* Consume the rest of a "//" comment: everything up to (but excluding) the
   next line terminator (LF, CR, U+2028 or U+2029).  The text, prefixed with
   [pre], is recorded via [push_one_line_comment]; lexing then resumes with
   [token]. *)
and one_line_comment pre lexbuf =
match%sedlex lexbuf with
 | Star (Compl (10 | 13 | 0x2028 | 0x2029)) -> push_one_line_comment pre lexbuf; token lexbuf
 (* [Star ...] also matches the empty input, so the branch above always
    applies; sedlex nevertheless requires a catch-all case. *)
 | _ -> assert false

(* Scan the body of a double-quoted string literal, accumulating its
   (unescaped) contents in [buffer].  [start_pos] is the position of the
   opening quote; it is restored on the resulting token so the whole literal
   is located correctly. *)
and string buffer start_pos lexbuf =
match%sedlex lexbuf with
 (* Line continuation: backslash-newline plus any following whitespace is
    swallowed entirely (nothing is added to the buffer). *)
 | '\\', newline, Star anywhite -> L.new_line lexbuf; string buffer start_pos lexbuf
 (* A literal newline inside the string is kept verbatim, and the line
    counter is updated. *)
 | newline ->
   Buffer.add_string buffer (L.lexeme lexbuf);
   L.new_line lexbuf; string buffer start_pos lexbuf
 (* Escape sequence: decode it with [unescape] and append the resulting
    single Unicode character. *)
 | escape_char ->
   Buffer.add_string buffer (BatUTF8.init 1 (fun _ -> unescape (L.ulexeme lexbuf) |> BatUChar.chr));
   string buffer start_pos lexbuf
 | '"' ->
   (* position info must be set since the start of the string *)
   lexbuf.Sedlexing.start_p <- start_pos;
   STRING (Buffer.contents buffer)
 | eof -> fail lexbuf (Codes.Fatal_SyntaxError, "unterminated string")
 (* Any other character is copied through unchanged. *)
 | any ->
  Buffer.add_string buffer (L.lexeme lexbuf);
  string buffer start_pos lexbuf
 (* Unreachable: [any] covers everything but EOF; sedlex requires the
    catch-all. *)
 | _ -> assert false

(* Scan the body of a "(*" ... "*)" block comment, accumulating its raw text
   in [buffer].  [startpos] is where the comment opened.  [inner] is true
   while scanning a nested comment: in that case the closing "*)" returns a
   throwaway EOF token, which the enclosing call discards ([let _ = ...]). *)
and comment inner buffer startpos lexbuf =
match%sedlex lexbuf with
 (* Nested comment: recurse with [inner = true], discard the sentinel result,
    then continue scanning the current comment. *)
 | "(*" ->
   Buffer.add_string buffer "(*" ;
   let _ = comment true buffer startpos lexbuf in
   comment inner buffer startpos lexbuf
 | newline ->
   L.new_line lexbuf;
   Buffer.add_string buffer (L.lexeme lexbuf);
   comment inner buffer startpos lexbuf
 (* End of this comment level: flush it, then either signal the enclosing
    level (sentinel EOF) or resume normal lexing. *)
 | "*)" ->
   terminate_comment buffer startpos lexbuf;
   if inner then EOF else token lexbuf
 (* Unterminated comment at end of input: flush what was collected. *)
 | eof ->
   terminate_comment buffer startpos lexbuf; EOF
 | any ->
   Buffer.add_string buffer (L.lexeme lexbuf);
   comment inner buffer startpos lexbuf
 (* Unreachable catch-all required by sedlex. *)
 | _ -> assert false

(* Skip any trailing spaces followed by one newline, then resume normal
   lexing.  NOTE(review): unlike the other rules, the newline consumed here
   is not reported via [L.new_line] — presumably the caller accounts for it;
   verify against the call sites. *)
and ignore_endline lexbuf =
match%sedlex lexbuf with
 | Star ' ', newline -> token lexbuf
 | _ -> assert false


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_Parse.ml
================================================
open Prims
open FStar_Errors
open FStar_Compiler_List
open FStar_Compiler_Util
open FStar_Compiler_Range
(* open FStar_Options *)
open FStar_Parser_Const
open FStar_Parser_AST
open FStar_Parser_Util
open FStar_Const
open FStar_Ident
open FStar_String
(* Tokens shared between the F* surface-syntax lexer and this
   ocamlyacc-generated parser.  The constructor order must stay in sync with
   the generated [yytransl_const] / [yytransl_block] tables below: nullary
   constructors are numbered through [yytransl_const], payload-carrying ones
   (e.g. [IDENT of string]) through [yytransl_block].  Do not edit by hand. *)
type token =
  | AMP
  | AND
  | AND_OP of (string)
  | AS
  | ASSERT
  | ASSUME
  | ATTRIBUTES
  | BACKTICK
  | BACKTICK_AT
  | BACKTICK_HASH
  | BACKTICK_PERC
  | BANG_LBRACE
  | BAR
  | BAR_RBRACE
  | BAR_RBRACK
  | BEGIN
  | BY
  | CALC
  | CHAR of (char)
  | CLASS
  | COLON
  | COLON_COLON
  | COLON_EQUALS
  | COMMA
  | CONJUNCTION
  | DECREASES
  | DEFAULT
  | DISJUNCTION
  | DOLLAR
  | DOT
  | DOT_LBRACK
  | DOT_LBRACK_BAR
  | DOT_LENS_PAREN_LEFT
  | DOT_LPAREN
  | EFFECT
  | ELIM
  | ELSE
  | END
  | ENSURES
  | EOF
  | EQUALS
  | EQUALTYPE
  | EXCEPTION
  | EXISTS
  | FALSE
  | FORALL
  | FRIEND
  | FUN
  | FUNCTION
  | HASH
  | IDENT of (string)
  | IF
  | IFF
  | IF_OP of (string)
  | IMPLIES
  | IN
  | INCLUDE
  | INLINE
  | INLINE_FOR_EXTRACTION
  | INSTANCE
  | INT of (string * bool)
  | INT16 of (string * bool)
  | INT32 of (string * bool)
  | INT64 of (string * bool)
  | INT8 of (string * bool)
  | INTRO
  | IRREDUCIBLE
  | LARROW
  | LAYERED_EFFECT
  | LBRACE
  | LBRACE_BAR
  | LBRACE_COLON_PATTERN
  | LBRACE_COLON_WELL_FOUNDED
  | LBRACK
  | LBRACK_AT
  | LBRACK_AT_AT
  | LBRACK_AT_AT_AT
  | LBRACK_BAR
  | LENS_PAREN_LEFT
  | LENS_PAREN_RIGHT
  | LET of (bool)
  | LET_OP of (string)
  | LOGIC
  | LONG_LEFT_ARROW
  | LPAREN
  | LPAREN_RPAREN
  | MATCH
  | MATCH_OP of (string)
  | MINUS
  | MODULE
  | NAME of (string)
  | NEW
  | NEW_EFFECT
  | NOEQUALITY
  | NOEXTRACT
  | OF
  | OPAQUE
  | OPEN
  | OPINFIX0a of (string)
  | OPINFIX0b of (string)
  | OPINFIX0c of (string)
  | OPINFIX0d of (string)
  | OPINFIX1 of (string)
  | OPINFIX2 of (string)
  | OPINFIX3 of (string)
  | OPINFIX4 of (string)
  | OPPREFIX of (string)
  | OP_MIXFIX_ACCESS of (string)
  | OP_MIXFIX_ASSIGNMENT of (string)
  | PERCENT_LBRACK
  | PIPE_RIGHT
  | POLYMONADIC_BIND
  | POLYMONADIC_SUBCOMP
  | PRAGMA_POP_OPTIONS
  | PRAGMA_PRINT_EFFECTS_GRAPH
  | PRAGMA_PUSH_OPTIONS
  | PRAGMA_RESET_OPTIONS
  | PRAGMA_RESTART_SOLVER
  | PRAGMA_SET_OPTIONS
  | PRIVATE
  | QMARK
  | QMARK_DOT
  | QUOTE
  | RANGE of (string)
  | RANGE_OF
  | RARROW
  | RBRACE
  | RBRACK
  | REAL of (string)
  | REC
  | REFLECTABLE
  | REIFIABLE
  | REIFY
  | REQUIRES
  | RETURNS
  | RETURNS_EQ
  | RPAREN
  | SEMICOLON
  | SEMICOLON_OP of (string option)
  | SET_RANGE_OF
  | SIZET of (string)
  | SPLICE
  | SQUIGGLY_RARROW
  | STRING of (string)
  | SUBKIND
  | SUBTYPE
  | SUB_EFFECT
  | SYNTH
  | THEN
  | TILDE of (string)
  | TOTAL
  | TRUE
  | TRY
  | TVAR of (string)
  | TYPE
  | TYP_APP_GREATER
  | TYP_APP_LESS
  | UINT16 of (string)
  | UINT32 of (string)
  | UINT64 of (string)
  | UINT8 of (string)
  | UNDERSCORE
  | UNFOLD
  | UNFOLDABLE
  | UNIV_HASH
  | UNOPTEQUALITY
  | VAL
  | WHEN
  | WITH

open Parsing;;
(* Force a reference to [Parsing.parse_error] (ocamlyacc-emitted boilerplate;
   the value itself is discarded). *)
let _ = parse_error;;
# 2 "parse.mly"
(*
 We are expected to have only 6 shift-reduce conflicts in ML and 8 in F#.
 A lot (176) of end-of-stream conflicts are also reported and
 should be investigated...
*)
(* (c) Microsoft Corporation. All rights reserved *)
open Prims
open FStar_Pervasives
open FStar_Errors
open FStar_Compiler_List
open FStar_Compiler_Util
open FStar_Compiler_Range
(* open FStar_Options *)
(* TODO : these files should be deprecated and removed *)
(* open FStar_Syntax_Syntax *)
open FStar_Parser_Const
open FStar_Parser_AST
open FStar_Parser_Util
open FStar_Const
open FStar_Ident
open FStar_String

(* Warning text emitted for the deprecated [logic] qualifier. *)
let logic_qualifier_deprecation_warning =
  "logic qualifier is deprecated, please remove it from the source program. In case your program verifies with the qualifier annotated but not without it, please try to minimize the example and file a github issue"

(* Wrap [m] in the [Meta] constructor (from FStar_Parser_AST). *)
let mk_meta_tac m = Meta m

(* Warning text for the deprecated single-at [@ ...] attribute syntax. *)
let old_attribute_syntax_warning =
  "The `[@ ...]` syntax of attributes is deprecated. \
   Use `[@@ a1; a2; ...; an]`, a semi-colon separated list of attributes, instead"

(* Warning text for the deprecated lightweight do-notation. *)
let do_notation_deprecation_warning =
  "The lightweight do notation [x <-- y; z] or [x ;; z] is deprecated, use let operators (i.e. [let* x = y in z] or [y ;* z], [*] being any sequence of operator characters) instead."

(* Interpret an optional list, treating a missing ([None]) list as empty. *)
let none_to_empty_list = function
  | Some l -> l
  | None -> []

# 216 "parse.ml"
(* ocamlyacc-generated translation table: parser-internal code for each
   payload-free token constructor, in declaration order (the names appear in
   the adjacent comments).  Machine-generated — do not edit by hand. *)
let yytransl_const = [|
  257 (* AMP *);
  258 (* AND *);
  260 (* AS *);
  261 (* ASSERT *);
  262 (* ASSUME *);
  263 (* ATTRIBUTES *);
  264 (* BACKTICK *);
  265 (* BACKTICK_AT *);
  266 (* BACKTICK_HASH *);
  267 (* BACKTICK_PERC *);
  268 (* BANG_LBRACE *);
  269 (* BAR *);
  270 (* BAR_RBRACE *);
  271 (* BAR_RBRACK *);
  272 (* BEGIN *);
  273 (* BY *);
  274 (* CALC *);
  276 (* CLASS *);
  277 (* COLON *);
  278 (* COLON_COLON *);
  279 (* COLON_EQUALS *);
  280 (* COMMA *);
  281 (* CONJUNCTION *);
  282 (* DECREASES *);
  283 (* DEFAULT *);
  284 (* DISJUNCTION *);
  285 (* DOLLAR *);
  286 (* DOT *);
  287 (* DOT_LBRACK *);
  288 (* DOT_LBRACK_BAR *);
  289 (* DOT_LENS_PAREN_LEFT *);
  290 (* DOT_LPAREN *);
  291 (* EFFECT *);
  292 (* ELIM *);
  293 (* ELSE *);
  294 (* END *);
  295 (* ENSURES *);
    0 (* EOF *);
  296 (* EQUALS *);
  297 (* EQUALTYPE *);
  298 (* EXCEPTION *);
  299 (* EXISTS *);
  300 (* FALSE *);
  301 (* FORALL *);
  302 (* FRIEND *);
  303 (* FUN *);
  304 (* FUNCTION *);
  305 (* HASH *);
  307 (* IF *);
  308 (* IFF *);
  310 (* IMPLIES *);
  311 (* IN *);
  312 (* INCLUDE *);
  313 (* INLINE *);
  314 (* INLINE_FOR_EXTRACTION *);
  315 (* INSTANCE *);
  321 (* INTRO *);
  322 (* IRREDUCIBLE *);
  323 (* LARROW *);
  324 (* LAYERED_EFFECT *);
  325 (* LBRACE *);
  326 (* LBRACE_BAR *);
  327 (* LBRACE_COLON_PATTERN *);
  328 (* LBRACE_COLON_WELL_FOUNDED *);
  329 (* LBRACK *);
  330 (* LBRACK_AT *);
  331 (* LBRACK_AT_AT *);
  332 (* LBRACK_AT_AT_AT *);
  333 (* LBRACK_BAR *);
  334 (* LENS_PAREN_LEFT *);
  335 (* LENS_PAREN_RIGHT *);
  338 (* LOGIC *);
  339 (* LONG_LEFT_ARROW *);
  340 (* LPAREN *);
  341 (* LPAREN_RPAREN *);
  342 (* MATCH *);
  344 (* MINUS *);
  345 (* MODULE *);
  347 (* NEW *);
  348 (* NEW_EFFECT *);
  349 (* NOEQUALITY *);
  350 (* NOEXTRACT *);
  351 (* OF *);
  352 (* OPAQUE *);
  353 (* OPEN *);
  365 (* PERCENT_LBRACK *);
  366 (* PIPE_RIGHT *);
  367 (* POLYMONADIC_BIND *);
  368 (* POLYMONADIC_SUBCOMP *);
  369 (* PRAGMA_POP_OPTIONS *);
  370 (* PRAGMA_PRINT_EFFECTS_GRAPH *);
  371 (* PRAGMA_PUSH_OPTIONS *);
  372 (* PRAGMA_RESET_OPTIONS *);
  373 (* PRAGMA_RESTART_SOLVER *);
  374 (* PRAGMA_SET_OPTIONS *);
  375 (* PRIVATE *);
  376 (* QMARK *);
  377 (* QMARK_DOT *);
  378 (* QUOTE *);
  380 (* RANGE_OF *);
  381 (* RARROW *);
  382 (* RBRACE *);
  383 (* RBRACK *);
  385 (* REC *);
  386 (* REFLECTABLE *);
  387 (* REIFIABLE *);
  388 (* REIFY *);
  389 (* REQUIRES *);
  390 (* RETURNS *);
  391 (* RETURNS_EQ *);
  392 (* RPAREN *);
  393 (* SEMICOLON *);
  395 (* SET_RANGE_OF *);
  397 (* SPLICE *);
  398 (* SQUIGGLY_RARROW *);
  400 (* SUBKIND *);
  401 (* SUBTYPE *);
  402 (* SUB_EFFECT *);
  403 (* SYNTH *);
  404 (* THEN *);
  406 (* TOTAL *);
  407 (* TRUE *);
  408 (* TRY *);
  410 (* TYPE *);
  411 (* TYP_APP_GREATER *);
  412 (* TYP_APP_LESS *);
  417 (* UNDERSCORE *);
  418 (* UNFOLD *);
  419 (* UNFOLDABLE *);
  420 (* UNIV_HASH *);
  421 (* UNOPTEQUALITY *);
  422 (* VAL *);
  423 (* WHEN *);
  424 (* WITH *);
    0|]

(* ocamlyacc-generated translation table: parser-internal code for each
   payload-carrying token constructor.  Machine-generated — do not edit. *)
let yytransl_block = [|
  259 (* AND_OP *);
  275 (* CHAR *);
  306 (* IDENT *);
  309 (* IF_OP *);
  316 (* INT *);
  317 (* INT16 *);
  318 (* INT32 *);
  319 (* INT64 *);
  320 (* INT8 *);
  336 (* LET *);
  337 (* LET_OP *);
  343 (* MATCH_OP *);
  346 (* NAME *);
  354 (* OPINFIX0a *);
  355 (* OPINFIX0b *);
  356 (* OPINFIX0c *);
  357 (* OPINFIX0d *);
  358 (* OPINFIX1 *);
  359 (* OPINFIX2 *);
  360 (* OPINFIX3 *);
  361 (* OPINFIX4 *);
  362 (* OPPREFIX *);
  363 (* OP_MIXFIX_ACCESS *);
  364 (* OP_MIXFIX_ASSIGNMENT *);
  379 (* RANGE *);
  384 (* REAL *);
  394 (* SEMICOLON_OP *);
  396 (* SIZET *);
  399 (* STRING *);
  405 (* TILDE *);
  409 (* TVAR *);
  413 (* UINT16 *);
  414 (* UINT32 *);
  415 (* UINT64 *);
  416 (* UINT8 *);
    0|]

(* ocamlyacc-generated parse tables, packed as byte strings (two bytes per
   entry, as emitted by ocamlyacc).  Machine-generated — do not edit by hand.
   [yylhs]: left-hand-side nonterminal number for each grammar rule. *)
let yylhs = "\255\255\
\006\000\006\000\008\000\008\000\009\000\009\000\011\000\011\000\
\013\000\013\000\014\000\014\000\016\000\016\000\017\000\017\000\
\018\000\018\000\020\000\020\000\021\000\021\000\023\000\023\000\
\025\000\025\000\027\000\027\000\029\000\029\000\031\000\031\000\
\034\000\034\000\036\000\036\000\037\000\037\000\038\000\038\000\
\040\000\040\000\042\000\042\000\044\000\044\000\046\000\046\000\
\046\000\046\000\049\000\049\000\051\000\051\000\051\000\054\000\
\054\000\055\000\055\000\057\000\057\000\059\000\059\000\061\000\
\061\000\063\000\063\000\065\000\065\000\067\000\067\000\068\000\
\068\000\070\000\070\000\072\000\072\000\074\000\074\000\075\000\
\075\000\077\000\077\000\077\000\077\000\077\000\077\000\077\000\
\077\000\078\000\078\000\080\000\080\000\082\000\082\000\084\000\
\084\000\039\000\039\000\087\000\087\000\088\000\088\000\090\000\
\090\000\092\000\092\000\094\000\094\000\096\000\096\000\097\000\
\097\000\041\000\041\000\043\000\043\000\001\000\002\000\002\000\
\100\000\100\000\100\000\100\000\100\000\100\000\101\000\101\000\
\066\000\066\000\064\000\064\000\064\000\106\000\106\000\105\000\
\105\000\105\000\105\000\105\000\105\000\105\000\105\000\105\000\
\105\000\105\000\105\000\105\000\105\000\105\000\105\000\105\000\
\105\000\105\000\105\000\105\000\105\000\105\000\105\000\105\000\
\083\000\109\000\109\000\119\000\118\000\118\000\118\000\118\000\
\118\000\123\000\026\000\026\000\026\000\062\000\062\000\058\000\
\058\000\045\000\081\000\081\000\081\000\081\000\081\000\081\000\
\081\000\081\000\112\000\112\000\126\000\113\000\114\000\098\000\
\115\000\115\000\115\000\116\000\117\000\103\000\103\000\103\000\
\103\000\103\000\103\000\103\000\103\000\103\000\103\000\103\000\
\103\000\103\000\103\000\103\000\103\000\103\000\125\000\107\000\
\107\000\129\000\129\000\129\000\122\000\131\000\085\000\089\000\
\089\000\089\000\073\000\073\000\073\000\073\000\073\000\073\000\
\073\000\073\000\073\000\073\000\073\000\073\000\073\000\073\000\
\073\000\073\000\073\000\135\000\135\000\079\000\079\000\079\000\
\079\000\052\000\052\000\053\000\053\000\053\000\120\000\069\000\
\069\000\069\000\069\000\124\000\124\000\124\000\124\000\124\000\
\124\000\124\000\124\000\124\000\124\000\124\000\124\000\124\000\
\124\000\124\000\124\000\124\000\124\000\124\000\124\000\124\000\
\124\000\124\000\124\000\071\000\071\000\071\000\071\000\050\000\
\108\000\137\000\137\000\138\000\138\000\099\000\099\000\139\000\
\139\000\139\000\139\000\139\000\139\000\140\000\140\000\141\000\
\141\000\136\000\136\000\005\000\104\000\093\000\015\000\130\000\
\019\000\010\000\024\000\022\000\142\000\003\000\003\000\003\000\
\003\000\030\000\030\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\148\000\
\151\000\151\000\151\000\060\000\007\000\007\000\007\000\152\000\
\152\000\153\000\095\000\033\000\033\000\154\000\154\000\155\000\
\155\000\143\000\143\000\156\000\156\000\157\000\157\000\157\000\
\157\000\157\000\157\000\157\000\157\000\127\000\127\000\127\000\
\127\000\127\000\127\000\127\000\127\000\132\000\132\000\158\000\
\158\000\158\000\158\000\158\000\147\000\147\000\149\000\149\000\
\150\000\160\000\160\000\160\000\160\000\160\000\160\000\160\000\
\160\000\160\000\160\000\160\000\160\000\160\000\160\000\160\000\
\160\000\162\000\162\000\162\000\162\000\162\000\162\000\162\000\
\162\000\162\000\162\000\162\000\162\000\162\000\162\000\162\000\
\162\000\161\000\161\000\161\000\161\000\161\000\161\000\161\000\
\161\000\161\000\163\000\163\000\163\000\163\000\163\000\163\000\
\163\000\163\000\163\000\111\000\111\000\111\000\111\000\111\000\
\111\000\111\000\111\000\111\000\111\000\111\000\111\000\111\000\
\111\000\111\000\111\000\111\000\111\000\159\000\091\000\128\000\
\165\000\165\000\133\000\048\000\048\000\167\000\167\000\086\000\
\164\000\047\000\047\000\047\000\168\000\168\000\056\000\056\000\
\056\000\170\000\170\000\144\000\144\000\144\000\144\000\144\000\
\144\000\144\000\144\000\144\000\144\000\144\000\144\000\172\000\
\171\000\173\000\173\000\173\000\173\000\173\000\173\000\173\000\
\173\000\173\000\028\000\174\000\175\000\032\000\110\000\110\000\
\110\000\110\000\110\000\110\000\110\000\110\000\110\000\110\000\
\110\000\110\000\110\000\110\000\110\000\110\000\110\000\110\000\
\110\000\169\000\177\000\177\000\177\000\076\000\076\000\076\000\
\076\000\004\000\178\000\178\000\179\000\179\000\179\000\180\000\
\180\000\035\000\176\000\181\000\181\000\181\000\102\000\102\000\
\102\000\182\000\182\000\182\000\183\000\183\000\183\000\134\000\
\134\000\121\000\121\000\166\000\166\000\146\000\146\000\146\000\
\145\000\145\000\145\000\000\000\000\000\000\000\000\000"

(* [yylen]: number of right-hand-side symbols for each grammar rule. *)
let yylen = "\002\000\
\000\000\002\000\000\000\002\000\000\000\002\000\000\000\003\000\
\000\000\002\000\000\000\002\000\000\000\002\000\000\000\002\000\
\000\000\002\000\000\000\002\000\000\000\001\000\000\000\001\000\
\000\000\001\000\000\000\001\000\000\000\001\000\000\000\002\000\
\000\000\001\000\000\000\001\000\000\000\001\000\000\000\001\000\
\000\000\001\000\000\000\001\000\000\000\003\000\000\000\002\000\
\004\000\005\000\000\000\003\000\000\000\002\000\002\000\000\000\
\002\000\000\000\002\000\000\000\002\000\000\000\002\000\000\000\
\002\000\000\000\002\000\000\000\002\000\000\000\002\000\001\000\
\002\000\001\000\002\000\001\000\002\000\001\000\002\000\001\000\
\002\000\003\000\003\000\003\000\003\000\004\000\004\000\004\000\
\004\000\001\000\002\000\001\000\003\000\001\000\003\000\001\000\
\003\000\001\000\003\000\001\000\003\000\001\000\003\000\001\000\
\003\000\001\000\003\000\001\000\003\000\001\000\003\000\001\000\
\003\000\001\000\003\000\001\000\003\000\002\000\001\000\001\000\
\002\000\002\000\002\000\001\000\001\000\001\000\003\000\003\000\
\001\000\001\000\004\000\002\000\002\000\002\000\003\000\001\000\
\002\000\002\000\002\000\004\000\002\000\002\000\002\000\005\000\
\003\000\002\000\005\000\007\000\007\000\007\000\007\000\007\000\
\005\000\003\000\002\000\002\000\002\000\002\000\002\000\002\000\
\004\000\001\000\001\000\003\000\000\000\002\000\004\000\005\000\
\002\000\003\000\002\000\002\000\004\000\003\000\004\000\002\000\
\003\000\003\000\006\000\008\000\008\000\008\000\008\000\008\000\
\005\000\004\000\001\000\001\000\003\000\008\000\006\000\004\000\
\005\000\008\000\012\000\009\000\005\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\000\000\004\000\001\000\001\000\003\000\001\000\001\000\003\000\
\002\000\001\000\006\000\003\000\003\000\005\000\003\000\001\000\
\003\000\003\000\003\000\003\000\003\000\001\000\002\000\001\000\
\002\000\001\000\001\000\003\000\001\000\005\000\003\000\001\000\
\007\000\001\000\001\000\005\000\003\000\006\000\001\000\003\000\
\002\000\002\000\001\000\003\000\005\000\005\000\005\000\005\000\
\005\000\002\000\004\000\004\000\004\000\004\000\004\000\002\000\
\004\000\004\000\004\000\004\000\004\000\001\000\003\000\003\000\
\003\000\003\000\003\000\003\000\002\000\002\000\001\000\001\000\
\001\000\001\000\003\000\001\000\003\000\001\000\001\000\001\000\
\003\000\003\000\003\000\003\000\003\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\001\000\003\000\002\000\001\000\001\000\003\000\003\000\
\005\000\003\000\003\000\001\000\004\000\004\000\006\000\006\000\
\006\000\006\000\002\000\002\000\002\000\004\000\002\000\007\000\
\005\000\004\000\005\000\005\000\006\000\007\000\005\000\002\000\
\002\000\003\000\003\000\002\000\007\000\007\000\009\000\008\000\
\007\000\008\000\007\000\011\000\006\000\014\000\010\000\001\000\
\001\000\003\000\001\000\006\000\001\000\005\000\005\000\000\000\
\003\000\001\000\001\000\001\000\004\000\001\000\001\000\003\000\
\005\000\003\000\001\000\003\000\001\000\005\000\006\000\007\000\
\003\000\004\000\004\000\005\000\001\000\005\000\006\000\007\000\
\003\000\004\000\004\000\005\000\001\000\003\000\001\000\003\000\
\001\000\002\000\002\000\003\000\003\000\001\000\003\000\001\000\
\001\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\002\000\002\000\002\000\002\000\002\000\
\001\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\002\000\002\000\002\000\002\000\002\000\
\001\000\003\000\003\000\003\000\005\000\003\000\003\000\002\000\
\002\000\001\000\003\000\003\000\003\000\005\000\003\000\003\000\
\002\000\002\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\004\000\001\000\001\000\001\000\003\000\003\000\001\000\002\000\
\002\000\001\000\002\000\001\000\002\000\001\000\001\000\001\000\
\001\000\001\000\004\000\001\000\001\000\001\000\001\000\003\000\
\003\000\003\000\003\000\003\000\005\000\002\000\003\000\002\000\
\002\000\001\000\001\000\004\000\003\000\003\000\003\000\003\000\
\003\000\002\000\003\000\002\000\002\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\002\000\001\000\003\000\002\000\001\000\001\000\001\000\
\003\000\002\000\002\000\003\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\000\000\001\000\003\000\000\000\001\000\
\003\000\000\000\001\000\003\000\000\000\001\000\003\000\001\000\
\003\000\001\000\003\000\001\000\003\000\000\000\001\000\003\000\
\001\000\002\000\003\000\002\000\002\000\002\000\002\000"

(* ocamlyacc-generated parse tables (continued), packed as byte strings.
   Machine-generated — do not edit by hand.
   [yydefred]: default reduction (rule number + 1, or 0) for each state. *)
let yydefred = "\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\205\000\199\000\
\201\000\203\000\000\000\000\000\211\000\210\000\208\000\204\000\
\212\000\207\000\214\000\213\000\206\000\202\000\200\000\209\000\
\076\002\000\000\000\000\000\000\000\000\129\000\130\000\119\000\
\077\002\120\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\017\002\000\000\220\000\000\000\
\000\000\000\000\020\002\000\000\000\000\000\000\000\000\000\000\
\048\001\049\001\016\002\025\002\027\002\029\002\023\002\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\015\002\046\001\047\001\000\000\053\001\000\000\000\000\
\000\000\032\002\021\002\031\002\000\000\033\002\030\002\018\002\
\000\000\000\000\019\002\000\000\054\001\024\002\026\002\028\002\
\022\002\000\000\078\002\034\001\068\001\000\000\109\001\000\000\
\238\001\226\001\153\001\000\000\245\001\000\000\000\000\000\000\
\246\001\000\000\000\000\000\000\032\001\033\001\000\000\000\000\
\000\000\000\000\000\000\000\000\152\001\000\000\000\000\000\000\
\000\000\203\001\000\000\240\001\241\001\247\001\000\000\002\002\
\003\002\047\002\045\002\046\002\079\002\000\000\000\000\000\000\
\052\001\000\000\244\001\000\000\000\000\239\001\000\000\000\000\
\000\000\118\000\067\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\124\000\126\000\000\000\000\000\125\000\000\000\000\000\000\000\
\000\000\000\000\136\000\132\000\133\000\198\000\069\000\000\000\
\089\001\000\000\079\001\000\000\000\000\182\001\183\001\184\001\
\201\001\000\000\040\000\000\000\000\000\000\000\000\000\077\001\
\116\001\000\000\000\000\000\000\000\000\076\001\050\001\000\000\
\000\000\051\001\255\000\000\000\000\000\250\000\251\000\000\000\
\000\000\000\000\031\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\238\000\003\001\242\000\248\000\000\000\
\000\000\232\000\000\000\243\000\240\000\000\000\000\000\000\000\
\000\000\226\000\222\000\000\000\223\000\000\000\000\000\000\000\
\000\000\073\002\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\228\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\216\000\000\000\000\000\000\000\
\000\000\219\001\218\001\214\001\215\001\207\001\216\001\213\001\
\000\000\204\001\205\001\206\001\208\001\209\001\210\001\211\001\
\212\001\000\000\221\001\220\001\217\001\000\000\000\000\000\000\
\000\000\180\001\000\000\000\002\001\002\000\000\181\001\075\001\
\092\001\202\001\000\000\000\000\000\000\000\000\000\000\012\002\
\028\000\000\000\000\000\000\000\000\000\010\002\000\000\051\002\
\013\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\237\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\232\001\000\000\
\234\001\236\001\000\000\254\001\042\002\048\002\049\002\000\000\
\000\000\127\000\059\000\000\000\128\000\038\001\134\000\000\000\
\039\001\000\000\000\000\157\000\000\000\138\000\139\000\000\000\
\000\000\156\000\000\000\141\000\000\000\142\000\000\000\155\000\
\188\000\187\000\137\000\000\000\159\000\000\000\160\000\050\002\
\123\000\034\000\122\000\121\000\000\000\000\000\158\000\143\000\
\000\000\000\000\000\000\146\000\000\000\090\001\079\000\000\000\
\000\000\000\000\000\000\008\002\000\000\255\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\194\001\000\000\
\000\000\000\000\054\000\055\000\000\000\030\001\000\000\029\001\
\000\000\241\000\239\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\044\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\091\000\000\000\002\001\
\001\001\000\000\074\002\000\000\000\000\000\000\225\000\000\000\
\118\001\119\001\000\000\000\000\000\000\056\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\200\001\000\000\000\000\000\000\000\000\006\002\
\221\000\005\002\000\000\000\000\038\000\215\000\000\000\000\000\
\000\000\000\000\000\000\000\000\024\000\251\001\252\001\248\001\
\250\001\014\002\000\000\000\000\249\001\000\000\000\000\000\000\
\007\002\000\000\091\001\055\001\063\001\064\001\000\000\000\000\
\105\000\000\000\035\001\037\001\000\000\009\002\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\030\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\129\001\151\001\122\001\124\001\000\000\000\000\179\001\000\000\
\000\000\000\000\000\000\000\000\178\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\235\001\000\000\039\002\000\000\
\038\002\040\002\034\002\048\000\000\000\044\002\131\000\057\002\
\000\000\000\000\162\000\163\000\000\000\000\000\000\000\154\000\
\135\000\000\000\145\000\000\000\000\000\000\000\000\000\000\000\
\000\000\042\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\058\001\006\000\000\000\000\000\
\000\000\000\000\099\000\000\000\078\001\000\000\000\000\000\000\
\000\000\000\000\166\001\167\001\168\001\192\001\000\000\000\000\
\164\001\165\001\193\001\146\001\000\000\147\001\253\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\233\001\000\000\075\000\000\000\000\000\028\001\
\000\000\000\000\000\000\229\000\000\000\247\000\000\000\228\000\
\000\000\000\000\236\000\237\000\233\000\235\000\000\000\000\000\
\000\000\231\000\234\000\117\001\000\001\097\000\000\000\103\000\
\077\000\000\000\120\001\075\002\218\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\040\001\229\001\230\001\000\000\069\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\178\000\004\002\032\000\000\000\000\000\000\000\000\000\011\002\
\000\000\243\001\130\001\000\000\131\001\000\000\227\001\225\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\070\001\
\000\000\069\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\035\002\000\000\000\000\052\000\000\000\000\000\
\000\000\000\000\022\000\000\000\000\000\002\000\000\000\000\000\
\140\000\189\000\000\000\000\000\000\000\000\000\000\000\095\000\
\000\000\000\000\000\000\000\000\000\000\071\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\191\001\144\001\148\001\000\000\142\001\000\000\000\000\
\163\001\000\000\000\000\000\000\000\000\000\000\162\001\000\000\
\000\000\000\000\000\000\000\000\000\000\057\000\000\000\000\000\
\114\001\000\000\115\001\000\000\111\001\110\001\244\000\000\000\
\065\002\000\000\117\000\000\000\000\000\073\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\126\001\253\001\084\001\176\000\000\000\
\061\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\046\000\087\001\000\000\000\000\000\000\000\000\
\059\001\000\000\000\000\000\000\010\000\000\000\000\000\101\000\
\000\000\132\001\000\000\065\001\066\001\067\001\000\000\071\002\
\000\000\057\001\020\000\018\000\000\000\087\000\000\000\088\000\
\000\000\089\000\000\000\086\000\000\000\000\000\049\000\037\002\
\000\000\000\000\041\002\164\000\000\000\061\001\060\001\000\000\
\161\000\000\000\144\000\000\000\093\000\000\000\000\000\153\000\
\115\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\147\000\000\000\107\001\000\000\000\000\105\001\000\000\000\000\
\000\000\000\000\101\001\000\000\252\000\000\000\000\000\000\000\
\000\000\113\001\000\000\246\000\230\000\000\000\000\000\121\001\
\000\000\000\000\000\000\104\001\000\000\000\000\044\001\045\001\
\041\001\043\001\042\001\063\002\085\001\177\000\000\000\000\000\
\000\000\000\000\000\000\000\000\186\000\000\000\000\000\000\000\
\000\000\000\000\000\000\012\000\137\001\000\000\000\000\000\000\
\127\001\000\000\008\000\000\000\000\000\072\001\073\001\074\001\
\071\001\050\000\081\000\036\002\107\000\000\000\000\000\166\000\
\169\000\000\000\000\000\000\000\000\000\000\000\197\000\193\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\093\001\
\063\000\000\000\000\000\099\001\000\000\000\000\000\000\254\000\
\111\000\109\000\054\002\000\000\227\000\000\000\094\001\097\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\185\000\000\000\000\000\000\000\138\001\000\000\139\001\128\001\
\086\001\072\002\080\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\065\000\000\000\191\000\000\000\
\000\000\000\000\151\000\152\000\148\000\150\000\149\000\106\001\
\036\000\000\000\000\000\000\000\000\000\249\000\000\000\096\001\
\098\001\000\000\000\000\000\000\000\000\000\000\179\000\134\001\
\000\000\000\000\140\001\000\000\000\000\000\000\174\000\026\000\
\000\000\000\000\000\000\000\000\000\000\000\000\167\000\000\000\
\016\001\000\000\000\000\000\000\010\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\095\001\000\000\000\000\000\000\000\000\000\000\000\000\135\001\
\171\000\000\000\022\001\000\000\172\000\175\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\067\002\170\000\000\000\000\000\000\000\000\000\000\000\
\000\000\004\001\168\000\000\000\190\000\000\000\000\000\194\000\
\000\000\000\000\000\000\000\000\103\001\183\000\184\000\180\000\
\182\000\181\000\136\001\000\000\000\000\000\000\000\000\000\000\
\000\000\020\001\021\001\017\001\019\001\018\001\000\000\014\001\
\015\001\011\001\013\001\012\001\000\000\000\000\000\000\000\000\
\000\000\000\000\113\000\196\000\000\000\108\001\100\001\000\000\
\026\001\027\001\023\001\025\001\024\001\000\000\173\000\060\002\
\008\001\009\001\005\001\007\001\006\001\192\000\000\000\000\000\
\004\000\000\000\000\000\195\000\102\001"

(* [yydgoto]: default goto state for each nonterminal. *)
let yydgoto = "\005\000\
\025\000\033\000\039\001\141\000\100\000\096\002\101\000\199\004\
\158\001\118\002\247\002\102\000\233\002\121\003\027\002\045\002\
\046\002\002\003\139\003\000\003\018\003\019\003\012\002\013\002\
\087\004\088\004\056\001\057\001\047\002\048\002\019\002\020\002\
\103\000\145\001\146\001\066\004\006\002\194\000\195\000\105\002\
\106\002\210\001\211\001\010\002\015\001\103\001\104\001\001\001\
\108\001\104\000\211\000\212\000\213\000\163\002\148\000\105\000\
\218\002\219\002\180\003\181\003\249\003\250\003\026\000\027\000\
\028\000\029\000\116\002\183\002\230\000\193\001\214\000\231\001\
\242\000\187\000\152\003\011\003\079\001\232\000\233\000\099\002\
\100\002\152\001\153\001\243\000\244\000\106\000\032\002\245\000\
\246\000\107\000\108\000\015\003\109\000\073\003\074\003\075\003\
\105\004\106\004\120\001\179\000\110\000\152\000\031\000\111\000\
\180\000\181\000\014\001\112\000\090\002\113\000\040\001\136\001\
\130\001\124\001\151\001\141\001\143\001\161\003\091\002\204\003\
\048\004\114\000\050\004\051\004\008\002\138\001\229\002\230\002\
\115\000\239\001\248\000\185\001\248\002\206\001\080\003\153\000\
\117\000\118\000\004\001\119\000\120\000\159\003\121\000\189\000\
\249\000\135\003\123\000\205\003\124\000\125\000\183\003\167\002\
\076\003\236\001\250\000\126\000\127\000\187\001\188\001\189\001\
\190\001\128\000\129\000\191\001\130\000\005\001\006\001\131\000\
\106\001\132\000\133\000\134\000\135\000\136\000\137\000\065\001\
\013\003\142\000\143\000\112\001\081\003\138\004\213\002"

let yysindex = "\105\003\
\061\006\035\004\046\057\010\001\000\000\020\255\000\000\000\000\
\000\000\000\000\078\081\046\057\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\099\000\061\006\199\008\216\007\000\000\000\000\000\000\
\000\000\000\000\078\081\078\081\078\081\182\070\078\081\078\081\
\078\081\078\081\046\057\078\081\000\000\202\065\000\000\030\069\
\196\066\027\000\000\000\027\000\233\009\206\074\076\255\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\132\069\
\180\081\182\070\046\057\046\057\046\057\182\070\196\255\224\076\
\053\054\000\000\000\000\000\000\182\070\000\000\026\082\046\057\
\182\070\000\000\000\000\000\000\196\066\000\000\000\000\000\000\
\078\081\078\081\000\000\046\057\000\000\000\000\000\000\000\000\
\000\000\140\255\000\000\000\000\000\000\122\000\000\000\008\255\
\000\000\000\000\000\000\160\255\000\000\112\255\179\255\034\255\
\000\000\182\070\234\069\005\000\000\000\000\000\046\057\046\057\
\059\255\154\002\081\255\007\000\000\000\028\000\048\000\225\000\
\062\000\000\000\094\072\000\000\000\000\000\000\113\000\000\000\
\000\000\000\000\000\000\000\000\000\000\125\001\045\255\134\000\
\000\000\053\054\000\000\090\000\078\081\000\000\117\000\192\000\
\027\001\000\000\000\000\014\000\017\000\020\255\020\255\020\255\
\193\000\012\001\193\000\014\000\032\000\020\255\016\001\020\255\
\000\000\000\000\246\000\246\000\000\000\246\000\036\001\020\255\
\014\000\147\005\000\000\000\000\000\000\000\000\000\000\117\001\
\000\000\078\081\000\000\000\000\189\002\000\000\000\000\000\000\
\000\000\009\001\000\000\115\001\105\001\079\001\046\057\000\000\
\000\000\027\000\027\000\244\255\142\001\000\000\000\000\190\067\
\053\255\000\000\000\000\027\000\027\000\000\000\000\000\120\001\
\089\255\101\255\000\000\135\001\078\081\107\255\014\000\190\067\
\224\076\224\076\002\056\000\000\000\000\000\000\000\000\047\001\
\233\009\000\000\143\001\000\000\000\000\127\001\042\000\224\076\
\002\056\000\000\000\000\166\001\000\000\062\001\224\076\000\255\
\168\001\000\000\254\073\027\000\027\000\242\000\164\001\212\054\
\074\001\008\255\024\001\162\001\000\000\070\001\235\000\087\001\
\103\001\220\001\213\001\046\057\000\000\102\001\242\001\226\001\
\112\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\056\075\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\132\078\000\000\000\000\000\000\234\078\108\001\113\001\
\234\069\000\000\026\082\000\000\000\000\126\001\000\000\000\000\
\000\000\000\000\093\001\078\081\046\057\046\057\078\081\000\000\
\000\000\182\070\193\000\014\000\046\057\000\000\127\001\000\000\
\000\000\105\255\182\070\134\255\078\081\046\057\004\002\004\002\
\059\068\059\068\046\057\046\057\046\057\046\057\000\000\182\070\
\059\068\182\070\059\068\059\068\182\070\182\070\182\070\182\070\
\182\070\182\070\182\070\182\070\182\070\182\070\254\073\254\073\
\254\073\254\073\254\073\080\079\180\081\068\255\000\000\094\072\
\000\000\000\000\014\000\000\000\000\000\000\000\000\000\010\001\
\046\057\000\000\000\000\046\057\000\000\000\000\000\000\095\255\
\000\000\020\255\095\255\000\000\178\001\000\000\000\000\102\001\
\020\255\000\000\102\001\000\000\011\001\000\000\234\001\000\000\
\000\000\000\000\000\000\020\255\000\000\131\001\000\000\000\000\
\000\000\000\000\000\000\000\000\014\000\139\001\000\000\000\000\
\029\002\078\008\231\000\000\000\196\066\000\000\000\000\046\057\
\046\057\046\057\046\057\000\000\078\081\000\000\046\057\172\001\
\008\002\017\002\182\070\182\070\182\070\028\071\078\081\078\081\
\078\081\180\081\028\071\028\071\028\071\078\081\028\071\080\070\
\018\002\022\002\180\001\000\000\215\002\098\000\000\000\200\072\
\030\002\053\255\000\000\000\000\237\001\000\000\089\255\000\000\
\237\001\000\000\000\000\021\002\033\002\196\001\186\001\057\002\
\052\002\205\001\000\000\201\001\083\001\197\001\209\001\215\001\
\216\001\119\000\046\255\218\001\046\057\000\000\020\255\000\000\
\000\000\127\001\000\000\224\076\224\076\224\076\000\000\224\076\
\000\000\000\000\182\070\046\057\224\076\000\000\228\001\062\000\
\070\002\073\002\182\070\182\070\182\070\231\001\162\075\182\079\
\028\080\236\001\000\000\219\255\046\057\219\255\255\001\000\000\
\000\000\000\000\182\070\072\002\000\000\000\000\169\000\074\077\
\224\076\075\002\050\073\085\002\000\000\000\000\000\000\000\000\
\000\000\000\000\248\001\065\067\000\000\182\070\109\255\195\255\
\000\000\206\074\000\000\000\000\000\000\000\000\109\002\235\001\
\000\000\102\001\000\000\000\000\001\002\000\000\059\068\148\255\
\059\068\067\002\006\002\127\001\005\002\016\002\229\001\000\000\
\011\002\136\002\145\002\049\002\162\002\101\002\068\002\007\000\
\000\000\000\000\000\000\000\000\059\001\116\001\000\000\040\003\
\166\002\116\001\026\001\015\001\000\000\015\001\088\000\120\000\
\062\000\180\255\100\002\180\081\000\000\081\002\000\000\088\255\
\000\000\000\000\000\000\000\000\113\000\000\000\000\000\000\000\
\059\002\194\002\000\000\000\000\027\000\177\002\196\066\000\000\
\000\000\027\000\000\000\216\002\020\255\065\067\201\002\020\255\
\105\002\000\000\098\002\020\255\014\000\092\002\106\002\113\002\
\115\002\116\002\231\000\220\002\000\000\000\000\118\002\221\002\
\174\002\120\002\000\000\122\002\000\000\046\057\046\057\071\255\
\025\255\123\002\000\000\000\000\000\000\000\000\149\002\001\003\
\000\000\000\000\000\000\000\000\028\071\000\000\000\000\184\068\
\184\068\028\071\028\071\028\071\028\071\028\071\028\071\028\071\
\028\071\028\071\028\071\100\074\100\074\100\074\100\074\100\074\
\078\081\200\072\000\000\184\068\000\000\078\081\046\057\000\000\
\046\057\224\076\014\000\000\000\014\000\000\000\184\068\000\000\
\224\076\224\076\000\000\000\000\000\000\000\000\000\003\119\000\
\184\068\000\000\000\000\000\000\000\000\000\000\004\003\000\000\
\000\000\031\255\000\000\000\000\000\000\046\057\046\057\033\255\
\035\255\126\002\000\000\000\000\000\000\000\000\000\000\193\082\
\000\000\000\000\000\000\146\002\000\000\059\068\205\002\046\057\
\102\001\242\002\169\000\042\003\145\056\233\009\035\001\242\001\
\046\057\254\073\053\054\254\073\025\003\176\002\152\073\046\057\
\000\000\000\000\000\000\185\255\179\002\168\001\078\081\000\000\
\169\000\000\000\000\000\059\068\000\000\046\057\000\000\000\000\
\046\057\000\000\059\068\059\068\224\076\046\057\196\066\000\000\
\196\066\000\000\058\001\208\001\088\002\141\002\254\073\180\002\
\094\072\000\000\000\000\068\255\234\254\000\000\147\002\032\003\
\050\073\009\003\000\000\153\002\196\066\000\000\045\003\102\001\
\000\000\000\000\020\255\030\003\078\081\014\000\146\000\000\000\
\231\000\231\000\231\000\231\000\231\000\000\000\196\066\189\002\
\189\002\189\002\189\002\077\065\195\002\169\002\046\057\046\057\
\046\057\000\000\000\000\000\000\060\003\000\000\047\002\253\002\
\000\000\087\003\186\002\253\002\043\001\088\001\000\000\088\001\
\108\000\189\000\098\000\192\255\234\002\000\000\067\002\203\002\
\000\000\058\003\000\000\217\002\000\000\000\000\000\000\207\002\
\000\000\077\003\000\000\018\003\184\068\000\000\067\002\046\057\
\190\002\191\002\015\003\027\000\046\057\230\002\236\002\237\002\
\238\002\241\002\219\255\000\000\000\000\000\000\000\000\046\057\
\000\000\102\001\244\002\246\002\252\002\254\002\002\003\226\001\
\046\057\096\003\000\000\000\000\123\003\234\069\019\003\078\081\
\000\000\050\073\254\073\021\003\000\000\022\003\059\068\000\000\
\094\003\000\000\027\003\000\000\000\000\000\000\138\003\000\000\
\119\003\000\000\000\000\000\000\046\057\000\000\046\057\000\000\
\046\057\000\000\046\057\000\000\100\002\094\072\000\000\000\000\
\068\255\088\255\000\000\000\000\059\002\000\000\000\000\071\066\
\000\000\254\073\000\000\050\073\000\000\026\003\065\067\000\000\
\000\000\065\067\108\003\142\003\146\003\147\003\148\003\150\003\
\000\000\014\000\000\000\047\003\077\065\000\000\111\003\046\057\
\078\081\006\003\000\000\014\003\000\000\100\074\048\003\078\081\
\078\081\000\000\014\000\000\000\000\000\067\002\059\003\000\000\
\078\081\046\057\046\057\000\000\153\003\192\003\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\233\009\233\009\
\233\009\233\009\233\009\158\003\000\000\046\057\076\003\182\070\
\066\003\062\000\050\073\000\000\000\000\081\003\050\073\059\068\
\000\000\046\057\000\000\224\076\046\057\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\066\001\030\078\000\000\
\000\000\197\003\130\071\085\003\044\003\103\003\000\000\000\000\
\174\003\196\066\196\066\196\066\196\066\196\066\207\003\000\000\
\000\000\046\057\050\003\000\000\027\000\027\000\234\002\000\000\
\000\000\000\000\000\000\080\003\000\000\219\003\000\000\000\000\
\046\057\046\057\226\001\226\001\226\001\226\001\226\001\046\057\
\000\000\050\073\089\003\098\003\000\000\050\073\000\000\000\000\
\000\000\000\000\000\000\022\000\020\255\115\055\000\000\101\003\
\251\255\095\003\210\003\177\000\000\000\030\078\000\000\127\001\
\020\255\065\067\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\110\003\027\000\204\003\209\003\000\000\046\057\000\000\
\000\000\198\003\200\003\202\003\203\003\205\003\000\000\000\000\
\112\003\050\073\000\000\196\066\222\000\196\066\000\000\000\000\
\022\000\115\003\012\076\130\080\232\080\118\003\000\000\221\082\
\000\000\222\000\196\066\054\083\000\000\103\000\120\003\027\000\
\122\003\107\003\216\003\024\000\046\057\227\003\046\057\046\057\
\000\000\046\057\046\057\046\057\046\057\046\057\050\073\000\000\
\000\000\082\083\000\000\134\003\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\129\003\130\003\131\003\132\003\133\003\
\144\003\000\000\000\000\152\003\154\003\155\003\156\003\159\003\
\171\083\000\000\000\000\230\003\000\000\127\001\065\067\000\000\
\232\003\157\003\046\057\025\004\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\167\003\168\003\170\003\172\003\173\003\
\012\004\000\000\000\000\000\000\000\000\000\000\222\000\000\000\
\000\000\000\000\000\000\000\000\176\003\177\003\178\003\180\003\
\181\003\065\067\000\000\000\000\014\004\000\000\000\000\027\000\
\000\000\000\000\000\000\000\000\000\000\196\066\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\065\067\033\004\
\000\000\194\003\046\057\000\000\000\000"

let yyrindex = "\000\000\
\161\003\114\082\000\000\000\000\000\000\132\012\000\000\000\000\
\000\000\000\000\196\003\199\003\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\161\003\000\000\114\082\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\195\003\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\038\004\000\000\038\004\000\000\000\000\232\071\153\015\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\199\003\199\003\058\004\000\000\118\076\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\199\003\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\146\017\000\000\000\000\000\000\016\047\000\000\154\013\
\000\000\000\000\000\000\169\036\000\000\000\000\070\014\065\016\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\124\040\133\021\148\038\081\037\000\000\213\039\059\039\088\035\
\107\029\000\000\120\025\000\000\000\000\000\000\240\014\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\022\041\000\000\054\255\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\118\076\000\000\118\076\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\253\051\253\051\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\176\041\
\000\000\074\042\000\000\234\016\059\018\000\000\000\000\000\000\
\000\000\000\000\000\000\206\003\000\000\000\000\000\000\000\000\
\000\000\038\004\038\004\000\000\019\001\000\000\000\000\000\000\
\000\000\000\000\000\000\047\048\047\048\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\002\001\000\000\000\000\
\214\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\243\255\000\000\205\048\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\197\255\000\000\122\004\061\001\000\000\
\173\044\000\000\000\000\038\004\038\004\000\000\019\001\000\000\
\000\000\247\058\000\000\095\000\000\000\211\003\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\180\077\019\004\226\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\189\003\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\118\076\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\165\000\074\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\120\025\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\077\005\
\000\000\000\000\000\000\070\255\000\000\000\000\000\000\099\049\
\000\000\000\000\039\004\000\000\098\052\000\000\000\000\180\077\
\000\000\000\000\180\077\000\000\199\052\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\215\003\000\000\000\000\000\000\
\044\053\000\000\057\004\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\122\255\188\255\104\004\000\000\208\024\
\000\000\067\004\000\000\000\000\203\057\000\000\000\000\000\000\
\203\057\000\000\000\000\130\000\000\000\000\000\220\003\000\000\
\000\000\000\000\000\000\218\003\000\000\000\000\000\000\000\000\
\000\000\091\255\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\084\004\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\166\047\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\224\003\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\041\004\000\000\
\000\000\000\000\000\000\108\255\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\201\003\000\000\
\000\000\180\077\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\033\026\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\228\042\126\043\000\000\000\000\000\000\000\000\250\037\
\000\000\000\000\000\000\000\000\000\036\182\032\000\000\175\034\
\007\034\094\033\013\032\188\030\000\000\101\031\195\028\000\000\
\020\030\026\028\201\026\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\240\014\000\000\000\000\000\000\
\000\000\217\049\000\000\000\000\187\003\000\000\000\000\000\000\
\000\000\077\004\000\000\145\053\000\000\000\000\000\000\000\000\
\000\000\000\000\233\003\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\057\004\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\052\001\
\000\000\182\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\208\024\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\241\003\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\091\004\
\000\000\000\000\000\000\000\000\000\000\000\000\045\005\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\052\001\
\000\000\182\000\123\062\017\063\167\063\061\064\211\064\000\000\
\000\000\000\000\000\000\243\003\000\000\000\000\000\000\000\000\
\180\077\000\000\041\004\000\000\000\000\130\001\000\000\019\004\
\000\000\000\000\000\000\000\000\255\000\132\011\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\071\045\000\000\000\000\
\041\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\209\000\000\000\000\000\024\044\000\000\000\000\000\000\
\000\000\000\000\046\022\214\022\127\023\039\024\000\000\000\000\
\120\025\144\000\000\000\000\000\000\000\000\000\000\000\221\003\
\000\000\051\051\000\000\000\000\000\000\000\000\000\000\180\077\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\057\004\057\004\057\004\057\004\057\004\000\000\000\000\227\018\
\140\019\052\020\221\020\244\003\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\080\000\249\001\
\000\000\016\000\118\255\006\005\185\001\250\002\000\000\186\003\
\091\002\000\000\251\006\153\005\212\010\000\000\235\003\069\255\
\000\000\249\003\000\000\000\000\000\000\000\000\000\000\251\003\
\000\000\000\000\000\000\000\000\000\000\000\000\235\003\000\000\
\000\000\000\000\000\000\038\004\000\000\000\000\000\000\000\000\
\000\000\000\000\224\003\000\000\000\000\000\000\000\000\000\000\
\000\000\180\077\000\000\000\000\000\000\000\000\000\000\082\004\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\220\045\000\000\
\118\046\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\114\027\120\025\000\000\000\000\
\050\255\000\000\000\000\000\000\000\000\000\000\000\000\152\051\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\244\003\000\000\000\000\000\000\
\022\041\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\241\003\000\000\000\000\235\003\000\000\000\000\
\121\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\161\001\210\001\
\000\002\013\002\062\002\000\000\000\000\000\000\000\000\000\000\
\000\000\078\007\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\152\051\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\004\004\000\000\000\000\038\004\038\004\011\012\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\082\004\082\004\082\004\082\004\082\004\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\083\050\000\000\000\000\097\058\000\000\
\000\000\006\004\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\038\004\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\083\050\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\007\004\000\000\000\000\000\000\000\000\000\000\039\004\
\000\000\008\004\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\141\059\035\060\
\185\060\079\061\229\061\000\000\000\000\000\000\000\000\000\000\
\011\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\205\050\000\000\000\000\000\000\000\000\000\000\007\004\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\038\004\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000"

let yygindex = "\000\000\
\000\000\000\000\241\007\000\000\242\007\000\000\049\000\000\000\
\000\000\000\000\000\000\011\000\000\000\000\000\118\253\000\000\
\000\000\000\000\137\002\000\000\000\000\000\000\250\252\173\002\
\053\001\000\000\000\000\158\255\069\004\000\000\000\000\000\000\
\072\254\229\004\230\004\000\000\000\000\000\000\238\003\000\000\
\125\002\000\000\235\002\192\002\149\003\167\254\093\254\185\254\
\072\003\193\255\140\001\000\000\241\254\255\002\110\255\200\000\
\033\254\000\000\239\001\000\000\165\001\000\000\140\005\166\005\
\142\005\000\000\188\253\247\002\042\255\231\003\073\255\212\003\
\208\255\242\004\023\002\165\254\002\002\051\255\000\000\150\002\
\002\255\069\003\023\005\222\003\190\255\018\000\200\002\068\254\
\055\255\229\254\116\005\034\002\108\006\250\001\000\000\254\001\
\045\001\000\000\114\254\000\000\019\000\238\255\000\000\038\010\
\000\000\000\000\183\255\184\010\070\004\106\010\150\255\000\000\
\031\005\000\000\000\000\000\000\000\000\000\000\000\000\223\255\
\100\253\208\003\091\252\000\000\000\000\000\000\044\253\009\255\
\030\005\000\000\000\000\056\255\033\253\000\000\241\004\003\002\
\214\254\240\254\000\000\000\000\000\000\000\000\073\000\253\255\
\226\003\000\000\235\255\041\252\247\255\209\254\000\000\017\004\
\000\000\028\003\026\255\128\004\187\254\000\000\129\254\137\255\
\171\253\046\000\054\004\144\004\000\000\228\003\024\254\239\007\
\000\000\178\255\000\000\000\000\000\000\000\000\000\000\000\000\
\061\002\111\004\000\000\000\000\032\002\055\001\129\002"

let yytablesize = 21824
let yytable = "\122\000\
\045\001\002\001\115\001\238\001\231\000\016\001\107\002\150\000\
\122\000\227\001\083\002\057\002\218\001\064\001\084\002\007\002\
\216\000\035\002\220\000\030\000\030\000\212\002\151\000\208\001\
\213\001\194\001\204\000\222\001\162\002\078\002\033\002\150\000\
\150\000\150\000\058\002\150\000\150\000\150\000\205\000\122\000\
\150\000\192\002\254\000\036\002\007\001\030\000\038\003\030\000\
\008\001\009\001\010\001\136\002\080\001\068\004\255\000\140\002\
\142\002\082\001\080\001\196\000\158\003\046\001\080\001\122\000\
\122\000\122\000\185\002\061\001\056\002\122\000\065\003\066\003\
\067\003\068\003\069\003\044\001\122\000\151\000\151\000\151\000\
\154\003\047\000\003\001\190\000\055\002\150\000\150\000\128\001\
\122\000\131\001\151\000\235\002\066\001\068\001\200\000\082\001\
\110\000\206\000\154\000\073\001\132\001\055\000\207\000\191\003\
\110\001\220\003\135\002\110\004\080\001\078\000\009\000\242\000\
\242\000\155\003\242\000\122\000\122\000\145\000\201\000\199\003\
\220\001\201\000\042\001\047\000\233\001\130\002\047\001\079\002\
\068\000\097\002\072\001\158\001\080\001\048\001\220\001\143\001\
\080\001\145\000\207\000\115\002\158\001\234\001\122\000\055\000\
\207\000\150\000\168\003\079\002\251\000\250\001\207\000\080\002\
\080\000\062\001\063\001\233\001\052\001\201\000\212\001\204\001\
\219\001\080\001\009\000\055\001\208\000\229\003\235\001\111\001\
\169\001\170\001\068\000\080\002\234\001\026\003\219\001\080\001\
\068\000\078\000\209\000\251\000\056\002\186\002\150\000\058\001\
\231\000\080\000\158\001\096\001\137\004\055\001\143\001\059\001\
\048\003\012\003\110\000\122\000\055\002\202\002\232\001\157\002\
\091\003\222\001\092\003\074\001\047\003\081\001\036\002\253\003\
\060\001\168\001\149\000\045\001\080\001\210\000\016\001\158\001\
\158\001\150\000\241\001\242\001\172\003\173\003\174\003\175\003\
\176\003\243\002\242\000\245\002\081\002\039\002\020\004\215\002\
\208\004\041\002\184\000\185\000\186\000\228\003\191\000\192\000\
\193\000\034\002\158\001\198\000\237\002\084\003\145\001\093\000\
\081\002\210\000\089\002\105\003\122\000\158\001\162\002\210\000\
\222\001\143\001\041\002\184\002\008\003\210\000\196\002\090\000\
\122\000\137\004\194\001\203\001\145\000\052\003\037\004\171\001\
\244\002\129\003\039\004\023\002\074\004\075\004\076\004\077\004\
\078\004\069\001\090\000\191\002\099\001\157\001\044\001\082\001\
\049\001\050\001\150\000\241\002\012\001\002\001\157\001\044\001\
\160\002\172\001\084\004\085\002\145\000\040\002\208\002\114\002\
\150\000\122\000\122\000\150\000\078\000\244\002\131\002\047\000\
\222\001\122\000\062\001\063\001\137\002\138\002\095\001\145\000\
\126\003\096\000\122\000\222\001\013\001\096\001\056\002\122\000\
\122\000\122\000\122\000\055\000\207\000\080\004\096\004\083\001\
\043\002\083\004\096\000\097\001\157\001\122\001\092\002\070\001\
\095\001\092\002\085\004\145\000\149\000\155\001\055\001\096\001\
\208\000\088\002\156\002\115\002\129\001\084\001\068\000\078\000\
\015\004\157\002\078\000\096\000\156\002\122\000\209\000\090\000\
\122\000\157\001\002\001\157\002\086\004\068\000\003\001\158\002\
\095\001\078\000\212\002\087\002\109\001\120\004\151\000\007\003\
\035\002\186\000\061\002\062\002\063\002\064\002\065\002\066\002\
\067\002\068\002\069\002\070\002\157\001\097\001\107\001\107\002\
\100\003\050\002\051\002\047\000\155\001\152\004\129\002\157\001\
\145\000\103\004\113\001\059\002\122\000\122\000\122\000\122\000\
\153\004\128\002\163\004\122\000\202\001\098\001\099\001\055\000\
\145\000\045\001\217\002\150\000\150\000\150\000\130\003\151\003\
\153\003\124\002\150\000\093\000\016\001\064\001\196\000\232\001\
\124\004\170\003\145\004\210\000\209\002\156\002\209\002\098\001\
\099\001\038\001\068\000\003\001\190\003\223\002\016\001\053\003\
\054\003\159\002\160\002\038\001\155\001\117\002\151\001\013\000\
\015\000\151\001\158\002\159\002\160\002\194\002\171\003\155\001\
\114\001\122\000\103\003\071\003\231\001\029\000\201\002\098\001\
\099\001\184\002\145\000\038\001\023\000\201\000\082\003\231\001\
\122\000\200\002\117\003\151\001\119\003\050\001\255\003\124\003\
\087\003\000\004\011\000\012\000\044\001\150\000\040\002\085\001\
\255\001\122\000\047\000\028\002\068\000\116\001\031\002\245\000\
\236\002\011\000\053\001\054\001\100\004\116\000\080\001\211\002\
\086\001\023\000\245\000\012\003\002\001\243\001\055\000\145\000\
\112\003\115\002\115\002\115\002\115\002\115\002\136\003\040\002\
\023\000\220\002\055\003\056\003\057\003\058\003\059\003\060\003\
\061\003\062\003\063\003\064\003\159\002\160\002\011\000\244\001\
\060\001\068\000\013\000\015\000\208\000\116\000\150\001\069\001\
\038\001\122\004\101\002\219\000\219\000\011\000\219\000\011\002\
\087\001\233\003\209\000\020\003\242\003\153\003\117\001\243\000\
\023\003\013\001\088\001\089\001\090\001\091\001\092\001\093\001\
\150\001\243\000\113\003\116\000\029\000\219\000\094\001\149\001\
\129\001\243\000\243\000\229\001\243\000\230\001\014\000\016\000\
\160\001\161\001\162\001\163\001\201\000\003\001\116\000\195\001\
\196\001\138\000\086\001\140\001\243\000\098\003\087\001\079\003\
\229\001\149\001\178\002\214\003\149\001\204\001\212\001\139\000\
\140\000\087\001\111\003\243\000\092\001\093\001\132\002\133\002\
\134\002\116\000\122\000\122\000\141\003\139\002\225\003\092\001\
\093\001\108\004\148\002\230\003\144\001\157\001\164\001\094\001\
\045\003\046\003\165\001\243\000\198\003\068\000\166\001\022\003\
\153\002\154\002\087\001\167\001\116\000\197\001\003\001\003\001\
\155\002\003\001\219\001\078\000\088\001\089\001\090\001\091\001\
\092\001\093\001\040\004\122\000\201\001\122\000\173\001\201\000\
\094\001\003\001\252\003\221\001\223\001\231\000\201\000\148\002\
\145\000\077\003\228\001\078\003\237\001\236\000\236\000\072\003\
\236\000\243\000\243\000\243\000\245\001\153\002\154\002\252\001\
\050\001\182\003\122\000\122\000\243\000\243\000\160\001\251\001\
\236\000\253\001\243\000\087\001\219\000\219\000\254\001\160\001\
\089\003\090\003\186\001\219\000\122\000\000\002\219\000\219\000\
\091\001\092\001\093\001\198\001\200\001\122\000\188\004\122\000\
\160\001\094\001\209\001\243\000\122\000\001\002\237\000\237\000\
\035\004\237\000\002\002\150\000\003\002\220\002\160\001\161\001\
\162\001\163\001\122\000\005\002\009\002\122\000\011\002\014\002\
\021\002\237\000\122\000\018\002\025\002\160\001\219\000\219\000\
\131\003\206\004\116\000\220\002\026\002\042\004\154\001\044\002\
\137\003\027\004\028\004\029\004\030\004\031\004\116\000\154\001\
\095\002\102\002\143\003\104\002\233\000\233\000\210\004\233\000\
\108\002\150\000\160\001\160\001\160\001\160\001\109\002\143\002\
\154\001\235\000\235\000\209\002\235\000\126\002\012\004\233\000\
\150\000\125\002\144\002\122\000\122\000\122\000\127\002\138\003\
\145\002\138\003\164\002\166\002\235\000\160\001\022\004\116\000\
\116\000\186\003\187\003\188\003\170\002\154\001\171\002\116\000\
\160\001\172\002\173\002\133\003\134\003\163\003\174\002\201\000\
\175\002\201\000\182\003\176\002\179\002\116\000\116\000\116\000\
\116\000\177\002\234\000\234\000\122\000\234\000\147\002\177\003\
\180\002\122\000\154\001\154\001\154\001\201\000\181\002\182\002\
\068\001\187\002\197\002\198\002\122\000\234\000\199\002\206\003\
\187\001\001\000\002\000\003\000\004\000\122\000\203\002\201\000\
\187\001\187\001\007\004\207\002\150\000\154\001\160\001\161\001\
\162\001\163\001\219\000\214\002\232\002\219\000\216\002\234\002\
\154\001\225\002\187\001\204\001\239\002\240\002\148\002\246\002\
\242\002\122\000\251\002\122\000\253\002\122\000\249\002\122\000\
\149\002\150\002\151\002\152\002\153\002\154\002\252\002\238\003\
\255\002\239\003\145\003\240\003\155\002\241\003\254\002\187\001\
\066\000\001\003\116\000\116\000\116\000\116\000\231\000\231\000\
\231\000\231\000\231\000\160\001\161\001\162\001\163\001\003\003\
\004\003\150\000\187\001\005\003\122\000\150\000\031\002\002\001\
\075\001\076\001\077\001\078\001\187\001\187\001\187\001\187\001\
\187\001\187\001\011\004\094\004\219\000\150\000\122\000\122\000\
\187\001\168\002\040\002\006\003\099\001\086\001\009\003\147\003\
\248\003\072\003\072\003\093\000\023\004\024\004\017\003\187\001\
\021\003\024\003\122\000\160\001\161\001\162\001\163\001\116\000\
\027\003\147\002\187\001\033\003\028\002\066\001\122\000\029\003\
\201\000\122\000\030\003\041\003\069\004\146\002\116\000\201\000\
\039\003\034\003\201\000\179\003\040\003\136\004\002\001\043\004\
\035\003\144\004\036\003\037\003\042\003\087\001\147\002\043\003\
\049\003\217\000\044\003\217\000\238\000\238\000\122\000\161\001\
\003\001\090\001\091\001\092\001\093\001\045\001\051\003\168\004\
\161\001\148\002\050\003\094\001\085\003\122\000\122\000\238\000\
\016\001\229\001\099\003\101\003\122\000\151\002\152\002\153\002\
\154\002\161\001\032\000\072\004\073\004\093\003\185\004\155\002\
\104\003\120\003\122\000\106\003\122\003\156\003\148\002\127\003\
\160\003\150\003\059\004\060\004\061\004\062\004\063\004\157\003\
\149\002\150\002\151\002\152\002\153\002\154\002\161\001\028\002\
\162\003\164\003\067\001\122\000\155\002\167\003\148\004\003\001\
\184\003\189\003\201\000\201\000\201\000\201\000\201\000\086\001\
\185\003\113\004\160\002\192\003\148\002\193\003\194\003\195\003\
\044\001\150\000\196\003\161\001\161\001\161\001\161\001\219\000\
\197\003\152\002\153\002\154\002\219\000\201\003\202\003\161\001\
\203\003\122\000\155\002\122\000\122\000\207\003\122\000\122\000\
\122\000\122\000\122\000\208\003\209\003\210\003\161\001\154\004\
\211\003\156\004\157\004\215\003\179\003\216\003\147\002\087\001\
\149\000\161\001\201\000\217\003\121\004\218\003\125\004\222\003\
\223\003\219\003\089\001\090\001\091\001\092\001\093\001\227\003\
\149\000\231\003\232\003\139\004\234\003\094\001\236\003\122\000\
\235\003\217\000\217\000\237\003\201\000\001\004\201\000\183\001\
\217\000\254\003\002\004\217\000\217\000\191\004\003\004\004\004\
\005\004\199\001\006\004\201\000\008\004\013\004\148\002\183\001\
\238\000\238\000\238\000\010\004\068\000\014\004\025\004\016\004\
\238\000\150\002\151\002\152\002\153\002\154\002\226\001\238\000\
\238\000\026\004\021\004\068\000\155\002\032\004\238\000\156\001\
\034\004\036\004\068\000\217\000\217\000\038\004\068\000\122\000\
\156\001\246\003\055\004\056\004\057\004\058\004\064\004\070\004\
\068\000\067\004\116\000\068\000\071\004\213\004\082\004\201\000\
\081\004\156\001\095\004\116\000\068\000\116\000\099\004\098\004\
\238\000\111\004\116\000\109\004\119\004\114\004\112\004\115\004\
\068\000\116\004\117\004\150\004\118\004\147\004\209\004\149\004\
\022\002\068\000\127\004\116\000\068\000\131\004\156\001\151\004\
\155\004\068\000\201\000\169\004\142\003\144\003\146\003\148\003\
\170\004\171\004\172\004\173\004\174\004\186\004\201\000\068\000\
\068\000\068\000\068\000\068\000\068\000\068\000\068\000\201\000\
\175\004\189\004\192\004\156\001\156\001\156\001\156\001\176\004\
\198\004\177\004\178\004\179\004\050\001\190\004\180\004\156\001\
\006\000\142\003\144\003\146\003\148\003\068\000\193\004\194\004\
\240\001\195\004\068\000\196\004\197\004\207\004\156\001\201\004\
\202\004\203\004\068\000\204\004\205\004\007\000\211\004\212\004\
\039\000\156\001\058\000\053\000\031\000\055\002\068\000\217\000\
\055\002\045\000\217\000\098\000\043\002\070\000\053\000\218\000\
\068\002\218\000\239\000\239\000\043\000\041\000\076\000\074\000\
\116\000\064\002\116\000\008\000\009\000\061\002\219\000\060\000\
\076\000\053\000\053\000\100\000\010\000\239\000\041\001\114\000\
\076\000\076\000\116\000\076\000\011\000\012\000\052\002\072\000\
\062\002\062\000\007\000\116\000\013\000\169\001\108\000\106\000\
\053\002\023\000\058\000\076\000\102\000\014\000\169\001\015\000\
\016\000\035\000\017\000\066\002\058\002\112\000\102\000\141\002\
\059\002\140\003\076\000\114\003\049\002\126\004\102\000\169\001\
\147\001\217\000\123\002\148\001\071\002\072\002\073\002\074\002\
\075\002\018\000\169\003\083\003\014\003\224\002\053\004\115\003\
\070\003\102\000\076\000\009\004\019\000\020\000\155\000\034\000\
\165\002\238\000\183\000\159\001\169\001\165\003\086\003\243\003\
\102\000\032\003\119\001\238\000\238\000\238\000\128\003\238\000\
\021\000\011\001\018\004\193\002\238\000\017\004\245\003\169\001\
\094\002\190\002\187\004\137\001\022\000\023\000\238\000\024\000\
\102\000\169\001\169\001\169\001\169\001\169\001\169\001\207\001\
\076\000\076\000\076\000\060\002\042\002\169\001\244\003\238\000\
\238\000\169\002\228\002\076\000\076\000\088\003\086\002\210\002\
\116\000\076\000\019\004\212\003\169\001\200\004\000\000\218\000\
\218\000\238\000\000\000\000\000\116\000\184\001\218\000\169\001\
\000\000\218\000\218\000\000\000\000\000\000\000\102\000\102\000\
\102\000\000\000\076\000\238\002\000\000\184\001\239\000\239\000\
\239\000\102\000\102\000\000\000\000\000\000\000\239\000\102\000\
\000\000\000\000\000\000\000\000\116\000\239\000\239\000\219\000\
\219\000\000\000\000\000\159\001\239\000\000\000\000\000\000\000\
\000\000\218\000\218\000\000\000\159\001\000\000\000\000\000\000\
\102\000\000\000\116\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\217\000\159\001\000\000\224\000\
\116\000\217\000\000\000\000\000\000\000\000\000\239\000\000\000\
\000\000\224\000\000\000\000\000\000\000\000\000\000\000\000\000\
\240\001\224\000\000\000\000\000\224\000\219\000\000\000\000\000\
\000\000\000\000\159\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\224\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\183\001\
\183\001\000\000\000\000\224\000\000\000\000\000\000\000\159\001\
\159\001\159\001\219\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\183\001\116\000\116\000\116\000\116\000\
\116\000\238\000\000\000\224\000\000\000\000\000\183\001\000\000\
\238\000\238\000\159\001\000\000\000\000\000\000\000\000\238\000\
\183\001\000\000\000\000\000\000\000\000\159\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\218\000\000\000\000\000\
\218\000\188\001\000\000\000\000\000\000\215\000\000\000\215\000\
\234\000\234\000\000\000\000\000\000\000\045\000\188\001\000\000\
\000\000\224\000\224\000\224\000\238\000\238\000\188\001\188\001\
\000\000\000\000\000\000\234\000\224\000\224\000\123\003\000\000\
\000\000\000\000\224\000\000\000\000\000\000\000\051\000\000\000\
\188\001\000\000\219\000\000\000\145\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\238\000\000\000\059\000\060\000\
\061\000\062\000\063\000\224\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\188\001\000\000\218\000\
\228\002\000\000\000\000\000\000\000\000\000\000\154\001\074\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\188\001\000\000\000\000\000\000\000\000\000\000\000\000\239\000\
\000\000\000\000\188\001\188\001\188\001\188\001\188\001\188\001\
\188\001\239\000\239\000\239\000\000\000\239\000\188\001\000\000\
\000\000\000\000\239\000\000\000\000\000\000\000\082\000\000\000\
\000\000\000\000\083\000\000\000\239\000\188\001\084\000\240\001\
\000\000\240\001\000\000\000\000\240\001\086\000\087\000\000\000\
\188\001\088\000\000\000\000\000\183\001\239\000\239\000\000\000\
\231\002\091\000\000\000\217\000\000\000\000\000\000\000\094\000\
\095\000\096\000\097\000\000\000\000\000\215\000\215\000\239\000\
\000\000\000\000\000\000\000\000\149\003\000\000\000\000\215\000\
\215\000\000\000\006\000\000\000\000\000\224\003\240\001\000\000\
\000\000\228\002\000\000\000\000\234\000\234\000\234\000\000\000\
\000\000\000\000\000\000\000\000\234\000\000\000\000\000\007\000\
\000\000\000\000\000\000\234\000\234\000\000\000\000\000\000\000\
\000\000\000\000\234\000\000\000\000\000\000\000\000\000\215\000\
\215\000\000\000\000\000\000\000\000\000\000\000\000\000\251\003\
\000\000\000\000\000\000\228\002\000\000\008\000\009\000\000\000\
\000\000\000\000\218\000\000\000\000\000\000\000\010\000\218\000\
\000\000\000\000\000\000\000\000\234\000\000\000\011\000\012\000\
\000\000\000\000\000\000\000\000\000\000\000\000\013\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\014\000\
\000\000\015\000\016\000\000\000\017\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\238\000\238\000\
\238\000\238\000\238\000\226\003\000\000\184\001\184\001\240\001\
\240\001\000\000\228\002\018\000\000\000\000\000\228\002\000\000\
\000\000\000\000\000\000\238\000\000\000\000\000\019\000\020\000\
\000\000\184\001\000\000\000\000\000\000\045\004\049\004\239\000\
\000\000\000\000\000\000\000\000\184\001\000\000\239\000\239\000\
\000\000\000\000\021\000\000\000\000\000\239\000\184\001\240\001\
\000\000\240\001\000\000\000\000\217\000\217\000\022\000\023\000\
\000\000\024\000\000\000\215\000\000\000\000\000\215\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\228\002\000\000\099\000\000\000\228\002\000\000\000\000\
\000\000\000\000\239\000\239\000\000\000\000\000\000\000\000\000\
\118\003\000\000\000\000\102\004\000\000\049\004\000\000\000\000\
\186\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\186\001\217\000\000\000\000\000\226\003\000\000\000\000\
\240\001\000\000\239\000\197\000\240\001\000\000\000\000\000\000\
\000\000\228\002\186\001\000\000\049\004\000\000\229\000\229\000\
\000\000\000\000\238\000\000\000\000\000\000\000\231\002\000\000\
\000\000\049\004\000\000\000\000\000\000\000\000\000\000\217\000\
\000\000\229\000\000\000\000\000\000\000\000\000\000\000\186\001\
\000\000\000\000\000\000\000\000\000\000\000\000\228\002\000\000\
\000\000\000\000\000\000\000\000\051\001\000\000\000\000\234\000\
\234\000\234\000\186\001\234\000\000\000\000\000\000\000\240\001\
\234\000\000\000\000\000\240\001\186\001\186\001\186\001\186\001\
\186\001\186\001\234\000\000\000\185\001\185\001\185\001\071\001\
\186\001\185\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\105\001\184\001\234\000\234\000\185\001\000\000\186\001\
\000\000\218\000\000\000\000\000\000\000\000\000\049\004\000\000\
\000\000\000\000\186\001\000\000\000\000\234\000\000\000\240\001\
\000\000\000\000\000\000\000\000\000\000\118\001\000\000\217\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\231\002\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\118\001\155\001\000\000\185\001\000\000\000\000\
\000\000\000\000\000\000\000\000\240\001\000\000\000\000\185\001\
\185\001\185\001\185\001\185\001\185\001\000\000\000\000\000\000\
\000\000\000\000\000\000\185\001\000\000\000\000\192\001\000\000\
\000\000\231\002\000\000\000\000\016\003\000\000\000\000\000\000\
\215\000\000\000\185\001\000\000\000\000\215\000\192\001\000\000\
\000\000\000\000\229\000\229\000\229\000\224\001\000\000\000\000\
\000\000\000\000\229\000\000\000\000\000\182\000\000\000\224\001\
\225\001\229\000\229\000\000\000\000\000\000\000\000\000\000\000\
\229\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\007\000\000\000\239\000\239\000\239\000\239\000\
\239\000\000\000\000\000\000\000\004\002\000\000\000\000\000\000\
\231\002\000\000\000\000\000\000\231\002\000\000\000\000\000\000\
\000\000\239\000\229\000\000\000\000\000\000\000\000\000\000\000\
\008\000\009\000\000\000\000\000\052\004\234\000\000\000\000\000\
\000\000\010\000\000\000\000\000\234\000\234\000\000\000\000\000\
\000\000\011\000\012\000\000\000\000\000\029\002\030\002\000\000\
\000\000\013\000\218\000\218\000\000\000\037\002\000\000\000\000\
\038\002\000\000\014\000\192\001\015\000\016\000\000\000\017\000\
\000\000\000\000\000\000\052\002\053\002\054\002\055\002\231\002\
\000\000\000\000\000\000\231\002\000\000\000\000\000\000\000\000\
\234\000\234\000\000\000\000\000\000\000\000\000\018\000\000\000\
\110\002\000\000\077\002\052\004\000\000\000\000\105\001\082\002\
\000\000\019\000\020\000\000\000\000\000\000\000\000\000\000\000\
\218\000\000\000\000\000\018\001\019\001\000\000\020\001\000\000\
\234\000\021\001\000\000\000\000\000\000\021\000\000\000\231\002\
\000\000\000\000\052\004\000\000\000\000\022\001\000\000\000\000\
\239\000\022\000\023\000\000\000\024\000\000\000\000\000\052\004\
\000\000\023\001\000\000\024\001\000\000\218\000\118\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\119\002\120\002\121\002\122\002\231\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\192\001\000\000\111\002\000\000\
\000\000\192\001\192\001\192\001\000\000\192\001\192\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\105\001\026\001\
\027\001\028\001\029\001\030\001\031\001\032\001\033\001\112\002\
\035\001\036\001\000\000\037\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\215\000\
\000\000\000\000\000\000\229\000\052\004\188\002\000\000\000\000\
\000\000\000\000\000\000\189\002\000\000\229\000\229\000\229\000\
\000\000\229\000\156\000\000\000\195\002\218\000\229\000\000\000\
\000\000\000\000\113\002\000\000\000\000\000\000\000\000\000\000\
\229\000\157\000\000\000\000\000\000\000\000\000\000\000\000\000\
\158\000\000\000\000\000\000\000\159\000\000\000\000\000\000\000\
\000\000\222\002\229\000\000\000\000\000\000\000\160\000\000\000\
\000\000\161\000\000\000\000\000\000\000\000\000\000\000\000\000\
\016\003\000\000\162\000\229\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\163\000\000\000\
\000\000\000\000\000\000\000\000\000\000\250\002\000\000\164\000\
\000\000\000\000\165\000\000\000\000\000\000\000\000\000\166\000\
\000\000\000\000\000\000\144\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\167\000\168\000\169\000\
\170\000\171\000\172\000\173\000\174\000\000\000\000\000\000\000\
\000\000\010\003\234\000\234\000\234\000\234\000\234\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\175\000\000\000\000\000\000\000\234\000\
\176\000\000\000\235\000\235\000\000\000\000\000\118\001\000\000\
\177\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\178\000\235\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\215\000\215\000\000\000\192\001\000\000\000\000\192\001\192\001\
\192\001\192\001\192\001\192\001\192\001\192\001\192\001\192\001\
\192\001\192\001\192\001\192\001\192\001\192\001\192\001\077\002\
\105\001\000\000\192\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\229\000\000\000\192\001\237\000\237\000\
\000\000\000\000\229\000\229\000\000\000\000\000\000\000\192\001\
\000\000\229\000\000\000\000\000\000\000\000\000\215\000\000\000\
\000\000\237\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\121\001\123\001\125\001\235\000\235\000\234\000\000\000\
\102\003\133\001\135\001\235\000\000\000\235\000\229\000\229\000\
\000\000\116\003\000\000\215\000\000\000\235\000\121\001\000\000\
\125\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\132\003\000\000\000\000\236\000\247\000\229\000\000\000\
\000\000\000\000\000\000\221\000\000\000\000\000\000\000\105\001\
\000\000\000\000\000\000\045\000\000\000\082\002\000\000\247\000\
\000\000\000\000\000\000\000\000\205\001\047\000\235\000\235\000\
\235\000\000\000\000\000\000\000\000\000\000\000\235\000\118\001\
\000\000\000\000\000\000\000\000\051\000\235\000\235\000\000\000\
\000\000\222\000\145\000\156\001\235\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\059\000\060\000\061\000\062\000\
\063\000\000\000\000\000\215\000\000\000\223\000\224\000\000\000\
\000\000\225\000\000\000\000\000\068\000\000\000\226\000\000\000\
\000\000\000\000\000\000\000\000\227\000\074\000\235\000\000\000\
\000\000\000\000\078\000\192\001\000\000\000\000\000\000\000\000\
\200\003\000\000\237\000\237\000\237\000\000\000\000\000\000\000\
\000\000\000\000\237\000\000\000\000\000\000\000\126\001\127\001\
\213\003\237\000\237\000\134\001\000\000\139\001\000\000\142\001\
\237\000\221\003\000\000\000\000\082\000\000\000\000\000\150\001\
\083\000\000\000\000\000\000\000\084\000\000\000\000\000\000\000\
\000\000\000\000\000\000\086\000\087\000\000\000\000\000\088\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\091\000\
\000\000\093\000\237\000\141\001\105\001\094\000\095\000\096\000\
\097\000\228\000\082\002\010\003\000\000\000\000\000\000\000\000\
\205\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\247\000\247\000\247\000\000\000\000\000\000\000\000\000\093\002\
\236\000\000\000\000\000\000\000\000\000\000\000\098\002\247\000\
\247\000\000\000\000\000\000\000\192\001\000\000\236\000\000\000\
\000\000\235\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\121\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\229\000\229\000\229\000\229\000\229\000\000\000\033\004\000\000\
\247\000\000\000\000\000\000\000\190\001\000\000\000\000\000\000\
\000\000\000\000\041\004\190\001\000\000\229\000\000\000\000\000\
\000\000\190\001\024\002\000\000\000\000\000\000\000\000\000\000\
\047\004\190\001\190\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\065\004\190\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\235\000\000\000\000\000\000\000\
\000\000\235\000\235\000\235\000\000\000\235\000\000\000\000\000\
\079\004\000\000\235\000\000\000\000\000\000\000\000\000\000\000\
\190\001\000\000\000\000\000\000\235\000\000\000\000\000\000\000\
\000\000\205\001\097\004\205\001\000\000\101\004\000\000\047\004\
\000\000\104\004\000\000\190\001\000\000\235\000\235\000\000\000\
\000\000\000\000\000\000\000\000\000\000\190\001\190\001\190\001\
\190\001\190\001\190\001\190\001\000\000\000\000\000\000\235\000\
\000\000\190\001\000\000\103\002\000\000\000\000\123\004\000\000\
\000\000\000\000\000\000\000\000\229\000\237\000\237\000\237\000\
\190\001\237\000\000\000\123\004\000\000\000\000\237\000\146\004\
\000\000\000\000\000\000\190\001\000\000\000\000\000\000\000\000\
\237\000\000\000\158\004\159\004\160\004\161\004\162\004\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\237\000\237\000\000\000\000\000\121\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\237\000\000\000\141\001\141\001\104\004\
\000\000\141\001\235\000\000\000\000\000\235\000\000\000\000\000\
\000\000\235\000\121\001\000\000\141\001\000\000\000\000\141\001\
\000\000\000\000\000\000\247\000\247\000\247\000\141\001\236\000\
\123\004\000\000\000\000\000\000\247\000\000\000\141\001\000\000\
\000\000\000\000\000\000\141\001\000\000\141\001\247\000\000\000\
\000\000\141\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\141\001\141\001\141\001\141\001\141\001\247\000\
\247\000\000\000\000\000\000\000\000\000\141\001\000\000\141\001\
\000\000\000\000\000\000\000\000\000\000\141\001\141\001\235\000\
\205\001\247\000\205\001\141\001\000\000\141\001\235\000\235\000\
\000\000\000\000\000\000\000\000\141\001\000\000\141\001\141\001\
\141\001\141\001\000\000\141\001\141\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\141\001\141\001\141\001\141\001\141\001\141\001\
\141\001\141\001\141\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\235\000\235\000\000\000\141\001\141\001\000\000\
\000\000\000\000\000\000\189\001\000\000\000\000\000\000\000\000\
\141\001\000\000\189\001\237\000\000\000\141\001\000\000\000\000\
\189\001\141\001\237\000\237\000\025\003\141\001\000\000\028\003\
\189\001\189\001\235\000\031\003\000\000\141\001\141\001\000\000\
\141\001\141\001\000\000\141\001\000\000\000\000\000\000\000\000\
\000\000\000\000\189\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\235\000\000\000\000\000\121\001\000\000\000\000\237\000\237\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\189\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\247\000\189\001\000\000\000\000\000\000\237\000\000\000\
\247\000\247\000\000\000\000\000\189\001\189\001\189\001\189\001\
\189\001\189\001\189\001\000\000\000\000\000\000\000\000\000\000\
\189\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\189\001\
\205\001\198\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\189\001\000\000\247\000\236\000\000\000\198\000\
\000\000\027\000\000\000\000\000\000\000\000\000\198\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\198\000\000\000\
\000\000\000\000\000\000\000\000\000\000\198\000\000\000\000\000\
\000\000\198\000\000\000\000\000\247\000\000\000\000\000\000\000\
\000\000\000\000\000\000\198\000\198\000\198\000\198\000\121\001\
\000\000\000\000\000\000\000\000\000\000\198\000\000\000\198\000\
\000\000\000\000\000\000\000\000\000\000\198\000\198\000\000\000\
\000\000\000\000\166\003\198\000\000\000\198\000\000\000\205\001\
\000\000\000\000\000\000\000\000\198\000\000\000\198\000\198\000\
\198\000\198\000\000\000\198\000\198\000\000\000\000\000\000\000\
\205\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\198\000\198\000\198\000\198\000\198\000\198\000\
\198\000\198\000\198\000\000\000\235\000\235\000\235\000\235\000\
\235\000\000\000\000\000\000\000\000\000\198\000\198\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\198\000\235\000\000\000\000\000\000\000\198\000\000\000\000\000\
\000\000\198\000\000\000\044\004\000\000\198\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\198\000\198\000\000\000\
\198\000\198\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\237\000\237\000\237\000\237\000\237\000\036\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\089\004\000\000\000\000\237\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\235\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\235\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\236\000\236\000\
\236\000\236\000\236\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\027\000\027\000\027\000\027\000\000\000\027\000\
\000\000\027\000\000\000\247\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\000\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\000\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\000\000\027\000\027\000\
\000\000\027\000\027\000\027\000\237\000\027\000\000\000\027\000\
\000\000\000\000\027\000\027\000\000\000\027\000\000\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\000\000\027\000\000\000\027\000\027\000\027\000\
\000\000\000\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\000\000\027\000\000\000\027\000\027\000\051\000\
\107\004\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\000\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\000\000\000\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\000\000\247\000\000\000\000\000\027\000\027\000\027\000\
\027\000\027\000\000\000\027\000\027\000\027\000\000\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\027\000\027\000\000\000\027\000\000\000\027\000\
\027\000\000\000\027\000\027\000\027\000\000\000\027\000\027\000\
\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\
\027\000\027\000\000\000\000\000\000\000\000\000\036\001\036\001\
\036\001\036\001\000\000\036\001\000\000\036\001\000\000\000\000\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\000\000\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\000\000\
\036\001\036\001\036\001\000\000\000\000\000\000\000\000\036\001\
\036\001\000\000\036\001\036\001\000\000\036\001\036\001\036\001\
\000\000\036\001\000\000\036\001\000\000\000\000\036\001\036\001\
\000\000\036\001\000\000\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\000\000\036\001\
\000\000\036\001\036\001\036\001\000\000\000\000\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\000\000\036\001\
\052\001\036\001\036\001\000\000\000\000\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\000\000\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\036\001\
\000\000\000\000\036\001\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\000\000\
\000\000\036\001\036\001\036\001\036\001\036\001\000\000\036\001\
\036\001\036\001\000\000\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\036\001\
\000\000\036\001\000\000\036\001\036\001\000\000\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\000\000\000\000\
\051\000\051\000\051\000\051\000\000\000\051\000\000\000\051\000\
\000\000\000\000\051\000\051\000\051\000\051\000\051\000\051\000\
\051\000\000\000\051\000\051\000\051\000\051\000\051\000\051\000\
\051\000\000\000\051\000\051\000\051\000\000\000\051\000\051\000\
\051\000\051\000\051\000\000\000\051\000\051\000\000\000\051\000\
\051\000\051\000\000\000\051\000\000\000\051\000\000\000\000\000\
\051\000\051\000\000\000\051\000\000\000\051\000\051\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\
\000\000\051\000\000\000\051\000\051\000\051\000\000\000\000\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\
\242\001\051\000\000\000\051\000\051\000\000\000\000\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\000\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\
\051\000\051\000\000\000\000\000\051\000\051\000\051\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\000\000\
\000\000\000\000\000\000\051\000\051\000\051\000\051\000\051\000\
\000\000\051\000\051\000\051\000\000\000\051\000\051\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\
\051\000\051\000\000\000\051\000\000\000\051\000\051\000\000\000\
\051\000\051\000\051\000\000\000\051\000\051\000\051\000\051\000\
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\
\000\000\052\001\052\001\052\001\052\001\000\000\052\001\000\000\
\052\001\000\000\000\000\000\000\052\001\052\001\052\001\052\001\
\052\001\052\001\000\000\052\001\052\001\050\001\052\001\052\001\
\052\001\052\001\000\000\052\001\052\001\000\000\052\001\052\001\
\052\001\052\001\052\001\052\001\000\000\052\001\052\001\000\000\
\052\001\052\001\052\001\000\000\052\001\000\000\052\001\000\000\
\000\000\052\001\052\001\000\000\052\001\000\000\052\001\052\001\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\052\001\
\052\001\000\000\052\001\000\000\052\001\052\001\000\000\000\000\
\000\000\052\001\052\001\052\001\000\000\052\001\052\001\052\001\
\052\001\244\001\052\001\050\001\052\001\052\001\000\000\000\000\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\000\000\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\052\001\
\052\001\052\001\052\001\000\000\000\000\052\001\052\001\052\001\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\052\001\
\000\000\000\000\000\000\000\000\052\001\052\001\052\001\052\001\
\052\001\000\000\052\001\052\001\052\001\000\000\052\001\052\001\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\052\001\
\052\001\052\001\052\001\000\000\052\001\000\000\052\001\052\001\
\000\000\052\001\052\001\000\000\052\001\052\001\052\001\052\001\
\052\001\052\001\052\001\052\001\052\001\052\001\052\001\000\000\
\052\001\242\001\242\001\242\001\242\001\000\000\242\001\000\000\
\242\001\000\000\000\000\242\001\242\001\242\001\242\001\242\001\
\242\001\242\001\000\000\242\001\242\001\242\001\242\001\242\001\
\242\001\242\001\000\000\242\001\242\001\242\001\000\000\000\000\
\000\000\000\000\000\000\242\001\000\000\242\001\242\001\000\000\
\242\001\242\001\242\001\000\000\242\001\000\000\242\001\000\000\
\000\000\242\001\242\001\000\000\242\001\000\000\242\001\242\001\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\242\001\000\000\242\001\000\000\242\001\242\001\242\001\000\000\
\000\000\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\242\001\244\001\242\001\000\000\242\001\242\001\000\000\000\000\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\000\000\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\242\001\242\001\242\001\000\000\000\000\242\001\242\001\242\001\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\000\000\000\000\000\000\000\000\242\001\242\001\242\001\242\001\
\242\001\000\000\242\001\242\001\242\001\000\000\242\001\242\001\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\242\001\242\001\242\001\000\000\242\001\000\000\242\001\242\001\
\000\000\242\001\242\001\242\001\000\000\242\001\242\001\242\001\
\242\001\242\001\242\001\242\001\242\001\242\001\242\001\242\001\
\242\001\000\000\244\001\244\001\244\001\244\001\000\000\244\001\
\000\000\244\001\000\000\000\000\000\000\244\001\244\001\244\001\
\244\001\244\001\244\001\000\000\244\001\244\001\051\001\244\001\
\244\001\244\001\244\001\000\000\244\001\244\001\000\000\000\000\
\244\001\244\001\244\001\244\001\244\001\000\000\244\001\244\001\
\000\000\244\001\244\001\244\001\000\000\244\001\000\000\244\001\
\000\000\000\000\244\001\244\001\000\000\244\001\000\000\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\244\001\000\000\244\001\000\000\244\001\244\001\000\000\
\000\000\000\000\244\001\244\001\244\001\000\000\244\001\244\001\
\244\001\244\001\239\001\244\001\000\000\244\001\244\001\000\000\
\000\000\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\000\000\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\244\001\244\001\244\001\000\000\000\000\244\001\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\000\000\000\000\000\000\000\000\244\001\244\001\244\001\
\244\001\244\001\000\000\244\001\244\001\244\001\000\000\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\244\001\244\001\244\001\000\000\244\001\000\000\244\001\
\244\001\000\000\244\001\244\001\000\000\000\000\244\001\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\000\000\244\001\244\001\244\001\244\001\244\001\000\000\244\001\
\000\000\244\001\000\000\000\000\000\000\244\001\244\001\000\000\
\244\001\244\001\000\000\000\000\244\001\244\001\051\001\244\001\
\244\001\244\001\244\001\000\000\244\001\244\001\000\000\000\000\
\244\001\244\001\244\001\244\001\244\001\000\000\244\001\244\001\
\000\000\244\001\244\001\244\001\000\000\244\001\000\000\244\001\
\000\000\000\000\244\001\244\001\000\000\244\001\000\000\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\244\001\000\000\244\001\000\000\244\001\244\001\000\000\
\000\000\000\000\244\001\244\001\244\001\000\000\244\001\244\001\
\244\001\244\001\083\000\244\001\051\001\244\001\244\001\000\000\
\000\000\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\000\000\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\244\001\244\001\244\001\000\000\000\000\244\001\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\244\001\000\000\000\000\000\000\000\000\244\001\244\001\244\001\
\244\001\244\001\000\000\244\001\244\001\244\001\000\000\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\000\000\
\244\001\244\001\244\001\244\001\000\000\244\001\000\000\244\001\
\244\001\000\000\244\001\244\001\000\000\000\000\244\001\244\001\
\244\001\244\001\244\001\244\001\244\001\244\001\244\001\244\001\
\000\000\244\001\000\000\239\001\239\001\239\001\239\001\000\000\
\239\001\000\000\239\001\000\000\000\000\000\000\239\001\239\001\
\239\001\239\001\239\001\239\001\000\000\239\001\239\001\000\000\
\239\001\239\001\239\001\239\001\000\000\239\001\239\001\000\000\
\000\000\000\000\000\000\000\000\000\000\239\001\000\000\239\001\
\239\001\000\000\239\001\239\001\239\001\000\000\239\001\000\000\
\239\001\000\000\000\000\239\001\239\001\000\000\239\001\000\000\
\239\001\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\000\000\239\001\000\000\239\001\239\001\
\000\000\000\000\000\000\239\001\239\001\239\001\000\000\239\001\
\239\001\239\001\239\001\084\000\239\001\000\000\239\001\239\001\
\000\000\000\000\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\000\000\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\239\001\239\001\000\000\000\000\239\001\
\239\001\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\000\000\000\000\000\000\000\000\239\001\239\001\
\239\001\239\001\239\001\000\000\239\001\239\001\239\001\000\000\
\239\001\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\239\001\239\001\000\000\239\001\000\000\
\239\001\239\001\000\000\239\001\239\001\000\000\000\000\239\001\
\239\001\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\000\000\239\001\083\000\083\000\083\000\083\000\000\000\
\083\000\000\000\083\000\000\000\000\000\000\000\083\000\083\000\
\083\000\083\000\083\000\083\000\000\000\083\000\083\000\000\000\
\083\000\083\000\083\000\083\000\000\000\083\000\083\000\000\000\
\000\000\000\000\000\000\000\000\000\000\083\000\000\000\083\000\
\083\000\000\000\083\000\083\000\083\000\000\000\083\000\000\000\
\083\000\000\000\000\000\083\000\083\000\000\000\083\000\000\000\
\083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\000\000\083\000\000\000\083\000\083\000\
\000\000\000\000\000\000\083\000\083\000\083\000\000\000\083\000\
\083\000\083\000\083\000\085\000\083\000\000\000\083\000\083\000\
\000\000\000\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\000\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\083\000\000\000\000\000\083\000\
\083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\000\000\000\000\000\000\000\000\083\000\083\000\
\083\000\083\000\083\000\000\000\083\000\083\000\083\000\000\000\
\083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\083\000\000\000\083\000\000\000\
\083\000\083\000\000\000\083\000\083\000\000\000\000\000\083\000\
\083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\000\000\083\000\000\000\084\000\084\000\084\000\084\000\
\000\000\084\000\000\000\084\000\000\000\000\000\000\000\084\000\
\084\000\084\000\084\000\084\000\084\000\000\000\084\000\084\000\
\000\000\084\000\084\000\084\000\084\000\000\000\084\000\084\000\
\000\000\000\000\000\000\000\000\000\000\000\000\084\000\000\000\
\084\000\084\000\000\000\084\000\084\000\084\000\000\000\084\000\
\000\000\084\000\000\000\000\000\084\000\084\000\000\000\084\000\
\000\000\084\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\000\000\084\000\000\000\084\000\
\084\000\000\000\000\000\000\000\084\000\084\000\084\000\000\000\
\084\000\084\000\084\000\084\000\082\000\084\000\000\000\084\000\
\084\000\000\000\000\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\000\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\084\000\084\000\000\000\000\000\
\084\000\084\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\000\000\000\000\000\000\000\000\084\000\
\084\000\084\000\084\000\084\000\000\000\084\000\084\000\084\000\
\000\000\084\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\084\000\084\000\000\000\084\000\
\000\000\084\000\084\000\000\000\084\000\084\000\000\000\000\000\
\084\000\084\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\000\000\084\000\085\000\085\000\085\000\085\000\
\000\000\085\000\000\000\085\000\000\000\000\000\000\000\085\000\
\085\000\085\000\085\000\085\000\085\000\000\000\085\000\085\000\
\000\000\085\000\085\000\085\000\085\000\000\000\085\000\085\000\
\000\000\000\000\000\000\000\000\000\000\000\000\085\000\000\000\
\085\000\085\000\000\000\085\000\085\000\085\000\000\000\085\000\
\000\000\085\000\000\000\000\000\085\000\085\000\000\000\085\000\
\000\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\000\000\085\000\000\000\085\000\
\085\000\000\000\000\000\000\000\085\000\085\000\085\000\000\000\
\085\000\085\000\085\000\085\000\239\001\085\000\000\000\085\000\
\085\000\000\000\000\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\000\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\085\000\000\000\000\000\
\085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\000\000\000\000\000\000\000\000\085\000\
\085\000\085\000\085\000\085\000\000\000\085\000\085\000\085\000\
\000\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\085\000\000\000\085\000\
\000\000\085\000\085\000\000\000\085\000\085\000\000\000\000\000\
\085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\000\000\085\000\000\000\082\000\082\000\082\000\
\082\000\000\000\082\000\000\000\082\000\000\000\000\000\000\000\
\082\000\082\000\082\000\082\000\082\000\082\000\000\000\082\000\
\082\000\000\000\082\000\082\000\082\000\082\000\000\000\082\000\
\082\000\000\000\000\000\000\000\000\000\000\000\000\000\082\000\
\000\000\082\000\082\000\000\000\082\000\082\000\082\000\000\000\
\082\000\000\000\082\000\000\000\000\000\082\000\082\000\000\000\
\082\000\000\000\082\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\000\000\082\000\000\000\
\082\000\082\000\000\000\000\000\000\000\082\000\082\000\082\000\
\000\000\082\000\082\000\082\000\082\000\083\000\082\000\000\000\
\082\000\082\000\000\000\000\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\000\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\082\000\082\000\000\000\
\000\000\082\000\082\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\000\000\000\000\000\000\000\000\
\082\000\082\000\082\000\082\000\082\000\000\000\082\000\082\000\
\082\000\000\000\082\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\082\000\082\000\000\000\
\082\000\000\000\082\000\082\000\000\000\082\000\082\000\000\000\
\000\000\082\000\082\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\000\000\082\000\239\001\239\001\239\001\
\239\001\000\000\239\001\000\000\239\001\000\000\000\000\000\000\
\239\001\239\001\000\000\239\001\239\001\000\000\000\000\239\001\
\239\001\000\000\239\001\239\001\239\001\239\001\000\000\239\001\
\239\001\000\000\000\000\000\000\000\000\000\000\000\000\239\001\
\000\000\239\001\239\001\000\000\239\001\239\001\239\001\000\000\
\239\001\000\000\239\001\000\000\000\000\239\001\239\001\000\000\
\239\001\000\000\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\239\001\239\001\000\000\239\001\000\000\
\239\001\239\001\000\000\000\000\000\000\239\001\239\001\239\001\
\000\000\239\001\239\001\239\001\239\001\084\000\239\001\000\000\
\239\001\239\001\000\000\000\000\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\000\000\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\239\001\239\001\239\001\239\001\000\000\
\000\000\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\239\001\000\000\000\000\000\000\000\000\
\239\001\239\001\239\001\239\001\239\001\000\000\239\001\239\001\
\239\001\000\000\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\000\000\239\001\239\001\239\001\239\001\000\000\
\239\001\000\000\239\001\239\001\000\000\239\001\239\001\000\000\
\000\000\239\001\239\001\239\001\239\001\239\001\239\001\239\001\
\239\001\239\001\239\001\000\000\239\001\000\000\083\000\083\000\
\083\000\083\000\000\000\083\000\000\000\083\000\000\000\000\000\
\000\000\083\000\083\000\000\000\083\000\083\000\000\000\000\000\
\083\000\083\000\000\000\083\000\083\000\083\000\083\000\000\000\
\083\000\083\000\000\000\000\000\000\000\000\000\000\000\000\000\
\083\000\000\000\083\000\083\000\000\000\083\000\083\000\083\000\
\000\000\083\000\000\000\083\000\000\000\000\000\083\000\083\000\
\000\000\083\000\000\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\083\000\083\000\000\000\083\000\
\000\000\083\000\083\000\000\000\000\000\000\000\083\000\083\000\
\083\000\000\000\083\000\083\000\083\000\083\000\085\000\083\000\
\000\000\083\000\083\000\000\000\000\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\000\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\
\000\000\000\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\083\000\000\000\000\000\000\000\
\000\000\083\000\083\000\083\000\083\000\083\000\000\000\083\000\
\083\000\083\000\000\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\000\000\083\000\083\000\083\000\083\000\
\000\000\083\000\000\000\083\000\083\000\000\000\083\000\083\000\
\000\000\000\000\083\000\083\000\083\000\083\000\083\000\083\000\
\083\000\083\000\083\000\083\000\000\000\083\000\084\000\084\000\
\084\000\084\000\000\000\084\000\000\000\084\000\000\000\000\000\
\000\000\084\000\084\000\000\000\084\000\084\000\000\000\000\000\
\084\000\084\000\000\000\084\000\084\000\084\000\084\000\000\000\
\084\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\084\000\000\000\084\000\084\000\000\000\084\000\084\000\084\000\
\000\000\084\000\000\000\084\000\000\000\000\000\084\000\084\000\
\000\000\084\000\000\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\084\000\084\000\000\000\084\000\
\000\000\084\000\084\000\000\000\000\000\000\000\084\000\084\000\
\084\000\000\000\084\000\084\000\084\000\084\000\082\000\084\000\
\000\000\084\000\084\000\000\000\000\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\000\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\084\000\084\000\084\000\084\000\
\000\000\000\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\084\000\000\000\000\000\000\000\
\000\000\084\000\084\000\084\000\084\000\084\000\000\000\084\000\
\084\000\084\000\000\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\000\000\084\000\084\000\084\000\084\000\
\000\000\084\000\000\000\084\000\084\000\000\000\084\000\084\000\
\000\000\000\000\084\000\084\000\084\000\084\000\084\000\084\000\
\084\000\084\000\084\000\084\000\000\000\084\000\000\000\085\000\
\085\000\085\000\085\000\000\000\085\000\000\000\085\000\000\000\
\000\000\000\000\085\000\085\000\000\000\085\000\085\000\000\000\
\000\000\085\000\085\000\000\000\085\000\085\000\085\000\085\000\
\000\000\085\000\085\000\000\000\000\000\000\000\000\000\000\000\
\000\000\085\000\000\000\085\000\085\000\000\000\085\000\085\000\
\085\000\000\000\085\000\000\000\085\000\000\000\000\000\085\000\
\085\000\000\000\085\000\000\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\085\000\085\000\000\000\
\085\000\000\000\085\000\085\000\000\000\000\000\000\000\085\000\
\085\000\085\000\000\000\085\000\085\000\085\000\085\000\056\000\
\085\000\000\000\085\000\085\000\000\000\000\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\000\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\
\085\000\000\000\000\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\085\000\000\000\000\000\
\000\000\000\000\085\000\085\000\085\000\085\000\085\000\000\000\
\085\000\085\000\085\000\000\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\000\000\085\000\085\000\085\000\
\085\000\000\000\085\000\000\000\085\000\085\000\000\000\085\000\
\085\000\000\000\000\000\085\000\085\000\085\000\085\000\085\000\
\085\000\085\000\085\000\085\000\085\000\000\000\085\000\082\000\
\082\000\082\000\082\000\000\000\082\000\000\000\082\000\000\000\
\000\000\000\000\082\000\082\000\000\000\082\000\082\000\000\000\
\000\000\082\000\082\000\000\000\082\000\082\000\082\000\082\000\
\000\000\082\000\082\000\000\000\000\000\000\000\000\000\000\000\
\000\000\082\000\000\000\082\000\082\000\000\000\082\000\082\000\
\082\000\000\000\082\000\000\000\082\000\000\000\000\000\082\000\
\082\000\000\000\082\000\000\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\082\000\082\000\000\000\
\082\000\000\000\082\000\082\000\000\000\000\000\000\000\082\000\
\082\000\082\000\000\000\082\000\082\000\082\000\082\000\047\000\
\082\000\000\000\082\000\082\000\000\000\000\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\000\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\082\000\082\000\082\000\
\082\000\000\000\000\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\082\000\000\000\000\000\
\000\000\000\000\082\000\082\000\082\000\082\000\082\000\000\000\
\082\000\082\000\082\000\000\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\000\000\082\000\082\000\082\000\
\082\000\000\000\082\000\000\000\082\000\082\000\000\000\082\000\
\082\000\000\000\000\000\082\000\082\000\082\000\082\000\082\000\
\082\000\082\000\082\000\082\000\082\000\000\000\082\000\000\000\
\056\000\056\000\056\000\056\000\000\000\056\000\000\000\056\000\
\000\000\000\000\000\000\000\000\056\000\056\000\056\000\000\000\
\056\000\000\000\000\000\056\000\000\000\056\000\056\000\056\000\
\056\000\000\000\056\000\056\000\000\000\000\000\000\000\000\000\
\000\000\000\000\056\000\000\000\056\000\056\000\000\000\056\000\
\056\000\056\000\000\000\000\000\000\000\056\000\000\000\000\000\
\000\000\000\000\000\000\056\000\000\000\056\000\056\000\056\000\
\056\000\056\000\056\000\000\000\000\000\000\000\000\000\000\000\
\000\000\056\000\000\000\056\000\056\000\000\000\000\000\000\000\
\000\000\056\000\056\000\000\000\000\000\000\000\056\000\056\000\
\007\000\056\000\000\000\000\000\000\000\000\000\000\000\056\000\
\056\000\000\000\056\000\056\000\056\000\056\000\000\000\056\000\
\056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\
\056\000\000\000\000\000\000\000\000\000\056\000\056\000\056\000\
\056\000\056\000\056\000\056\000\056\000\056\000\056\000\000\000\
\000\000\000\000\000\000\000\000\056\000\056\000\056\000\000\000\
\000\000\056\000\056\000\000\000\000\000\056\000\056\000\056\000\
\056\000\056\000\000\000\000\000\056\000\056\000\000\000\056\000\
\056\000\056\000\000\000\056\000\000\000\056\000\000\000\000\000\
\000\000\056\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\056\000\056\000\000\000\056\000\056\000\000\000\056\000\
\047\000\047\000\047\000\047\000\000\000\047\000\000\000\047\000\
\000\000\000\000\000\000\000\000\047\000\047\000\047\000\000\000\
\047\000\000\000\000\000\047\000\000\000\047\000\047\000\047\000\
\047\000\000\000\047\000\047\000\000\000\000\000\000\000\000\000\
\000\000\000\000\047\000\000\000\047\000\047\000\000\000\047\000\
\047\000\047\000\000\000\000\000\000\000\047\000\000\000\000\000\
\000\000\000\000\000\000\047\000\000\000\047\000\047\000\047\000\
\047\000\047\000\047\000\000\000\000\000\000\000\000\000\000\000\
\000\000\047\000\000\000\047\000\000\000\000\000\000\000\000\000\
\000\000\047\000\047\000\000\000\000\000\000\000\047\000\047\000\
\199\001\047\000\000\000\000\000\000\000\000\000\000\000\047\000\
\047\000\000\000\047\000\047\000\047\000\047\000\000\000\047\000\
\047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\
\047\000\000\000\000\000\000\000\000\000\047\000\047\000\047\000\
\047\000\047\000\047\000\047\000\047\000\047\000\047\000\000\000\
\000\000\000\000\000\000\000\000\047\000\047\000\047\000\000\000\
\000\000\047\000\047\000\000\000\000\000\047\000\047\000\047\000\
\047\000\047\000\000\000\000\000\047\000\047\000\000\000\047\000\
\047\000\047\000\000\000\047\000\000\000\047\000\000\000\000\000\
\000\000\047\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\047\000\047\000\000\000\047\000\047\000\000\000\047\000\
\000\000\007\000\007\000\007\000\007\000\000\000\007\000\000\000\
\007\000\000\000\000\000\000\000\000\000\007\000\007\000\007\000\
\000\000\007\000\000\000\000\000\007\000\000\000\007\000\007\000\
\007\000\007\000\000\000\007\000\007\000\000\000\000\000\000\000\
\000\000\000\000\000\000\007\000\000\000\007\000\007\000\000\000\
\007\000\007\000\007\000\000\000\000\000\000\000\007\000\000\000\
\000\000\000\000\000\000\000\000\007\000\000\000\007\000\007\000\
\007\000\007\000\007\000\007\000\000\000\000\000\000\000\000\000\
\000\000\000\000\007\000\000\000\007\000\000\000\000\000\000\000\
\000\000\000\000\007\000\007\000\000\000\000\000\000\000\007\000\
\007\000\198\001\007\000\000\000\000\000\000\000\000\000\000\000\
\007\000\007\000\000\000\007\000\007\000\007\000\007\000\000\000\
\007\000\007\000\007\000\007\000\007\000\007\000\007\000\007\000\
\007\000\007\000\000\000\000\000\000\000\000\000\007\000\007\000\
\007\000\007\000\007\000\007\000\007\000\007\000\007\000\007\000\
\000\000\000\000\000\000\000\000\000\000\007\000\007\000\007\000\
\000\000\000\000\007\000\007\000\000\000\000\000\007\000\007\000\
\007\000\007\000\007\000\000\000\000\000\007\000\007\000\000\000\
\007\000\007\000\007\000\000\000\007\000\000\000\007\000\000\000\
\000\000\000\000\007\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\007\000\007\000\000\000\007\000\007\000\000\000\
\007\000\199\001\199\001\199\001\199\001\000\000\199\001\000\000\
\199\001\000\000\000\000\000\000\000\000\199\001\199\001\199\001\
\000\000\199\001\000\000\000\000\199\001\000\000\199\001\199\001\
\199\001\199\001\000\000\199\001\199\001\000\000\000\000\000\000\
\000\000\000\000\000\000\199\001\000\000\199\001\199\001\000\000\
\199\001\199\001\199\001\000\000\000\000\000\000\199\001\000\000\
\000\000\000\000\000\000\000\000\199\001\000\000\199\001\199\001\
\199\001\199\001\199\001\199\001\000\000\000\000\000\000\000\000\
\000\000\000\000\199\001\000\000\199\001\000\000\000\000\000\000\
\000\000\000\000\199\001\199\001\000\000\000\000\000\000\199\001\
\199\001\197\001\199\001\000\000\000\000\000\000\000\000\000\000\
\199\001\199\001\000\000\199\001\199\001\199\001\199\001\000\000\
\199\001\199\001\199\001\199\001\199\001\199\001\199\001\199\001\
\199\001\000\000\000\000\000\000\000\000\000\000\199\001\199\001\
\199\001\199\001\199\001\199\001\199\001\199\001\199\001\199\001\
\000\000\000\000\000\000\000\000\000\000\199\001\199\001\199\001\
\000\000\000\000\199\001\199\001\000\000\000\000\199\001\199\001\
\199\001\199\001\199\001\000\000\000\000\199\001\199\001\000\000\
\199\001\199\001\199\001\000\000\199\001\000\000\199\001\000\000\
\000\000\000\000\199\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\199\001\199\001\000\000\199\001\199\001\000\000\
\199\001\000\000\198\001\198\001\198\001\198\001\000\000\198\001\
\000\000\198\001\000\000\000\000\000\000\000\000\198\001\198\001\
\198\001\000\000\198\001\000\000\000\000\198\001\000\000\198\001\
\198\001\198\001\198\001\000\000\198\001\198\001\000\000\000\000\
\000\000\000\000\000\000\000\000\198\001\000\000\198\001\198\001\
\000\000\198\001\198\001\198\001\000\000\000\000\000\000\198\001\
\000\000\000\000\000\000\000\000\000\000\198\001\000\000\198\001\
\198\001\198\001\198\001\198\001\198\001\000\000\000\000\000\000\
\000\000\000\000\000\000\198\001\000\000\198\001\000\000\000\000\
\000\000\000\000\000\000\198\001\198\001\000\000\000\000\000\000\
\198\001\198\001\196\001\198\001\000\000\000\000\000\000\000\000\
\000\000\198\001\198\001\000\000\198\001\198\001\198\001\198\001\
\000\000\198\001\198\001\198\001\198\001\198\001\198\001\198\001\
\198\001\198\001\000\000\000\000\000\000\000\000\000\000\198\001\
\198\001\198\001\198\001\198\001\198\001\198\001\198\001\198\001\
\198\001\000\000\000\000\000\000\000\000\000\000\198\001\198\001\
\198\001\000\000\000\000\198\001\198\001\000\000\000\000\198\001\
\198\001\198\001\198\001\198\001\000\000\000\000\198\001\198\001\
\000\000\198\001\198\001\198\001\000\000\198\001\000\000\198\001\
\000\000\000\000\000\000\198\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\198\001\198\001\000\000\198\001\198\001\
\000\000\198\001\197\001\197\001\197\001\197\001\000\000\197\001\
\000\000\000\000\000\000\000\000\000\000\000\000\197\001\197\001\
\197\001\000\000\197\001\000\000\000\000\197\001\000\000\197\001\
\197\001\197\001\197\001\000\000\197\001\197\001\000\000\000\000\
\000\000\000\000\000\000\000\000\197\001\000\000\197\001\197\001\
\000\000\197\001\197\001\197\001\000\000\000\000\000\000\197\001\
\000\000\000\000\000\000\000\000\000\000\197\001\000\000\197\001\
\197\001\197\001\197\001\197\001\197\001\000\000\000\000\000\000\
\000\000\000\000\000\000\197\001\000\000\197\001\000\000\000\000\
\000\000\000\000\000\000\197\001\197\001\000\000\000\000\000\000\
\197\001\197\001\185\001\197\001\000\000\000\000\000\000\000\000\
\000\000\197\001\197\001\000\000\197\001\197\001\197\001\197\001\
\000\000\197\001\197\001\197\001\197\001\197\001\197\001\197\001\
\197\001\197\001\000\000\000\000\000\000\000\000\000\000\197\001\
\197\001\197\001\197\001\197\001\197\001\197\001\197\001\197\001\
\197\001\000\000\000\000\000\000\000\000\000\000\197\001\197\001\
\197\001\000\000\000\000\197\001\197\001\000\000\000\000\197\001\
\197\001\197\001\197\001\197\001\000\000\000\000\197\001\197\001\
\000\000\197\001\197\001\197\001\000\000\197\001\000\000\197\001\
\000\000\000\000\000\000\197\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\197\001\197\001\000\000\197\001\197\001\
\000\000\197\001\000\000\000\000\196\001\196\001\196\001\000\000\
\196\001\000\000\000\000\000\000\000\000\000\000\000\000\196\001\
\196\001\196\001\000\000\196\001\000\000\000\000\196\001\000\000\
\196\001\196\001\196\001\196\001\000\000\196\001\196\001\000\000\
\000\000\000\000\000\000\000\000\000\000\196\001\000\000\196\001\
\196\001\000\000\196\001\196\001\196\001\000\000\000\000\000\000\
\196\001\000\000\000\000\000\000\000\000\000\000\196\001\000\000\
\196\001\196\001\196\001\196\001\196\001\196\001\000\000\000\000\
\000\000\000\000\000\000\000\000\196\001\000\000\196\001\000\000\
\000\000\000\000\000\000\000\000\196\001\196\001\000\000\000\000\
\000\000\196\001\196\001\195\001\196\001\000\000\000\000\000\000\
\000\000\000\000\196\001\196\001\000\000\196\001\196\001\196\001\
\196\001\000\000\196\001\196\001\196\001\196\001\196\001\196\001\
\196\001\196\001\000\000\000\000\000\000\000\000\000\000\000\000\
\196\001\196\001\196\001\196\001\196\001\196\001\196\001\196\001\
\196\001\196\001\000\000\000\000\000\000\000\000\000\000\196\001\
\196\001\196\001\000\000\000\000\196\001\196\001\000\000\000\000\
\196\001\196\001\196\001\196\001\196\001\000\000\000\000\196\001\
\196\001\000\000\196\001\196\001\196\001\000\000\196\001\000\000\
\196\001\000\000\000\000\000\000\196\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\196\001\196\001\000\000\196\001\
\196\001\000\000\196\001\000\000\185\001\185\001\185\001\000\000\
\185\001\000\000\000\000\000\000\000\000\000\000\000\000\185\001\
\185\001\185\001\000\000\185\001\000\000\000\000\185\001\000\000\
\000\000\185\001\185\001\185\001\000\000\185\001\185\001\000\000\
\000\000\000\000\000\000\000\000\000\000\185\001\000\000\185\001\
\185\001\000\000\185\001\185\001\185\001\000\000\000\000\000\000\
\185\001\000\000\000\000\000\000\000\000\000\000\185\001\000\000\
\185\001\185\001\185\001\185\001\185\001\185\001\000\000\000\000\
\000\000\000\000\000\000\000\000\185\001\000\000\185\001\000\000\
\000\000\000\000\000\000\000\000\185\001\185\001\000\000\000\000\
\000\000\185\001\185\001\177\001\185\001\000\000\000\000\000\000\
\000\000\000\000\185\001\185\001\000\000\185\001\185\001\185\001\
\185\001\000\000\185\001\185\001\185\001\185\001\185\001\185\001\
\185\001\185\001\000\000\000\000\000\000\000\000\000\000\000\000\
\185\001\185\001\185\001\185\001\185\001\185\001\185\001\185\001\
\185\001\185\001\000\000\000\000\000\000\000\000\000\000\185\001\
\185\001\185\001\000\000\000\000\185\001\185\001\000\000\000\000\
\185\001\185\001\185\001\185\001\185\001\000\000\000\000\185\001\
\185\001\000\000\185\001\185\001\185\001\000\000\185\001\000\000\
\185\001\000\000\000\000\000\000\185\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\185\001\185\001\000\000\185\001\
\185\001\000\000\185\001\000\000\000\000\195\001\195\001\195\001\
\000\000\195\001\000\000\000\000\000\000\000\000\000\000\000\000\
\195\001\195\001\195\001\000\000\195\001\000\000\000\000\195\001\
\000\000\000\000\195\001\195\001\195\001\000\000\195\001\195\001\
\000\000\000\000\000\000\000\000\000\000\000\000\195\001\000\000\
\195\001\195\001\000\000\195\001\195\001\195\001\000\000\000\000\
\000\000\195\001\000\000\000\000\000\000\000\000\000\000\195\001\
\000\000\195\001\195\001\195\001\195\001\195\001\195\001\000\000\
\000\000\000\000\000\000\000\000\000\000\195\001\000\000\195\001\
\000\000\000\000\000\000\000\000\000\000\195\001\195\001\000\000\
\000\000\000\000\195\001\195\001\172\001\195\001\000\000\000\000\
\000\000\000\000\000\000\195\001\195\001\000\000\195\001\195\001\
\195\001\195\001\000\000\195\001\195\001\195\001\195\001\195\001\
\195\001\195\001\195\001\000\000\000\000\000\000\000\000\000\000\
\000\000\195\001\195\001\195\001\195\001\195\001\195\001\195\001\
\195\001\195\001\195\001\000\000\000\000\000\000\000\000\000\000\
\195\001\195\001\195\001\000\000\000\000\195\001\195\001\000\000\
\000\000\195\001\195\001\195\001\195\001\195\001\000\000\000\000\
\195\001\195\001\000\000\195\001\195\001\195\001\000\000\195\001\
\000\000\195\001\000\000\000\000\000\000\195\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\195\001\195\001\000\000\
\195\001\195\001\000\000\195\001\000\000\177\001\177\001\177\001\
\000\000\177\001\000\000\000\000\000\000\000\000\000\000\000\000\
\177\001\177\001\177\001\000\000\177\001\000\000\000\000\177\001\
\000\000\000\000\177\001\177\001\177\001\000\000\177\001\177\001\
\000\000\000\000\000\000\000\000\000\000\000\000\177\001\000\000\
\177\001\177\001\000\000\177\001\177\001\177\001\000\000\000\000\
\000\000\177\001\000\000\000\000\000\000\000\000\000\000\177\001\
\000\000\177\001\177\001\177\001\177\001\177\001\177\001\000\000\
\000\000\000\000\000\000\000\000\000\000\177\001\000\000\177\001\
\000\000\000\000\000\000\000\000\000\000\177\001\177\001\000\000\
\000\000\000\000\177\001\177\001\176\001\177\001\000\000\000\000\
\000\000\000\000\000\000\000\000\177\001\000\000\177\001\177\001\
\177\001\177\001\000\000\177\001\177\001\177\001\177\001\177\001\
\177\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\177\001\177\001\177\001\177\001\177\001\177\001\177\001\
\177\001\177\001\177\001\000\000\000\000\000\000\000\000\000\000\
\177\001\177\001\177\001\000\000\000\000\177\001\177\001\000\000\
\000\000\177\001\177\001\177\001\177\001\177\001\000\000\000\000\
\177\001\177\001\000\000\177\001\177\001\177\001\000\000\177\001\
\000\000\177\001\000\000\000\000\000\000\177\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\177\001\177\001\000\000\
\177\001\177\001\000\000\177\001\000\000\000\000\172\001\172\001\
\172\001\000\000\172\001\000\000\000\000\000\000\000\000\000\000\
\000\000\172\001\172\001\172\001\000\000\172\001\000\000\000\000\
\172\001\000\000\000\000\172\001\172\001\172\001\000\000\172\001\
\172\001\000\000\000\000\000\000\000\000\000\000\000\000\172\001\
\000\000\172\001\172\001\000\000\172\001\172\001\172\001\000\000\
\000\000\000\000\172\001\000\000\000\000\000\000\000\000\000\000\
\172\001\000\000\172\001\172\001\172\001\172\001\172\001\172\001\
\000\000\000\000\000\000\000\000\000\000\000\000\172\001\000\000\
\172\001\000\000\000\000\000\000\000\000\000\000\172\001\172\001\
\000\000\000\000\000\000\172\001\172\001\170\001\172\001\000\000\
\000\000\000\000\000\000\000\000\000\000\172\001\000\000\172\001\
\172\001\172\001\172\001\000\000\172\001\172\001\172\001\172\001\
\172\001\172\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\172\001\172\001\172\001\172\001\172\001\172\001\
\172\001\172\001\172\001\172\001\000\000\000\000\000\000\000\000\
\000\000\172\001\172\001\172\001\000\000\000\000\172\001\172\001\
\000\000\000\000\172\001\172\001\172\001\172\001\172\001\000\000\
\000\000\172\001\172\001\000\000\172\001\172\001\172\001\000\000\
\172\001\000\000\172\001\000\000\000\000\000\000\172\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\172\001\172\001\
\000\000\172\001\172\001\000\000\172\001\000\000\176\001\176\001\
\176\001\000\000\176\001\000\000\000\000\000\000\000\000\000\000\
\000\000\176\001\176\001\176\001\000\000\176\001\000\000\000\000\
\176\001\000\000\000\000\176\001\176\001\176\001\000\000\176\001\
\176\001\000\000\000\000\000\000\000\000\000\000\000\000\176\001\
\000\000\176\001\176\001\000\000\176\001\176\001\176\001\000\000\
\000\000\000\000\176\001\000\000\000\000\000\000\000\000\000\000\
\176\001\000\000\176\001\176\001\176\001\176\001\176\001\176\001\
\000\000\000\000\000\000\000\000\000\000\000\000\176\001\000\000\
\176\001\000\000\000\000\000\000\000\000\000\000\176\001\176\001\
\000\000\000\000\000\000\176\001\176\001\175\001\176\001\000\000\
\000\000\000\000\000\000\000\000\000\000\176\001\000\000\176\001\
\176\001\176\001\176\001\000\000\176\001\176\001\176\001\176\001\
\176\001\176\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\176\001\176\001\176\001\176\001\176\001\
\176\001\176\001\176\001\176\001\000\000\000\000\000\000\000\000\
\000\000\176\001\176\001\176\001\000\000\000\000\176\001\176\001\
\000\000\000\000\176\001\176\001\176\001\176\001\176\001\000\000\
\000\000\176\001\176\001\000\000\176\001\176\001\176\001\000\000\
\176\001\000\000\176\001\000\000\000\000\000\000\176\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\176\001\176\001\
\000\000\176\001\176\001\000\000\176\001\000\000\000\000\170\001\
\170\001\170\001\000\000\170\001\000\000\000\000\000\000\000\000\
\000\000\000\000\170\001\170\001\170\001\000\000\170\001\000\000\
\000\000\170\001\000\000\000\000\170\001\170\001\170\001\000\000\
\170\001\170\001\000\000\000\000\000\000\000\000\000\000\000\000\
\170\001\000\000\170\001\170\001\000\000\170\001\170\001\170\001\
\000\000\000\000\000\000\170\001\000\000\000\000\000\000\000\000\
\000\000\170\001\000\000\170\001\170\001\170\001\170\001\170\001\
\170\001\000\000\000\000\000\000\000\000\000\000\000\000\170\001\
\000\000\170\001\000\000\000\000\000\000\000\000\000\000\170\001\
\170\001\000\000\000\000\000\000\170\001\170\001\174\001\170\001\
\000\000\000\000\000\000\000\000\000\000\000\000\170\001\000\000\
\170\001\170\001\170\001\170\001\000\000\170\001\170\001\170\001\
\170\001\170\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\170\001\170\001\170\001\170\001\
\170\001\170\001\170\001\170\001\170\001\000\000\000\000\000\000\
\000\000\000\000\170\001\170\001\170\001\000\000\000\000\170\001\
\170\001\000\000\000\000\170\001\170\001\170\001\170\001\170\001\
\000\000\000\000\170\001\170\001\000\000\170\001\170\001\170\001\
\000\000\170\001\000\000\170\001\000\000\000\000\000\000\170\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\170\001\
\170\001\000\000\170\001\170\001\000\000\170\001\000\000\175\001\
\175\001\175\001\000\000\175\001\000\000\000\000\000\000\000\000\
\000\000\000\000\175\001\175\001\175\001\000\000\175\001\000\000\
\000\000\175\001\000\000\000\000\175\001\175\001\175\001\000\000\
\175\001\175\001\000\000\000\000\000\000\000\000\000\000\000\000\
\175\001\000\000\175\001\175\001\000\000\175\001\175\001\175\001\
\000\000\000\000\000\000\175\001\000\000\000\000\000\000\000\000\
\000\000\175\001\000\000\175\001\175\001\175\001\175\001\175\001\
\175\001\000\000\000\000\000\000\000\000\000\000\000\000\175\001\
\000\000\175\001\000\000\000\000\000\000\000\000\000\000\175\001\
\175\001\000\000\000\000\000\000\175\001\175\001\173\001\175\001\
\000\000\000\000\000\000\000\000\000\000\000\000\175\001\000\000\
\175\001\175\001\175\001\175\001\000\000\175\001\175\001\175\001\
\175\001\175\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\175\001\175\001\175\001\175\001\
\175\001\175\001\175\001\175\001\175\001\000\000\000\000\000\000\
\000\000\000\000\175\001\175\001\175\001\000\000\000\000\175\001\
\175\001\000\000\000\000\175\001\175\001\175\001\175\001\175\001\
\000\000\000\000\175\001\175\001\000\000\175\001\175\001\175\001\
\000\000\175\001\000\000\175\001\000\000\000\000\000\000\175\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\175\001\
\175\001\000\000\175\001\175\001\000\000\175\001\000\000\000\000\
\174\001\174\001\174\001\000\000\174\001\000\000\000\000\000\000\
\000\000\000\000\000\000\174\001\174\001\174\001\000\000\174\001\
\000\000\000\000\174\001\000\000\000\000\174\001\174\001\174\001\
\000\000\174\001\174\001\000\000\000\000\000\000\000\000\000\000\
\000\000\174\001\000\000\174\001\174\001\000\000\000\000\174\001\
\174\001\000\000\000\000\000\000\174\001\000\000\000\000\000\000\
\000\000\000\000\174\001\000\000\174\001\174\001\174\001\174\001\
\174\001\174\001\000\000\000\000\000\000\000\000\000\000\000\000\
\174\001\000\000\174\001\000\000\000\000\000\000\000\000\000\000\
\174\001\174\001\000\000\000\000\000\000\174\001\174\001\223\001\
\174\001\000\000\000\000\000\000\000\000\000\000\000\000\174\001\
\000\000\174\001\174\001\174\001\174\001\000\000\174\001\174\001\
\174\001\174\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\174\001\174\001\174\001\
\174\001\174\001\174\001\174\001\174\001\174\001\000\000\000\000\
\000\000\000\000\000\000\174\001\174\001\174\001\000\000\000\000\
\174\001\174\001\000\000\000\000\174\001\174\001\174\001\174\001\
\174\001\000\000\000\000\174\001\174\001\000\000\174\001\174\001\
\174\001\000\000\174\001\000\000\174\001\000\000\000\000\000\000\
\174\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\174\001\174\001\000\000\174\001\174\001\000\000\174\001\000\000\
\173\001\173\001\173\001\000\000\173\001\000\000\000\000\000\000\
\000\000\000\000\000\000\173\001\173\001\173\001\000\000\173\001\
\000\000\000\000\173\001\000\000\000\000\173\001\173\001\173\001\
\000\000\173\001\173\001\000\000\000\000\000\000\000\000\000\000\
\000\000\173\001\000\000\173\001\173\001\000\000\000\000\173\001\
\173\001\000\000\000\000\000\000\173\001\000\000\000\000\000\000\
\000\000\000\000\173\001\000\000\173\001\173\001\173\001\173\001\
\173\001\173\001\000\000\000\000\000\000\000\000\000\000\000\000\
\173\001\000\000\173\001\000\000\000\000\000\000\000\000\000\000\
\173\001\173\001\000\000\000\000\000\000\173\001\173\001\171\001\
\173\001\000\000\000\000\000\000\000\000\000\000\000\000\173\001\
\000\000\173\001\173\001\173\001\173\001\000\000\173\001\173\001\
\173\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\173\001\173\001\173\001\
\173\001\173\001\173\001\173\001\173\001\173\001\000\000\000\000\
\000\000\000\000\000\000\173\001\173\001\173\001\000\000\000\000\
\173\001\173\001\000\000\000\000\173\001\173\001\173\001\173\001\
\173\001\000\000\000\000\173\001\173\001\000\000\173\001\173\001\
\173\001\000\000\173\001\000\000\173\001\000\000\000\000\000\000\
\173\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\173\001\173\001\000\000\173\001\173\001\000\000\173\001\000\000\
\000\000\223\001\223\001\223\001\000\000\223\001\000\000\000\000\
\000\000\000\000\000\000\000\000\223\001\223\001\223\001\000\000\
\223\001\000\000\000\000\223\001\000\000\000\000\000\000\223\001\
\223\001\000\000\223\001\223\001\000\000\000\000\000\000\000\000\
\000\000\000\000\223\001\000\000\223\001\223\001\000\000\000\000\
\223\001\223\001\000\000\000\000\000\000\223\001\000\000\000\000\
\000\000\000\000\000\000\223\001\000\000\223\001\223\001\223\001\
\223\001\223\001\223\001\000\000\000\000\000\000\000\000\000\000\
\000\000\223\001\000\000\223\001\000\000\000\000\000\000\000\000\
\000\000\223\001\223\001\000\000\000\000\000\000\223\001\223\001\
\104\000\223\001\000\000\000\000\000\000\000\000\000\000\000\000\
\223\001\000\000\223\001\223\001\223\001\223\001\000\000\223\001\
\223\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\223\001\223\001\
\223\001\223\001\223\001\223\001\223\001\223\001\223\001\000\000\
\000\000\000\000\000\000\000\000\223\001\223\001\223\001\000\000\
\000\000\223\001\223\001\000\000\000\000\223\001\223\001\223\001\
\223\001\223\001\000\000\000\000\223\001\223\001\000\000\223\001\
\223\001\223\001\000\000\223\001\000\000\223\001\000\000\000\000\
\000\000\223\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\223\001\223\001\000\000\223\001\223\001\000\000\223\001\
\000\000\171\001\171\001\171\001\000\000\171\001\000\000\000\000\
\000\000\000\000\000\000\000\000\171\001\171\001\171\001\000\000\
\171\001\000\000\000\000\171\001\000\000\000\000\000\000\171\001\
\171\001\000\000\171\001\171\001\000\000\000\000\000\000\000\000\
\000\000\000\000\171\001\000\000\171\001\171\001\000\000\000\000\
\171\001\171\001\000\000\000\000\000\000\171\001\000\000\000\000\
\000\000\000\000\000\000\171\001\000\000\171\001\171\001\171\001\
\171\001\171\001\171\001\000\000\000\000\000\000\000\000\000\000\
\000\000\171\001\000\000\171\001\000\000\000\000\000\000\000\000\
\000\000\171\001\171\001\000\000\000\000\000\000\171\001\171\001\
\150\001\171\001\000\000\000\000\000\000\000\000\000\000\000\000\
\171\001\000\000\171\001\171\001\171\001\171\001\000\000\171\001\
\171\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\171\001\171\001\
\171\001\171\001\171\001\171\001\171\001\171\001\171\001\000\000\
\000\000\000\000\000\000\000\000\171\001\171\001\171\001\000\000\
\000\000\171\001\171\001\000\000\000\000\171\001\171\001\171\001\
\171\001\171\001\000\000\000\000\171\001\171\001\000\000\171\001\
\171\001\171\001\000\000\171\001\000\000\171\001\000\000\000\000\
\000\000\171\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\171\001\171\001\000\000\171\001\171\001\000\000\171\001\
\000\000\000\000\104\000\104\000\104\000\000\000\104\000\000\000\
\000\000\000\000\000\000\000\000\000\000\104\000\104\000\104\000\
\000\000\104\000\000\000\000\000\104\000\000\000\000\000\000\000\
\000\000\104\000\000\000\104\000\104\000\000\000\000\000\000\000\
\000\000\000\000\000\000\104\000\000\000\104\000\104\000\000\000\
\000\000\104\000\104\000\000\000\000\000\000\000\104\000\000\000\
\000\000\000\000\000\000\000\000\104\000\000\000\104\000\104\000\
\104\000\104\000\104\000\104\000\000\000\000\000\000\000\000\000\
\000\000\000\000\104\000\000\000\104\000\000\000\000\000\000\000\
\000\000\000\000\104\000\104\000\000\000\000\000\000\000\104\000\
\104\000\149\001\104\000\000\000\000\000\000\000\000\000\000\000\
\000\000\104\000\000\000\104\000\104\000\104\000\104\000\000\000\
\104\000\104\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\104\000\
\104\000\104\000\104\000\104\000\104\000\104\000\104\000\104\000\
\000\000\000\000\000\000\000\000\000\000\104\000\104\000\104\000\
\000\000\000\000\104\000\104\000\000\000\000\000\104\000\104\000\
\104\000\104\000\104\000\000\000\000\000\104\000\104\000\000\000\
\104\000\104\000\104\000\000\000\104\000\000\000\104\000\000\000\
\000\000\000\000\104\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\104\000\104\000\000\000\104\000\104\000\000\000\
\104\000\000\000\150\001\150\001\150\001\000\000\150\001\000\000\
\000\000\000\000\000\000\000\000\000\000\150\001\150\001\150\001\
\000\000\150\001\000\000\000\000\150\001\000\000\000\000\000\000\
\000\000\000\000\000\000\150\001\150\001\000\000\000\000\000\000\
\000\000\000\000\000\000\150\001\000\000\150\001\150\001\000\000\
\000\000\150\001\150\001\000\000\000\000\000\000\150\001\000\000\
\000\000\000\000\000\000\000\000\150\001\000\000\150\001\150\001\
\150\001\150\001\150\001\150\001\000\000\000\000\000\000\000\000\
\000\000\000\000\150\001\133\001\150\001\000\000\000\000\000\000\
\000\000\000\000\150\001\150\001\000\000\000\000\000\000\150\001\
\150\001\000\000\150\001\000\000\000\000\000\000\000\000\000\000\
\000\000\150\001\000\000\150\001\150\001\150\001\150\001\000\000\
\150\001\150\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\150\001\
\150\001\150\001\150\001\150\001\150\001\150\001\150\001\150\001\
\000\000\000\000\000\000\000\000\000\000\150\001\150\001\150\001\
\000\000\000\000\150\001\150\001\000\000\000\000\150\001\150\001\
\150\001\150\001\150\001\000\000\000\000\150\001\150\001\000\000\
\150\001\150\001\150\001\000\000\150\001\000\000\150\001\000\000\
\000\000\000\000\150\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\150\001\150\001\000\000\150\001\150\001\000\000\
\150\001\000\000\000\000\149\001\149\001\149\001\000\000\149\001\
\000\000\000\000\000\000\000\000\000\000\000\000\149\001\149\001\
\149\001\000\000\149\001\000\000\000\000\149\001\000\000\000\000\
\000\000\000\000\000\000\000\000\149\001\149\001\000\000\000\000\
\000\000\000\000\000\000\000\000\149\001\000\000\149\001\149\001\
\000\000\000\000\149\001\149\001\000\000\000\000\000\000\149\001\
\000\000\000\000\000\000\000\000\000\000\149\001\000\000\149\001\
\149\001\149\001\149\001\149\001\149\001\000\000\000\000\000\000\
\000\000\000\000\125\001\149\001\000\000\149\001\000\000\000\000\
\000\000\000\000\000\000\149\001\149\001\000\000\000\000\000\000\
\149\001\149\001\000\000\149\001\000\000\000\000\000\000\000\000\
\000\000\000\000\149\001\000\000\149\001\149\001\149\001\149\001\
\000\000\149\001\149\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\149\001\149\001\149\001\149\001\149\001\149\001\149\001\149\001\
\149\001\000\000\000\000\000\000\000\000\000\000\149\001\149\001\
\149\001\000\000\000\000\149\001\149\001\000\000\000\000\149\001\
\149\001\149\001\149\001\149\001\000\000\000\000\149\001\149\001\
\000\000\149\001\149\001\149\001\000\000\149\001\000\000\149\001\
\000\000\000\000\000\000\149\001\000\000\133\001\133\001\133\001\
\000\000\133\001\000\000\149\001\149\001\000\000\149\001\149\001\
\133\001\149\001\133\001\000\000\133\001\000\000\000\000\133\001\
\000\000\000\000\000\000\000\000\000\000\000\000\133\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\133\001\000\000\
\133\001\133\001\000\000\000\000\133\001\133\001\000\000\000\000\
\000\000\133\001\000\000\000\000\000\000\000\000\000\000\133\001\
\000\000\133\001\133\001\133\001\133\001\133\001\133\001\000\000\
\000\000\000\000\000\000\000\000\123\001\133\001\000\000\133\001\
\000\000\000\000\000\000\000\000\000\000\133\001\133\001\000\000\
\000\000\000\000\133\001\133\001\000\000\133\001\000\000\000\000\
\000\000\000\000\000\000\000\000\133\001\000\000\133\001\133\001\
\133\001\133\001\000\000\133\001\133\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\133\001\133\001\133\001\133\001\133\001\133\001\
\133\001\133\001\133\001\000\000\000\000\000\000\000\000\000\000\
\000\000\133\001\133\001\000\000\000\000\133\001\133\001\000\000\
\000\000\133\001\133\001\133\001\133\001\133\001\000\000\000\000\
\133\001\000\000\000\000\133\001\133\001\133\001\000\000\133\001\
\000\000\133\001\000\000\000\000\000\000\133\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\133\001\133\001\000\000\
\133\001\133\001\000\000\133\001\125\001\125\001\125\001\000\000\
\125\001\000\000\000\000\000\000\000\000\000\000\000\000\125\001\
\000\000\125\001\000\000\125\001\000\000\000\000\125\001\000\000\
\000\000\000\000\000\000\000\000\000\000\125\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\125\001\000\000\125\001\
\125\001\000\000\000\000\125\001\125\001\000\000\000\000\000\000\
\125\001\000\000\000\000\000\000\000\000\000\000\125\001\000\000\
\000\000\125\001\125\001\125\001\125\001\125\001\000\000\000\000\
\000\000\000\000\000\000\116\001\125\001\000\000\125\001\000\000\
\000\000\000\000\000\000\000\000\125\001\125\001\000\000\000\000\
\000\000\125\001\125\001\000\000\125\001\000\000\000\000\000\000\
\000\000\000\000\000\000\125\001\000\000\125\001\125\001\125\001\
\125\001\000\000\125\001\125\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\125\001\125\001\125\001\125\001\125\001\125\001\125\001\
\125\001\125\001\000\000\000\000\000\000\000\000\000\000\000\000\
\125\001\125\001\000\000\000\000\125\001\125\001\000\000\000\000\
\125\001\125\001\125\001\125\001\125\001\000\000\000\000\125\001\
\000\000\000\000\125\001\125\001\125\001\000\000\125\001\000\000\
\125\001\000\000\000\000\000\000\125\001\000\000\123\001\123\001\
\123\001\000\000\123\001\000\000\125\001\125\001\000\000\125\001\
\125\001\123\001\125\001\123\001\000\000\123\001\000\000\000\000\
\123\001\000\000\000\000\000\000\000\000\000\000\000\000\123\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\123\001\
\000\000\123\001\123\001\000\000\000\000\123\001\123\001\000\000\
\000\000\000\000\123\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\123\001\123\001\123\001\123\001\123\001\
\000\000\000\000\000\000\000\000\000\000\058\000\123\001\000\000\
\123\001\000\000\000\000\000\000\000\000\000\000\123\001\123\001\
\000\000\000\000\000\000\123\001\123\001\000\000\123\001\000\000\
\000\000\000\000\000\000\000\000\000\000\123\001\000\000\123\001\
\123\001\123\001\123\001\000\000\123\001\123\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\123\001\123\001\123\001\123\001\123\001\
\123\001\123\001\123\001\123\001\000\000\000\000\000\000\000\000\
\000\000\000\000\123\001\123\001\000\000\000\000\123\001\123\001\
\000\000\000\000\123\001\123\001\123\001\123\001\123\001\000\000\
\000\000\123\001\000\000\000\000\123\001\123\001\123\001\000\000\
\123\001\000\000\123\001\000\000\000\000\000\000\123\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\123\001\123\001\
\000\000\123\001\123\001\000\000\123\001\116\001\116\001\116\001\
\000\000\116\001\000\000\000\000\000\000\000\000\000\000\000\000\
\116\001\000\000\116\001\000\000\000\000\000\000\000\000\116\001\
\000\000\000\000\000\000\000\000\000\000\000\000\116\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\116\001\000\000\
\116\001\116\001\000\000\000\000\000\000\116\001\000\000\000\000\
\000\000\116\001\000\000\000\000\000\000\000\000\000\000\005\000\
\000\000\000\000\116\001\116\001\116\001\116\001\116\001\000\000\
\000\000\000\000\000\000\000\000\000\000\116\001\000\000\116\001\
\000\000\000\000\000\000\000\000\000\000\116\001\116\001\000\000\
\000\000\000\000\116\001\116\001\000\000\116\001\000\000\000\000\
\000\000\000\000\000\000\000\000\116\001\000\000\116\001\116\001\
\116\001\116\001\000\000\116\001\116\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\116\001\116\001\116\001\116\001\116\001\116\001\
\116\001\116\001\116\001\000\000\000\000\000\000\000\000\000\000\
\000\000\116\001\116\001\000\000\000\000\116\001\116\001\000\000\
\000\000\116\001\116\001\116\001\116\001\116\001\000\000\000\000\
\116\001\000\000\000\000\116\001\000\000\116\001\000\000\116\001\
\000\000\116\001\000\000\000\000\000\000\116\001\000\000\058\000\
\058\000\058\000\000\000\058\000\000\000\116\001\116\001\000\000\
\116\001\116\001\058\000\116\001\058\000\000\000\000\000\000\000\
\000\000\058\000\000\000\000\000\000\000\000\000\000\000\000\000\
\058\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\058\000\000\000\058\000\058\000\000\000\000\000\000\000\058\000\
\000\000\000\000\000\000\058\000\000\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\058\000\058\000\058\000\058\000\
\058\000\000\000\000\000\000\000\000\000\000\000\000\000\058\000\
\000\000\058\000\000\000\000\000\000\000\000\000\000\000\058\000\
\058\000\000\000\000\000\000\000\058\000\058\000\000\000\058\000\
\000\000\000\000\000\000\000\000\000\000\000\000\058\000\000\000\
\058\000\058\000\058\000\058\000\000\000\058\000\058\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\058\000\058\000\058\000\058\000\
\058\000\058\000\058\000\058\000\058\000\000\000\000\000\000\000\
\000\000\000\000\000\000\058\000\058\000\000\000\000\000\058\000\
\058\000\000\000\000\000\058\000\058\000\058\000\058\000\058\000\
\000\000\000\000\058\000\000\000\000\000\058\000\000\000\058\000\
\000\000\058\000\000\000\058\000\000\000\000\000\000\000\058\000\
\000\000\005\000\005\000\005\000\000\000\005\000\000\000\058\000\
\058\000\000\000\058\000\058\000\005\000\058\000\005\000\000\000\
\000\000\000\000\000\000\005\000\000\000\000\000\000\000\000\000\
\000\000\000\000\005\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\005\000\000\000\005\000\005\000\000\000\000\000\
\000\000\005\000\000\000\000\000\000\000\005\000\000\000\000\000\
\000\000\000\000\000\000\019\000\000\000\000\000\005\000\005\000\
\005\000\005\000\005\000\000\000\000\000\000\000\000\000\000\000\
\000\000\005\000\000\000\005\000\000\000\000\000\000\000\000\000\
\000\000\005\000\005\000\000\000\000\000\000\000\005\000\005\000\
\000\000\005\000\000\000\000\000\000\000\000\000\000\000\000\000\
\005\000\000\000\005\000\005\000\005\000\005\000\000\000\005\000\
\005\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\005\000\005\000\
\005\000\005\000\005\000\005\000\005\000\005\000\005\000\000\000\
\000\000\000\000\000\000\000\000\000\000\005\000\005\000\000\000\
\000\000\005\000\005\000\000\000\000\000\005\000\005\000\005\000\
\005\000\005\000\000\000\000\000\005\000\000\000\000\000\005\000\
\000\000\005\000\000\000\005\000\000\000\005\000\000\000\000\000\
\000\000\005\000\000\000\078\000\078\000\078\000\000\000\078\000\
\000\000\005\000\005\000\000\000\005\000\005\000\078\000\005\000\
\078\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\078\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\078\000\000\000\078\000\078\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\017\000\000\000\000\000\
\078\000\078\000\078\000\078\000\078\000\000\000\000\000\000\000\
\000\000\000\000\000\000\078\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\078\000\078\000\000\000\000\000\000\000\
\078\000\078\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\078\000\000\000\078\000\078\000\078\000\078\000\
\000\000\078\000\078\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\
\078\000\000\000\000\000\000\000\000\000\000\000\000\000\078\000\
\078\000\000\000\000\000\078\000\078\000\000\000\000\000\078\000\
\078\000\078\000\078\000\078\000\000\000\000\000\078\000\000\000\
\000\000\078\000\000\000\078\000\000\000\078\000\000\000\078\000\
\000\000\000\000\000\000\078\000\000\000\019\000\019\000\019\000\
\000\000\019\000\000\000\078\000\078\000\000\000\078\000\078\000\
\019\000\078\000\019\000\000\000\000\000\000\000\000\000\019\000\
\000\000\000\000\000\000\000\000\000\000\000\000\019\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\019\000\000\000\
\019\000\019\000\000\000\000\000\000\000\019\000\000\000\000\000\
\000\000\019\000\000\000\000\000\000\000\000\000\000\000\070\002\
\000\000\000\000\019\000\019\000\019\000\019\000\019\000\000\000\
\000\000\000\000\000\000\000\000\000\000\019\000\000\000\019\000\
\000\000\000\000\000\000\000\000\000\000\019\000\019\000\000\000\
\000\000\000\000\019\000\019\000\000\000\019\000\000\000\000\000\
\000\000\000\000\000\000\000\000\019\000\000\000\019\000\019\000\
\019\000\019\000\000\000\019\000\019\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\019\000\019\000\019\000\019\000\019\000\019\000\
\019\000\019\000\019\000\000\000\000\000\000\000\000\000\000\000\
\000\000\019\000\019\000\000\000\000\000\019\000\019\000\000\000\
\000\000\019\000\019\000\019\000\019\000\019\000\000\000\000\000\
\019\000\000\000\000\000\019\000\000\000\019\000\000\000\019\000\
\000\000\019\000\000\000\000\000\000\000\019\000\000\000\017\000\
\017\000\017\000\000\000\017\000\000\000\019\000\019\000\000\000\
\019\000\019\000\017\000\019\000\017\000\000\000\000\000\000\000\
\000\000\017\000\000\000\000\000\000\000\000\000\000\000\000\000\
\017\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\017\000\000\000\017\000\017\000\000\000\000\000\000\000\017\000\
\000\000\000\000\000\000\017\000\088\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\017\000\017\000\017\000\017\000\
\017\000\000\000\000\000\000\000\000\000\000\000\000\000\017\000\
\000\000\017\000\000\000\000\000\000\000\000\000\000\000\017\000\
\017\000\000\000\000\000\000\000\017\000\017\000\000\000\017\000\
\000\000\000\000\000\000\000\000\000\000\000\000\017\000\000\000\
\017\000\017\000\017\000\017\000\000\000\017\000\017\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\017\000\017\000\000\000\000\000\000\000\
\000\000\000\000\000\000\017\000\017\000\000\000\000\000\017\000\
\017\000\000\000\000\000\017\000\017\000\017\000\017\000\017\000\
\000\000\000\000\017\000\000\000\000\000\017\000\000\000\017\000\
\000\000\017\000\000\000\017\000\000\000\000\000\000\000\017\000\
\000\000\070\002\070\002\070\002\000\000\070\002\000\000\017\000\
\017\000\000\000\017\000\017\000\070\002\017\000\070\002\000\000\
\000\000\000\000\000\000\070\002\000\000\000\000\000\000\000\000\
\000\000\000\000\070\002\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\070\002\000\000\070\002\070\002\000\000\000\000\
\000\000\070\002\000\000\000\000\000\000\070\002\082\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\070\002\070\002\
\070\002\070\002\070\002\000\000\000\000\000\000\000\000\000\000\
\000\000\070\002\000\000\070\002\000\000\000\000\000\000\000\000\
\000\000\070\002\070\002\000\000\000\000\000\000\070\002\070\002\
\000\000\070\002\000\000\000\000\000\000\000\000\000\000\000\000\
\070\002\000\000\070\002\070\002\070\002\070\002\000\000\070\002\
\070\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\070\002\070\002\
\070\002\070\002\070\002\070\002\070\002\070\002\070\002\000\000\
\000\000\000\000\000\000\000\000\000\000\070\002\070\002\000\000\
\000\000\070\002\070\002\000\000\000\000\070\002\070\002\070\002\
\070\002\070\002\000\000\000\000\070\002\000\000\000\000\070\002\
\000\000\070\002\000\000\070\002\000\000\070\002\088\001\088\001\
\088\001\070\002\088\001\000\000\000\000\000\000\000\000\000\000\
\000\000\070\002\070\002\088\001\070\002\070\002\000\000\070\002\
\088\001\000\000\000\000\000\000\000\000\000\000\000\000\088\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\088\001\
\000\000\088\001\088\001\000\000\000\000\000\000\088\001\000\000\
\000\000\000\000\088\001\083\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\088\001\088\001\088\001\088\001\088\001\
\000\000\000\000\000\000\000\000\000\000\000\000\088\001\000\000\
\088\001\000\000\000\000\000\000\000\000\000\000\088\001\088\001\
\000\000\000\000\000\000\088\001\088\001\000\000\088\001\000\000\
\000\000\000\000\000\000\000\000\000\000\088\001\000\000\088\001\
\088\001\088\001\088\001\000\000\088\001\088\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\088\001\088\001\088\001\088\001\088\001\
\088\001\088\001\088\001\088\001\000\000\000\000\000\000\000\000\
\000\000\000\000\088\001\088\001\000\000\000\000\088\001\088\001\
\000\000\000\000\088\001\088\001\088\001\088\001\088\001\000\000\
\000\000\088\001\000\000\000\000\088\001\000\000\088\001\000\000\
\088\001\000\000\088\001\000\000\000\000\000\000\088\001\000\000\
\082\001\082\001\082\001\000\000\082\001\000\000\088\001\088\001\
\000\000\088\001\088\001\000\000\088\001\082\001\000\000\000\000\
\000\000\000\000\082\001\000\000\000\000\000\000\000\000\000\000\
\000\000\082\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\082\001\000\000\082\001\082\001\000\000\000\000\000\000\
\082\001\000\000\000\000\000\000\082\001\081\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\082\001\082\001\082\001\
\082\001\082\001\000\000\000\000\000\000\000\000\000\000\000\000\
\082\001\000\000\082\001\000\000\000\000\000\000\000\000\000\000\
\082\001\082\001\000\000\000\000\000\000\082\001\082\001\000\000\
\082\001\000\000\000\000\000\000\000\000\000\000\000\000\082\001\
\000\000\082\001\082\001\082\001\082\001\000\000\082\001\082\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\082\001\082\001\082\001\
\082\001\082\001\082\001\082\001\082\001\082\001\000\000\000\000\
\000\000\000\000\000\000\000\000\082\001\082\001\000\000\000\000\
\082\001\082\001\000\000\000\000\082\001\082\001\082\001\082\001\
\082\001\000\000\000\000\082\001\000\000\000\000\082\001\000\000\
\082\001\000\000\082\001\000\000\082\001\083\001\083\001\083\001\
\082\001\083\001\000\000\000\000\000\000\000\000\000\000\000\000\
\082\001\082\001\083\001\082\001\082\001\000\000\082\001\083\001\
\000\000\000\000\000\000\000\000\000\000\000\000\083\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\083\001\000\000\
\083\001\083\001\000\000\000\000\000\000\083\001\000\000\000\000\
\000\000\083\001\000\000\000\000\000\000\000\000\000\000\062\001\
\000\000\000\000\083\001\083\001\083\001\083\001\083\001\000\000\
\000\000\000\000\000\000\000\000\000\000\083\001\000\000\083\001\
\000\000\000\000\000\000\000\000\000\000\083\001\083\001\000\000\
\000\000\000\000\083\001\083\001\000\000\083\001\000\000\000\000\
\000\000\000\000\000\000\000\000\083\001\000\000\083\001\083\001\
\083\001\083\001\000\000\083\001\083\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\083\001\083\001\083\001\083\001\083\001\083\001\
\083\001\083\001\083\001\000\000\000\000\000\000\000\000\000\000\
\000\000\083\001\083\001\000\000\000\000\083\001\083\001\000\000\
\000\000\083\001\083\001\083\001\083\001\083\001\000\000\000\000\
\083\001\000\000\000\000\083\001\000\000\083\001\000\000\083\001\
\000\000\083\001\000\000\000\000\000\000\083\001\000\000\081\001\
\081\001\081\001\000\000\081\001\000\000\083\001\083\001\000\000\
\083\001\083\001\081\001\083\001\081\001\000\000\000\000\000\000\
\000\000\081\001\000\000\000\000\000\000\000\000\000\000\000\000\
\081\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\081\001\000\000\000\000\081\001\000\000\000\000\000\000\081\001\
\000\000\000\000\000\000\081\001\000\000\224\001\000\000\000\000\
\000\000\000\000\000\000\000\000\081\001\081\001\081\001\081\001\
\081\001\000\000\000\000\000\000\000\000\000\000\000\000\081\001\
\000\000\081\001\000\000\000\000\000\000\000\000\000\000\081\001\
\081\001\000\000\000\000\000\000\081\001\081\001\000\000\081\001\
\000\000\000\000\000\000\000\000\000\000\000\000\081\001\000\000\
\081\001\081\001\081\001\081\001\000\000\081\001\081\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\081\001\081\001\081\001\081\001\
\081\001\081\001\081\001\081\001\081\001\000\000\000\000\000\000\
\000\000\000\000\000\000\081\001\081\001\000\000\000\000\081\001\
\081\001\000\000\000\000\081\001\081\001\081\001\081\001\081\001\
\000\000\000\000\081\001\000\000\000\000\081\001\000\000\081\001\
\000\000\081\001\000\000\081\001\000\000\000\000\000\000\081\001\
\000\000\062\001\062\001\062\001\000\000\062\001\000\000\081\001\
\081\001\000\000\081\001\081\001\062\001\081\001\062\001\000\000\
\000\000\000\000\000\000\062\001\000\000\000\000\000\000\000\000\
\000\000\000\000\062\001\000\000\000\000\000\000\053\000\000\000\
\000\000\000\000\062\001\000\000\062\001\062\001\000\000\000\000\
\000\000\062\001\000\000\000\000\000\000\062\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\062\001\062\001\
\062\001\062\001\062\001\000\000\000\000\000\000\000\000\000\000\
\000\000\062\001\000\000\062\001\000\000\000\000\000\000\000\000\
\000\000\062\001\062\001\000\000\000\000\000\000\062\001\062\001\
\000\000\062\001\000\000\000\000\000\000\000\000\000\000\000\000\
\062\001\000\000\062\001\062\001\062\001\062\001\000\000\062\001\
\062\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\062\001\062\001\
\062\001\062\001\062\001\062\001\062\001\062\001\062\001\000\000\
\000\000\000\000\000\000\000\000\000\000\062\001\062\001\000\000\
\000\000\062\001\062\001\000\000\000\000\062\001\062\001\062\001\
\000\000\000\000\000\000\000\000\062\001\000\000\000\000\062\001\
\000\000\062\001\000\000\062\001\000\000\062\001\000\000\224\001\
\224\001\062\001\000\000\224\001\000\000\000\000\000\000\000\000\
\000\000\062\001\062\001\224\001\062\001\062\001\224\001\062\001\
\000\000\224\001\000\000\000\000\000\000\000\000\000\000\000\000\
\224\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\224\001\000\000\000\000\000\000\036\001\224\001\000\000\224\001\
\000\000\000\000\000\000\224\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\224\001\224\001\224\001\224\001\
\224\001\000\000\000\000\000\000\000\000\000\000\000\000\224\001\
\000\000\224\001\000\000\000\000\000\000\000\000\000\000\224\001\
\224\001\000\000\000\000\000\000\000\000\224\001\000\000\224\001\
\000\000\000\000\000\000\000\000\000\000\000\000\224\001\000\000\
\224\001\224\001\224\001\224\001\000\000\224\001\224\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\224\001\224\001\224\001\224\001\
\224\001\224\001\224\001\224\001\224\001\000\000\000\000\000\000\
\000\000\000\000\224\001\224\001\224\001\000\000\000\000\224\001\
\224\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\053\000\000\000\224\001\000\000\053\000\000\000\000\000\224\001\
\000\000\000\000\000\000\224\001\000\000\000\000\000\000\224\001\
\000\000\000\000\053\000\053\000\000\000\000\000\000\000\224\001\
\224\001\053\000\224\001\224\001\053\000\224\001\000\000\000\000\
\000\000\053\000\000\000\000\000\000\000\000\000\053\000\000\000\
\053\000\000\000\000\000\000\000\053\000\000\000\000\000\000\000\
\000\000\000\000\053\000\000\000\000\000\000\000\053\000\053\000\
\053\000\053\000\000\000\000\000\000\000\000\000\000\000\000\000\
\053\000\000\000\053\000\000\000\000\000\000\000\000\000\000\000\
\053\000\053\000\000\000\000\000\000\000\000\000\053\000\000\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\053\000\
\000\000\053\000\053\000\053\000\053\000\000\000\053\000\053\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\053\000\053\000\053\000\
\053\000\053\000\053\000\053\000\053\000\053\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\053\000\053\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\053\000\000\000\000\000\000\000\000\000\
\053\000\000\000\000\000\000\000\053\000\000\000\000\000\000\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\036\001\
\053\000\053\000\036\001\053\000\053\000\000\000\053\000\036\001\
\021\000\036\001\000\000\000\000\000\000\000\000\000\000\036\001\
\036\001\036\001\036\001\000\000\036\001\000\000\000\000\036\001\
\000\000\036\001\000\000\000\000\000\000\000\000\000\000\036\001\
\000\000\000\000\000\000\000\000\036\001\000\000\036\001\000\000\
\036\001\000\000\036\001\000\000\000\000\036\001\036\001\000\000\
\000\000\000\000\000\000\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\000\000\036\001\000\000\
\036\001\036\001\036\001\000\000\000\000\036\001\036\001\036\001\
\036\001\000\000\036\001\036\001\036\001\000\000\036\001\000\000\
\036\001\036\001\000\000\000\000\000\000\036\001\036\001\036\001\
\036\001\036\001\036\001\000\000\036\001\036\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\036\001\036\001\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\000\000\000\000\000\000\000\000\
\036\001\036\001\036\001\036\001\036\001\000\000\036\001\036\001\
\036\001\000\000\025\000\000\000\036\001\036\001\000\000\036\001\
\036\001\036\001\036\001\036\001\000\000\036\001\036\001\000\000\
\000\000\000\000\036\001\036\001\053\000\036\001\036\001\000\000\
\053\000\036\001\036\001\036\001\036\001\036\001\036\001\036\001\
\000\000\036\001\036\001\036\001\000\000\000\000\053\000\053\000\
\000\000\000\000\000\000\000\000\000\000\053\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\053\000\000\000\000\000\
\000\000\000\000\053\000\000\000\053\000\000\000\000\000\000\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\053\000\053\000\053\000\053\000\000\000\000\000\
\000\000\000\000\000\000\000\000\053\000\000\000\053\000\000\000\
\000\000\000\000\000\000\000\000\053\000\053\000\000\000\000\000\
\000\000\000\000\053\000\000\000\053\000\000\000\000\000\000\000\
\000\000\000\000\000\000\053\000\000\000\053\000\053\000\053\000\
\053\000\000\000\053\000\053\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\003\000\000\000\000\000\000\000\
\000\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\
\053\000\053\000\021\000\000\000\000\000\000\000\021\000\000\000\
\000\000\000\000\000\000\000\000\053\000\053\000\000\000\000\000\
\000\000\000\000\000\000\000\000\021\000\000\000\000\000\053\000\
\000\000\000\000\000\000\021\000\053\000\000\000\000\000\000\000\
\053\000\000\000\000\000\021\000\053\000\000\000\000\000\000\000\
\021\000\000\000\021\000\000\000\053\000\053\000\021\000\053\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\021\000\021\000\021\000\021\000\000\000\000\000\000\000\000\000\
\000\000\000\000\021\000\000\000\021\000\000\000\000\000\000\000\
\000\000\000\000\021\000\021\000\000\000\000\000\000\000\000\000\
\021\000\000\000\021\000\000\000\000\000\000\000\000\000\000\000\
\000\000\021\000\165\000\021\000\021\000\021\000\021\000\000\000\
\021\000\021\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\000\000\000\000\000\000\000\000\025\000\000\000\000\000\000\000\
\025\000\000\000\021\000\021\000\000\000\000\000\000\000\025\000\
\000\000\000\000\000\000\000\000\000\000\021\000\025\000\000\000\
\000\000\000\000\021\000\000\000\000\000\025\000\021\000\000\000\
\000\000\000\000\021\000\000\000\000\000\025\000\000\000\000\000\
\000\000\000\000\021\000\021\000\025\000\021\000\021\000\000\000\
\025\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\025\000\025\000\025\000\025\000\000\000\000\000\
\000\000\000\000\000\000\000\000\025\000\000\000\025\000\064\000\
\000\000\000\000\000\000\000\000\025\000\025\000\000\000\000\000\
\000\000\000\000\025\000\000\000\025\000\000\000\000\000\000\000\
\000\000\000\000\000\000\025\000\000\000\025\000\025\000\025\000\
\025\000\000\000\025\000\025\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\
\025\000\025\000\000\000\000\000\000\000\000\000\003\000\000\000\
\000\000\000\000\003\000\000\000\025\000\025\000\000\000\000\000\
\000\000\003\000\000\000\000\000\000\000\000\000\000\000\025\000\
\003\000\000\000\000\000\000\000\025\000\000\000\000\000\003\000\
\025\000\000\000\000\000\000\000\025\000\000\000\000\000\003\000\
\000\000\000\000\000\000\000\000\025\000\025\000\003\000\025\000\
\025\000\000\000\003\000\000\000\033\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\003\000\003\000\003\000\003\000\
\000\000\000\000\000\000\000\000\000\000\000\000\003\000\000\000\
\003\000\000\000\000\000\000\000\000\000\000\000\003\000\003\000\
\000\000\000\000\000\000\000\000\003\000\000\000\003\000\000\000\
\000\000\000\000\000\000\000\000\000\000\003\000\000\000\003\000\
\003\000\003\000\003\000\000\000\003\000\003\000\000\000\000\000\
\000\000\000\000\000\000\000\000\165\000\000\000\000\000\000\000\
\165\000\000\000\000\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\000\000\000\000\165\000\000\000\
\000\000\000\000\000\000\000\000\000\000\165\000\003\000\003\000\
\000\000\000\000\000\000\000\000\000\000\165\000\000\000\000\000\
\000\000\003\000\000\000\000\000\165\000\000\000\003\000\000\000\
\165\000\001\000\003\000\000\000\000\000\000\000\003\000\000\000\
\000\000\000\000\165\000\165\000\165\000\165\000\003\000\003\000\
\000\000\003\000\003\000\000\000\165\000\000\000\165\000\000\000\
\000\000\000\000\000\000\000\000\165\000\165\000\000\000\000\000\
\000\000\000\000\165\000\000\000\165\000\000\000\000\000\000\000\
\000\000\000\000\000\000\165\000\000\000\165\000\165\000\165\000\
\165\000\000\000\165\000\165\000\000\000\000\000\000\000\000\000\
\000\000\064\000\000\000\000\000\000\000\064\000\000\000\000\000\
\000\000\165\000\165\000\165\000\165\000\165\000\165\000\165\000\
\165\000\165\000\000\000\064\000\000\000\000\000\000\000\000\000\
\000\000\000\000\064\000\000\000\165\000\165\000\000\000\000\000\
\000\000\000\000\064\000\000\000\000\000\000\000\000\000\165\000\
\000\000\064\000\000\000\000\000\165\000\064\000\036\001\000\000\
\165\000\000\000\000\000\000\000\165\000\000\000\000\000\064\000\
\064\000\064\000\064\000\000\000\165\000\165\000\000\000\165\000\
\165\000\064\000\000\000\064\000\000\000\000\000\000\000\000\000\
\000\000\064\000\064\000\000\000\000\000\000\000\000\000\064\000\
\000\000\064\000\000\000\000\000\000\000\000\000\000\000\000\000\
\064\000\000\000\064\000\064\000\064\000\064\000\000\000\064\000\
\064\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\033\000\000\000\000\000\000\000\064\000\064\000\
\064\000\064\000\064\000\064\000\064\000\064\000\064\000\000\000\
\033\000\000\000\000\000\000\000\000\000\000\000\000\000\033\000\
\000\000\064\000\064\000\000\000\000\000\000\000\000\000\033\000\
\000\000\000\000\000\000\000\000\064\000\000\000\033\000\000\000\
\000\000\064\000\033\000\094\000\000\000\064\000\000\000\000\000\
\000\000\064\000\000\000\000\000\033\000\033\000\033\000\033\000\
\000\000\064\000\064\000\000\000\064\000\064\000\033\000\000\000\
\033\000\000\000\000\000\000\000\000\000\000\000\033\000\033\000\
\000\000\000\000\000\000\000\000\033\000\000\000\033\000\000\000\
\000\000\000\000\000\000\000\000\000\000\033\000\000\000\033\000\
\033\000\033\000\033\000\000\000\033\000\033\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\
\000\000\000\000\000\000\033\000\033\000\033\000\033\000\033\000\
\033\000\033\000\033\000\033\000\000\000\001\000\000\000\000\000\
\000\000\000\000\000\000\000\000\001\000\000\000\033\000\033\000\
\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\
\000\000\033\000\000\000\001\000\000\000\000\000\033\000\001\000\
\092\000\000\000\033\000\000\000\000\000\000\000\033\000\000\000\
\000\000\001\000\001\000\001\000\001\000\000\000\033\000\033\000\
\000\000\033\000\033\000\001\000\000\000\001\000\000\000\000\000\
\000\000\000\000\000\000\001\000\001\000\000\000\000\000\000\000\
\000\000\001\000\000\000\001\000\000\000\000\000\000\000\000\000\
\000\000\000\000\001\000\000\000\001\000\001\000\001\000\001\000\
\000\000\001\000\001\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\036\001\000\000\000\000\000\000\
\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\
\001\000\000\000\036\001\000\000\000\000\000\000\000\000\000\000\
\000\000\036\001\000\000\001\000\001\000\000\000\000\000\000\000\
\000\000\036\001\000\000\000\000\000\000\000\000\001\000\000\000\
\036\001\000\000\000\000\001\000\036\001\000\000\000\000\001\000\
\000\000\000\000\000\000\001\000\000\000\000\000\036\001\036\001\
\036\001\036\001\000\000\001\000\001\000\000\000\001\000\001\000\
\036\001\000\000\036\001\000\000\000\000\000\000\000\000\000\000\
\036\001\036\001\000\000\000\000\000\000\000\000\036\001\000\000\
\036\001\000\000\000\000\000\000\000\000\000\000\000\000\036\001\
\000\000\036\001\036\001\036\001\036\001\000\000\036\001\036\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\094\000\000\000\000\000\000\000\036\001\036\001\036\001\
\036\001\036\001\036\001\036\001\036\001\036\001\000\000\094\000\
\000\000\000\000\000\000\000\000\000\000\000\000\094\000\000\000\
\036\001\036\001\000\000\000\000\000\000\000\000\094\000\000\000\
\000\000\000\000\000\000\036\001\000\000\094\000\000\000\000\000\
\036\001\094\000\000\000\000\000\036\001\000\000\000\000\000\000\
\036\001\000\000\000\000\094\000\094\000\094\000\094\000\000\000\
\036\001\036\001\000\000\036\001\036\001\094\000\000\000\094\000\
\000\000\000\000\000\000\000\000\000\000\094\000\094\000\000\000\
\000\000\000\000\000\000\094\000\000\000\094\000\000\000\000\000\
\000\000\000\000\000\000\000\000\094\000\000\000\094\000\094\000\
\094\000\094\000\000\000\094\000\094\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\092\000\000\000\
\000\000\000\000\094\000\094\000\094\000\094\000\094\000\094\000\
\094\000\094\000\094\000\000\000\092\000\000\000\000\000\000\000\
\000\000\000\000\000\000\092\000\000\000\094\000\094\000\000\000\
\000\000\000\000\000\000\092\000\000\000\000\000\000\000\000\000\
\094\000\000\000\092\000\000\000\000\000\094\000\092\000\000\000\
\000\000\094\000\000\000\000\000\000\000\094\000\000\000\000\000\
\092\000\092\000\092\000\092\000\000\000\094\000\094\000\000\000\
\094\000\094\000\092\000\000\000\092\000\000\000\000\000\000\000\
\000\000\000\000\092\000\092\000\000\000\000\000\000\000\000\000\
\092\000\000\000\092\000\000\000\000\000\000\000\000\000\000\000\
\000\000\092\000\000\000\092\000\092\000\092\000\092\000\000\000\
\092\000\092\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\092\000\
\092\000\092\000\092\000\092\000\092\000\092\000\092\000\092\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\092\000\092\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\092\000\000\000\000\000\
\000\000\000\000\092\000\000\000\000\000\000\000\092\000\000\000\
\000\000\000\000\092\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\092\000\092\000\000\000\092\000\092\000\017\001\
\000\000\035\000\036\000\037\000\038\000\039\000\040\000\041\000\
\042\000\000\000\000\000\000\000\043\000\000\000\044\000\045\000\
\000\000\000\000\018\001\019\001\000\000\020\001\046\000\000\000\
\021\001\047\000\000\000\000\000\000\000\000\000\000\000\000\000\
\048\000\000\000\000\000\049\000\022\001\000\000\000\000\050\000\
\051\000\052\000\000\000\053\000\054\000\055\000\056\000\057\000\
\023\001\058\000\024\001\000\000\000\000\000\000\000\000\000\000\
\059\000\060\000\061\000\062\000\063\000\064\000\000\000\000\000\
\000\000\065\000\066\000\000\000\000\000\067\000\011\000\012\000\
\068\000\069\000\070\000\000\000\071\000\025\001\000\000\000\000\
\073\000\074\000\075\000\076\000\077\000\000\000\078\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\026\001\027\001\
\028\001\029\001\030\001\031\001\032\001\033\001\034\001\035\001\
\036\001\080\000\037\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\081\000\000\000\
\082\000\000\000\000\000\000\000\083\000\000\000\000\000\000\000\
\084\000\085\000\000\000\000\000\000\000\000\000\000\000\086\000\
\087\000\000\000\000\000\088\000\000\000\000\000\000\000\089\000\
\000\000\038\001\000\000\091\000\092\000\093\000\000\000\000\000\
\000\000\094\000\095\000\096\000\097\000\098\000\246\001\000\000\
\035\000\036\000\037\000\038\000\039\000\040\000\041\000\042\000\
\000\000\000\000\000\000\043\000\000\000\044\000\045\000\000\000\
\000\000\018\001\019\001\000\000\020\001\046\000\000\000\021\001\
\047\000\000\000\000\000\000\000\000\000\000\000\000\000\048\000\
\000\000\000\000\049\000\022\001\000\000\000\000\050\000\051\000\
\052\000\000\000\053\000\054\000\055\000\056\000\057\000\023\001\
\058\000\024\001\000\000\000\000\000\000\000\000\000\000\059\000\
\060\000\061\000\062\000\063\000\064\000\000\000\000\000\000\000\
\065\000\066\000\000\000\000\000\067\000\011\000\012\000\068\000\
\069\000\070\000\000\000\071\000\247\001\000\000\000\000\073\000\
\074\000\075\000\076\000\077\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\026\001\027\001\028\001\
\029\001\030\001\031\001\032\001\033\001\248\001\035\001\036\001\
\080\000\037\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\081\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\085\000\000\000\000\000\000\000\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\000\000\089\000\000\000\
\249\001\000\000\091\000\092\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\098\000\090\004\000\000\035\000\
\036\000\037\000\038\000\039\000\040\000\041\000\042\000\000\000\
\000\000\000\000\043\000\000\000\044\000\045\000\000\000\000\000\
\018\001\019\001\000\000\020\001\046\000\000\000\021\001\047\000\
\000\000\000\000\000\000\000\000\000\000\000\000\048\000\000\000\
\000\000\049\000\022\001\000\000\000\000\050\000\051\000\052\000\
\000\000\053\000\054\000\055\000\056\000\057\000\023\001\058\000\
\024\001\000\000\000\000\000\000\000\000\000\000\059\000\060\000\
\061\000\062\000\063\000\064\000\000\000\000\000\000\000\065\000\
\066\000\000\000\000\000\067\000\011\000\012\000\068\000\069\000\
\070\000\000\000\071\000\091\004\000\000\000\000\073\000\074\000\
\075\000\076\000\077\000\000\000\078\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\026\001\027\001\028\001\029\001\
\030\001\031\001\032\001\033\001\092\004\035\001\036\001\080\000\
\037\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\081\000\000\000\082\000\000\000\
\000\000\000\000\083\000\000\000\000\000\000\000\084\000\085\000\
\000\000\000\000\000\000\000\000\000\000\086\000\087\000\000\000\
\000\000\088\000\000\000\000\000\214\001\089\000\000\000\093\004\
\000\000\091\000\092\000\093\000\221\000\000\000\000\000\094\000\
\095\000\096\000\097\000\098\000\045\000\000\000\000\000\018\001\
\019\001\000\000\020\001\000\000\000\000\021\001\047\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\022\001\000\000\000\000\000\000\051\000\000\000\000\000\
\000\000\000\000\222\000\145\000\000\000\023\001\000\000\024\001\
\000\000\000\000\000\000\000\000\000\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\223\000\000\000\
\000\000\000\000\225\000\000\000\000\000\068\000\000\000\226\000\
\000\000\000\000\215\001\000\000\000\000\241\000\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\026\001\027\001\028\001\029\001\030\001\
\031\001\032\001\033\001\216\001\035\001\036\001\000\000\037\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\107\003\000\000\000\000\217\001\000\000\
\091\000\000\000\093\000\221\000\000\000\000\000\094\000\095\000\
\096\000\097\000\228\000\045\000\000\000\000\000\018\001\019\001\
\000\000\020\001\000\000\000\000\021\001\047\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\022\001\000\000\000\000\000\000\051\000\000\000\000\000\000\000\
\000\000\222\000\145\000\000\000\023\001\000\000\024\001\000\000\
\000\000\000\000\000\000\000\000\059\000\060\000\061\000\062\000\
\063\000\000\000\000\000\000\000\000\000\223\000\000\000\000\000\
\000\000\225\000\000\000\000\000\068\000\000\000\226\000\000\000\
\000\000\108\003\000\000\000\000\241\000\074\000\000\000\000\000\
\000\000\000\000\078\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\026\001\027\001\028\001\029\001\030\001\031\001\
\032\001\033\001\109\003\035\001\036\001\000\000\037\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\082\000\000\000\000\000\000\000\
\083\000\000\000\000\000\000\000\084\000\000\000\000\000\000\000\
\000\000\000\000\000\000\086\000\087\000\000\000\000\000\088\000\
\000\000\000\000\000\000\000\000\000\000\110\003\000\000\091\000\
\000\000\093\000\000\000\000\000\000\000\094\000\095\000\096\000\
\097\000\228\000\035\000\036\000\037\000\038\000\039\000\040\000\
\041\000\042\000\000\000\000\000\000\000\043\000\000\000\044\000\
\045\000\000\000\000\000\000\000\000\000\000\000\000\000\046\000\
\000\000\000\000\047\000\000\000\000\000\000\000\000\000\000\000\
\000\000\048\000\000\000\000\000\049\000\000\000\000\000\000\000\
\050\000\051\000\052\000\000\000\053\000\054\000\055\000\056\000\
\057\000\000\000\058\000\000\000\000\000\000\000\000\000\000\000\
\000\000\059\000\060\000\061\000\062\000\063\000\064\000\000\000\
\000\000\000\000\065\000\066\000\000\000\000\000\067\000\011\000\
\012\000\068\000\069\000\070\000\000\000\071\000\072\000\000\000\
\000\000\073\000\074\000\075\000\076\000\077\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\081\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\085\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\000\000\
\089\000\000\000\090\000\000\000\091\000\092\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\098\000\112\001\
\112\001\112\001\112\001\112\001\112\001\112\001\112\001\000\000\
\000\000\000\000\112\001\000\000\112\001\112\001\000\000\000\000\
\000\000\000\000\000\000\000\000\112\001\000\000\000\000\112\001\
\000\000\000\000\000\000\000\000\000\000\000\000\112\001\000\000\
\000\000\112\001\000\000\000\000\000\000\112\001\112\001\112\001\
\000\000\112\001\112\001\112\001\112\001\112\001\000\000\112\001\
\000\000\000\000\000\000\000\000\000\000\000\000\112\001\112\001\
\112\001\112\001\112\001\112\001\000\000\000\000\000\000\112\001\
\112\001\000\000\000\000\112\001\112\001\112\001\112\001\112\001\
\112\001\000\000\112\001\112\001\000\000\000\000\112\001\112\001\
\112\001\112\001\112\001\000\000\112\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\112\001\000\000\000\000\112\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\112\001\000\000\112\001\000\000\
\000\000\000\000\112\001\000\000\000\000\000\000\112\001\112\001\
\000\000\000\000\000\000\000\000\000\000\112\001\112\001\000\000\
\000\000\112\001\000\000\000\000\000\000\112\001\000\000\112\001\
\000\000\112\001\112\001\112\001\000\000\000\000\000\000\112\001\
\112\001\112\001\112\001\112\001\034\001\000\000\000\000\000\000\
\034\001\000\000\000\000\034\001\000\000\022\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\034\001\034\001\
\034\001\034\001\034\001\000\000\000\000\000\000\000\000\000\000\
\034\001\000\000\000\000\000\000\034\001\000\000\000\000\000\000\
\000\000\034\001\034\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\034\001\034\001\034\001\034\001\
\034\001\000\000\000\000\000\000\000\000\034\001\000\000\000\000\
\000\000\034\001\000\000\000\000\000\000\034\001\034\001\000\000\
\000\000\000\000\000\000\000\000\034\001\034\001\000\000\000\000\
\000\000\000\000\034\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\034\001\000\000\000\000\034\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\034\001\000\000\034\001\000\000\
\034\001\000\000\000\000\000\000\034\001\000\000\000\000\000\000\
\000\000\034\001\000\000\034\001\034\001\000\000\000\000\034\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\034\001\
\000\000\034\001\000\000\000\000\034\001\034\001\034\001\034\001\
\034\001\034\001\027\000\000\000\034\001\000\000\027\000\000\000\
\034\001\027\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\027\000\027\000\027\000\027\000\
\027\000\000\000\000\000\000\000\000\000\000\000\040\001\000\000\
\000\000\000\000\027\000\000\000\000\000\000\000\000\000\027\000\
\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\027\000\027\000\027\000\027\000\027\000\000\000\
\000\000\000\000\000\000\027\000\000\000\000\000\000\000\027\000\
\000\000\000\000\000\000\027\000\027\000\000\000\000\000\000\000\
\000\000\000\000\027\000\027\000\000\000\000\000\000\000\000\000\
\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\027\000\000\000\000\000\027\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\027\000\000\000\040\001\000\000\027\000\000\000\
\000\000\000\000\027\000\000\000\000\000\000\000\000\000\040\001\
\000\000\027\000\027\000\000\000\000\000\027\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\027\000\000\000\027\000\
\000\000\000\000\000\000\027\000\027\000\027\000\027\000\027\000\
\251\001\000\000\027\000\000\000\251\001\000\000\027\000\251\001\
\000\000\026\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\251\001\251\001\251\001\251\001\000\000\
\000\000\000\000\000\000\000\000\044\001\000\000\000\000\000\000\
\251\001\000\000\000\000\000\000\000\000\251\001\251\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\251\001\251\001\251\001\251\001\251\001\000\000\000\000\000\000\
\000\000\251\001\000\000\000\000\000\000\251\001\000\000\000\000\
\000\000\251\001\251\001\000\000\000\000\000\000\000\000\000\000\
\251\001\251\001\000\000\000\000\000\000\000\000\251\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\251\001\000\000\
\000\000\251\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\251\001\000\000\044\001\000\000\251\001\000\000\000\000\000\000\
\251\001\000\000\000\000\000\000\000\000\044\001\000\000\251\001\
\251\001\000\000\000\000\251\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\251\001\000\000\251\001\000\000\000\000\
\000\000\251\001\251\001\251\001\251\001\251\001\252\001\000\000\
\251\001\000\000\252\001\000\000\251\001\252\001\000\000\027\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\252\001\252\001\252\001\252\001\000\000\000\000\000\000\
\000\000\000\000\045\001\000\000\000\000\000\000\252\001\000\000\
\000\000\000\000\000\000\252\001\252\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\252\001\252\001\
\252\001\252\001\252\001\000\000\000\000\000\000\000\000\252\001\
\000\000\000\000\000\000\252\001\000\000\000\000\000\000\252\001\
\252\001\000\000\000\000\000\000\000\000\000\000\252\001\252\001\
\000\000\000\000\000\000\000\000\252\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\252\001\000\000\000\000\252\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\252\001\000\000\
\045\001\000\000\252\001\000\000\000\000\000\000\252\001\000\000\
\000\000\000\000\000\000\045\001\000\000\252\001\252\001\000\000\
\000\000\252\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\252\001\000\000\252\001\000\000\000\000\000\000\252\001\
\252\001\252\001\252\001\252\001\248\001\000\000\252\001\000\000\
\248\001\000\000\252\001\248\001\000\000\023\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\248\001\
\248\001\248\001\248\001\000\000\000\000\000\000\000\000\000\000\
\041\001\000\000\000\000\000\000\248\001\000\000\000\000\000\000\
\000\000\248\001\248\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\248\001\248\001\248\001\248\001\
\248\001\000\000\000\000\000\000\000\000\248\001\000\000\000\000\
\000\000\248\001\000\000\000\000\000\000\248\001\248\001\000\000\
\000\000\000\000\000\000\000\000\248\001\248\001\000\000\000\000\
\000\000\000\000\248\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\248\001\000\000\000\000\248\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\248\001\000\000\041\001\000\000\
\248\001\000\000\000\000\000\000\248\001\000\000\000\000\000\000\
\000\000\041\001\000\000\248\001\248\001\000\000\000\000\248\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\248\001\
\000\000\248\001\000\000\000\000\000\000\248\001\248\001\248\001\
\248\001\248\001\250\001\000\000\248\001\000\000\250\001\000\000\
\248\001\250\001\000\000\025\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\250\001\250\001\250\001\
\250\001\000\000\000\000\000\000\000\000\000\000\043\001\000\000\
\000\000\000\000\250\001\000\000\000\000\000\000\000\000\250\001\
\250\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\250\001\250\001\250\001\250\001\250\001\000\000\
\000\000\000\000\000\000\250\001\000\000\000\000\000\000\250\001\
\000\000\000\000\000\000\250\001\250\001\000\000\000\000\000\000\
\000\000\000\000\250\001\250\001\000\000\000\000\000\000\000\000\
\250\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\250\001\000\000\000\000\250\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\250\001\000\000\043\001\000\000\250\001\000\000\
\000\000\000\000\250\001\000\000\000\000\000\000\000\000\043\001\
\000\000\250\001\250\001\000\000\000\000\250\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\250\001\000\000\250\001\
\000\000\000\000\000\000\250\001\250\001\250\001\250\001\250\001\
\249\001\000\000\250\001\000\000\249\001\000\000\250\001\249\001\
\000\000\024\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\249\001\249\001\249\001\249\001\000\000\
\000\000\000\000\000\000\000\000\042\001\000\000\000\000\000\000\
\249\001\000\000\000\000\000\000\000\000\249\001\249\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\249\001\249\001\249\001\249\001\249\001\000\000\000\000\000\000\
\000\000\249\001\000\000\000\000\000\000\249\001\000\000\000\000\
\000\000\249\001\249\001\000\000\000\000\000\000\000\000\000\000\
\249\001\249\001\000\000\000\000\000\000\000\000\249\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\249\001\000\000\
\000\000\249\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\249\001\000\000\042\001\000\000\249\001\000\000\000\000\000\000\
\249\001\000\000\000\000\000\000\000\000\042\001\000\000\249\001\
\249\001\000\000\000\000\249\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\249\001\000\000\249\001\000\000\000\000\
\000\000\249\001\249\001\249\001\249\001\249\001\251\001\000\000\
\249\001\000\000\251\001\000\000\249\001\251\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\251\001\251\001\251\001\251\001\000\000\000\000\000\000\
\000\000\000\000\044\001\000\000\000\000\000\000\251\001\000\000\
\000\000\000\000\000\000\251\001\251\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\251\001\251\001\
\251\001\251\001\251\001\000\000\000\000\000\000\000\000\251\001\
\000\000\000\000\000\000\251\001\000\000\000\000\000\000\251\001\
\251\001\000\000\000\000\000\000\000\000\000\000\251\001\251\001\
\000\000\000\000\000\000\000\000\251\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\251\001\000\000\000\000\251\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\251\001\000\000\
\044\001\000\000\251\001\000\000\000\000\000\000\251\001\000\000\
\000\000\000\000\000\000\044\001\000\000\251\001\251\001\000\000\
\000\000\251\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\251\001\000\000\251\001\000\000\000\000\000\000\251\001\
\251\001\251\001\251\001\251\001\252\001\000\000\251\001\000\000\
\252\001\000\000\251\001\252\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\252\001\
\252\001\252\001\252\001\000\000\000\000\000\000\000\000\000\000\
\045\001\000\000\000\000\000\000\252\001\000\000\000\000\000\000\
\000\000\252\001\252\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\252\001\252\001\252\001\252\001\
\252\001\000\000\000\000\000\000\000\000\252\001\000\000\000\000\
\000\000\252\001\000\000\000\000\000\000\252\001\252\001\000\000\
\000\000\000\000\000\000\000\000\252\001\252\001\000\000\000\000\
\000\000\000\000\252\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\252\001\000\000\000\000\252\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\252\001\000\000\045\001\000\000\
\252\001\000\000\000\000\000\000\252\001\000\000\000\000\000\000\
\000\000\045\001\000\000\252\001\252\001\000\000\000\000\252\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\252\001\
\000\000\252\001\000\000\000\000\000\000\252\001\252\001\252\001\
\252\001\252\001\248\001\000\000\252\001\000\000\248\001\000\000\
\252\001\248\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\248\001\248\001\248\001\
\248\001\000\000\000\000\000\000\000\000\000\000\041\001\000\000\
\000\000\000\000\248\001\000\000\000\000\000\000\000\000\248\001\
\248\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\248\001\248\001\248\001\248\001\248\001\000\000\
\000\000\000\000\000\000\248\001\000\000\000\000\000\000\248\001\
\000\000\000\000\000\000\248\001\248\001\000\000\000\000\000\000\
\000\000\000\000\248\001\248\001\000\000\000\000\000\000\000\000\
\248\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\248\001\000\000\000\000\248\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\248\001\000\000\041\001\000\000\248\001\000\000\
\000\000\000\000\248\001\000\000\000\000\000\000\000\000\041\001\
\000\000\248\001\248\001\000\000\000\000\248\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\248\001\000\000\248\001\
\000\000\000\000\000\000\248\001\248\001\248\001\248\001\248\001\
\250\001\000\000\248\001\000\000\250\001\000\000\248\001\250\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\250\001\250\001\250\001\250\001\000\000\
\000\000\000\000\000\000\000\000\043\001\000\000\000\000\000\000\
\250\001\000\000\000\000\000\000\000\000\250\001\250\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\250\001\250\001\250\001\250\001\250\001\000\000\000\000\000\000\
\000\000\250\001\000\000\000\000\000\000\250\001\000\000\000\000\
\000\000\250\001\250\001\000\000\000\000\000\000\000\000\000\000\
\250\001\250\001\000\000\000\000\000\000\000\000\250\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\250\001\000\000\
\000\000\250\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\250\001\000\000\043\001\000\000\250\001\000\000\000\000\000\000\
\250\001\000\000\000\000\000\000\000\000\043\001\000\000\250\001\
\250\001\000\000\000\000\250\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\250\001\000\000\250\001\000\000\000\000\
\000\000\250\001\250\001\250\001\250\001\250\001\249\001\000\000\
\250\001\000\000\249\001\000\000\250\001\249\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\249\001\249\001\249\001\249\001\000\000\000\000\000\000\
\000\000\000\000\042\001\000\000\000\000\000\000\249\001\000\000\
\000\000\000\000\000\000\249\001\249\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\249\001\249\001\
\249\001\249\001\249\001\000\000\000\000\000\000\000\000\249\001\
\000\000\000\000\000\000\249\001\000\000\000\000\000\000\249\001\
\249\001\000\000\000\000\000\000\000\000\000\000\249\001\249\001\
\000\000\000\000\000\000\000\000\249\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\249\001\000\000\000\000\249\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\249\001\000\000\
\042\001\000\000\249\001\000\000\178\003\000\000\249\001\000\000\
\042\000\000\000\000\000\042\001\043\000\249\001\249\001\045\000\
\000\000\249\001\018\001\019\001\000\000\020\001\000\000\000\000\
\021\001\249\001\000\000\249\001\000\000\000\000\000\000\249\001\
\249\001\249\001\249\001\249\001\022\001\000\000\249\001\000\000\
\051\000\000\000\249\001\000\000\000\000\000\000\145\000\000\000\
\023\001\000\000\024\001\000\000\000\000\000\000\000\000\000\000\
\059\000\060\000\061\000\062\000\063\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\067\000\000\000\000\000\
\000\000\069\000\070\000\000\000\000\000\000\000\000\000\000\000\
\146\000\074\000\000\000\000\000\000\000\000\000\078\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\026\001\027\001\
\028\001\029\001\030\001\031\001\032\001\033\001\079\000\035\001\
\036\001\080\000\037\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\082\000\000\000\000\000\000\000\083\000\000\000\000\000\000\000\
\084\000\038\000\039\000\040\000\041\000\042\000\000\000\086\000\
\087\000\043\000\000\000\088\000\045\000\000\000\000\000\000\000\
\000\000\000\000\000\000\091\000\000\000\093\000\047\000\000\000\
\000\000\094\000\095\000\096\000\097\000\147\000\000\000\000\000\
\000\000\000\000\000\000\000\000\050\000\051\000\052\000\000\000\
\053\000\000\000\055\000\056\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\065\000\066\000\
\000\000\199\000\067\000\000\000\000\000\068\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\073\000\074\000\000\000\
\000\000\077\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\079\000\000\000\000\000\080\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\081\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\038\000\039\000\
\040\000\041\000\042\000\246\003\086\000\087\000\043\000\000\000\
\088\000\045\000\000\000\000\000\000\000\000\000\090\000\000\000\
\091\000\000\000\093\000\047\000\000\000\000\000\094\000\095\000\
\096\000\097\000\188\000\000\000\000\000\000\000\000\000\000\000\
\000\000\050\000\051\000\052\000\000\000\053\000\000\000\055\000\
\056\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\059\000\060\000\061\000\062\000\063\000\000\000\
\000\000\000\000\000\000\247\003\066\000\000\000\000\000\067\000\
\000\000\000\000\068\000\069\000\070\000\000\000\000\000\000\000\
\000\000\000\000\073\000\074\000\000\000\000\000\077\000\000\000\
\078\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\079\000\000\000\000\000\080\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\081\000\000\000\082\000\000\000\000\000\000\000\083\000\000\000\
\000\000\000\000\084\000\038\000\039\000\040\000\041\000\042\000\
\000\000\086\000\087\000\043\000\000\000\088\000\045\000\000\000\
\000\000\000\000\000\000\090\000\000\000\091\000\000\000\093\000\
\047\000\000\000\000\000\094\000\095\000\096\000\097\000\188\000\
\000\000\000\000\000\000\000\000\000\000\000\000\050\000\051\000\
\052\000\000\000\053\000\000\000\055\000\056\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\065\000\066\000\000\000\000\000\067\000\000\000\000\000\068\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\073\000\
\074\000\000\000\000\000\077\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\081\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\038\000\039\000\040\000\041\000\042\000\000\000\086\000\087\000\
\043\000\000\000\088\000\045\000\000\000\000\000\000\000\000\000\
\090\000\000\000\091\000\000\000\093\000\047\000\000\000\000\000\
\094\000\095\000\096\000\097\000\188\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\051\000\000\000\000\000\053\000\
\000\000\055\000\056\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\059\000\060\000\061\000\062\000\
\063\000\000\000\000\000\000\000\000\000\065\000\066\000\000\000\
\000\000\067\000\000\000\000\000\068\000\069\000\070\000\000\000\
\000\000\000\000\000\000\000\000\073\000\074\000\000\000\000\000\
\077\000\000\000\078\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\079\000\000\000\000\000\080\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\081\000\000\000\082\000\000\000\000\000\000\000\
\083\000\000\000\000\000\000\000\084\000\174\001\175\001\176\001\
\177\001\042\000\000\000\086\000\087\000\043\000\000\000\088\000\
\045\000\000\000\000\000\000\000\000\000\090\000\000\000\091\000\
\000\000\093\000\047\000\000\000\000\000\094\000\095\000\096\000\
\097\000\188\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\051\000\000\000\000\000\000\000\000\000\055\000\056\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\178\001\179\001\000\000\000\000\067\000\000\000\
\000\000\068\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\180\001\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\181\001\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\038\000\039\000\040\000\041\000\042\000\000\000\
\086\000\087\000\043\000\000\000\088\000\045\000\000\000\000\000\
\000\000\000\000\182\001\000\000\091\000\000\000\093\000\047\000\
\000\000\000\000\094\000\095\000\096\000\097\000\188\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\051\000\000\000\
\000\000\000\000\000\000\055\000\056\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\059\000\060\000\
\061\000\062\000\063\000\000\000\000\000\000\000\000\000\065\000\
\066\000\000\000\000\000\067\000\000\000\000\000\068\000\069\000\
\070\000\000\000\000\000\000\000\000\000\000\000\073\000\074\000\
\000\000\000\000\077\000\000\000\078\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\079\000\000\000\000\000\080\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\081\000\000\000\082\000\000\000\
\000\000\000\000\083\000\000\000\000\000\000\000\084\000\174\001\
\175\001\176\001\177\001\042\000\000\000\086\000\087\000\043\000\
\000\000\088\000\045\000\000\000\000\000\000\000\000\000\090\000\
\000\000\091\000\000\000\093\000\047\000\000\000\000\000\094\000\
\095\000\096\000\097\000\188\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\051\000\000\000\000\000\000\000\000\000\
\055\000\145\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\178\001\179\001\000\000\000\000\
\067\000\000\000\000\000\068\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\180\001\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\038\000\039\000\040\000\
\041\000\042\000\000\000\000\000\000\000\043\000\000\000\000\000\
\045\000\181\001\000\000\082\000\000\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\202\000\051\000\203\000\000\000\182\001\000\000\091\000\056\000\
\093\000\000\000\000\000\000\000\094\000\095\000\096\000\097\000\
\147\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\065\000\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\077\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\038\000\039\000\040\000\041\000\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\081\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\252\000\051\000\
\253\000\000\000\090\000\000\000\091\000\056\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\188\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\065\000\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\146\000\
\074\000\000\000\000\000\077\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\038\000\039\000\040\000\041\000\042\000\000\000\000\000\
\000\000\043\000\000\000\000\000\045\000\081\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\000\000\000\000\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\051\000\000\000\000\000\
\090\000\000\000\091\000\056\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\188\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\065\000\000\000\
\000\000\000\000\067\000\000\000\000\000\068\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\146\000\074\000\000\000\
\000\000\077\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\079\000\000\000\000\000\080\000\174\001\
\175\001\176\001\177\001\042\000\000\000\000\000\000\000\043\000\
\000\000\000\000\045\000\081\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\051\000\000\000\000\000\090\000\000\000\
\091\000\145\000\093\000\000\000\000\000\000\000\094\000\095\000\
\096\000\097\000\188\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\178\001\000\000\000\000\000\000\
\067\000\000\000\000\000\068\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\180\001\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\038\000\039\000\040\000\
\041\000\042\000\000\000\000\000\000\000\043\000\000\000\000\000\
\045\000\181\001\000\000\082\000\000\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\051\000\000\000\000\000\182\001\000\000\091\000\056\000\
\093\000\000\000\000\000\000\000\094\000\095\000\096\000\097\000\
\147\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\065\000\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\077\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\174\001\175\001\176\001\177\001\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\081\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\051\000\
\000\000\000\000\090\000\000\000\091\000\145\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\188\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\178\001\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\146\000\
\074\000\000\000\000\000\180\001\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\038\000\039\000\040\000\041\000\042\000\000\000\000\000\
\000\000\043\000\000\000\000\000\045\000\181\001\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\000\000\000\000\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\051\000\000\000\000\000\
\182\001\000\000\091\000\056\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\147\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\054\004\000\000\
\000\000\000\000\067\000\000\000\000\000\000\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\146\000\074\000\000\000\
\000\000\077\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\079\000\000\000\000\000\080\000\219\000\
\219\000\219\000\219\000\219\000\000\000\000\000\000\000\219\000\
\000\000\000\000\219\000\081\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\219\000\000\000\000\000\090\000\000\000\
\091\000\219\000\093\000\000\000\000\000\000\000\094\000\095\000\
\096\000\097\000\188\000\219\000\219\000\219\000\219\000\219\000\
\000\000\000\000\000\000\000\000\219\000\000\000\000\000\000\000\
\000\000\000\000\000\000\219\000\219\000\219\000\000\000\000\000\
\000\000\000\000\000\000\219\000\219\000\000\000\000\000\219\000\
\000\000\219\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\219\000\000\000\000\000\219\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\219\000\000\000\219\000\000\000\000\000\000\000\219\000\
\000\000\042\000\000\000\219\000\000\000\043\000\000\000\000\000\
\045\000\000\000\219\000\219\000\000\000\000\000\219\000\000\000\
\000\000\000\000\000\000\000\000\219\000\000\000\219\000\000\000\
\219\000\000\000\000\000\000\000\219\000\219\000\219\000\219\000\
\219\000\051\000\000\000\000\000\000\000\000\000\100\001\145\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\101\001\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\042\000\000\000\000\000\000\000\043\000\
\000\000\082\000\045\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\000\000\
\000\000\000\000\000\000\051\000\091\000\000\000\093\000\000\000\
\161\002\145\000\094\000\095\000\096\000\097\000\147\000\000\000\
\000\000\102\001\000\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\067\000\000\000\000\000\000\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\041\000\042\000\000\000\000\000\
\000\000\043\000\000\000\082\000\045\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\047\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\000\000\000\000\000\000\000\000\051\000\091\000\000\000\
\093\000\000\000\055\000\056\000\094\000\095\000\096\000\097\000\
\147\000\000\000\000\000\102\001\000\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\065\000\226\002\
\000\000\000\000\067\000\000\000\000\000\068\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\227\002\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\079\000\000\000\000\000\080\000\000\000\
\000\000\000\000\041\000\042\000\000\000\000\000\000\000\043\000\
\000\000\000\000\045\000\000\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\051\000\000\000\000\000\090\000\000\000\
\091\000\056\000\093\000\000\000\000\000\000\000\094\000\095\000\
\096\000\097\000\188\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\065\000\000\000\000\000\000\000\
\067\000\000\000\000\000\068\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\000\000\000\000\000\000\
\041\000\042\000\000\000\000\000\000\000\043\000\000\000\000\000\
\045\000\000\000\000\000\082\000\000\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\051\000\000\000\000\000\090\000\000\000\091\000\056\000\
\093\000\000\000\000\000\000\000\094\000\095\000\096\000\097\000\
\188\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\065\000\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\177\001\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\000\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\051\000\
\000\000\000\000\090\000\000\000\091\000\145\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\188\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\178\001\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\146\000\
\074\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\221\000\000\000\240\000\000\000\000\000\000\000\000\000\082\000\
\045\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\047\000\000\000\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\000\000\000\000\000\000\
\182\001\051\000\091\000\000\000\093\000\000\000\222\000\145\000\
\094\000\095\000\096\000\097\000\147\000\000\000\000\000\000\000\
\000\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\223\000\000\000\000\000\000\000\225\000\000\000\
\000\000\068\000\000\000\226\000\000\000\000\000\000\000\000\000\
\000\000\241\000\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\221\000\000\000\000\000\000\000\000\000\000\000\
\000\000\082\000\045\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\047\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\000\000\
\000\000\000\000\000\000\051\000\091\000\000\000\093\000\000\000\
\222\000\145\000\094\000\095\000\096\000\097\000\228\000\000\000\
\000\000\000\000\000\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\223\000\000\000\000\000\000\000\
\225\000\000\000\000\000\068\000\000\000\226\000\000\000\000\000\
\000\000\000\000\000\000\241\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\221\000\000\000\000\000\000\000\
\000\000\000\000\000\000\082\000\045\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\047\000\015\002\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\000\000\000\000\000\000\000\000\051\000\091\000\000\000\
\093\000\000\000\222\000\145\000\094\000\095\000\096\000\097\000\
\228\000\000\000\000\000\000\000\000\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\223\000\000\000\
\000\000\000\000\225\000\000\000\000\000\068\000\000\000\226\000\
\000\000\000\000\000\000\000\000\000\000\241\000\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\221\000\000\000\
\000\000\000\000\000\000\000\000\000\000\082\000\045\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\047\000\204\002\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\000\000\000\000\000\000\000\000\051\000\
\091\000\000\000\093\000\000\000\222\000\145\000\094\000\095\000\
\096\000\097\000\228\000\000\000\000\000\000\000\000\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\223\000\000\000\000\000\000\000\225\000\000\000\000\000\068\000\
\000\000\226\000\000\000\000\000\000\000\000\000\000\000\241\000\
\074\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\217\000\000\000\000\000\000\000\000\000\000\000\000\000\082\000\
\217\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\217\000\128\004\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\000\000\000\000\000\000\
\000\000\217\000\091\000\000\000\093\000\000\000\217\000\217\000\
\094\000\095\000\096\000\097\000\228\000\000\000\000\000\000\000\
\000\000\217\000\217\000\217\000\217\000\217\000\000\000\000\000\
\000\000\000\000\217\000\000\000\000\000\000\000\217\000\000\000\
\000\000\217\000\000\000\217\000\000\000\000\000\000\000\000\000\
\000\000\217\000\217\000\000\000\000\000\000\000\000\000\217\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\221\000\000\000\000\000\000\000\000\000\000\000\
\000\000\217\000\045\000\000\000\000\000\217\000\000\000\000\000\
\000\000\217\000\000\000\000\000\047\000\000\000\000\000\000\000\
\217\000\217\000\000\000\217\000\217\000\000\000\000\000\000\000\
\000\000\000\000\000\000\051\000\217\000\000\000\217\000\000\000\
\222\000\145\000\217\000\217\000\217\000\217\000\217\000\000\000\
\000\000\000\000\000\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\223\000\000\000\000\000\000\000\
\225\000\000\000\000\000\068\000\000\000\226\000\000\000\000\000\
\000\000\000\000\000\000\241\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\221\000\000\000\000\000\000\000\
\000\000\000\000\000\000\082\000\045\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\047\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\000\000\000\000\000\000\000\000\051\000\091\000\000\000\
\093\000\000\000\222\000\145\000\094\000\095\000\096\000\097\000\
\228\000\000\000\000\000\000\000\000\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\223\000\000\000\
\000\000\000\000\225\000\000\000\000\000\068\000\000\000\226\000\
\000\000\000\000\000\000\000\000\000\000\221\002\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\037\000\000\000\
\000\000\000\000\000\000\000\000\000\000\082\000\037\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\037\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\000\000\000\000\000\000\000\000\037\000\
\091\000\000\000\093\000\000\000\037\000\037\000\094\000\095\000\
\096\000\097\000\228\000\000\000\000\000\000\000\000\000\037\000\
\037\000\037\000\037\000\037\000\000\000\000\000\000\000\000\000\
\037\000\000\000\000\000\000\000\037\000\000\000\000\000\037\000\
\000\000\037\000\000\000\000\000\000\000\000\000\000\000\037\000\
\037\000\000\000\000\000\000\000\000\000\037\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\042\000\000\000\000\000\000\000\043\000\000\000\037\000\
\045\000\000\000\000\000\037\000\000\000\000\000\000\000\037\000\
\000\000\000\000\047\000\000\000\000\000\000\000\037\000\037\000\
\000\000\000\000\037\000\000\000\000\000\000\000\000\000\000\000\
\000\000\051\000\037\000\000\000\037\000\000\000\055\000\145\000\
\037\000\037\000\037\000\037\000\037\000\000\000\000\000\000\000\
\000\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\067\000\000\000\
\000\000\068\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\046\004\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\000\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\051\000\
\000\000\000\000\000\000\000\000\091\000\145\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\147\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\146\000\
\074\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\043\001\000\000\000\000\
\080\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\
\000\000\043\000\000\000\000\000\045\000\000\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\000\000\016\002\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\051\000\000\000\000\000\
\000\000\000\000\091\000\145\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\147\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\067\000\000\000\000\000\000\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\146\000\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\079\000\000\000\000\000\080\000\000\000\
\000\000\000\000\000\000\042\000\000\000\000\000\000\000\043\000\
\000\000\000\000\045\000\000\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\017\002\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\051\000\000\000\000\000\000\000\000\000\
\091\000\145\000\093\000\000\000\000\000\000\000\094\000\095\000\
\096\000\097\000\147\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\076\002\000\000\000\000\000\000\
\067\000\000\000\000\000\000\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\000\000\000\000\000\000\
\000\000\042\000\000\000\000\000\000\000\043\000\000\000\000\000\
\045\000\000\000\000\000\082\000\000\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\051\000\000\000\000\000\000\000\000\000\091\000\145\000\
\093\000\000\000\000\000\000\000\094\000\095\000\096\000\097\000\
\147\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\043\001\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\000\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\205\002\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\051\000\
\000\000\000\000\000\000\000\000\091\000\145\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\147\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\146\000\
\074\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\
\000\000\043\000\000\000\000\000\045\000\000\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\000\000\206\002\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\051\000\000\000\000\000\
\000\000\000\000\091\000\145\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\147\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\067\000\000\000\000\000\000\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\146\000\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\043\001\000\000\000\000\080\000\000\000\
\000\000\000\000\000\000\042\000\000\000\000\000\000\000\043\000\
\000\000\000\000\045\000\000\000\000\000\082\000\000\000\000\000\
\000\000\083\000\000\000\000\000\000\000\084\000\000\000\000\000\
\000\000\129\004\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\000\000\000\000\051\000\000\000\000\000\000\000\000\000\
\091\000\145\000\093\000\000\000\000\000\000\000\094\000\095\000\
\096\000\097\000\147\000\059\000\060\000\061\000\062\000\063\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\067\000\000\000\000\000\000\000\069\000\070\000\000\000\000\000\
\000\000\000\000\000\000\146\000\074\000\000\000\000\000\000\000\
\000\000\078\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\079\000\000\000\000\000\080\000\000\000\000\000\000\000\
\000\000\042\000\000\000\000\000\000\000\043\000\000\000\000\000\
\045\000\000\000\000\000\082\000\000\000\000\000\000\000\083\000\
\000\000\000\000\000\000\084\000\000\000\000\000\000\000\130\004\
\000\000\000\000\086\000\087\000\000\000\000\000\088\000\000\000\
\000\000\051\000\000\000\000\000\000\000\000\000\091\000\145\000\
\093\000\000\000\000\000\000\000\094\000\095\000\096\000\097\000\
\147\000\059\000\060\000\061\000\062\000\063\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\067\000\000\000\
\000\000\000\000\069\000\070\000\000\000\000\000\000\000\000\000\
\000\000\146\000\074\000\000\000\000\000\000\000\000\000\078\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\079\000\
\000\000\000\000\080\000\000\000\000\000\000\000\000\000\042\000\
\000\000\000\000\000\000\043\000\000\000\000\000\045\000\000\000\
\000\000\082\000\000\000\000\000\000\000\083\000\000\000\000\000\
\000\000\084\000\000\000\000\000\000\000\000\000\000\000\000\000\
\086\000\087\000\000\000\000\000\088\000\000\000\000\000\051\000\
\000\000\000\000\000\000\000\000\091\000\145\000\093\000\000\000\
\000\000\000\000\094\000\095\000\096\000\097\000\147\000\059\000\
\060\000\061\000\062\000\063\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\067\000\000\000\000\000\000\000\
\069\000\070\000\000\000\000\000\000\000\000\000\000\000\000\001\
\074\000\000\000\000\000\000\000\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\079\000\000\000\000\000\
\080\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\
\000\000\043\000\000\000\000\000\045\000\000\000\000\000\082\000\
\000\000\000\000\000\000\083\000\000\000\000\000\000\000\084\000\
\000\000\000\000\000\000\000\000\000\000\000\000\086\000\087\000\
\000\000\000\000\088\000\000\000\000\000\051\000\000\000\000\000\
\000\000\000\000\091\000\145\000\093\000\000\000\000\000\000\000\
\094\000\095\000\096\000\097\000\147\000\059\000\060\000\061\000\
\062\000\063\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\067\000\000\000\000\000\000\000\069\000\070\000\
\000\000\000\000\000\000\000\000\000\000\146\000\074\000\000\000\
\000\000\000\000\000\000\078\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\043\001\000\000\068\000\080\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\068\000\082\000\000\000\000\000\
\000\000\083\000\000\000\068\000\000\000\084\000\000\000\068\000\
\000\000\000\000\000\000\000\000\086\000\087\000\000\000\000\000\
\088\000\068\000\000\000\000\000\068\000\000\000\000\000\000\000\
\091\000\000\000\093\000\000\000\000\000\068\000\094\000\095\000\
\096\000\097\000\147\000\000\000\000\000\000\000\000\000\000\000\
\000\000\068\000\000\000\094\003\000\000\000\000\000\000\000\000\
\000\000\000\000\068\000\000\000\000\000\068\000\000\000\000\000\
\000\000\000\000\068\000\000\000\000\000\000\000\018\001\019\001\
\000\000\020\001\000\000\000\000\021\001\000\000\000\000\132\004\
\068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\
\022\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\018\001\019\001\023\001\020\001\024\001\000\000\
\021\001\000\000\000\000\000\000\000\000\000\000\068\000\000\000\
\000\000\000\000\000\000\068\000\022\001\000\000\000\000\000\000\
\000\000\000\000\000\000\068\000\000\000\000\000\000\000\000\000\
\023\001\095\003\024\001\000\000\000\000\000\000\000\000\068\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\026\001\027\001\028\001\029\001\030\001\031\001\
\032\001\033\001\096\003\035\001\036\001\133\004\037\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\140\004\000\000\000\000\000\000\000\000\000\000\026\001\027\001\
\028\001\029\001\030\001\031\001\032\001\033\001\134\004\035\001\
\036\001\000\000\037\001\018\001\019\001\000\000\020\001\000\000\
\000\000\021\001\000\000\000\000\164\004\097\003\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\022\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\018\001\
\019\001\023\001\020\001\024\001\000\000\021\001\000\000\000\000\
\000\000\135\004\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\022\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\023\001\141\004\024\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\026\001\
\027\001\028\001\029\001\030\001\031\001\032\001\033\001\142\004\
\035\001\036\001\165\004\037\001\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\181\004\000\000\000\000\
\000\000\000\000\000\000\026\001\027\001\028\001\029\001\030\001\
\031\001\032\001\033\001\166\004\035\001\036\001\000\000\037\001\
\018\001\019\001\000\000\020\001\000\000\000\000\021\001\000\000\
\000\000\000\000\143\004\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\022\001\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\023\001\000\000\
\024\001\000\000\000\000\000\000\000\000\000\000\167\004\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\182\004\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\026\001\027\001\028\001\029\001\
\030\001\031\001\032\001\033\001\183\004\035\001\036\001\000\000\
\037\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\184\004"

let yycheck = "\003\000\
\079\000\065\000\149\000\251\000\053\000\072\000\149\001\011\000\
\012\000\240\000\102\001\081\001\227\000\112\000\104\001\014\001\
\050\000\060\001\052\000\001\000\002\000\254\001\012\000\224\000\
\226\000\209\000\048\000\233\000\192\001\101\001\058\001\035\000\
\036\000\037\000\082\001\039\000\040\000\041\000\048\000\043\000\
\044\000\230\001\064\000\060\001\066\000\027\000\115\002\029\000\
\067\000\068\000\069\000\179\001\028\001\013\004\064\000\183\001\
\184\001\025\001\028\001\042\000\017\003\080\000\028\001\067\000\
\068\000\069\000\021\001\034\001\015\001\073\000\156\002\157\002\
\158\002\159\002\160\002\079\000\080\000\067\000\068\000\069\000\
\103\001\029\001\065\000\038\000\015\001\089\000\090\000\161\000\
\092\000\163\000\080\000\020\002\114\000\115\000\046\000\025\001\
\028\001\049\000\000\000\041\001\164\000\049\001\050\001\071\003\
\060\001\112\003\178\001\067\004\028\001\090\001\003\001\021\001\
\022\001\136\001\024\001\119\000\120\000\050\001\046\000\087\003\
\227\000\049\000\077\000\029\001\125\001\173\001\081\000\060\001\
\076\001\128\001\120\000\014\001\028\001\085\000\241\000\014\001\
\028\001\050\001\050\001\155\001\023\001\142\001\146\000\049\001\
\050\001\149\000\029\003\060\001\073\001\000\001\050\001\084\001\
\103\001\120\001\121\001\125\001\017\001\085\000\225\000\223\000\
\227\000\028\001\055\001\156\001\070\001\122\003\167\001\123\001\
\202\000\203\000\076\001\084\001\142\001\102\002\241\000\028\001\
\076\001\090\001\084\001\073\001\127\001\136\001\186\000\024\001\
\233\000\136\001\069\001\008\001\098\004\156\001\069\001\080\001\
\168\001\080\002\126\001\199\000\127\001\245\001\247\000\008\001\
\168\001\014\001\168\001\145\001\134\001\125\001\223\001\164\003\
\030\001\199\000\011\000\034\001\028\001\161\001\025\001\098\001\
\099\001\221\000\252\000\253\000\033\003\034\003\035\003\036\003\
\037\003\039\002\136\001\041\002\161\001\125\001\198\003\003\002\
\192\004\125\001\035\000\036\000\037\000\120\003\039\000\040\000\
\041\000\059\001\125\001\044\000\136\001\178\002\125\001\153\001\
\161\001\161\001\156\001\219\002\000\001\136\001\162\002\161\001\
\069\001\136\001\125\001\218\001\076\002\161\001\237\001\021\001\
\012\001\175\004\194\001\161\001\050\001\141\002\227\003\028\001\
\125\001\241\002\231\003\041\001\027\004\028\004\029\004\030\004\
\031\004\021\001\040\001\229\001\105\001\014\001\034\001\025\001\
\089\000\090\000\038\001\034\002\097\001\101\001\023\001\043\001\
\105\001\054\001\021\001\107\001\050\001\067\001\084\001\154\001\
\052\001\053\001\054\001\055\001\090\001\125\001\174\001\029\001\
\125\001\061\001\120\001\121\001\180\001\181\001\001\001\050\001\
\136\001\125\001\070\001\136\001\129\001\008\001\080\001\075\001\
\076\001\077\001\078\001\049\001\050\001\034\004\084\001\052\001\
\070\001\038\004\142\001\022\001\069\001\069\001\120\001\083\001\
\001\001\123\001\069\001\050\001\149\000\014\001\156\001\008\001\
\070\001\116\001\001\001\115\002\069\001\054\001\076\001\090\001\
\190\003\008\001\090\001\167\001\001\001\113\001\084\001\125\001\
\116\001\098\001\178\001\008\001\095\001\076\001\101\001\022\001\
\001\001\090\001\099\003\113\001\000\000\082\004\116\001\008\001\
\171\002\186\000\085\001\086\001\087\001\088\001\089\001\090\001\
\091\001\092\001\093\001\094\001\125\001\022\001\030\001\030\003\
\214\002\073\001\074\001\029\001\069\001\126\001\172\001\136\001\
\050\001\054\004\021\001\083\001\160\001\161\001\162\001\163\001\
\137\001\171\001\119\004\167\001\221\000\104\001\105\001\049\001\
\050\001\248\001\002\001\175\001\176\001\177\001\244\002\009\003\
\012\003\167\001\182\001\153\001\247\001\024\002\165\001\232\001\
\085\004\040\001\084\001\161\001\252\001\001\001\254\001\104\001\
\105\001\050\001\076\001\178\001\008\001\008\002\009\002\144\002\
\145\002\104\001\105\001\060\001\125\001\157\001\025\001\134\001\
\135\001\028\001\022\001\104\001\105\001\235\001\069\001\136\001\
\127\001\221\001\217\002\164\002\126\001\148\001\244\001\104\001\
\105\001\184\002\050\001\084\001\003\001\157\001\175\002\137\001\
\236\001\243\001\226\002\054\001\228\002\038\001\167\003\231\002\
\185\002\170\003\074\001\075\001\248\001\249\001\103\001\023\001\
\014\001\253\001\029\001\052\001\076\001\137\001\055\001\126\001\
\022\002\003\001\137\001\138\001\084\001\003\000\028\001\253\001\
\040\001\040\001\137\001\154\003\076\002\028\001\049\001\050\001\
\222\002\033\003\034\003\035\003\036\003\037\003\253\002\136\001\
\055\001\007\002\146\002\147\002\148\002\149\002\150\002\151\002\
\152\002\153\002\154\002\155\002\104\001\105\001\040\001\054\001\
\030\001\076\001\134\001\135\001\070\001\043\000\028\001\021\001\
\161\001\084\001\040\001\050\001\050\000\055\001\052\000\021\001\
\088\001\127\003\084\001\093\002\150\003\153\003\127\001\003\001\
\098\002\129\001\098\001\099\001\100\001\101\001\102\001\103\001\
\054\001\013\001\040\001\073\000\168\001\076\001\110\001\028\001\
\069\001\021\001\022\001\022\001\024\001\024\001\134\001\135\001\
\031\001\032\001\033\001\034\001\020\002\076\002\092\000\212\000\
\213\000\088\001\040\001\084\001\040\001\208\002\088\001\170\002\
\022\001\054\001\024\001\106\003\073\001\173\002\177\002\102\001\
\103\001\088\001\221\002\055\001\102\001\103\001\175\001\176\001\
\177\001\119\000\126\002\127\002\067\001\182\001\118\003\102\001\
\103\001\058\004\088\001\123\003\143\001\017\001\126\001\110\001\
\126\002\127\002\024\001\079\001\085\003\076\001\038\001\095\002\
\102\001\103\001\088\001\069\001\146\000\030\001\021\001\022\001\
\110\001\024\001\221\002\090\001\098\001\099\001\100\001\101\001\
\102\001\103\001\232\003\167\002\030\001\169\002\025\001\095\002\
\110\001\040\001\162\003\125\001\030\001\222\002\102\002\088\001\
\050\001\167\002\013\001\169\002\013\001\021\001\022\001\166\002\
\024\001\125\001\126\001\127\001\025\001\102\001\103\001\168\001\
\249\001\044\003\198\002\199\002\136\001\137\001\014\001\126\001\
\040\001\040\001\142\001\088\001\202\000\203\000\137\001\023\001\
\198\002\199\002\208\000\209\000\216\002\127\001\212\000\213\000\
\101\001\102\001\103\001\217\000\218\000\225\002\151\004\227\002\
\040\001\110\001\224\000\167\001\232\002\127\001\021\001\022\001\
\224\003\024\001\015\001\239\002\024\001\219\002\031\001\032\001\
\033\001\034\001\246\002\142\001\003\001\249\002\021\001\136\001\
\136\001\040\001\254\002\144\001\127\001\069\001\252\000\253\000\
\246\002\186\004\000\001\241\002\168\001\236\003\014\001\004\001\
\254\002\215\003\216\003\217\003\218\003\219\003\012\001\023\001\
\095\001\040\001\067\001\145\001\021\001\022\001\207\004\024\001\
\142\001\029\003\098\001\099\001\100\001\101\001\002\001\014\001\
\040\001\021\001\022\001\099\003\024\001\030\001\185\003\040\001\
\044\003\126\001\021\001\047\003\048\003\049\003\030\001\255\002\
\125\001\001\003\021\001\071\001\040\001\125\001\201\003\053\001\
\054\001\047\003\048\003\049\003\040\001\069\001\030\001\061\001\
\136\001\126\001\137\001\251\002\252\002\021\003\014\001\255\002\
\021\001\001\003\181\003\127\001\136\001\075\001\076\001\077\001\
\078\001\137\001\021\001\022\001\088\003\024\001\040\001\039\003\
\136\001\093\003\098\001\099\001\100\001\021\003\136\001\136\001\
\118\003\136\001\127\001\030\001\104\003\040\001\030\001\093\003\
\014\001\001\000\002\000\003\000\004\000\113\003\136\001\039\003\
\022\001\023\001\178\003\136\001\120\003\125\001\031\001\032\001\
\033\001\034\001\120\001\125\001\040\001\123\001\055\001\136\001\
\136\001\055\001\040\001\195\003\024\001\155\001\088\001\069\001\
\136\001\141\003\134\001\143\003\168\001\145\003\137\001\147\003\
\098\001\099\001\100\001\101\001\102\001\103\001\135\001\141\003\
\017\001\143\003\067\001\145\003\110\001\147\003\148\001\069\001\
\000\000\017\001\160\001\161\001\162\001\163\001\215\003\216\003\
\217\003\218\003\219\003\031\001\032\001\033\001\034\001\127\001\
\015\001\181\003\088\001\079\001\184\003\185\003\239\002\247\003\
\031\001\032\001\033\001\034\001\098\001\099\001\100\001\101\001\
\102\001\103\001\184\003\046\004\194\001\201\003\202\003\203\003\
\110\001\199\001\224\003\136\001\105\001\040\001\126\001\067\001\
\160\003\192\003\193\003\153\001\202\003\203\003\021\001\125\001\
\040\001\002\001\222\003\031\001\032\001\033\001\034\001\221\001\
\024\001\040\001\136\001\136\001\029\003\251\003\234\003\127\001\
\160\003\237\003\137\001\015\001\014\004\023\001\236\001\167\003\
\021\001\136\001\170\003\044\003\127\001\096\004\054\004\237\003\
\136\001\100\004\136\001\136\001\079\001\088\001\040\001\136\001\
\134\001\050\000\137\001\052\000\053\000\054\000\010\004\014\001\
\247\003\100\001\101\001\102\001\103\001\092\004\014\001\122\004\
\023\001\088\001\126\001\110\001\021\001\025\004\026\004\072\000\
\091\004\022\001\137\001\079\001\032\004\100\001\101\001\102\001\
\103\001\040\001\000\000\025\004\026\004\168\001\145\004\110\001\
\055\001\017\001\046\004\002\001\125\001\155\001\088\001\125\001\
\040\001\126\001\002\004\003\004\004\004\005\004\006\004\024\001\
\098\001\099\001\100\001\101\001\102\001\103\001\069\001\120\003\
\168\001\021\001\115\000\071\004\110\001\040\001\104\004\054\004\
\134\001\014\001\002\004\003\004\004\004\005\004\006\004\040\001\
\168\001\071\004\105\001\137\001\088\001\028\001\126\001\137\001\
\092\004\093\004\014\001\098\001\099\001\100\001\101\001\093\002\
\079\001\101\001\102\001\103\001\098\002\168\001\168\001\110\001\
\090\001\109\004\110\001\111\004\112\004\136\001\114\004\115\004\
\116\004\117\004\118\004\136\001\136\001\136\001\125\001\109\004\
\136\001\111\004\112\004\136\001\181\003\136\001\040\001\088\001\
\185\003\136\001\058\004\136\001\084\004\136\001\086\004\040\001\
\014\001\136\001\099\001\100\001\101\001\102\001\103\001\125\001\
\201\003\125\001\125\001\099\004\055\001\110\001\013\001\155\004\
\126\001\202\000\203\000\037\001\084\004\050\001\086\004\208\000\
\209\000\136\001\021\001\212\000\213\000\155\004\021\001\021\001\
\021\001\218\000\021\001\099\004\126\001\168\001\088\001\224\000\
\225\000\226\000\227\000\069\001\020\001\168\001\030\001\136\001\
\233\000\099\001\100\001\101\001\102\001\103\001\239\000\240\000\
\241\000\002\001\136\001\035\001\110\001\040\001\247\000\014\001\
\125\001\136\001\042\001\252\000\253\000\125\001\046\001\211\004\
\023\001\013\001\126\001\168\001\110\001\040\001\008\001\136\001\
\056\001\168\001\216\002\059\001\002\001\211\004\125\001\151\004\
\136\001\040\001\126\001\225\002\068\001\227\002\021\001\137\001\
\025\001\030\001\232\002\126\001\125\001\040\001\030\001\040\001\
\080\001\040\001\040\001\137\001\040\001\126\001\198\004\126\001\
\041\001\089\001\136\001\249\002\092\001\136\001\069\001\040\001\
\030\001\097\001\186\004\126\001\003\003\004\003\005\003\006\003\
\136\001\136\001\136\001\136\001\136\001\040\001\198\004\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\207\004\
\137\001\050\001\002\001\098\001\099\001\100\001\101\001\136\001\
\021\001\136\001\136\001\136\001\093\004\137\001\136\001\110\001\
\006\001\040\003\041\003\042\003\043\003\141\001\136\001\136\001\
\251\000\136\001\146\001\136\001\136\001\040\001\125\001\136\001\
\136\001\136\001\154\001\136\001\136\001\027\001\030\001\126\001\
\126\001\136\001\127\001\030\001\136\001\127\001\166\001\120\001\
\015\001\055\001\123\001\126\001\000\000\021\001\040\001\050\000\
\126\001\052\000\053\000\054\000\127\001\127\001\003\001\021\001\
\127\001\126\001\088\003\057\001\058\001\126\001\092\003\055\001\
\013\001\021\001\168\001\155\001\066\001\072\000\073\000\127\001\
\021\001\022\001\104\003\024\001\074\001\075\001\126\001\021\001\
\126\001\126\001\136\001\113\003\082\001\014\001\126\001\155\001\
\126\001\040\001\002\001\040\001\003\001\091\001\023\001\093\001\
\094\001\126\001\096\001\126\001\126\001\126\001\013\001\184\001\
\126\001\001\003\055\001\223\002\072\001\089\004\021\001\040\001\
\172\000\194\001\165\001\174\000\095\001\096\001\097\001\098\001\
\099\001\119\001\030\003\177\002\085\002\009\002\250\003\224\002\
\162\002\040\001\079\001\181\003\130\001\131\001\027\000\002\000\
\194\001\218\001\029\000\186\000\069\001\024\003\184\002\153\003\
\055\001\109\002\156\000\228\001\229\001\230\001\239\002\232\001\
\150\001\070\000\193\003\232\001\237\001\192\003\157\003\088\001\
\123\001\228\001\150\004\165\000\162\001\163\001\247\001\165\001\
\079\001\098\001\099\001\100\001\101\001\102\001\103\001\223\000\
\125\001\126\001\127\001\084\001\069\001\110\001\154\003\008\002\
\009\002\201\001\011\002\136\001\137\001\194\002\112\001\252\001\
\222\003\142\001\195\003\099\003\125\001\175\004\255\255\202\000\
\203\000\026\002\255\255\255\255\234\003\208\000\209\000\136\001\
\255\255\212\000\213\000\255\255\255\255\255\255\125\001\126\001\
\127\001\255\255\167\001\026\002\255\255\224\000\225\000\226\000\
\227\000\136\001\137\001\255\255\255\255\255\255\233\000\142\001\
\255\255\255\255\255\255\255\255\010\004\240\000\241\000\013\004\
\014\004\255\255\255\255\014\001\247\000\255\255\255\255\255\255\
\255\255\252\000\253\000\255\255\023\001\255\255\255\255\255\255\
\167\001\255\255\032\004\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\093\002\040\001\255\255\003\001\
\046\004\098\002\255\255\255\255\255\255\255\255\025\001\255\255\
\255\255\013\001\255\255\255\255\255\255\255\255\255\255\255\255\
\011\002\021\001\255\255\255\255\024\001\067\004\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\040\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\144\002\
\145\002\255\255\255\255\055\001\255\255\255\255\255\255\098\001\
\099\001\100\001\104\004\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\164\002\114\004\115\004\116\004\117\004\
\118\004\170\002\255\255\079\001\255\255\255\255\175\002\255\255\
\177\002\178\002\125\001\255\255\255\255\255\255\255\255\184\002\
\185\002\255\255\255\255\255\255\255\255\136\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\120\001\255\255\255\255\
\123\001\001\001\255\255\255\255\255\255\050\000\255\255\052\000\
\053\000\054\000\255\255\255\255\255\255\019\001\014\001\255\255\
\255\255\125\001\126\001\127\001\221\002\222\002\022\001\023\001\
\255\255\255\255\255\255\072\000\136\001\137\001\231\002\255\255\
\255\255\255\255\142\001\255\255\255\255\255\255\044\001\255\255\
\040\001\255\255\192\004\255\255\050\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\253\002\255\255\060\001\061\001\
\062\001\063\001\064\001\167\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\069\001\255\255\194\001\
\017\003\255\255\255\255\255\255\255\255\255\255\084\001\085\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\088\001\255\255\255\255\255\255\255\255\255\255\255\255\218\001\
\255\255\255\255\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\228\001\229\001\230\001\255\255\232\001\110\001\255\255\
\255\255\255\255\237\001\255\255\255\255\255\255\124\001\255\255\
\255\255\255\255\128\001\255\255\247\001\125\001\132\001\226\002\
\255\255\228\002\255\255\255\255\231\002\139\001\140\001\255\255\
\136\001\143\001\255\255\255\255\085\003\008\002\009\002\255\255\
\011\002\151\001\255\255\092\003\255\255\255\255\255\255\157\001\
\158\001\159\001\160\001\255\255\255\255\202\000\203\000\026\002\
\255\255\255\255\255\255\255\255\007\003\255\255\255\255\212\000\
\213\000\255\255\006\001\255\255\255\255\118\003\017\003\255\255\
\255\255\122\003\255\255\255\255\225\000\226\000\227\000\255\255\
\255\255\255\255\255\255\255\255\233\000\255\255\255\255\027\001\
\255\255\255\255\255\255\240\000\241\000\255\255\255\255\255\255\
\255\255\255\255\247\000\255\255\255\255\255\255\255\255\252\000\
\253\000\255\255\255\255\255\255\255\255\255\255\255\255\160\003\
\255\255\255\255\255\255\164\003\255\255\057\001\058\001\255\255\
\255\255\255\255\093\002\255\255\255\255\255\255\066\001\098\002\
\255\255\255\255\255\255\255\255\025\001\255\255\074\001\075\001\
\255\255\255\255\255\255\255\255\255\255\255\255\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\
\255\255\093\001\094\001\255\255\096\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\215\003\216\003\
\217\003\218\003\219\003\118\003\255\255\144\002\145\002\122\003\
\123\003\255\255\227\003\119\001\255\255\255\255\231\003\255\255\
\255\255\255\255\255\255\236\003\255\255\255\255\130\001\131\001\
\255\255\164\002\255\255\255\255\255\255\246\003\247\003\170\002\
\255\255\255\255\255\255\255\255\175\002\255\255\177\002\178\002\
\255\255\255\255\150\001\255\255\255\255\184\002\185\002\162\003\
\255\255\164\003\255\255\255\255\013\004\014\004\162\001\163\001\
\255\255\165\001\255\255\120\001\255\255\255\255\123\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\034\004\255\255\003\000\255\255\038\004\255\255\255\255\
\255\255\255\255\221\002\222\002\255\255\255\255\255\255\255\255\
\227\002\255\255\255\255\052\004\255\255\054\004\255\255\255\255\
\014\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\023\001\067\004\255\255\255\255\224\003\255\255\255\255\
\227\003\255\255\253\002\043\000\231\003\255\255\255\255\255\255\
\255\255\082\004\040\001\255\255\085\004\255\255\053\000\054\000\
\255\255\255\255\091\004\255\255\255\255\255\255\017\003\255\255\
\255\255\098\004\255\255\255\255\255\255\255\255\255\255\104\004\
\255\255\072\000\255\255\255\255\255\255\255\255\255\255\069\001\
\255\255\255\255\255\255\255\255\255\255\255\255\119\004\255\255\
\255\255\255\255\255\255\255\255\092\000\255\255\255\255\228\001\
\229\001\230\001\088\001\232\001\255\255\255\255\255\255\034\004\
\237\001\255\255\255\255\038\004\098\001\099\001\100\001\101\001\
\102\001\103\001\247\001\255\255\023\001\024\001\025\001\119\000\
\110\001\028\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\131\000\085\003\008\002\009\002\040\001\255\255\125\001\
\255\255\092\003\255\255\255\255\255\255\255\255\175\004\255\255\
\255\255\255\255\136\001\255\255\255\255\026\002\255\255\082\004\
\255\255\255\255\255\255\255\255\255\255\156\000\255\255\192\004\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\122\003\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\177\000\178\000\255\255\088\001\255\255\255\255\
\255\255\255\255\255\255\255\255\119\004\255\255\255\255\098\001\
\099\001\100\001\101\001\102\001\103\001\255\255\255\255\255\255\
\255\255\255\255\255\255\110\001\255\255\255\255\208\000\255\255\
\255\255\164\003\255\255\255\255\089\002\255\255\255\255\255\255\
\093\002\255\255\125\001\255\255\255\255\098\002\224\000\255\255\
\255\255\255\255\225\000\226\000\227\000\136\001\255\255\255\255\
\255\255\255\255\233\000\255\255\255\255\006\001\255\255\238\000\
\239\000\240\000\241\000\255\255\255\255\255\255\255\255\255\255\
\247\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\027\001\255\255\215\003\216\003\217\003\218\003\
\219\003\255\255\255\255\255\255\012\001\255\255\255\255\255\255\
\227\003\255\255\255\255\255\255\231\003\255\255\255\255\255\255\
\255\255\236\003\025\001\255\255\255\255\255\255\255\255\255\255\
\057\001\058\001\255\255\255\255\247\003\170\002\255\255\255\255\
\255\255\066\001\255\255\255\255\177\002\178\002\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\053\001\054\001\255\255\
\255\255\082\001\013\004\014\004\255\255\061\001\255\255\255\255\
\063\001\255\255\091\001\069\001\093\001\094\001\255\255\096\001\
\255\255\255\255\255\255\075\001\076\001\077\001\078\001\034\004\
\255\255\255\255\255\255\038\004\255\255\255\255\255\255\255\255\
\221\002\222\002\255\255\255\255\255\255\255\255\119\001\255\255\
\003\001\255\255\100\001\054\004\255\255\255\255\104\001\102\001\
\255\255\130\001\131\001\255\255\255\255\255\255\255\255\255\255\
\067\004\255\255\255\255\022\001\023\001\255\255\025\001\255\255\
\253\002\028\001\255\255\255\255\255\255\150\001\255\255\082\004\
\255\255\255\255\085\004\255\255\255\255\040\001\255\255\255\255\
\091\004\162\001\163\001\255\255\165\001\255\255\255\255\098\004\
\255\255\052\001\255\255\054\001\255\255\104\004\149\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\160\001\161\001\162\001\163\001\119\004\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\174\001\255\255\081\001\255\255\
\255\255\179\001\180\001\181\001\255\255\183\001\184\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\192\001\098\001\
\099\001\100\001\101\001\102\001\103\001\104\001\105\001\106\001\
\107\001\108\001\255\255\110\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\092\003\
\255\255\255\255\255\255\218\001\175\004\221\001\255\255\255\255\
\255\255\255\255\255\255\226\001\255\255\228\001\229\001\230\001\
\255\255\232\001\020\001\255\255\236\001\192\004\237\001\255\255\
\255\255\255\255\149\001\255\255\255\255\255\255\255\255\255\255\
\247\001\035\001\255\255\255\255\255\255\255\255\255\255\255\255\
\042\001\255\255\255\255\255\255\046\001\255\255\255\255\255\255\
\255\255\008\002\009\002\255\255\255\255\255\255\056\001\255\255\
\255\255\059\001\255\255\255\255\255\255\255\255\255\255\255\255\
\157\003\255\255\068\001\026\002\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\080\001\255\255\
\255\255\255\255\255\255\255\255\255\255\044\002\255\255\089\001\
\255\255\255\255\092\001\255\255\255\255\255\255\255\255\097\001\
\255\255\255\255\255\255\006\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\255\255\255\255\255\255\
\255\255\080\002\215\003\216\003\217\003\218\003\219\003\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\141\001\255\255\255\255\255\255\236\003\
\146\001\255\255\053\000\054\000\255\255\255\255\109\002\255\255\
\154\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\166\001\072\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\013\004\014\004\255\255\141\002\255\255\255\255\144\002\145\002\
\146\002\147\002\148\002\149\002\150\002\151\002\152\002\153\002\
\154\002\155\002\156\002\157\002\158\002\159\002\160\002\161\002\
\162\002\255\255\164\002\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\170\002\255\255\175\002\053\000\054\000\
\255\255\255\255\177\002\178\002\255\255\255\255\255\255\185\002\
\255\255\184\002\255\255\255\255\255\255\255\255\067\004\255\255\
\255\255\072\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\156\000\157\000\158\000\159\000\160\000\091\004\255\255\
\216\002\164\000\165\000\166\000\255\255\168\000\221\002\222\002\
\255\255\225\002\255\255\104\004\255\255\176\000\177\000\255\255\
\232\002\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\249\002\255\255\255\255\053\000\054\000\253\002\255\255\
\255\255\255\255\255\255\011\001\255\255\255\255\255\255\009\003\
\255\255\255\255\255\255\019\001\255\255\012\003\255\255\072\000\
\255\255\255\255\255\255\255\255\223\000\029\001\225\000\226\000\
\227\000\255\255\255\255\255\255\255\255\255\255\233\000\030\003\
\255\255\255\255\255\255\255\255\044\001\240\000\241\000\255\255\
\255\255\049\001\050\001\178\000\247\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\192\004\255\255\069\001\070\001\255\255\
\255\255\073\001\255\255\255\255\076\001\255\255\078\001\255\255\
\255\255\255\255\255\255\255\255\084\001\085\001\025\001\255\255\
\255\255\255\255\090\001\085\003\255\255\255\255\255\255\255\255\
\088\003\255\255\225\000\226\000\227\000\255\255\255\255\255\255\
\255\255\255\255\233\000\255\255\255\255\255\255\159\000\160\000\
\104\003\240\000\241\000\164\000\255\255\166\000\255\255\168\000\
\247\000\113\003\255\255\255\255\124\001\255\255\255\255\176\000\
\128\001\255\255\255\255\255\255\132\001\255\255\255\255\255\255\
\255\255\255\255\255\255\139\001\140\001\255\255\255\255\143\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\151\001\
\255\255\153\001\025\001\000\000\150\003\157\001\158\001\159\001\
\160\001\161\001\153\003\154\003\255\255\255\255\255\255\255\255\
\107\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\225\000\226\000\227\000\255\255\255\255\255\255\255\255\122\001\
\233\000\255\255\255\255\255\255\255\255\255\255\129\001\240\000\
\241\000\255\255\255\255\255\255\190\003\255\255\247\000\255\255\
\255\255\140\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\149\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\215\003\216\003\217\003\218\003\219\003\255\255\222\003\255\255\
\025\001\255\255\255\255\255\255\001\001\255\255\255\255\255\255\
\255\255\255\255\234\003\008\001\255\255\236\003\255\255\255\255\
\255\255\014\001\043\001\255\255\255\255\255\255\255\255\255\255\
\247\003\022\001\023\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\010\004\040\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\223\001\255\255\255\255\255\255\
\255\255\228\001\229\001\230\001\255\255\232\001\255\255\255\255\
\032\004\255\255\237\001\255\255\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\247\001\255\255\255\255\255\255\
\255\255\252\001\049\004\254\001\255\255\052\004\255\255\054\004\
\255\255\056\004\255\255\088\001\255\255\008\002\009\002\255\255\
\255\255\255\255\255\255\255\255\255\255\098\001\099\001\100\001\
\101\001\102\001\103\001\104\001\255\255\255\255\255\255\026\002\
\255\255\110\001\255\255\140\001\255\255\255\255\085\004\255\255\
\255\255\255\255\255\255\255\255\091\004\228\001\229\001\230\001\
\125\001\232\001\255\255\098\004\255\255\255\255\237\001\102\004\
\255\255\255\255\255\255\136\001\255\255\255\255\255\255\255\255\
\247\001\255\255\114\004\115\004\116\004\117\004\118\004\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\008\002\009\002\255\255\255\255\080\002\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\026\002\255\255\002\001\003\001\150\004\
\255\255\006\001\101\002\255\255\255\255\104\002\255\255\255\255\
\255\255\108\002\109\002\255\255\017\001\255\255\255\255\020\001\
\255\255\255\255\255\255\228\001\229\001\230\001\027\001\232\001\
\175\004\255\255\255\255\255\255\237\001\255\255\035\001\255\255\
\255\255\255\255\255\255\040\001\255\255\042\001\247\001\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\055\001\056\001\057\001\058\001\059\001\008\002\
\009\002\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\170\002\
\171\002\026\002\173\002\080\001\255\255\082\001\177\002\178\002\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\221\002\222\002\255\255\130\001\131\001\255\255\
\255\255\255\255\255\255\001\001\255\255\255\255\255\255\255\255\
\141\001\255\255\008\001\170\002\255\255\146\001\255\255\255\255\
\014\001\150\001\177\002\178\002\101\002\154\001\255\255\104\002\
\022\001\023\001\253\002\108\002\255\255\162\001\163\001\255\255\
\165\001\166\001\255\255\168\001\255\255\255\255\255\255\255\255\
\255\255\255\255\040\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\027\003\255\255\255\255\030\003\255\255\255\255\221\002\222\002\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\069\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\170\002\088\001\255\255\255\255\255\255\253\002\255\255\
\177\002\178\002\255\255\255\255\098\001\099\001\100\001\101\001\
\102\001\103\001\104\001\255\255\255\255\255\255\255\255\255\255\
\110\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\125\001\
\099\003\006\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\136\001\255\255\221\002\222\002\255\255\020\001\
\255\255\000\000\255\255\255\255\255\255\255\255\027\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\255\255\255\255\255\255\255\255\255\255\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\253\002\255\255\255\255\255\255\
\255\255\255\255\255\255\056\001\057\001\058\001\059\001\154\003\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\027\003\080\001\255\255\082\001\255\255\178\003\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\195\003\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\215\003\216\003\217\003\218\003\
\219\003\255\255\255\255\255\255\255\255\130\001\131\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\141\001\236\003\255\255\255\255\255\255\146\001\255\255\255\255\
\255\255\150\001\255\255\246\003\255\255\154\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\162\001\163\001\255\255\
\165\001\166\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\215\003\216\003\217\003\218\003\219\003\000\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\045\004\255\255\255\255\236\003\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\057\004\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\091\004\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\215\003\216\003\
\217\003\218\003\219\003\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\001\001\002\001\003\001\004\001\255\255\006\001\
\255\255\008\001\255\255\236\003\011\001\012\001\013\001\014\001\
\015\001\016\001\017\001\255\255\019\001\020\001\021\001\022\001\
\023\001\024\001\025\001\255\255\027\001\028\001\029\001\030\001\
\031\001\032\001\033\001\034\001\035\001\255\255\037\001\038\001\
\255\255\040\001\041\001\042\001\091\004\044\001\255\255\046\001\
\255\255\255\255\049\001\050\001\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\060\001\061\001\062\001\
\063\001\064\001\255\255\066\001\255\255\068\001\069\001\070\001\
\255\255\255\255\073\001\074\001\075\001\076\001\077\001\078\001\
\079\001\080\001\255\255\082\001\255\255\084\001\085\001\000\000\
\057\004\088\001\089\001\090\001\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\105\001\106\001\255\255\255\255\109\001\110\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\091\004\255\255\255\255\124\001\125\001\126\001\
\127\001\128\001\255\255\130\001\131\001\132\001\255\255\134\001\
\135\001\136\001\137\001\138\001\139\001\140\001\141\001\142\001\
\143\001\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\151\001\255\255\153\001\154\001\155\001\255\255\157\001\158\001\
\159\001\160\001\161\001\162\001\163\001\164\001\165\001\166\001\
\167\001\168\001\255\255\255\255\255\255\255\255\001\001\002\001\
\003\001\004\001\255\255\006\001\255\255\008\001\255\255\255\255\
\011\001\012\001\013\001\014\001\015\001\016\001\017\001\255\255\
\019\001\020\001\021\001\022\001\023\001\024\001\025\001\255\255\
\027\001\028\001\029\001\255\255\255\255\255\255\255\255\034\001\
\035\001\255\255\037\001\038\001\255\255\040\001\041\001\042\001\
\255\255\044\001\255\255\046\001\255\255\255\255\049\001\050\001\
\255\255\052\001\255\255\054\001\055\001\056\001\057\001\058\001\
\059\001\060\001\061\001\062\001\063\001\064\001\255\255\066\001\
\255\255\068\001\069\001\070\001\255\255\255\255\073\001\074\001\
\075\001\076\001\077\001\078\001\079\001\080\001\255\255\082\001\
\000\000\084\001\085\001\255\255\255\255\088\001\089\001\090\001\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\098\001\
\099\001\100\001\101\001\102\001\103\001\104\001\105\001\106\001\
\255\255\255\255\109\001\110\001\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\120\001\121\001\255\255\
\255\255\124\001\125\001\126\001\127\001\128\001\255\255\130\001\
\131\001\132\001\255\255\134\001\135\001\136\001\137\001\138\001\
\139\001\140\001\141\001\142\001\143\001\144\001\145\001\146\001\
\255\255\148\001\255\255\150\001\151\001\255\255\153\001\154\001\
\155\001\156\001\157\001\158\001\159\001\160\001\161\001\162\001\
\163\001\164\001\165\001\166\001\167\001\168\001\255\255\255\255\
\001\001\002\001\003\001\004\001\255\255\006\001\255\255\008\001\
\255\255\255\255\011\001\012\001\013\001\014\001\015\001\016\001\
\017\001\255\255\019\001\020\001\021\001\022\001\023\001\024\001\
\025\001\255\255\027\001\028\001\029\001\255\255\031\001\032\001\
\033\001\034\001\035\001\255\255\037\001\038\001\255\255\040\001\
\041\001\042\001\255\255\044\001\255\255\046\001\255\255\255\255\
\049\001\050\001\255\255\052\001\255\255\054\001\055\001\056\001\
\057\001\058\001\059\001\060\001\061\001\062\001\063\001\064\001\
\255\255\066\001\255\255\068\001\069\001\070\001\255\255\255\255\
\073\001\074\001\075\001\076\001\077\001\078\001\079\001\080\001\
\000\000\082\001\255\255\084\001\085\001\255\255\255\255\088\001\
\089\001\090\001\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\098\001\099\001\100\001\101\001\102\001\103\001\104\001\
\105\001\106\001\255\255\255\255\109\001\110\001\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\124\001\125\001\126\001\127\001\128\001\
\255\255\130\001\131\001\132\001\255\255\134\001\135\001\136\001\
\137\001\138\001\139\001\140\001\141\001\142\001\143\001\144\001\
\145\001\146\001\255\255\148\001\255\255\150\001\151\001\255\255\
\153\001\154\001\155\001\255\255\157\001\158\001\159\001\160\001\
\161\001\162\001\163\001\164\001\165\001\166\001\167\001\168\001\
\255\255\001\001\002\001\003\001\004\001\255\255\006\001\255\255\
\008\001\255\255\255\255\255\255\012\001\013\001\014\001\015\001\
\016\001\017\001\255\255\019\001\020\001\021\001\022\001\023\001\
\024\001\025\001\255\255\027\001\028\001\255\255\030\001\031\001\
\032\001\033\001\034\001\035\001\255\255\037\001\038\001\255\255\
\040\001\041\001\042\001\255\255\044\001\255\255\046\001\255\255\
\255\255\049\001\050\001\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\060\001\061\001\062\001\063\001\
\064\001\255\255\066\001\255\255\068\001\069\001\255\255\255\255\
\255\255\073\001\074\001\075\001\255\255\077\001\078\001\079\001\
\080\001\000\000\082\001\083\001\084\001\085\001\255\255\255\255\
\088\001\089\001\090\001\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\105\001\106\001\255\255\255\255\109\001\110\001\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\124\001\125\001\126\001\127\001\
\128\001\255\255\130\001\131\001\132\001\255\255\134\001\135\001\
\136\001\137\001\138\001\139\001\140\001\141\001\142\001\143\001\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\151\001\
\255\255\153\001\154\001\255\255\156\001\157\001\158\001\159\001\
\160\001\161\001\162\001\163\001\164\001\165\001\166\001\255\255\
\168\001\001\001\002\001\003\001\004\001\255\255\006\001\255\255\
\008\001\255\255\255\255\011\001\012\001\013\001\014\001\015\001\
\016\001\017\001\255\255\019\001\020\001\021\001\022\001\023\001\
\024\001\025\001\255\255\027\001\028\001\029\001\255\255\255\255\
\255\255\255\255\255\255\035\001\255\255\037\001\038\001\255\255\
\040\001\041\001\042\001\255\255\044\001\255\255\046\001\255\255\
\255\255\049\001\050\001\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\060\001\061\001\062\001\063\001\
\064\001\255\255\066\001\255\255\068\001\069\001\070\001\255\255\
\255\255\073\001\074\001\075\001\076\001\077\001\078\001\079\001\
\080\001\000\000\082\001\255\255\084\001\085\001\255\255\255\255\
\088\001\089\001\090\001\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\105\001\106\001\255\255\255\255\109\001\110\001\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\124\001\125\001\126\001\127\001\
\128\001\255\255\130\001\131\001\132\001\255\255\134\001\135\001\
\136\001\137\001\138\001\139\001\140\001\141\001\142\001\143\001\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\151\001\
\255\255\153\001\154\001\155\001\255\255\157\001\158\001\159\001\
\160\001\161\001\162\001\163\001\164\001\165\001\166\001\167\001\
\168\001\255\255\001\001\002\001\003\001\004\001\255\255\006\001\
\255\255\008\001\255\255\255\255\255\255\012\001\013\001\014\001\
\015\001\016\001\017\001\255\255\019\001\020\001\021\001\022\001\
\023\001\024\001\025\001\255\255\027\001\028\001\255\255\255\255\
\031\001\032\001\033\001\034\001\035\001\255\255\037\001\038\001\
\255\255\040\001\041\001\042\001\255\255\044\001\255\255\046\001\
\255\255\255\255\049\001\050\001\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\060\001\061\001\062\001\
\063\001\064\001\255\255\066\001\255\255\068\001\069\001\255\255\
\255\255\255\255\073\001\074\001\075\001\255\255\077\001\078\001\
\079\001\080\001\000\000\082\001\255\255\084\001\085\001\255\255\
\255\255\088\001\089\001\090\001\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\105\001\106\001\255\255\255\255\109\001\110\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\124\001\125\001\126\001\
\127\001\128\001\255\255\130\001\131\001\132\001\255\255\134\001\
\135\001\136\001\137\001\138\001\139\001\140\001\141\001\142\001\
\143\001\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\151\001\255\255\153\001\154\001\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\162\001\163\001\164\001\165\001\166\001\
\255\255\168\001\001\001\002\001\003\001\004\001\255\255\006\001\
\255\255\008\001\255\255\255\255\255\255\012\001\013\001\255\255\
\015\001\016\001\255\255\255\255\019\001\020\001\021\001\022\001\
\023\001\024\001\025\001\255\255\027\001\028\001\255\255\255\255\
\031\001\032\001\033\001\034\001\035\001\255\255\037\001\038\001\
\255\255\040\001\041\001\042\001\255\255\044\001\255\255\046\001\
\255\255\255\255\049\001\050\001\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\060\001\061\001\062\001\
\063\001\064\001\255\255\066\001\255\255\068\001\069\001\255\255\
\255\255\255\255\073\001\074\001\075\001\255\255\077\001\078\001\
\079\001\080\001\000\000\082\001\083\001\084\001\085\001\255\255\
\255\255\088\001\089\001\090\001\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\105\001\106\001\255\255\255\255\109\001\110\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\124\001\125\001\126\001\
\127\001\128\001\255\255\130\001\131\001\132\001\255\255\134\001\
\135\001\136\001\137\001\138\001\139\001\140\001\141\001\255\255\
\143\001\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\151\001\255\255\153\001\154\001\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\162\001\163\001\164\001\165\001\166\001\
\255\255\168\001\255\255\001\001\002\001\003\001\004\001\255\255\
\006\001\255\255\008\001\255\255\255\255\255\255\012\001\013\001\
\014\001\015\001\016\001\017\001\255\255\019\001\020\001\255\255\
\022\001\023\001\024\001\025\001\255\255\027\001\028\001\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\037\001\
\038\001\255\255\040\001\041\001\042\001\255\255\044\001\255\255\
\046\001\255\255\255\255\049\001\050\001\255\255\052\001\255\255\
\054\001\055\001\056\001\057\001\058\001\059\001\060\001\061\001\
\062\001\063\001\064\001\255\255\066\001\255\255\068\001\069\001\
\255\255\255\255\255\255\073\001\074\001\075\001\255\255\077\001\
\078\001\079\001\080\001\000\000\082\001\255\255\084\001\085\001\
\255\255\255\255\088\001\089\001\090\001\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\098\001\099\001\100\001\101\001\
\102\001\103\001\104\001\105\001\106\001\255\255\255\255\109\001\
\110\001\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\124\001\125\001\
\126\001\127\001\128\001\255\255\130\001\131\001\132\001\255\255\
\134\001\135\001\136\001\137\001\138\001\139\001\140\001\141\001\
\142\001\143\001\144\001\145\001\146\001\255\255\148\001\255\255\
\150\001\151\001\255\255\153\001\154\001\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\162\001\163\001\164\001\165\001\
\166\001\255\255\168\001\001\001\002\001\003\001\004\001\255\255\
\006\001\255\255\008\001\255\255\255\255\255\255\012\001\013\001\
\014\001\015\001\016\001\017\001\255\255\019\001\020\001\255\255\
\022\001\023\001\024\001\025\001\255\255\027\001\028\001\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\037\001\
\038\001\255\255\040\001\041\001\042\001\255\255\044\001\255\255\
\046\001\255\255\255\255\049\001\050\001\255\255\052\001\255\255\
\054\001\055\001\056\001\057\001\058\001\059\001\060\001\061\001\
\062\001\063\001\064\001\255\255\066\001\255\255\068\001\069\001\
\255\255\255\255\255\255\073\001\074\001\075\001\255\255\077\001\
\078\001\079\001\080\001\000\000\082\001\255\255\084\001\085\001\
\255\255\255\255\088\001\089\001\090\001\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\098\001\099\001\100\001\101\001\
\102\001\103\001\104\001\105\001\106\001\255\255\255\255\109\001\
\110\001\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\124\001\125\001\
\126\001\127\001\128\001\255\255\130\001\131\001\132\001\255\255\
\134\001\135\001\136\001\137\001\138\001\139\001\140\001\141\001\
\142\001\143\001\144\001\145\001\146\001\255\255\148\001\255\255\
\150\001\151\001\255\255\153\001\154\001\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\162\001\163\001\164\001\165\001\
\166\001\255\255\168\001\255\255\001\001\002\001\003\001\004\001\
\255\255\006\001\255\255\008\001\255\255\255\255\255\255\012\001\
\013\001\014\001\015\001\016\001\017\001\255\255\019\001\020\001\
\255\255\022\001\023\001\024\001\025\001\255\255\027\001\028\001\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\040\001\041\001\042\001\255\255\044\001\
\255\255\046\001\255\255\255\255\049\001\050\001\255\255\052\001\
\255\255\054\001\055\001\056\001\057\001\058\001\059\001\060\001\
\061\001\062\001\063\001\064\001\255\255\066\001\255\255\068\001\
\069\001\255\255\255\255\255\255\073\001\074\001\075\001\255\255\
\077\001\078\001\079\001\080\001\000\000\082\001\255\255\084\001\
\085\001\255\255\255\255\088\001\089\001\090\001\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\098\001\099\001\100\001\
\101\001\102\001\103\001\104\001\105\001\106\001\255\255\255\255\
\109\001\110\001\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\124\001\
\125\001\126\001\127\001\128\001\255\255\130\001\131\001\132\001\
\255\255\134\001\135\001\136\001\137\001\138\001\139\001\140\001\
\141\001\142\001\143\001\144\001\145\001\146\001\255\255\148\001\
\255\255\150\001\151\001\255\255\153\001\154\001\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\162\001\163\001\164\001\
\165\001\166\001\255\255\168\001\001\001\002\001\003\001\004\001\
\255\255\006\001\255\255\008\001\255\255\255\255\255\255\012\001\
\013\001\014\001\015\001\016\001\017\001\255\255\019\001\020\001\
\255\255\022\001\023\001\024\001\025\001\255\255\027\001\028\001\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\040\001\041\001\042\001\255\255\044\001\
\255\255\046\001\255\255\255\255\049\001\050\001\255\255\052\001\
\255\255\054\001\055\001\056\001\057\001\058\001\059\001\060\001\
\061\001\062\001\063\001\064\001\255\255\066\001\255\255\068\001\
\069\001\255\255\255\255\255\255\073\001\074\001\075\001\255\255\
\077\001\078\001\079\001\080\001\000\000\082\001\255\255\084\001\
\085\001\255\255\255\255\088\001\089\001\090\001\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\098\001\099\001\100\001\
\101\001\102\001\103\001\104\001\105\001\106\001\255\255\255\255\
\109\001\110\001\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\124\001\
\125\001\126\001\127\001\128\001\255\255\130\001\131\001\132\001\
\255\255\134\001\135\001\136\001\137\001\138\001\139\001\140\001\
\141\001\142\001\143\001\144\001\145\001\146\001\255\255\148\001\
\255\255\150\001\151\001\255\255\153\001\154\001\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\162\001\163\001\164\001\
\165\001\166\001\255\255\168\001\255\255\001\001\002\001\003\001\
\004\001\255\255\006\001\255\255\008\001\255\255\255\255\255\255\
\012\001\013\001\014\001\015\001\016\001\017\001\255\255\019\001\
\020\001\255\255\022\001\023\001\024\001\025\001\255\255\027\001\
\028\001\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\040\001\041\001\042\001\255\255\
\044\001\255\255\046\001\255\255\255\255\049\001\050\001\255\255\
\052\001\255\255\054\001\055\001\056\001\057\001\058\001\059\001\
\060\001\061\001\062\001\063\001\064\001\255\255\066\001\255\255\
\068\001\069\001\255\255\255\255\255\255\073\001\074\001\075\001\
\255\255\077\001\078\001\079\001\080\001\000\000\082\001\255\255\
\084\001\085\001\255\255\255\255\088\001\089\001\090\001\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\098\001\099\001\
\100\001\101\001\102\001\103\001\104\001\105\001\106\001\255\255\
\255\255\109\001\110\001\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\124\001\125\001\126\001\127\001\128\001\255\255\130\001\131\001\
\132\001\255\255\134\001\135\001\136\001\137\001\138\001\139\001\
\140\001\141\001\142\001\143\001\144\001\145\001\146\001\255\255\
\148\001\255\255\150\001\151\001\255\255\153\001\154\001\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\162\001\163\001\
\164\001\165\001\166\001\255\255\168\001\001\001\002\001\003\001\
\004\001\255\255\006\001\255\255\008\001\255\255\255\255\255\255\
\012\001\013\001\255\255\015\001\016\001\255\255\255\255\019\001\
\020\001\255\255\022\001\023\001\024\001\025\001\255\255\027\001\
\028\001\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\040\001\041\001\042\001\255\255\
\044\001\255\255\046\001\255\255\255\255\049\001\050\001\255\255\
\052\001\255\255\054\001\055\001\056\001\057\001\058\001\059\001\
\060\001\061\001\062\001\063\001\064\001\255\255\066\001\255\255\
\068\001\069\001\255\255\255\255\255\255\073\001\074\001\075\001\
\255\255\077\001\078\001\079\001\080\001\000\000\082\001\255\255\
\084\001\085\001\255\255\255\255\088\001\089\001\090\001\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\098\001\099\001\
\100\001\101\001\102\001\103\001\104\001\105\001\106\001\255\255\
\255\255\109\001\110\001\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\124\001\125\001\126\001\127\001\128\001\255\255\130\001\131\001\
\132\001\255\255\134\001\135\001\136\001\137\001\138\001\139\001\
\140\001\141\001\255\255\143\001\144\001\145\001\146\001\255\255\
\148\001\255\255\150\001\151\001\255\255\153\001\154\001\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\162\001\163\001\
\164\001\165\001\166\001\255\255\168\001\255\255\001\001\002\001\
\003\001\004\001\255\255\006\001\255\255\008\001\255\255\255\255\
\255\255\012\001\013\001\255\255\015\001\016\001\255\255\255\255\
\019\001\020\001\255\255\022\001\023\001\024\001\025\001\255\255\
\027\001\028\001\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\040\001\041\001\042\001\
\255\255\044\001\255\255\046\001\255\255\255\255\049\001\050\001\
\255\255\052\001\255\255\054\001\055\001\056\001\057\001\058\001\
\059\001\060\001\061\001\062\001\063\001\064\001\255\255\066\001\
\255\255\068\001\069\001\255\255\255\255\255\255\073\001\074\001\
\075\001\255\255\077\001\078\001\079\001\080\001\000\000\082\001\
\255\255\084\001\085\001\255\255\255\255\088\001\089\001\090\001\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\098\001\
\099\001\100\001\101\001\102\001\103\001\104\001\105\001\106\001\
\255\255\255\255\109\001\110\001\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\124\001\125\001\126\001\127\001\128\001\255\255\130\001\
\131\001\132\001\255\255\134\001\135\001\136\001\137\001\138\001\
\139\001\140\001\141\001\255\255\143\001\144\001\145\001\146\001\
\255\255\148\001\255\255\150\001\151\001\255\255\153\001\154\001\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\162\001\
\163\001\164\001\165\001\166\001\255\255\168\001\001\001\002\001\
\003\001\004\001\255\255\006\001\255\255\008\001\255\255\255\255\
\255\255\012\001\013\001\255\255\015\001\016\001\255\255\255\255\
\019\001\020\001\255\255\022\001\023\001\024\001\025\001\255\255\
\027\001\028\001\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\040\001\041\001\042\001\
\255\255\044\001\255\255\046\001\255\255\255\255\049\001\050\001\
\255\255\052\001\255\255\054\001\055\001\056\001\057\001\058\001\
\059\001\060\001\061\001\062\001\063\001\064\001\255\255\066\001\
\255\255\068\001\069\001\255\255\255\255\255\255\073\001\074\001\
\075\001\255\255\077\001\078\001\079\001\080\001\000\000\082\001\
\255\255\084\001\085\001\255\255\255\255\088\001\089\001\090\001\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\098\001\
\099\001\100\001\101\001\102\001\103\001\104\001\105\001\106\001\
\255\255\255\255\109\001\110\001\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\124\001\125\001\126\001\127\001\128\001\255\255\130\001\
\131\001\132\001\255\255\134\001\135\001\136\001\137\001\138\001\
\139\001\140\001\141\001\255\255\143\001\144\001\145\001\146\001\
\255\255\148\001\255\255\150\001\151\001\255\255\153\001\154\001\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\162\001\
\163\001\164\001\165\001\166\001\255\255\168\001\255\255\001\001\
\002\001\003\001\004\001\255\255\006\001\255\255\008\001\255\255\
\255\255\255\255\012\001\013\001\255\255\015\001\016\001\255\255\
\255\255\019\001\020\001\255\255\022\001\023\001\024\001\025\001\
\255\255\027\001\028\001\255\255\255\255\255\255\255\255\255\255\
\255\255\035\001\255\255\037\001\038\001\255\255\040\001\041\001\
\042\001\255\255\044\001\255\255\046\001\255\255\255\255\049\001\
\050\001\255\255\052\001\255\255\054\001\055\001\056\001\057\001\
\058\001\059\001\060\001\061\001\062\001\063\001\064\001\255\255\
\066\001\255\255\068\001\069\001\255\255\255\255\255\255\073\001\
\074\001\075\001\255\255\077\001\078\001\079\001\080\001\000\000\
\082\001\255\255\084\001\085\001\255\255\255\255\088\001\089\001\
\090\001\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\098\001\099\001\100\001\101\001\102\001\103\001\104\001\105\001\
\106\001\255\255\255\255\109\001\110\001\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\124\001\125\001\126\001\127\001\128\001\255\255\
\130\001\131\001\132\001\255\255\134\001\135\001\136\001\137\001\
\138\001\139\001\140\001\141\001\255\255\143\001\144\001\145\001\
\146\001\255\255\148\001\255\255\150\001\151\001\255\255\153\001\
\154\001\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\162\001\163\001\164\001\165\001\166\001\255\255\168\001\001\001\
\002\001\003\001\004\001\255\255\006\001\255\255\008\001\255\255\
\255\255\255\255\012\001\013\001\255\255\015\001\016\001\255\255\
\255\255\019\001\020\001\255\255\022\001\023\001\024\001\025\001\
\255\255\027\001\028\001\255\255\255\255\255\255\255\255\255\255\
\255\255\035\001\255\255\037\001\038\001\255\255\040\001\041\001\
\042\001\255\255\044\001\255\255\046\001\255\255\255\255\049\001\
\050\001\255\255\052\001\255\255\054\001\055\001\056\001\057\001\
\058\001\059\001\060\001\061\001\062\001\063\001\064\001\255\255\
\066\001\255\255\068\001\069\001\255\255\255\255\255\255\073\001\
\074\001\075\001\255\255\077\001\078\001\079\001\080\001\000\000\
\082\001\255\255\084\001\085\001\255\255\255\255\088\001\089\001\
\090\001\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\098\001\099\001\100\001\101\001\102\001\103\001\104\001\105\001\
\106\001\255\255\255\255\109\001\110\001\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\124\001\125\001\126\001\127\001\128\001\255\255\
\130\001\131\001\132\001\255\255\134\001\135\001\136\001\137\001\
\138\001\139\001\140\001\141\001\255\255\143\001\144\001\145\001\
\146\001\255\255\148\001\255\255\150\001\151\001\255\255\153\001\
\154\001\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\162\001\163\001\164\001\165\001\166\001\255\255\168\001\255\255\
\001\001\002\001\003\001\004\001\255\255\006\001\255\255\008\001\
\255\255\255\255\255\255\255\255\013\001\014\001\015\001\255\255\
\017\001\255\255\255\255\020\001\255\255\022\001\023\001\024\001\
\025\001\255\255\027\001\028\001\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\040\001\
\041\001\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\052\001\255\255\054\001\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\069\001\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\000\000\082\001\255\255\255\255\255\255\255\255\255\255\088\001\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\098\001\099\001\100\001\101\001\102\001\103\001\104\001\
\105\001\255\255\255\255\255\255\255\255\110\001\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\125\001\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\142\001\255\255\144\001\
\145\001\146\001\255\255\148\001\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\255\255\165\001\166\001\255\255\168\001\
\001\001\002\001\003\001\004\001\255\255\006\001\255\255\008\001\
\255\255\255\255\255\255\255\255\013\001\014\001\015\001\255\255\
\017\001\255\255\255\255\020\001\255\255\022\001\023\001\024\001\
\025\001\255\255\027\001\028\001\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\040\001\
\041\001\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\052\001\255\255\054\001\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\000\000\082\001\255\255\255\255\255\255\255\255\255\255\088\001\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\098\001\099\001\100\001\101\001\102\001\103\001\104\001\
\105\001\255\255\255\255\255\255\255\255\110\001\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\125\001\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\142\001\255\255\144\001\
\145\001\146\001\255\255\148\001\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\255\255\165\001\166\001\255\255\168\001\
\255\255\001\001\002\001\003\001\004\001\255\255\006\001\255\255\
\008\001\255\255\255\255\255\255\255\255\013\001\014\001\015\001\
\255\255\017\001\255\255\255\255\020\001\255\255\022\001\023\001\
\024\001\025\001\255\255\027\001\028\001\255\255\255\255\255\255\
\255\255\255\255\255\255\035\001\255\255\037\001\038\001\255\255\
\040\001\041\001\042\001\255\255\255\255\255\255\046\001\255\255\
\255\255\255\255\255\255\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\255\255\255\255\255\255\255\255\
\255\255\255\255\066\001\255\255\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\079\001\
\080\001\000\000\082\001\255\255\255\255\255\255\255\255\255\255\
\088\001\089\001\255\255\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\105\001\255\255\255\255\255\255\255\255\110\001\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\255\255\125\001\126\001\127\001\
\255\255\255\255\130\001\131\001\255\255\255\255\134\001\135\001\
\136\001\137\001\138\001\255\255\255\255\141\001\142\001\255\255\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\162\001\163\001\255\255\165\001\166\001\255\255\
\168\001\001\001\002\001\003\001\004\001\255\255\006\001\255\255\
\008\001\255\255\255\255\255\255\255\255\013\001\014\001\015\001\
\255\255\017\001\255\255\255\255\020\001\255\255\022\001\023\001\
\024\001\025\001\255\255\027\001\028\001\255\255\255\255\255\255\
\255\255\255\255\255\255\035\001\255\255\037\001\038\001\255\255\
\040\001\041\001\042\001\255\255\255\255\255\255\046\001\255\255\
\255\255\255\255\255\255\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\255\255\255\255\255\255\255\255\
\255\255\255\255\066\001\255\255\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\079\001\
\080\001\000\000\082\001\255\255\255\255\255\255\255\255\255\255\
\088\001\089\001\255\255\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\255\255\255\255\255\255\255\255\255\255\110\001\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\255\255\125\001\126\001\127\001\
\255\255\255\255\130\001\131\001\255\255\255\255\134\001\135\001\
\136\001\137\001\138\001\255\255\255\255\141\001\142\001\255\255\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\162\001\163\001\255\255\165\001\166\001\255\255\
\168\001\255\255\001\001\002\001\003\001\004\001\255\255\006\001\
\255\255\008\001\255\255\255\255\255\255\255\255\013\001\014\001\
\015\001\255\255\017\001\255\255\255\255\020\001\255\255\022\001\
\023\001\024\001\025\001\255\255\027\001\028\001\255\255\255\255\
\255\255\255\255\255\255\255\255\035\001\255\255\037\001\038\001\
\255\255\040\001\041\001\042\001\255\255\255\255\255\255\046\001\
\255\255\255\255\255\255\255\255\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\255\255\255\255\255\255\
\255\255\255\255\255\255\066\001\255\255\068\001\255\255\255\255\
\255\255\255\255\255\255\074\001\075\001\255\255\255\255\255\255\
\079\001\080\001\000\000\082\001\255\255\255\255\255\255\255\255\
\255\255\088\001\089\001\255\255\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\255\255\255\255\255\255\255\255\255\255\110\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\255\255\125\001\126\001\
\127\001\255\255\255\255\130\001\131\001\255\255\255\255\134\001\
\135\001\136\001\137\001\138\001\255\255\255\255\141\001\142\001\
\255\255\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\255\255\255\255\255\255\154\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\162\001\163\001\255\255\165\001\166\001\
\255\255\168\001\001\001\002\001\003\001\004\001\255\255\006\001\
\255\255\255\255\255\255\255\255\255\255\255\255\013\001\014\001\
\015\001\255\255\017\001\255\255\255\255\020\001\255\255\022\001\
\023\001\024\001\025\001\255\255\027\001\028\001\255\255\255\255\
\255\255\255\255\255\255\255\255\035\001\255\255\037\001\038\001\
\255\255\040\001\041\001\042\001\255\255\255\255\255\255\046\001\
\255\255\255\255\255\255\255\255\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\255\255\255\255\255\255\
\255\255\255\255\255\255\066\001\255\255\068\001\255\255\255\255\
\255\255\255\255\255\255\074\001\075\001\255\255\255\255\255\255\
\079\001\080\001\000\000\082\001\255\255\255\255\255\255\255\255\
\255\255\088\001\089\001\255\255\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\255\255\255\255\255\255\255\255\255\255\110\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\255\255\125\001\126\001\
\127\001\255\255\255\255\130\001\131\001\255\255\255\255\134\001\
\135\001\136\001\137\001\138\001\255\255\255\255\141\001\142\001\
\255\255\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\255\255\255\255\255\255\154\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\162\001\163\001\255\255\165\001\166\001\
\255\255\168\001\255\255\255\255\002\001\003\001\004\001\255\255\
\006\001\255\255\255\255\255\255\255\255\255\255\255\255\013\001\
\014\001\015\001\255\255\017\001\255\255\255\255\020\001\255\255\
\022\001\023\001\024\001\025\001\255\255\027\001\028\001\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\037\001\
\038\001\255\255\040\001\041\001\042\001\255\255\255\255\255\255\
\046\001\255\255\255\255\255\255\255\255\255\255\052\001\255\255\
\054\001\055\001\056\001\057\001\058\001\059\001\255\255\255\255\
\255\255\255\255\255\255\255\255\066\001\255\255\068\001\255\255\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\079\001\080\001\000\000\082\001\255\255\255\255\255\255\
\255\255\255\255\088\001\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\098\001\099\001\100\001\101\001\
\102\001\103\001\255\255\255\255\255\255\255\255\255\255\255\255\
\110\001\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\255\255\125\001\
\126\001\127\001\255\255\255\255\130\001\131\001\255\255\255\255\
\134\001\135\001\136\001\137\001\138\001\255\255\255\255\141\001\
\142\001\255\255\144\001\145\001\146\001\255\255\148\001\255\255\
\150\001\255\255\255\255\255\255\154\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\162\001\163\001\255\255\165\001\
\166\001\255\255\168\001\255\255\002\001\003\001\004\001\255\255\
\006\001\255\255\255\255\255\255\255\255\255\255\255\255\013\001\
\014\001\015\001\255\255\017\001\255\255\255\255\020\001\255\255\
\255\255\023\001\024\001\025\001\255\255\027\001\028\001\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\037\001\
\038\001\255\255\040\001\041\001\042\001\255\255\255\255\255\255\
\046\001\255\255\255\255\255\255\255\255\255\255\052\001\255\255\
\054\001\055\001\056\001\057\001\058\001\059\001\255\255\255\255\
\255\255\255\255\255\255\255\255\066\001\255\255\068\001\255\255\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\079\001\080\001\000\000\082\001\255\255\255\255\255\255\
\255\255\255\255\088\001\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\098\001\099\001\100\001\101\001\
\102\001\103\001\255\255\255\255\255\255\255\255\255\255\255\255\
\110\001\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\255\255\125\001\
\126\001\127\001\255\255\255\255\130\001\131\001\255\255\255\255\
\134\001\135\001\136\001\137\001\138\001\255\255\255\255\141\001\
\142\001\255\255\144\001\145\001\146\001\255\255\148\001\255\255\
\150\001\255\255\255\255\255\255\154\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\162\001\163\001\255\255\165\001\
\166\001\255\255\168\001\255\255\255\255\002\001\003\001\004\001\
\255\255\006\001\255\255\255\255\255\255\255\255\255\255\255\255\
\013\001\014\001\015\001\255\255\017\001\255\255\255\255\020\001\
\255\255\255\255\023\001\024\001\025\001\255\255\027\001\028\001\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\040\001\041\001\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\052\001\
\255\255\054\001\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\000\000\082\001\255\255\255\255\
\255\255\255\255\255\255\088\001\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\098\001\099\001\100\001\
\101\001\102\001\103\001\255\255\255\255\255\255\255\255\255\255\
\255\255\110\001\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\125\001\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\142\001\255\255\144\001\145\001\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\162\001\163\001\255\255\
\165\001\166\001\255\255\168\001\255\255\002\001\003\001\004\001\
\255\255\006\001\255\255\255\255\255\255\255\255\255\255\255\255\
\013\001\014\001\015\001\255\255\017\001\255\255\255\255\020\001\
\255\255\255\255\023\001\024\001\025\001\255\255\027\001\028\001\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\040\001\041\001\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\052\001\
\255\255\054\001\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\000\000\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\098\001\099\001\100\001\
\101\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\110\001\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\125\001\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\142\001\255\255\144\001\145\001\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\162\001\163\001\255\255\
\165\001\166\001\255\255\168\001\255\255\255\255\002\001\003\001\
\004\001\255\255\006\001\255\255\255\255\255\255\255\255\255\255\
\255\255\013\001\014\001\015\001\255\255\017\001\255\255\255\255\
\020\001\255\255\255\255\023\001\024\001\025\001\255\255\027\001\
\028\001\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\040\001\041\001\042\001\255\255\
\255\255\255\255\046\001\255\255\255\255\255\255\255\255\255\255\
\052\001\255\255\054\001\055\001\056\001\057\001\058\001\059\001\
\255\255\255\255\255\255\255\255\255\255\255\255\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\079\001\080\001\000\000\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\098\001\099\001\
\100\001\101\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\110\001\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\255\255\125\001\126\001\127\001\255\255\255\255\130\001\131\001\
\255\255\255\255\134\001\135\001\136\001\137\001\138\001\255\255\
\255\255\141\001\142\001\255\255\144\001\145\001\146\001\255\255\
\148\001\255\255\150\001\255\255\255\255\255\255\154\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\162\001\163\001\
\255\255\165\001\166\001\255\255\168\001\255\255\002\001\003\001\
\004\001\255\255\006\001\255\255\255\255\255\255\255\255\255\255\
\255\255\013\001\014\001\015\001\255\255\017\001\255\255\255\255\
\020\001\255\255\255\255\023\001\024\001\025\001\255\255\027\001\
\028\001\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\040\001\041\001\042\001\255\255\
\255\255\255\255\046\001\255\255\255\255\255\255\255\255\255\255\
\052\001\255\255\054\001\055\001\056\001\057\001\058\001\059\001\
\255\255\255\255\255\255\255\255\255\255\255\255\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\079\001\080\001\000\000\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\098\001\099\001\
\100\001\101\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\255\255\125\001\126\001\127\001\255\255\255\255\130\001\131\001\
\255\255\255\255\134\001\135\001\136\001\137\001\138\001\255\255\
\255\255\141\001\142\001\255\255\144\001\145\001\146\001\255\255\
\148\001\255\255\150\001\255\255\255\255\255\255\154\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\162\001\163\001\
\255\255\165\001\166\001\255\255\168\001\255\255\255\255\002\001\
\003\001\004\001\255\255\006\001\255\255\255\255\255\255\255\255\
\255\255\255\255\013\001\014\001\015\001\255\255\017\001\255\255\
\255\255\020\001\255\255\255\255\023\001\024\001\025\001\255\255\
\027\001\028\001\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\040\001\041\001\042\001\
\255\255\255\255\255\255\046\001\255\255\255\255\255\255\255\255\
\255\255\052\001\255\255\054\001\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\079\001\080\001\000\000\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\098\001\
\099\001\100\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\125\001\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\134\001\135\001\136\001\137\001\138\001\
\255\255\255\255\141\001\142\001\255\255\144\001\145\001\146\001\
\255\255\148\001\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\162\001\
\163\001\255\255\165\001\166\001\255\255\168\001\255\255\002\001\
\003\001\004\001\255\255\006\001\255\255\255\255\255\255\255\255\
\255\255\255\255\013\001\014\001\015\001\255\255\017\001\255\255\
\255\255\020\001\255\255\255\255\023\001\024\001\025\001\255\255\
\027\001\028\001\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\040\001\041\001\042\001\
\255\255\255\255\255\255\046\001\255\255\255\255\255\255\255\255\
\255\255\052\001\255\255\054\001\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\079\001\080\001\000\000\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\098\001\
\099\001\100\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\125\001\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\134\001\135\001\136\001\137\001\138\001\
\255\255\255\255\141\001\142\001\255\255\144\001\145\001\146\001\
\255\255\148\001\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\162\001\
\163\001\255\255\165\001\166\001\255\255\168\001\255\255\255\255\
\002\001\003\001\004\001\255\255\006\001\255\255\255\255\255\255\
\255\255\255\255\255\255\013\001\014\001\015\001\255\255\017\001\
\255\255\255\255\020\001\255\255\255\255\023\001\024\001\025\001\
\255\255\027\001\028\001\255\255\255\255\255\255\255\255\255\255\
\255\255\035\001\255\255\037\001\038\001\255\255\255\255\041\001\
\042\001\255\255\255\255\255\255\046\001\255\255\255\255\255\255\
\255\255\255\255\052\001\255\255\054\001\055\001\056\001\057\001\
\058\001\059\001\255\255\255\255\255\255\255\255\255\255\255\255\
\066\001\255\255\068\001\255\255\255\255\255\255\255\255\255\255\
\074\001\075\001\255\255\255\255\255\255\079\001\080\001\000\000\
\082\001\255\255\255\255\255\255\255\255\255\255\255\255\089\001\
\255\255\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\098\001\099\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\255\255\125\001\126\001\127\001\255\255\255\255\
\130\001\131\001\255\255\255\255\134\001\135\001\136\001\137\001\
\138\001\255\255\255\255\141\001\142\001\255\255\144\001\145\001\
\146\001\255\255\148\001\255\255\150\001\255\255\255\255\255\255\
\154\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\162\001\163\001\255\255\165\001\166\001\255\255\168\001\255\255\
\002\001\003\001\004\001\255\255\006\001\255\255\255\255\255\255\
\255\255\255\255\255\255\013\001\014\001\015\001\255\255\017\001\
\255\255\255\255\020\001\255\255\255\255\023\001\024\001\025\001\
\255\255\027\001\028\001\255\255\255\255\255\255\255\255\255\255\
\255\255\035\001\255\255\037\001\038\001\255\255\255\255\041\001\
\042\001\255\255\255\255\255\255\046\001\255\255\255\255\255\255\
\255\255\255\255\052\001\255\255\054\001\055\001\056\001\057\001\
\058\001\059\001\255\255\255\255\255\255\255\255\255\255\255\255\
\066\001\255\255\068\001\255\255\255\255\255\255\255\255\255\255\
\074\001\075\001\255\255\255\255\255\255\079\001\080\001\000\000\
\082\001\255\255\255\255\255\255\255\255\255\255\255\255\089\001\
\255\255\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\098\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\255\255\125\001\126\001\127\001\255\255\255\255\
\130\001\131\001\255\255\255\255\134\001\135\001\136\001\137\001\
\138\001\255\255\255\255\141\001\142\001\255\255\144\001\145\001\
\146\001\255\255\148\001\255\255\150\001\255\255\255\255\255\255\
\154\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\162\001\163\001\255\255\165\001\166\001\255\255\168\001\255\255\
\255\255\002\001\003\001\004\001\255\255\006\001\255\255\255\255\
\255\255\255\255\255\255\255\255\013\001\014\001\015\001\255\255\
\017\001\255\255\255\255\020\001\255\255\255\255\255\255\024\001\
\025\001\255\255\027\001\028\001\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\255\255\
\041\001\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\052\001\255\255\054\001\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\000\000\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\125\001\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\142\001\255\255\144\001\
\145\001\146\001\255\255\148\001\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\255\255\165\001\166\001\255\255\168\001\
\255\255\002\001\003\001\004\001\255\255\006\001\255\255\255\255\
\255\255\255\255\255\255\255\255\013\001\014\001\015\001\255\255\
\017\001\255\255\255\255\020\001\255\255\255\255\255\255\024\001\
\025\001\255\255\027\001\028\001\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\255\255\
\041\001\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\052\001\255\255\054\001\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\000\000\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\125\001\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\142\001\255\255\144\001\
\145\001\146\001\255\255\148\001\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\255\255\165\001\166\001\255\255\168\001\
\255\255\255\255\002\001\003\001\004\001\255\255\006\001\255\255\
\255\255\255\255\255\255\255\255\255\255\013\001\014\001\015\001\
\255\255\017\001\255\255\255\255\020\001\255\255\255\255\255\255\
\255\255\025\001\255\255\027\001\028\001\255\255\255\255\255\255\
\255\255\255\255\255\255\035\001\255\255\037\001\038\001\255\255\
\255\255\041\001\042\001\255\255\255\255\255\255\046\001\255\255\
\255\255\255\255\255\255\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\255\255\255\255\255\255\255\255\
\255\255\255\255\066\001\255\255\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\079\001\
\080\001\000\000\082\001\255\255\255\255\255\255\255\255\255\255\
\255\255\089\001\255\255\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\255\255\125\001\126\001\127\001\
\255\255\255\255\130\001\131\001\255\255\255\255\134\001\135\001\
\136\001\137\001\138\001\255\255\255\255\141\001\142\001\255\255\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\162\001\163\001\255\255\165\001\166\001\255\255\
\168\001\255\255\002\001\003\001\004\001\255\255\006\001\255\255\
\255\255\255\255\255\255\255\255\255\255\013\001\014\001\015\001\
\255\255\017\001\255\255\255\255\020\001\255\255\255\255\255\255\
\255\255\255\255\255\255\027\001\028\001\255\255\255\255\255\255\
\255\255\255\255\255\255\035\001\255\255\037\001\038\001\255\255\
\255\255\041\001\042\001\255\255\255\255\255\255\046\001\255\255\
\255\255\255\255\255\255\255\255\052\001\255\255\054\001\055\001\
\056\001\057\001\058\001\059\001\255\255\255\255\255\255\255\255\
\255\255\255\255\066\001\000\000\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\079\001\
\080\001\255\255\082\001\255\255\255\255\255\255\255\255\255\255\
\255\255\089\001\255\255\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\255\255\125\001\126\001\127\001\
\255\255\255\255\130\001\131\001\255\255\255\255\134\001\135\001\
\136\001\137\001\138\001\255\255\255\255\141\001\142\001\255\255\
\144\001\145\001\146\001\255\255\148\001\255\255\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\162\001\163\001\255\255\165\001\166\001\255\255\
\168\001\255\255\255\255\002\001\003\001\004\001\255\255\006\001\
\255\255\255\255\255\255\255\255\255\255\255\255\013\001\014\001\
\015\001\255\255\017\001\255\255\255\255\020\001\255\255\255\255\
\255\255\255\255\255\255\255\255\027\001\028\001\255\255\255\255\
\255\255\255\255\255\255\255\255\035\001\255\255\037\001\038\001\
\255\255\255\255\041\001\042\001\255\255\255\255\255\255\046\001\
\255\255\255\255\255\255\255\255\255\255\052\001\255\255\054\001\
\055\001\056\001\057\001\058\001\059\001\255\255\255\255\255\255\
\255\255\255\255\000\000\066\001\255\255\068\001\255\255\255\255\
\255\255\255\255\255\255\074\001\075\001\255\255\255\255\255\255\
\079\001\080\001\255\255\082\001\255\255\255\255\255\255\255\255\
\255\255\255\255\089\001\255\255\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\255\255\125\001\126\001\
\127\001\255\255\255\255\130\001\131\001\255\255\255\255\134\001\
\135\001\136\001\137\001\138\001\255\255\255\255\141\001\142\001\
\255\255\144\001\145\001\146\001\255\255\148\001\255\255\150\001\
\255\255\255\255\255\255\154\001\255\255\002\001\003\001\004\001\
\255\255\006\001\255\255\162\001\163\001\255\255\165\001\166\001\
\013\001\168\001\015\001\255\255\017\001\255\255\255\255\020\001\
\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\255\255\041\001\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\052\001\
\255\255\054\001\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\000\000\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\255\255\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\255\255\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\255\255\255\255\144\001\145\001\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\162\001\163\001\255\255\
\165\001\166\001\255\255\168\001\002\001\003\001\004\001\255\255\
\006\001\255\255\255\255\255\255\255\255\255\255\255\255\013\001\
\255\255\015\001\255\255\017\001\255\255\255\255\020\001\255\255\
\255\255\255\255\255\255\255\255\255\255\027\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\037\001\
\038\001\255\255\255\255\041\001\042\001\255\255\255\255\255\255\
\046\001\255\255\255\255\255\255\255\255\255\255\052\001\255\255\
\255\255\055\001\056\001\057\001\058\001\059\001\255\255\255\255\
\255\255\255\255\255\255\000\000\066\001\255\255\068\001\255\255\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\079\001\080\001\255\255\082\001\255\255\255\255\255\255\
\255\255\255\255\255\255\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\255\255\255\255\
\126\001\127\001\255\255\255\255\130\001\131\001\255\255\255\255\
\134\001\135\001\136\001\137\001\138\001\255\255\255\255\141\001\
\255\255\255\255\144\001\145\001\146\001\255\255\148\001\255\255\
\150\001\255\255\255\255\255\255\154\001\255\255\002\001\003\001\
\004\001\255\255\006\001\255\255\162\001\163\001\255\255\165\001\
\166\001\013\001\168\001\015\001\255\255\017\001\255\255\255\255\
\020\001\255\255\255\255\255\255\255\255\255\255\255\255\027\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\255\255\041\001\042\001\255\255\
\255\255\255\255\046\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\055\001\056\001\057\001\058\001\059\001\
\255\255\255\255\255\255\255\255\255\255\000\000\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\079\001\080\001\255\255\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\255\255\255\255\126\001\127\001\255\255\255\255\130\001\131\001\
\255\255\255\255\134\001\135\001\136\001\137\001\138\001\255\255\
\255\255\141\001\255\255\255\255\144\001\145\001\146\001\255\255\
\148\001\255\255\150\001\255\255\255\255\255\255\154\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\162\001\163\001\
\255\255\165\001\166\001\255\255\168\001\002\001\003\001\004\001\
\255\255\006\001\255\255\255\255\255\255\255\255\255\255\255\255\
\013\001\255\255\015\001\255\255\255\255\255\255\255\255\020\001\
\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\255\255\255\255\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\000\000\
\255\255\255\255\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\255\255\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\255\255\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\255\255\255\255\144\001\255\255\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\002\001\
\003\001\004\001\255\255\006\001\255\255\162\001\163\001\255\255\
\165\001\166\001\013\001\168\001\015\001\255\255\255\255\255\255\
\255\255\020\001\255\255\255\255\255\255\255\255\255\255\255\255\
\027\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\255\255\255\255\042\001\
\255\255\255\255\255\255\046\001\255\255\255\255\255\255\255\255\
\255\255\000\000\255\255\255\255\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\079\001\080\001\255\255\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\255\255\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\134\001\135\001\136\001\137\001\138\001\
\255\255\255\255\141\001\255\255\255\255\144\001\255\255\146\001\
\255\255\148\001\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\002\001\003\001\004\001\255\255\006\001\255\255\162\001\
\163\001\255\255\165\001\166\001\013\001\168\001\015\001\255\255\
\255\255\255\255\255\255\020\001\255\255\255\255\255\255\255\255\
\255\255\255\255\027\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\255\255\
\255\255\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\000\000\255\255\255\255\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\255\255\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\255\255\255\255\144\001\
\255\255\146\001\255\255\148\001\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\002\001\003\001\004\001\255\255\006\001\
\255\255\162\001\163\001\255\255\165\001\166\001\013\001\168\001\
\015\001\255\255\255\255\255\255\255\255\020\001\255\255\255\255\
\255\255\255\255\255\255\255\255\027\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\035\001\255\255\037\001\038\001\
\255\255\255\255\255\255\042\001\255\255\255\255\255\255\046\001\
\255\255\255\255\255\255\255\255\255\255\000\000\255\255\255\255\
\055\001\056\001\057\001\058\001\059\001\255\255\255\255\255\255\
\255\255\255\255\255\255\066\001\255\255\068\001\255\255\255\255\
\255\255\255\255\255\255\074\001\075\001\255\255\255\255\255\255\
\079\001\080\001\255\255\082\001\255\255\255\255\255\255\255\255\
\255\255\255\255\089\001\255\255\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\255\255\255\255\255\255\255\255\255\255\126\001\
\127\001\255\255\255\255\130\001\131\001\255\255\255\255\134\001\
\135\001\136\001\137\001\138\001\255\255\255\255\141\001\255\255\
\255\255\144\001\255\255\146\001\255\255\148\001\255\255\150\001\
\255\255\255\255\255\255\154\001\255\255\002\001\003\001\004\001\
\255\255\006\001\255\255\162\001\163\001\255\255\165\001\166\001\
\013\001\168\001\015\001\255\255\255\255\255\255\255\255\020\001\
\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\255\255\255\255\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\000\000\
\255\255\255\255\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\255\255\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\255\255\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\255\255\255\255\144\001\255\255\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\002\001\
\003\001\004\001\255\255\006\001\255\255\162\001\163\001\255\255\
\165\001\166\001\013\001\168\001\015\001\255\255\255\255\255\255\
\255\255\020\001\255\255\255\255\255\255\255\255\255\255\255\255\
\027\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\037\001\038\001\255\255\255\255\255\255\042\001\
\255\255\255\255\255\255\046\001\000\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\079\001\080\001\255\255\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\255\255\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\134\001\135\001\136\001\137\001\138\001\
\255\255\255\255\141\001\255\255\255\255\144\001\255\255\146\001\
\255\255\148\001\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\002\001\003\001\004\001\255\255\006\001\255\255\162\001\
\163\001\255\255\165\001\166\001\013\001\168\001\015\001\255\255\
\255\255\255\255\255\255\020\001\255\255\255\255\255\255\255\255\
\255\255\255\255\027\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\255\255\
\255\255\042\001\255\255\255\255\255\255\046\001\000\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\255\255\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\137\001\138\001\255\255\255\255\141\001\255\255\255\255\144\001\
\255\255\146\001\255\255\148\001\255\255\150\001\002\001\003\001\
\004\001\154\001\006\001\255\255\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\015\001\165\001\166\001\255\255\168\001\
\020\001\255\255\255\255\255\255\255\255\255\255\255\255\027\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\037\001\038\001\255\255\255\255\255\255\042\001\255\255\
\255\255\255\255\046\001\000\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\055\001\056\001\057\001\058\001\059\001\
\255\255\255\255\255\255\255\255\255\255\255\255\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\079\001\080\001\255\255\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\255\255\255\255\126\001\127\001\255\255\255\255\130\001\131\001\
\255\255\255\255\134\001\135\001\136\001\137\001\138\001\255\255\
\255\255\141\001\255\255\255\255\144\001\255\255\146\001\255\255\
\148\001\255\255\150\001\255\255\255\255\255\255\154\001\255\255\
\002\001\003\001\004\001\255\255\006\001\255\255\162\001\163\001\
\255\255\165\001\166\001\255\255\168\001\015\001\255\255\255\255\
\255\255\255\255\020\001\255\255\255\255\255\255\255\255\255\255\
\255\255\027\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\035\001\255\255\037\001\038\001\255\255\255\255\255\255\
\042\001\255\255\255\255\255\255\046\001\000\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\055\001\056\001\057\001\
\058\001\059\001\255\255\255\255\255\255\255\255\255\255\255\255\
\066\001\255\255\068\001\255\255\255\255\255\255\255\255\255\255\
\074\001\075\001\255\255\255\255\255\255\079\001\080\001\255\255\
\082\001\255\255\255\255\255\255\255\255\255\255\255\255\089\001\
\255\255\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\255\255\255\255\126\001\127\001\255\255\255\255\
\130\001\131\001\255\255\255\255\134\001\135\001\136\001\137\001\
\138\001\255\255\255\255\141\001\255\255\255\255\144\001\255\255\
\146\001\255\255\148\001\255\255\150\001\002\001\003\001\004\001\
\154\001\006\001\255\255\255\255\255\255\255\255\255\255\255\255\
\162\001\163\001\015\001\165\001\166\001\255\255\168\001\020\001\
\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\035\001\255\255\
\037\001\038\001\255\255\255\255\255\255\042\001\255\255\255\255\
\255\255\046\001\255\255\255\255\255\255\255\255\255\255\000\000\
\255\255\255\255\055\001\056\001\057\001\058\001\059\001\255\255\
\255\255\255\255\255\255\255\255\255\255\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\079\001\080\001\255\255\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\255\255\255\255\255\255\255\255\
\255\255\126\001\127\001\255\255\255\255\130\001\131\001\255\255\
\255\255\134\001\135\001\136\001\137\001\138\001\255\255\255\255\
\141\001\255\255\255\255\144\001\255\255\146\001\255\255\148\001\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\002\001\
\003\001\004\001\255\255\006\001\255\255\162\001\163\001\255\255\
\165\001\166\001\013\001\168\001\015\001\255\255\255\255\255\255\
\255\255\020\001\255\255\255\255\255\255\255\255\255\255\255\255\
\027\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\255\255\038\001\255\255\255\255\255\255\042\001\
\255\255\255\255\255\255\046\001\255\255\000\000\255\255\255\255\
\255\255\255\255\255\255\255\255\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\079\001\080\001\255\255\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\255\255\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\134\001\135\001\136\001\137\001\138\001\
\255\255\255\255\141\001\255\255\255\255\144\001\255\255\146\001\
\255\255\148\001\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\002\001\003\001\004\001\255\255\006\001\255\255\162\001\
\163\001\255\255\165\001\166\001\013\001\168\001\015\001\255\255\
\255\255\255\255\255\255\020\001\255\255\255\255\255\255\255\255\
\255\255\255\255\027\001\255\255\255\255\255\255\000\000\255\255\
\255\255\255\255\035\001\255\255\037\001\038\001\255\255\255\255\
\255\255\042\001\255\255\255\255\255\255\046\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\055\001\056\001\
\057\001\058\001\059\001\255\255\255\255\255\255\255\255\255\255\
\255\255\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\079\001\080\001\
\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\255\255\255\255\255\255\255\255\255\255\126\001\127\001\255\255\
\255\255\130\001\131\001\255\255\255\255\134\001\135\001\136\001\
\255\255\255\255\255\255\255\255\141\001\255\255\255\255\144\001\
\255\255\146\001\255\255\148\001\255\255\150\001\255\255\002\001\
\003\001\154\001\255\255\006\001\255\255\255\255\255\255\255\255\
\255\255\162\001\163\001\014\001\165\001\166\001\017\001\168\001\
\255\255\020\001\255\255\255\255\255\255\255\255\255\255\255\255\
\027\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\035\001\255\255\255\255\255\255\000\000\040\001\255\255\042\001\
\255\255\255\255\255\255\046\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\055\001\056\001\057\001\058\001\
\059\001\255\255\255\255\255\255\255\255\255\255\255\255\066\001\
\255\255\068\001\255\255\255\255\255\255\255\255\255\255\074\001\
\075\001\255\255\255\255\255\255\255\255\080\001\255\255\082\001\
\255\255\255\255\255\255\255\255\255\255\255\255\089\001\255\255\
\091\001\092\001\093\001\094\001\255\255\096\001\097\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\111\001\112\001\113\001\114\001\
\115\001\116\001\117\001\118\001\119\001\255\255\255\255\255\255\
\255\255\255\255\125\001\126\001\127\001\255\255\255\255\130\001\
\131\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\002\001\255\255\141\001\255\255\006\001\255\255\255\255\146\001\
\255\255\255\255\255\255\150\001\255\255\255\255\255\255\154\001\
\255\255\255\255\020\001\021\001\255\255\255\255\255\255\162\001\
\163\001\027\001\165\001\166\001\030\001\168\001\255\255\255\255\
\255\255\035\001\255\255\255\255\255\255\255\255\040\001\255\255\
\042\001\255\255\255\255\255\255\046\001\255\255\255\255\255\255\
\255\255\255\255\000\000\255\255\255\255\255\255\056\001\057\001\
\058\001\059\001\255\255\255\255\255\255\255\255\255\255\255\255\
\066\001\255\255\068\001\255\255\255\255\255\255\255\255\255\255\
\074\001\075\001\255\255\255\255\255\255\255\255\080\001\255\255\
\082\001\255\255\255\255\255\255\255\255\255\255\255\255\089\001\
\255\255\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\130\001\131\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\141\001\255\255\255\255\255\255\255\255\
\146\001\255\255\255\255\255\255\150\001\255\255\255\255\255\255\
\154\001\255\255\255\255\255\255\255\255\255\255\255\255\003\001\
\162\001\163\001\006\001\165\001\166\001\255\255\168\001\011\001\
\000\000\013\001\255\255\255\255\255\255\255\255\255\255\019\001\
\020\001\021\001\022\001\255\255\024\001\255\255\255\255\027\001\
\255\255\029\001\255\255\255\255\255\255\255\255\255\255\035\001\
\255\255\255\255\255\255\255\255\040\001\255\255\042\001\255\255\
\044\001\255\255\046\001\255\255\255\255\049\001\050\001\255\255\
\255\255\255\255\255\255\055\001\056\001\057\001\058\001\059\001\
\060\001\061\001\062\001\063\001\064\001\255\255\066\001\255\255\
\068\001\069\001\070\001\255\255\255\255\073\001\074\001\075\001\
\076\001\255\255\078\001\079\001\080\001\255\255\082\001\255\255\
\084\001\085\001\255\255\255\255\255\255\089\001\090\001\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\255\255\255\255\
\124\001\125\001\126\001\127\001\128\001\255\255\130\001\131\001\
\132\001\255\255\000\000\255\255\136\001\137\001\255\255\139\001\
\140\001\141\001\142\001\143\001\255\255\145\001\146\001\255\255\
\255\255\255\255\150\001\151\001\002\001\153\001\154\001\255\255\
\006\001\157\001\158\001\159\001\160\001\161\001\162\001\163\001\
\255\255\165\001\166\001\167\001\255\255\255\255\020\001\021\001\
\255\255\255\255\255\255\255\255\255\255\027\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\255\255\
\255\255\255\255\040\001\255\255\042\001\255\255\255\255\255\255\
\046\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\056\001\057\001\058\001\059\001\255\255\255\255\
\255\255\255\255\255\255\255\255\066\001\255\255\068\001\255\255\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\255\255\080\001\255\255\082\001\255\255\255\255\255\255\
\255\255\255\255\255\255\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\000\000\255\255\255\255\255\255\
\255\255\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\002\001\255\255\255\255\255\255\006\001\255\255\
\255\255\255\255\255\255\255\255\130\001\131\001\255\255\255\255\
\255\255\255\255\255\255\255\255\020\001\255\255\255\255\141\001\
\255\255\255\255\255\255\027\001\146\001\255\255\255\255\255\255\
\150\001\255\255\255\255\035\001\154\001\255\255\255\255\255\255\
\040\001\255\255\042\001\255\255\162\001\163\001\046\001\165\001\
\166\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\056\001\057\001\058\001\059\001\255\255\255\255\255\255\255\255\
\255\255\255\255\066\001\255\255\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\255\255\
\080\001\255\255\082\001\255\255\255\255\255\255\255\255\255\255\
\255\255\089\001\000\000\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\002\001\255\255\255\255\255\255\
\006\001\255\255\130\001\131\001\255\255\255\255\255\255\013\001\
\255\255\255\255\255\255\255\255\255\255\141\001\020\001\255\255\
\255\255\255\255\146\001\255\255\255\255\027\001\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\035\001\255\255\255\255\
\255\255\255\255\162\001\163\001\042\001\165\001\166\001\255\255\
\046\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\056\001\057\001\058\001\059\001\255\255\255\255\
\255\255\255\255\255\255\255\255\066\001\255\255\068\001\000\000\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\255\255\080\001\255\255\082\001\255\255\255\255\255\255\
\255\255\255\255\255\255\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\255\255\255\255\255\255\002\001\255\255\
\255\255\255\255\006\001\255\255\130\001\131\001\255\255\255\255\
\255\255\013\001\255\255\255\255\255\255\255\255\255\255\141\001\
\020\001\255\255\255\255\255\255\146\001\255\255\255\255\027\001\
\150\001\255\255\255\255\255\255\154\001\255\255\255\255\035\001\
\255\255\255\255\255\255\255\255\162\001\163\001\042\001\165\001\
\166\001\255\255\046\001\255\255\000\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\056\001\057\001\058\001\059\001\
\255\255\255\255\255\255\255\255\255\255\255\255\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\255\255\255\255\
\255\255\255\255\255\255\255\255\002\001\255\255\255\255\255\255\
\006\001\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\255\255\020\001\255\255\
\255\255\255\255\255\255\255\255\255\255\027\001\130\001\131\001\
\255\255\255\255\255\255\255\255\255\255\035\001\255\255\255\255\
\255\255\141\001\255\255\255\255\042\001\255\255\146\001\255\255\
\046\001\000\000\150\001\255\255\255\255\255\255\154\001\255\255\
\255\255\255\255\056\001\057\001\058\001\059\001\162\001\163\001\
\255\255\165\001\166\001\255\255\066\001\255\255\068\001\255\255\
\255\255\255\255\255\255\255\255\074\001\075\001\255\255\255\255\
\255\255\255\255\080\001\255\255\082\001\255\255\255\255\255\255\
\255\255\255\255\255\255\089\001\255\255\091\001\092\001\093\001\
\094\001\255\255\096\001\097\001\255\255\255\255\255\255\255\255\
\255\255\002\001\255\255\255\255\255\255\006\001\255\255\255\255\
\255\255\111\001\112\001\113\001\114\001\115\001\116\001\117\001\
\118\001\119\001\255\255\020\001\255\255\255\255\255\255\255\255\
\255\255\255\255\027\001\255\255\130\001\131\001\255\255\255\255\
\255\255\255\255\035\001\255\255\255\255\255\255\255\255\141\001\
\255\255\042\001\255\255\255\255\146\001\046\001\000\000\255\255\
\150\001\255\255\255\255\255\255\154\001\255\255\255\255\056\001\
\057\001\058\001\059\001\255\255\162\001\163\001\255\255\165\001\
\166\001\066\001\255\255\068\001\255\255\255\255\255\255\255\255\
\255\255\074\001\075\001\255\255\255\255\255\255\255\255\080\001\
\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\
\089\001\255\255\091\001\092\001\093\001\094\001\255\255\096\001\
\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\006\001\255\255\255\255\255\255\111\001\112\001\
\113\001\114\001\115\001\116\001\117\001\118\001\119\001\255\255\
\020\001\255\255\255\255\255\255\255\255\255\255\255\255\027\001\
\255\255\130\001\131\001\255\255\255\255\255\255\255\255\035\001\
\255\255\255\255\255\255\255\255\141\001\255\255\042\001\255\255\
\255\255\146\001\046\001\000\000\255\255\150\001\255\255\255\255\
\255\255\154\001\255\255\255\255\056\001\057\001\058\001\059\001\
\255\255\162\001\163\001\255\255\165\001\166\001\066\001\255\255\
\068\001\255\255\255\255\255\255\255\255\255\255\074\001\075\001\
\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\
\255\255\255\255\255\255\255\255\255\255\089\001\255\255\091\001\
\092\001\093\001\094\001\255\255\096\001\097\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\006\001\
\255\255\255\255\255\255\111\001\112\001\113\001\114\001\115\001\
\116\001\117\001\118\001\119\001\255\255\020\001\255\255\255\255\
\255\255\255\255\255\255\255\255\027\001\255\255\130\001\131\001\
\255\255\255\255\255\255\255\255\035\001\255\255\255\255\255\255\
\255\255\141\001\255\255\042\001\255\255\255\255\146\001\046\001\
\000\000\255\255\150\001\255\255\255\255\255\255\154\001\255\255\
\255\255\056\001\057\001\058\001\059\001\255\255\162\001\163\001\
\255\255\165\001\166\001\066\001\255\255\068\001\255\255\255\255\
\255\255\255\255\255\255\074\001\075\001\255\255\255\255\255\255\
\255\255\080\001\255\255\082\001\255\255\255\255\255\255\255\255\
\255\255\255\255\089\001\255\255\091\001\092\001\093\001\094\001\
\255\255\096\001\097\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\006\001\255\255\255\255\255\255\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\119\001\255\255\020\001\255\255\255\255\255\255\255\255\255\255\
\255\255\027\001\255\255\130\001\131\001\255\255\255\255\255\255\
\255\255\035\001\255\255\255\255\255\255\255\255\141\001\255\255\
\042\001\255\255\255\255\146\001\046\001\255\255\255\255\150\001\
\255\255\255\255\255\255\154\001\255\255\255\255\056\001\057\001\
\058\001\059\001\255\255\162\001\163\001\255\255\165\001\166\001\
\066\001\255\255\068\001\255\255\255\255\255\255\255\255\255\255\
\074\001\075\001\255\255\255\255\255\255\255\255\080\001\255\255\
\082\001\255\255\255\255\255\255\255\255\255\255\255\255\089\001\
\255\255\091\001\092\001\093\001\094\001\255\255\096\001\097\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\006\001\255\255\255\255\255\255\111\001\112\001\113\001\
\114\001\115\001\116\001\117\001\118\001\119\001\255\255\020\001\
\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\
\130\001\131\001\255\255\255\255\255\255\255\255\035\001\255\255\
\255\255\255\255\255\255\141\001\255\255\042\001\255\255\255\255\
\146\001\046\001\255\255\255\255\150\001\255\255\255\255\255\255\
\154\001\255\255\255\255\056\001\057\001\058\001\059\001\255\255\
\162\001\163\001\255\255\165\001\166\001\066\001\255\255\068\001\
\255\255\255\255\255\255\255\255\255\255\074\001\075\001\255\255\
\255\255\255\255\255\255\080\001\255\255\082\001\255\255\255\255\
\255\255\255\255\255\255\255\255\089\001\255\255\091\001\092\001\
\093\001\094\001\255\255\096\001\097\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\006\001\255\255\
\255\255\255\255\111\001\112\001\113\001\114\001\115\001\116\001\
\117\001\118\001\119\001\255\255\020\001\255\255\255\255\255\255\
\255\255\255\255\255\255\027\001\255\255\130\001\131\001\255\255\
\255\255\255\255\255\255\035\001\255\255\255\255\255\255\255\255\
\141\001\255\255\042\001\255\255\255\255\146\001\046\001\255\255\
\255\255\150\001\255\255\255\255\255\255\154\001\255\255\255\255\
\056\001\057\001\058\001\059\001\255\255\162\001\163\001\255\255\
\165\001\166\001\066\001\255\255\068\001\255\255\255\255\255\255\
\255\255\255\255\074\001\075\001\255\255\255\255\255\255\255\255\
\080\001\255\255\082\001\255\255\255\255\255\255\255\255\255\255\
\255\255\089\001\255\255\091\001\092\001\093\001\094\001\255\255\
\096\001\097\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\111\001\
\112\001\113\001\114\001\115\001\116\001\117\001\118\001\119\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\130\001\131\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\141\001\255\255\255\255\
\255\255\255\255\146\001\255\255\255\255\255\255\150\001\255\255\
\255\255\255\255\154\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\162\001\163\001\255\255\165\001\166\001\003\001\
\255\255\005\001\006\001\007\001\008\001\009\001\010\001\011\001\
\012\001\255\255\255\255\255\255\016\001\255\255\018\001\019\001\
\255\255\255\255\022\001\023\001\255\255\025\001\026\001\255\255\
\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\
\036\001\255\255\255\255\039\001\040\001\255\255\255\255\043\001\
\044\001\045\001\255\255\047\001\048\001\049\001\050\001\051\001\
\052\001\053\001\054\001\255\255\255\255\255\255\255\255\255\255\
\060\001\061\001\062\001\063\001\064\001\065\001\255\255\255\255\
\255\255\069\001\070\001\255\255\255\255\073\001\074\001\075\001\
\076\001\077\001\078\001\255\255\080\001\081\001\255\255\255\255\
\084\001\085\001\086\001\087\001\088\001\255\255\090\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\098\001\099\001\
\100\001\101\001\102\001\103\001\104\001\105\001\106\001\107\001\
\108\001\109\001\110\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\122\001\255\255\
\124\001\255\255\255\255\255\255\128\001\255\255\255\255\255\255\
\132\001\133\001\255\255\255\255\255\255\255\255\255\255\139\001\
\140\001\255\255\255\255\143\001\255\255\255\255\255\255\147\001\
\255\255\149\001\255\255\151\001\152\001\153\001\255\255\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\003\001\255\255\
\005\001\006\001\007\001\008\001\009\001\010\001\011\001\012\001\
\255\255\255\255\255\255\016\001\255\255\018\001\019\001\255\255\
\255\255\022\001\023\001\255\255\025\001\026\001\255\255\028\001\
\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\
\255\255\255\255\039\001\040\001\255\255\255\255\043\001\044\001\
\045\001\255\255\047\001\048\001\049\001\050\001\051\001\052\001\
\053\001\054\001\255\255\255\255\255\255\255\255\255\255\060\001\
\061\001\062\001\063\001\064\001\065\001\255\255\255\255\255\255\
\069\001\070\001\255\255\255\255\073\001\074\001\075\001\076\001\
\077\001\078\001\255\255\080\001\081\001\255\255\255\255\084\001\
\085\001\086\001\087\001\088\001\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\098\001\099\001\100\001\
\101\001\102\001\103\001\104\001\105\001\106\001\107\001\108\001\
\109\001\110\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\122\001\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\133\001\255\255\255\255\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\255\255\147\001\255\255\
\149\001\255\255\151\001\152\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\003\001\255\255\005\001\
\006\001\007\001\008\001\009\001\010\001\011\001\012\001\255\255\
\255\255\255\255\016\001\255\255\018\001\019\001\255\255\255\255\
\022\001\023\001\255\255\025\001\026\001\255\255\028\001\029\001\
\255\255\255\255\255\255\255\255\255\255\255\255\036\001\255\255\
\255\255\039\001\040\001\255\255\255\255\043\001\044\001\045\001\
\255\255\047\001\048\001\049\001\050\001\051\001\052\001\053\001\
\054\001\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\065\001\255\255\255\255\255\255\069\001\
\070\001\255\255\255\255\073\001\074\001\075\001\076\001\077\001\
\078\001\255\255\080\001\081\001\255\255\255\255\084\001\085\001\
\086\001\087\001\088\001\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\098\001\099\001\100\001\101\001\
\102\001\103\001\104\001\105\001\106\001\107\001\108\001\109\001\
\110\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\122\001\255\255\124\001\255\255\
\255\255\255\255\128\001\255\255\255\255\255\255\132\001\133\001\
\255\255\255\255\255\255\255\255\255\255\139\001\140\001\255\255\
\255\255\143\001\255\255\255\255\003\001\147\001\255\255\149\001\
\255\255\151\001\152\001\153\001\011\001\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\019\001\255\255\255\255\022\001\
\023\001\255\255\025\001\255\255\255\255\028\001\029\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\040\001\255\255\255\255\255\255\044\001\255\255\255\255\
\255\255\255\255\049\001\050\001\255\255\052\001\255\255\054\001\
\255\255\255\255\255\255\255\255\255\255\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\255\255\
\255\255\255\255\073\001\255\255\255\255\076\001\255\255\078\001\
\255\255\255\255\081\001\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\105\001\106\001\107\001\108\001\255\255\110\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\003\001\255\255\255\255\149\001\255\255\
\151\001\255\255\153\001\011\001\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\019\001\255\255\255\255\022\001\023\001\
\255\255\025\001\255\255\255\255\028\001\029\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\040\001\255\255\255\255\255\255\044\001\255\255\255\255\255\255\
\255\255\049\001\050\001\255\255\052\001\255\255\054\001\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\255\255\255\255\069\001\255\255\255\255\
\255\255\073\001\255\255\255\255\076\001\255\255\078\001\255\255\
\255\255\081\001\255\255\255\255\084\001\085\001\255\255\255\255\
\255\255\255\255\090\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\105\001\106\001\107\001\108\001\255\255\110\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\124\001\255\255\255\255\255\255\
\128\001\255\255\255\255\255\255\132\001\255\255\255\255\255\255\
\255\255\255\255\255\255\139\001\140\001\255\255\255\255\143\001\
\255\255\255\255\255\255\255\255\255\255\149\001\255\255\151\001\
\255\255\153\001\255\255\255\255\255\255\157\001\158\001\159\001\
\160\001\161\001\005\001\006\001\007\001\008\001\009\001\010\001\
\011\001\012\001\255\255\255\255\255\255\016\001\255\255\018\001\
\019\001\255\255\255\255\255\255\255\255\255\255\255\255\026\001\
\255\255\255\255\029\001\255\255\255\255\255\255\255\255\255\255\
\255\255\036\001\255\255\255\255\039\001\255\255\255\255\255\255\
\043\001\044\001\045\001\255\255\047\001\048\001\049\001\050\001\
\051\001\255\255\053\001\255\255\255\255\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\065\001\255\255\
\255\255\255\255\069\001\070\001\255\255\255\255\073\001\074\001\
\075\001\076\001\077\001\078\001\255\255\080\001\081\001\255\255\
\255\255\084\001\085\001\086\001\087\001\088\001\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\122\001\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\133\001\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\255\255\
\147\001\255\255\149\001\255\255\151\001\152\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\005\001\
\006\001\007\001\008\001\009\001\010\001\011\001\012\001\255\255\
\255\255\255\255\016\001\255\255\018\001\019\001\255\255\255\255\
\255\255\255\255\255\255\255\255\026\001\255\255\255\255\029\001\
\255\255\255\255\255\255\255\255\255\255\255\255\036\001\255\255\
\255\255\039\001\255\255\255\255\255\255\043\001\044\001\045\001\
\255\255\047\001\048\001\049\001\050\001\051\001\255\255\053\001\
\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\065\001\255\255\255\255\255\255\069\001\
\070\001\255\255\255\255\073\001\074\001\075\001\076\001\077\001\
\078\001\255\255\080\001\081\001\255\255\255\255\084\001\085\001\
\086\001\087\001\088\001\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\122\001\255\255\124\001\255\255\
\255\255\255\255\128\001\255\255\255\255\255\255\132\001\133\001\
\255\255\255\255\255\255\255\255\255\255\139\001\140\001\255\255\
\255\255\143\001\255\255\255\255\255\255\147\001\255\255\149\001\
\255\255\151\001\152\001\153\001\255\255\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\012\001\255\255\255\255\255\255\
\016\001\255\255\255\255\019\001\255\255\021\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\030\001\031\001\
\032\001\033\001\034\001\255\255\255\255\255\255\255\255\255\255\
\040\001\255\255\255\255\255\255\044\001\255\255\255\255\255\255\
\255\255\049\001\050\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\255\255\255\255\069\001\255\255\255\255\
\255\255\073\001\255\255\255\255\255\255\077\001\078\001\255\255\
\255\255\255\255\255\255\255\255\084\001\085\001\255\255\255\255\
\255\255\255\255\090\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\106\001\255\255\255\255\109\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\124\001\255\255\126\001\255\255\
\128\001\255\255\255\255\255\255\132\001\255\255\255\255\255\255\
\255\255\137\001\255\255\139\001\140\001\255\255\255\255\143\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\151\001\
\255\255\153\001\255\255\255\255\156\001\157\001\158\001\159\001\
\160\001\161\001\012\001\255\255\164\001\255\255\016\001\255\255\
\168\001\019\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\030\001\031\001\032\001\033\001\
\034\001\255\255\255\255\255\255\255\255\255\255\040\001\255\255\
\255\255\255\255\044\001\255\255\255\255\255\255\255\255\049\001\
\050\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\060\001\061\001\062\001\063\001\064\001\255\255\
\255\255\255\255\255\255\069\001\255\255\255\255\255\255\073\001\
\255\255\255\255\255\255\077\001\078\001\255\255\255\255\255\255\
\255\255\255\255\084\001\085\001\255\255\255\255\255\255\255\255\
\090\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\106\001\255\255\255\255\109\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\124\001\255\255\126\001\255\255\128\001\255\255\
\255\255\255\255\132\001\255\255\255\255\255\255\255\255\137\001\
\255\255\139\001\140\001\255\255\255\255\143\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\151\001\255\255\153\001\
\255\255\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\012\001\255\255\164\001\255\255\016\001\255\255\168\001\019\001\
\255\255\021\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\031\001\032\001\033\001\034\001\255\255\
\255\255\255\255\255\255\255\255\040\001\255\255\255\255\255\255\
\044\001\255\255\255\255\255\255\255\255\049\001\050\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\060\001\061\001\062\001\063\001\064\001\255\255\255\255\255\255\
\255\255\069\001\255\255\255\255\255\255\073\001\255\255\255\255\
\255\255\077\001\078\001\255\255\255\255\255\255\255\255\255\255\
\084\001\085\001\255\255\255\255\255\255\255\255\090\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\106\001\255\255\
\255\255\109\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\124\001\255\255\126\001\255\255\128\001\255\255\255\255\255\255\
\132\001\255\255\255\255\255\255\255\255\137\001\255\255\139\001\
\140\001\255\255\255\255\143\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\151\001\255\255\153\001\255\255\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\012\001\255\255\
\164\001\255\255\016\001\255\255\168\001\019\001\255\255\021\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\031\001\032\001\033\001\034\001\255\255\255\255\255\255\
\255\255\255\255\040\001\255\255\255\255\255\255\044\001\255\255\
\255\255\255\255\255\255\049\001\050\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\255\255\255\255\255\255\255\255\069\001\
\255\255\255\255\255\255\073\001\255\255\255\255\255\255\077\001\
\078\001\255\255\255\255\255\255\255\255\255\255\084\001\085\001\
\255\255\255\255\255\255\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\124\001\255\255\
\126\001\255\255\128\001\255\255\255\255\255\255\132\001\255\255\
\255\255\255\255\255\255\137\001\255\255\139\001\140\001\255\255\
\255\255\143\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\151\001\255\255\153\001\255\255\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\012\001\255\255\164\001\255\255\
\016\001\255\255\168\001\019\001\255\255\021\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\031\001\
\032\001\033\001\034\001\255\255\255\255\255\255\255\255\255\255\
\040\001\255\255\255\255\255\255\044\001\255\255\255\255\255\255\
\255\255\049\001\050\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\255\255\255\255\069\001\255\255\255\255\
\255\255\073\001\255\255\255\255\255\255\077\001\078\001\255\255\
\255\255\255\255\255\255\255\255\084\001\085\001\255\255\255\255\
\255\255\255\255\090\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\106\001\255\255\255\255\109\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\124\001\255\255\126\001\255\255\
\128\001\255\255\255\255\255\255\132\001\255\255\255\255\255\255\
\255\255\137\001\255\255\139\001\140\001\255\255\255\255\143\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\151\001\
\255\255\153\001\255\255\255\255\255\255\157\001\158\001\159\001\
\160\001\161\001\012\001\255\255\164\001\255\255\016\001\255\255\
\168\001\019\001\255\255\021\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\031\001\032\001\033\001\
\034\001\255\255\255\255\255\255\255\255\255\255\040\001\255\255\
\255\255\255\255\044\001\255\255\255\255\255\255\255\255\049\001\
\050\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\060\001\061\001\062\001\063\001\064\001\255\255\
\255\255\255\255\255\255\069\001\255\255\255\255\255\255\073\001\
\255\255\255\255\255\255\077\001\078\001\255\255\255\255\255\255\
\255\255\255\255\084\001\085\001\255\255\255\255\255\255\255\255\
\090\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\106\001\255\255\255\255\109\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\124\001\255\255\126\001\255\255\128\001\255\255\
\255\255\255\255\132\001\255\255\255\255\255\255\255\255\137\001\
\255\255\139\001\140\001\255\255\255\255\143\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\151\001\255\255\153\001\
\255\255\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\012\001\255\255\164\001\255\255\016\001\255\255\168\001\019\001\
\255\255\021\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\031\001\032\001\033\001\034\001\255\255\
\255\255\255\255\255\255\255\255\040\001\255\255\255\255\255\255\
\044\001\255\255\255\255\255\255\255\255\049\001\050\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\060\001\061\001\062\001\063\001\064\001\255\255\255\255\255\255\
\255\255\069\001\255\255\255\255\255\255\073\001\255\255\255\255\
\255\255\077\001\078\001\255\255\255\255\255\255\255\255\255\255\
\084\001\085\001\255\255\255\255\255\255\255\255\090\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\106\001\255\255\
\255\255\109\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\124\001\255\255\126\001\255\255\128\001\255\255\255\255\255\255\
\132\001\255\255\255\255\255\255\255\255\137\001\255\255\139\001\
\140\001\255\255\255\255\143\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\151\001\255\255\153\001\255\255\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\012\001\255\255\
\164\001\255\255\016\001\255\255\168\001\019\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\031\001\032\001\033\001\034\001\255\255\255\255\255\255\
\255\255\255\255\040\001\255\255\255\255\255\255\044\001\255\255\
\255\255\255\255\255\255\049\001\050\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\255\255\255\255\255\255\255\255\069\001\
\255\255\255\255\255\255\073\001\255\255\255\255\255\255\077\001\
\078\001\255\255\255\255\255\255\255\255\255\255\084\001\085\001\
\255\255\255\255\255\255\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\124\001\255\255\
\126\001\255\255\128\001\255\255\255\255\255\255\132\001\255\255\
\255\255\255\255\255\255\137\001\255\255\139\001\140\001\255\255\
\255\255\143\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\151\001\255\255\153\001\255\255\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\012\001\255\255\164\001\255\255\
\016\001\255\255\168\001\019\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\031\001\
\032\001\033\001\034\001\255\255\255\255\255\255\255\255\255\255\
\040\001\255\255\255\255\255\255\044\001\255\255\255\255\255\255\
\255\255\049\001\050\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\255\255\255\255\069\001\255\255\255\255\
\255\255\073\001\255\255\255\255\255\255\077\001\078\001\255\255\
\255\255\255\255\255\255\255\255\084\001\085\001\255\255\255\255\
\255\255\255\255\090\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\106\001\255\255\255\255\109\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\124\001\255\255\126\001\255\255\
\128\001\255\255\255\255\255\255\132\001\255\255\255\255\255\255\
\255\255\137\001\255\255\139\001\140\001\255\255\255\255\143\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\151\001\
\255\255\153\001\255\255\255\255\255\255\157\001\158\001\159\001\
\160\001\161\001\012\001\255\255\164\001\255\255\016\001\255\255\
\168\001\019\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\031\001\032\001\033\001\
\034\001\255\255\255\255\255\255\255\255\255\255\040\001\255\255\
\255\255\255\255\044\001\255\255\255\255\255\255\255\255\049\001\
\050\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\060\001\061\001\062\001\063\001\064\001\255\255\
\255\255\255\255\255\255\069\001\255\255\255\255\255\255\073\001\
\255\255\255\255\255\255\077\001\078\001\255\255\255\255\255\255\
\255\255\255\255\084\001\085\001\255\255\255\255\255\255\255\255\
\090\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\106\001\255\255\255\255\109\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\124\001\255\255\126\001\255\255\128\001\255\255\
\255\255\255\255\132\001\255\255\255\255\255\255\255\255\137\001\
\255\255\139\001\140\001\255\255\255\255\143\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\151\001\255\255\153\001\
\255\255\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\012\001\255\255\164\001\255\255\016\001\255\255\168\001\019\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\031\001\032\001\033\001\034\001\255\255\
\255\255\255\255\255\255\255\255\040\001\255\255\255\255\255\255\
\044\001\255\255\255\255\255\255\255\255\049\001\050\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\060\001\061\001\062\001\063\001\064\001\255\255\255\255\255\255\
\255\255\069\001\255\255\255\255\255\255\073\001\255\255\255\255\
\255\255\077\001\078\001\255\255\255\255\255\255\255\255\255\255\
\084\001\085\001\255\255\255\255\255\255\255\255\090\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\106\001\255\255\
\255\255\109\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\124\001\255\255\126\001\255\255\128\001\255\255\255\255\255\255\
\132\001\255\255\255\255\255\255\255\255\137\001\255\255\139\001\
\140\001\255\255\255\255\143\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\151\001\255\255\153\001\255\255\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\012\001\255\255\
\164\001\255\255\016\001\255\255\168\001\019\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\031\001\032\001\033\001\034\001\255\255\255\255\255\255\
\255\255\255\255\040\001\255\255\255\255\255\255\044\001\255\255\
\255\255\255\255\255\255\049\001\050\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\255\255\255\255\255\255\255\255\069\001\
\255\255\255\255\255\255\073\001\255\255\255\255\255\255\077\001\
\078\001\255\255\255\255\255\255\255\255\255\255\084\001\085\001\
\255\255\255\255\255\255\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\124\001\255\255\
\126\001\255\255\128\001\255\255\008\001\255\255\132\001\255\255\
\012\001\255\255\255\255\137\001\016\001\139\001\140\001\019\001\
\255\255\143\001\022\001\023\001\255\255\025\001\255\255\255\255\
\028\001\151\001\255\255\153\001\255\255\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\040\001\255\255\164\001\255\255\
\044\001\255\255\168\001\255\255\255\255\255\255\050\001\255\255\
\052\001\255\255\054\001\255\255\255\255\255\255\255\255\255\255\
\060\001\061\001\062\001\063\001\064\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\073\001\255\255\255\255\
\255\255\077\001\078\001\255\255\255\255\255\255\255\255\255\255\
\084\001\085\001\255\255\255\255\255\255\255\255\090\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\098\001\099\001\
\100\001\101\001\102\001\103\001\104\001\105\001\106\001\107\001\
\108\001\109\001\110\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\124\001\255\255\255\255\255\255\128\001\255\255\255\255\255\255\
\132\001\008\001\009\001\010\001\011\001\012\001\255\255\139\001\
\140\001\016\001\255\255\143\001\019\001\255\255\255\255\255\255\
\255\255\255\255\255\255\151\001\255\255\153\001\029\001\255\255\
\255\255\157\001\158\001\159\001\160\001\161\001\255\255\255\255\
\255\255\255\255\255\255\255\255\043\001\044\001\045\001\255\255\
\047\001\255\255\049\001\050\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\070\001\
\255\255\072\001\073\001\255\255\255\255\076\001\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\088\001\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\122\001\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\008\001\009\001\
\010\001\011\001\012\001\013\001\139\001\140\001\016\001\255\255\
\143\001\019\001\255\255\255\255\255\255\255\255\149\001\255\255\
\151\001\255\255\153\001\029\001\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\255\255\255\255\255\255\255\255\255\255\
\255\255\043\001\044\001\045\001\255\255\047\001\255\255\049\001\
\050\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\060\001\061\001\062\001\063\001\064\001\255\255\
\255\255\255\255\255\255\069\001\070\001\255\255\255\255\073\001\
\255\255\255\255\076\001\077\001\078\001\255\255\255\255\255\255\
\255\255\255\255\084\001\085\001\255\255\255\255\088\001\255\255\
\090\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\106\001\255\255\255\255\109\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\122\001\255\255\124\001\255\255\255\255\255\255\128\001\255\255\
\255\255\255\255\132\001\008\001\009\001\010\001\011\001\012\001\
\255\255\139\001\140\001\016\001\255\255\143\001\019\001\255\255\
\255\255\255\255\255\255\149\001\255\255\151\001\255\255\153\001\
\029\001\255\255\255\255\157\001\158\001\159\001\160\001\161\001\
\255\255\255\255\255\255\255\255\255\255\255\255\043\001\044\001\
\045\001\255\255\047\001\255\255\049\001\050\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\070\001\255\255\255\255\073\001\255\255\255\255\076\001\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\088\001\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\122\001\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\008\001\009\001\010\001\011\001\012\001\255\255\139\001\140\001\
\016\001\255\255\143\001\019\001\255\255\255\255\255\255\255\255\
\149\001\255\255\151\001\255\255\153\001\029\001\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\044\001\255\255\255\255\047\001\
\255\255\049\001\050\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\060\001\061\001\062\001\063\001\
\064\001\255\255\255\255\255\255\255\255\069\001\070\001\255\255\
\255\255\073\001\255\255\255\255\076\001\077\001\078\001\255\255\
\255\255\255\255\255\255\255\255\084\001\085\001\255\255\255\255\
\088\001\255\255\090\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\106\001\255\255\255\255\109\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\122\001\255\255\124\001\255\255\255\255\255\255\
\128\001\255\255\255\255\255\255\132\001\008\001\009\001\010\001\
\011\001\012\001\255\255\139\001\140\001\016\001\255\255\143\001\
\019\001\255\255\255\255\255\255\255\255\149\001\255\255\151\001\
\255\255\153\001\029\001\255\255\255\255\157\001\158\001\159\001\
\160\001\161\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\044\001\255\255\255\255\255\255\255\255\049\001\050\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\070\001\255\255\255\255\073\001\255\255\
\255\255\076\001\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\088\001\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\122\001\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\008\001\009\001\010\001\011\001\012\001\255\255\
\139\001\140\001\016\001\255\255\143\001\019\001\255\255\255\255\
\255\255\255\255\149\001\255\255\151\001\255\255\153\001\029\001\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\044\001\255\255\
\255\255\255\255\255\255\049\001\050\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\
\062\001\063\001\064\001\255\255\255\255\255\255\255\255\069\001\
\070\001\255\255\255\255\073\001\255\255\255\255\076\001\077\001\
\078\001\255\255\255\255\255\255\255\255\255\255\084\001\085\001\
\255\255\255\255\088\001\255\255\090\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\122\001\255\255\124\001\255\255\
\255\255\255\255\128\001\255\255\255\255\255\255\132\001\008\001\
\009\001\010\001\011\001\012\001\255\255\139\001\140\001\016\001\
\255\255\143\001\019\001\255\255\255\255\255\255\255\255\149\001\
\255\255\151\001\255\255\153\001\029\001\255\255\255\255\157\001\
\158\001\159\001\160\001\161\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\044\001\255\255\255\255\255\255\255\255\
\049\001\050\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\070\001\255\255\255\255\
\073\001\255\255\255\255\076\001\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\088\001\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\008\001\009\001\010\001\
\011\001\012\001\255\255\255\255\255\255\016\001\255\255\255\255\
\019\001\122\001\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\255\255\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\043\001\044\001\045\001\255\255\149\001\255\255\151\001\050\001\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\088\001\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\008\001\009\001\010\001\011\001\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\122\001\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\043\001\044\001\
\045\001\255\255\149\001\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\088\001\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\008\001\009\001\010\001\011\001\012\001\255\255\255\255\
\255\255\016\001\255\255\255\255\019\001\122\001\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\255\255\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\044\001\255\255\255\255\
\149\001\255\255\151\001\050\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\255\255\
\255\255\255\255\073\001\255\255\255\255\076\001\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\088\001\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\008\001\
\009\001\010\001\011\001\012\001\255\255\255\255\255\255\016\001\
\255\255\255\255\019\001\122\001\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\044\001\255\255\255\255\149\001\255\255\
\151\001\050\001\153\001\255\255\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\073\001\255\255\255\255\076\001\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\088\001\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\008\001\009\001\010\001\
\011\001\012\001\255\255\255\255\255\255\016\001\255\255\255\255\
\019\001\122\001\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\255\255\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\044\001\255\255\255\255\149\001\255\255\151\001\050\001\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\088\001\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\008\001\009\001\010\001\011\001\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\122\001\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\044\001\
\255\255\255\255\149\001\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\088\001\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\008\001\009\001\010\001\011\001\012\001\255\255\255\255\
\255\255\016\001\255\255\255\255\019\001\122\001\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\255\255\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\044\001\255\255\255\255\
\149\001\255\255\151\001\050\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\255\255\
\255\255\255\255\073\001\255\255\255\255\255\255\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\088\001\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\008\001\
\009\001\010\001\011\001\012\001\255\255\255\255\255\255\016\001\
\255\255\255\255\019\001\122\001\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\044\001\255\255\255\255\149\001\255\255\
\151\001\050\001\153\001\255\255\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\255\255\255\255\255\255\076\001\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\088\001\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\122\001\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\012\001\255\255\132\001\255\255\016\001\255\255\255\255\
\019\001\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\255\255\255\255\255\255\149\001\255\255\151\001\255\255\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\044\001\255\255\255\255\255\255\255\255\049\001\050\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\012\001\255\255\255\255\255\255\016\001\
\255\255\124\001\019\001\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\255\255\
\255\255\255\255\255\255\044\001\151\001\255\255\153\001\255\255\
\049\001\050\001\157\001\158\001\159\001\160\001\161\001\255\255\
\255\255\164\001\255\255\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\073\001\255\255\255\255\255\255\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\011\001\012\001\255\255\255\255\
\255\255\016\001\255\255\124\001\019\001\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\029\001\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\255\255\255\255\255\255\255\255\044\001\151\001\255\255\
\153\001\255\255\049\001\050\001\157\001\158\001\159\001\160\001\
\161\001\255\255\255\255\164\001\255\255\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\070\001\
\255\255\255\255\073\001\255\255\255\255\076\001\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\255\255\
\255\255\255\255\011\001\012\001\255\255\255\255\255\255\016\001\
\255\255\255\255\019\001\255\255\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\044\001\255\255\255\255\149\001\255\255\
\151\001\050\001\153\001\255\255\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\073\001\255\255\255\255\076\001\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\255\255\255\255\255\255\
\011\001\012\001\255\255\255\255\255\255\016\001\255\255\255\255\
\019\001\255\255\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\255\255\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\044\001\255\255\255\255\149\001\255\255\151\001\050\001\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\011\001\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\255\255\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\044\001\
\255\255\255\255\149\001\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\011\001\255\255\013\001\255\255\255\255\255\255\255\255\124\001\
\019\001\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\029\001\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\255\255\255\255\255\255\
\149\001\044\001\151\001\255\255\153\001\255\255\049\001\050\001\
\157\001\158\001\159\001\160\001\161\001\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\076\001\255\255\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\011\001\255\255\255\255\255\255\255\255\255\255\
\255\255\124\001\019\001\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\029\001\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\255\255\
\255\255\255\255\255\255\044\001\151\001\255\255\153\001\255\255\
\049\001\050\001\157\001\158\001\159\001\160\001\161\001\255\255\
\255\255\255\255\255\255\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\073\001\255\255\255\255\076\001\255\255\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\011\001\255\255\255\255\255\255\
\255\255\255\255\255\255\124\001\019\001\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\029\001\136\001\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\255\255\255\255\255\255\255\255\044\001\151\001\255\255\
\153\001\255\255\049\001\050\001\157\001\158\001\159\001\160\001\
\161\001\255\255\255\255\255\255\255\255\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\255\255\
\255\255\255\255\073\001\255\255\255\255\076\001\255\255\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\011\001\255\255\
\255\255\255\255\255\255\255\255\255\255\124\001\019\001\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\029\001\136\001\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\255\255\255\255\255\255\255\255\044\001\
\151\001\255\255\153\001\255\255\049\001\050\001\157\001\158\001\
\159\001\160\001\161\001\255\255\255\255\255\255\255\255\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\073\001\255\255\255\255\076\001\
\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\011\001\255\255\255\255\255\255\255\255\255\255\255\255\124\001\
\019\001\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\029\001\136\001\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\255\255\255\255\255\255\
\255\255\044\001\151\001\255\255\153\001\255\255\049\001\050\001\
\157\001\158\001\159\001\160\001\161\001\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\069\001\255\255\255\255\255\255\073\001\255\255\
\255\255\076\001\255\255\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\011\001\255\255\255\255\255\255\255\255\255\255\
\255\255\124\001\019\001\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\029\001\255\255\255\255\255\255\
\139\001\140\001\255\255\142\001\143\001\255\255\255\255\255\255\
\255\255\255\255\255\255\044\001\151\001\255\255\153\001\255\255\
\049\001\050\001\157\001\158\001\159\001\160\001\161\001\255\255\
\255\255\255\255\255\255\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\073\001\255\255\255\255\076\001\255\255\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\011\001\255\255\255\255\255\255\
\255\255\255\255\255\255\124\001\019\001\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\029\001\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\255\255\255\255\255\255\255\255\044\001\151\001\255\255\
\153\001\255\255\049\001\050\001\157\001\158\001\159\001\160\001\
\161\001\255\255\255\255\255\255\255\255\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\069\001\255\255\
\255\255\255\255\073\001\255\255\255\255\076\001\255\255\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\011\001\255\255\
\255\255\255\255\255\255\255\255\255\255\124\001\019\001\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\029\001\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\255\255\255\255\255\255\255\255\044\001\
\151\001\255\255\153\001\255\255\049\001\050\001\157\001\158\001\
\159\001\160\001\161\001\255\255\255\255\255\255\255\255\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\069\001\255\255\255\255\255\255\073\001\255\255\255\255\076\001\
\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\012\001\255\255\255\255\255\255\016\001\255\255\124\001\
\019\001\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\029\001\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\255\255\255\255\255\255\
\255\255\044\001\151\001\255\255\153\001\255\255\049\001\050\001\
\157\001\158\001\159\001\160\001\161\001\255\255\255\255\255\255\
\255\255\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\073\001\255\255\
\255\255\076\001\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\255\255\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\044\001\
\255\255\255\255\255\255\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\255\255\255\255\255\255\255\255\012\001\255\255\255\255\
\255\255\016\001\255\255\255\255\019\001\255\255\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\255\255\136\001\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\044\001\255\255\255\255\
\255\255\255\255\151\001\050\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\073\001\255\255\255\255\255\255\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\255\255\
\255\255\255\255\255\255\012\001\255\255\255\255\255\255\016\001\
\255\255\255\255\019\001\255\255\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\136\001\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\044\001\255\255\255\255\255\255\255\255\
\151\001\050\001\153\001\255\255\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\069\001\255\255\255\255\255\255\
\073\001\255\255\255\255\255\255\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\255\255\255\255\255\255\
\255\255\012\001\255\255\255\255\255\255\016\001\255\255\255\255\
\019\001\255\255\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\255\255\255\255\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\044\001\255\255\255\255\255\255\255\255\151\001\050\001\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\255\255\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\136\001\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\044\001\
\255\255\255\255\255\255\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\255\255\255\255\255\255\255\255\012\001\255\255\255\255\
\255\255\016\001\255\255\255\255\019\001\255\255\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\255\255\136\001\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\044\001\255\255\255\255\
\255\255\255\255\151\001\050\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\073\001\255\255\255\255\255\255\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\255\255\109\001\255\255\
\255\255\255\255\255\255\012\001\255\255\255\255\255\255\016\001\
\255\255\255\255\019\001\255\255\255\255\124\001\255\255\255\255\
\255\255\128\001\255\255\255\255\255\255\132\001\255\255\255\255\
\255\255\136\001\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\255\255\255\255\044\001\255\255\255\255\255\255\255\255\
\151\001\050\001\153\001\255\255\255\255\255\255\157\001\158\001\
\159\001\160\001\161\001\060\001\061\001\062\001\063\001\064\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\073\001\255\255\255\255\255\255\077\001\078\001\255\255\255\255\
\255\255\255\255\255\255\084\001\085\001\255\255\255\255\255\255\
\255\255\090\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\106\001\255\255\255\255\109\001\255\255\255\255\255\255\
\255\255\012\001\255\255\255\255\255\255\016\001\255\255\255\255\
\019\001\255\255\255\255\124\001\255\255\255\255\255\255\128\001\
\255\255\255\255\255\255\132\001\255\255\255\255\255\255\136\001\
\255\255\255\255\139\001\140\001\255\255\255\255\143\001\255\255\
\255\255\044\001\255\255\255\255\255\255\255\255\151\001\050\001\
\153\001\255\255\255\255\255\255\157\001\158\001\159\001\160\001\
\161\001\060\001\061\001\062\001\063\001\064\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\073\001\255\255\
\255\255\255\255\077\001\078\001\255\255\255\255\255\255\255\255\
\255\255\084\001\085\001\255\255\255\255\255\255\255\255\090\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\106\001\
\255\255\255\255\109\001\255\255\255\255\255\255\255\255\012\001\
\255\255\255\255\255\255\016\001\255\255\255\255\019\001\255\255\
\255\255\124\001\255\255\255\255\255\255\128\001\255\255\255\255\
\255\255\132\001\255\255\255\255\255\255\255\255\255\255\255\255\
\139\001\140\001\255\255\255\255\143\001\255\255\255\255\044\001\
\255\255\255\255\255\255\255\255\151\001\050\001\153\001\255\255\
\255\255\255\255\157\001\158\001\159\001\160\001\161\001\060\001\
\061\001\062\001\063\001\064\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\073\001\255\255\255\255\255\255\
\077\001\078\001\255\255\255\255\255\255\255\255\255\255\084\001\
\085\001\255\255\255\255\255\255\255\255\090\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\106\001\255\255\255\255\
\109\001\255\255\255\255\255\255\255\255\012\001\255\255\255\255\
\255\255\016\001\255\255\255\255\019\001\255\255\255\255\124\001\
\255\255\255\255\255\255\128\001\255\255\255\255\255\255\132\001\
\255\255\255\255\255\255\255\255\255\255\255\255\139\001\140\001\
\255\255\255\255\143\001\255\255\255\255\044\001\255\255\255\255\
\255\255\255\255\151\001\050\001\153\001\255\255\255\255\255\255\
\157\001\158\001\159\001\160\001\161\001\060\001\061\001\062\001\
\063\001\064\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\073\001\255\255\255\255\255\255\077\001\078\001\
\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\
\255\255\255\255\255\255\090\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\106\001\255\255\020\001\109\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\035\001\124\001\255\255\255\255\
\255\255\128\001\255\255\042\001\255\255\132\001\255\255\046\001\
\255\255\255\255\255\255\255\255\139\001\140\001\255\255\255\255\
\143\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\
\151\001\255\255\153\001\255\255\255\255\068\001\157\001\158\001\
\159\001\160\001\161\001\255\255\255\255\255\255\255\255\255\255\
\255\255\080\001\255\255\003\001\255\255\255\255\255\255\255\255\
\255\255\255\255\089\001\255\255\255\255\092\001\255\255\255\255\
\255\255\255\255\097\001\255\255\255\255\255\255\022\001\023\001\
\255\255\025\001\255\255\255\255\028\001\255\255\255\255\003\001\
\111\001\112\001\113\001\114\001\115\001\116\001\117\001\118\001\
\040\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\022\001\023\001\052\001\025\001\054\001\255\255\
\028\001\255\255\255\255\255\255\255\255\255\255\141\001\255\255\
\255\255\255\255\255\255\146\001\040\001\255\255\255\255\255\255\
\255\255\255\255\255\255\154\001\255\255\255\255\255\255\255\255\
\052\001\081\001\054\001\255\255\255\255\255\255\255\255\166\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\098\001\099\001\100\001\101\001\102\001\103\001\
\104\001\105\001\106\001\107\001\108\001\081\001\110\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\003\001\255\255\255\255\255\255\255\255\255\255\098\001\099\001\
\100\001\101\001\102\001\103\001\104\001\105\001\106\001\107\001\
\108\001\255\255\110\001\022\001\023\001\255\255\025\001\255\255\
\255\255\028\001\255\255\255\255\003\001\149\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\040\001\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\022\001\
\023\001\052\001\025\001\054\001\255\255\028\001\255\255\255\255\
\255\255\149\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\040\001\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\052\001\081\001\054\001\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\098\001\
\099\001\100\001\101\001\102\001\103\001\104\001\105\001\106\001\
\107\001\108\001\081\001\110\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\003\001\255\255\255\255\
\255\255\255\255\255\255\098\001\099\001\100\001\101\001\102\001\
\103\001\104\001\105\001\106\001\107\001\108\001\255\255\110\001\
\022\001\023\001\255\255\025\001\255\255\255\255\028\001\255\255\
\255\255\255\255\149\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\040\001\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\052\001\255\255\
\054\001\255\255\255\255\255\255\255\255\255\255\149\001\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\081\001\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\098\001\099\001\100\001\101\001\
\102\001\103\001\104\001\105\001\106\001\107\001\108\001\255\255\
\110\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\149\001"

let yynames_const = "\
  AMP\000\
  AND\000\
  AS\000\
  ASSERT\000\
  ASSUME\000\
  ATTRIBUTES\000\
  BACKTICK\000\
  BACKTICK_AT\000\
  BACKTICK_HASH\000\
  BACKTICK_PERC\000\
  BANG_LBRACE\000\
  BAR\000\
  BAR_RBRACE\000\
  BAR_RBRACK\000\
  BEGIN\000\
  BY\000\
  CALC\000\
  CLASS\000\
  COLON\000\
  COLON_COLON\000\
  COLON_EQUALS\000\
  COMMA\000\
  CONJUNCTION\000\
  DECREASES\000\
  DEFAULT\000\
  DISJUNCTION\000\
  DOLLAR\000\
  DOT\000\
  DOT_LBRACK\000\
  DOT_LBRACK_BAR\000\
  DOT_LENS_PAREN_LEFT\000\
  DOT_LPAREN\000\
  EFFECT\000\
  ELIM\000\
  ELSE\000\
  END\000\
  ENSURES\000\
  EOF\000\
  EQUALS\000\
  EQUALTYPE\000\
  EXCEPTION\000\
  EXISTS\000\
  FALSE\000\
  FORALL\000\
  FRIEND\000\
  FUN\000\
  FUNCTION\000\
  HASH\000\
  IF\000\
  IFF\000\
  IMPLIES\000\
  IN\000\
  INCLUDE\000\
  INLINE\000\
  INLINE_FOR_EXTRACTION\000\
  INSTANCE\000\
  INTRO\000\
  IRREDUCIBLE\000\
  LARROW\000\
  LAYERED_EFFECT\000\
  LBRACE\000\
  LBRACE_BAR\000\
  LBRACE_COLON_PATTERN\000\
  LBRACE_COLON_WELL_FOUNDED\000\
  LBRACK\000\
  LBRACK_AT\000\
  LBRACK_AT_AT\000\
  LBRACK_AT_AT_AT\000\
  LBRACK_BAR\000\
  LENS_PAREN_LEFT\000\
  LENS_PAREN_RIGHT\000\
  LOGIC\000\
  LONG_LEFT_ARROW\000\
  LPAREN\000\
  LPAREN_RPAREN\000\
  MATCH\000\
  MINUS\000\
  MODULE\000\
  NEW\000\
  NEW_EFFECT\000\
  NOEQUALITY\000\
  NOEXTRACT\000\
  OF\000\
  OPAQUE\000\
  OPEN\000\
  PERCENT_LBRACK\000\
  PIPE_RIGHT\000\
  POLYMONADIC_BIND\000\
  POLYMONADIC_SUBCOMP\000\
  PRAGMA_POP_OPTIONS\000\
  PRAGMA_PRINT_EFFECTS_GRAPH\000\
  PRAGMA_PUSH_OPTIONS\000\
  PRAGMA_RESET_OPTIONS\000\
  PRAGMA_RESTART_SOLVER\000\
  PRAGMA_SET_OPTIONS\000\
  PRIVATE\000\
  QMARK\000\
  QMARK_DOT\000\
  QUOTE\000\
  RANGE_OF\000\
  RARROW\000\
  RBRACE\000\
  RBRACK\000\
  REC\000\
  REFLECTABLE\000\
  REIFIABLE\000\
  REIFY\000\
  REQUIRES\000\
  RETURNS\000\
  RETURNS_EQ\000\
  RPAREN\000\
  SEMICOLON\000\
  SET_RANGE_OF\000\
  SPLICE\000\
  SQUIGGLY_RARROW\000\
  SUBKIND\000\
  SUBTYPE\000\
  SUB_EFFECT\000\
  SYNTH\000\
  THEN\000\
  TOTAL\000\
  TRUE\000\
  TRY\000\
  TYPE\000\
  TYP_APP_GREATER\000\
  TYP_APP_LESS\000\
  UNDERSCORE\000\
  UNFOLD\000\
  UNFOLDABLE\000\
  UNIV_HASH\000\
  UNOPTEQUALITY\000\
  VAL\000\
  WHEN\000\
  WITH\000\
  "

let yynames_block = "\
  AND_OP\000\
  CHAR\000\
  IDENT\000\
  IF_OP\000\
  INT\000\
  INT16\000\
  INT32\000\
  INT64\000\
  INT8\000\
  LET\000\
  LET_OP\000\
  MATCH_OP\000\
  NAME\000\
  OPINFIX0a\000\
  OPINFIX0b\000\
  OPINFIX0c\000\
  OPINFIX0d\000\
  OPINFIX1\000\
  OPINFIX2\000\
  OPINFIX3\000\
  OPINFIX4\000\
  OPPREFIX\000\
  OP_MIXFIX_ACCESS\000\
  OP_MIXFIX_ASSIGNMENT\000\
  RANGE\000\
  REAL\000\
  SEMICOLON_OP\000\
  SIZET\000\
  STRING\000\
  TILDE\000\
  TVAR\000\
  UINT16\000\
  UINT32\000\
  UINT64\000\
  UINT8\000\
  "

let yyact = [|
  (fun _ -> failwith "parser")
; (fun __caml_parser_env ->
    Obj.repr(
# 240 "parse.mly"
    (    ( None ))
# 6707 "parse.ml"
               : 'option___anonymous_0_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 242 "parse.mly"
    (let (_1, t) = ((), _2) in
let x =                                                (t) in
    ( Some x ))
# 6716 "parse.ml"
               : 'option___anonymous_0_))
; (fun __caml_parser_env ->
    Obj.repr(
# 248 "parse.mly"
    (    ( None ))
# 6722 "parse.ml"
               : 'option___anonymous_1_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 250 "parse.mly"
    (let (_1, t) = ((), _2) in
let x =                                                    (t) in
    ( Some x ))
# 6731 "parse.ml"
               : 'option___anonymous_1_))
; (fun __caml_parser_env ->
    Obj.repr(
# 256 "parse.mly"
    (    ( None ))
# 6737 "parse.ml"
               : 'option___anonymous_12_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk2_typ_) in
    Obj.repr(
# 258 "parse.mly"
    (let (_1, tactic) = ((), _2) in
let x =                                                                 (tactic) in
    ( Some x ))
# 6746 "parse.ml"
               : 'option___anonymous_12_))
; (fun __caml_parser_env ->
    Obj.repr(
# 264 "parse.mly"
    (    ( None ))
# 6752 "parse.ml"
               : 'option___anonymous_13_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in
    Obj.repr(
# 266 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
let x =
  let phi =                 ( {e with level=Formula} ) in
                                               (phi)
in
    ( Some x ))
# 6764 "parse.ml"
               : 'option___anonymous_13_))
; (fun __caml_parser_env ->
    Obj.repr(
# 275 "parse.mly"
    (    ( None ))
# 6770 "parse.ml"
               : 'option___anonymous_2_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 277 "parse.mly"
    (let (_1, tm) = ((), _2) in
let x =                                                                    (tm) in
    ( Some x ))
# 6779 "parse.ml"
               : 'option___anonymous_2_))
; (fun __caml_parser_env ->
    Obj.repr(
# 283 "parse.mly"
    (    ( None ))
# 6785 "parse.ml"
               : 'option___anonymous_5_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in
    Obj.repr(
# 285 "parse.mly"
    (let (_1, tactic) = ((), _2) in
let x =                                                                       (tactic) in
    ( Some x ))
# 6794 "parse.ml"
               : 'option___anonymous_5_))
; (fun __caml_parser_env ->
    Obj.repr(
# 291 "parse.mly"
    (    ( None ))
# 6800 "parse.ml"
               : 'option___anonymous_6_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 293 "parse.mly"
    (let (_1, i) = ((), _2) in
let x =                               (i) in
    ( Some x ))
# 6809 "parse.ml"
               : 'option___anonymous_6_))
; (fun __caml_parser_env ->
    Obj.repr(
# 299 "parse.mly"
    (    ( None ))
# 6815 "parse.ml"
               : 'option___anonymous_7_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 301 "parse.mly"
    (let (_1, i) = ((), _2) in
let x =                               (i) in
    ( Some x ))
# 6824 "parse.ml"
               : 'option___anonymous_7_))
; (fun __caml_parser_env ->
    Obj.repr(
# 307 "parse.mly"
    (    ( None ))
# 6830 "parse.ml"
               : 'option___anonymous_8_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_typ_) in
    Obj.repr(
# 309 "parse.mly"
    (let (_1, tactic) = ((), _2) in
let x =                                                                    (tactic) in
    ( Some x ))
# 6839 "parse.ml"
               : 'option___anonymous_8_))
; (fun __caml_parser_env ->
    Obj.repr(
# 315 "parse.mly"
    (    ( None ))
# 6845 "parse.ml"
               : 'option___anonymous_9_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_typ_) in
    Obj.repr(
# 317 "parse.mly"
    (let (_1, tactic) = ((), _2) in
let x =                                                                      (tactic) in
    ( Some x ))
# 6854 "parse.ml"
               : 'option___anonymous_9_))
; (fun __caml_parser_env ->
    Obj.repr(
# 323 "parse.mly"
    (    ( None ))
# 6860 "parse.ml"
               : 'option_ascribeKind_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ascribeKind) in
    Obj.repr(
# 325 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6868 "parse.ml"
               : 'option_ascribeKind_))
; (fun __caml_parser_env ->
    Obj.repr(
# 330 "parse.mly"
    (    ( None ))
# 6874 "parse.ml"
               : 'option_ascribeTyp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ascribeTyp) in
    Obj.repr(
# 332 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6882 "parse.ml"
               : 'option_ascribeTyp_))
; (fun __caml_parser_env ->
    Obj.repr(
# 337 "parse.mly"
    (    ( None ))
# 6888 "parse.ml"
               : 'option_constructorPayload_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPayload) in
    Obj.repr(
# 339 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6896 "parse.ml"
               : 'option_constructorPayload_))
; (fun __caml_parser_env ->
    Obj.repr(
# 344 "parse.mly"
    (    ( None ))
# 6902 "parse.ml"
               : 'option_fsTypeArgs_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fsTypeArgs) in
    Obj.repr(
# 346 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6910 "parse.ml"
               : 'option_fsTypeArgs_))
; (fun __caml_parser_env ->
    Obj.repr(
# 351 "parse.mly"
    (    ( None ))
# 6916 "parse.ml"
               : 'option_match_returning_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'match_returning) in
    Obj.repr(
# 353 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6924 "parse.ml"
               : 'option_match_returning_))
; (fun __caml_parser_env ->
    Obj.repr(
# 358 "parse.mly"
    (    ( None ))
# 6930 "parse.ml"
               : 'option_pair_hasSort_simpleTerm__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'hasSort) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 360 "parse.mly"
    (let (x, y) = (_1, _2) in
let x =     ( (x, y) ) in
    ( Some x ))
# 6940 "parse.ml"
               : 'option_pair_hasSort_simpleTerm__))
; (fun __caml_parser_env ->
    Obj.repr(
# 366 "parse.mly"
    (    ( None ))
# 6946 "parse.ml"
               : 'option_string_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'string) in
    Obj.repr(
# 368 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6954 "parse.ml"
               : 'option_string_))
; (fun __caml_parser_env ->
    Obj.repr(
# 373 "parse.mly"
    (    ( None ))
# 6960 "parse.ml"
               : 'option_term_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 375 "parse.mly"
    (let x = _1 in
    ( Some x ))
# 6968 "parse.ml"
               : 'option_term_))
; (fun __caml_parser_env ->
    Obj.repr(
# 380 "parse.mly"
    (    ( false ))
# 6974 "parse.ml"
               : 'boption_SQUIGGLY_RARROW_))
; (fun __caml_parser_env ->
    Obj.repr(
# 382 "parse.mly"
    (let _1 = () in
    ( true ))
# 6981 "parse.ml"
               : 'boption_SQUIGGLY_RARROW_))
; (fun __caml_parser_env ->
    Obj.repr(
# 387 "parse.mly"
    (    ( [] ))
# 6987 "parse.ml"
               : 'loption_separated_nonempty_list_COMMA_appTerm__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_appTerm_) in
    Obj.repr(
# 389 "parse.mly"
    (let x = _1 in
    ( x ))
# 6995 "parse.ml"
               : 'loption_separated_nonempty_list_COMMA_appTerm__))
; (fun __caml_parser_env ->
    Obj.repr(
# 394 "parse.mly"
    (    ( [] ))
# 7001 "parse.ml"
               : 'loption_separated_nonempty_list_SEMICOLON_ident__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_ident_) in
    Obj.repr(
# 396 "parse.mly"
    (let x = _1 in
    ( x ))
# 7009 "parse.ml"
               : 'loption_separated_nonempty_list_SEMICOLON_ident__))
; (fun __caml_parser_env ->
    Obj.repr(
# 401 "parse.mly"
    (    ( [] ))
# 7015 "parse.ml"
               : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_tuplePattern_) in
    Obj.repr(
# 403 "parse.mly"
    (let x = _1 in
    ( x ))
# 7023 "parse.ml"
               : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__))
; (fun __caml_parser_env ->
    Obj.repr(
# 408 "parse.mly"
    (    ( [] ))
# 7029 "parse.ml"
               : 'list___anonymous_11_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letoperatorbinding) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_11_) in
    Obj.repr(
# 410 "parse.mly"
    (let (op, b, xs) = (_1, _2, _3) in
let x =
  let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                                                                             ((op, b))
in
    ( x :: xs ))
# 7043 "parse.ml"
               : 'list___anonymous_11_))
; (fun __caml_parser_env ->
    Obj.repr(
# 419 "parse.mly"
    (    ( [] ))
# 7049 "parse.ml"
               : 'list___anonymous_14_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'argTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in
    Obj.repr(
# 421 "parse.mly"
    (let (t, xs) = (_1, _2) in
let x =                               (t) in
    ( x :: xs ))
# 7059 "parse.ml"
               : 'list___anonymous_14_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'recordExp) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in
    Obj.repr(
# 425 "parse.mly"
    (let (_2, t, _4, xs) = ((), _2, (), _4) in
let x =
  let h =          ( Nothing ) in
                                                                            (h, t)
in
    ( x :: xs ))
# 7072 "parse.ml"
               : 'list___anonymous_14_))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'recordExp) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in
    Obj.repr(
# 432 "parse.mly"
    (let (_1, _2, t, _4, xs) = ((), (), _3, (), _5) in
let x =
  let h =          ( Hash ) in
                                                                            (h, t)
in
    ( x :: xs ))
# 7085 "parse.ml"
               : 'list___anonymous_14_))
; (fun __caml_parser_env ->
    Obj.repr(
# 441 "parse.mly"
    (    ( [] ))
# 7091 "parse.ml"
               : 'list___anonymous_15_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_15_) in
    Obj.repr(
# 443 "parse.mly"
    (let (_1, id, xs) = ((), _2, _3) in
let x =                                                     (id) in
    ( x :: xs ))
# 7101 "parse.ml"
               : 'list___anonymous_15_))
; (fun __caml_parser_env ->
    Obj.repr(
# 449 "parse.mly"
    (    ( [] ))
# 7107 "parse.ml"
               : 'list___anonymous_4_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binder) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in
    Obj.repr(
# 451 "parse.mly"
    (let (b, xs) = (_1, _2) in
let x =                            ([b]) in
    ( x :: xs ))
# 7117 "parse.ml"
               : 'list___anonymous_4_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'multiBinder) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in
    Obj.repr(
# 455 "parse.mly"
    (let (bs, xs) = (_1, _2) in
let x =                                                   (bs) in
    ( x :: xs ))
# 7127 "parse.ml"
               : 'list___anonymous_4_))
; (fun __caml_parser_env ->
    Obj.repr(
# 461 "parse.mly"
    (    ( [] ))
# 7133 "parse.ml"
               : 'list_argTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'argTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_argTerm_) in
    Obj.repr(
# 463 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7142 "parse.ml"
               : 'list_argTerm_))
; (fun __caml_parser_env ->
    Obj.repr(
# 468 "parse.mly"
    (    ( [] ))
# 7148 "parse.ml"
               : 'list_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_atomicTerm_) in
    Obj.repr(
# 470 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7157 "parse.ml"
               : 'list_atomicTerm_))
; (fun __caml_parser_env ->
    Obj.repr(
# 475 "parse.mly"
    (    ( [] ))
# 7163 "parse.ml"
               : 'list_attr_letbinding_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attr_letbinding) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_attr_letbinding_) in
    Obj.repr(
# 477 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7172 "parse.ml"
               : 'list_attr_letbinding_))
; (fun __caml_parser_env ->
    Obj.repr(
# 482 "parse.mly"
    (    ( [] ))
# 7178 "parse.ml"
               : 'list_calcStep_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'calcStep) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_calcStep_) in
    Obj.repr(
# 484 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7187 "parse.ml"
               : 'list_calcStep_))
; (fun __caml_parser_env ->
    Obj.repr(
# 489 "parse.mly"
    (    ( [] ))
# 7193 "parse.ml"
               : 'list_constructorDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constructorDecl) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_constructorDecl_) in
    Obj.repr(
# 491 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7202 "parse.ml"
               : 'list_constructorDecl_))
; (fun __caml_parser_env ->
    Obj.repr(
# 496 "parse.mly"
    (    ( [] ))
# 7208 "parse.ml"
               : 'list_decl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'decl) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_decl_) in
    Obj.repr(
# 498 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7217 "parse.ml"
               : 'list_decl_))
; (fun __caml_parser_env ->
    Obj.repr(
# 503 "parse.mly"
    (    ( [] ))
# 7223 "parse.ml"
               : 'list_decoration_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'decoration) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_decoration_) in
    Obj.repr(
# 505 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7232 "parse.ml"
               : 'list_decoration_))
; (fun __caml_parser_env ->
    Obj.repr(
# 510 "parse.mly"
    (    ( [] ))
# 7238 "parse.ml"
               : 'list_multiBinder_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'multiBinder) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_multiBinder_) in
    Obj.repr(
# 512 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7247 "parse.ml"
               : 'list_multiBinder_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lident_) in
    Obj.repr(
# 517 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7255 "parse.ml"
               : 'nonempty_list_aqualifiedWithAttrs_lident__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqualifiedWithAttrs_lident_) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_aqualifiedWithAttrs_lident__) in
    Obj.repr(
# 520 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7264 "parse.ml"
               : 'nonempty_list_aqualifiedWithAttrs_lident__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in
    Obj.repr(
# 525 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7272 "parse.ml"
               : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__) in
    Obj.repr(
# 528 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7281 "parse.ml"
               : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in
    Obj.repr(
# 533 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7289 "parse.ml"
               : 'nonempty_list_atomicPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicPattern) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicPattern_) in
    Obj.repr(
# 536 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7298 "parse.ml"
               : 'nonempty_list_atomicPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 541 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7306 "parse.ml"
               : 'nonempty_list_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicTerm_) in
    Obj.repr(
# 544 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7315 "parse.ml"
               : 'nonempty_list_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in
    Obj.repr(
# 549 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7323 "parse.ml"
               : 'nonempty_list_atomicUniverse_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicUniverse) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicUniverse_) in
    Obj.repr(
# 552 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7332 "parse.ml"
               : 'nonempty_list_atomicUniverse_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 557 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
let x =                              ( mk_ident (".()", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( [ x ] ))
# 7341 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 561 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
let x =                              ( mk_ident (".[]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( [ x ] ))
# 7350 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 565 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
let x =                                      ( mk_ident (".[||]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( [ x ] ))
# 7359 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 569 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
let x =                                                 ( mk_ident (".(||)", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( [ x ] ))
# 7368 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in
    Obj.repr(
# 573 "parse.mly"
    (let (_1, e, _3, xs) = ((), _2, (), _4) in
let x =                              ( mk_ident (".()", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( x :: xs ))
# 7378 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in
    Obj.repr(
# 577 "parse.mly"
    (let (_1, e, _3, xs) = ((), _2, (), _4) in
let x =                              ( mk_ident (".[]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( x :: xs ))
# 7388 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in
    Obj.repr(
# 581 "parse.mly"
    (let (_1, e, _3, xs) = ((), _2, (), _4) in
let x =                                      ( mk_ident (".[||]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( x :: xs ))
# 7398 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in
    Obj.repr(
# 585 "parse.mly"
    (let (_1, e, _3, xs) = ((), _2, (), _4) in
let x =                                                 ( mk_ident (".(||)", rhs parseState 1), e, rhs2 parseState 1 3 ) in
    ( x :: xs ))
# 7408 "parse.ml"
               : 'nonempty_list_dotOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternOrMultibinder) in
    Obj.repr(
# 591 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7416 "parse.ml"
               : 'nonempty_list_patternOrMultibinder_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'patternOrMultibinder) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_patternOrMultibinder_) in
    Obj.repr(
# 594 "parse.mly"
    (let (x, xs) = (_1, _2) in
    ( x :: xs ))
# 7425 "parse.ml"
               : 'nonempty_list_patternOrMultibinder_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in
    Obj.repr(
# 599 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7433 "parse.ml"
               : 'separated_nonempty_list_AND_letbinding_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'letbinding) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_letbinding_) in
    Obj.repr(
# 602 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7442 "parse.ml"
               : 'separated_nonempty_list_AND_letbinding_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typeDecl) in
    Obj.repr(
# 607 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7450 "parse.ml"
               : 'separated_nonempty_list_AND_typeDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typeDecl) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_typeDecl_) in
    Obj.repr(
# 610 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7459 "parse.ml"
               : 'separated_nonempty_list_AND_typeDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in
    Obj.repr(
# 615 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7467 "parse.ml"
               : 'separated_nonempty_list_BAR_tuplePattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_BAR_tuplePattern_) in
    Obj.repr(
# 618 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7476 "parse.ml"
               : 'separated_nonempty_list_BAR_tuplePattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in
    Obj.repr(
# 623 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7484 "parse.ml"
               : 'separated_nonempty_list_COMMA_appTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_appTerm_) in
    Obj.repr(
# 626 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7493 "parse.ml"
               : 'separated_nonempty_list_COMMA_appTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 631 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7501 "parse.ml"
               : 'separated_nonempty_list_COMMA_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'atomicTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_atomicTerm_) in
    Obj.repr(
# 634 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7510 "parse.ml"
               : 'separated_nonempty_list_COMMA_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPattern) in
    Obj.repr(
# 639 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7518 "parse.ml"
               : 'separated_nonempty_list_COMMA_constructorPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constructorPattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_constructorPattern_) in
    Obj.repr(
# 642 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7527 "parse.ml"
               : 'separated_nonempty_list_COMMA_constructorPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEq) in
    Obj.repr(
# 647 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7535 "parse.ml"
               : 'separated_nonempty_list_COMMA_tmEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEq) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tmEq_) in
    Obj.repr(
# 650 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7544 "parse.ml"
               : 'separated_nonempty_list_COMMA_tmEq_))
(* ocamlyacc-generated semantic actions (source grammar: parse.mly).
   Each list element is one action closure for one production:
   `Parsing.peek_val env k` reads the value k positions from the top of the
   parser value stack, and `Obj.repr` erases the result type for the untyped
   action table.  The `# NNN "parse.mly"` / `# NNN "parse.ml"` directives map
   each action body back to its origin for error locations.
   NOTE: generated code — do not edit by hand; regenerate from parse.mly.
   This group implements the menhir-style list helpers
   separated_nonempty_list(SEP, X): a singleton case `[x]` and a cons case
   `x :: xs` for each (separator, element) pair below. *)
(* separated_nonempty_list(COMMA, tvar): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in
    Obj.repr(
# 655 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7552 "parse.ml"
               : 'separated_nonempty_list_COMMA_tvar_))
(* separated_nonempty_list(COMMA, tvar): cons — the COMMA token itself
   carries no value, hence the `_2 = ()` placeholder. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tvar) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tvar_) in
    Obj.repr(
# 658 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7561 "parse.ml"
               : 'separated_nonempty_list_COMMA_tvar_))
(* separated_nonempty_list(DISJUNCTION, conjunctivePat): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'conjunctivePat) in
    Obj.repr(
# 663 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7569 "parse.ml"
               : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_))
(* separated_nonempty_list(DISJUNCTION, conjunctivePat): cons. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'conjunctivePat) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_) in
    Obj.repr(
# 666 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7578 "parse.ml"
               : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_))
(* separated_nonempty_list(SEMICOLON, appTerm): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in
    Obj.repr(
# 671 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7586 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_appTerm_))
(* separated_nonempty_list(SEMICOLON, appTerm): cons. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_appTerm_) in
    Obj.repr(
# 674 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7595 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_appTerm_))
(* separated_nonempty_list(SEMICOLON, effectDecl): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectDecl) in
    Obj.repr(
# 679 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7603 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_effectDecl_))
(* separated_nonempty_list(SEMICOLON, effectDecl): cons. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'effectDecl) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_effectDecl_) in
    Obj.repr(
# 682 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7612 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_effectDecl_))
(* separated_nonempty_list(SEMICOLON, ident): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in
    Obj.repr(
# 687 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7620 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_ident_))
(* separated_nonempty_list(SEMICOLON, ident): cons. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'ident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_ident_) in
    Obj.repr(
# 690 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7629 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_ident_))
(* separated_nonempty_list(SEMICOLON, tuplePattern): singleton. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in
    Obj.repr(
# 695 "parse.mly"
    (let x = _1 in
    ( [ x ] ))
# 7637 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_tuplePattern_))
(* separated_nonempty_list(SEMICOLON, tuplePattern): cons. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_tuplePattern_) in
    Obj.repr(
# 698 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
    ( x :: xs ))
# 7646 "parse.ml"
               : 'separated_nonempty_list_SEMICOLON_tuplePattern_))
(* Generated actions for the entry point and an optional declaration.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* inputFragment: a list of decls followed by a (valueless) terminator;
   `as_frag` converts the decl list into an FStar_Parser_AST.inputFragment. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decl_) in
    Obj.repr(
# 703 "parse.mly"
    (let (decls, _2) = (_1, ()) in
      (
        as_frag decls
      ))
# 7656 "parse.ml"
               : FStar_Parser_AST.inputFragment))
(* Optional decl, empty case: a single valueless token yields None. *)
; (fun __caml_parser_env ->
    Obj.repr(
# 710 "parse.mly"
    (let _1 = () in
        ( None ))
# 7663 "parse.ml"
               : FStar_Parser_AST.decl option))
(* Optional decl, present case: wrap the parsed decl in Some. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'decl) in
    Obj.repr(
# 713 "parse.mly"
    (let d = _1 in
           ( Some d ))
# 7671 "parse.ml"
               : FStar_Parser_AST.decl option))
(* Generated actions for the `pragma` nonterminal: each maps a pragma
   keyword (valueless `_1 = ()`) plus its optional argument onto the
   corresponding pragma constructor.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* #set-options "…": takes a string argument. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'string) in
    Obj.repr(
# 718 "parse.mly"
    (let (_1, s) = ((), _2) in
      ( SetOptions s ))
# 7679 "parse.ml"
               : 'pragma))
(* #reset-options: optional string argument. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_string_) in
    Obj.repr(
# 721 "parse.mly"
    (let (_1, s_opt) = ((), _2) in
      ( ResetOptions s_opt ))
# 7687 "parse.ml"
               : 'pragma))
(* #push-options: optional string argument. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_string_) in
    Obj.repr(
# 724 "parse.mly"
    (let (_1, s_opt) = ((), _2) in
      ( PushOptions s_opt ))
# 7695 "parse.ml"
               : 'pragma))
(* #pop-options: no argument. *)
; (fun __caml_parser_env ->
    Obj.repr(
# 727 "parse.mly"
    (let _1 = () in
      ( PopOptions ))
# 7702 "parse.ml"
               : 'pragma))
(* #restart-solver: no argument. *)
; (fun __caml_parser_env ->
    Obj.repr(
# 730 "parse.mly"
    (let _1 = () in
      ( RestartSolver ))
# 7709 "parse.ml"
               : 'pragma))
(* #print-effects-graph: no argument. *)
; (fun __caml_parser_env ->
    Obj.repr(
# 733 "parse.mly"
    (let _1 = () in
      ( PrintEffectsGraph ))
# 7716 "parse.ml"
               : 'pragma))
(* Generated actions for `attribute` and `decoration`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* attribute, old bracket syntax: a delimited list of atomicTerms.  When the
   list has two or more elements, emit Warning_DeprecatedAttributeSyntax at
   the production's location, then return the list unchanged. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'list_atomicTerm_) in
    Obj.repr(
# 738 "parse.mly"
    (let (_1, x, _3) = ((), _2, ()) in
      (
        let _ =
            match x with
            | _::_::_ ->
                  log_issue (lhs parseState) (Warning_DeprecatedAttributeSyntax,
                                              old_attribute_syntax_warning)
            | _ -> () in
         x
      ))
# 7732 "parse.ml"
               : 'attribute))
(* attribute, current syntax: a right-flexible SEMICOLON-separated list of
   noSeqTerms, returned as-is. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 749 "parse.mly"
    (let (_1, l, _3) = ((), _2, ()) in
let x =                                                 ( l ) in
      ( x ))
# 7741 "parse.ml"
               : 'attribute))
(* decoration from an attribute. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in
    Obj.repr(
# 755 "parse.mly"
    (let x = _1 in
      ( DeclAttributes x ))
# 7749 "parse.ml"
               : 'decoration))
(* decoration from a qualifier. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qualifier) in
    Obj.repr(
# 758 "parse.mly"
    (let x = _1 in
      ( Qualifier x ))
# 7757 "parse.ml"
               : 'decoration))
(* Generated actions for `decl` and `typeclassDecl`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* decl: `assume UID : term` — force the term to Formula level and build an
   Assume decl carrying an implicit Assumption qualifier. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 763 "parse.mly"
    (let (_1, lid, _3, e) = ((), _2, (), _4) in
let phi =                 ( {e with level=Formula} ) in
      ( mk_decl (Assume(lid, phi)) (rhs2 parseState 1 4) [ Qualifier Assumption ] ))
# 7767 "parse.ml"
               : 'decl))
(* decl: decorations followed by a rawDecl; position taken from the rawDecl. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decoration_) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'rawDecl) in
    Obj.repr(
# 767 "parse.mly"
    (let (ds, decl) = (_1, _2) in
      ( mk_decl decl (rhs parseState 2) ds ))
# 7776 "parse.ml"
               : 'decl))
(* decl: decorations followed by a typeclassDecl; the typeclassDecl also
   returns extra attributes, prepended to the decl's attrs. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decoration_) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typeclassDecl) in
    Obj.repr(
# 770 "parse.mly"
    (let (ds, decl) = (_1, _2) in
      ( let (decl, extra_attrs) = decl in
        let d = mk_decl decl (rhs parseState 2) ds in
        { d with attrs = extra_attrs @ d.attrs }
      ))
# 7788 "parse.ml"
               : 'decl))
(* typeclassDecl: `class` + a single typeDecl, wrapped in Tycon with the
   class flag set (see the inline comment below for why). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typeDecl) in
    Obj.repr(
# 778 "parse.mly"
    (let (_1, tcdef) = ((), _2) in
      (
        (* Only a single type decl allowed, but construct it the same as for multiple ones.
         * Only difference is the `true` below marking that this a class so desugaring
         * adds the needed %splice. *)
        let d = Tycon (false, true, [tcdef]) in

        (* No attrs yet, but perhaps we want a `class` attribute *)
        (d, [])
      ))
# 7804 "parse.ml"
               : 'typeclassDecl))
(* typeclassDecl: `instance` + letqualifier + a single letbinding, turned
   into a TopLevelLet tagged with the tcinstance attribute. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letqualifier) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in
    Obj.repr(
# 789 "parse.mly"
    (let (_1, q, lb) = ((), _2, _3) in
      (
        (* Making a single letbinding *)
        let r = rhs2 parseState 1 3 in
        let lbs = focusLetBindings [lb] r in (* lbs is a singleton really *)
        let d = TopLevelLet(q, lbs) in

        (* Slapping a `tcinstance` attribute to it *)
        let at = mk_term (Var tcinstance_lid) r Type_level in

        (d, [at])
      ))
# 7823 "parse.ml"
               : 'typeclassDecl))
(* Generated actions for `rawDecl`, part 1: pragmas, module-level directives
   and type declarations.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* rawDecl from a pragma. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pragma) in
    Obj.repr(
# 804 "parse.mly"
    (let p = _1 in
      ( Pragma p ))
# 7831 "parse.ml"
               : 'rawDecl))
(* `open M`. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 807 "parse.mly"
    (let (_1, uid) = ((), _2) in
      ( Open uid ))
# 7839 "parse.ml"
               : 'rawDecl))
(* `friend M`. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 810 "parse.mly"
    (let (_1, uid) = ((), _2) in
      ( Friend uid ))
# 7847 "parse.ml"
               : 'rawDecl))
(* `include M`. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 813 "parse.mly"
    (let (_1, uid) = ((), _2) in
      ( Include uid ))
# 7855 "parse.ml"
               : 'rawDecl))
(* `module A = B`: module abbreviation. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 816 "parse.mly"
    (let (_1, uid1, _3, uid2) = ((), _2, (), _4) in
      ( ModuleAbbrev(uid1, uid2) ))
# 7864 "parse.ml"
               : 'rawDecl))
(* `module <lowercase-ident>`: deliberately rejected with a targeted error
   (module names must be uppercase). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in
    Obj.repr(
# 819 "parse.mly"
    (let (_1, _2) = ((), _2) in
      ( raise_error (Fatal_SyntaxError, "Syntax error: expected a module name") (rhs parseState 2) ))
# 7872 "parse.ml"
               : 'rawDecl))
(* `module M`: top-level module header. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 822 "parse.mly"
    (let (_1, uid) = ((), _2) in
      (  TopLevelModule uid ))
# 7880 "parse.ml"
               : 'rawDecl))
(* `type t₁ and t₂ …`: mutually-defined type declarations; both the effect
   flag and the class flag of Tycon are false here. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_typeDecl_) in
    Obj.repr(
# 825 "parse.mly"
    (let (_1, tcdefs) = ((), _2) in
      ( Tycon (false, false, tcdefs) ))
# 7888 "parse.ml"
               : 'rawDecl))
(* Effect abbreviation `effect E params = typ`: a Tycon with the effect flag
   (first bool) set to true. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'typars) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 828 "parse.mly"
    (let (_1, uid, tparams, _4, t) = ((), _2, _3, (), _5) in
      ( Tycon(true, false, [(TyconAbbrev(uid, tparams, None, t))]) ))
# 7898 "parse.ml"
               : 'rawDecl))
(* Generated actions for `rawDecl`, part 2: top-level lets and simple `val`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* Top-level `let`: the bool `_1` comes from the maybeFocus-style token;
   multiple bindings are only legal under `rec`, otherwise raise
   Fatal_MultipleLetBinding. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : bool) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letqualifier) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_letbinding_) in
    Obj.repr(
# 831 "parse.mly"
    (let (_1, q, lbs) = (_1, _2, _3) in
      (
        let r = rhs2 parseState 1 3 in
        let lbs = focusLetBindings lbs r in
        if q <> Rec && List.length lbs <> 1
        then raise_error (Fatal_MultipleLetBinding, "Unexpected multiple let-binding (Did you forget some rec qualifier ?)") r;
        TopLevelLet(q, lbs)
      ))
# 7914 "parse.ml"
               : 'rawDecl))
(* `val <constant>`: always an error — kept as a production purely to give a
   better message than a generic syntax error. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in
    Obj.repr(
# 840 "parse.mly"
    (let (_1, c) = ((), _2) in
      (
        (* This is just to provide a better error than "syntax error" *)
        raise_error (Fatal_SyntaxError, "Syntax error: constants are not allowed in val declarations") (rhs2 parseState 1 2)
      ))
# 7925 "parse.ml"
               : 'rawDecl))
(* `val id binders : typ`: binders (if any) are flattened and folded into a
   Product type around t before building the Val decl. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : FStar_Ident.ident) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 846 "parse.mly"
    (let (_1, id, bss, _4, t) = ((), _2, _3, (), _5) in
let lid =               ( id ) in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 7941 "parse.ml"
               : 'rawDecl))
(* Generated actions for `rawDecl`, part 3: `val ( op ) binders : typ` for
   the various operator-name token classes.  Each variant builds the operator
   ident differently, then funnels through the same tail: compile the operator
   name via `compile_op'` into a valid identifier, fold any binders into a
   Product type, and produce Val(lid, t).
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* Operator given as a raw string token; ident made via mk_ident. *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 855 "parse.mly"
    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in
let lid =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 7960 "parse.ml"
               : 'rawDecl))
(* Operator given as a binop_name nonterminal (already an ident). *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binop_name) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 867 "parse.mly"
    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in
let lid =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 7979 "parse.ml"
               : 'rawDecl))
(* Another string-token operator class; same shape as the first variant. *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 879 "parse.mly"
    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in
let lid =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 7998 "parse.ml"
               : 'rawDecl))
(* `and`-prefixed let-operator (e.g. `and*`): ident is "and" ^ op. *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 891 "parse.mly"
    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in
let lid =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 8020 "parse.ml"
               : 'rawDecl))
(* `let`-prefixed let-operator (e.g. `let*`): ident is "let" ^ op. *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 906 "parse.mly"
    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in
let lid =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let t = match flatten bss with
          | [] -> t
          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level
        in Val(lid, t)
      ))
# 8042 "parse.ml"
               : 'rawDecl))
(* Generated actions for `rawDecl`, part 4: splice, exceptions and effects.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* `%splice [ids] tactic`: list of idents spliced by a thunked atomic term. *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'loption_separated_nonempty_list_SEMICOLON_ident__) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in
    Obj.repr(
# 921 "parse.mly"
    (let (_1, _2, xs, _4, t) = ((), (), _3, (), _5) in
let ids =     ( xs ) in
      ( Splice (ids, t) ))
# 8052 "parse.ml"
               : 'rawDecl))
(* `exception E [of t]`: optional payload type. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_0_) in
    Obj.repr(
# 925 "parse.mly"
    (let (_1, lid, t_opt) = ((), _2, _3) in
      ( Exception(lid, t_opt) ))
# 8061 "parse.ml"
               : 'rawDecl))
(* `new_effect …`. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'newEffect) in
    Obj.repr(
# 928 "parse.mly"
    (let (_1, ne) = ((), _2) in
      ( NewEffect ne ))
# 8069 "parse.ml"
               : 'rawDecl))
(* Layered effect via an effectDefinition. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'effectDefinition) in
    Obj.repr(
# 931 "parse.mly"
    (let (_1, ne) = ((), _2) in
      ( LayeredEffect ne ))
# 8077 "parse.ml"
               : 'rawDecl))
(* Layered effect via a layeredEffectDefinition (same constructor). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'layeredEffectDefinition) in
    Obj.repr(
# 934 "parse.mly"
    (let (_1, ne) = ((), _2) in
      ( LayeredEffect ne ))
# 8085 "parse.ml"
               : 'rawDecl))
(* `sub_effect …`. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'subEffect) in
    Obj.repr(
# 937 "parse.mly"
    (let (_1, se) = ((), _2) in
      ( SubEffect se ))
# 8093 "parse.ml"
               : 'rawDecl))
(* Polymonadic bind declaration. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'polymonadic_bind) in
    Obj.repr(
# 940 "parse.mly"
    (let (_1, b) = ((), _2) in
      ( Polymonadic_bind b ))
# 8101 "parse.ml"
               : 'rawDecl))
(* Polymonadic subcomputation declaration. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'polymonadic_subcomp) in
    Obj.repr(
# 943 "parse.mly"
    (let (_1, c) = ((), _2) in
      ( Polymonadic_subcomp c ))
# 8109 "parse.ml"
               : 'rawDecl))
(* Generated actions for `typeDecl`, `typars` and `tvarinsts`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* typeDecl: ident, type parameters, optional kind ascription and a
   typeDefinition continuation; `tcdef` is a function that is applied here to
   the name, parameters and ascription to build the final tycon. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'ident) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'typars) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'option_ascribeKind_) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'typeDefinition) in
    Obj.repr(
# 948 "parse.mly"
    (let (lid, tparams, ascr_opt, tcdef) = (_1, _2, _3, _4) in
      ( tcdef lid tparams ascr_opt ))
# 8120 "parse.ml"
               : 'typeDecl))
(* typars from tvarinsts (passthrough). *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvarinsts) in
    Obj.repr(
# 953 "parse.mly"
    (let x = _1 in
                             ( x ))
# 8128 "parse.ml"
               : 'typars))
(* typars from binders (passthrough). *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binders) in
    Obj.repr(
# 956 "parse.mly"
    (let x = _1 in
                             ( x ))
# 8136 "parse.ml"
               : 'typars))
(* tvarinsts: a delimited COMMA-list of type variables, each turned into a
   kind-level TVariable binder at its own source range. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_tvar_) in
    Obj.repr(
# 961 "parse.mly"
    (let (_1, tvs, _3) = ((), _2, ()) in
      ( map (fun tv -> mk_binder (TVariable(tv)) (range_of_id tv) Kind None) tvs ))
# 8144 "parse.ml"
               : 'tvarinsts))
(* Generated actions for `typeDefinition`.  Each action returns a CLOSURE
   `fun id binders kopt -> …` that the typeDecl action above applies; each
   closure first runs `check_id` on the name.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* Empty body: abstract type. *)
; (fun __caml_parser_env ->
    Obj.repr(
# 966 "parse.mly"
    (      ( (fun id binders kopt -> check_id id; TyconAbstract(id, binders, kopt)) ))
# 8150 "parse.ml"
               : 'typeDefinition))
(* `= typ`: type abbreviation. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 968 "parse.mly"
    (let (_1, t) = ((), _2) in
      ( (fun id binders kopt ->  check_id id; TyconAbbrev(id, binders, kopt, t)) ))
# 8158 "parse.ml"
               : 'typeDefinition))
(* `= { fields }`: record type, no attributes (attrs_opt = None). *)
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in
    Obj.repr(
# 971 "parse.mly"
    (let (_1, _1_inlined1, record_field_decls, _3) = ((), (), _3, ()) in
let record_field_decls =     ( record_field_decls ) in
let attrs_opt =     ( None ) in
      ( (fun id binders kopt -> check_id id; TyconRecord(id, binders, kopt, none_to_empty_list attrs_opt, record_field_decls)) ))
# 8168 "parse.ml"
               : 'typeDefinition))
(* Same record form but with binder attributes (attrs_opt = Some x). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in
    Obj.repr(
# 976 "parse.mly"
    (let (_1, x, _1_inlined1, record_field_decls, _3) = ((), _2, (), _4, ()) in
let record_field_decls =     ( record_field_decls ) in
let attrs_opt =     ( Some x ) in
      ( (fun id binders kopt -> check_id id; TyconRecord(id, binders, kopt, none_to_empty_list attrs_opt, record_field_decls)) ))
# 8179 "parse.ml"
               : 'typeDefinition))
(* `= | C1 … | Cn`: variant (inductive) type. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_constructorDecl_) in
    Obj.repr(
# 981 "parse.mly"
    (let (_1, ct_decls) = ((), _2) in
      ( (fun id binders kopt -> check_id id; TyconVariant(id, binders, kopt, ct_decls)) ))
# 8187 "parse.ml"
               : 'typeDefinition))
(* Generated actions for `recordFieldDecl`, `constructorPayload` and
   `constructorDecl`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* recordFieldDecl: a possibly-qualified/attributed lident-or-operator,
   colon, type — unpacked into (lid, qual, attrs, t). *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqualifiedWithAttrs_lidentOrOperator_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 986 "parse.mly"
    (let (qualified_lid, _2, t) = (_1, (), _3) in
      (
        let (qual, attrs), lid = qualified_lid in
        (lid, qual, attrs, t)
      ))
# 8199 "parse.ml"
               : 'recordFieldDecl))
(* constructorPayload: `: typ` — an arbitrary (full arrow) type. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 994 "parse.mly"
    (let (_1, t) = ((), _2) in
                                                        (VpArbitrary  t))
# 8207 "parse.ml"
               : 'constructorPayload))
(* constructorPayload: `of typ` notation. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 997 "parse.mly"
    (let (_1, t) = ((), _2) in
                                                        (VpOfNotation t))
# 8215 "parse.ml"
               : 'constructorPayload))
(* constructorPayload: inline record `{ fields }` with an optional tail. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_1_) in
    Obj.repr(
# 1000 "parse.mly"
    (let (_1, record_field_decls, _3, opt) = ((), _2, (), _4) in
let fields =     ( record_field_decls ) in
                                                        (VpRecord(fields, opt)))
# 8225 "parse.ml"
               : 'constructorPayload))
(* constructorDecl: `| C [payload]`, no attributes. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option_constructorPayload_) in
    Obj.repr(
# 1006 "parse.mly"
    (let (_1, uid, payload) = ((), _2, _3) in
let attrs_opt =     ( None ) in
    ( uid, payload, none_to_empty_list attrs_opt ))
# 8235 "parse.ml"
               : 'constructorDecl))
(* constructorDecl: `| [@@ attrs] C [payload]`, with binder attributes. *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option_constructorPayload_) in
    Obj.repr(
# 1010 "parse.mly"
    (let (_1, x, uid, payload) = ((), _2, _3, _4) in
let attrs_opt =     ( Some x ) in
    ( uid, payload, none_to_empty_list attrs_opt ))
# 8246 "parse.ml"
               : 'constructorDecl))
(* Generated actions for `attr_letbinding` and `letoperatorbinding`.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* attr_letbinding without an attribute: (None, lb). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in
    Obj.repr(
# 1016 "parse.mly"
    (let (_2, lb) = ((), _2) in
let attr =     ( None ) in
    ( attr, lb ))
# 8255 "parse.ml"
               : 'attr_letbinding))
(* attr_letbinding with a leading attribute: (Some attr, lb). *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'attribute) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in
    Obj.repr(
# 1020 "parse.mly"
    (let (x, _2, lb) = (_1, (), _3) in
let attr =     ( Some x ) in
    ( attr, lb ))
# 8265 "parse.ml"
               : 'attr_letbinding))
(* letoperatorbinding: pattern, optional type ascription, optional `= tm`.
   When `tm` is absent this is let-punning: only a plain variable pattern is
   allowed, and the bound term is the variable itself; any other pattern
   without a right-hand side is a syntax error. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'option_ascribeTyp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_2_) in
    Obj.repr(
# 1026 "parse.mly"
    (let (pat, ascr_opt, tm) = (_1, _2, _3) in
    (
        let h tm
	  = ( ( match ascr_opt with
              | None   -> pat
              | Some t -> mk_pattern (PatAscribed(pat, t)) (rhs2 parseState 1 2) )
	    , tm)
	in
	match pat.pat, tm with
        | _               , Some tm -> h tm
        | PatVar (v, _, _), None    ->
          let v = lid_of_ns_and_id [] v in
          h (mk_term (Var v) (rhs parseState 1) Expr)
        | _ -> raise_error (Fatal_SyntaxError, "Syntax error: let-punning expects a name, not a pattern") (rhs parseState 2)
    ))
# 8288 "parse.ml"
               : 'letoperatorbinding))
(* Generated actions for `letbinding`, part 1.  All head-name variants share
   the same tail: build a PatVar for the (possibly compiled operator) name,
   wrap it in PatApp with the flattened parameter patterns, and pair with the
   body term — ascribing the pattern when a type ascription is present.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* Head is a plain identifier. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'maybeFocus) in
    let _2 = (Parsing.peek_val __caml_parser_env 4 : FStar_Ident.ident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1044 "parse.mly"
    (let (focus_opt, id, lbp, ascr_opt, _5, tm) = (_1, _2, _3, _4, (), _6) in
let lid =               ( id ) in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8308 "parse.ml"
               : 'letbinding))
(* Head is a parenthesized operator given as a string token; name compiled
   via compile_op'. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1055 "parse.mly"
    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in
let lid =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8331 "parse.ml"
               : 'letbinding))
(* Head is a binop_name (already an ident); still compiled via compile_op'. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'binop_name) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1069 "parse.mly"
    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in
let lid =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8354 "parse.ml"
               : 'letbinding))
(* Generated actions for `letbinding`, part 2: remaining operator-head
   variants — another string-token operator class, then the "and"- and
   "let"-prefixed let-operator forms.  Same shared tail as part 1.
   (Generated by ocamlyacc from parse.mly — do not edit by hand.) *)
(* Operator from another string-token class. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1083 "parse.mly"
    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in
let lid =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8377 "parse.ml"
               : 'letbinding))
(* `and`-prefixed let-operator head: ident is "and" ^ op. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1097 "parse.mly"
    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in
let lid =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8403 "parse.ml"
               : 'letbinding))
(* `let`-prefixed let-operator head: ident is "let" ^ op. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1114 "parse.mly"
    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in
let lid =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
      (
        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in
        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in
        let pos = rhs2 parseState 1 6 in
        match ascr_opt with
        | None -> (focus_opt, (pat, tm))
        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))
      ))
# 8429 "parse.ml"
               : 'letbinding))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'maybeFocus) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tuplePattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ascribeTyp) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1131 "parse.mly"
    (let (focus_opt, pat, ascr, _4, tm) = (_1, _2, _3, (), _5) in
      ( focus_opt, (mk_pattern (PatAscribed(pat, ascr)) (rhs2 parseState 1 4), tm) ))
# 8440 "parse.ml"
               : 'letbinding))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'maybeFocus) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1134 "parse.mly"
    (let (focus_opt, pat, _3, tm) = (_1, _2, (), _4) in
      ( focus_opt, (pat, tm) ))
# 8450 "parse.ml"
               : 'letbinding))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectRedefinition) in
    Obj.repr(
# 1139 "parse.mly"
    (let ed = _1 in
    ( ed ))
# 8458 "parse.ml"
               : 'newEffect))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectDefinition) in
    Obj.repr(
# 1142 "parse.mly"
    (let ed = _1 in
    ( ed ))
# 8466 "parse.ml"
               : 'newEffect))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 1147 "parse.mly"
    (let (lid, _2, t) = (_1, (), _3) in
    ( RedefineEffect(lid, [], t) ))
# 8475 "parse.ml"
               : 'effectRedefinition))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'tmArrow_tmNoEq_) in
    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_SEMICOLON_effectDecl_) in
    Obj.repr(
# 1152 "parse.mly"
    (let (_1, lid, bs, _4, typ, _6, eds, _8) = ((), _2, _3, (), _5, (), _7, ()) in
    ( DefineEffect(lid, bs, typ, eds) ))
# 8486 "parse.ml"
               : 'effectDefinition))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'tmNoEq) in
    Obj.repr(
# 1157 "parse.mly"
    (let (_1, lid, bs, _4, r, _6) = ((), _2, _3, (), _5, ()) in
    (
      let typ =  (* bs -> Effect *)
        let first_b, last_b =
          match bs with
          | [] ->
             raise_error (Fatal_SyntaxError,
                          "Syntax error: unexpected empty binders list in the layered effect definition")
                         (range_of_id lid)
          | _ -> hd bs, last bs in
        let r = union_ranges first_b.brange last_b.brange in
        mk_term (Product (bs, mk_term (Name (lid_of_str "Effect")) r Type_level)) r Type_level in
      let rec decls (r:term) =
        match r.tm with
        | Paren r -> decls r
        | Record (None, flds) ->
           flds |> List.map (fun (lid, t) ->
                              mk_decl (Tycon (false,
                                              false,
                                              [TyconAbbrev (ident_of_lid lid, [], None, t)]))
                                      t.range [])
        | _ ->
           raise_error (Fatal_SyntaxError,
                        "Syntax error: layered effect combinators should be declared as a record")
                       r.range in
      DefineEffect (lid, [], typ, decls r) ))
# 8520 "parse.ml"
               : 'layeredEffectDefinition))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : FStar_Ident.ident) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'binders) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 1186 "parse.mly"
    (let (lid, action_params, _3, t) = (_1, _2, (), _4) in
    ( mk_decl (Tycon (false, false, [TyconAbbrev(lid, action_params, None, t)])) (rhs2 parseState 1 3) [] ))
# 8530 "parse.ml"
               : 'effectDecl))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 1191 "parse.mly"
    (let (src_eff, _2, tgt_eff, _4, lift) = (_1, (), _3, (), _5) in
      ( { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift; braced=false } ))
# 8540 "parse.ml"
               : 'subEffect))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'quident) in
    let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in
    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'simpleTerm) in
    Obj.repr(
# 1194 "parse.mly"
    (let (src_eff, _2, tgt_eff, _4, x, _2_inlined1, y, _7) = (_1, (), _3, (), _5, (), _7, ()) in
let lift2_opt =     ( None ) in
let lift1 =     ( (x, y) ) in
     (
       match lift2_opt with
       | None ->
          begin match lift1 with
          | ("lift", lift) ->
             { msource = src_eff; mdest = tgt_eff; lift_op = LiftForFree lift; braced=true }
          | ("lift_wp", lift_wp) ->
             { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift_wp; braced=true }
          | _ ->
             raise_error (Fatal_UnexpectedIdentifier, "Unexpected identifier; expected {'lift', and possibly 'lift_wp'}") (lhs parseState)
          end
       | Some (id2, tm2) ->
          let (id1, tm1) = lift1 in
          let lift, lift_wp = match (id1, id2) with
                  | "lift_wp", "lift" -> tm1, tm2
                  | "lift", "lift_wp" -> tm2, tm1
                  | _ -> raise_error (Fatal_UnexpectedIdentifier, "Unexpected identifier; expected {'lift', 'lift_wp'}") (lhs parseState)
          in
          { msource = src_eff; mdest = tgt_eff; lift_op = ReifiableLift (lift, lift_wp); braced=true }
     ))
# 8572 "parse.ml"
               : 'subEffect))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 11 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 9 : 'quident) in
    let _5 = (Parsing.peek_val __caml_parser_env 7 : string) in
    let _7 = (Parsing.peek_val __caml_parser_env 5 : 'simpleTerm) in
    let _9 = (Parsing.peek_val __caml_parser_env 3 : string) in
    let _11 = (Parsing.peek_val __caml_parser_env 1 : 'simpleTerm) in
    Obj.repr(
# 1218 "parse.mly"
    (let (src_eff, _2, tgt_eff, _4, x, _2_inlined1, y, _1, id, _2_inlined2, y_inlined1, _7) = (_1, (), _3, (), _5, (), _7, (), _9, (), _11, ()) in
let lift2_opt =
  let y = y_inlined1 in
  let x =
    let x =                                                           (id) in
        ( (x, y) )
  in
      ( Some x )
in
let lift1 =     ( (x, y) ) in
     (
       match lift2_opt with
       | None ->
          begin match lift1 with
          | ("lift", lift) ->
             { msource = src_eff; mdest = tgt_eff; lift_op = LiftForFree lift; braced=true }
          | ("lift_wp", lift_wp) ->
             { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift_wp; braced=true }
          | _ ->
             raise_error (Fatal_UnexpectedIdentifier, "Unexpected identifier; expected {'lift', and possibly 'lift_wp'}") (lhs parseState)
          end
       | Some (id2, tm2) ->
          let (id1, tm1) = lift1 in
          let lift, lift_wp = match (id1, id2) with
                  | "lift_wp", "lift" -> tm1, tm2
                  | "lift", "lift_wp" -> tm2, tm1
                  | _ -> raise_error (Fatal_UnexpectedIdentifier, "Unexpected identifier; expected {'lift', 'lift_wp'}") (lhs parseState)
          in
          { msource = src_eff; mdest = tgt_eff; lift_op = ReifiableLift (lift, lift_wp); braced=true }
     ))
# 8613 "parse.ml"
               : 'subEffect))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 7 : 'quident) in
    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'quident) in
    let _7 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in
    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 1251 "parse.mly"
    (let (_1, m_eff, _3, n_eff, _5, _6, p_eff, _8, bind) = ((), _2, (), _4, (), (), _7, (), _9) in
      ( (m_eff, n_eff, p_eff, bind) ))
# 8624 "parse.ml"
               : 'polymonadic_bind))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 1256 "parse.mly"
    (let (m_eff, _2, n_eff, _4, subcomp) = (_1, (), _3, (), _5) in
    ( (m_eff, n_eff, subcomp) ))
# 8634 "parse.ml"
               : 'polymonadic_subcomp))
; (fun __caml_parser_env ->
    Obj.repr(
# 1261 "parse.mly"
    (let _1 = () in
                  ( Assumption ))
# 8641 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1264 "parse.mly"
    (let _1 = () in
                  (
    raise_error (Fatal_InlineRenamedAsUnfold, "The 'inline' qualifier has been renamed to 'unfold'") (lhs parseState)
   ))
# 8650 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1269 "parse.mly"
    (let _1 = () in
                  (
              raise_error (Fatal_UnfoldableDeprecated, "The 'unfoldable' qualifier is no longer denotable; it is the default qualifier so just omit it") (lhs parseState)
   ))
# 8659 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1274 "parse.mly"
    (let _1 = () in
                          (
     Inline_for_extraction
  ))
# 8668 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1279 "parse.mly"
    (let _1 = () in
           (
     Unfold_for_unification_and_vcgen
  ))
# 8677 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1284 "parse.mly"
    (let _1 = () in
                  ( Irreducible ))
# 8684 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1287 "parse.mly"
    (let _1 = () in
                  ( NoExtract ))
# 8691 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1290 "parse.mly"
    (let _1 = () in
                  ( DefaultEffect ))
# 8698 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1293 "parse.mly"
    (let _1 = () in
                  ( TotalEffect ))
# 8705 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1296 "parse.mly"
    (let _1 = () in
                  ( Private ))
# 8712 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1299 "parse.mly"
    (let _1 = () in
                  ( Noeq ))
# 8719 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1302 "parse.mly"
    (let _1 = () in
                  ( Unopteq ))
# 8726 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1305 "parse.mly"
    (let _1 = () in
                  ( New ))
# 8733 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1308 "parse.mly"
    (let _1 = () in
                  ( log_issue (lhs parseState) (Warning_logicqualifier,
                                                logic_qualifier_deprecation_warning);
                    Logic ))
# 8742 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1313 "parse.mly"
    (let _1 = () in
                  ( Opaque ))
# 8749 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1316 "parse.mly"
    (let _1 = () in
                  ( Reifiable ))
# 8756 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1319 "parse.mly"
    (let _1 = () in
                  ( Reflectable ))
# 8763 "parse.ml"
               : 'qualifier))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'boption_SQUIGGLY_RARROW_) in
    Obj.repr(
# 1324 "parse.mly"
    (let b = _1 in
                               ( b ))
# 8771 "parse.ml"
               : 'maybeFocus))
; (fun __caml_parser_env ->
    Obj.repr(
# 1329 "parse.mly"
    (let _1 = () in
                ( Rec ))
# 8778 "parse.ml"
               : 'letqualifier))
; (fun __caml_parser_env ->
    Obj.repr(
# 1332 "parse.mly"
    (                ( NoLetQualifier ))
# 8784 "parse.ml"
               : 'letqualifier))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'thunk_tmNoEq_) in
    Obj.repr(
# 1336 "parse.mly"
    (let (_1, _2, t, _4) = ((), (), _3, ()) in
                                       ( mk_meta_tac t ))
# 8792 "parse.ml"
               : 'aqual))
; (fun __caml_parser_env ->
    Obj.repr(
# 1339 "parse.mly"
    (let _1 = () in
              ( Implicit ))
# 8799 "parse.ml"
               : 'aqual))
; (fun __caml_parser_env ->
    Obj.repr(
# 1342 "parse.mly"
    (let _1 = () in
              ( Equality ))
# 8806 "parse.ml"
               : 'aqual))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 1347 "parse.mly"
    (let (_1, l, _3) = ((), _2, ()) in
let t =                                                 ( l ) in
                                               ( t ))
# 8815 "parse.ml"
               : 'binderAttributes))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_BAR_tuplePattern_) in
    Obj.repr(
# 1353 "parse.mly"
    (let pats = _1 in
                                                    ( pats ))
# 8823 "parse.ml"
               : 'disjunctivePattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_constructorPattern_) in
    Obj.repr(
# 1358 "parse.mly"
    (let pats = _1 in
      ( match pats with | [x] -> x | l -> mk_pattern (PatTuple (l, false)) (rhs parseState 1) ))
# 8831 "parse.ml"
               : 'tuplePattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constructorPattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPattern) in
    Obj.repr(
# 1363 "parse.mly"
    (let (pat, _2, pats) = (_1, (), _3) in
      ( mk_pattern (consPat (rhs parseState 3) pat pats) (rhs2 parseState 1 3) ))
# 8840 "parse.ml"
               : 'constructorPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicPattern_) in
    Obj.repr(
# 1366 "parse.mly"
    (let (uid, args) = (_1, _2) in
      (
        let head_pat = mk_pattern (PatName uid) (rhs parseState 1) in
        mk_pattern (PatApp (head_pat, args)) (rhs2 parseState 1 2)
      ))
# 8852 "parse.ml"
               : 'constructorPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in
    Obj.repr(
# 1372 "parse.mly"
    (let pat = _1 in
      ( pat ))
# 8860 "parse.ml"
               : 'constructorPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tuplePattern) in
    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in
    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in
    Obj.repr(
# 1377 "parse.mly"
    (let (_1, pat, _3, t, phi_opt, _6) = ((), _2, (), _4, _5, ()) in
      (
        let pos_t = rhs2 parseState 2 4 in
        let pos = rhs2 parseState 1 6 in
        mkRefinedPattern pat t true phi_opt pos_t pos
      ))
# 8874 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__) in
    Obj.repr(
# 1384 "parse.mly"
    (let (_1, xs, _3) = ((), _2, ()) in
let pats =     ( xs ) in
      ( mk_pattern (PatList pats) (rhs2 parseState 1 3) ))
# 8883 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_) in
    Obj.repr(
# 1388 "parse.mly"
    (let (_1, record_pat, _3) = ((), _2, ()) in
      ( mk_pattern (PatRecord record_pat) (rhs2 parseState 1 3) ))
# 8891 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'constructorPattern) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_constructorPattern_) in
    Obj.repr(
# 1391 "parse.mly"
    (let (_1, pat0, _3, pats, _5) = ((), _2, (), _4, ()) in
      ( mk_pattern (PatTuple(pat0::pats, true)) (rhs2 parseState 1 5) ))
# 8900 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tuplePattern) in
    Obj.repr(
# 1394 "parse.mly"
    (let (_1, pat, _3) = ((), _2, ()) in
                                     ( pat ))
# 8908 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in
    Obj.repr(
# 1397 "parse.mly"
    (let tv = _1 in
                              ( mk_pattern (PatTvar (tv, None, [])) (rhs parseState 1) ))
# 8916 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1400 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))
# 8925 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1404 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( op ) in
      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))
# 8934 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1408 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))
# 8943 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1412 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =
  let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                      (op)
in
      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))
# 8955 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1419 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =
  let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                      (op)
in
      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))
# 8967 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    Obj.repr(
# 1426 "parse.mly"
    (let _1 = () in
      ( mk_pattern (PatWild (None, [])) (rhs parseState 1) ))
# 8974 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    Obj.repr(
# 1429 "parse.mly"
    (let (_1, _2) = ((), ()) in
      ( mk_pattern (PatWild (Some Implicit, [])) (rhs parseState 1) ))
# 8981 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in
    Obj.repr(
# 1432 "parse.mly"
    (let c = _1 in
      ( mk_pattern (PatConst c) (rhs parseState 1) ))
# 8989 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 1435 "parse.mly"
    (let (_1, q) = ((), _2) in
      ( mk_pattern (PatVQuote q) (rhs2 parseState 1 2) ))
# 8997 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lident_) in
    Obj.repr(
# 1438 "parse.mly"
    (let qual_id = _1 in
    (
      let (aqual, attrs), lid = qual_id in
      mk_pattern (PatVar (lid, aqual, attrs)) (rhs parseState 1) ))
# 9007 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 1443 "parse.mly"
    (let uid = _1 in
      ( mk_pattern (PatName uid) (rhs parseState 1) ))
# 9015 "parse.ml"
               : 'atomicPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'qlident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in
    Obj.repr(
# 1448 "parse.mly"
    (let (x, _2, y) = (_1, (), _3) in
let p =     ( (x, y) ) in
      ( p ))
# 9025 "parse.ml"
               : 'fieldPattern))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in
    Obj.repr(
# 1452 "parse.mly"
    (let lid = _1 in
      ( lid, mk_pattern (PatVar (ident_of_lid lid, None, [])) (rhs parseState 1) ))
# 9033 "parse.ml"
               : 'fieldPattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in
    Obj.repr(
# 1457 "parse.mly"
    (let (_1, id, _3, t, _5) = ((), _2, (), _4, ()) in
      ( let r = rhs2 parseState 1 5 in
        let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in
        let asc = (t, None) in
        [mk_pattern (PatAscribed(w, asc)) r]
      ))
# 9046 "parse.ml"
               : 'patternOrMultibinder))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in
    Obj.repr(
# 1464 "parse.mly"
    (let (_1, t, _3) = ((), _2, ()) in
      ( let r = rhs2 parseState 1 3 in
        let id = gen r in
        let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in
        let asc = (t, None) in
        [mk_pattern (PatAscribed(w, asc)) r]
      ))
# 9059 "parse.ml"
               : 'patternOrMultibinder))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in
    Obj.repr(
# 1472 "parse.mly"
    (let pat = _1 in
                      ( [pat] ))
# 9067 "parse.ml"
               : 'patternOrMultibinder))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqualifiedWithAttrs_lident_) in
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'nonempty_list_aqualifiedWithAttrs_lident__) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in
    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in
    Obj.repr(
# 1475 "parse.mly"
    (let (_1, qual_id0, qual_ids, _4, t, r, _7) = ((), _2, _3, (), _5, _6, ()) in
      (
        let pos = rhs2 parseState 1 7 in
        let t_pos = rhs parseState 5 in
        let qual_ids = qual_id0 :: qual_ids in
        List.map (fun ((aq, attrs), x) -> mkRefinedPattern (mk_pattern (PatVar (x, aq, attrs)) pos) t false r t_pos pos) qual_ids
      ))
# 9083 "parse.ml"
               : 'patternOrMultibinder))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in
    Obj.repr(
# 1485 "parse.mly"
    (let aqualifiedWithAttrs_lid = _1 in
     (
       let (q, attrs), lid = aqualifiedWithAttrs_lid in
       mk_binder_with_attrs (Variable lid) (rhs parseState 1) Type_level q attrs
     ))
# 9094 "parse.ml"
               : 'binder))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in
    Obj.repr(
# 1491 "parse.mly"
    (let tv = _1 in
             ( mk_binder (TVariable tv) (rhs parseState 1) Kind None  ))
# 9102 "parse.ml"
               : 'binder))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in
    Obj.repr(
# 1496 "parse.mly"
    (let (_1, id, _3, t, _5) = ((), _2, (), _4, ()) in
      ( let r = rhs2 parseState 1 5 in
        [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)]
      ))
# 9113 "parse.ml"
               : 'multiBinder))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in
    Obj.repr(
# 1501 "parse.mly"
    (let (_1, t, _3) = ((), _2, ()) in
      ( let r = rhs2 parseState 1 3 in
        let id = gen r in
        [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)]
      ))
# 9124 "parse.ml"
               : 'multiBinder))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__) in
    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in
    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in
    Obj.repr(
# 1507 "parse.mly"
    (let (_1, qual_ids, _3, t, r, _6) = ((), _2, (), _4, _5, ()) in
     (
       let should_bind_var = match qual_ids with | [ _ ] -> true | _ -> false in
       List.map (fun ((q, attrs), x) ->
         mkRefinedBinder x t should_bind_var r (rhs2 parseState 1 6) q attrs) qual_ids
     ))
# 9138 "parse.ml"
               : 'multiBinder))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in
    Obj.repr(
# 1516 "parse.mly"
    (let bss = _1 in
                                                        ( flatten bss ))
# 9146 "parse.ml"
               : 'binders))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1521 "parse.mly"
    (let (aq, attrs, x) = (_1, _2, _3) in
                                        ( (Some aq, attrs), x ))
# 9156 "parse.ml"
               : 'aqualifiedWithAttrs_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1524 "parse.mly"
    (let (aq, x) = (_1, _2) in
                 ( (Some aq, []), x ))
# 9165 "parse.ml"
               : 'aqualifiedWithAttrs_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1527 "parse.mly"
    (let (attrs, x) = (_1, _2) in
                               ( (None, attrs), x ))
# 9174 "parse.ml"
               : 'aqualifiedWithAttrs_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1530 "parse.mly"
    (let x = _1 in
        ( (None, []), x ))
# 9182 "parse.ml"
               : 'aqualifiedWithAttrs_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1535 "parse.mly"
    (let (aq, attrs, id) = (_1, _2, _3) in
let x =               ( id ) in
                                        ( (Some aq, attrs), x ))
# 9193 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1539 "parse.mly"
    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                                        ( (Some aq, attrs), x ))
# 9207 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1546 "parse.mly"
    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in
let x =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                                        ( (Some aq, attrs), x ))
# 9221 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1553 "parse.mly"
    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                                        ( (Some aq, attrs), x ))
# 9235 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1560 "parse.mly"
    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in
let x =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                                        ( (Some aq, attrs), x ))
# 9252 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1570 "parse.mly"
    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in
let x =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                                        ( (Some aq, attrs), x ))
# 9269 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1580 "parse.mly"
    (let (aq, id) = (_1, _2) in
let x =               ( id ) in
                 ( (Some aq, []), x ))
# 9279 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1584 "parse.mly"
    (let (aq, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                 ( (Some aq, []), x ))
# 9292 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1591 "parse.mly"
    (let (aq, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                 ( (Some aq, []), x ))
# 9305 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1598 "parse.mly"
    (let (aq, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                 ( (Some aq, []), x ))
# 9318 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1605 "parse.mly"
    (let (aq, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                 ( (Some aq, []), x ))
# 9334 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1615 "parse.mly"
    (let (aq, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                 ( (Some aq, []), x ))
# 9350 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1625 "parse.mly"
    (let (attrs, id) = (_1, _2) in
let x =               ( id ) in
                               ( (None, attrs), x ))
# 9360 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1629 "parse.mly"
    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                               ( (None, attrs), x ))
# 9373 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1636 "parse.mly"
    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                               ( (None, attrs), x ))
# 9386 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1643 "parse.mly"
    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                               ( (None, attrs), x ))
# 9399 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1650 "parse.mly"
    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                               ( (None, attrs), x ))
# 9415 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1660 "parse.mly"
    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in
let x =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
                               ( (None, attrs), x ))
# 9431 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1670 "parse.mly"
    (let id = _1 in
let x =               ( id ) in
        ( (None, []), x ))
# 9440 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1674 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
        ( (None, []), x ))
# 9452 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1681 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let x =
  let id =     ( op ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
        ( (None, []), x ))
# 9464 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1688 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let x =
  let id =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
        ( (None, []), x ))
# 9476 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1695 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let x =
  let id =
    let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
        ( (None, []), x ))
# 9491 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1705 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let x =
  let id =
    let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                        (op)
  in
      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )
in
        ( (None, []), x ))
# 9506 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrOperator_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in
    Obj.repr(
# 1717 "parse.mly"
    (let (aq, attrs, x) = (_1, _2, _3) in
                                        ( (Some aq, attrs), x ))
# 9516 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrUnderscore_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in
    Obj.repr(
# 1720 "parse.mly"
    (let (aq, x) = (_1, _2) in
                 ( (Some aq, []), x ))
# 9525 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrUnderscore_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in
    Obj.repr(
# 1723 "parse.mly"
    (let (attrs, x) = (_1, _2) in
                               ( (None, attrs), x ))
# 9534 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrUnderscore_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in
    Obj.repr(
# 1726 "parse.mly"
    (let x = _1 in
        ( (None, []), x ))
# 9542 "parse.ml"
               : 'aqualifiedWithAttrs_lidentOrUnderscore_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'path_lident_) in
    Obj.repr(
# 1731 "parse.mly"
    (let ids = _1 in
                     ( lid_of_ids ids ))
# 9550 "parse.ml"
               : 'qlident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'path_uident_) in
    Obj.repr(
# 1736 "parse.mly"
    (let ids = _1 in
                     ( lid_of_ids ids ))
# 9558 "parse.ml"
               : 'quident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1741 "parse.mly"
    (let id = _1 in
          ( [id] ))
# 9566 "parse.ml"
               : 'path_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'path_lident_) in
    Obj.repr(
# 1744 "parse.mly"
    (let (uid, _2, p) = (_1, (), _3) in
                              ( uid::p ))
# 9575 "parse.ml"
               : 'path_lident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'uident) in
    Obj.repr(
# 1749 "parse.mly"
    (let id = _1 in
          ( [id] ))
# 9583 "parse.ml"
               : 'path_uident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'path_uident_) in
    Obj.repr(
# 1752 "parse.mly"
    (let (uid, _2, p) = (_1, (), _3) in
                              ( uid::p ))
# 9592 "parse.ml"
               : 'path_uident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 1757 "parse.mly"
    (let x = _1 in
             ( x ))
# 9600 "parse.ml"
               : 'ident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'uident) in
    Obj.repr(
# 1760 "parse.mly"
    (let x = _1 in
              ( x ))
# 9608 "parse.ml"
               : 'ident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in
    Obj.repr(
# 1765 "parse.mly"
    (let qid = _1 in
                ( qid ))
# 9616 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1768 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let id =     ( mk_ident (op, rhs parseState 1) ) in
    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))
# 9625 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 1772 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let id =     ( op ) in
    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))
# 9634 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1776 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let id =     ( mk_ident (op, rhs parseState 1) ) in
    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))
# 9643 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1780 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let id =
  let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                      (op)
in
    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))
# 9655 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 1787 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let id =
  let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                      (op)
in
    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))
# 9667 "parse.ml"
               : 'qlidentOrOperator))
; (fun __caml_parser_env ->
    Obj.repr(
# 1796 "parse.mly"
    (let _1 = () in
          (None))
# 9674 "parse.ml"
               : 'matchMaybeOp))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1799 "parse.mly"
    (let op = _1 in
                ( Some (mk_ident ("let" ^ op, rhs parseState 1)) ))
# 9682 "parse.ml"
               : 'matchMaybeOp))
; (fun __caml_parser_env ->
    Obj.repr(
# 1804 "parse.mly"
    (let _1 = () in
       (None))
# 9689 "parse.ml"
               : 'ifMaybeOp))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1807 "parse.mly"
    (let op = _1 in
             ( Some (mk_ident ("let" ^ op, rhs parseState 1)) ))
# 9697 "parse.ml"
               : 'ifMaybeOp))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1812 "parse.mly"
    (let id = _1 in
             ( mk_ident(id, rhs parseState 1)))
# 9705 "parse.ml"
               : 'lidentOrUnderscore))
; (fun __caml_parser_env ->
    Obj.repr(
# 1815 "parse.mly"
    (let _1 = () in
               ( gen (rhs parseState 1) ))
# 9712 "parse.ml"
               : 'lidentOrUnderscore))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1820 "parse.mly"
    (let id = _1 in
             ( mk_ident(id, rhs parseState 1)))
# 9720 "parse.ml"
               : FStar_Ident.ident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1825 "parse.mly"
    (let id = _1 in
            ( mk_ident(id, rhs parseState 1) ))
# 9728 "parse.ml"
               : 'uident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 1830 "parse.mly"
    (let tv = _1 in
            ( mk_ident(tv, rhs parseState 1) ))
# 9736 "parse.ml"
               : 'tvar))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 1835 "parse.mly"
    (let t = _1 in
                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))
# 9744 "parse.ml"
               : 'thunk_atomicTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEq) in
    Obj.repr(
# 1840 "parse.mly"
    (let t = _1 in
                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))
# 9752 "parse.ml"
               : 'thunk_tmNoEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1845 "parse.mly"
    (let t = _1 in
                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))
# 9760 "parse.ml"
               : 'thunk_typ_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1850 "parse.mly"
    (let t = _1 in
     ( let u = mk_term (Const Const_unit) (rhs parseState 3) Expr in
       let t = mk_term (Seq (u, t)) (rhs parseState 3) Expr in
       mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))
# 9770 "parse.ml"
               : 'thunk2_typ_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tmArrow_tmNoEq_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_5_) in
    Obj.repr(
# 1857 "parse.mly"
    (let (_1, t, tacopt) = ((), _2, _3) in
                                                                                ( t, tacopt ))
# 9779 "parse.ml"
               : 'ascribeTyp))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'kind) in
    Obj.repr(
# 1862 "parse.mly"
    (let (_1, k) = ((), _2) in
                  ( k ))
# 9787 "parse.ml"
               : 'ascribeKind))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 1867 "parse.mly"
    (let t = _1 in
                      ( {t with level=Kind} ))
# 9795 "parse.ml"
               : 'kind))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1872 "parse.mly"
    (let e = _1 in
      ( e ))
# 9803 "parse.ml"
               : FStar_Parser_AST.term))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1875 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Seq(e1, e2)) (rhs2 parseState 1 3) Expr ))
# 9812 "parse.ml"
               : FStar_Parser_AST.term))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string option) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1878 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
      ( let t = match op with
	  | Some op ->
	     let op = mk_ident ("let" ^ op, rhs parseState 2) in
	     let pat = mk_pattern (PatWild(None, [])) (rhs parseState 2) in
	     LetOperator ([(op, pat, e1)], e2)
	  | None   ->
             log_issue (lhs parseState) (Warning_DeprecatedLightDoNotation, do_notation_deprecation_warning);
	     Bind(gen (rhs parseState 2), e1, e2)
        in mk_term t (rhs2 parseState 1 3) Expr
      ))
# 9831 "parse.ml"
               : FStar_Parser_AST.term))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'lidentOrUnderscore) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1890 "parse.mly"
    (let (x, _2, e1, _4, e2) = (_1, (), _3, (), _5) in
    ( log_issue (lhs parseState) (Warning_DeprecatedLightDoNotation, do_notation_deprecation_warning);
      mk_term (Bind(x, e1, e2)) (rhs2 parseState 1 5) Expr ))
# 9842 "parse.ml"
               : FStar_Parser_AST.term))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'option___anonymous_6_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in
    Obj.repr(
# 1896 "parse.mly"
    (let (as_opt, _2, t) = (_1, (), _3) in
                                                   (as_opt,t,false))
# 9851 "parse.ml"
               : 'match_returning))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'option___anonymous_7_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in
    Obj.repr(
# 1899 "parse.mly"
    (let (as_opt, _2, t) = (_1, (), _3) in
                                                      (as_opt,t,true))
# 9860 "parse.ml"
               : 'match_returning))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1904 "parse.mly"
    (let t = _1 in
           ( t ))
# 9868 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tmIff) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tmIff) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_8_) in
    Obj.repr(
# 1907 "parse.mly"
    (let (e, _2, t, tactic_opt) = (_1, (), _3, _4) in
      ( mk_term (Ascribed(e,{t with level=Expr},tactic_opt,false)) (rhs2 parseState 1 4) Expr ))
# 9878 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tmIff) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tmIff) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_9_) in
    Obj.repr(
# 1910 "parse.mly"
    (let (e, _2, t, tactic_opt) = (_1, (), _3, _4) in
      (
        log_issue (lhs parseState)
	          (Warning_BleedingEdge_Feature,
		   "Equality type ascriptions is an experimental feature subject to redesign in the future");
        mk_term (Ascribed(e,{t with level=Expr},tactic_opt,true)) (rhs2 parseState 1 4) Expr
      ))
# 9893 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1918 "parse.mly"
    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in
let op_expr =                              ( mk_ident (".()", rhs parseState 1), e, rhs2 parseState 1 3 ) in
      (
        let (op, e2, _) = op_expr in
        let opid = mk_ident (string_of_id op ^ "<-", range_of_id op) in
        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr
      ))
# 9908 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1926 "parse.mly"
    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in
let op_expr =                              ( mk_ident (".[]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
      (
        let (op, e2, _) = op_expr in
        let opid = mk_ident (string_of_id op ^ "<-", range_of_id op) in
        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr
      ))
# 9923 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1934 "parse.mly"
    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in
let op_expr =                                      ( mk_ident (".[||]", rhs parseState 1), e, rhs2 parseState 1 3 ) in
      (
        let (op, e2, _) = op_expr in
        let opid = mk_ident (string_of_id op ^ "<-", range_of_id op) in
        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr
      ))
# 9938 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1942 "parse.mly"
    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in
let op_expr =                                                 ( mk_ident (".(||)", rhs parseState 1), e, rhs2 parseState 1 3 ) in
      (
        let (op, e2, _) = op_expr in
        let opid = mk_ident (string_of_id op ^ "<-", range_of_id op) in
        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr
      ))
# 9953 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1950 "parse.mly"
    (let (_1, t) = ((), _2) in
      ( mk_term (Requires(t, None)) (rhs2 parseState 1 2) Type_level ))
# 9961 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1953 "parse.mly"
    (let (_1, t) = ((), _2) in
      ( mk_term (Ensures(t, None)) (rhs2 parseState 1 2) Type_level ))
# 9969 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in
    Obj.repr(
# 1956 "parse.mly"
    (let (_1, t) = ((), _2) in
      ( mk_term (Decreases (t, None)) (rhs2 parseState 1 2) Type_level ))
# 9977 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in
    Obj.repr(
# 1959 "parse.mly"
    (let (_1, _2, t, _4) = ((), (), _3, ()) in
      ( match t.tm with
        | App (t1, t2, _) ->
	  let ot = mk_term (WFOrder (t1, t2)) (rhs2 parseState 3 3) Type_level in
	  mk_term (Decreases (ot, None)) (rhs2 parseState 1 4) Type_level
	| _ ->
	  raise_error (Fatal_SyntaxError,
	    "Syntax error: To use well-founded relations, write e1 e2") (rhs parseState 3) ))
# 9991 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicTerm_) in
    Obj.repr(
# 1968 "parse.mly"
    (let (_1, es) = ((), _2) in
      ( mk_term (Attributes es) (rhs2 parseState 1 2) Type_level ))
# 9999 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'ifMaybeOp) in
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'noSeqTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'option_match_returning_) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1971 "parse.mly"
    (let (op, e1, ret_opt, _4, e2, _6, e3) = (_1, _2, _3, (), _5, (), _7) in
      ( mk_term (If(e1, op, ret_opt, e2, e3)) (rhs2 parseState 1 7) Expr ))
# 10011 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'ifMaybeOp) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'noSeqTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'option_match_returning_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 1974 "parse.mly"
    (let (op, e1, ret_opt, _4, e2) = (_1, _2, _3, (), _5) in
      (
        let e3 = mk_term (Const Const_unit) (rhs2 parseState 1 5) Expr in
        mk_term (If(e1, op, ret_opt, e2, e3)) (rhs2 parseState 1 5) Expr
      ))
# 10025 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in
    Obj.repr(
# 1980 "parse.mly"
    (let (_1, e1, _3, xs) = ((), _2, (), _4) in
let pbs =    ( List.rev xs ) in
      (
         let branches = focusBranches (pbs) (rhs2 parseState 1 4) in
         mk_term (TryWith(e1, branches)) (rhs2 parseState 1 4) Expr
      ))
# 10038 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'matchMaybeOp) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'option_match_returning_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'reverse_left_flexible_list_BAR___anonymous_10_) in
    Obj.repr(
# 1987 "parse.mly"
    (let (op, e, ret_opt, _4, xs) = (_1, _2, _3, (), _5) in
let pbs =    ( List.rev xs ) in
      (
        let branches = focusBranches pbs (rhs2 parseState 1 5) in
        mk_term (Match(e, op, ret_opt, branches)) (rhs2 parseState 1 5) Expr
      ))
# 10053 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : bool) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 1994 "parse.mly"
    (let (_1, _2, t, _4, e) = (_1, (), _3, (), _5) in
      (
            match t.tm with
            | Ascribed(r, rty, None, _) ->
              mk_term (LetOpenRecord(r, rty, e)) (rhs2 parseState 1 5) Expr

            | Name uid ->
              mk_term (LetOpen(uid, e)) (rhs2 parseState 1 5) Expr

            | _ ->
              raise_error (Fatal_SyntaxError, "Syntax error: local opens expects either opening\n\
                                               a module or namespace using `let open T in e`\n\
                                               or, a record type with `let open e <: t in e'`")
                          (rhs parseState 3)
      ))
# 10076 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : bool) in
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'letqualifier) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'letbinding) in
    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'list_attr_letbinding_) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2010 "parse.mly"
    (let (_2, q, lb, lbs, _6, e) = (_1, _2, _3, _4, (), _6) in
let attrs =     ( None ) in
      (
        let lbs = (attrs, lb)::lbs in
        let lbs = focusAttrLetBindings lbs (rhs2 parseState 2 3) in
        mk_term (Let(q, lbs, e)) (rhs2 parseState 1 6) Expr
      ))
# 10093 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'attribute) in
    let _2 = (Parsing.peek_val __caml_parser_env 5 : bool) in
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'letqualifier) in
    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'letbinding) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_attr_letbinding_) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2018 "parse.mly"
    (let (x, _2, q, lb, lbs, _6, e) = (_1, _2, _3, _4, _5, (), _7) in
let attrs =     ( Some x ) in
      (
        let lbs = (attrs, lb)::lbs in
        let lbs = focusAttrLetBindings lbs (rhs2 parseState 2 3) in
        mk_term (Let(q, lbs, e)) (rhs2 parseState 1 6) Expr
      ))
# 10111 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'letoperatorbinding) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'list___anonymous_11_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2026 "parse.mly"
    (let (op, b, lbs, _4, e) = (_1, _2, _3, (), _5) in
let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
    ( let lbs = (op, b)::lbs in
      mk_term (LetOperator ( List.map (fun (op, (pat, tm)) -> (op, pat, tm)) lbs
			   , e)) (rhs2 parseState 1 5) Expr
    ))
# 10126 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in
    Obj.repr(
# 2033 "parse.mly"
    (let (_1, xs) = ((), _2) in
let pbs =    ( List.rev xs ) in
      (
        let branches = focusBranches pbs (rhs2 parseState 1 2) in
        mk_function branches (lhs parseState) (rhs2 parseState 1 2)
      ))
# 10138 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2040 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( let a = set_lid_range assume_lid (rhs parseState 1) in
        mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e] (rhs2 parseState 1 2)
      ))
# 10148 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_12_) in
    Obj.repr(
# 2045 "parse.mly"
    (let (_1, e, tactic_opt) = ((), _2, _3) in
      (
        match tactic_opt with
        | None ->
          let a = set_lid_range assert_lid (rhs parseState 1) in
          mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e] (rhs2 parseState 1 2)
        | Some tac ->
          let a = set_lid_range assert_by_tactic_lid (rhs parseState 1) in
          mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e; tac] (rhs2 parseState 1 4)
      ))
# 10165 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in
    Obj.repr(
# 2056 "parse.mly"
    (let (_1, _2, tactic) = ((), (), _3) in
     (
         let a = set_lid_range synth_lid (rhs parseState 1) in
         mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [tactic] (rhs2 parseState 1 2)
     ))
# 10176 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2062 "parse.mly"
    (let (_1, tactic) = ((), _2) in
     (
         let a = set_lid_range synth_lid (rhs parseState 1) in
         mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [tactic] (rhs2 parseState 1 2)
     ))
# 10187 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTerm) in
    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'noSeqTerm) in
    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'list_calcStep_) in
    Obj.repr(
# 2068 "parse.mly"
    (let (_1, rel, _3, init, _5, steps, _7) = ((), _2, (), _4, (), _6, ()) in
     (
         mk_term (CalcProof (rel, init, steps)) (rhs2 parseState 1 7) Expr
     ))
# 10199 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2073 "parse.mly"
    (let (_1, _2, bs, _4, p, _6, e) = ((), (), _3, (), _5, (), _7) in
     (
        mk_term (IntroForall(bs, p, e)) (rhs2 parseState 1 7) Expr
     ))
# 10211 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in
    let _7 = (Parsing.peek_val __caml_parser_env 2 : 'list_atomicTerm_) in
    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2078 "parse.mly"
    (let (_1, _2, bs, _4, p, _6, vs, _8, e) = ((), (), _3, (), _5, (), _7, (), _9) in
     (
        if List.length bs <> List.length vs
        then raise_error (Fatal_SyntaxError, "Syntax error: expected instantiations for all binders") (rhs parseState 7)
        else mk_term (IntroExists(bs, p, vs, e)) (rhs2 parseState 1 9) Expr
     ))
# 10226 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tmFormula) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2085 "parse.mly"
    (let (_1, p, _3, q, _5, y, _7, e) = ((), _2, (), _4, (), _6, (), _8) in
     (
        mk_term (IntroImplies(p, q, y, e)) (rhs2 parseState 1 8) Expr
     ))
# 10239 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmConjunction) in
    let _6 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2090 "parse.mly"
    (let (_1, p, _3, q, _5, lr, e) = ((), _2, (), _4, (), _6, _7) in
     (
        let b =
            if lr = "Left" then true
            else if lr = "Right" then false
            else raise_error (Fatal_SyntaxError, "Syntax error: _intro_ \\/ expects either 'Left' or 'Right'") (rhs parseState 6)
        in
        mk_term (IntroOr(b, p, q, e))  (rhs2 parseState 1 7) Expr
     ))
# 10257 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tmConjunction) in
    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tmTuple) in
    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2100 "parse.mly"
    (let (_1, p, _3, q, _5, e1, _7, e2) = ((), _2, (), _4, (), _6, (), _8) in
     (
        mk_term (IntroAnd(p, q, e1, e2))  (rhs2 parseState 1 8) Expr
     ))
# 10270 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'list_atomicTerm_) in
    Obj.repr(
# 2105 "parse.mly"
    (let (_1, _2, xs, _4, p, _6, vs) = ((), (), _3, (), _5, (), _7) in
     (
        mk_term (ElimForall(xs, p, vs)) (rhs2 parseState 1 7) Expr
     ))
# 10282 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _3 = (Parsing.peek_val __caml_parser_env 8 : 'binders) in
    let _5 = (Parsing.peek_val __caml_parser_env 6 : 'noSeqTerm) in
    let _7 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in
    let _9 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in
    let _11 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2110 "parse.mly"
    (let (_1, _2, bs, _4, p, _6, q, _8, y, _10, e) = ((), (), _3, (), _5, (), _7, (), _9, (), _11) in
     (
        mk_term (ElimExists(bs, p, q, y, e)) (rhs2 parseState 1 11) Expr
     ))
# 10296 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2115 "parse.mly"
    (let (_1, p, _3, q, _5, e) = ((), _2, (), _4, (), _6) in
     (
        mk_term (ElimImplies(p, q, e)) (rhs2 parseState 1 6) Expr
     ))
# 10308 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 12 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 10 : 'tmConjunction) in
    let _6 = (Parsing.peek_val __caml_parser_env 8 : 'noSeqTerm) in
    let _8 = (Parsing.peek_val __caml_parser_env 6 : 'singleBinder) in
    let _10 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in
    let _12 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in
    let _14 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2120 "parse.mly"
    (let (_1, p, _3, q, _5, r, _7, x, _9, e1, _11, y, _13, e2) = ((), _2, (), _4, (), _6, (), _8, (), _10, (), _12, (), _14) in
     (
        mk_term (ElimOr(p, q, r, x, e1, y, e2)) (rhs2 parseState 1 14) Expr
     ))
# 10324 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 8 : 'tmConjunction) in
    let _4 = (Parsing.peek_val __caml_parser_env 6 : 'tmTuple) in
    let _6 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in
    let _8 = (Parsing.peek_val __caml_parser_env 2 : 'binders) in
    let _10 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2125 "parse.mly"
    (let (_1, p, _3, q, _5, r, _7, xs, _9, e) = ((), _2, (), _4, (), _6, (), _8, (), _10) in
     (
        match xs with
        | [x;y] -> mk_term (ElimAnd(p, q, r, x, y, e)) (rhs2 parseState 1 10) Expr
     ))
# 10339 "parse.ml"
               : 'noSeqTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binders) in
    Obj.repr(
# 2133 "parse.mly"
    (let bs = _1 in
    (
       match bs with
       | [b] -> b
       | _ -> raise_error (Fatal_SyntaxError, "Syntax error: expected a single binder") (rhs parseState 1)
    ))
# 10351 "parse.ml"
               : 'singleBinder))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binop_name) in
    Obj.repr(
# 2142 "parse.mly"
    (let i = _1 in
                 ( mk_term (Op (i, [])) (rhs parseState 1) Expr ))
# 10359 "parse.ml"
               : 'calcRel))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in
    Obj.repr(
# 2145 "parse.mly"
    (let (_1, id, _3) = ((), _2, ()) in
                                 ( mk_term (Var id) (rhs2 parseState 2 4) Un ))
# 10367 "parse.ml"
               : 'calcRel))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2148 "parse.mly"
    (let t = _1 in
                 ( t ))
# 10375 "parse.ml"
               : 'calcRel))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'calcRel) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'option_term_) in
    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in
    Obj.repr(
# 2153 "parse.mly"
    (let (rel, _2, justif, _4, next, _6) = (_1, (), _3, (), _5, ()) in
     (
         let justif =
             match justif with
             | Some t -> t
             | None -> mk_term (Const Const_unit) (rhs2 parseState 2 4) Expr
         in
         CalcStep (rel, justif, next)
     ))
# 10392 "parse.ml"
               : 'calcStep))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in
    Obj.repr(
# 2165 "parse.mly"
    (let t = _1 in
                 ( t ))
# 10400 "parse.ml"
               : 'typ))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binders) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'trigger) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2168 "parse.mly"
    (let (_1, bs, _3, trigger, e) = ((), _2, (), _4, _5) in
let q =            ( fun x -> QForall x ) in
      (
        match bs with
        | [] ->
          raise_error (Fatal_MissingQuantifierBinder, "Missing binders for a quantifier") (rhs2 parseState 1 3)
        | _ ->
          let idents = idents_of_binders bs (rhs2 parseState 1 3) in
          mk_term (q (bs, (idents, trigger), e)) (rhs2 parseState 1 5) Formula
      ))
# 10418 "parse.ml"
               : 'typ))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binders) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'trigger) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2179 "parse.mly"
    (let (_1, bs, _3, trigger, e) = ((), _2, (), _4, _5) in
let q =            ( fun x -> QExists x) in
      (
        match bs with
        | [] ->
          raise_error (Fatal_MissingQuantifierBinder, "Missing binders for a quantifier") (rhs2 parseState 1 3)
        | _ ->
          let idents = idents_of_binders bs (rhs2 parseState 1 3) in
          mk_term (q (bs, (idents, trigger), e)) (rhs2 parseState 1 5) Formula
      ))
# 10436 "parse.ml"
               : 'typ))
; (fun __caml_parser_env ->
    Obj.repr(
# 2192 "parse.mly"
    (      ( [] ))
# 10442 "parse.ml"
               : 'trigger))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'disjunctivePats) in
    Obj.repr(
# 2194 "parse.mly"
    (let (_1, pats, _3) = ((), _2, ()) in
                                                     ( pats ))
# 10450 "parse.ml"
               : 'trigger))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_) in
    Obj.repr(
# 2199 "parse.mly"
    (let pats = _1 in
                                                              ( pats ))
# 10458 "parse.ml"
               : 'disjunctivePats))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_appTerm_) in
    Obj.repr(
# 2204 "parse.mly"
    (let pats = _1 in
                                                              ( pats ))
# 10466 "parse.ml"
               : 'conjunctivePat))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in
    Obj.repr(
# 2209 "parse.mly"
    (let e = _1 in
            ( e ))
# 10474 "parse.ml"
               : 'simpleTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'nonempty_list_patternOrMultibinder_) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2212 "parse.mly"
    (let (_1, pats, _3, e) = ((), _2, (), _4) in
      ( mk_term (Abs(flatten pats, e)) (rhs2 parseState 1 4) Un ))
# 10483 "parse.ml"
               : 'simpleTerm))
; (fun __caml_parser_env ->
    Obj.repr(
# 2217 "parse.mly"
    (let _1 = () in
                    ( false ))
# 10490 "parse.ml"
               : 'maybeFocusArrow))
; (fun __caml_parser_env ->
    Obj.repr(
# 2220 "parse.mly"
    (let _1 = () in
                    ( true ))
# 10497 "parse.ml"
               : 'maybeFocusArrow))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'disjunctivePattern) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'maybeFocusArrow) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2225 "parse.mly"
    (let (pat, focus, e) = (_1, _2, _3) in
let when_opt =                          ( None ) in
      (
        let pat = match pat with
          | [p] -> p
          | ps -> mk_pattern (PatOr ps) (rhs2 parseState 1 1)
        in
        (focus, (pat, when_opt, e))
      ))
# 10514 "parse.ml"
               : 'patternBranch))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'disjunctivePattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'maybeFocusArrow) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in
    Obj.repr(
# 2235 "parse.mly"
    (let (pat, _1, e_inlined1, focus, e) = (_1, (), _3, _4, _5) in
let when_opt =
  let e = e_inlined1 in
                           ( Some e )
in
      (
        let pat = match pat with
          | [p] -> p
          | ps -> mk_pattern (PatOr ps) (rhs2 parseState 1 1)
        in
        (focus, (pat, when_opt, e))
      ))
# 10535 "parse.ml"
               : 'patternBranch))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmImplies) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in
    Obj.repr(
# 2250 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("<==>", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Formula ))
# 10544 "parse.ml"
               : 'tmIff))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmImplies) in
    Obj.repr(
# 2253 "parse.mly"
    (let e = _1 in
                ( e ))
# 10552 "parse.ml"
               : 'tmIff))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmArrow_tmFormula_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmImplies) in
    Obj.repr(
# 2258 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("==>", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Formula ))
# 10561 "parse.ml"
               : 'tmImplies))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2261 "parse.mly"
    (let e = _1 in
      ( e ))
# 10569 "parse.ml"
               : 'tmImplies))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2266 "parse.mly"
    (let (_1, t, _3, _2, tgt) = ((), _2, (), (), _5) in
let dom =                                ( ((Some TypeClassArg, []), t) ) in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10586 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2277 "parse.mly"
    (let (_1, q, dom_tm, _5, _2, tgt) = ((), _2, _3, (), (), _6) in
let dom =
  let attrs_opt =     ( None ) in
                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10607 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2291 "parse.mly"
    (let (_1, q, x, dom_tm, _5, _2, tgt) = ((), _2, _3, _4, (), (), _7) in
let dom =
  let attrs_opt =     ( Some x ) in
                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10629 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2305 "parse.mly"
    (let (dom_tm, _2, tgt) = (_1, (), _3) in
let dom =
  let attrs_opt =     ( None ) in
  let aq_opt =     ( None ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10650 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2320 "parse.mly"
    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in
let dom =
  let attrs_opt =     ( Some x ) in
  let aq_opt =     ( None ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10672 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2335 "parse.mly"
    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in
let dom =
  let attrs_opt =     ( None ) in
  let aq_opt =     ( Some x ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10694 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in
    Obj.repr(
# 2350 "parse.mly"
    (let (x, x_inlined1, dom_tm, _2, tgt) = (_1, _2, _3, (), _5) in
let dom =
  let attrs_opt =
    let x = x_inlined1 in
        ( Some x )
  in
  let aq_opt =     ( Some x ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10720 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmFormula) in
    Obj.repr(
# 2368 "parse.mly"
    (let e = _1 in
         ( e ))
# 10728 "parse.ml"
               : 'tmArrow_tmFormula_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2373 "parse.mly"
    (let (_1, t, _3, _2, tgt) = ((), _2, (), (), _5) in
let dom =                                ( ((Some TypeClassArg, []), t) ) in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10745 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in
    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2384 "parse.mly"
    (let (_1, q, dom_tm, _5, _2, tgt) = ((), _2, _3, (), (), _6) in
let dom =
  let attrs_opt =     ( None ) in
                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10766 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqual) in
    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binderAttributes) in
    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in
    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2398 "parse.mly"
    (let (_1, q, x, dom_tm, _5, _2, tgt) = ((), _2, _3, _4, (), (), _7) in
let dom =
  let attrs_opt =     ( Some x ) in
                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10788 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2412 "parse.mly"
    (let (dom_tm, _2, tgt) = (_1, (), _3) in
let dom =
  let attrs_opt =     ( None ) in
  let aq_opt =     ( None ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10809 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2427 "parse.mly"
    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in
let dom =
  let attrs_opt =     ( Some x ) in
  let aq_opt =     ( None ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10831 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2442 "parse.mly"
    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in
let dom =
  let attrs_opt =     ( None ) in
  let aq_opt =     ( Some x ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10853 "parse.ml"
               : 'tmArrow_tmNoEq_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in
    Obj.repr(
# 2457 "parse.mly"
    (let (x, x_inlined1, dom_tm, _2, tgt) = (_1, _2, _3, (), _5) in
let dom =
  let attrs_opt =
    let x = x_inlined1 in
        ( Some x )
  in
  let aq_opt =     ( Some x ) in
                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )
in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10879 "parse.ml"
               : 'tmArrow_tmNoEq_))
(* ocamlyacc-generated semantic actions (source: parse.mly) for the arrow-type
   productions: `tmArrow_tmNoEq_`, `simpleArrow`, and `simpleArrowDomain`.
   Do NOT hand-edit logic here — change parse.mly and regenerate; the
   `# <n> "<file>"` directives below map each action back to its .mly line.
   Each closure pops its RHS symbols with Parsing.peek_val (argument = depth
   from the top of the parse stack) and returns the built AST node via
   Obj.repr. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEq) in
    Obj.repr(
# 2475 "parse.mly"
    (let e = _1 in
         ( e ))
# 10887 "parse.ml"
               : 'tmArrow_tmNoEq_))
(* simpleArrow: domain RARROW simpleArrow — builds a Product (function type),
   turning a named refinement in the domain into a refined binder when present. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrowDomain) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simpleArrow) in
    Obj.repr(
# 2480 "parse.mly"
    (let (dom, _2, tgt) = (_1, (), _3) in
     (
       let ((aq_opt, attrs), dom_tm) = dom in
       let b = match extract_named_refinement dom_tm with
         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs
         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs
       in
       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un
     ))
# 10903 "parse.ml"
               : 'simpleArrow))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in
    Obj.repr(
# 2490 "parse.mly"
    (let e = _1 in
                       ( e ))
# 10911 "parse.ml"
               : 'simpleArrow))
(* simpleArrowDomain variants: a typeclass-constraint domain, or a plain
   domain with optional binder attributes and/or an aqual (implicit marker).
   Result shape: ((aqual option, attribute list), domain term). *)
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tmEqNoRefinement) in
    Obj.repr(
# 2495 "parse.mly"
    (let (_1, t, _3) = ((), _2, ()) in
                                             ( ((Some TypeClassArg, []), t) ))
# 10919 "parse.ml"
               : 'simpleArrowDomain))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in
    Obj.repr(
# 2498 "parse.mly"
    (let dom_tm = _1 in
let attrs_opt =     ( None ) in
let aq_opt =     ( None ) in
                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))
# 10929 "parse.ml"
               : 'simpleArrowDomain))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in
    Obj.repr(
# 2503 "parse.mly"
    (let (x, dom_tm) = (_1, _2) in
let attrs_opt =     ( Some x ) in
let aq_opt =     ( None ) in
                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))
# 10940 "parse.ml"
               : 'simpleArrowDomain))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in
    Obj.repr(
# 2508 "parse.mly"
    (let (x, dom_tm) = (_1, _2) in
let attrs_opt =     ( None ) in
let aq_opt =     ( Some x ) in
                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))
# 10951 "parse.ml"
               : 'simpleArrowDomain))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in
    Obj.repr(
# 2513 "parse.mly"
    (let (x, x_inlined1, dom_tm) = (_1, _2, _3) in
let attrs_opt =
  let x = x_inlined1 in
      ( Some x )
in
let aq_opt =     ( Some x ) in
                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))
# 10966 "parse.ml"
               : 'simpleArrowDomain))
(* ocamlyacc-generated semantic actions (source: parse.mly) for the formula
   layer: `tmFormula` (disjunction \/), `tmConjunction` (conjunction /\), and
   `tmTuple` (comma-separated tuples). Generated code — edit parse.mly, not
   this file; the `# <n>` directives map back to the grammar source. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmConjunction) in
    Obj.repr(
# 2523 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("\\/", rhs parseState 2), [e1;e2])) (rhs2 parseState 1 3) Formula ))
# 10975 "parse.ml"
               : 'tmFormula))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmConjunction) in
    Obj.repr(
# 2526 "parse.mly"
    (let e = _1 in
                    ( e ))
# 10983 "parse.ml"
               : 'tmFormula))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmConjunction) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmTuple) in
    Obj.repr(
# 2531 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("/\\", rhs parseState 2), [e1;e2])) (rhs2 parseState 1 3) Formula ))
# 10992 "parse.ml"
               : 'tmConjunction))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmTuple) in
    Obj.repr(
# 2534 "parse.mly"
    (let e = _1 in
              ( e ))
# 11000 "parse.ml"
               : 'tmConjunction))
(* tmTuple: a single element stays bare; two or more become a tuple node. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tmEq_) in
    Obj.repr(
# 2539 "parse.mly"
    (let el = _1 in
      (
        match el with
          | [x] -> x
          | components -> mkTuple components (rhs2 parseState 1 1)
      ))
# 11012 "parse.ml"
               : 'tmTuple))
(* ocamlyacc-generated semantic actions (source: parse.mly) for the two
   instantiations of the parameterized `tmEqWith(X)` rule:
   `tmEqWith_appTermNoRecordExp_` and `tmEqWith_tmRefinement_`.
   Both sets handle `=`, `:=`, `|>`, several operator-token classes (the
   near-identical blocks differ only by which infix-operator token class
   they match — presumably OPINFIX0a..d etc.; confirm against parse.mly),
   binary and unary `-`, and the quotation forms (`quote`, static quote,
   antiquote). Generated code — edit parse.mly, not this file. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2548 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("=", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11021 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2551 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident(":=", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11030 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2554 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("|>", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11039 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2557 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11050 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2561 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11061 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2565 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11072 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2569 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11083 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2573 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11094 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2577 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11105 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2581 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("-", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11114 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2584 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_uminus e (rhs parseState 1) (rhs2 parseState 1 2) Expr ))
# 11122 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2587 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un ))
# 11130 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2590 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Quote (e, Static)) (rhs2 parseState 1 3) Un ))
# 11138 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2593 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( let q = mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un in
        mk_term (Antiquote q) (rhs2 parseState 1 3) Un ))
# 11147 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2597 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Antiquote e) (rhs2 parseState 1 3) Un ))
# 11155 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2600 "parse.mly"
    (let e = _1 in
      ( e ))
# 11163 "parse.ml"
               : 'tmEqWith_appTermNoRecordExp_))
(* Second instantiation: the same tmEqWith rules specialized to tmRefinement. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2605 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("=", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11172 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2608 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident(":=", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11181 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2611 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("|>", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11190 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2614 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11201 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2618 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11212 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2622 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11223 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2626 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11234 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2630 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11245 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2634 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
let op =      ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))
# 11256 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2638 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( mk_term (Op(mk_ident("-", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11265 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2641 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_uminus e (rhs parseState 1) (rhs2 parseState 1 2) Expr ))
# 11273 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2644 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un ))
# 11281 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2647 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Quote (e, Static)) (rhs2 parseState 1 3) Un ))
# 11289 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2650 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( let q = mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un in
        mk_term (Antiquote q) (rhs2 parseState 1 3) Un ))
# 11298 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2654 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (Antiquote e) (rhs2 parseState 1 3) Un ))
# 11306 "parse.ml"
               : 'tmEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2657 "parse.mly"
    (let e = _1 in
      ( e ))
# 11314 "parse.ml"
               : 'tmEqWith_tmRefinement_))
(* ocamlyacc-generated semantic actions (source: parse.mly) for the two
   instantiations of `tmNoEqWith(X)`: `tmNoEqWith_appTermNoRecordExp_` and
   `tmNoEqWith_tmRefinement_`. Each set covers: list cons (::), dependent
   sum/anonymous-product construction, infix operator tokens, backtick-infix
   application, record expressions in braces, `%` value-quoting (VQuote),
   and prefix operators. Generated code — edit parse.mly, not this file. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2662 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( consTerm (rhs parseState 2) e1 e2 ))
# 11323 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
(* Sum (dependent tuple type): the left component becomes a refined binder
   when it is a named refinement; a Sum tail is flattened into one Sum node. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2665 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      (
            let dom =
               match extract_named_refinement e1 with
               | Some (x, t, f) ->
                 let dom = mkRefinedBinder x t true f (rhs parseState 1) None [] in
                 Inl dom
               | _ ->
                 Inr e1
            in
            let tail = e2 in
            let dom, res =
                match tail.tm with
                | Sum(dom', res) -> dom::dom', res
                | _ -> [dom], tail
            in
            mk_term (Sum(dom, res)) (rhs2 parseState 1 3) Type_level
      ))
# 11348 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2684 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11358 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2687 "parse.mly"
    (let (e1, _2, op, _4, e2) = (_1, (), _3, (), _5) in
      ( mkApp op [ e1, Infix; e2, Nothing ] (rhs2 parseState 1 5) ))
# 11368 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2690 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11378 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'recordExp) in
    Obj.repr(
# 2693 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
                              ( e ))
# 11386 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2696 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (VQuote e) (rhs2 parseState 1 3) Un ))
# 11394 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2699 "parse.mly"
    (let (op, e) = (_1, _2) in
      ( mk_term (Op(mk_ident (op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Formula ))
# 11403 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTermNoRecordExp) in
    Obj.repr(
# 2702 "parse.mly"
    (let e = _1 in
        ( e ))
# 11411 "parse.ml"
               : 'tmNoEqWith_appTermNoRecordExp_))
(* Second instantiation: the same tmNoEqWith rules specialized to tmRefinement. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2707 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      ( consTerm (rhs parseState 2) e1 e2 ))
# 11420 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2710 "parse.mly"
    (let (e1, _2, e2) = (_1, (), _3) in
      (
            let dom =
               match extract_named_refinement e1 with
               | Some (x, t, f) ->
                 let dom = mkRefinedBinder x t true f (rhs parseState 1) None [] in
                 Inl dom
               | _ ->
                 Inr e1
            in
            let tail = e2 in
            let dom, res =
                match tail.tm with
                | Sum(dom', res) -> dom::dom', res
                | _ -> [dom], tail
            in
            mk_term (Sum(dom, res)) (rhs2 parseState 1 3) Type_level
      ))
# 11445 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2729 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11455 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tmNoEqWith_tmRefinement_) in
    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in
    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2732 "parse.mly"
    (let (e1, _2, op, _4, e2) = (_1, (), _3, (), _5) in
      ( mkApp op [ e1, Infix; e2, Nothing ] (rhs2 parseState 1 5) ))
# 11465 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2735 "parse.mly"
    (let (e1, op, e2) = (_1, _2, _3) in
      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))
# 11475 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'recordExp) in
    Obj.repr(
# 2738 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
                              ( e ))
# 11483 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2741 "parse.mly"
    (let (_1, e) = ((), _2) in
      ( mk_term (VQuote e) (rhs2 parseState 1 3) Un ))
# 11491 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2744 "parse.mly"
    (let (op, e) = (_1, _2) in
      ( mk_term (Op(mk_ident (op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Formula ))
# 11500 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmRefinement) in
    Obj.repr(
# 2747 "parse.mly"
    (let e = _1 in
        ( e ))
# 11508 "parse.ml"
               : 'tmNoEqWith_tmRefinement_))
(* ocamlyacc-generated semantic actions (source: parse.mly) for `binop_name`:
   maps every binary-operator token to an ident. Rules carrying a `string`
   payload use the lexed operator text; keyword-like operator tokens
   (EQUALS, IMPLIES, CONJUNCTION, DISJUNCTION, IFF, PIPE_RIGHT, COLON_EQUALS,
   COLON_COLON — presumably; confirm against parse.mly) hard-code their
   spelling. Generated code — edit parse.mly, not this file. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2752 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11516 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2755 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11524 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2758 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11532 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2761 "parse.mly"
    (let o = () in
                             ( mk_ident ("=", rhs parseState 1) ))
# 11539 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2764 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11547 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2767 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11555 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2770 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11563 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2773 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11571 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2776 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11579 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2779 "parse.mly"
    (let o = () in
                             ( mk_ident ("==>", rhs parseState 1) ))
# 11586 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2782 "parse.mly"
    (let o = () in
                             ( mk_ident ("/\\", rhs parseState 1) ))
# 11593 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2785 "parse.mly"
    (let o = () in
                             ( mk_ident ("\\/", rhs parseState 1) ))
# 11600 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2788 "parse.mly"
    (let o = () in
                             ( mk_ident ("<==>", rhs parseState 1) ))
# 11607 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2791 "parse.mly"
    (let o = () in
                             ( mk_ident ("|>", rhs parseState 1) ))
# 11614 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2794 "parse.mly"
    (let o = () in
                             ( mk_ident (":=", rhs parseState 1) ))
# 11621 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    Obj.repr(
# 2797 "parse.mly"
    (let o = () in
                             ( mk_ident ("::", rhs parseState 1) ))
# 11628 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2800 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11636 "parse.ml"
               : 'binop_name))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 2803 "parse.mly"
    (let o = _1 in
                             ( mk_ident (o, rhs parseState 1) ))
# 11644 "parse.ml"
               : 'binop_name))
(* ocamlyacc-generated semantic actions (source: parse.mly) for
   `tmEqNoRefinement`, `tmEq`, `tmNoEq`, `tmRefinement`, `refineOpt`,
   `recordExp`, `simpleDef`, `appTerm`/`appTermNoRecordExp`, `argTerm`,
   and `indexingTerm`. Generated code — edit parse.mly, not this file;
   the `# <n>` directives map each action back to the grammar source. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in
    Obj.repr(
# 2808 "parse.mly"
    (let e = _1 in
                                   ( e ))
# 11652 "parse.ml"
               : 'tmEqNoRefinement))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in
    Obj.repr(
# 2813 "parse.mly"
    (let e = _1 in
                              ( e ))
# 11660 "parse.ml"
               : 'tmEq))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in
    Obj.repr(
# 2818 "parse.mly"
    (let e = _1 in
                               ( e ))
# 11668 "parse.ml"
               : 'tmNoEq))
(* tmRefinement: `x:t` becomes NamedTyp; `x:t{phi}` becomes Refine with an
   annotated binder. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'appTermNoRecordExp) in
    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'refineOpt) in
    Obj.repr(
# 2823 "parse.mly"
    (let (id, _2, e, phi_opt) = (_1, (), _3, _4) in
      (
        let t = match phi_opt with
          | None -> NamedTyp(id, e)
          | Some phi -> Refine(mk_binder (Annotated(id, e)) (rhs2 parseState 1 3) Type_level None, phi)
        in mk_term t (rhs2 parseState 1 4) Type_level
      ))
# 11683 "parse.ml"
               : 'tmRefinement))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in
    Obj.repr(
# 2831 "parse.mly"
    (let e = _1 in
               ( e ))
# 11691 "parse.ml"
               : 'tmRefinement))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_13_) in
    Obj.repr(
# 2836 "parse.mly"
    (let phi_opt = _1 in
                                                    (phi_opt))
# 11699 "parse.ml"
               : 'refineOpt))
(* recordExp: `{ f = e; ... }` (fresh record) or `{ e with f = e'; ... }`
   (record update with base expression). *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_) in
    Obj.repr(
# 2841 "parse.mly"
    (let record_fields = _1 in
      ( mk_term (Record (None, record_fields)) (rhs parseState 1) Expr ))
# 11707 "parse.ml"
               : 'recordExp))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_) in
    Obj.repr(
# 2844 "parse.mly"
    (let (e, _2, record_fields) = (_1, (), _3) in
      ( mk_term (Record (Some e, record_fields)) (rhs2 parseState 1 3) Expr ))
# 11716 "parse.ml"
               : 'recordExp))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'qlidentOrOperator) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 2849 "parse.mly"
    (let (x, _2, y) = (_1, (), _3) in
let e =     ( (x, y) ) in
                                                           ( e ))
# 11726 "parse.ml"
               : 'simpleDef))
(* simpleDef punning: a bare field name `f` stands for `f = f`. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qlidentOrOperator) in
    Obj.repr(
# 2853 "parse.mly"
    (let lid = _1 in
                          ( lid, mk_term (Name (lid_of_ids [ ident_of_lid lid ])) (rhs parseState 1) Un ))
# 11734 "parse.ml"
               : 'simpleDef))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'indexingTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in
    Obj.repr(
# 2858 "parse.mly"
    (let (head, args) = (_1, _2) in
let t =       ( mkApp head (map (fun (x,y) -> (y,x)) args) (rhs2 parseState 1 2) ) in
                                                                                  (t))
# 11744 "parse.ml"
               : 'appTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'indexingTerm) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_argTerm_) in
    Obj.repr(
# 2864 "parse.mly"
    (let (head, args) = (_1, _2) in
let t =       ( mkApp head (map (fun (x,y) -> (y,x)) args) (rhs2 parseState 1 2) ) in
                             (t))
# 11754 "parse.ml"
               : 'appTermNoRecordExp))
(* argTerm: an argument with its application qualifier — Nothing (explicit),
   Hash (implicit, `#`-prefixed), or a universe argument. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'indexingTerm) in
    Obj.repr(
# 2870 "parse.mly"
    (let y = _1 in
let x =
  let x =          ( Nothing ) in
      ( (x, y) )
in
                                    ( x ))
# 11766 "parse.ml"
               : 'argTerm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'indexingTerm) in
    Obj.repr(
# 2877 "parse.mly"
    (let (_1, y) = ((), _2) in
let x =
  let x =          ( Hash ) in
      ( (x, y) )
in
                                    ( x ))
# 11778 "parse.ml"
               : 'argTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'universe) in
    Obj.repr(
# 2884 "parse.mly"
    (let u = _1 in
               ( u ))
# 11786 "parse.ml"
               : 'argTerm))
(* indexingTerm: folds a chain of dot-operator accesses (e.g. `e.[i].(j)`)
   left-to-right into nested Op applications, widening the range each step. *)
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTermNotQUident) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in
    Obj.repr(
# 2889 "parse.mly"
    (let (e1, op_exprs) = (_1, _2) in
      (
        List.fold_left (fun e1 (op, e2, r) ->
            mk_term (Op(op, [ e1; e2 ])) (union_ranges e1.range r) Expr)
            e1 op_exprs
      ))
# 11799 "parse.ml"
               : 'indexingTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in
    Obj.repr(
# 2896 "parse.mly"
    (let e = _1 in
    ( e ))
# 11807 "parse.ml"
               : 'indexingTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermNotQUident) in
    Obj.repr(
# 2901 "parse.mly"
    (let x = _1 in
    ( x ))
# 11815 "parse.ml"
               : 'atomicTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermQUident) in
    Obj.repr(
# 2904 "parse.mly"
    (let x = _1 in
    ( x ))
# 11823 "parse.ml"
               : 'atomicTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'opPrefixTerm_atomicTermQUident_) in
    Obj.repr(
# 2907 "parse.mly"
    (let x = _1 in
    ( x ))
# 11831 "parse.ml"
               : 'atomicTerm))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in
    Obj.repr(
# 2912 "parse.mly"
    (let id = _1 in
    (
        let t = Name id in
        let e = mk_term t (rhs parseState 1) Un in
              e
    ))
# 11843 "parse.ml"
               : 'atomicTermQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 2919 "parse.mly"
    (let (id, _2, t, _4) = (_1, (), _3, ()) in
    (
      mk_term (LetOpen (id, t)) (rhs2 parseState 1 4) Expr
    ))
# 11854 "parse.ml"
               : 'atomicTermQUident))
; (fun __caml_parser_env ->
    Obj.repr(
# 2926 "parse.mly"
    (let _1 = () in
               ( mk_term Wild (rhs parseState 1) Un ))
# 11861 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in
    Obj.repr(
# 2929 "parse.mly"
    (let tv = _1 in
                ( mk_term (Tvar tv) (rhs parseState 1) Type_level ))
# 11869 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in
    Obj.repr(
# 2932 "parse.mly"
    (let c = _1 in
               ( mk_term (Const c) (rhs parseState 1) Expr ))
# 11877 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'opPrefixTerm_atomicTermNotQUident_) in
    Obj.repr(
# 2935 "parse.mly"
    (let x = _1 in
    ( x ))
# 11885 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 2938 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))
# 11894 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in
    Obj.repr(
# 2942 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( op ) in
      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))
# 11903 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 2946 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =     ( mk_ident (op, rhs parseState 1) ) in
      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))
# 11912 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 2950 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =
  let op =               ( mk_ident ("and" ^ op, rhs parseState 1) ) in
                      (op)
in
      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))
# 11924 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    Obj.repr(
# 2957 "parse.mly"
    (let (_1, op, _3) = ((), _2, ()) in
let op =
  let op =               ( mk_ident ("let" ^ op, rhs parseState 1) ) in
                      (op)
in
      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))
# 11936 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmEq) in
    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_tmEq_) in
    Obj.repr(
# 2964 "parse.mly"
    (let (_1, e0, _3, el, _5) = ((), _2, (), _4, ()) in
      ( mkDTuple (e0::el) (rhs2 parseState 1 5) ))
# 11945 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'projectionLHS) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_15_) in
    Obj.repr(
# 2967 "parse.mly"
    (let (e, field_projs) = (_1, _2) in
      ( fold_left (fun e lid -> mk_term (Project(e, lid)) (rhs2 parseState 1 2) Expr ) e field_projs ))
# 11954 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in
    Obj.repr(
# 2970 "parse.mly"
    (let (_1, e, _3) = ((), _2, ()) in
      ( e ))
# 11962 "parse.ml"
               : 'atomicTermNotQUident))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermNotQUident) in
    Obj.repr(
# 2975 "parse.mly"
    (let (op, e) = (_1, _2) in
      ( mk_term (Op(mk_ident(op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Expr ))
# 11971 "parse.ml"
               : 'opPrefixTerm_atomicTermNotQUident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermQUident) in
    Obj.repr(
# 2980 "parse.mly"
    (let (op, e) = (_1, _2) in
      ( mk_term (Op(mk_ident(op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Expr ))
# 11980 "parse.ml"
               : 'opPrefixTerm_atomicTermQUident_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qidentWithTypeArgs_qlident_option_fsTypeArgs__) in
    Obj.repr(
# 2985 "parse.mly"
    (let e = _1 in
      ( e ))
# 11988 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qidentWithTypeArgs_quident_some_fsTypeArgs__) in
    Obj.repr(
# 2988 "parse.mly"
    (let e = _1 in
      ( e ))
# 11996 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in
    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'option_pair_hasSort_simpleTerm__) in
    Obj.repr(
# 2991 "parse.mly"
    (let (_1, e, sort_opt, _4) = ((), _2, _3, ()) in
      (
        (* Note: we have to keep the parentheses here. Consider t * u * v. This
         * is parsed as Op2( *, Op2( *, t, u), v). The desugaring phase then looks
         * up * and figures out that it hasn't been overridden, meaning that
         * it's a tuple type, and proceeds to flatten out the whole tuple. Now
         * consider (t * u) * v. We keep the Paren node, which prevents the
         * flattening from happening, hence ensuring the proper type is
         * generated. *)
        let e1 = match sort_opt with
          | None -> e
          | Some (level, t) -> mk_term (Ascribed(e,{t with level=level},None,false)) (rhs2 parseState 1 4) level
        in mk_term (Paren e1) (rhs2 parseState 1 4) (e.level)
      ))
# 12017 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 3006 "parse.mly"
    (let (_1, l, _3) = ((), _2, ()) in
let es =                                                 ( l ) in
      (
        let l = mkConsList (rhs2 parseState 1 3) es in
        let pos = (rhs2 parseState 1 3) in
        mkExplicitApp (mk_term (Var (array_of_list_lid)) pos Expr) [l] pos
      ))
# 12030 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 3014 "parse.mly"
    (let (_1, l, _3) = ((), _2, ()) in
let es =                                                 ( l ) in
      ( mkConsList (rhs2 parseState 1 3) es ))
# 12039 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 3018 "parse.mly"
    (let (_1, l, _3) = ((), _2, ()) in
let es =                                                 ( l ) in
      ( mk_term (LexList es) (rhs2 parseState 1 3) Type_level ))
# 12048 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'loption_separated_nonempty_list_COMMA_appTerm__) in
    Obj.repr(
# 3022 "parse.mly"
    (let (_1, xs, _3) = ((), _2, ()) in
let es =     ( xs ) in
      ( mkRefSet (rhs2 parseState 1 3) es ))
# 12057 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 3026 "parse.mly"
    (let (ns, _2, id) = (_1, (), _3) in
      ( mk_term (Projector (ns, id)) (rhs2 parseState 1 3) Expr ))
# 12066 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in
    Obj.repr(
# 3029 "parse.mly"
    (let (lid, _2) = (_1, ()) in
      ( mk_term (Discrim lid) (rhs2 parseState 1 2) Un ))
# 12074 "parse.ml"
               : 'projectionLHS))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_atomicTerm_) in
    Obj.repr(
# 3034 "parse.mly"
    (let (_1, targs, _3) = ((), _2, ()) in
    (targs))
# 12082 "parse.ml"
               : 'fsTypeArgs))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_fsTypeArgs_) in
    Obj.repr(
# 3039 "parse.mly"
    (let (id, targs_opt) = (_1, _2) in
      (
        let t = if is_name id then Name id else Var id in
        let e = mk_term t (rhs parseState 1) Un in
        match targs_opt with
        | None -> e
        | Some targs -> mkFsTypApp e targs (rhs2 parseState 1 2)
      ))
# 12097 "parse.ml"
               : 'qidentWithTypeArgs_qlident_option_fsTypeArgs__))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'some_fsTypeArgs_) in
    Obj.repr(
# 3050 "parse.mly"
    (let (id, targs_opt) = (_1, _2) in
      (
        let t = if is_name id then Name id else Var id in
        let e = mk_term t (rhs parseState 1) Un in
        match targs_opt with
        | None -> e
        | Some targs -> mkFsTypApp e targs (rhs2 parseState 1 2)
      ))
# 12112 "parse.ml"
               : 'qidentWithTypeArgs_quident_some_fsTypeArgs__))
; (fun __caml_parser_env ->
    Obj.repr(
# 3061 "parse.mly"
    (let _1 = () in
            ( Type_level ))
# 12119 "parse.ml"
               : 'hasSort))
; (fun __caml_parser_env ->
    Obj.repr(
# 3066 "parse.mly"
    (let _1 = () in
                  ( Const_unit ))
# 12126 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3069 "parse.mly"
    (let n = _1 in
     (
        if snd n then
          log_issue (lhs parseState) (Error_OutOfRange, "This number is outside the allowable range for representable integer constants");
        Const_int (fst n, None)
     ))
# 12138 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : char) in
    Obj.repr(
# 3076 "parse.mly"
    (let c = _1 in
           ( Const_char c ))
# 12146 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3079 "parse.mly"
    (let s = _1 in
             ( Const_string (s,lhs(parseState)) ))
# 12154 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    Obj.repr(
# 3082 "parse.mly"
    (let _1 = () in
         ( Const_bool true ))
# 12161 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    Obj.repr(
# 3085 "parse.mly"
    (let _1 = () in
          ( Const_bool false ))
# 12168 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3088 "parse.mly"
    (let r = _1 in
           ( Const_real r ))
# 12176 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3091 "parse.mly"
    (let n = _1 in
            ( Const_int (n, Some (Unsigned, Int8)) ))
# 12184 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3094 "parse.mly"
    (let n = _1 in
      (
        if snd n then
          log_issue (lhs(parseState)) (Error_OutOfRange, "This number is outside the allowable range for 8-bit signed integers");
        Const_int (fst n, Some (Signed, Int8))
      ))
# 12196 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3101 "parse.mly"
    (let n = _1 in
             ( Const_int (n, Some (Unsigned, Int16)) ))
# 12204 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3104 "parse.mly"
    (let n = _1 in
      (
        if snd n then
          log_issue (lhs(parseState)) (Error_OutOfRange, "This number is outside the allowable range for 16-bit signed integers");
        Const_int (fst n, Some (Signed, Int16))
      ))
# 12216 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3111 "parse.mly"
    (let n = _1 in
             ( Const_int (n, Some (Unsigned, Int32)) ))
# 12224 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3114 "parse.mly"
    (let n = _1 in
      (
        if snd n then
          log_issue (lhs(parseState)) (Error_OutOfRange, "This number is outside the allowable range for 32-bit signed integers");
        Const_int (fst n, Some (Signed, Int32))
      ))
# 12236 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3121 "parse.mly"
    (let n = _1 in
             ( Const_int (n, Some (Unsigned, Int64)) ))
# 12244 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3124 "parse.mly"
    (let n = _1 in
      (
        if snd n then
          log_issue (lhs(parseState)) (Error_OutOfRange, "This number is outside the allowable range for 64-bit signed integers");
        Const_int (fst n, Some (Signed, Int64))
      ))
# 12256 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3131 "parse.mly"
    (let n = _1 in
            ( Const_int (n, Some (Unsigned, Sizet)) ))
# 12264 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    Obj.repr(
# 3134 "parse.mly"
    (let _1 = () in
            ( Const_reify None ))
# 12271 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    Obj.repr(
# 3137 "parse.mly"
    (let _1 = () in
                 ( Const_range_of ))
# 12278 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    Obj.repr(
# 3140 "parse.mly"
    (let _1 = () in
                 ( Const_set_range_of ))
# 12285 "parse.ml"
               : 'constant))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in
    Obj.repr(
# 3145 "parse.mly"
    (let (_1, ua) = ((), _2) in
                                ( (UnivApp, ua) ))
# 12293 "parse.ml"
               : 'universe))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in
    Obj.repr(
# 3150 "parse.mly"
    (let ua = _1 in
                      ( ua ))
# 12301 "parse.ml"
               : 'universeFrom))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'universeFrom) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'universeFrom) in
    Obj.repr(
# 3153 "parse.mly"
    (let (u1, op_plus, u2) = (_1, _2, _3) in
       (
         if op_plus <> "+"
         then log_issue (rhs parseState 2) (Error_OpPlusInUniverse, ("The operator " ^ op_plus ^ " was found in universe context."
                           ^ "The only allowed operator in that context is +."));
         mk_term (Op(mk_ident (op_plus, rhs parseState 2), [u1 ; u2])) (rhs2 parseState 1 3) Expr
       ))
# 12316 "parse.ml"
               : 'universeFrom))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicUniverse_) in
    Obj.repr(
# 3161 "parse.mly"
    (let (max, us) = (_1, _2) in
      (
        if string_of_id max <> string_of_lid max_lid
        then log_issue (rhs parseState 1) (Error_InvalidUniverseVar, "A lower case ident " ^ string_of_id max ^
                          " was found in a universe context. " ^
                          "It should be either max or a universe variable 'usomething.");
        let max = mk_term (Var (lid_of_ids [max])) (rhs parseState 1) Expr in
        mkApp max (map (fun u -> u, Nothing) us) (rhs2 parseState 1 2)
      ))
# 12332 "parse.ml"
               : 'universeFrom))
; (fun __caml_parser_env ->
    Obj.repr(
# 3173 "parse.mly"
    (let _1 = () in
      ( mk_term Wild (rhs parseState 1) Expr ))
# 12339 "parse.ml"
               : 'atomicUniverse))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3176 "parse.mly"
    (let n = _1 in
      (
        if snd n then
          log_issue (lhs(parseState)) (Error_OutOfRange, "This number is outside the allowable range for representable integer constants");
        mk_term (Const (Const_int (fst n, None))) (rhs parseState 1) Expr
      ))
# 12351 "parse.ml"
               : 'atomicUniverse))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in
    Obj.repr(
# 3183 "parse.mly"
    (let u = _1 in
             ( mk_term (Uvar u) (range_of_id u) Expr ))
# 12359 "parse.ml"
               : 'atomicUniverse))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'universeFrom) in
    Obj.repr(
# 3186 "parse.mly"
    (let (_1, u, _3) = ((), _2, ()) in
    ( u (*mk_term (Paren u) (rhs2 parseState 1 3) Expr*) ))
# 12367 "parse.ml"
               : 'atomicUniverse))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'warn_error) in
    Obj.repr(
# 3191 "parse.mly"
    (let (e, _2) = (_1, ()) in
                     ( e ))
# 12375 "parse.ml"
               : (FStar_Errors_Codes.error_flag * string) list))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'flag) in
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'range) in
    Obj.repr(
# 3196 "parse.mly"
    (let (f, r) = (_1, _2) in
    ( [(f, r)] ))
# 12384 "parse.ml"
               : 'warn_error))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'flag) in
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'range) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'warn_error) in
    Obj.repr(
# 3199 "parse.mly"
    (let (f, r, e) = (_1, _2, _3) in
    ( (f, r) :: e ))
# 12394 "parse.ml"
               : 'warn_error))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3204 "parse.mly"
    (let op = _1 in
    ( if op = "@" then CAlwaysError else failwith (format1 "unexpected token %s in warn-error list" op)))
# 12402 "parse.ml"
               : 'flag))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3207 "parse.mly"
    (let op = _1 in
    ( if op = "+" then CWarning else failwith (format1 "unexpected token %s in warn-error list" op)))
# 12410 "parse.ml"
               : 'flag))
; (fun __caml_parser_env ->
    Obj.repr(
# 3210 "parse.mly"
    (let _1 = () in
          ( CSilent ))
# 12417 "parse.ml"
               : 'flag))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in
    Obj.repr(
# 3215 "parse.mly"
    (let i = _1 in
    ( format2 "%s..%s" (fst i) (fst i) ))
# 12425 "parse.ml"
               : 'range))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3218 "parse.mly"
    (let r = _1 in
    ( r ))
# 12433 "parse.ml"
               : 'range))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in
    Obj.repr(
# 3223 "parse.mly"
    (let s = _1 in
             ( s ))
# 12441 "parse.ml"
               : 'string))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fsTypeArgs) in
    Obj.repr(
# 3228 "parse.mly"
    (let x = _1 in
        ( Some x ))
# 12449 "parse.ml"
               : 'some_fsTypeArgs_))
; (fun __caml_parser_env ->
    Obj.repr(
# 3233 "parse.mly"
    (        ( [] ))
# 12455 "parse.ml"
               : 'right_flexible_list_SEMICOLON_fieldPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fieldPattern) in
    Obj.repr(
# 3235 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12463 "parse.ml"
               : 'right_flexible_list_SEMICOLON_fieldPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'fieldPattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_fieldPattern_) in
    Obj.repr(
# 3238 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12472 "parse.ml"
               : 'right_flexible_list_SEMICOLON_fieldPattern_))
; (fun __caml_parser_env ->
    Obj.repr(
# 3243 "parse.mly"
    (        ( [] ))
# 12478 "parse.ml"
               : 'right_flexible_list_SEMICOLON_noSeqTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in
    Obj.repr(
# 3245 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12486 "parse.ml"
               : 'right_flexible_list_SEMICOLON_noSeqTerm_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in
    Obj.repr(
# 3248 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12495 "parse.ml"
               : 'right_flexible_list_SEMICOLON_noSeqTerm_))
; (fun __caml_parser_env ->
    Obj.repr(
# 3253 "parse.mly"
    (        ( [] ))
# 12501 "parse.ml"
               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'recordFieldDecl) in
    Obj.repr(
# 3255 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12509 "parse.ml"
               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'recordFieldDecl) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_recordFieldDecl_) in
    Obj.repr(
# 3258 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12518 "parse.ml"
               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))
; (fun __caml_parser_env ->
    Obj.repr(
# 3263 "parse.mly"
    (        ( [] ))
# 12524 "parse.ml"
               : 'right_flexible_list_SEMICOLON_simpleDef_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleDef) in
    Obj.repr(
# 3265 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12532 "parse.ml"
               : 'right_flexible_list_SEMICOLON_simpleDef_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleDef) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_simpleDef_) in
    Obj.repr(
# 3268 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12541 "parse.ml"
               : 'right_flexible_list_SEMICOLON_simpleDef_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fieldPattern) in
    Obj.repr(
# 3273 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12549 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'fieldPattern) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_fieldPattern_) in
    Obj.repr(
# 3276 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12558 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'recordFieldDecl) in
    Obj.repr(
# 3281 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12566 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'recordFieldDecl) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_recordFieldDecl_) in
    Obj.repr(
# 3284 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12575 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleDef) in
    Obj.repr(
# 3289 "parse.mly"
    (let x = _1 in
        ( [x] ))
# 12583 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleDef) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_simpleDef_) in
    Obj.repr(
# 3292 "parse.mly"
    (let (x, _2, xs) = (_1, (), _3) in
                                           ( x :: xs ))
# 12592 "parse.ml"
               : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_))
; (fun __caml_parser_env ->
    Obj.repr(
# 3297 "parse.mly"
    (   ( [] ))
# 12598 "parse.ml"
               : 'reverse_left_flexible_list_BAR___anonymous_10_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in
    Obj.repr(
# 3299 "parse.mly"
    (let pb = _1 in
let x =                                                                                                              (pb) in
   ( [x] ))
# 12607 "parse.ml"
               : 'reverse_left_flexible_list_BAR___anonymous_10_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'reverse_left_flexible_list_BAR___anonymous_10_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in
    Obj.repr(
# 3303 "parse.mly"
    (let (xs, _2, pb) = (_1, (), _3) in
let x =                                                                                                              (pb) in
   ( x :: xs ))
# 12617 "parse.ml"
               : 'reverse_left_flexible_list_BAR___anonymous_10_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in
    Obj.repr(
# 3309 "parse.mly"
    (let x = _1 in
let _1 =     ( None ) in
   ( [x] ))
# 12626 "parse.ml"
               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in
    Obj.repr(
# 3313 "parse.mly"
    (let (x_inlined1, x) = ((), _2) in
let _1 =
  let x = x_inlined1 in
      ( Some x )
in
   ( [x] ))
# 12638 "parse.ml"
               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))
; (fun __caml_parser_env ->
    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in
    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in
    Obj.repr(
# 3320 "parse.mly"
    (let (xs, _2, x) = (_1, (), _3) in
   ( x :: xs ))
# 12647 "parse.ml"
               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))
(* Entry inputFragment *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry oneDeclOrEOF *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry term *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry warn_error_list *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
(* Table bundle handed to OCaml's [Parsing] runtime: the semantic action
   array, token translation tables and the LALR automaton tables emitted
   by ocamlyacc.  Generated code — do not edit by hand. *)
let yytables =
  { Parsing.actions=yyact;
    Parsing.transl_const=yytransl_const;
    Parsing.transl_block=yytransl_block;
    Parsing.lhs=yylhs;
    Parsing.len=yylen;
    Parsing.defred=yydefred;
    Parsing.dgoto=yydgoto;
    Parsing.sindex=yysindex;
    Parsing.rindex=yyrindex;
    Parsing.gindex=yygindex;
    Parsing.tablesize=yytablesize;
    Parsing.table=yytable;
    Parsing.check=yycheck;
    Parsing.error_function=parse_error;
    Parsing.names_const=yynames_const;
    Parsing.names_block=yynames_block }
(* Generated grammar entry points.  The integer passed to
   [Parsing.yyparse] selects the start symbol (1..4, matching the
   %start declarations in parse.mly). *)
let inputFragment (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 1 lexfun lexbuf : FStar_Parser_AST.inputFragment)
let oneDeclOrEOF (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 2 lexfun lexbuf : FStar_Parser_AST.decl option)
let term (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 3 lexfun lexbuf : FStar_Parser_AST.term)
let warn_error_list (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 4 lexfun lexbuf : (FStar_Errors_Codes.error_flag * string) list)
;;


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_ParseIt.ml
================================================
module U = FStar_Compiler_Util
open FStar_Errors
(* open FStar_Syntax_Syntax *)
open Lexing
open FStar_Sedlexing
open FStar_Errors_Codes
module Codes = FStar_Errors_Codes

(* A source file path. *)
type filename = string

(* An in-memory piece of source text together with the file name and the
   (line, column) position at which it occurs; used for fragment and
   incremental parsing. *)
type input_frag = {
    frag_fname:filename;
    frag_text:string;
    frag_line:Prims.int;
    frag_col:Prims.int
}

(* Reset [lexbuf]'s current position to the very start of [filename]:
   line 1, beginning-of-line and character offsets both 0. *)
let resetLexbufPos filename lexbuf =
  lexbuf.cur_p <-
    { pos_fname = filename; pos_lnum = 1; pos_bol = 0; pos_cnum = 0 }

(* Position [lexbuf] as if lexing had started at [line]/[col] of
   [filename]; [pos_bol] stays 0, so [col] is stored directly as the
   character offset [pos_cnum]. *)
let setLexbufPos filename lexbuf line col =
  lexbuf.cur_p <-
    { pos_fname = filename; pos_lnum = line; pos_bol = 0; pos_cnum = col }

module Path = BatPathGen.OfString

(* Stub in this vendored copy: file-system search is disabled, so every
   lookup is reported as a fatal module/file-not-found error. *)
let find_file filename =
      raise_err (Fatal_ModuleOrFileNotFound, U.format1 "Unable to find file: %s\n" filename)

(* let vfs_entries : (U.time * string) U.smap = U.smap_create (Z.of_int 1) *)

(* let read_vfs_entry fname = *)
(*   U.smap_try_find vfs_entries (U.normalize_file_path fname) *)

(* let add_vfs_entry fname contents = *)
(*   U.smap_add vfs_entries (U.normalize_file_path fname) (U.now (), contents) *)

(* let get_file_last_modification_time filename = *)
(*   match read_vfs_entry filename with *)
(*   | Some (mtime, _contents) -> mtime *)
(*   | None -> U.get_file_last_modification_time filename *)

(* Read the entire contents of [filename], opened in binary mode.
   [finally] guarantees the channel is closed whether or not the read
   succeeds.  Any exception (including [Sys_error] from the open itself)
   is rewrapped as a fatal [Fatal_UnableToReadFile] error. *)
let read_physical_file (filename: string) =
  (* BatFile.with_file_in uses Unix.openfile (which isn't available in
     js_of_ocaml) instead of Pervasives.open_in, so we don't use it here. *)
  try
    let channel = open_in_bin filename in
    BatPervasives.finally
      (fun () -> close_in channel)
      (fun channel -> really_input_string channel (in_channel_length channel))
      channel
  with e ->
    raise_err (Fatal_UnableToReadFile, U.format1 "Unable to read file %s\n" filename)

(* Resolve [filename] through [find_file], then return the resolved name
   paired with the file's contents.
   NOTE(review): in this vendored copy [find_file] above unconditionally
   raises, so control never reaches the read; kept for API parity with
   upstream F*. *)
let read_file (filename:string) =
  let debug = false in
  let filename = find_file filename in
  if debug then U.print1 "Opening file %s\n" filename;
  filename, read_physical_file filename

(* Legacy F# source/interface extensions (upstream only accepts these
   under --MLish; see [check_extension]). *)
let fs_extensions = [".fs"; ".fsi"]
(* Native F* source/interface extensions. *)
let fst_extensions = [".fst"; ".fsti"]
(* Extensions denoting interface (as opposed to implementation) files. *)
let interface_extensions = [".fsti"; ".fsi"]

(* Extensions accepted by [check_extension].  Upstream F* additionally
   accepts the F# extensions when --MLish is set; this vendored copy
   hard-codes that option off (the original spelled it
   [fst_extensions @ if false then fs_extensions else []]), so only the
   native F* extensions remain. *)
let valid_extensions () =
  fst_extensions

(* [has_extension file exts] holds iff [file] ends with one of [exts]. *)
let has_extension file extensions =
  FStar_List.existsb (U.ends_with file) extensions

(* Raise [Fatal_UnrecognizedExtension] unless [fn] carries a valid F*
   extension; when the file looks like F# source, the message also
   suggests --MLish. *)
let check_extension fn =
  if (not (has_extension fn (valid_extensions ()))) then
    let message = U.format1 "Unrecognized extension '%s'" fn in
    raise_err (Fatal_UnrecognizedExtension, if has_extension fn fs_extensions then
                  message ^ " (pass --MLish to process .fs and .fsi files)"
                else message)

(* What to parse: a whole file by name, or an in-memory fragment in
   toplevel, incremental, or single-term mode. *)
type parse_frag =
    | Filename of filename
    | Toplevel of input_frag
    | Incremental of input_frag
    | Fragment of input_frag

(* Error code, message and source location of a syntax error. *)
type parse_error = (Codes.raw_error * string * FStar_Compiler_Range.range)


(* The raw source text of a parsed declaration together with its range;
   used by the incremental frontend to report exactly what was checked. *)
type code_fragment = {
   range: FStar_Compiler_Range.range;
   code: string;
}

(* Outcome of [parse], one constructor per [parse_frag] mode plus the
   error case. *)
type parse_result =
    | ASTFragment of (FStar_Parser_AST.inputFragment * (string * FStar_Compiler_Range.range) list)
    | IncrementalFragment of ((FStar_Parser_AST.decl * code_fragment) list * (string * FStar_Compiler_Range.range) list * parse_error option)
    | Term of FStar_Parser_AST.term
    | ParseError of parse_error

module BU = FStar_Compiler_Util
module Range = FStar_Compiler_Range

(* Main parser entry point.
   - [Filename f] / [Toplevel frag]: parse a complete module or toplevel
     fragment into [ASTFragment]; a [Module] read from a file with an
     interface extension is rewrapped as [Interface].
   - [Incremental frag]: parse declarations one at a time, pairing each
     with its raw source text, into [IncrementalFragment].
   - [Fragment frag]: parse a single term into [Term].
   Syntax errors escaping the parser are returned as [ParseError] rather
   than raised; an empty input yields an empty [ASTFragment]. *)
let parse fn =
  FStar_Parser_Util.warningHandler := (function
    | e -> Printf.printf "There was some warning (TODO)\n");

  (* Build the sedlex buffer.  For [Filename] we validate the extension
     and read the file; any exception while creating the buffer is
     reported as invalid UTF-8. *)
  let lexbuf, filename, contents = match fn with
    | Filename f ->
        check_extension f;
        let f', contents = read_file f in
        (try create contents f' 1 0, f', contents
         with _ -> raise_err (Fatal_InvalidUTF8Encoding, U.format1 "File %s has invalid UTF-8 encoding.\n" f'))
    | Incremental s
    | Toplevel s
    | Fragment s ->
      create s.frag_text s.frag_fname (Z.to_int s.frag_line) (Z.to_int s.frag_col), "", s.frag_text
  in

  (* Adapt the lexer to the (token, start, end) triples expected by
     Menhir's revised API. *)
  let lexer () =
    let tok = FStar_Parser_LexFStar.token lexbuf in
    (tok, lexbuf.start_p, lexbuf.cur_p)
  in
  let range_of_positions start fin =
    let start_pos = FStar_Parser_Util.pos_of_lexpos start in
    let end_pos = FStar_Parser_Util.pos_of_lexpos fin in
    FStar_Compiler_Range.mk_range filename start_pos end_pos
  in
  (* Synthesize a [parse_error] at the lexer's current position. *)
  let err_of_parse_error () =
      let pos = lexbuf.cur_p in
      Fatal_SyntaxError,
      "Syntax error",
      range_of_positions pos pos
  in
  (* Parse declarations one by one until EOF or the first error, pairing
     each declaration with the raw source text it spans. *)
  let parse_incremental_decls () =
      let parse_one_decl = MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.oneDeclOrEOF in
      (* [contents_at range] recovers the raw text of [contents] covered
         by [range], by slicing the split lines at the range's start and
         end columns. *)
      let contents_at =
        let lines = U.splitlines contents in
        let split_line_at_col line col =
            if col > 0
            then (
                (* Don't index directly into the string, since this is a UTF-8 string.
                   Convert first to a list of charaters, index into that, and then convert
                   back to a string *)
                let chars = FStar_String.list_of_string line in
                if col <= List.length chars
                then (
                  let prefix, suffix = FStar_Compiler_Util.first_N (Z.of_int col) chars in
                  Some (FStar_String.string_of_list prefix,
                        FStar_String.string_of_list suffix)
                )
                else (
                  None
                )
            )
            else None
        in
        let line_from_col line pos =
          match split_line_at_col line pos with
          | None -> None
          | Some (_, p) -> Some p
        in
        let line_to_col line pos =
          match split_line_at_col line pos with
          | None -> None
          | Some (p, _) -> Some p
        in
        (* Find the raw content of the input from the line of the start_pos to the end_pos.
           This is used by Interactive.Incremental to record exactly the raw content of the
           fragment that was checked *)
        fun (range:Range.range) ->
          (* discard all lines until the start line *)
          let start_pos = Range.start_of_range range in
          let end_pos = Range.end_of_range range in
          let start_line = Z.to_int (Range.line_of_pos start_pos) in
          let start_col = Z.to_int (Range.col_of_pos start_pos) in
          let end_line = Z.to_int (Range.line_of_pos end_pos) in
          let end_col = Z.to_int (Range.col_of_pos end_pos) in
          let suffix =
            FStar_Compiler_Util.nth_tail
              (Z.of_int (if start_line > 0 then start_line - 1 else 0))
              lines
          in
          (* Take all the lines between the start and end lines *)
          let text, rest =
            FStar_Compiler_Util.first_N
              (Z.of_int (end_line - start_line))
              suffix
          in
          (* Drop the prefix of the first line before the start column. *)
          let text =
            match text with
            | first_line::rest -> (
              match line_from_col first_line start_col with
              | Some s -> s :: rest
              | _ -> text
            )
            | _ -> text
          in
          let text =
          (* For the last line itself, take the prefix of it up to the character of the end_pos *)
            match rest with
            | last::_ -> (
              match line_to_col last end_col with
              | None -> text
              | Some last ->
                (* The last line is also the first line *)
                match text with
                | [] -> (
                  match line_from_col last start_col with
                  | None -> [last]
                  | Some l -> [l]
                )
                | _ -> text @ [last]
            )
            | _ -> text
          in
          { range;
            code = FStar_String.concat "\n" text }
      in
      let open FStar_Pervasives in
      (* Accumulate declarations (in reverse) until EOF ([Inl None]) or
         the first parse error ([Inr]). *)
      let rec parse decls =
        let start_pos = current_pos lexbuf in
        let d =
          try
            (* Reset the gensym between decls, to ensure determinism,
               otherwise, every _ is parsed as different name *)
            FStar_Ident.reset_gensym();
            Inl (parse_one_decl lexer)
          with
          | FStar_Errors.Error(e, msg, r, _ctx) ->
            Inr (e, msg, r)

          | Parsing.Parse_error as _e ->
            Inr (err_of_parse_error ())
        in
        match d with
        | Inl None -> List.rev decls, None
        | Inl (Some d) ->
          (* The parser may advance the lexer beyond the decls last token.
             E.g., in `let f x = 0 let g = 1`, we will have parsed the decl for `f`
                   but the lexer will have advanced to `let ^ g ...` since the
                   parser will have looked ahead.
                   Rollback the lexer one token for declarations whose syntax
                   requires such lookahead to complete a production.
          *)
          let end_pos =
            if not (FStar_Parser_AST.decl_syntax_is_delimited d)
            then (
              rollback lexbuf;
              current_pos lexbuf
            )
            else (
              current_pos lexbuf
            )
          in
          let raw_contents = contents_at d.drange in
          (*
          if FStar_Options.debug_any()
          then (
            FStar_Compiler_Util.print4 "Parsed decl@%s=%s\nRaw contents@%s=%s\n"
              (FStar_Compiler_Range.string_of_def_range d.drange)
              (FStar_Parser_AST.decl_to_string d)
              (FStar_Compiler_Range.string_of_def_range raw_contents.range)
              raw_contents.code
          );
          *)
          parse ((d, raw_contents)::decls)
        | Inr err -> List.rev decls, Some err
      in
      parse []
  in
  (* NOTE(review): [parse_incremental_fragment] is not referenced below —
     the [Incremental] branch calls [parse_incremental_decls] directly;
     kept, presumably for parity with upstream F*. *)
  let parse_incremental_fragment () =
      let decls, err_opt = parse_incremental_decls () in
      match err_opt with
      | None ->
        FStar_Parser_AST.as_frag (List.map fst decls)
      | Some (e, msg, r) ->
        raise (FStar_Errors.Error(e, msg, r, []))
  in

  (* Dispatch on the requested mode; convert known parser exceptions into
     [parse_result] values. *)
  try
    match fn with
    | Filename _
    | Toplevel _ -> begin
      let fileOrFragment =
          MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.inputFragment lexer
      in
      let frags = match fileOrFragment with
          | FStar_Pervasives.Inl modul ->
             if has_extension filename interface_extensions
             then match modul with
                  | FStar_Parser_AST.Module(l,d) ->
                    FStar_Pervasives.Inl (FStar_Parser_AST.Interface(l, d, true))
                  | _ -> failwith "Impossible"
             else FStar_Pervasives.Inl modul
          | _ -> fileOrFragment
      in ASTFragment (frags, FStar_Parser_Util.flush_comments ())
      end

    | Incremental _ ->
      let decls, err_opt = parse_incremental_decls () in
      IncrementalFragment(decls, FStar_Parser_Util.flush_comments(), err_opt)

    | Fragment _ ->
      Term (MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.term lexer)
  with
    | FStar_Errors.Empty_frag ->
      ASTFragment (FStar_Pervasives.Inr [], [])

    | FStar_Errors.Error(e, msg, r, _ctx) ->
      ParseError (e, msg, r)

    | Parsing.Parse_error as _e ->
      ParseError (err_of_parse_error())

(** Parsing of command-line error/warning/silent flags. *)
(* Parse the flag string [s] (empty means "no user flags") with the
   generated [warn_error_list] grammar, then apply the resulting flags.
   Any lexing or parsing failure is collapsed into a [failwith] carrying
   the offending string. *)
let parse_warn_error s =
  let user_flags =
    if s = ""
    then []
    else
      let lexbuf = FStar_Sedlexing.create s "" 0 (String.length s) in
      let lexer() = let tok = FStar_Parser_LexFStar.token lexbuf in
        (tok, lexbuf.start_p, lexbuf.cur_p)
      in
      try
        MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.warn_error_list lexer
      with e ->
        failwith (U.format1 "Malformed warn-error list: %s" s)
  in
  FStar_Errors.update_flags user_flags


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_ToDocument.ml
================================================
open Prims
(* If [t] is a one-argument lambda (a thunk-like [Abs] with exactly one
   pattern), strip the lambda and return its body; otherwise return [t]
   unchanged. *)
let (maybe_unthunk : FStar_Parser_AST.term -> FStar_Parser_AST.term) =
  fun t ->
    match t.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Abs ([_pat], body) -> body
    | _ -> t
(* Minimum of two integers; returns the first argument on ties. *)
let (min : Prims.int -> Prims.int -> Prims.int) =
  fun a -> fun b -> if a <= b then a else b
(* Maximum of two integers; returns the second argument on ties. *)
let (max : Prims.int -> Prims.int -> Prims.int) =
  fun a -> fun b -> if b < a then a else b
(* [map_rev f l]: apply [f] to every element of [l] and return the
   results in reverse order (tail-recursive, like [List.rev_map]). *)
let map_rev : 'a 'b . ('a -> 'b) -> 'a Prims.list -> 'b Prims.list =
  fun f ->
    fun l ->
      let rec go acc = function
        | [] -> acc
        | hd::tl -> go (f hd :: acc) tl in
      go [] l
(* Apply [f] to every element of [l]; if [f] yields [Some] on all of
   them, return [Some] of the collected results, otherwise [None] (the
   first [None] aborts with an empty accumulator, which then fails the
   length check below whenever [l] is non-empty).
   NOTE(review): the accumulator is never re-reversed, so a successful
   result is in REVERSE order of [l] — callers must account for this. *)
let map_if_all :
  'a 'b .
    ('a -> 'b FStar_Pervasives_Native.option) ->
      'a Prims.list -> 'b Prims.list FStar_Pervasives_Native.option
  =
  fun f ->
    fun l ->
      let rec aux l1 acc =
        match l1 with
        | [] -> acc
        | x::xs ->
            let uu___ = f x in
            (match uu___ with
             | FStar_Pervasives_Native.Some r -> aux xs (r :: acc)
             | FStar_Pervasives_Native.None -> []) in
      let r = aux l [] in
      if (FStar_Compiler_List.length l) = (FStar_Compiler_List.length r)
      then FStar_Pervasives_Native.Some r
      else FStar_Pervasives_Native.None
(* [all p l] holds iff predicate [p] holds of every element of [l];
   stops at the first failing element. *)
let rec all : 'a . ('a -> Prims.bool) -> 'a Prims.list -> Prims.bool =
  fun f ->
    fun l ->
      match l with
      | [] -> true
      | hd::tl -> f hd && all f tl
(* True iff [args] is non-empty and every argument carries the
   [Nothing] implicitness marker, i.e. all arguments are explicit. *)
let (all1_explicit :
  (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list -> Prims.bool) =
  fun args ->
    (Prims.op_Negation (FStar_Compiler_List.isEmpty args)) &&
      (FStar_Compiler_Util.for_all
         (fun uu___ ->
            match uu___ with
            | (uu___1, FStar_Parser_AST.Nothing) -> true
            | uu___1 -> false) args)
(* Mutable knob initialized to [true]; by its name it presumably controls
   whether tuples are printed in unfolded form — confirm at use sites. *)
let (unfold_tuples : Prims.bool FStar_Compiler_Effect.ref) =
  FStar_Compiler_Util.mk_ref true
(* Shorthand: turn a plain string into a pprint document. *)
let (str : Prims.string -> FStar_Pprint.document) =
  fun s -> FStar_Pprint.doc_of_string s
(* [default_or_map n f x]: return [n] when [x] is [None], and [f v] when
   [x] is [Some v] (an option fold). *)
let default_or_map :
  'uuuuu 'uuuuu1 .
    'uuuuu ->
      ('uuuuu1 -> 'uuuuu) -> 'uuuuu1 FStar_Pervasives_Native.option -> 'uuuuu
  =
  fun n ->
    fun f ->
      fun x ->
        match x with
        | FStar_Pervasives_Native.Some v -> f v
        | FStar_Pervasives_Native.None -> n
(* [prefix2 p b]: render [p] then [b], indenting [b] by 2 if broken. *)
let (prefix2 :
  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =
  fun prefix_ ->
    fun body ->
      FStar_Pprint.prefix (Prims.of_int (2)) Prims.int_one prefix_ body
(* Like [prefix2], but collapse to just [p] when the body is empty. *)
let (prefix2_nonempty :
  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =
  fun prefix_ ->
    fun body ->
      if body = FStar_Pprint.empty then prefix_ else prefix2 prefix_ body
(* Operator alias for [prefix2]. *)
let (op_Hat_Slash_Plus_Hat :
  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =
  fun prefix_ -> fun body -> prefix2 prefix_ body
(* Jump to a new line with indentation 2 before [body]. *)
let (jump2 : FStar_Pprint.document -> FStar_Pprint.document) =
  fun body -> FStar_Pprint.jump (Prims.of_int (2)) Prims.int_one body
(* Infix layout with indentation 2. *)
let (infix2 :
  FStar_Pprint.document ->
    FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)
  = FStar_Pprint.infix (Prims.of_int (2)) Prims.int_one
(* Infix layout with no extra indentation. *)
let (infix0 :
  FStar_Pprint.document ->
    FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)
  = FStar_Pprint.infix Prims.int_zero Prims.int_one
(* A breakable space of width one. *)
let (break1 : FStar_Pprint.document) = FStar_Pprint.break_ Prims.int_one
(* [separate_break_map sep f l]: render each element of [l] with [f],
   separated by " " ^ [sep] ^ breakable-space, grouped so the whole list
   fits on one line when possible. *)
let separate_break_map :
  'uuuuu .
    FStar_Pprint.document ->
      ('uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun l ->
        let uu___ =
          let uu___1 =
            let uu___2 = FStar_Pprint.op_Hat_Hat sep break1 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
          FStar_Pprint.separate_map uu___1 f l in
        FStar_Pprint.group uu___
(* [precede_break_separate_map prec sep f l]: render [prec] preceding
   [f x0], then each remaining element as break ^ [sep] ^ " " ^ [f xi].
   NOTE(review): uses [List.hd]/[List.tl], so [l] must be non-empty. *)
let precede_break_separate_map :
  'uuuuu .
    FStar_Pprint.document ->
      FStar_Pprint.document ->
        ('uuuuu -> FStar_Pprint.document) ->
          'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun prec ->
    fun sep ->
      fun f ->
        fun l ->
          let uu___ =
            let uu___1 = FStar_Pprint.op_Hat_Hat prec FStar_Pprint.space in
            let uu___2 =
              let uu___3 = FStar_Compiler_List.hd l in
              FStar_Compiler_Effect.op_Bar_Greater uu___3 f in
            FStar_Pprint.precede uu___1 uu___2 in
          let uu___1 =
            let uu___2 = FStar_Compiler_List.tl l in
            FStar_Pprint.concat_map
              (fun x ->
                 let uu___3 =
                   let uu___4 =
                     let uu___5 = f x in
                     FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
                   FStar_Pprint.op_Hat_Hat sep uu___4 in
                 FStar_Pprint.op_Hat_Hat break1 uu___3) uu___2 in
          FStar_Pprint.op_Hat_Hat uu___ uu___1
(* [concat_break_map f l]: concatenate [f x ^ break] for every element,
   grouped onto one line when it fits. *)
let concat_break_map :
  'uuuuu .
    ('uuuuu -> FStar_Pprint.document) ->
      'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun f ->
    fun l ->
      let uu___ =
        FStar_Pprint.concat_map
          (fun x -> let uu___1 = f x in FStar_Pprint.op_Hat_Hat uu___1 break1)
          l in
      FStar_Pprint.group uu___
(* Delimiter wrappers: surround [contents] with the given brackets,
   nesting by 2.  The [soft_] variants use [soft_surround] (prefer
   keeping everything on one line); the break parameter is 0 (tight, no
   space inside the delimiters) or 1 (breakable space inside). *)
let (parens_with_nesting : FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero
      FStar_Pprint.lparen contents FStar_Pprint.rparen
let (soft_parens_with_nesting :
  FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_zero
      FStar_Pprint.lparen contents FStar_Pprint.rparen
let (braces_with_nesting : FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
      FStar_Pprint.lbrace contents FStar_Pprint.rbrace
let (soft_braces_with_nesting :
  FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one
      FStar_Pprint.lbrace contents FStar_Pprint.rbrace
let (soft_braces_with_nesting_tight :
  FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_zero
      FStar_Pprint.lbrace contents FStar_Pprint.rbrace
let (brackets_with_nesting : FStar_Pprint.document -> FStar_Pprint.document)
  =
  fun contents ->
    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
      FStar_Pprint.lbracket contents FStar_Pprint.rbracket
let (soft_brackets_with_nesting :
  FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one
      FStar_Pprint.lbracket contents FStar_Pprint.rbracket
(* Surround with the keywords "begin" / "end". *)
let (soft_begin_end_with_nesting :
  FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    let uu___ = str "begin" in
    let uu___1 = str "end" in
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one uu___
      contents uu___1
(* Surround with the typeclass-argument delimiters "{|" / "|}". *)
let (tc_arg : FStar_Pprint.document -> FStar_Pprint.document) =
  fun contents ->
    let uu___ = str "{|" in
    let uu___1 = str "|}" in
    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one uu___
      contents uu___1
(* True iff the binder carries the [TypeClassArg] qualifier. *)
let (is_tc_binder : FStar_Parser_AST.binder -> Prims.bool) =
  fun b ->
    match b.FStar_Parser_AST.aqual with
    | FStar_Pervasives_Native.Some (FStar_Parser_AST.TypeClassArg) -> true
    | uu___ -> false
(* True iff the argument qualifier is a [Meta] qualifier. *)
let (is_meta_qualifier :
  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option -> Prims.bool)
  =
  fun aq ->
    match aq with
    | FStar_Pervasives_Native.Some (FStar_Parser_AST.Meta uu___) -> true
    | uu___ -> false
(* A binder is joinable (can be grouped with its neighbours when printed)
   iff it is neither a typeclass binder nor meta-qualified. *)
let (is_joinable_binder : FStar_Parser_AST.binder -> Prims.bool) =
  fun b ->
    (let uu___ = is_tc_binder b in Prims.op_Negation uu___) &&
      (Prims.op_Negation (is_meta_qualifier b.FStar_Parser_AST.aqual))
(* [separate_map_last sep f es]: like [separate_map], but [f] is told
   whether there are further elements after the current one — it receives
   [true] for every element except the last. *)
let separate_map_last :
  'uuuuu .
    FStar_Pprint.document ->
      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun es ->
        let n = FStar_Compiler_List.length es in
        let docs =
          FStar_Compiler_List.mapi
            (fun i -> fun e -> f (i <> (n - Prims.int_one)) e) es in
        FStar_Pprint.separate sep docs
(* [separate_break_map_last]: [separate_break_map] where [f] also learns
   whether the current element is followed by more (true except for the
   last one). *)
let separate_break_map_last :
  'uuuuu .
    FStar_Pprint.document ->
      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun l ->
        let uu___ =
          let uu___1 =
            let uu___2 = FStar_Pprint.op_Hat_Hat sep break1 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
          separate_map_last uu___1 f l in
        FStar_Pprint.group uu___
(* [separate_map_or_flow sep f l]: short lists (fewer than 10 elements)
   keep rigid separation; longer ones use flowing layout. *)
let separate_map_or_flow :
  'uuuuu .
    FStar_Pprint.document ->
      ('uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun l ->
        if (FStar_Compiler_List.length l) >= (Prims.of_int (10))
        then FStar_Pprint.flow_map sep f l
        else FStar_Pprint.separate_map sep f l
(* [flow_map_last sep f es]: like [FStar_Pprint.flow] over [f]'s output,
   where [f] receives [true] for every element except the last. *)
let flow_map_last :
  'uuuuu .
    FStar_Pprint.document ->
      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun es ->
        let n = FStar_Compiler_List.length es in
        let docs =
          FStar_Compiler_List.mapi
            (fun i -> fun e -> f (i <> (n - Prims.int_one)) e) es in
        FStar_Pprint.flow sep docs
(* Last-element-aware version of [separate_map_or_flow]: rigid separation
   for fewer than 10 elements, flowing layout otherwise. *)
let separate_map_or_flow_last :
  'uuuuu .
    FStar_Pprint.document ->
      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->
        'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun sep ->
    fun f ->
      fun l ->
        if (FStar_Compiler_List.length l) < (Prims.of_int (10))
        then separate_map_last sep f l
        else flow_map_last sep f l
(* [separate_or_flow sep l]: [separate_map_or_flow] with the identity
   rendering function. *)
let (separate_or_flow :
  FStar_Pprint.document ->
    FStar_Pprint.document Prims.list -> FStar_Pprint.document)
  = fun sep -> fun l -> separate_map_or_flow sep FStar_Pervasives.id l
(* [surround_maybe_empty n b doc1 doc2 doc3]: like [FStar_Pprint.surround],
   but when the middle document is empty just join [doc1] and [doc3]
   with a breakable space. *)
let (surround_maybe_empty :
  Prims.int ->
    Prims.int ->
      FStar_Pprint.document ->
        FStar_Pprint.document ->
          FStar_Pprint.document -> FStar_Pprint.document)
  =
  fun n ->
    fun b ->
      fun doc1 ->
        fun doc2 ->
          fun doc3 ->
            if doc2 = FStar_Pprint.empty
            then
              let uu___ = FStar_Pprint.op_Hat_Slash_Hat doc1 doc3 in
              FStar_Pprint.group uu___
            else FStar_Pprint.surround n b doc1 doc2 doc3
(* [soft_surround_separate_map n b void_ opening sep closing f xs]:
   render [xs] with [f] separated by [sep] and softly surrounded by
   [opening]/[closing]; an empty list renders as [void_]. *)
let soft_surround_separate_map :
  'uuuuu .
    Prims.int ->
      Prims.int ->
        FStar_Pprint.document ->
          FStar_Pprint.document ->
            FStar_Pprint.document ->
              FStar_Pprint.document ->
                ('uuuuu -> FStar_Pprint.document) ->
                  'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun n ->
    fun b ->
      fun void_ ->
        fun opening ->
          fun sep ->
            fun closing ->
              fun f ->
                fun xs ->
                  if xs = []
                  then void_
                  else
                    (let uu___1 = FStar_Pprint.separate_map sep f xs in
                     FStar_Pprint.soft_surround n b opening uu___1 closing)
(* Like [soft_surround_separate_map], but using [separate_map_or_flow]
   (flowing layout for lists of 10 or more elements); an empty list
   renders as [void_]. *)
let soft_surround_map_or_flow :
  'uuuuu .
    Prims.int ->
      Prims.int ->
        FStar_Pprint.document ->
          FStar_Pprint.document ->
            FStar_Pprint.document ->
              FStar_Pprint.document ->
                ('uuuuu -> FStar_Pprint.document) ->
                  'uuuuu Prims.list -> FStar_Pprint.document
  =
  fun n ->
    fun b ->
      fun void_ ->
        fun opening ->
          fun sep ->
            fun closing ->
              fun f ->
                fun xs ->
                  if xs = []
                  then void_
                  else
                    (let uu___1 = separate_map_or_flow sep f xs in
                     FStar_Pprint.soft_surround n b opening uu___1 closing)
(* True iff [e] is the unit literal [()]. *)
let (is_unit : FStar_Parser_AST.term -> Prims.bool) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Const FStar_Const.Const_unit -> true
    | _ -> false
(* True iff term [t] is a variable whose printed name equals that of the
   identifier [x]. *)
let (matches_var : FStar_Parser_AST.term -> FStar_Ident.ident -> Prims.bool)
  =
  fun t ->
    fun x ->
      match t.FStar_Parser_AST.tm with
      | FStar_Parser_AST.Var y ->
          let xs = FStar_Ident.string_of_id x in
          let ys = FStar_Ident.string_of_lid y in
          xs = ys
      | _ -> false
(* Predicates on constructor names for tuples and dependent tuples. *)
let (is_tuple_constructor : FStar_Ident.lident -> Prims.bool) =
  FStar_Parser_Const.is_tuple_data_lid'
let (is_dtuple_constructor : FStar_Ident.lident -> Prims.bool) =
  FStar_Parser_Const.is_dtuple_data_lid'
(* [is_list_structure cons_lid nil_lid e]: true iff [e] is a chain of
   [cons_lid] applications ending in [nil_lid] — i.e. a literal list
   built from the given constructors. *)
let (is_list_structure :
  FStar_Ident.lident ->
    FStar_Ident.lident -> FStar_Parser_AST.term -> Prims.bool)
  =
  fun cons_lid ->
    fun nil_lid ->
      let rec aux e =
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.Construct (lid, []) ->
            FStar_Ident.lid_equals lid nil_lid
        | FStar_Parser_AST.Construct (lid, uu___::(e2, uu___1)::[]) ->
            (FStar_Ident.lid_equals lid cons_lid) && (aux e2)
        | uu___ -> false in
      aux
(* True iff [e] is a literal list built from Prims cons/nil. *)
let (is_list : FStar_Parser_AST.term -> Prims.bool) =
  is_list_structure FStar_Parser_Const.cons_lid FStar_Parser_Const.nil_lid
(* Collect the elements of a literal list term (a cons/nil chain) in
   order.  Fails with "Not a list ..." on any other term shape — callers
   are expected to have checked [is_list] first. *)
let rec (extract_from_list :
  FStar_Parser_AST.term -> FStar_Parser_AST.term Prims.list) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Construct (uu___, []) -> []
    | FStar_Parser_AST.Construct
        (uu___,
         (e1, FStar_Parser_AST.Nothing)::(e2, FStar_Parser_AST.Nothing)::[])
        -> let uu___1 = extract_from_list e2 in e1 :: uu___1
    | uu___ ->
        let uu___1 =
          let uu___2 = FStar_Parser_AST.term_to_string e in
          FStar_Compiler_Util.format1 "Not a list %s" uu___2 in
        failwith uu___1
(* True iff [e] is an application of the array-of-list constructor to a
   literal list. *)
let (is_array : FStar_Parser_AST.term -> Prims.bool) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.App
        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var lid;
           FStar_Parser_AST.range = uu___; FStar_Parser_AST.level = uu___1;_},
         l, FStar_Parser_AST.Nothing)
        ->
        (FStar_Ident.lid_equals lid FStar_Parser_Const.array_of_list_lid) &&
          (is_list l)
    | uu___ -> false
(* True iff [e] is a heap reference set built from the three recognized
   shapes: the empty set, a singleton of an address-of term, or a union
   of two ref sets. *)
let rec (is_ref_set : FStar_Parser_AST.term -> Prims.bool) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    (* Set.empty *)
    | FStar_Parser_AST.Var maybe_empty_lid ->
        FStar_Ident.lid_equals maybe_empty_lid FStar_Parser_Const.set_empty
    (* Set.singleton (Heap.addr_of e1) *)
    | FStar_Parser_AST.App
        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_singleton_lid;
           FStar_Parser_AST.range = uu___; FStar_Parser_AST.level = uu___1;_},
         {
           FStar_Parser_AST.tm = FStar_Parser_AST.App
             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_addr_of_lid;
                FStar_Parser_AST.range = uu___2;
                FStar_Parser_AST.level = uu___3;_},
              e1, FStar_Parser_AST.Nothing);
           FStar_Parser_AST.range = uu___4;
           FStar_Parser_AST.level = uu___5;_},
         FStar_Parser_AST.Nothing)
        ->
        (FStar_Ident.lid_equals maybe_singleton_lid
           FStar_Parser_Const.set_singleton)
          &&
          (FStar_Ident.lid_equals maybe_addr_of_lid
             FStar_Parser_Const.heap_addr_of_lid)
    (* Set.union e1 e2 where both sides are ref sets *)
    | FStar_Parser_AST.App
        ({
           FStar_Parser_AST.tm = FStar_Parser_AST.App
             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_union_lid;
                FStar_Parser_AST.range = uu___;
                FStar_Parser_AST.level = uu___1;_},
              e1, FStar_Parser_AST.Nothing);
           FStar_Parser_AST.range = uu___2;
           FStar_Parser_AST.level = uu___3;_},
         e2, FStar_Parser_AST.Nothing)
        ->
        ((FStar_Ident.lid_equals maybe_union_lid FStar_Parser_Const.set_union)
           && (is_ref_set e1))
          && (is_ref_set e2)
    | uu___ -> false
(* Flatten a ref-set term (as recognized by [is_ref_set]) into the list
   of its singleton elements: empty set -> [], singleton -> [e1], union
   -> concatenation of both sides.  Fails with "Not a ref set ..." on any
   other shape — callers should check [is_ref_set] first. *)
let rec (extract_from_ref_set :
  FStar_Parser_AST.term -> FStar_Parser_AST.term Prims.list) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Var uu___ -> []
    | FStar_Parser_AST.App
        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___;
           FStar_Parser_AST.range = uu___1;
           FStar_Parser_AST.level = uu___2;_},
         {
           FStar_Parser_AST.tm = FStar_Parser_AST.App
             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___3;
                FStar_Parser_AST.range = uu___4;
                FStar_Parser_AST.level = uu___5;_},
              e1, FStar_Parser_AST.Nothing);
           FStar_Parser_AST.range = uu___6;
           FStar_Parser_AST.level = uu___7;_},
         FStar_Parser_AST.Nothing)
        -> [e1]
    | FStar_Parser_AST.App
        ({
           FStar_Parser_AST.tm = FStar_Parser_AST.App
             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___;
                FStar_Parser_AST.range = uu___1;
                FStar_Parser_AST.level = uu___2;_},
              e1, FStar_Parser_AST.Nothing);
           FStar_Parser_AST.range = uu___3;
           FStar_Parser_AST.level = uu___4;_},
         e2, FStar_Parser_AST.Nothing)
        ->
        let uu___5 = extract_from_ref_set e1 in
        let uu___6 = extract_from_ref_set e2 in
        FStar_Compiler_List.op_At uu___5 uu___6
    | uu___ ->
        let uu___1 =
          let uu___2 = FStar_Parser_AST.term_to_string e in
          FStar_Compiler_Util.format1 "Not a ref set %s" uu___2 in
        failwith uu___1
(* An application prints "generally" unless it is one of the special
   array / ref-set forms. *)
let (is_general_application : FStar_Parser_AST.term -> Prims.bool) =
  fun e ->
    let uu___ = (is_array e) || (is_ref_set e) in Prims.op_Negation uu___
(* A construction prints "generally" unless it is a literal list. *)
let (is_general_construction : FStar_Parser_AST.term -> Prims.bool) =
  fun e -> let uu___ = is_list e in Prims.op_Negation uu___
(* True iff [op] is a prefix operator: it starts with '!' (33) or '?'
   (63), or starts with '~' (126) without being exactly "~". *)
let (is_general_prefix_op : FStar_Ident.ident -> Prims.bool) =
  fun op ->
    let op_starting_char =
      let uu___ = FStar_Ident.string_of_id op in
      FStar_Compiler_Util.char_at uu___ Prims.int_zero in
    ((op_starting_char = 33) || (op_starting_char = 63)) ||
      ((op_starting_char = 126) &&
         (let uu___ = FStar_Ident.string_of_id op in uu___ <> "~"))
(* Peel nested applications: [f a b] yields (f, [(a, imp_a); (b, imp_b)])
   with the arguments in application order. *)
let (head_and_args :
  FStar_Parser_AST.term ->
    (FStar_Parser_AST.term * (FStar_Parser_AST.term * FStar_Parser_AST.imp)
      Prims.list))
  =
  fun e ->
    let rec strip t args =
      match t.FStar_Parser_AST.tm with
      | FStar_Parser_AST.App (head, arg, imp) ->
          strip head ((arg, imp) :: args)
      | _ -> (t, args) in
    strip e []
(* Operator associativity, used to decide where parentheses are needed
   when printing infix expressions. *)
type associativity =
  | Left 
  | Right 
  | NonAssoc 
(* Generated discriminators for [associativity]. *)
let (uu___is_Left : associativity -> Prims.bool) =
  fun projectee -> match projectee with | Left -> true | uu___ -> false
let (uu___is_Right : associativity -> Prims.bool) =
  fun projectee -> match projectee with | Right -> true | uu___ -> false
let (uu___is_NonAssoc : associativity -> Prims.bool) =
  fun projectee -> match projectee with | NonAssoc -> true | uu___ -> false
(* Pattern used to classify an operator string when assigning precedence:
   - [StartsWith c]: any operator whose first character is [c];
   - [Exact s]: the operator is exactly [s];
   - [UnicodeOperator]: any operator whose first character lies outside
     the Latin range (see [is_non_latin_char] below). *)
type token =
  | StartsWith of FStar_Char.char 
  | Exact of Prims.string 
  | UnicodeOperator 
(* Auto-generated discriminators and projectors for [token]. *)
let (uu___is_StartsWith : token -> Prims.bool) =
  fun projectee ->
    match projectee with | StartsWith _0 -> true | uu___ -> false
let (__proj__StartsWith__item___0 : token -> FStar_Char.char) =
  fun projectee -> match projectee with | StartsWith _0 -> _0
let (uu___is_Exact : token -> Prims.bool) =
  fun projectee -> match projectee with | Exact _0 -> true | uu___ -> false
let (__proj__Exact__item___0 : token -> Prims.string) =
  fun projectee -> match projectee with | Exact _0 -> _0
let (uu___is_UnicodeOperator : token -> Prims.bool) =
  fun projectee ->
    match projectee with | UnicodeOperator -> true | uu___ -> false
(* An associativity paired with the tokens living at that precedence level. *)
type associativity_level = (associativity * token Prims.list)
let (token_to_string : token -> Prims.string) =
  (* Human-readable rendering of a token pattern, used in error messages. *)
  fun tok ->
    match tok with
    | StartsWith c ->
        let first = FStar_Compiler_Util.string_of_char c in
        Prims.op_Hat first ".*"
    | Exact s -> s
    | UnicodeOperator -> ""
(* Code points above U+024F fall outside the Latin / Latin-Extended blocks;
   operators starting with such a character match [UnicodeOperator]. *)
let (is_non_latin_char : FStar_Char.char -> Prims.bool) =
  fun s -> (FStar_Compiler_Util.int_of_char s) > (Prims.of_int (0x024f))
let (matches_token : Prims.string -> token -> Prims.bool) =
  (* Does operator string [s] match the given token pattern? *)
  fun s ->
    fun tok ->
      match tok with
      | StartsWith c ->
          let first = FStar_String.get s Prims.int_zero in first = c
      | Exact s' -> s = s'
      | UnicodeOperator ->
          let first = FStar_String.get s Prims.int_zero in
          is_non_latin_char first
let matches_level :
  'uuuuu . Prims.string -> ('uuuuu * token Prims.list) -> Prims.bool =
  (* True when operator [s] matches at least one token of the level. *)
  fun s ->
    fun level ->
      match level with
      | (_assoc, tokens) ->
          let hit = FStar_Compiler_List.tryFind (matches_token s) tokens in
          hit <> FStar_Pervasives_Native.None
(* Operator precedence levels.  The [StartsWith] char codes are ASCII:
   42 '*', 47 '/', 37 '%', 43 '+', 45 '-', 64 '@', 94 '^', 36 '$',
   61 '=', 60 '<', 62 '>', 38 '&', 124 '|'. *)
let (opinfix4 : associativity_level) = (Right, [Exact "**"; UnicodeOperator])
let (opinfix3 : associativity_level) =
  (Left, [StartsWith 42; StartsWith 47; StartsWith 37])
let (opinfix2 : associativity_level) = (Left, [StartsWith 43; StartsWith 45])
(* [minus_lvl] and [equal] below are defined but intentionally absent from
   [level_associativity_spec]; "-" and "=" are caught by the StartsWith
   levels above instead. *)
let (minus_lvl : associativity_level) = (Left, [Exact "-"])
let (opinfix1 : associativity_level) =
  (Right, [StartsWith 64; StartsWith 94])
let (pipe_right : associativity_level) = (Left, [Exact "|>"])
let (opinfix0d : associativity_level) = (Left, [StartsWith 36])
let (opinfix0c : associativity_level) =
  (Left, [StartsWith 61; StartsWith 60; StartsWith 62])
let (equal : associativity_level) = (Left, [Exact "="])
let (opinfix0b : associativity_level) = (Left, [StartsWith 38])
let (opinfix0a : associativity_level) = (Left, [StartsWith 124])
let (colon_equals : associativity_level) = (NonAssoc, [Exact ":="])
let (amp : associativity_level) = (Right, [Exact "&"])
let (colon_colon : associativity_level) = (Right, [Exact "::"])
(* All precedence levels, ordered from tightest-binding (opinfix4) to
   loosest (colon_colon).  The list index becomes the numeric level via
   [level_table] below. *)
let (level_associativity_spec : associativity_level Prims.list) =
  [opinfix4;
  opinfix3;
  opinfix2;
  opinfix1;
  pipe_right;
  opinfix0d;
  opinfix0c;
  opinfix0b;
  opinfix0a;
  colon_equals;
  amp;
  colon_colon]
(* Precomputed table mapping each level's tokens to a triple
   (left, mine, right) of numeric precedence levels, derived from the
   position [i] of the level in [level_associativity_spec] and its
   associativity: the non-associating side gets level [i - 1]. *)
let (level_table :
  ((Prims.int * Prims.int * Prims.int) * token Prims.list) Prims.list) =
  let levels_from_associativity l uu___ =
    match uu___ with
    | Left -> (l, l, (l - Prims.int_one))
    | Right -> ((l - Prims.int_one), l, l)
    | NonAssoc -> ((l - Prims.int_one), l, (l - Prims.int_one)) in
  FStar_Compiler_List.mapi
    (fun i ->
       fun uu___ ->
         match uu___ with
         | (assoc, tokens) -> ((levels_from_associativity i assoc), tokens))
    level_associativity_spec
(* Look up the (left, mine, right) levels of operator [s].
   NOTE(review): the [token_associativity_spec] parameter is unused — the
   lookup always goes through the precomputed [level_table]; callers pass
   [level_associativity_spec] anyway.  Fails if the operator matches no
   known level. *)
let (assign_levels :
  associativity_level Prims.list ->
    Prims.string -> (Prims.int * Prims.int * Prims.int))
  =
  fun token_associativity_spec ->
    fun s ->
      let uu___ = FStar_Compiler_List.tryFind (matches_level s) level_table in
      match uu___ with
      | FStar_Pervasives_Native.Some (assoc_levels, uu___1) -> assoc_levels
      | uu___1 -> failwith (Prims.op_Hat "Unrecognized operator " s)
(* Maximum "mine" precedence level over a list of levels [l].  Each level is
   located in [level_table] by structural equality of its token list; an
   unknown level is a fatal error (it would mean [level_table] and the level
   constants above are out of sync). *)
let max_level : 'uuuuu . ('uuuuu * token Prims.list) Prims.list -> Prims.int
  =
  fun l ->
    let find_level_and_max n level =
      let uu___ =
        FStar_Compiler_List.tryFind
          (fun uu___1 ->
             match uu___1 with
             | (uu___2, tokens) ->
                 tokens = (FStar_Pervasives_Native.snd level)) level_table in
      match uu___ with
      | FStar_Pervasives_Native.Some ((uu___1, l1, uu___2), uu___3) ->
          max n l1
      | FStar_Pervasives_Native.None ->
          let uu___1 =
            let uu___2 =
              let uu___3 =
                FStar_Compiler_List.map token_to_string
                  (FStar_Pervasives_Native.snd level) in
              FStar_String.concat "," uu___3 in
            FStar_Compiler_Util.format1 "Undefined associativity level %s"
              uu___2 in
          failwith uu___1 in
    FStar_Compiler_List.fold_left find_level_and_max Prims.int_zero l
let (levels : Prims.string -> (Prims.int * Prims.int * Prims.int)) =
  (* (left, mine, right) precedence levels for operator [op].  The "*"
     operator gets its left level lowered by one as a special case. *)
  fun op ->
    let (left, mine, right) = assign_levels level_associativity_spec op in
    if op = "*"
    then ((left - Prims.int_one), mine, right)
    else (left, mine, right)
(* The infix levels 0a-0d, 1 and 2, grouped for membership tests below. *)
let (operatorInfix0ad12 : associativity_level Prims.list) =
  [opinfix0a; opinfix0b; opinfix0c; opinfix0d; opinfix1; opinfix2]
let (is_operatorInfix0ad12 : FStar_Ident.ident -> Prims.bool) =
  (* Membership test: does [op] belong to infix levels 0a-0d, 1 or 2? *)
  fun op ->
    let op_s = FStar_Ident.string_of_id op in
    let found =
      FStar_Compiler_List.tryFind (matches_level op_s) operatorInfix0ad12 in
    found <> FStar_Pervasives_Native.None
let (is_operatorInfix34 : FStar_Ident.ident -> Prims.bool) =
  (* Membership test: does [op] belong to infix level 3 or 4? *)
  let opinfix34 = [opinfix3; opinfix4] in
  fun op ->
    let op_s = FStar_Ident.string_of_id op in
    let found = FStar_Compiler_List.tryFind (matches_level op_s) opinfix34 in
    found <> FStar_Pervasives_Native.None
(* Number of arguments the pretty-printer can handle for operator [op]:
   1 for prefix operators (and "-", "~"), 2 for the known infix operators,
   3 for the update operators ".()<-" / ".[]<-", 0 otherwise. *)
let (handleable_args_length : FStar_Ident.ident -> Prims.int) =
  fun op ->
    let op_s = FStar_Ident.string_of_id op in
    let uu___ =
      (is_general_prefix_op op) || (FStar_Compiler_List.mem op_s ["-"; "~"]) in
    if uu___
    then Prims.int_one
    else
      (let uu___2 =
         ((is_operatorInfix0ad12 op) || (is_operatorInfix34 op)) ||
           (FStar_Compiler_List.mem op_s
              ["<==>"; "==>"; "\\/"; "/\\"; "="; "|>"; ":="; ".()"; ".[]"]) in
       if uu___2
       then (Prims.of_int (2))
       else
         if FStar_Compiler_List.mem op_s [".()<-"; ".[]<-"]
         then (Prims.of_int (3))
         else Prims.int_zero)
(* Can the pretty-printer handle operator [op] applied to [args]?
   Mirrors [handleable_args_length]: the operator class accepted depends
   on the argument count (0 is always handleable, 1 for prefix operators,
   2 for infix, 3 for the update operators). *)
let handleable_op :
  'uuuuu . FStar_Ident.ident -> 'uuuuu Prims.list -> Prims.bool =
  fun op ->
    fun args ->
      match FStar_Compiler_List.length args with
      | uu___ when uu___ = Prims.int_zero -> true
      | uu___ when uu___ = Prims.int_one ->
          (is_general_prefix_op op) ||
            (let uu___1 = FStar_Ident.string_of_id op in
             FStar_Compiler_List.mem uu___1 ["-"; "~"])
      | uu___ when uu___ = (Prims.of_int (2)) ->
          ((is_operatorInfix0ad12 op) || (is_operatorInfix34 op)) ||
            (let uu___1 = FStar_Ident.string_of_id op in
             FStar_Compiler_List.mem uu___1
               ["<==>"; "==>"; "\\/"; "/\\"; "="; "|>"; ":="; ".()"; ".[]"])
      | uu___ when uu___ = (Prims.of_int (3)) ->
          let uu___1 = FStar_Ident.string_of_id op in
          FStar_Compiler_List.mem uu___1 [".()<-"; ".[]<-"]
      | uu___ -> false
(* How a type annotation is laid out: [Binders] prints named binders,
   [Arrows] prints an arrow type.  The integer payloads are layout
   parameters threaded through the printer. *)
type annotation_style =
  | Binders of (Prims.int * Prims.int * Prims.bool) 
  | Arrows of (Prims.int * Prims.int) 
(* Auto-generated discriminators and projectors for [annotation_style]. *)
let (uu___is_Binders : annotation_style -> Prims.bool) =
  fun projectee -> match projectee with | Binders _0 -> true | uu___ -> false
let (__proj__Binders__item___0 :
  annotation_style -> (Prims.int * Prims.int * Prims.bool)) =
  fun projectee -> match projectee with | Binders _0 -> _0
let (uu___is_Arrows : annotation_style -> Prims.bool) =
  fun projectee -> match projectee with | Arrows _0 -> true | uu___ -> false
let (__proj__Arrows__item___0 : annotation_style -> (Prims.int * Prims.int))
  = fun projectee -> match projectee with | Arrows _0 -> _0
(* True when [e] is a Product (arrow) type whose binders are all annotated
   and which involves more than one binder overall — in that case the
   Binders annotation style can be used. *)
let (all_binders_annot : FStar_Parser_AST.term -> Prims.bool) =
  fun e ->
    let is_binder_annot b =
      match b.FStar_Parser_AST.b with
      | FStar_Parser_AST.Annotated uu___ -> true
      | uu___ -> false in
    (* Walk the Product spine, counting binders; bail out with (false, 0)
       as soon as an unannotated binder is found. *)
    let rec all_binders e1 l =
      match e1.FStar_Parser_AST.tm with
      | FStar_Parser_AST.Product (bs, tgt) ->
          let uu___ = FStar_Compiler_List.for_all is_binder_annot bs in
          if uu___
          then all_binders tgt (l + (FStar_Compiler_List.length bs))
          else (false, Prims.int_zero)
      | uu___ -> (true, (l + Prims.int_one)) in
    let uu___ = all_binders e Prims.int_zero in
    match uu___ with
    (* NOTE(review): [if … then true else false] is just [b && (l > 1)]. *)
    | (b, l) -> if b && (l > Prims.int_one) then true else false
(* A document-concatenation function, e.g. [cat_with_colon] below. *)
type catf =
  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document
let (cat_with_colon :
  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =
  (* Render "x : y", allowing a line break between the colon and [y]. *)
  fun x ->
    fun y ->
      let colon_y = FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon y in
      FStar_Pprint.op_Hat_Hat x colon_y
(* Mutable queue of source comments (text, range) still waiting to be
   interleaved into the output; consumed by [with_comment],
   [with_comment_sep] and [place_comments_until_pos] below. *)
let (comment_stack :
  (Prims.string * FStar_Compiler_Range.range) Prims.list
    FStar_Compiler_Effect.ref)
  = FStar_Compiler_Util.mk_ref []
(* Metadata about a declaration used when deciding how many blank lines to
   emit around comments: its source range, and whether it carries
   qualifiers and/or attributes. *)
type decl_meta =
  {
  r: FStar_Compiler_Range.range ;
  has_qs: Prims.bool ;
  has_attrs: Prims.bool }
(* Auto-generated record projectors for [decl_meta]. *)
let (__proj__Mkdecl_meta__item__r : decl_meta -> FStar_Compiler_Range.range)
  = fun projectee -> match projectee with | { r; has_qs; has_attrs;_} -> r
let (__proj__Mkdecl_meta__item__has_qs : decl_meta -> Prims.bool) =
  fun projectee -> match projectee with | { r; has_qs; has_attrs;_} -> has_qs
let (__proj__Mkdecl_meta__item__has_attrs : decl_meta -> Prims.bool) =
  fun projectee ->
    match projectee with | { r; has_qs; has_attrs;_} -> has_attrs
(* Placeholder metadata (dummy range, no qualifiers, no attributes). *)
let (dummy_meta : decl_meta) =
  { r = FStar_Compiler_Range.dummyRange; has_qs = false; has_attrs = false }
(* Print [tm] with [printer], prepending any comments from [comment_stack]
   that occur before [tmrange].  Pops matched comments off the stack (a
   side effect); each is followed by a hardline.  If a comment was seen
   past the print position but before the end of the range, a second pass
   collects comments up to the end of the range after printing. *)
let with_comment :
  'uuuuu .
    ('uuuuu -> FStar_Pprint.document) ->
      'uuuuu -> FStar_Compiler_Range.range -> FStar_Pprint.document
  =
  fun printer ->
    fun tm ->
      fun tmrange ->
        (* Accumulate comments located before [print_pos]; the boolean
           result records whether more comments exist before
           [lookahead_pos]. *)
        let rec comments_before_pos acc print_pos lookahead_pos =
          let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in
          match uu___ with
          | [] -> (acc, false)
          | (c, crange)::cs ->
              let comment =
                let uu___1 = str c in
                FStar_Pprint.op_Hat_Hat uu___1 FStar_Pprint.hardline in
              let uu___1 =
                FStar_Compiler_Range.range_before_pos crange print_pos in
              if uu___1
              then
                (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;
                 (let uu___3 = FStar_Pprint.op_Hat_Hat acc comment in
                  comments_before_pos uu___3 print_pos lookahead_pos))
              else
                (let uu___3 =
                   FStar_Compiler_Range.range_before_pos crange lookahead_pos in
                 (acc, uu___3)) in
        let uu___ =
          let uu___1 =
            let uu___2 = FStar_Compiler_Range.start_of_range tmrange in
            FStar_Compiler_Range.end_of_line uu___2 in
          let uu___2 = FStar_Compiler_Range.end_of_range tmrange in
          comments_before_pos FStar_Pprint.empty uu___1 uu___2 in
        match uu___ with
        | (comments, has_lookahead) ->
            let printed_e = printer tm in
            let comments1 =
              if has_lookahead
              then
                (* Second pass: pick up comments inside the term's range. *)
                let pos = FStar_Compiler_Range.end_of_range tmrange in
                let uu___1 = comments_before_pos comments pos pos in
                FStar_Pervasives_Native.fst uu___1
              else comments in
            if comments1 = FStar_Pprint.empty
            then printed_e
            else
              (let uu___2 = FStar_Pprint.op_Hat_Hat comments1 printed_e in
               FStar_Pprint.group uu___2)
(* Like [with_comment], but returns the collected comments and the printed
   value separately (the caller decides how to join them), and joins
   multiple comments with a hardline *between* them rather than appending
   a hardline after each one. *)
let with_comment_sep :
  'uuuuu 'uuuuu1 .
    ('uuuuu -> 'uuuuu1) ->
      'uuuuu ->
        FStar_Compiler_Range.range -> (FStar_Pprint.document * 'uuuuu1)
  =
  fun printer ->
    fun tm ->
      fun tmrange ->
        (* Same stack-consuming accumulation as in [with_comment]. *)
        let rec comments_before_pos acc print_pos lookahead_pos =
          let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in
          match uu___ with
          | [] -> (acc, false)
          | (c, crange)::cs ->
              let comment = str c in
              let uu___1 =
                FStar_Compiler_Range.range_before_pos crange print_pos in
              if uu___1
              then
                (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;
                 (let uu___3 =
                    if acc = FStar_Pprint.empty
                    then comment
                    else
                      (let uu___5 =
                         FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                           comment in
                       FStar_Pprint.op_Hat_Hat acc uu___5) in
                  comments_before_pos uu___3 print_pos lookahead_pos))
              else
                (let uu___3 =
                   FStar_Compiler_Range.range_before_pos crange lookahead_pos in
                 (acc, uu___3)) in
        let uu___ =
          let uu___1 =
            let uu___2 = FStar_Compiler_Range.start_of_range tmrange in
            FStar_Compiler_Range.end_of_line uu___2 in
          let uu___2 = FStar_Compiler_Range.end_of_range tmrange in
          comments_before_pos FStar_Pprint.empty uu___1 uu___2 in
        match uu___ with
        | (comments, has_lookahead) ->
            let printed_e = printer tm in
            let comments1 =
              if has_lookahead
              then
                let pos = FStar_Compiler_Range.end_of_range tmrange in
                let uu___1 = comments_before_pos comments pos pos in
                FStar_Pervasives_Native.fst uu___1
              else comments in
            (comments1, printed_e)
(* Append to [doc] every stacked comment located before [pos], separated by
   an appropriate number of blank lines.  [k] is the minimum number of
   hardlines to emit, [lbegin] the line where the previous output ended;
   [meta_decl] influences spacing around qualified/attributed declarations,
   [r] records whether we are in the recursive (comment-emitting) pass and
   [init] whether this is the first declaration.  Pops from
   [comment_stack] as a side effect. *)
let rec (place_comments_until_pos :
  Prims.int ->
    Prims.int ->
      FStar_Compiler_Range.pos ->
        decl_meta ->
          FStar_Pprint.document ->
            Prims.bool -> Prims.bool -> FStar_Pprint.document)
  =
  fun k ->
    fun lbegin ->
      fun pos ->
        fun meta_decl ->
          fun doc ->
            fun r ->
              fun init ->
                let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in
                match uu___ with
                | (comment, crange)::cs when
                    FStar_Compiler_Range.range_before_pos crange pos ->
                    (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;
                     (let lnum =
                        let uu___2 =
                          let uu___3 =
                            let uu___4 =
                              FStar_Compiler_Range.start_of_range crange in
                            FStar_Compiler_Range.line_of_pos uu___4 in
                          uu___3 - lbegin in
                        max k uu___2 in
                      (* Cap the gap before a comment at one blank line. *)
                      let lnum1 = min (Prims.of_int (2)) lnum in
                      let doc1 =
                        let uu___2 =
                          let uu___3 =
                            FStar_Pprint.repeat lnum1 FStar_Pprint.hardline in
                          let uu___4 = str comment in
                          FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                        FStar_Pprint.op_Hat_Hat doc uu___2 in
                      let uu___2 =
                        let uu___3 = FStar_Compiler_Range.end_of_range crange in
                        FStar_Compiler_Range.line_of_pos uu___3 in
                      place_comments_until_pos Prims.int_one uu___2 pos
                        meta_decl doc1 true init))
                | uu___1 ->
                    if doc = FStar_Pprint.empty
                    then FStar_Pprint.empty
                    else
                      (* No more comments before [pos]: just pad with the
                         right number of hardlines after [doc]. *)
                      (let lnum =
                         let uu___3 = FStar_Compiler_Range.line_of_pos pos in
                         uu___3 - lbegin in
                       let lnum1 = min (Prims.of_int (3)) lnum in
                       let lnum2 =
                         if meta_decl.has_qs || meta_decl.has_attrs
                         then lnum1 - Prims.int_one
                         else lnum1 in
                       let lnum3 = max k lnum2 in
                       let lnum4 =
                         if meta_decl.has_qs && meta_decl.has_attrs
                         then (Prims.of_int (2))
                         else lnum3 in
                       let lnum5 = if init then (Prims.of_int (2)) else lnum4 in
                       let uu___3 =
                         FStar_Pprint.repeat lnum5 FStar_Pprint.hardline in
                       FStar_Pprint.op_Hat_Hat doc uu___3)
(* [prefix ^^ f x0 ... sep ^^ f xn], interleaving stacked source comments
   between elements via [place_comments_until_pos].  The fold threads the
   line number where the previous element ended so blank lines can be
   reproduced.  NOTE(review): calls [FStar_Compiler_List.hd]/[tl], so [xs]
   must be non-empty. *)
let separate_map_with_comments :
  'uuuuu .
    FStar_Pprint.document ->
      FStar_Pprint.document ->
        ('uuuuu -> FStar_Pprint.document) ->
          'uuuuu Prims.list -> ('uuuuu -> decl_meta) -> FStar_Pprint.document
  =
  fun prefix ->
    fun sep ->
      fun f ->
        fun xs ->
          fun extract_meta ->
            let fold_fun uu___ x =
              match uu___ with
              | (last_line, doc) ->
                  let meta_decl = extract_meta x in
                  let r = meta_decl.r in
                  (* Flush comments occurring before this element. *)
                  let doc1 =
                    let uu___1 = FStar_Compiler_Range.start_of_range r in
                    place_comments_until_pos Prims.int_one last_line uu___1
                      meta_decl doc false false in
                  let uu___1 =
                    let uu___2 = FStar_Compiler_Range.end_of_range r in
                    FStar_Compiler_Range.line_of_pos uu___2 in
                  let uu___2 =
                    let uu___3 =
                      let uu___4 = f x in FStar_Pprint.op_Hat_Hat sep uu___4 in
                    FStar_Pprint.op_Hat_Hat doc1 uu___3 in
                  (uu___1, uu___2) in
            let uu___ =
              let uu___1 = FStar_Compiler_List.hd xs in
              let uu___2 = FStar_Compiler_List.tl xs in (uu___1, uu___2) in
            match uu___ with
            | (x, xs1) ->
                (* The first element is prefixed with [prefix] rather
                   than [sep]. *)
                let init =
                  let meta_decl = extract_meta x in
                  let uu___1 =
                    let uu___2 =
                      FStar_Compiler_Range.end_of_range meta_decl.r in
                    FStar_Compiler_Range.line_of_pos uu___2 in
                  let uu___2 =
                    let uu___3 = f x in FStar_Pprint.op_Hat_Hat prefix uu___3 in
                  (uu___1, uu___2) in
                let uu___1 = FStar_Compiler_List.fold_left fold_fun init xs1 in
                FStar_Pervasives_Native.snd uu___1
(* Variant of [separate_map_with_comments] where [f] receives the keyword
   document ([prefix] for the first element, [sep] for the rest) and
   performs the concatenation itself.  Same non-empty-list requirement. *)
let separate_map_with_comments_kw :
  'uuuuu 'uuuuu1 .
    'uuuuu ->
      'uuuuu ->
        ('uuuuu -> 'uuuuu1 -> FStar_Pprint.document) ->
          'uuuuu1 Prims.list ->
            ('uuuuu1 -> decl_meta) -> FStar_Pprint.document
  =
  fun prefix ->
    fun sep ->
      fun f ->
        fun xs ->
          fun extract_meta ->
            let fold_fun uu___ x =
              match uu___ with
              | (last_line, doc) ->
                  let meta_decl = extract_meta x in
                  let r = meta_decl.r in
                  let doc1 =
                    let uu___1 = FStar_Compiler_Range.start_of_range r in
                    place_comments_until_pos Prims.int_one last_line uu___1
                      meta_decl doc false false in
                  let uu___1 =
                    let uu___2 = FStar_Compiler_Range.end_of_range r in
                    FStar_Compiler_Range.line_of_pos uu___2 in
                  let uu___2 =
                    let uu___3 = f sep x in
                    FStar_Pprint.op_Hat_Hat doc1 uu___3 in
                  (uu___1, uu___2) in
            let uu___ =
              let uu___1 = FStar_Compiler_List.hd xs in
              let uu___2 = FStar_Compiler_List.tl xs in (uu___1, uu___2) in
            match uu___ with
            | (x, xs1) ->
                let init =
                  let meta_decl = extract_meta x in
                  let uu___1 =
                    let uu___2 =
                      FStar_Compiler_Range.end_of_range meta_decl.r in
                    FStar_Compiler_Range.line_of_pos uu___2 in
                  let uu___2 = f prefix x in (uu___1, uu___2) in
                let uu___1 = FStar_Compiler_List.fold_left fold_fun init xs1 in
                FStar_Pervasives_Native.snd uu___1
(* Print one character of a char/string literal, escaping control
   characters (ASCII codes: 8 backspace, 12 form feed, 10 newline, 9 tab,
   13 carriage return, 11 vertical tab, 0 NUL) and the surrounding
   [quote_char] (so ' inside a char literal, " inside a string literal). *)
let (p_char_literal' :
  FStar_Char.char -> FStar_BaseTypes.char -> FStar_Pprint.document) =
  fun quote_char ->
    fun c ->
      str
        (match c with
         | 8 -> "\\b"
         | 12 -> "\\f"
         | 10 -> "\\n"
         | 9 -> "\\t"
         | 13 -> "\\r"
         | 11 -> "\\v"
         | 0 -> "\\0"
         | c1 ->
             let s = FStar_Compiler_Util.string_of_char c1 in
             if quote_char = c1 then "\\" ^ s else s)
(* Print a char literal in single quotes (39 is ASCII '). *)
let (p_char_literal : FStar_BaseTypes.char -> FStar_Pprint.document) =
  fun c -> let uu___ = p_char_literal' 39 c in FStar_Pprint.squotes uu___
(* Print a string literal in double quotes, escaping each character with
   [p_char_literal'] (34 is ASCII "). *)
let (p_string_literal : Prims.string -> FStar_Pprint.document) =
  fun s ->
    let quotation_mark = 34 in
    let uu___ =
      FStar_Pprint.concat_map (p_char_literal' quotation_mark)
        (FStar_String.list_of_string s) in
    FStar_Pprint.dquotes uu___
(* Print a full declaration: attributes, then qualifiers, then the body
   (via [p_rawDecl]).  Special case: [assume X ...] where the name starts
   with an uppercase letter prints only the "assume" qualifier, since the
   "val" keyword is omitted by [p_rawDecl] in that case. *)
let rec (p_decl : FStar_Parser_AST.decl -> FStar_Pprint.document) =
  fun d ->
    let qualifiers =
      match ((d.FStar_Parser_AST.quals), (d.FStar_Parser_AST.d)) with
      | ((FStar_Parser_AST.Assumption)::[], FStar_Parser_AST.Assume
         (id, uu___)) ->
          let uu___1 =
            let uu___2 =
              let uu___3 = FStar_Ident.string_of_id id in
              FStar_Compiler_Util.char_at uu___3 Prims.int_zero in
            FStar_Compiler_Effect.op_Bar_Greater uu___2
              FStar_Compiler_Util.is_upper in
          if uu___1
          then
            let uu___2 = p_qualifier FStar_Parser_AST.Assumption in
            FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.space
          else p_qualifiers d.FStar_Parser_AST.quals
      | uu___ -> p_qualifiers d.FStar_Parser_AST.quals in
    let uu___ = p_attributes true d.FStar_Parser_AST.attrs in
    let uu___1 =
      let uu___2 = p_rawDecl d in FStar_Pprint.op_Hat_Hat qualifiers uu___2 in
    FStar_Pprint.op_Hat_Hat uu___ uu___1
(* Print an attribute list as [@@ a1; a2] (top level) or [@@@ a1; a2]
   (inner), followed by a hardline at top level.  Empty attribute lists
   produce nothing. *)
and (p_attributes :
  Prims.bool -> FStar_Parser_AST.attributes_ -> FStar_Pprint.document) =
  fun isTopLevel ->
    fun attrs ->
      match attrs with
      | [] -> FStar_Pprint.empty
      | uu___ ->
          let uu___1 =
            let uu___2 = str (if isTopLevel then "@@ " else "@@@ ") in
            let uu___3 =
              let uu___4 =
                let uu___5 =
                  let uu___6 =
                    let uu___7 = str "; " in
                    let uu___8 =
                      FStar_Compiler_List.map
                        (p_noSeqTermAndComment false false) attrs in
                    FStar_Pprint.flow uu___7 uu___8 in
                  FStar_Pprint.op_Hat_Hat uu___6 FStar_Pprint.rbracket in
                FStar_Pprint.align uu___5 in
              FStar_Pprint.op_Hat_Hat uu___4
                (if isTopLevel
                 then FStar_Pprint.hardline
                 else FStar_Pprint.empty) in
            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbracket uu___1
(* Print only the signature part of a declaration: the "val x :" header of
   a Val (with its type), or the "let" left-hand sides of a TopLevelLet;
   anything else prints as empty. *)
and (p_justSig : FStar_Parser_AST.decl -> FStar_Pprint.document) =
  fun d ->
    match d.FStar_Parser_AST.d with
    | FStar_Parser_AST.Val (lid, t) ->
        let uu___ =
          let uu___1 = str "val" in
          let uu___2 =
            let uu___3 =
              let uu___4 = p_lident lid in
              let uu___5 =
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.colon in
              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        let uu___1 = p_typ false false t in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.TopLevelLet (uu___, lbs) ->
        FStar_Pprint.separate_map FStar_Pprint.hardline
          (fun lb ->
             let uu___1 = let uu___2 = str "let" in p_letlhs uu___2 lb false in
             FStar_Pprint.group uu___1) lbs
    | uu___ -> FStar_Pprint.empty
(* Print identifiers [l] as "[x1<sep>x2<sep>...]" using [f] on each
   element; no separator after the last element. *)
and (p_list :
  (FStar_Ident.ident -> FStar_Pprint.document) ->
    FStar_Pprint.document ->
      FStar_Ident.ident Prims.list -> FStar_Pprint.document)
  =
  fun f ->
    fun sep ->
      fun l ->
        let rec p_list' uu___ =
          match uu___ with
          | [] -> FStar_Pprint.empty
          | x::[] -> f x
          | x::xs ->
              let uu___1 = f x in
              let uu___2 =
                let uu___3 = p_list' xs in FStar_Pprint.op_Hat_Hat sep uu___3 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        let uu___ = str "[" in
        let uu___1 =
          let uu___2 = p_list' l in
          let uu___3 = str "]" in FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
(* Print the body of a declaration, one match arm per declaration form.
   Qualifiers and attributes are handled by [p_decl], not here. *)
and (p_rawDecl : FStar_Parser_AST.decl -> FStar_Pprint.document) =
  fun d ->
    match d.FStar_Parser_AST.d with
    | FStar_Parser_AST.Open uid ->
        let uu___ =
          let uu___1 = str "open" in
          let uu___2 = p_quident uid in
          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.Include uid ->
        let uu___ =
          let uu___1 = str "include" in
          let uu___2 = p_quident uid in
          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.Friend uid ->
        let uu___ =
          let uu___1 = str "friend" in
          let uu___2 = p_quident uid in
          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.ModuleAbbrev (uid1, uid2) ->
        (* module A = B *)
        let uu___ =
          let uu___1 = str "module" in
          let uu___2 =
            let uu___3 =
              let uu___4 = p_uident uid1 in
              let uu___5 =
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                  FStar_Pprint.equals in
              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        let uu___1 = p_quident uid2 in op_Hat_Slash_Plus_Hat uu___ uu___1
    | FStar_Parser_AST.TopLevelModule uid ->
        let uu___ =
          let uu___1 = str "module" in
          let uu___2 =
            let uu___3 = p_quident uid in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.Tycon
        (true, uu___, (FStar_Parser_AST.TyconAbbrev
         (uid, tpars, FStar_Pervasives_Native.None, t))::[])
        ->
        (* effect E params = t  (effect abbreviation) *)
        let effect_prefix_doc =
          let uu___1 = str "effect" in
          let uu___2 =
            let uu___3 = p_uident uid in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        let uu___1 =
          let uu___2 = p_typars tpars in
          FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
            effect_prefix_doc uu___2 FStar_Pprint.equals in
        let uu___2 = p_typ false false t in
        op_Hat_Slash_Plus_Hat uu___1 uu___2
    | FStar_Parser_AST.Tycon (false, tc, tcdefs) ->
        (* type/class declaration, possibly with "and"-joined members *)
        let s = if tc then str "class" else str "type" in
        let uu___ =
          let uu___1 = FStar_Compiler_List.hd tcdefs in
          p_typeDeclWithKw s uu___1 in
        let uu___1 =
          let uu___2 = FStar_Compiler_List.tl tcdefs in
          FStar_Compiler_Effect.op_Less_Bar
            (FStar_Pprint.concat_map
               (fun x ->
                  let uu___3 =
                    let uu___4 = str "and" in p_typeDeclWithKw uu___4 x in
                  FStar_Pprint.op_Hat_Hat break1 uu___3)) uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.TopLevelLet (q, lbs) ->
        (* let [rec] lb1 and lb2 ..., interleaving source comments *)
        let let_doc =
          let uu___ = str "let" in
          let uu___1 = p_letqualifier q in
          FStar_Pprint.op_Hat_Hat uu___ uu___1 in
        let uu___ = str "and" in
        separate_map_with_comments_kw let_doc uu___ p_letbinding lbs
          (fun uu___1 ->
             match uu___1 with
             | (p, t) ->
                 let uu___2 =
                   FStar_Compiler_Range.union_ranges
                     p.FStar_Parser_AST.prange t.FStar_Parser_AST.range in
                 { r = uu___2; has_qs = false; has_attrs = false })
    | FStar_Parser_AST.Val (lid, t) ->
        let uu___ =
          let uu___1 = str "val" in
          let uu___2 =
            let uu___3 =
              let uu___4 = p_lident lid in
              let uu___5 = sig_as_binders_if_possible t false in
              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___
    | FStar_Parser_AST.Assume (id, t) ->
        (* "val" is dropped when the assumed name starts with an uppercase
           letter (mirrors the qualifier logic in [p_decl]). *)
        let decl_keyword =
          let uu___ =
            let uu___1 =
              let uu___2 = FStar_Ident.string_of_id id in
              FStar_Compiler_Util.char_at uu___2 Prims.int_zero in
            FStar_Compiler_Effect.op_Bar_Greater uu___1
              FStar_Compiler_Util.is_upper in
          if uu___
          then FStar_Pprint.empty
          else
            (let uu___2 = str "val" in
             FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.space) in
        let uu___ =
          let uu___1 = p_ident id in
          let uu___2 =
            let uu___3 =
              let uu___4 =
                let uu___5 = p_typ false false t in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___4 in
            FStar_Pprint.group uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        FStar_Pprint.op_Hat_Hat decl_keyword uu___
    | FStar_Parser_AST.Exception (uid, t_opt) ->
        (* exception E [of t] *)
        let uu___ = str "exception" in
        let uu___1 =
          let uu___2 =
            let uu___3 = p_uident uid in
            let uu___4 =
              FStar_Pprint.optional
                (fun t ->
                   let uu___5 =
                     let uu___6 = str "of" in
                     let uu___7 = p_typ false false t in
                     op_Hat_Slash_Plus_Hat uu___6 uu___7 in
                   FStar_Pprint.op_Hat_Hat break1 uu___5) t_opt in
            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.NewEffect ne ->
        let uu___ = str "new_effect" in
        let uu___1 =
          let uu___2 = p_newEffect ne in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.SubEffect se ->
        let uu___ = str "sub_effect" in
        let uu___1 =
          let uu___2 = p_subEffect se in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.LayeredEffect ne ->
        let uu___ = str "layered_effect" in
        let uu___1 =
          let uu___2 = p_newEffect ne in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.Polymonadic_bind (l1, l2, l3, t) ->
        (* polymonadic_bind (l1, l2) |> l3 = t *)
        let uu___ = str "polymonadic_bind" in
        let uu___1 =
          let uu___2 =
            let uu___3 = p_quident l1 in
            let uu___4 =
              let uu___5 =
                let uu___6 =
                  let uu___7 = p_quident l2 in
                  let uu___8 =
                    let uu___9 =
                      let uu___10 = str "|>" in
                      let uu___11 =
                        let uu___12 = p_quident l3 in
                        let uu___13 =
                          let uu___14 = p_simpleTerm false false t in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.equals uu___14 in
                        FStar_Pprint.op_Hat_Hat uu___12 uu___13 in
                      FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.rparen uu___9 in
                  FStar_Pprint.op_Hat_Hat uu___7 uu___8 in
                FStar_Pprint.op_Hat_Hat break1 uu___6 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.comma uu___5 in
            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.Pragma p -> p_pragma p
    | FStar_Parser_AST.Tycon (true, uu___, uu___1) ->
        (* Any other effect Tycon shape is rejected; only the single
           TyconAbbrev form above is a valid effect abbreviation. *)
        failwith
          "Effect abbreviation is expected to be defined by an abbreviation"
    | FStar_Parser_AST.Splice (ids, t) ->
        let uu___ = str "%splice" in
        let uu___1 =
          let uu___2 = let uu___3 = str ";" in p_list p_uident uu___3 ids in
          let uu___3 =
            let uu___4 = p_term false false t in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
          FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
and (p_pragma : FStar_Parser_AST.pragma -> FStar_Pprint.document) =
  (* Print a #-pragma.  Option-carrying pragmas render their string
     argument double-quoted and preceded by a space; the remaining
     pragmas are bare keywords. *)
  fun pragma ->
    (* Space followed by the double-quoted option string. *)
    let quoted_arg s =
      let quoted = FStar_Pprint.dquotes (str s) in
      FStar_Pprint.op_Hat_Hat FStar_Pprint.space quoted in
    match pragma with
    | FStar_Parser_AST.SetOptions s ->
        FStar_Pprint.op_Hat_Hat (str "#set-options") (quoted_arg s)
    | FStar_Parser_AST.ResetOptions s_opt ->
        (* The argument is optional: print nothing when absent. *)
        let arg = FStar_Pprint.optional quoted_arg s_opt in
        FStar_Pprint.op_Hat_Hat (str "#reset-options") arg
    | FStar_Parser_AST.PushOptions s_opt ->
        let arg = FStar_Pprint.optional quoted_arg s_opt in
        FStar_Pprint.op_Hat_Hat (str "#push-options") arg
    | FStar_Parser_AST.PopOptions -> str "#pop-options"
    | FStar_Parser_AST.RestartSolver -> str "#restart-solver"
    | FStar_Parser_AST.PrintEffectsGraph -> str "#print-effects-graph"
and (p_typars : FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document)
  =
  (* A tycon's type parameters are just a binder list; delegate to
     [p_binders] with its boolean flag fixed to [true]. *)
  fun binders -> p_binders true binders
(* Assemble a full type declaration from the four pieces produced by
   [p_typeDecl]: a floating comment [comm] (empty when none was found),
   the declaration prefix [decl], the [body], and a layout function
   [pre] positioning the body.  With no comment the pieces are simply
   concatenated; otherwise [ifflat] chooses between a flat layout with
   the comment trailing the body and a broken layout with the comment
   above the body, both indented under [decl]. *)
and (p_typeDeclWithKw :
  FStar_Pprint.document -> FStar_Parser_AST.tycon -> FStar_Pprint.document) =
  fun kw ->
    fun typedecl ->
      let uu___ = p_typeDecl kw typedecl in
      match uu___ with
      | (comm, decl, body, pre) ->
          if comm = FStar_Pprint.empty
          then let uu___1 = pre body in FStar_Pprint.op_Hat_Hat decl uu___1
          else
            (let uu___2 =
               (* Flat alternative: decl, laid-out body, comment after. *)
               let uu___3 =
                 let uu___4 =
                   let uu___5 = pre body in
                   FStar_Pprint.op_Hat_Slash_Hat uu___5 comm in
                 FStar_Pprint.op_Hat_Hat decl uu___4 in
               (* Broken alternative: comment, then body, each on its own
                  line, nested by 2 under decl. *)
               let uu___4 =
                 let uu___5 =
                   let uu___6 =
                     let uu___7 =
                       let uu___8 =
                         FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline body in
                       FStar_Pprint.op_Hat_Hat comm uu___8 in
                     FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___7 in
                   FStar_Pprint.nest (Prims.of_int (2)) uu___6 in
                 FStar_Pprint.op_Hat_Hat decl uu___5 in
               FStar_Pprint.ifflat uu___3 uu___4 in
             FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___2)
(* Decompose a type declaration into the quadruple
   (comment, prefix, body, layout) consumed by [p_typeDeclWithKw]:
   - the floating comment attached to the body (empty when none),
   - the printed "pre lid binders [: k] [=]" prefix,
   - the printed body (RHS type, record fields, or data constructors),
   - a function positioning the body relative to the prefix. *)
and (p_typeDecl :
  FStar_Pprint.document ->
    FStar_Parser_AST.tycon ->
      (FStar_Pprint.document * FStar_Pprint.document * FStar_Pprint.document
        * (FStar_Pprint.document -> FStar_Pprint.document)))
  =
  fun pre ->
    fun uu___ ->
      match uu___ with
      | FStar_Parser_AST.TyconAbstract (lid, bs, typ_opt) ->
          (* Abstract type: no body and no "=" after the prefix. *)
          let uu___1 = p_typeDeclPrefix pre false lid bs typ_opt in
          (FStar_Pprint.empty, uu___1, FStar_Pprint.empty,
            FStar_Pervasives.id)
      | FStar_Parser_AST.TyconAbbrev (lid, bs, typ_opt, t) ->
          (* Abbreviation: body is the RHS type; [p_typ_sep] also yields
             the comment it collected for that type. *)
          let uu___1 = p_typ_sep false false t in
          (match uu___1 with
           | (comm, doc) ->
               let uu___2 = p_typeDeclPrefix pre true lid bs typ_opt in
               (comm, uu___2, doc, jump2))
      | FStar_Parser_AST.TyconRecord
          (lid, bs, typ_opt, attrs, record_field_decls) ->
          (* Record: body is the attributes followed by the field block. *)
          let uu___1 = p_typeDeclPrefix pre true lid bs typ_opt in
          let uu___2 =
            let uu___3 = p_attributes false attrs in
            let uu___4 = p_typeDeclRecord record_field_decls in
            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
          (FStar_Pprint.empty, uu___1, uu___2,
            ((fun d -> FStar_Pprint.op_Hat_Hat FStar_Pprint.space d)))
      | FStar_Parser_AST.TyconVariant (lid, bs, typ_opt, ct_decls) ->
          (* Variant: one line per constructor, each rendered together
             with any comment located in its source range. *)
          let p_constructorBranchAndComments uu___1 =
            match uu___1 with
            | (uid, payload, attrs) ->
                (* Range spanning the constructor name through its payload
                   (when the payload carries a range), extended to the end
                   of the line — presumably so trailing comments on the
                   same line are captured; TODO confirm. *)
                let range =
                  let uu___2 =
                    let uu___3 = FStar_Ident.range_of_id uid in
                    let uu___4 =
                      FStar_Compiler_Util.bind_opt payload
                        (fun uu___5 ->
                           match uu___5 with
                           | FStar_Parser_AST.VpOfNotation t ->
                               FStar_Pervasives_Native.Some
                                 (t.FStar_Parser_AST.range)
                           | FStar_Parser_AST.VpArbitrary t ->
                               FStar_Pervasives_Native.Some
                                 (t.FStar_Parser_AST.range)
                           | FStar_Parser_AST.VpRecord (record, uu___6) ->
                               FStar_Pervasives_Native.None) in
                    FStar_Compiler_Util.dflt uu___3 uu___4 in
                  FStar_Compiler_Range.extend_to_end_of_line uu___2 in
                let uu___2 =
                  with_comment_sep p_constructorBranch (uid, payload, attrs)
                    range in
                (match uu___2 with
                 | (comm, ctor) ->
                     inline_comment_or_above comm ctor FStar_Pprint.empty) in
          let datacon_doc =
            FStar_Pprint.separate_map FStar_Pprint.hardline
              p_constructorBranchAndComments ct_decls in
          let uu___1 = p_typeDeclPrefix pre true lid bs typ_opt in
          (FStar_Pprint.empty, uu___1, datacon_doc, jump2)
(* Print a record type body "{ f1 : t1; ...; fn : tn }".  Each field is
   rendered together with any comment found up to the end of its source
   line; the whole list is wrapped in nested braces. *)
and (p_typeDeclRecord :
  FStar_Parser_AST.tycon_record -> FStar_Pprint.document) =
  fun fields ->
    let p_recordField ps uu___ =
      match uu___ with
      | (lid, aq, attrs, t) ->
          let uu___1 =
            let uu___2 =
              FStar_Compiler_Range.extend_to_end_of_line
                t.FStar_Parser_AST.range in
            with_comment_sep (p_recordFieldDecl ps) (lid, aq, attrs, t)
              uu___2 in
          (match uu___1 with
           | (comm, field) ->
               (* [ps] comes from [separate_map_last]; when true a ";"
                  follows the field — presumably false only on the last
                  field, so it carries no trailing semicolon. *)
               let sep = if ps then FStar_Pprint.semi else FStar_Pprint.empty in
               inline_comment_or_above comm field sep) in
    let uu___ = separate_map_last FStar_Pprint.hardline p_recordField fields in
    FStar_Compiler_Effect.op_Bar_Greater uu___ braces_with_nesting
(* Print the prefix of a type declaration: the keyword document [kw]
   followed by the type name [lid], its binders [bs], an optional kind
   ascription ": k", and — when [eq] is true — a trailing "=". *)
and (p_typeDeclPrefix :
  FStar_Pprint.document ->
    Prims.bool ->
      FStar_Ident.ident ->
        FStar_Parser_AST.binder Prims.list ->
          FStar_Parser_AST.knd FStar_Pervasives_Native.option ->
            FStar_Pprint.document)
  =
  fun kw ->
    fun eq ->
      fun lid ->
        fun bs ->
          fun typ_opt ->
            (* Apply [cont] to the grouped "kw lid" head document. *)
            let with_kw cont =
              let lid_doc = p_ident lid in
              let kw_lid =
                let uu___ = FStar_Pprint.op_Hat_Slash_Hat kw lid_doc in
                FStar_Pprint.group uu___ in
              cont kw_lid in
            (* The tail: ": k =", bare "=", or nothing, depending on
               [typ_opt] and [eq]. *)
            let typ =
              let maybe_eq =
                if eq then FStar_Pprint.equals else FStar_Pprint.empty in
              match typ_opt with
              | FStar_Pervasives_Native.None -> maybe_eq
              | FStar_Pervasives_Native.Some t ->
                  let uu___ =
                    let uu___1 =
                      let uu___2 = p_typ false false t in
                      FStar_Pprint.op_Hat_Slash_Hat uu___2 maybe_eq in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___ in
            if bs = []
            then with_kw (fun n -> prefix2 n typ)
            else
              (* With binders: flow them after the head, then the tail. *)
              (let binders = p_binders_list true bs in
               with_kw
                 (fun n ->
                    let uu___1 =
                      let uu___2 = FStar_Pprint.flow break1 binders in
                      prefix2 n uu___2 in
                    prefix2 uu___1 typ))
(* Print one record field as "[aqual] attrs lid : t", grouped so it fits
   on one line when possible.  [ps] is forwarded to [p_typ] for the
   field's type (see the caller, [p_typeDeclRecord]). *)
and (p_recordFieldDecl :
  Prims.bool ->
    (FStar_Ident.ident * FStar_Parser_AST.aqual *
      FStar_Parser_AST.attributes_ * FStar_Parser_AST.term) ->
      FStar_Pprint.document)
  =
  fun ps ->
    fun uu___ ->
      match uu___ with
      | (lid, aq, attrs, t) ->
          let uu___1 =
            (* Optional argument qualifier first, then attributes, then
               "lid : t". *)
            let uu___2 = FStar_Pprint.optional p_aqual aq in
            let uu___3 =
              let uu___4 = p_attributes false attrs in
              let uu___5 =
                let uu___6 = p_lident lid in
                let uu___7 =
                  let uu___8 = p_typ ps false t in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___8 in
                FStar_Pprint.op_Hat_Hat uu___6 uu___7 in
              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
          FStar_Pprint.group uu___1
(* Print one data constructor branch: "| attrs Uid" followed, when a
   payload is present, by "of t" (VpOfNotation), ": t" (VpArbitrary), or
   a record body optionally followed by ": t" (VpRecord). *)
and (p_constructorBranch :
  (FStar_Ident.ident * FStar_Parser_AST.constructor_payload
    FStar_Pervasives_Native.option * FStar_Parser_AST.attributes_) ->
    FStar_Pprint.document)
  =
  fun uu___ ->
    match uu___ with
    | (uid, variant, attrs) ->
        (* [h isOf t] prints the payload head: "of t" or ": t". *)
        let h isOf t =
          let uu___1 = if isOf then str "of" else FStar_Pprint.colon in
          let uu___2 =
            let uu___3 = p_typ false false t in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        (* "| attrs Uid", grouped. *)
        let uu___1 =
          let uu___2 =
            let uu___3 =
              let uu___4 =
                let uu___5 = p_attributes false attrs in
                let uu___6 = p_uident uid in
                FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___3 in
          FStar_Pprint.group uu___2 in
        (* The payload document, empty when [variant] is None. *)
        let uu___2 =
          default_or_map FStar_Pprint.empty
            (fun payload ->
               let uu___3 =
                 let uu___4 =
                   match payload with
                   | FStar_Parser_AST.VpOfNotation t -> h true t
                   | FStar_Parser_AST.VpArbitrary t -> h false t
                   | FStar_Parser_AST.VpRecord (r, t) ->
                       let uu___5 = p_typeDeclRecord r in
                       let uu___6 =
                         default_or_map FStar_Pprint.empty (h false) t in
                       FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                 FStar_Pprint.group uu___4 in
               FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3) variant in
        FStar_Pprint.op_Hat_Hat uu___1 uu___2
(* Print the left-hand side of a let binding: the keyword document [kw]
   ("let", "and", ...), the bound pattern, and any type ascription
   carried by a [PatAscribed] pattern.  A "by <tactic>" clause on the
   ascription is rendered after the type.  When the pattern applies a
   variable to argument patterns ("let f x1 ... xn"), the arguments are
   printed as binders when their shape allows; otherwise the whole
   pattern is printed as a tuple pattern followed by the ascription.
   Fix over the previous revision: the [if inner_let] dispatch had two
   byte-identical branches (both called [pats_as_binders_if_possible]
   and rebuilt the pair unchanged), so the conditional is collapsed to a
   single call.  The parameter is kept (renamed [_inner_let]) so the
   arity and call sites are unchanged. *)
and (p_letlhs :
  FStar_Pprint.document ->
    (FStar_Parser_AST.pattern * FStar_Parser_AST.term) ->
      Prims.bool -> FStar_Pprint.document)
  =
  fun kw ->
    fun uu___ ->
      fun _inner_let ->
        match uu___ with
        | (pat, uu___1) ->
            (* Split an ascribed pattern into the inner pattern and an
               optional (type, tactic-document) pair. *)
            let uu___2 =
              match pat.FStar_Parser_AST.pat with
              | FStar_Parser_AST.PatAscribed
                  (pat1, (t, FStar_Pervasives_Native.None)) ->
                  (pat1,
                    (FStar_Pervasives_Native.Some (t, FStar_Pprint.empty)))
              | FStar_Parser_AST.PatAscribed
                  (pat1, (t, FStar_Pervasives_Native.Some tac)) ->
                  (* Render " by <tac>", unthunking the tactic term. *)
                  let uu___3 =
                    let uu___4 =
                      let uu___5 =
                        let uu___6 =
                          let uu___7 =
                            let uu___8 = str "by" in
                            let uu___9 =
                              let uu___10 = p_atomicTerm (maybe_unthunk tac) in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___10 in
                            FStar_Pprint.op_Hat_Hat uu___8 uu___9 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                        FStar_Pprint.group uu___6 in
                      (t, uu___5) in
                    FStar_Pervasives_Native.Some uu___4 in
                  (pat1, uu___3)
              | uu___3 -> (pat, FStar_Pervasives_Native.None) in
            (match uu___2 with
             | (pat1, ascr) ->
                 (match pat1.FStar_Parser_AST.pat with
                  | FStar_Parser_AST.PatApp
                      ({
                         FStar_Parser_AST.pat = FStar_Parser_AST.PatVar
                           (lid, uu___3, uu___4);
                         FStar_Parser_AST.prange = uu___5;_},
                       pats)
                      ->
                      (* "kw f x1 ... xn [: t [by tac]]": try to print the
                         ascription as binders when possible. *)
                      let ascr_doc =
                        match ascr with
                        | FStar_Pervasives_Native.Some (t, tac) ->
                            let uu___6 = sig_as_binders_if_possible t true in
                            FStar_Pprint.op_Hat_Hat uu___6 tac
                        | FStar_Pervasives_Native.None -> FStar_Pprint.empty in
                      (* Formerly an [if inner_let] with identical arms. *)
                      let uu___6 = pats_as_binders_if_possible pats in
                      (match uu___6 with
                       | (terms, style) ->
                           let uu___7 =
                             let uu___8 =
                               let uu___9 =
                                 let uu___10 = p_lident lid in
                                 let uu___11 =
                                   format_sig style terms ascr_doc true true in
                                 FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                               FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                 uu___9 in
                             FStar_Pprint.op_Hat_Hat kw uu___8 in
                           FStar_Compiler_Effect.op_Less_Bar
                             FStar_Pprint.group uu___7)
                  | uu___3 ->
                      (* Any other pattern: "kw pat [: t [by tac]]". *)
                      let ascr_doc =
                        match ascr with
                        | FStar_Pervasives_Native.Some (t, tac) ->
                            let uu___4 =
                              let uu___5 =
                                let uu___6 =
                                  p_typ_top
                                    (Arrows
                                       ((Prims.of_int (2)),
                                         (Prims.of_int (2)))) false false t in
                                FStar_Pprint.op_Hat_Hat FStar_Pprint.colon
                                  uu___6 in
                              FStar_Pprint.group uu___5 in
                            FStar_Pprint.op_Hat_Hat uu___4 tac
                        | FStar_Pervasives_Native.None -> FStar_Pprint.empty in
                      let uu___4 =
                        let uu___5 =
                          let uu___6 =
                            let uu___7 = p_tuplePattern pat1 in
                            FStar_Pprint.op_Hat_Slash_Hat kw uu___7 in
                          FStar_Pprint.group uu___6 in
                        FStar_Pprint.op_Hat_Hat uu___5 ascr_doc in
                      FStar_Pprint.group uu___4))
(* Print a full let binding.  The LHS comes from [p_letlhs]; the RHS is
   printed by [p_term_sep], which also returns any comment it collected.
   Two layouts are offered to [ifflat]: "pat = expr" with breakable
   spaces around "=", or "pat =" with the expression jumped to the next
   line and indented by 2. *)
and (p_letbinding :
  FStar_Pprint.document ->
    (FStar_Parser_AST.pattern * FStar_Parser_AST.term) ->
      FStar_Pprint.document)
  =
  fun kw ->
    fun uu___ ->
      match uu___ with
      | (pat, e) ->
          let doc_pat = p_letlhs kw (pat, e) false in
          let uu___1 = p_term_sep false false e in
          (match uu___1 with
           | (comm, doc_expr) ->
               (* Attach the collected comment to the expression. *)
               let doc_expr1 =
                 inline_comment_or_above comm doc_expr FStar_Pprint.empty in
               (* Layout 1: pat ^/ "=" ^/ expr. *)
               let uu___2 =
                 let uu___3 =
                   FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.equals
                     doc_expr1 in
                 FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3 in
               (* Layout 2: pat " =" with the expr jumped and indented. *)
               let uu___3 =
                 let uu___4 =
                   let uu___5 =
                     let uu___6 =
                       let uu___7 = jump2 doc_expr1 in
                       FStar_Pprint.op_Hat_Hat FStar_Pprint.equals uu___7 in
                     FStar_Pprint.group uu___6 in
                   FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
                 FStar_Pprint.op_Hat_Hat doc_pat uu___4 in
               FStar_Pprint.ifflat uu___2 uu___3)
and (p_term_list :
  Prims.bool ->
    Prims.bool -> FStar_Parser_AST.term Prims.list -> FStar_Pprint.document)
  =
  (* Print a term list as "[t1;t2;...;tn]"; [ps] and [pb] are forwarded
     to [p_term] for each element. *)
  fun ps ->
    fun pb ->
      fun l ->
        (* Elements separated by ";", with no separator after the last. *)
        let rec elems ts =
          match ts with
          | [] -> FStar_Pprint.empty
          | t::[] -> p_term ps pb t
          | t::rest ->
              let head = p_term ps pb t in
              let tail = FStar_Pprint.op_Hat_Hat (str ";") (elems rest) in
              FStar_Pprint.op_Hat_Hat head tail in
        let lbracket = str "[" in
        let body =
          let items = elems l in
          FStar_Pprint.op_Hat_Hat items (str "]") in
        FStar_Pprint.op_Hat_Hat lbracket body
and (p_newEffect : FStar_Parser_AST.effect_decl -> FStar_Pprint.document) =
  (* Dispatch on the two flavours of effect declarations. *)
  fun ed ->
    match ed with
    | FStar_Parser_AST.DefineEffect (lid, bs, t, eff_decls) ->
        p_effectDefinition lid bs t eff_decls
    | FStar_Parser_AST.RedefineEffect (lid, bs, t) ->
        p_effectRedefinition lid bs t
and (p_effectRedefinition :
  FStar_Ident.ident ->
    FStar_Parser_AST.binder Prims.list ->
      FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Print an effect redefinition "uid bs = t"; the surround allows the
     binder list to be empty. *)
  fun uid ->
    fun bs ->
      fun t ->
        let head = p_uident uid in
        let binders = p_binders true bs in
        let rhs = prefix2 FStar_Pprint.equals (p_simpleTerm false false t) in
        surround_maybe_empty (Prims.of_int (2)) Prims.int_one head binders
          rhs
(* Print a full effect definition inside braces:
   "{ uid bs : t with d1; d2; ... }", each member declaration printed by
   [p_effectDecl] on its own line. *)
and (p_effectDefinition :
  FStar_Ident.ident ->
    FStar_Parser_AST.binder Prims.list ->
      FStar_Parser_AST.term ->
        FStar_Parser_AST.decl Prims.list -> FStar_Pprint.document)
  =
  fun uid ->
    fun bs ->
      fun t ->
        fun eff_decls ->
          (* NOTE(review): [binders] is never used below — the binder
             list is re-rendered as [uu___4].  Left in place since
             removing the extra [p_binders] call could, in principle,
             change side effects; confirm [p_binders] is pure before
             cleaning this up. *)
          let binders = p_binders true bs in
          let uu___ =
            (* "uid bs : t", grouped. *)
            let uu___1 =
              let uu___2 =
                let uu___3 = p_uident uid in
                let uu___4 = p_binders true bs in
                let uu___5 =
                  let uu___6 = p_typ false false t in
                  prefix2 FStar_Pprint.colon uu___6 in
                surround_maybe_empty (Prims.of_int (2)) Prims.int_one uu___3
                  uu___4 uu___5 in
              FStar_Pprint.group uu___2 in
            (* "with" followed by the member declarations, separated by
               hardline + "; ". *)
            let uu___2 =
              let uu___3 = str "with" in
              let uu___4 =
                let uu___5 =
                  let uu___6 =
                    let uu___7 =
                      let uu___8 =
                        let uu___9 =
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.semi
                            FStar_Pprint.space in
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___9 in
                      separate_map_last uu___8 p_effectDecl eff_decls in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___6 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___5 in
              FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
            FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
          braces_with_nesting uu___
and (p_effectDecl :
  Prims.bool -> FStar_Parser_AST.decl -> FStar_Pprint.document) =
  (* Print one effect-member declaration "lid = e".  Only a non-effect
     Tycon holding a single parameterless, unascribed TyconAbbrev is a
     valid member; anything else is a hard failure. *)
  fun ps ->
    fun d ->
      match d.FStar_Parser_AST.d with
      | FStar_Parser_AST.Tycon
          (false, uu___, (FStar_Parser_AST.TyconAbbrev
           (lid, [], FStar_Pervasives_Native.None, e))::[])
          ->
          let lhs =
            let name = p_lident lid in
            let eq_part =
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.equals in
            FStar_Pprint.op_Hat_Hat name eq_part in
          let rhs = p_simpleTerm ps false e in
          prefix2 lhs rhs
      | uu___ ->
          let msg =
            let shown = FStar_Parser_AST.decl_to_string d in
            FStar_Compiler_Util.format1
              "Not a declaration of an effect member... or at least I hope so : %s"
              shown in
          failwith msg
(* Print a sub-effect (lift) declaration:
   "Msource ~> Mdest { lift_wp = t; ... }".  The brace block lists one
   "kwd = term" entry per lift component; which keywords appear depends
   on the lift flavour (NonReifiableLift, ReifiableLift, LiftForFree). *)
and (p_subEffect : FStar_Parser_AST.lift -> FStar_Pprint.document) =
  fun lift ->
    let lift_op_doc =
      (* Keyword/term pairs for each lift flavour. *)
      let lifts =
        match lift.FStar_Parser_AST.lift_op with
        | FStar_Parser_AST.NonReifiableLift t -> [("lift_wp", t)]
        | FStar_Parser_AST.ReifiableLift (t1, t2) ->
            [("lift_wp", t1); ("lift", t2)]
        | FStar_Parser_AST.LiftForFree t -> [("lift", t)] in
      (* One "kwd = t" entry; [ps] comes from [separate_break_map_last]. *)
      let p_lift ps uu___ =
        match uu___ with
        | (kwd, t) ->
            let uu___1 =
              let uu___2 = str kwd in
              let uu___3 =
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                  FStar_Pprint.equals in
              FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
            let uu___2 = p_simpleTerm ps false t in prefix2 uu___1 uu___2 in
      separate_break_map_last FStar_Pprint.semi p_lift lifts in
    (* "Msource ~> Mdest" ... *)
    let uu___ =
      let uu___1 =
        let uu___2 = p_quident lift.FStar_Parser_AST.msource in
        let uu___3 =
          let uu___4 = str "~>" in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
        FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
      let uu___2 = p_quident lift.FStar_Parser_AST.mdest in
      prefix2 uu___1 uu___2 in
    (* ... followed by the brace block. *)
    let uu___1 =
      let uu___2 = braces_with_nesting lift_op_doc in
      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
    FStar_Pprint.op_Hat_Hat uu___ uu___1
and (p_qualifier : FStar_Parser_AST.qualifier -> FStar_Pprint.document) =
  (* Map each declaration qualifier to its concrete-syntax keyword.  The
     two qualifiers with no surface syntax print as the empty document
     and are grouped first. *)
  fun q ->
    match q with
    | FStar_Parser_AST.Effect_qual -> FStar_Pprint.empty
    | FStar_Parser_AST.Visible -> FStar_Pprint.empty
    | FStar_Parser_AST.Private -> str "private"
    | FStar_Parser_AST.Noeq -> str "noeq"
    | FStar_Parser_AST.Unopteq -> str "unopteq"
    | FStar_Parser_AST.Assumption -> str "assume"
    | FStar_Parser_AST.DefaultEffect -> str "default"
    | FStar_Parser_AST.TotalEffect -> str "total"
    | FStar_Parser_AST.New -> str "new"
    | FStar_Parser_AST.Inline -> str "inline"
    | FStar_Parser_AST.Unfold_for_unification_and_vcgen -> str "unfold"
    | FStar_Parser_AST.Inline_for_extraction -> str "inline_for_extraction"
    | FStar_Parser_AST.Irreducible -> str "irreducible"
    | FStar_Parser_AST.NoExtract -> str "noextract"
    | FStar_Parser_AST.Reifiable -> str "reifiable"
    | FStar_Parser_AST.Reflectable -> str "reflectable"
    | FStar_Parser_AST.Opaque -> str "opaque"
    | FStar_Parser_AST.Logic -> str "logic"
and (p_qualifiers : FStar_Parser_AST.qualifiers -> FStar_Pprint.document) =
  (* Print a qualifier list followed by a hardline; an empty list prints
     nothing at all (no hardline). *)
  fun qs ->
    match qs with
    | [] -> FStar_Pprint.empty
    | q::[] ->
        FStar_Pprint.op_Hat_Hat (p_qualifier q) FStar_Pprint.hardline
    | uu___ ->
        (* Several qualifiers flow onto as few lines as fit. *)
        let flowed =
          let docs = FStar_Compiler_List.map p_qualifier qs in
          FStar_Pprint.flow break1 docs in
        FStar_Pprint.op_Hat_Hat flowed FStar_Pprint.hardline
and (p_letqualifier :
  FStar_Parser_AST.let_qualifier -> FStar_Pprint.document) =
  (* "rec" is preceded by a space so it glues onto the "let" keyword;
     the absent qualifier prints nothing. *)
  fun q ->
    match q with
    | FStar_Parser_AST.NoLetQualifier -> FStar_Pprint.empty
    | FStar_Parser_AST.Rec ->
        FStar_Pprint.op_Hat_Hat FStar_Pprint.space (str "rec")
(* Print an argument qualifier: "#" for implicit arguments, "$" for
   equality arguments, "#[attr]" for meta (attribute) arguments, and
   nothing for typeclass arguments. *)
and (p_aqual : FStar_Parser_AST.arg_qualifier -> FStar_Pprint.document) =
  fun uu___ ->
    match uu___ with
    | FStar_Parser_AST.Implicit -> str "#"
    | FStar_Parser_AST.Equality -> str "$"
    | FStar_Parser_AST.Meta t ->
        (* An abstraction is unwrapped to its body; any other attribute
           term is applied to the unit constant at the same range —
           NOTE(review): presumably undoing a thunking introduced by the
           parser/desugarer; confirm against the front end. *)
        let t1 =
          match t.FStar_Parser_AST.tm with
          | FStar_Parser_AST.Abs (uu___1, e) -> e
          | uu___1 ->
              FStar_Parser_AST.mk_term
                (FStar_Parser_AST.App
                   (t,
                     (FStar_Parser_AST.unit_const t.FStar_Parser_AST.range),
                     FStar_Parser_AST.Nothing)) t.FStar_Parser_AST.range
                FStar_Parser_AST.Expr in
        let uu___1 = str "#[" in
        let uu___2 =
          let uu___3 = p_term false false t1 in
          let uu___4 =
            let uu___5 = str "]" in FStar_Pprint.op_Hat_Hat uu___5 break1 in
          FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
        FStar_Pprint.op_Hat_Hat uu___1 uu___2
    | FStar_Parser_AST.TypeClassArg -> FStar_Pprint.empty
and (p_disjunctivePattern :
  FStar_Parser_AST.pattern -> FStar_Pprint.document) =
  (* Or-patterns print as "p1 | p2 | ..." with a breakable space before
     each bar; any other pattern falls through to [p_tuplePattern]. *)
  fun p ->
    match p.FStar_Parser_AST.pat with
    | FStar_Parser_AST.PatOr pats ->
        let bar_sep =
          let bar_space =
            FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.space in
          FStar_Pprint.op_Hat_Hat break1 bar_space in
        let joined =
          FStar_Pprint.separate_map bar_sep p_tuplePattern pats in
        FStar_Pprint.group joined
    | uu___ -> p_tuplePattern p
and (p_tuplePattern : FStar_Parser_AST.pattern -> FStar_Pprint.document) =
  (* Non-dependent tuple patterns print as comma-separated constructor
     patterns; anything else falls through to [p_constructorPattern]. *)
  fun p ->
    match p.FStar_Parser_AST.pat with
    | FStar_Parser_AST.PatTuple (pats, false) ->
        let comma_sep =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in
        let joined =
          FStar_Pprint.separate_map comma_sep p_constructorPattern pats in
        FStar_Pprint.group joined
    | uu___ -> p_constructorPattern p
and (p_constructorPattern :
  FStar_Parser_AST.pattern -> FStar_Pprint.document) =
  (* Applications of the cons constructor to exactly two arguments print
     infix as "hd :: tl"; other constructor applications print as
     "C p1 ... pn"; anything else falls through to [p_atomicPattern]. *)
  fun p ->
    match p.FStar_Parser_AST.pat with
    | FStar_Parser_AST.PatApp
        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName maybe_cons_lid;
           FStar_Parser_AST.prange = uu___;_},
         hd::tl::[])
        when
        FStar_Ident.lid_equals maybe_cons_lid FStar_Parser_Const.cons_lid ->
        let cons_op =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.colon FStar_Pprint.colon in
        let hd_doc = p_constructorPattern hd in
        let tl_doc = p_constructorPattern tl in
        infix0 cons_op hd_doc tl_doc
    | FStar_Parser_AST.PatApp
        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName uid;
           FStar_Parser_AST.prange = uu___;_},
         pats)
        ->
        let head = p_quident uid in
        let args = FStar_Pprint.separate_map break1 p_atomicPattern pats in
        prefix2 head args
    | uu___ -> p_atomicPattern p
and (p_atomicPattern : FStar_Parser_AST.pattern -> FStar_Pprint.document) =
  fun p ->
    match p.FStar_Parser_AST.pat with
    | FStar_Parser_AST.PatAscribed (pat, (t, FStar_Pervasives_Native.None))
        ->
        (match ((pat.FStar_Parser_AST.pat), (t.FStar_Parser_AST.tm)) with
         | (FStar_Parser_AST.PatVar (lid, aqual, attrs),
            FStar_Parser_AST.Refine
            ({ FStar_Parser_AST.b = FStar_Parser_AST.Annotated (lid', t1);
               FStar_Parser_AST.brange = uu___;
               FStar_Parser_AST.blevel = uu___1;
               FStar_Parser_AST.aqual = uu___2;
               FStar_Parser_AST.battributes = uu___3;_},
             phi)) when
             let uu___4 = FStar_Ident.string_of_id lid in
             let uu___5 = FStar_Ident.string_of_id lid' in uu___4 = uu___5 ->
             let uu___4 =
               let uu___5 = p_ident lid in
               p_refinement aqual attrs uu___5 t1 phi in
             soft_parens_with_nesting uu___4
         | (FStar_Parser_AST.PatWild (aqual, attrs), FStar_Parser_AST.Refine
            ({ FStar_Parser_AST.b = FStar_Parser_AST.NoName t1;
               FStar_Parser_AST.brange = uu___;
               FStar_Parser_AST.blevel = uu___1;
               FStar_Parser_AST.aqual = uu___2;
               FStar_Parser_AST.battributes = uu___3;_},
             phi)) ->
             let uu___4 =
               p_refinement aqual attrs FStar_Pprint.underscore t1 phi in
             soft_parens_with_nesting uu___4
         | (FStar_Parser_AST.PatVar (uu___, aqual, uu___1), uu___2) ->
             let wrap =
               if
                 aqual =
                   (FStar_Pervasives_Native.Some
                      FStar_Parser_AST.TypeClassArg)
               then tc_arg
               else soft_parens_with_nesting in
             let uu___3 =
               let uu___4 = p_tuplePattern pat in
               let uu___5 =
                 let uu___6 = p_tmEqNoRefinement t in
                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___6 in
               FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
             wrap uu___3
         | (FStar_Parser_AST.PatWild (aqual, uu___), uu___1) ->
             let wrap =
               if
                 aqual =
                   (FStar_Pervasives_Native.Some
                      FStar_Parser_AST.TypeClassArg)
               then tc_arg
               else soft_parens_with_nesting in
             let uu___2 =
               let uu___3 = p_tuplePattern pat in
               let uu___4 =
                 let uu___5 = p_tmEqNoRefinement t in
                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___5 in
               FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
             wrap uu___2
         | uu___ ->
             let uu___1 =
               let uu___2 = p_tuplePattern pat in
               let uu___3 =
                 let uu___4 = p_tmEqNoRefinement t in
                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in
               FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
             soft_parens_with_nesting uu___1)
    | FStar_Parser_AST.PatList pats ->
        let uu___ = separate_break_map FStar_Pprint.semi p_tuplePattern pats in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero
          FStar_Pprint.lbracket uu___ FStar_Pprint.rbracket
    | FStar_Parser_AST.PatRecord pats ->
        let p_recordFieldPat uu___ =
          match uu___ with
          | (lid, pat) ->
              let uu___1 = p_qlident lid in
              let uu___2 = p_tuplePattern pat in
              infix2 FStar_Pprint.equals uu___1 uu___2 in
        let uu___ =
          separate_break_map FStar_Pprint.semi p_recordFieldPat pats in
        soft_braces_with_nesting uu___
    | FStar_Parser_AST.PatTuple (pats, true) ->
        let uu___ =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen FStar_Pprint.bar in
        let uu___1 =
          separate_break_map FStar_Pprint.comma p_constructorPattern pats in
        let uu___2 =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rparen in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one uu___ uu___1
          uu___2
    | FStar_Parser_AST.PatTvar (tv, arg_qualifier_opt, attrs) -> p_tvar tv
    | FStar_Parser_AST.PatOp op ->
        let uu___ =
          let uu___1 =
            let uu___2 =
              let uu___3 = FStar_Ident.string_of_id op in str uu___3 in
            let uu___3 =
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.rparen in
            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in
        FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___
    | FStar_Parser_AST.PatWild (aqual, attrs) ->
        let uu___ = FStar_Pprint.optional p_aqual aqual in
        let uu___1 =
          let uu___2 = p_attributes false attrs in
          FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.underscore in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.PatConst c -> p_constant c
    | FStar_Parser_AST.PatVQuote e ->
        let uu___ =
          let uu___1 = str "`%" in
          let uu___2 = p_noSeqTermAndComment false false e in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.PatVar (lid, aqual, attrs) ->
        let uu___ = FStar_Pprint.optional p_aqual aqual in
        let uu___1 =
          let uu___2 = p_attributes false attrs in
          let uu___3 = p_lident lid in FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.PatName uid -> p_quident uid
    | FStar_Parser_AST.PatOr uu___ -> failwith "Inner or pattern !"
    | FStar_Parser_AST.PatApp
        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName uu___;
           FStar_Parser_AST.prange = uu___1;_},
         uu___2)
        -> let uu___3 = p_tuplePattern p in soft_parens_with_nesting uu___3
    | FStar_Parser_AST.PatTuple (uu___, false) ->
        let uu___1 = p_tuplePattern p in soft_parens_with_nesting uu___1
    | uu___ ->
        let uu___1 =
          let uu___2 = FStar_Parser_AST.pat_to_string p in
          FStar_Compiler_Util.format1 "Invalid pattern %s" uu___2 in
        failwith uu___1
and (is_typ_tuple : FStar_Parser_AST.term -> Prims.bool) =
  (* A type prints as a tuple exactly when it is an application of the
     "*" operator. *)
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (op_id, uu___) ->
        let op_name = FStar_Ident.string_of_id op_id in op_name = "*"
    | uu___ -> false
and (p_binder :
  Prims.bool -> FStar_Parser_AST.binder -> FStar_Pprint.document) =
  (* Print a binder as a single document. Typeclass binders are never
     rendered atomically by [p_binder']; instead they are wrapped with
     [tc_arg] once fully assembled. *)
  fun is_atomic ->
    fun b ->
      let tc = is_tc_binder b in
      let (b_doc, annot) =
        p_binder' false (is_atomic && (Prims.op_Negation tc)) b in
      let joined =
        match annot with
        | FStar_Pervasives_Native.Some (typ, cat) -> cat b_doc typ
        | FStar_Pervasives_Native.None -> b_doc in
      if tc then tc_arg joined else joined
and (p_binder' :
  Prims.bool ->
    Prims.bool ->
      FStar_Parser_AST.binder ->
        (FStar_Pprint.document * (FStar_Pprint.document * catf)
          FStar_Pervasives_Native.option))
  =
  (* Render a binder, returning the document for the binder itself plus an
     optional (type-annotation document, combining function) pair; the
     caller (see [p_binder]) applies the combiner when present.
     [no_pars]  suppresses the parentheses otherwise forced around
     meta-qualified binders; [is_atomic] requests a parenthesised
     rendering of the annotated form. *)
  fun no_pars ->
    fun is_atomic ->
      fun b ->
        match b.FStar_Parser_AST.b with
        | FStar_Parser_AST.Variable lid ->
            (* Plain variable: aqual ^^ attributes ^^ name; no annotation. *)
            let uu___ =
              let uu___1 =
                FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in
              let uu___2 =
                let uu___3 =
                  p_attributes false b.FStar_Parser_AST.battributes in
                let uu___4 = p_lident lid in
                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            (uu___, FStar_Pervasives_Native.None)
        | FStar_Parser_AST.TVariable lid ->
            (* Type variable: attributes ^^ name (no aqual); no annotation. *)
            let uu___ =
              let uu___1 = p_attributes false b.FStar_Parser_AST.battributes in
              let uu___2 = p_lident lid in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            (uu___, FStar_Pervasives_Native.None)
        | FStar_Parser_AST.Annotated (lid, t) ->
            let uu___ =
              match t.FStar_Parser_AST.tm with
              | FStar_Parser_AST.Refine
                  ({
                     FStar_Parser_AST.b = FStar_Parser_AST.Annotated
                       (lid', t1);
                     FStar_Parser_AST.brange = uu___1;
                     FStar_Parser_AST.blevel = uu___2;
                     FStar_Parser_AST.aqual = uu___3;
                     FStar_Parser_AST.battributes = uu___4;_},
                   phi)
                  when
                  (* The annotation is a refinement whose inner binder has
                     the same name as this one: print "x : t1 {phi}". *)
                  let uu___5 = FStar_Ident.string_of_id lid in
                  let uu___6 = FStar_Ident.string_of_id lid' in
                  uu___5 = uu___6 ->
                  let uu___5 = p_lident lid in
                  p_refinement' b.FStar_Parser_AST.aqual
                    b.FStar_Parser_AST.battributes uu___5 t1 phi
              | uu___1 ->
                  (* Ordinary annotation; tuple types get protecting parens
                     so "*" is not misread. *)
                  let t' =
                    let uu___2 = is_typ_tuple t in
                    if uu___2
                    then
                      let uu___3 = p_tmFormula t in
                      soft_parens_with_nesting uu___3
                    else p_tmFormula t in
                  let uu___2 =
                    let uu___3 =
                      FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in
                    let uu___4 =
                      let uu___5 =
                        p_attributes false b.FStar_Parser_AST.battributes in
                      let uu___6 = p_lident lid in
                      FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                    FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                  (uu___2, t') in
            (match uu___ with
             | (b', t') ->
                 (* Choose how binder and annotation are joined: a
                    parenthesised "(b : t)" when atomic or (unless
                    [no_pars]) meta-qualified, a bare "b : t" otherwise. *)
                 let catf1 =
                   if
                     is_atomic ||
                       ((is_meta_qualifier b.FStar_Parser_AST.aqual) &&
                          (Prims.op_Negation no_pars))
                   then
                     fun x ->
                       fun y ->
                         let uu___1 =
                           let uu___2 =
                             let uu___3 = cat_with_colon x y in
                             FStar_Pprint.op_Hat_Hat uu___3
                               FStar_Pprint.rparen in
                           FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___2 in
                         FStar_Pprint.group uu___1
                   else
                     (fun x ->
                        fun y ->
                          let uu___2 = cat_with_colon x y in
                          FStar_Pprint.group uu___2) in
                 (b', (FStar_Pervasives_Native.Some (t', catf1))))
        | FStar_Parser_AST.TAnnotated uu___ ->
            failwith "Is this still used ?"
        | FStar_Parser_AST.NoName t ->
            (match t.FStar_Parser_AST.tm with
             | FStar_Parser_AST.Refine
                 ({ FStar_Parser_AST.b = FStar_Parser_AST.NoName t1;
                    FStar_Parser_AST.brange = uu___;
                    FStar_Parser_AST.blevel = uu___1;
                    FStar_Parser_AST.aqual = uu___2;
                    FStar_Parser_AST.battributes = uu___3;_},
                  phi)
                 ->
                 (* Anonymous refinement: "_ : t1 {phi}". *)
                 let uu___4 =
                   p_refinement' b.FStar_Parser_AST.aqual
                     b.FStar_Parser_AST.battributes FStar_Pprint.underscore
                     t1 phi in
                 (match uu___4 with
                  | (b', t') ->
                      (b',
                        (FStar_Pervasives_Native.Some (t', cat_with_colon))))
             | uu___ ->
                 (* Anonymous binder: print just the type, atomically if
                    requested; no annotation component. *)
                 let pref =
                   let uu___1 =
                     FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in
                   let uu___2 =
                     p_attributes false b.FStar_Parser_AST.battributes in
                   FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
                 let p_Tm = if is_atomic then p_atomicTerm else p_appTerm in
                 let uu___1 =
                   let uu___2 = p_Tm t in FStar_Pprint.op_Hat_Hat pref uu___2 in
                 (uu___1, FStar_Pervasives_Native.None))
and (p_refinement :
  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->
    FStar_Parser_AST.term Prims.list ->
      FStar_Pprint.document ->
        FStar_Parser_AST.term ->
          FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Print the refinement [binder : t {phi}] as a single document by
     joining the two parts produced by [p_refinement'] with a colon. *)
  fun aqual_opt ->
    fun attrs ->
      fun binder ->
        fun t ->
          fun phi ->
            let (b_doc, typ_doc) =
              p_refinement' aqual_opt attrs binder t phi in
            cat_with_colon b_doc typ_doc
and (p_refinement' :
  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->
    FStar_Parser_AST.term Prims.list ->
      FStar_Pprint.document ->
        FStar_Parser_AST.term ->
          FStar_Parser_AST.term ->
            (FStar_Pprint.document * FStar_Pprint.document))
  =
  (* Print a refinement type, returning the binder part and the
     "t {phi}" part separately so callers can choose how to join them
     (see [p_refinement] and [p_binder']). *)
  fun aqual_opt ->
    fun attrs ->
      fun binder ->
        fun t ->
          fun phi ->
            (* With an atomic base type the "{phi}" block may stay on the
               same line (jump break 0); otherwise it may move to its own
               line (break 1). *)
            let is_t_atomic =
              match t.FStar_Parser_AST.tm with
              | FStar_Parser_AST.Construct uu___ -> false
              | FStar_Parser_AST.App uu___ -> false
              | FStar_Parser_AST.Op uu___ -> false
              | uu___ -> true in
            let uu___ = p_noSeqTerm false false phi in
            match uu___ with
            | (comm, phi1) ->
                (* If a source comment was attached to phi, place it on a
                   line above the formula. *)
                let phi2 =
                  if comm = FStar_Pprint.empty
                  then phi1
                  else
                    (let uu___2 =
                       FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline phi1 in
                     FStar_Pprint.op_Hat_Hat comm uu___2) in
                let jump_break =
                  if is_t_atomic then Prims.int_zero else Prims.int_one in
                (* Binder part: aqual ^^ attributes ^^ binder. *)
                let uu___1 =
                  let uu___2 = FStar_Pprint.optional p_aqual aqual_opt in
                  let uu___3 =
                    let uu___4 = p_attributes false attrs in
                    FStar_Pprint.op_Hat_Hat uu___4 binder in
                  FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
                (* Type part: t followed by {phi}, braces tight when the
                   whole layout fits flat, soft braces otherwise. *)
                let uu___2 =
                  let uu___3 = p_appTerm t in
                  let uu___4 =
                    let uu___5 =
                      let uu___6 =
                        let uu___7 = soft_braces_with_nesting_tight phi2 in
                        let uu___8 = soft_braces_with_nesting phi2 in
                        FStar_Pprint.ifflat uu___7 uu___8 in
                      FStar_Pprint.group uu___6 in
                    FStar_Pprint.jump (Prims.of_int (2)) jump_break uu___5 in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                (uu___1, uu___2)
and (p_binders_list :
  Prims.bool ->
    FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document Prims.list)
  =
  (* Print every binder of the list independently, one document each. *)
  fun is_atomic ->
    fun bs -> FStar_Compiler_List.map (fun b -> p_binder is_atomic b) bs
and (p_binders :
  Prims.bool -> FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document)
  =
  (* Print a binder list, letting it flow over several lines when wide. *)
  fun is_atomic ->
    fun bs ->
      let docs = p_binders_list is_atomic bs in
      separate_or_flow break1 docs
and (p_binders_sep :
  FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document) =
  (* Print atomic binders separated by single spaces. *)
  fun bs ->
    let docs = p_binders_list true bs in
    FStar_Pprint.separate_map FStar_Pprint.space (fun d -> d) docs
and (string_of_id_or_underscore : FStar_Ident.ident -> FStar_Pprint.document)
  =
  (* Identifiers carrying the compiler's reserved prefix are printed back
     as the wildcard "_"; everything else prints verbatim.
     (The original generated code also conjoined a constant
     [not false], which is dropped here.) *)
  fun lid ->
    let name = FStar_Ident.string_of_id lid in
    if FStar_Compiler_Util.starts_with name FStar_Ident.reserved_prefix
    then FStar_Pprint.underscore
    else str name
and (text_of_lid_or_underscore : FStar_Ident.lident -> FStar_Pprint.document)
  =
  (* Like [string_of_id_or_underscore], for qualified names: when the
     final identifier of the lident carries the reserved prefix, print
     "_"; otherwise print the full qualified name. *)
  fun lid ->
    let is_reserved =
      let last_id = FStar_Ident.ident_of_lid lid in
      let last_name = FStar_Ident.string_of_id last_id in
      FStar_Compiler_Util.starts_with last_name FStar_Ident.reserved_prefix in
    if is_reserved
    then FStar_Pprint.underscore
    else str (FStar_Ident.string_of_lid lid)
(* The identifier printers below all share the reserved-prefix handling
   of [text_of_lid_or_underscore] / [string_of_id_or_underscore]. *)
and (p_qlident : FStar_Ident.lid -> FStar_Pprint.document) =
  fun l -> text_of_lid_or_underscore l
and (p_quident : FStar_Ident.lid -> FStar_Pprint.document) =
  fun l -> text_of_lid_or_underscore l
and (p_ident : FStar_Ident.ident -> FStar_Pprint.document) =
  fun i -> string_of_id_or_underscore i
and (p_lident : FStar_Ident.ident -> FStar_Pprint.document) =
  fun i -> string_of_id_or_underscore i
and (p_uident : FStar_Ident.ident -> FStar_Pprint.document) =
  fun i -> string_of_id_or_underscore i
and (p_tvar : FStar_Ident.ident -> FStar_Pprint.document) =
  fun i -> string_of_id_or_underscore i
and (paren_if : Prims.bool -> FStar_Pprint.document -> FStar_Pprint.document)
  =
  (* Wrap the document in soft parentheses only when [b] holds. *)
  fun b -> fun d -> if b then soft_parens_with_nesting d else d
and (inline_comment_or_above :
  FStar_Pprint.document ->
    FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)
  =
  (* Attach comment [comm] to [doc ^^ sep]. With no comment, just group
     the two. Otherwise: on the flat layout, keep the comment inline after
     the separator; on the broken layout, put the comment on its own line
     above the document. *)
  fun comm ->
    fun doc ->
      fun sep ->
        if comm = FStar_Pprint.empty
        then
          let uu___ = FStar_Pprint.op_Hat_Hat doc sep in
          FStar_Pprint.group uu___
        else
          (let uu___1 =
             (* Flat alternative: doc ^^ sep ^^ break ^^ comm. *)
             let uu___2 =
               let uu___3 =
                 let uu___4 =
                   let uu___5 = FStar_Pprint.op_Hat_Hat break1 comm in
                   FStar_Pprint.op_Hat_Hat sep uu___5 in
                 FStar_Pprint.op_Hat_Hat doc uu___4 in
               FStar_Pprint.group uu___3 in
             (* Broken alternative: comm ^^ hardline ^^ doc ^^ sep. *)
             let uu___3 =
               let uu___4 =
                 let uu___5 = FStar_Pprint.op_Hat_Hat doc sep in
                 FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___5 in
               FStar_Pprint.op_Hat_Hat comm uu___4 in
             FStar_Pprint.ifflat uu___2 uu___3 in
           FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___1)
and (p_term :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Print a term at sequence level.
     [ps] / [pb] are protection flags threaded through the printer
     (presumably "protect against enclosing sequence" and "against
     enclosing branch" -- NOTE(review): confirm against the callers). *)
  fun ps ->
    fun pb ->
      fun e ->
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.Seq (e1, e2) ->
            (* "e1; e2": print e1 (comment returned separately, then
               attached inline or above), then e2 on a new line. *)
            let uu___ = p_noSeqTerm true false e1 in
            (match uu___ with
             | (comm, t1) ->
                 let uu___1 =
                   inline_comment_or_above comm t1 FStar_Pprint.semi in
                 let uu___2 =
                   let uu___3 = p_term ps pb e2 in
                   FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
                 FStar_Pprint.op_Hat_Hat uu___1 uu___2)
        | FStar_Parser_AST.Bind (x, e1, e2) ->
            (* Bind "x <-- e1 ; e2". *)
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_lident x in
                  let uu___4 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                      FStar_Pprint.long_left_arrow in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                let uu___3 =
                  let uu___4 = p_noSeqTermAndComment true false e1 in
                  let uu___5 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                      FStar_Pprint.semi in
                  FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
                op_Hat_Slash_Plus_Hat uu___2 uu___3 in
              FStar_Pprint.group uu___1 in
            let uu___1 = p_term ps pb e2 in
            FStar_Pprint.op_Hat_Slash_Hat uu___ uu___1
        | uu___ ->
            (* Any other term: delegate and group the result. *)
            let uu___1 = p_noSeqTermAndComment ps pb e in
            FStar_Pprint.group uu___1
and (p_term_sep :
  Prims.bool ->
    Prims.bool ->
      FStar_Parser_AST.term ->
        (FStar_Pprint.document * FStar_Pprint.document))
  =
  (* Like [p_term], but returns the leading comment (possibly empty)
     separately from the printed term instead of attaching it. *)
  fun ps ->
    fun pb ->
      fun e ->
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.Seq (e1, e2) ->
            (* "e1; e2": the comment attached to e1 is propagated to the
               caller; the rest is laid out as in [p_term]. *)
            let uu___ = p_noSeqTerm true false e1 in
            (match uu___ with
             | (comm, t1) ->
                 let uu___1 =
                   let uu___2 =
                     let uu___3 =
                       FStar_Pprint.op_Hat_Hat t1 FStar_Pprint.semi in
                     FStar_Pprint.group uu___3 in
                   let uu___3 =
                     let uu___4 = p_term ps pb e2 in
                     FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___4 in
                   FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
                 (comm, uu___1))
        | FStar_Parser_AST.Bind (x, e1, e2) ->
            (* Bind "x <-- e1 ; e2": no detached comment of its own. *)
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 =
                    let uu___4 = p_lident x in
                    let uu___5 =
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                        FStar_Pprint.long_left_arrow in
                    FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
                  let uu___4 =
                    let uu___5 = p_noSeqTermAndComment true false e1 in
                    let uu___6 =
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                        FStar_Pprint.semi in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  op_Hat_Slash_Plus_Hat uu___3 uu___4 in
                FStar_Pprint.group uu___2 in
              let uu___2 = p_term ps pb e2 in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            (FStar_Pprint.empty, uu___)
        | uu___ -> p_noSeqTerm ps pb e
and (p_noSeqTerm :
  Prims.bool ->
    Prims.bool ->
      FStar_Parser_AST.term ->
        (FStar_Pprint.document * FStar_Pprint.document))
  =
  (* Print a non-sequence term, returning any comment found at its source
     range separately from the term document. *)
  fun ps ->
    fun pb ->
      fun e ->
        let printer = p_noSeqTerm' ps pb in
        with_comment_sep printer e e.FStar_Parser_AST.range
and (p_noSeqTermAndComment :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Print a non-sequence term with any comment from its source range
     attached directly to the resulting document. *)
  fun ps ->
    fun pb ->
      fun e ->
        let printer = p_noSeqTerm' ps pb in
        with_comment printer e e.FStar_Parser_AST.range
and (p_noSeqTerm' :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun ps ->
    fun pb ->
      fun e ->
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.Ascribed
            (e1, t, FStar_Pervasives_Native.None, use_eq) ->
            let uu___ =
              let uu___1 = p_tmIff e1 in
              let uu___2 =
                let uu___3 =
                  let uu___4 = p_typ ps pb t in
                  FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in
                FStar_Pprint.op_Hat_Hat
                  (if use_eq
                   then FStar_Pprint.dollar
                   else FStar_Pprint.langle) uu___3 in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Ascribed
            (e1, t, FStar_Pervasives_Native.Some tac, use_eq) ->
            let uu___ =
              let uu___1 = p_tmIff e1 in
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = p_typ false false t in
                    let uu___6 =
                      let uu___7 = str "by" in
                      let uu___8 = p_typ ps pb (maybe_unthunk tac) in
                      FStar_Pprint.op_Hat_Slash_Hat uu___7 uu___8 in
                    FStar_Pprint.op_Hat_Slash_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in
                FStar_Pprint.op_Hat_Hat
                  (if use_eq
                   then FStar_Pprint.dollar
                   else FStar_Pprint.langle) uu___3 in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Op (id, e1::e2::e3::[]) when
            let uu___ = FStar_Ident.string_of_id id in uu___ = ".()<-" ->
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_atomicTermNotQUident e1 in
                  let uu___4 =
                    let uu___5 =
                      let uu___6 =
                        let uu___7 = p_term false false e2 in
                        soft_parens_with_nesting uu___7 in
                      let uu___7 =
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                          FStar_Pprint.larrow in
                      FStar_Pprint.op_Hat_Hat uu___6 uu___7 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___5 in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                FStar_Pprint.group uu___2 in
              let uu___2 =
                let uu___3 = p_noSeqTermAndComment ps pb e3 in jump2 uu___3 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Op (id, e1::e2::e3::[]) when
            let uu___ = FStar_Ident.string_of_id id in uu___ = ".[]<-" ->
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_atomicTermNotQUident e1 in
                  let uu___4 =
                    let uu___5 =
                      let uu___6 =
                        let uu___7 = p_term false false e2 in
                        soft_brackets_with_nesting uu___7 in
                      let uu___7 =
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                          FStar_Pprint.larrow in
                      FStar_Pprint.op_Hat_Hat uu___6 uu___7 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___5 in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                FStar_Pprint.group uu___2 in
              let uu___2 =
                let uu___3 = p_noSeqTermAndComment ps pb e3 in jump2 uu___3 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Requires (e1, wtf) ->
            let uu___1 =
              let uu___2 = str "requires" in
              let uu___3 = p_typ ps pb e1 in
              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
            FStar_Pprint.group uu___1
        | FStar_Parser_AST.Ensures (e1, wtf) ->
            let uu___1 =
              let uu___2 = str "ensures" in
              let uu___3 = p_typ ps pb e1 in
              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
            FStar_Pprint.group uu___1
        | FStar_Parser_AST.WFOrder (rel, e1) -> p_dec_wf ps pb rel e1
        | FStar_Parser_AST.LexList l ->
            let uu___ =
              let uu___1 = str "%" in
              let uu___2 = p_term_list ps pb l in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Decreases (e1, wtf) ->
            let uu___1 =
              let uu___2 = str "decreases" in
              let uu___3 = p_typ ps pb e1 in
              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
            FStar_Pprint.group uu___1
        | FStar_Parser_AST.Attributes es ->
            let uu___ =
              let uu___1 = str "attributes" in
              let uu___2 = FStar_Pprint.separate_map break1 p_atomicTerm es in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.If (e1, op_opt, ret_opt, e2, e3) ->
            if is_unit e3
            then
              let uu___ =
                let uu___1 =
                  let uu___2 =
                    let uu___3 =
                      let uu___4 =
                        let uu___5 =
                          let uu___6 =
                            FStar_Compiler_Util.map_opt op_opt
                              FStar_Ident.string_of_id in
                          FStar_Compiler_Util.bind_opt uu___6
                            (FStar_Parser_AST.strip_prefix "let") in
                        FStar_Compiler_Util.dflt "" uu___5 in
                      Prims.op_Hat "if" uu___4 in
                    str uu___3 in
                  let uu___3 = p_noSeqTermAndComment false false e1 in
                  op_Hat_Slash_Plus_Hat uu___2 uu___3 in
                let uu___2 =
                  let uu___3 = str "then" in
                  let uu___4 = p_noSeqTermAndComment ps pb e2 in
                  op_Hat_Slash_Plus_Hat uu___3 uu___4 in
                FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
              FStar_Pprint.group uu___
            else
              (let e2_doc =
                 match e2.FStar_Parser_AST.tm with
                 | FStar_Parser_AST.If (uu___1, uu___2, uu___3, uu___4, e31)
                     when is_unit e31 ->
                     let uu___5 = p_noSeqTermAndComment false false e2 in
                     soft_parens_with_nesting uu___5
                 | uu___1 -> p_noSeqTermAndComment false false e2 in
               match ret_opt with
               | FStar_Pervasives_Native.None ->
                   let uu___1 =
                     let uu___2 =
                       let uu___3 = str "if" in
                       let uu___4 = p_noSeqTermAndComment false false e1 in
                       op_Hat_Slash_Plus_Hat uu___3 uu___4 in
                     let uu___3 =
                       let uu___4 =
                         let uu___5 = str "then" in
                         op_Hat_Slash_Plus_Hat uu___5 e2_doc in
                       let uu___5 =
                         let uu___6 = str "else" in
                         let uu___7 = p_noSeqTermAndComment ps pb e3 in
                         op_Hat_Slash_Plus_Hat uu___6 uu___7 in
                       FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                     FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
                   FStar_Pprint.group uu___1
               | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->
                   let uu___1 =
                     let uu___2 =
                       let uu___3 = str "if" in
                       let uu___4 = p_noSeqTermAndComment false false e1 in
                       op_Hat_Slash_Plus_Hat uu___3 uu___4 in
                     let uu___3 =
                       let uu___4 =
                         let uu___5 =
                           match as_opt with
                           | FStar_Pervasives_Native.None ->
                               FStar_Pprint.empty
                           | FStar_Pervasives_Native.Some as_ident ->
                               let uu___6 = str "as" in
                               let uu___7 = p_ident as_ident in
                               FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in
                         let uu___6 =
                           let uu___7 =
                             str (if use_eq then "returns$" else "returns") in
                           let uu___8 = p_tmIff ret in
                           op_Hat_Slash_Plus_Hat uu___7 uu___8 in
                         FStar_Pprint.op_Hat_Slash_Hat uu___5 uu___6 in
                       let uu___5 =
                         let uu___6 =
                           let uu___7 = str "then" in
                           op_Hat_Slash_Plus_Hat uu___7 e2_doc in
                         let uu___7 =
                           let uu___8 = str "else" in
                           let uu___9 = p_noSeqTermAndComment ps pb e3 in
                           op_Hat_Slash_Plus_Hat uu___8 uu___9 in
                         FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in
                       FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                     FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
                   FStar_Pprint.group uu___1)
        | FStar_Parser_AST.TryWith (e1, branches) ->
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = str "try" in
                  let uu___4 = p_noSeqTermAndComment false false e1 in
                  prefix2 uu___3 uu___4 in
                let uu___3 =
                  let uu___4 = str "with" in
                  let uu___5 =
                    separate_map_last FStar_Pprint.hardline p_patternBranch
                      branches in
                  FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
              FStar_Pprint.group uu___1 in
            let uu___1 = paren_if (ps || pb) in uu___1 uu___
        | FStar_Parser_AST.Match (e1, op_opt, ret_opt, branches) ->
            let match_doc =
              let uu___ =
                let uu___1 =
                  let uu___2 =
                    let uu___3 =
                      FStar_Compiler_Util.map_opt op_opt
                        FStar_Ident.string_of_id in
                    FStar_Compiler_Util.bind_opt uu___3
                      (FStar_Parser_AST.strip_prefix "let") in
                  FStar_Compiler_Util.dflt "" uu___2 in
                Prims.op_Hat "match" uu___1 in
              str uu___ in
            let uu___ =
              let uu___1 =
                match ret_opt with
                | FStar_Pervasives_Native.None ->
                    let uu___2 =
                      let uu___3 = p_noSeqTermAndComment false false e1 in
                      let uu___4 = str "with" in
                      FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
                        match_doc uu___3 uu___4 in
                    FStar_Pprint.group uu___2
                | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->
                    let uu___2 =
                      let uu___3 =
                        let uu___4 = p_noSeqTermAndComment false false e1 in
                        let uu___5 =
                          let uu___6 =
                            match as_opt with
                            | FStar_Pervasives_Native.None ->
                                FStar_Pprint.empty
                            | FStar_Pervasives_Native.Some as_ident ->
                                let uu___7 = str "as" in
                                let uu___8 = p_ident as_ident in
                                op_Hat_Slash_Plus_Hat uu___7 uu___8 in
                          let uu___7 =
                            let uu___8 =
                              str (if use_eq then "returns$" else "returns") in
                            let uu___9 = p_tmIff ret in
                            op_Hat_Slash_Plus_Hat uu___8 uu___9 in
                          op_Hat_Slash_Plus_Hat uu___6 uu___7 in
                        op_Hat_Slash_Plus_Hat uu___4 uu___5 in
                      let uu___4 = str "with" in
                      FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
                        match_doc uu___3 uu___4 in
                    FStar_Pprint.group uu___2 in
              let uu___2 =
                separate_map_last FStar_Pprint.hardline p_patternBranch
                  branches in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            let uu___1 = paren_if (ps || pb) in uu___1 uu___
        | FStar_Parser_AST.LetOpen (uid, e1) ->
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = str "let open" in
                  let uu___4 = p_quident uid in
                  let uu___5 = str "in" in
                  FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
                    uu___3 uu___4 uu___5 in
                let uu___3 = p_term false pb e1 in
                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
              FStar_Pprint.group uu___1 in
            let uu___1 = paren_if ps in uu___1 uu___
        | FStar_Parser_AST.LetOpenRecord (r, rty, e1) ->
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = str "let open" in
                  let uu___4 = p_term false pb r in
                  let uu___5 = str "as" in
                  FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
                    uu___3 uu___4 uu___5 in
                let uu___3 =
                  let uu___4 = p_term false pb rty in
                  let uu___5 =
                    let uu___6 = str "in" in
                    let uu___7 = p_term false pb e1 in
                    FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in
                  FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
              FStar_Pprint.group uu___1 in
            let uu___1 = paren_if ps in uu___1 uu___
        | FStar_Parser_AST.LetOperator (lets, body) ->
            let p_let uu___ is_last =
              match uu___ with
              | (id, pat, e1) ->
                  let doc_let_or_and =
                    let uu___1 = FStar_Ident.string_of_id id in str uu___1 in
                  let doc_pat = p_letlhs doc_let_or_and (pat, e1) true in
                  (match ((pat.FStar_Parser_AST.pat),
                           (e1.FStar_Parser_AST.tm))
                   with
                   | (FStar_Parser_AST.PatVar (pid, uu___1, uu___2),
                      FStar_Parser_AST.Name tid) when
                       let uu___3 = FStar_Ident.string_of_id pid in
                       let uu___4 =
                         let uu___5 = FStar_Ident.path_of_lid tid in
                         FStar_Compiler_List.last uu___5 in
                       uu___3 = uu___4 ->
                       let uu___3 =
                         if is_last then str "in" else FStar_Pprint.empty in
                       FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3
                   | (FStar_Parser_AST.PatVar (pid, uu___1, uu___2),
                      FStar_Parser_AST.Var tid) when
                       let uu___3 = FStar_Ident.string_of_id pid in
                       let uu___4 =
                         let uu___5 = FStar_Ident.path_of_lid tid in
                         FStar_Compiler_List.last uu___5 in
                       uu___3 = uu___4 ->
                       let uu___3 =
                         if is_last then str "in" else FStar_Pprint.empty in
                       FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3
                   | uu___1 ->
                       let uu___2 = p_term_sep false false e1 in
                       (match uu___2 with
                        | (comm, doc_expr) ->
                            let doc_expr1 =
                              inline_comment_or_above comm doc_expr
                                FStar_Pprint.empty in
                            if is_last
                            then
                              let uu___3 =
                                FStar_Pprint.flow break1
                                  [doc_pat; FStar_Pprint.equals] in
                              let uu___4 = str "in" in
                              FStar_Pprint.surround (Prims.of_int (2))
                                Prims.int_one uu___3 doc_expr1 uu___4
                            else
                              (let uu___4 =
                                 FStar_Pprint.flow break1
                                   [doc_pat; FStar_Pprint.equals; doc_expr1] in
                               FStar_Pprint.hang (Prims.of_int (2)) uu___4))) in
            let l = FStar_Compiler_List.length lets in
            let lets_docs =
              FStar_Compiler_List.mapi
                (fun i ->
                   fun lb ->
                     let uu___ = p_let lb (i = (l - Prims.int_one)) in
                     FStar_Pprint.group uu___) lets in
            let lets_doc =
              let uu___ = FStar_Pprint.separate break1 lets_docs in
              FStar_Pprint.group uu___ in
            let r =
              let uu___ =
                let uu___1 =
                  let uu___2 =
                    let uu___3 = p_term false pb body in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
                  FStar_Pprint.op_Hat_Hat lets_doc uu___2 in
                FStar_Pprint.group uu___1 in
              let uu___1 = paren_if ps in uu___1 uu___ in
            r
        | FStar_Parser_AST.Let (q, lbs, e1) ->
            let p_lb q1 uu___ is_last =
              match uu___ with
              | (a, (pat, e2)) ->
                  let attrs = p_attrs_opt true a in
                  let doc_let_or_and =
                    match q1 with
                    | FStar_Pervasives_Native.Some (FStar_Parser_AST.Rec) ->
                        let uu___1 =
                          let uu___2 = str "let" in
                          let uu___3 = str "rec" in
                          FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
                        FStar_Pprint.group uu___1
                    | FStar_Pervasives_Native.Some
                        (FStar_Parser_AST.NoLetQualifier) -> str "let"
                    | uu___1 -> str "and" in
                  let doc_pat = p_letlhs doc_let_or_and (pat, e2) true in
                  let uu___1 = p_term_sep false false e2 in
                  (match uu___1 with
                   | (comm, doc_expr) ->
                       let doc_expr1 =
                         inline_comment_or_above comm doc_expr
                           FStar_Pprint.empty in
                       let uu___2 =
                         if is_last
                         then
                           let uu___3 =
                             FStar_Pprint.flow break1
                               [doc_pat; FStar_Pprint.equals] in
                           let uu___4 = str "in" in
                           FStar_Pprint.surround (Prims.of_int (2))
                             Prims.int_one uu___3 doc_expr1 uu___4
                         else
                           (let uu___4 =
                              FStar_Pprint.flow break1
                                [doc_pat; FStar_Pprint.equals; doc_expr1] in
                            FStar_Pprint.hang (Prims.of_int (2)) uu___4) in
                       FStar_Pprint.op_Hat_Hat attrs uu___2) in
            let l = FStar_Compiler_List.length lbs in
            let lbs_docs =
              FStar_Compiler_List.mapi
                (fun i ->
                   fun lb ->
                     if i = Prims.int_zero
                     then
                       let uu___ =
                         p_lb (FStar_Pervasives_Native.Some q) lb
                           (i = (l - Prims.int_one)) in
                       FStar_Pprint.group uu___
                     else
                       (let uu___1 =
                          p_lb FStar_Pervasives_Native.None lb
                            (i = (l - Prims.int_one)) in
                        FStar_Pprint.group uu___1)) lbs in
            let lbs_doc =
              let uu___ = FStar_Pprint.separate break1 lbs_docs in
              FStar_Pprint.group uu___ in
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_term false pb e1 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
                FStar_Pprint.op_Hat_Hat lbs_doc uu___2 in
              FStar_Pprint.group uu___1 in
            let uu___1 = paren_if ps in uu___1 uu___
        | FStar_Parser_AST.Abs
            ({
               FStar_Parser_AST.pat = FStar_Parser_AST.PatVar
                 (x, typ_opt, uu___);
               FStar_Parser_AST.prange = uu___1;_}::[],
             {
               FStar_Parser_AST.tm = FStar_Parser_AST.Match
                 (maybe_x, FStar_Pervasives_Native.None,
                  FStar_Pervasives_Native.None, branches);
               FStar_Parser_AST.range = uu___2;
               FStar_Parser_AST.level = uu___3;_})
            when matches_var maybe_x x ->
            let uu___4 =
              let uu___5 =
                let uu___6 = str "function" in
                let uu___7 =
                  separate_map_last FStar_Pprint.hardline p_patternBranch
                    branches in
                FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in
              FStar_Pprint.group uu___5 in
            let uu___5 = paren_if (ps || pb) in uu___5 uu___4
        | FStar_Parser_AST.Quote (e1, FStar_Parser_AST.Dynamic) ->
            let uu___ =
              let uu___1 = str "quote" in
              let uu___2 = p_noSeqTermAndComment ps pb e1 in
              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Quote (e1, FStar_Parser_AST.Static) ->
            let uu___ =
              let uu___1 = str "`" in
              let uu___2 = p_noSeqTermAndComment ps pb e1 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.VQuote e1 ->
            let uu___ =
              let uu___1 = str "`%" in
              let uu___2 = p_noSeqTermAndComment ps pb e1 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.Antiquote
            {
              FStar_Parser_AST.tm = FStar_Parser_AST.Quote
                (e1, FStar_Parser_AST.Dynamic);
              FStar_Parser_AST.range = uu___;
              FStar_Parser_AST.level = uu___1;_}
            ->
            let uu___2 =
              let uu___3 = str "`@" in
              let uu___4 = p_noSeqTermAndComment ps pb e1 in
              FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
            FStar_Pprint.group uu___2
        | FStar_Parser_AST.Antiquote e1 ->
            let uu___ =
              let uu___1 = str "`#" in
              let uu___2 = p_noSeqTermAndComment ps pb e1 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        | FStar_Parser_AST.CalcProof (rel, init, steps) ->
            let head =
              let uu___ = str "calc" in
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_noSeqTermAndComment false false rel in
                  let uu___4 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                      FStar_Pprint.lbrace in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
              FStar_Pprint.op_Hat_Hat uu___ uu___1 in
            let bot = FStar_Pprint.rbrace in
            let uu___ = FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline bot in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 = p_noSeqTermAndComment false false init in
                  let uu___5 =
                    let uu___6 = str ";" in
                    let uu___7 =
                      let uu___8 =
                        separate_map_last FStar_Pprint.hardline p_calcStep
                          steps in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___8 in
                    FStar_Pprint.op_Hat_Hat uu___6 uu___7 in
                  FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
              FStar_Compiler_Effect.op_Less_Bar
                (FStar_Pprint.nest (Prims.of_int (2))) uu___2 in
            FStar_Pprint.enclose head uu___ uu___1
        | FStar_Parser_AST.IntroForall (xs, p, e1) ->
            let p1 = p_noSeqTermAndComment false false p in
            let e2 = p_noSeqTermAndComment false false e1 in
            let xs1 = p_binders_sep xs in
            let uu___ = str "introduce forall" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "." in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "with" in
                            let uu___11 =
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space e2 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat p1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat xs1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.IntroExists (xs, p, vs, e1) ->
            let p1 = p_noSeqTermAndComment false false p in
            let e2 = p_noSeqTermAndComment false false e1 in
            let xs1 = p_binders_sep xs in
            let uu___ = str "introduce" in
            let uu___1 =
              let uu___2 =
                let uu___3 = str "exists" in
                let uu___4 =
                  let uu___5 =
                    let uu___6 =
                      let uu___7 = str "." in
                      let uu___8 =
                        let uu___9 =
                          let uu___10 =
                            let uu___11 = str "with" in
                            let uu___12 =
                              let uu___13 =
                                let uu___14 =
                                  FStar_Pprint.separate_map
                                    FStar_Pprint.space p_atomicTerm vs in
                                let uu___15 =
                                  let uu___16 =
                                    let uu___17 = str "and" in
                                    let uu___18 =
                                      FStar_Pprint.op_Hat_Hat
                                        FStar_Pprint.space e2 in
                                    FStar_Pprint.op_Hat_Hat uu___17 uu___18 in
                                  FStar_Pprint.op_Hat_Hat
                                    FStar_Pprint.hardline uu___16 in
                                FStar_Pprint.op_Hat_Hat uu___14 uu___15 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___13 in
                            FStar_Pprint.op_Hat_Hat uu___11 uu___12 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___10 in
                        FStar_Pprint.op_Hat_Hat p1 uu___9 in
                      FStar_Pprint.op_Hat_Hat uu___7 uu___8 in
                    FStar_Pprint.op_Hat_Hat xs1 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.IntroImplies (p, q, x, e1) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmFormula q in
            let e2 = p_noSeqTermAndComment false false e1 in
            let x1 = p_binders_sep [x] in
            let uu___ = str "introduce" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "==>" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "with" in
                            let uu___11 =
                              let uu___12 =
                                let uu___13 =
                                  let uu___14 = str "." in
                                  let uu___15 =
                                    FStar_Pprint.op_Hat_Hat
                                      FStar_Pprint.space e2 in
                                  FStar_Pprint.op_Hat_Hat uu___14 uu___15 in
                                FStar_Pprint.op_Hat_Hat x1 uu___13 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___12 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.IntroOr (b, p, q, e1) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmFormula q in
            let e2 = p_noSeqTermAndComment false false e1 in
            let uu___ = str "introduce" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "\\/" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "with" in
                            let uu___11 =
                              let uu___12 =
                                let uu___13 =
                                  if b then str "Left" else str "Right" in
                                let uu___14 =
                                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                    e2 in
                                FStar_Pprint.op_Hat_Hat uu___13 uu___14 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___12 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.IntroAnd (p, q, e1, e2) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmTuple q in
            let e11 = p_noSeqTermAndComment false false e1 in
            let e21 = p_noSeqTermAndComment false false e2 in
            let uu___ = str "introduce" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "/\\" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "with" in
                            let uu___11 =
                              let uu___12 =
                                let uu___13 =
                                  let uu___14 =
                                    let uu___15 = str "and" in
                                    let uu___16 =
                                      FStar_Pprint.op_Hat_Hat
                                        FStar_Pprint.space e21 in
                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in
                                  FStar_Pprint.op_Hat_Hat
                                    FStar_Pprint.hardline uu___14 in
                                FStar_Pprint.op_Hat_Hat e11 uu___13 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___12 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.ElimForall (xs, p, vs) ->
            let xs1 = p_binders_sep xs in
            let p1 = p_noSeqTermAndComment false false p in
            let vs1 =
              FStar_Pprint.separate_map FStar_Pprint.space p_atomicTerm vs in
            let uu___ = str "eliminate" in
            let uu___1 =
              let uu___2 =
                let uu___3 = str "forall" in
                let uu___4 =
                  let uu___5 =
                    let uu___6 =
                      let uu___7 = str "." in
                      let uu___8 =
                        let uu___9 =
                          let uu___10 =
                            let uu___11 =
                              let uu___12 = str "with" in
                              let uu___13 =
                                FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                  vs1 in
                              FStar_Pprint.op_Hat_Hat uu___12 uu___13 in
                            FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                              uu___11 in
                          FStar_Pprint.op_Hat_Hat p1 uu___10 in
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___9 in
                      FStar_Pprint.op_Hat_Hat uu___7 uu___8 in
                    FStar_Pprint.op_Hat_Hat xs1 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.ElimExists (bs, p, q, b, e1) ->
            let head =
              let uu___ = str "eliminate exists" in
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_binders_sep bs in
                  let uu___4 = str "." in
                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
              FStar_Pprint.op_Hat_Hat uu___ uu___1 in
            let p1 = p_noSeqTermAndComment false false p in
            let q1 = p_noSeqTermAndComment false false q in
            let e2 = p_noSeqTermAndComment false false e1 in
            let uu___ =
              let uu___1 =
                let uu___2 =
                  let uu___3 =
                    let uu___4 = str "returns" in
                    let uu___5 =
                      let uu___6 =
                        let uu___7 =
                          let uu___8 =
                            let uu___9 = str "with" in
                            let uu___10 =
                              let uu___11 =
                                let uu___12 = p_binders_sep [b] in
                                let uu___13 =
                                  let uu___14 = str "." in
                                  let uu___15 =
                                    FStar_Pprint.op_Hat_Hat
                                      FStar_Pprint.hardline e2 in
                                  FStar_Pprint.op_Hat_Hat uu___14 uu___15 in
                                FStar_Pprint.op_Hat_Hat uu___12 uu___13 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___11 in
                            FStar_Pprint.op_Hat_Hat uu___9 uu___10 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___8 in
                        FStar_Pprint.op_Hat_Hat q1 uu___7 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___6 in
                    FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
                FStar_Pprint.op_Hat_Hat p1 uu___2 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___1 in
            FStar_Pprint.op_Hat_Hat head uu___
        | FStar_Parser_AST.ElimImplies (p, q, e1) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmFormula q in
            let e2 = p_noSeqTermAndComment false false e1 in
            let uu___ = str "eliminate" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "==>" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "with" in
                            let uu___11 =
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space e2 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.ElimOr (p, q, r, x, e1, y, e2) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmFormula q in
            let r1 = p_noSeqTermAndComment false false r in
            let x1 = p_binders_sep [x] in
            let e11 = p_noSeqTermAndComment false false e1 in
            let y1 = p_binders_sep [y] in
            let e21 = p_noSeqTermAndComment false false e2 in
            let uu___ = str "eliminate" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "\\/" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "returns" in
                            let uu___11 =
                              let uu___12 =
                                let uu___13 =
                                  let uu___14 =
                                    let uu___15 = str "with" in
                                    let uu___16 =
                                      let uu___17 =
                                        let uu___18 =
                                          let uu___19 =
                                            let uu___20 = str "." in
                                            let uu___21 =
                                              let uu___22 =
                                                let uu___23 =
                                                  let uu___24 =
                                                    let uu___25 = str "and" in
                                                    let uu___26 =
                                                      let uu___27 =
                                                        let uu___28 =
                                                          let uu___29 =
                                                            let uu___30 =
                                                              str "." in
                                                            let uu___31 =
                                                              FStar_Pprint.op_Hat_Hat
                                                                FStar_Pprint.space
                                                                e21 in
                                                            FStar_Pprint.op_Hat_Hat
                                                              uu___30 uu___31 in
                                                          FStar_Pprint.op_Hat_Hat
                                                            FStar_Pprint.space
                                                            uu___29 in
                                                        FStar_Pprint.op_Hat_Hat
                                                          y1 uu___28 in
                                                      FStar_Pprint.op_Hat_Hat
                                                        FStar_Pprint.space
                                                        uu___27 in
                                                    FStar_Pprint.op_Hat_Hat
                                                      uu___25 uu___26 in
                                                  FStar_Pprint.op_Hat_Hat
                                                    FStar_Pprint.hardline
                                                    uu___24 in
                                                FStar_Pprint.op_Hat_Hat e11
                                                  uu___23 in
                                              FStar_Pprint.op_Hat_Hat
                                                FStar_Pprint.space uu___22 in
                                            FStar_Pprint.op_Hat_Hat uu___20
                                              uu___21 in
                                          FStar_Pprint.op_Hat_Hat
                                            FStar_Pprint.space uu___19 in
                                        FStar_Pprint.op_Hat_Hat x1 uu___18 in
                                      FStar_Pprint.op_Hat_Hat
                                        FStar_Pprint.space uu___17 in
                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in
                                  FStar_Pprint.op_Hat_Hat
                                    FStar_Pprint.hardline uu___14 in
                                FStar_Pprint.op_Hat_Hat r1 uu___13 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___12 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | FStar_Parser_AST.ElimAnd (p, q, r, x, y, e1) ->
            let p1 = p_tmFormula p in
            let q1 = p_tmTuple q in
            let r1 = p_noSeqTermAndComment false false r in
            let xy = p_binders_sep [x; y] in
            let e2 = p_noSeqTermAndComment false false e1 in
            let uu___ = str "eliminate" in
            let uu___1 =
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = str "/\\" in
                    let uu___6 =
                      let uu___7 =
                        let uu___8 =
                          let uu___9 =
                            let uu___10 = str "returns" in
                            let uu___11 =
                              let uu___12 =
                                let uu___13 =
                                  let uu___14 =
                                    let uu___15 = str "with" in
                                    let uu___16 =
                                      let uu___17 =
                                        let uu___18 =
                                          let uu___19 =
                                            let uu___20 = str "." in
                                            let uu___21 =
                                              FStar_Pprint.op_Hat_Hat
                                                FStar_Pprint.space e2 in
                                            FStar_Pprint.op_Hat_Hat uu___20
                                              uu___21 in
                                          FStar_Pprint.op_Hat_Hat
                                            FStar_Pprint.space uu___19 in
                                        FStar_Pprint.op_Hat_Hat xy uu___18 in
                                      FStar_Pprint.op_Hat_Hat
                                        FStar_Pprint.space uu___17 in
                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in
                                  FStar_Pprint.op_Hat_Hat
                                    FStar_Pprint.hardline uu___14 in
                                FStar_Pprint.op_Hat_Hat r1 uu___13 in
                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                                uu___12 in
                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___9 in
                        FStar_Pprint.op_Hat_Hat q1 uu___8 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in
                FStar_Pprint.op_Hat_Hat p1 uu___3 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
            FStar_Pprint.op_Hat_Hat uu___ uu___1
        | uu___ -> p_typ ps pb e
and (p_dec_wf :
  Prims.bool ->
    Prims.bool ->
      FStar_Parser_AST.term -> FStar_Pprint.document) ->
      FStar_Parser_AST.term -> FStar_Parser_AST.term -> FStar_Pprint.document
  =
  fun ps ->
    fun pb ->
      fun rel ->
        fun e ->
          let opening = str "{:well-founded " in
          let rel_doc = p_typ ps pb rel in
          let e_doc = p_typ ps pb e in
          let closing = str " }" in
          let tail =
            FStar_Pprint.op_Hat_Slash_Hat rel_doc
              (FStar_Pprint.op_Hat_Hat e_doc closing) in
          FStar_Pprint.group (FStar_Pprint.op_Hat_Hat opening tail)
and (p_calcStep :
  Prims.bool -> FStar_Parser_AST.calc_step -> FStar_Pprint.document) =
  (* Prints one step of a calc proof as "rel { just } next;": the relation,
     the justification wrapped in braces, then a hardline and the next term
     terminated by ";".  The first bool argument is unused here. *)
  fun uu___ ->
    fun uu___1 ->
      match uu___1 with
      | FStar_Parser_AST.CalcStep (rel, just, next) ->
          let uu___2 =
            let uu___3 = p_noSeqTermAndComment false false rel in
            let uu___4 =
              let uu___5 =
                let uu___6 =
                  let uu___7 =
                    let uu___8 = p_noSeqTermAndComment false false just in
                    let uu___9 =
                      let uu___10 =
                        let uu___11 =
                          let uu___12 =
                            let uu___13 =
                              p_noSeqTermAndComment false false next in
                            let uu___14 = str ";" in
                            FStar_Pprint.op_Hat_Hat uu___13 uu___14 in
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline
                            uu___12 in
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.rbrace uu___11 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___10 in
                    FStar_Pprint.op_Hat_Hat uu___8 uu___9 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.lbrace uu___6 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in
            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
          FStar_Pprint.group uu___2
and (p_attrs_opt :
  Prims.bool ->
    FStar_Parser_AST.term Prims.list FStar_Pervasives_Native.option ->
      FStar_Pprint.document)
  =
  (* Prints an optional attribute list: "[@@ ...]" at top level,
     "[@@@ ...]" otherwise; nothing when the option is None. *)
  fun isTopLevel ->
    fun uu___ ->
      match uu___ with
      | FStar_Pervasives_Native.None -> FStar_Pprint.empty
      | FStar_Pervasives_Native.Some terms ->
          let opener = str (if isTopLevel then "[@@" else "[@@@") in
          let attrs_doc =
            let sep = str "; " in
            FStar_Pprint.separate_map sep
              (p_noSeqTermAndComment false false) terms in
          let closer = str "]" in
          FStar_Pprint.group
            (FStar_Pprint.op_Hat_Slash_Hat opener
               (FStar_Pprint.op_Hat_Slash_Hat attrs_doc closer))
and (p_typ :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Delegates to p_typ' through with_comment over the term's source range. *)
  fun ps pb e ->
    let range = e.FStar_Parser_AST.range in
    with_comment (p_typ' ps pb) e range
and (p_typ_sep :
  Prims.bool ->
    Prims.bool ->
      FStar_Parser_AST.term ->
        (FStar_Pprint.document * FStar_Pprint.document))
  =
  (* Like p_typ, but uses with_comment_sep, which yields a pair of
     documents instead of one combined document. *)
  fun ps pb e ->
    let range = e.FStar_Parser_AST.range in
    with_comment_sep (p_typ' ps pb) e range
and (p_typ' :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun ps ->
    fun pb ->
      fun e ->
        (* The QForall and QExists arms were duplicated verbatim; they now
           share one local printer.  [e] (the whole quantified term) is still
           what p_quantifier inspects to choose "forall" vs "exists". *)
        let p_quant bs trigger e1 =
          let binders_doc = p_binders true bs in
          let term_doc = p_noSeqTermAndComment ps pb e1 in
          match trigger with
          | [] ->
              (* No SMT pattern: "quant binders. body". *)
              let uu___1 =
                let uu___2 =
                  let uu___3 = p_quantifier e in
                  FStar_Pprint.op_Hat_Hat uu___3 FStar_Pprint.space in
                FStar_Pprint.soft_surround (Prims.of_int (2))
                  Prims.int_zero uu___2 binders_doc FStar_Pprint.dot in
              prefix2 uu___1 term_doc
          | pats ->
              (* With a trigger: the {:pattern ...} block follows the dot. *)
              let uu___1 =
                let uu___2 =
                  let uu___3 =
                    let uu___4 =
                      let uu___5 = p_quantifier e in
                      FStar_Pprint.op_Hat_Hat uu___5 FStar_Pprint.space in
                    FStar_Pprint.soft_surround (Prims.of_int (2))
                      Prims.int_zero uu___4 binders_doc FStar_Pprint.dot in
                  let uu___4 = p_trigger trigger in prefix2 uu___3 uu___4 in
                FStar_Pprint.group uu___2 in
              prefix2 uu___1 term_doc in
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.QForall (bs, (uu___, trigger), e1) ->
            p_quant bs trigger e1
        | FStar_Parser_AST.QExists (bs, (uu___, trigger), e1) ->
            p_quant bs trigger e1
        | uu___ -> p_simpleTerm ps pb e
and (p_typ_top :
  annotation_style ->
    Prims.bool ->
      Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Delegates to p_typ_top' through with_comment over the term's range. *)
  fun style ps pb e ->
    let range = e.FStar_Parser_AST.range in
    with_comment (p_typ_top' style ps pb) e range
and (p_typ_top' :
  annotation_style ->
    Prims.bool ->
      Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Prints an arrow type at the given style; the two bool flags are
     unused here (kept for signature uniformity with p_typ'). *)
  fun style _ps _pb e -> p_tmArrow style true p_tmFormula e
and (sig_as_binders_if_possible :
  FStar_Parser_AST.term -> Prims.bool -> FStar_Pprint.document) =
  (* Prints a signature in Binders style when every argument is annotated
     (per all_binders_annot); otherwise falls back to a colon-prefixed
     Arrows rendering. *)
  fun t ->
    fun extra_space ->
      let lead =
        if extra_space then FStar_Pprint.space else FStar_Pprint.empty in
      if all_binders_annot t
      then
        let d =
          p_typ_top (Binders ((Prims.of_int (4)), Prims.int_zero, true))
            false false t in
        FStar_Pprint.op_Hat_Hat lead d
      else
        (let d =
           p_typ_top (Arrows ((Prims.of_int (2)), (Prims.of_int (2))))
             false false t in
         FStar_Pprint.group
           (FStar_Pprint.op_Hat_Hat FStar_Pprint.colon
              (FStar_Pprint.op_Hat_Hat lead d)))
and (collapse_pats :
  (FStar_Pprint.document * FStar_Pprint.document * Prims.bool * Prims.bool)
    Prims.list -> FStar_Pprint.document Prims.list)
  =
  (* Merges consecutive binders that share the same printed type so they can
     render as one group.  Each input tuple is
     (binder doc, type doc, is-typeclass-arg, joinable). *)
  fun pats ->
    (* A binder joins the accumulator's head group only when its type doc is
       structurally equal to the group's and both sides are joinable; the
       merged group is marked non-typeclass and joinable. *)
    let fold_fun bs x =
      let uu___ = x in
      match uu___ with
      | (b1, t1, tc1, j1) ->
          (match bs with
           | [] -> [([b1], t1, tc1, j1)]
           | hd::tl ->
               let uu___1 = hd in
               (match uu___1 with
                | (b2s, t2, tc2, j2) ->
                    if ((t1 = t2) && j1) && j2
                    then
                      ((FStar_Compiler_List.op_At b2s [b1]), t1, false, true)
                      :: tl
                    else ([b1], t1, tc1, j1) :: hd :: tl)) in
    (* Renders one merged group: binder docs separated by spaces, then the
       shared type via cat_with_colon; typeclass arguments are wrapped with
       tc_arg, everything else with soft parens. *)
    let p_collapsed_binder cb =
      let uu___ = cb in
      match uu___ with
      | (bs, typ, istcarg, uu___1) ->
          let body =
            match bs with
            | [] -> failwith "Impossible"
            | hd::tl ->
                let uu___2 =
                  FStar_Compiler_List.fold_left
                    (fun x ->
                       fun y ->
                         let uu___3 =
                           FStar_Pprint.op_Hat_Hat FStar_Pprint.space y in
                         FStar_Pprint.op_Hat_Hat x uu___3) hd tl in
                cat_with_colon uu___2 typ in
          if istcarg then tc_arg body else soft_parens_with_nesting body in
    (* Fold over the reversed input, then map_rev to restore source order. *)
    let binders =
      FStar_Compiler_List.fold_left fold_fun []
        (FStar_Compiler_List.rev pats) in
    map_rev p_collapsed_binder binders
and (pats_as_binders_if_possible :
  FStar_Parser_AST.pattern Prims.list ->
    (FStar_Pprint.document Prims.list * annotation_style))
  =
  (* Tries to print argument patterns as annotated binders.  That succeeds
     only when every pattern is an ascribed variable (map_if_all); on success
     the binders are collapsed and the Binders style carries true in its last
     field, on failure the patterns print atomically and it carries false. *)
  fun pats ->
    (* Classifies one pattern; yields (binder doc, type doc, is-tc, joinable)
       or None when the pattern cannot be shown as a binder. *)
    let all_binders p =
      match p.FStar_Parser_AST.pat with
      | FStar_Parser_AST.PatAscribed (pat, (t, FStar_Pervasives_Native.None))
          ->
          (match ((pat.FStar_Parser_AST.pat), (t.FStar_Parser_AST.tm)) with
           | (FStar_Parser_AST.PatVar (lid, aqual, attrs),
              FStar_Parser_AST.Refine
              ({ FStar_Parser_AST.b = FStar_Parser_AST.Annotated (lid', t1);
                 FStar_Parser_AST.brange = uu___;
                 FStar_Parser_AST.blevel = uu___1;
                 FStar_Parser_AST.aqual = uu___2;
                 FStar_Parser_AST.battributes = uu___3;_},
               phi)) when
               (* The refinement's bound name must match the pattern variable
                  for the pattern to print as a refined binder. *)
               let uu___4 = FStar_Ident.string_of_id lid in
               let uu___5 = FStar_Ident.string_of_id lid' in uu___4 = uu___5
               ->
               let uu___4 =
                 let uu___5 = p_ident lid in
                 p_refinement' aqual attrs uu___5 t1 phi in
               (match uu___4 with
                | (x, y) -> FStar_Pervasives_Native.Some (x, y, false, false))
           | (FStar_Parser_AST.PatVar (lid, aqual, attrs), uu___) ->
               let is_tc =
                 aqual =
                   (FStar_Pervasives_Native.Some
                      FStar_Parser_AST.TypeClassArg) in
               let is_meta =
                 match aqual with
                 | FStar_Pervasives_Native.Some (FStar_Parser_AST.Meta
                     uu___1) -> true
                 | uu___1 -> false in
               (* Joinable only when neither a typeclass nor a meta arg. *)
               let uu___1 =
                 let uu___2 =
                   let uu___3 = FStar_Pprint.optional p_aqual aqual in
                   let uu___4 =
                     let uu___5 = p_attributes false attrs in
                     let uu___6 = p_ident lid in
                     FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                   FStar_Pprint.op_Hat_Hat uu___3 uu___4 in
                 let uu___3 = p_tmEqNoRefinement t in
                 (uu___2, uu___3, is_tc,
                   ((Prims.op_Negation is_tc) && (Prims.op_Negation is_meta))) in
               FStar_Pervasives_Native.Some uu___1
           | uu___ -> FStar_Pervasives_Native.None)
      | uu___ -> FStar_Pervasives_Native.None in
    let uu___ = map_if_all all_binders pats in
    match uu___ with
    | FStar_Pervasives_Native.Some bs ->
        let uu___1 = collapse_pats bs in
        (uu___1, (Binders ((Prims.of_int (4)), Prims.int_zero, true)))
    | FStar_Pervasives_Native.None ->
        let uu___1 = FStar_Compiler_List.map p_atomicPattern pats in
        (uu___1, (Binders ((Prims.of_int (4)), Prims.int_zero, false)))
and (p_quantifier : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Prints the keyword of a quantified term; callers must only pass
     QForall/QExists terms. *)
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.QForall uu___ -> str "forall"
    | FStar_Parser_AST.QExists uu___ -> str "exists"
    | uu___ ->
        (* Fixed typo in the panic message: "Imposible" -> "Impossible". *)
        failwith "Impossible : p_quantifier called on a non-quantifier term"
and (p_trigger :
  FStar_Parser_AST.term Prims.list Prims.list -> FStar_Pprint.document) =
  (* Prints an SMT trigger as "{:pattern <disjunctive patterns>}"; an empty
     trigger list produces the empty document. *)
  fun uu___ ->
    match uu___ with
    | [] -> FStar_Pprint.empty
    | pats ->
        let uu___1 =
          let uu___2 =
            let uu___3 =
              let uu___4 = str "pattern" in
              let uu___5 =
                let uu___6 =
                  let uu___7 = p_disjunctivePats pats in
                  FStar_Pprint.jump (Prims.of_int (2)) Prims.int_zero uu___7 in
                FStar_Pprint.op_Hat_Hat uu___6 FStar_Pprint.rbrace in
              FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbrace uu___2 in
        FStar_Pprint.group uu___1
and (p_disjunctivePats :
  FStar_Parser_AST.term Prims.list Prims.list -> FStar_Pprint.document) =
  (* Separates conjunctive pattern groups with "\/". *)
  fun pats ->
    FStar_Pprint.separate_map (str "\\/") p_conjunctivePats pats
and (p_conjunctivePats :
  FStar_Parser_AST.term Prims.list -> FStar_Pprint.document) =
  (* Separates the terms of one conjunct with a breakable semicolon. *)
  fun pats ->
    let sep = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in
    FStar_Pprint.group (FStar_Pprint.separate_map sep p_appTerm pats)
and (p_simpleTerm :
  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Prints a lambda "fun pats -> body", wrapped in parens via paren_if when
     ps is set; any other term falls through to the iff-level printer.
     pb is forwarded to the body printer. *)
  fun ps ->
    fun pb ->
      fun e ->
        match e.FStar_Parser_AST.tm with
        | FStar_Parser_AST.Abs (pats, e1) ->
            let uu___ = p_term_sep false pb e1 in
            (match uu___ with
             | (comm, doc) ->
                 let prefix =
                   let uu___1 = str "fun" in
                   let uu___2 =
                     let uu___3 =
                       FStar_Pprint.separate_map break1 p_atomicPattern pats in
                     FStar_Pprint.op_Hat_Slash_Hat uu___3 FStar_Pprint.rarrow in
                   op_Hat_Slash_Plus_Hat uu___1 uu___2 in
                 let uu___1 =
                   (* A non-empty body comment forces the comment and body
                      onto their own hardline-separated lines; otherwise the
                      whole lambda stays groupable on one line. *)
                   if comm <> FStar_Pprint.empty
                   then
                     let uu___2 =
                       let uu___3 =
                         let uu___4 =
                           FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in
                         FStar_Pprint.op_Hat_Hat comm uu___4 in
                       FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in
                     FStar_Pprint.op_Hat_Hat prefix uu___2
                   else
                     (let uu___3 = op_Hat_Slash_Plus_Hat prefix doc in
                      FStar_Pprint.group uu___3) in
                 let uu___2 = paren_if ps in uu___2 uu___1)
        | uu___ -> p_tmIff e
and (p_maybeFocusArrow : Prims.bool -> FStar_Pprint.document) =
  (* "~>" for a focused branch, otherwise the plain arrow. *)
  fun focus ->
    match focus with | true -> str "~>" | false -> FStar_Pprint.rarrow
and (p_patternBranch :
  Prims.bool ->
    (FStar_Parser_AST.pattern * FStar_Parser_AST.term
      FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->
      FStar_Pprint.document)
  =
  (* Prints one match branch "| pat -> e" or "| pat when f -> e".
     pb selects between a grouped layout (with ifflat alternatives for a
     body-attached comment) and a plain indented layout. *)
  fun pb ->
    fun uu___ ->
      match uu___ with
      | (pat, when_opt, e) ->
          (* Prints the branch for a single (non-disjunctive) pattern,
             handling an optional comment attached to the body. *)
          let one_pattern_branch p =
            let branch =
              match when_opt with
              | FStar_Pervasives_Native.None ->
                  let uu___1 =
                    let uu___2 =
                      let uu___3 =
                        let uu___4 = p_tuplePattern p in
                        let uu___5 =
                          FStar_Pprint.op_Hat_Hat FStar_Pprint.space
                            FStar_Pprint.rarrow in
                        FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in
                  FStar_Pprint.group uu___1
              | FStar_Pervasives_Native.Some f ->
                  let uu___1 =
                    let uu___2 =
                      let uu___3 =
                        let uu___4 =
                          let uu___5 =
                            let uu___6 = p_tuplePattern p in
                            let uu___7 = str "when" in
                            FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in
                          FStar_Pprint.group uu___5 in
                        let uu___5 =
                          let uu___6 =
                            let uu___7 = p_tmFormula f in
                            [uu___7; FStar_Pprint.rarrow] in
                          FStar_Pprint.flow break1 uu___6 in
                        FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in
                  FStar_Pprint.hang (Prims.of_int (2)) uu___1 in
            let uu___1 = p_term_sep false pb e in
            match uu___1 with
            | (comm, doc) ->
                if pb
                then
                  (if comm = FStar_Pprint.empty
                   then
                     let uu___2 = op_Hat_Slash_Plus_Hat branch doc in
                     FStar_Pprint.group uu___2
                   else
                     (* With a body comment: flat form puts comment after the
                        body, the fallback jumps the body to its own line. *)
                     (let uu___3 =
                        let uu___4 =
                          let uu___5 =
                            let uu___6 =
                              let uu___7 =
                                FStar_Pprint.op_Hat_Hat break1 comm in
                              FStar_Pprint.op_Hat_Hat doc uu___7 in
                            op_Hat_Slash_Plus_Hat branch uu___6 in
                          FStar_Pprint.group uu___5 in
                        let uu___5 =
                          let uu___6 =
                            let uu___7 =
                              inline_comment_or_above comm doc
                                FStar_Pprint.empty in
                            jump2 uu___7 in
                          FStar_Pprint.op_Hat_Hat branch uu___6 in
                        FStar_Pprint.ifflat uu___4 uu___5 in
                      FStar_Pprint.group uu___3))
                else
                  if comm <> FStar_Pprint.empty
                  then
                    (let uu___3 =
                       let uu___4 =
                         FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in
                       FStar_Pprint.op_Hat_Hat comm uu___4 in
                     op_Hat_Slash_Plus_Hat branch uu___3)
                  else op_Hat_Slash_Plus_Hat branch doc in
          (* A disjunctive pattern prints every alternative on its own
             "| ..." line and attaches the branch body to the last one. *)
          (match pat.FStar_Parser_AST.pat with
           | FStar_Parser_AST.PatOr pats ->
               (match FStar_Compiler_List.rev pats with
                | hd::tl ->
                    let last_pat_branch = one_pattern_branch hd in
                    let uu___1 =
                      let uu___2 =
                        let uu___3 =
                          let uu___4 =
                            let uu___5 =
                              let uu___6 =
                                FStar_Pprint.op_Hat_Hat FStar_Pprint.bar
                                  FStar_Pprint.space in
                              FStar_Pprint.op_Hat_Hat break1 uu___6 in
                            FStar_Pprint.separate_map uu___5 p_tuplePattern
                              (FStar_Compiler_List.rev tl) in
                          FStar_Pprint.op_Hat_Slash_Hat uu___4
                            last_pat_branch in
                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in
                    FStar_Pprint.group uu___1
                | [] ->
                    failwith "Impossible: disjunctive pattern can't be empty")
           | uu___1 -> one_pattern_branch pat)
and (p_tmIff : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* "<==>" prints right-associatively: lhs at implies level, rhs recursively
     at iff level. *)
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (id, e1::e2::[]) when
        (FStar_Ident.string_of_id id) = "<==>" ->
        let op_doc = str "<==>" in
        let lhs = p_tmImplies e1 in
        let rhs = p_tmIff e2 in infix0 op_doc lhs rhs
    | uu___ -> p_tmImplies e
and (p_tmImplies : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* "==>" prints right-associatively: lhs at arrow/formula level, rhs
     recursively at implies level. *)
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (id, e1::e2::[]) when
        (FStar_Ident.string_of_id id) = "==>" ->
        let op_doc = str "==>" in
        let lhs =
          p_tmArrow (Arrows ((Prims.of_int (2)), (Prims.of_int (2)))) false
            p_tmFormula e1 in
        let rhs = p_tmImplies e2 in infix0 op_doc lhs rhs
    | uu___ ->
        p_tmArrow (Arrows ((Prims.of_int (2)), (Prims.of_int (2)))) false
          p_tmFormula e
and (format_sig :
  annotation_style ->
    FStar_Pprint.document Prims.list ->
      FStar_Pprint.document ->
        Prims.bool -> Prims.bool -> FStar_Pprint.document)
  =
  (* Lays out a signature from its argument docs (terms) and return doc
     (ret_d).  Arrows style separates arguments with " -> " and ends with
     "-> "; Binders style separates with soft breaks and ends with ": ".
     no_last_op suppresses that final separator; flat_space prepends a space
     in the flat rendering.  The nested ifflat/group calls choose between a
     one-line layout and indented multi-line alternatives. *)
  fun style ->
    fun terms ->
      fun ret_d ->
        fun no_last_op ->
          fun flat_space ->
            let uu___ =
              match style with
              | Arrows (n, ln) ->
                  let uu___1 =
                    let uu___2 =
                      FStar_Pprint.op_Hat_Hat FStar_Pprint.rarrow break1 in
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in
                  let uu___2 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.rarrow
                      FStar_Pprint.space in
                  (n, ln, uu___1, uu___2)
              | Binders (n, ln, parens) ->
                  let uu___1 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.colon
                      FStar_Pprint.space in
                  (n, ln, break1, uu___1) in
            match uu___ with
            | (n, last_n, sep, last_op) ->
                (* The final separator is emitted only when there is at least
                   one argument and no_last_op is not set. *)
                let last_op1 =
                  if
                    ((FStar_Compiler_List.length terms) > Prims.int_zero) &&
                      (Prims.op_Negation no_last_op)
                  then last_op
                  else FStar_Pprint.empty in
                let one_line_space =
                  if
                    (Prims.op_Negation (ret_d = FStar_Pprint.empty)) ||
                      (Prims.op_Negation no_last_op)
                  then FStar_Pprint.space
                  else FStar_Pprint.empty in
                let single_line_arg_indent =
                  FStar_Pprint.repeat n FStar_Pprint.space in
                let fs =
                  if flat_space
                  then FStar_Pprint.space
                  else FStar_Pprint.empty in
                (* No arguments: just the return doc.  Otherwise: outer
                   ifflat picks "fs args one_line_space last_op ret" when it
                   fits, else an indented form whose arguments themselves get
                   a flat/jumped ifflat, followed by the hanging return. *)
                (match FStar_Compiler_List.length terms with
                 | uu___1 when uu___1 = Prims.int_zero -> ret_d
                 | uu___1 ->
                     let uu___2 =
                       let uu___3 =
                         let uu___4 =
                           let uu___5 = FStar_Pprint.separate sep terms in
                           let uu___6 =
                             let uu___7 =
                               FStar_Pprint.op_Hat_Hat last_op1 ret_d in
                             FStar_Pprint.op_Hat_Hat one_line_space uu___7 in
                           FStar_Pprint.op_Hat_Hat uu___5 uu___6 in
                         FStar_Pprint.op_Hat_Hat fs uu___4 in
                       let uu___4 =
                         let uu___5 =
                           let uu___6 =
                             let uu___7 =
                               let uu___8 = FStar_Pprint.separate sep terms in
                               FStar_Pprint.op_Hat_Hat fs uu___8 in
                             let uu___8 =
                               let uu___9 =
                                 let uu___10 =
                                   let uu___11 =
                                     FStar_Pprint.op_Hat_Hat sep
                                       single_line_arg_indent in
                                   let uu___12 =
                                     FStar_Compiler_List.map
                                       (fun x ->
                                          let uu___13 =
                                            FStar_Pprint.hang
                                              (Prims.of_int (2)) x in
                                          FStar_Pprint.align uu___13) terms in
                                   FStar_Pprint.separate uu___11 uu___12 in
                                 FStar_Pprint.op_Hat_Hat
                                   single_line_arg_indent uu___10 in
                               jump2 uu___9 in
                             FStar_Pprint.ifflat uu___7 uu___8 in
                           FStar_Pprint.group uu___6 in
                         let uu___6 =
                           let uu___7 =
                             let uu___8 =
                               FStar_Pprint.op_Hat_Hat last_op1 ret_d in
                             FStar_Pprint.hang last_n uu___8 in
                           FStar_Pprint.align uu___7 in
                         FStar_Pprint.prefix n Prims.int_one uu___5 uu___6 in
                       FStar_Pprint.ifflat uu___3 uu___4 in
                     FStar_Pprint.group uu___2)
and (p_tmArrow :
  annotation_style ->
    Prims.bool ->
      (FStar_Parser_AST.term -> FStar_Pprint.document) ->
        FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  (* Prints an arrow type: Arrows style flattens the products directly,
     Binders style additionally collapses equal binders; the pieces are then
     laid out by format_sig. *)
  fun style ->
    fun flat_space ->
      fun p_Tm ->
        fun e ->
          let (terms, ret_d) =
            match style with
            | Arrows uu___ -> p_tmArrow' p_Tm e
            | Binders uu___ -> collapse_binders style p_Tm e in
          format_sig style terms ret_d false flat_space
and (p_tmArrow' :
  (FStar_Parser_AST.term -> FStar_Pprint.document) ->
    FStar_Parser_AST.term ->
      (FStar_Pprint.document Prims.list * FStar_Pprint.document))
  =
  (* Flattens nested Product nodes into (argument binder docs, return doc),
     printing the non-product tail with p_Tm. *)
  fun p_Tm ->
    fun e ->
      match e.FStar_Parser_AST.tm with
      | FStar_Parser_AST.Product (bs, tgt) ->
          let bs_ds = FStar_Compiler_List.map (fun b -> p_binder false b) bs in
          let (rest, ret) = p_tmArrow' p_Tm tgt in
          ((FStar_Compiler_List.op_At bs_ds rest), ret)
      | uu___ -> let ret = p_Tm e in ([], ret)
and (collapse_binders :
  annotation_style ->
    (FStar_Parser_AST.term -> FStar_Pprint.document) ->
      FStar_Parser_AST.term ->
        (FStar_Pprint.document Prims.list * FStar_Pprint.document))
  =
  (* Like p_tmArrow' but merges consecutive Product binders that share the
     same printed type (when both are joinable), returning the merged binder
     docs and the return doc. *)
  fun style ->
    fun p_Tm ->
      fun e ->
        (* Binders style carries an "atomize" flag in its third field. *)
        let atomize =
          match style with | Binders (uu___, uu___1, a) -> a | uu___ -> false in
        (* Typeclass args always get tc_arg; others get soft parens only
           when atomizing. *)
        let wrap is_tc doc =
          if is_tc
          then tc_arg doc
          else if atomize then soft_parens_with_nesting doc else doc in
        (* Collects ((binder doc, optional type info), is-tc, joinable) for
           every Product binder, plus the printed non-product tail. *)
        let rec accumulate_binders p_Tm1 e1 =
          match e1.FStar_Parser_AST.tm with
          | FStar_Parser_AST.Product (bs, tgt) ->
              let bs_ds =
                FStar_Compiler_List.map
                  (fun b ->
                     let uu___ = p_binder' true false b in
                     let uu___1 = is_tc_binder b in
                     let uu___2 = is_joinable_binder b in
                     (uu___, uu___1, uu___2)) bs in
              let uu___ = accumulate_binders p_Tm1 tgt in
              (match uu___ with
               | (bs_ds', ret) ->
                   ((FStar_Compiler_List.op_At bs_ds bs_ds'), ret))
          | uu___ -> let uu___1 = p_Tm1 e1 in ([], uu___1) in
        (* A binder joins the head group only when both carry a type, the
           type docs are structurally equal, and both are joinable. *)
        let fold_fun bs x =
          let uu___ = x in
          match uu___ with
          | ((b1, t1), tc1, j1) ->
              (match bs with
               | [] -> [([b1], t1, tc1, j1)]
               | hd::tl ->
                   let uu___1 = hd in
                   (match uu___1 with
                    | (b2s, t2, tc2, j2) ->
                        (match (t1, t2) with
                         | (FStar_Pervasives_Native.Some (typ1, catf1),
                            FStar_Pervasives_Native.Some (typ2, uu___2)) when
                             ((typ1 = typ2) && j1) && j2 ->
                             ((FStar_Compiler_List.op_At b2s [b1]), t1,
                               false, true)
                             :: tl
                         | uu___2 -> ([b1], t1, tc1, j1) :: bs))) in
        (* Renders a merged group: space-separated binder docs combined with
           the type via the stored combinator f, then wrapped. *)
        let p_collapsed_binder cb =
          let uu___ = cb in
          match uu___ with
          | (bs, t, is_tc, uu___1) ->
              (match t with
               | FStar_Pervasives_Native.None ->
                   (match bs with
                    | b::[] -> wrap is_tc b
                    | uu___2 -> failwith "Impossible")
               | FStar_Pervasives_Native.Some (typ, f) ->
                   (match bs with
                    | [] -> failwith "Impossible"
                    | hd::tl ->
                        let uu___2 =
                          let uu___3 =
                            FStar_Compiler_List.fold_left
                              (fun x ->
                                 fun y ->
                                   let uu___4 =
                                     FStar_Pprint.op_Hat_Hat
                                       FStar_Pprint.space y in
                                   FStar_Pprint.op_Hat_Hat x uu___4) hd tl in
                          f uu___3 typ in
                        FStar_Compiler_Effect.op_Less_Bar (wrap is_tc) uu___2)) in
        let uu___ = accumulate_binders p_Tm e in
        match uu___ with
        | (bs_ds, ret_d) ->
            let binders = FStar_Compiler_List.fold_left fold_fun [] bs_ds in
            let uu___1 = map_rev p_collapsed_binder binders in
            (uu___1, ret_d)
(* Pretty-prints a logical formula, laying out its disjuncts and conjuncts.
   [p_tmDisjunction] yields a list of lists of documents: the outer list is
   the "\/"-separated disjuncts, each inner list the "/\"-separated
   conjuncts of that disjunct.  Conjunctions are flowed (and grouped) inside
   each disjunct, so line breaks prefer to fall between disjuncts. *)
and (p_tmFormula : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    (* separator document: " /\ " with a breakable point after it *)
    let conj =
      let uu___ =
        let uu___1 = str "/\\" in FStar_Pprint.op_Hat_Hat uu___1 break1 in
      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___ in
    (* separator document: " \/ " with a breakable point after it *)
    let disj =
      let uu___ =
        let uu___1 = str "\\/" in FStar_Pprint.op_Hat_Hat uu___1 break1 in
      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___ in
    let formula = p_tmDisjunction e in
    FStar_Pprint.flow_map disj
      (fun d -> FStar_Pprint.flow_map conj (fun x -> FStar_Pprint.group x) d)
      formula
(* Flattens a left-associated chain of "\/" applications into a list:
   each element of the result is one disjunct, rendered as the document
   list produced by [p_tmConjunction].  A term that is not a "\/"
   application is a single disjunct. *)
and (p_tmDisjunction :
  FStar_Parser_AST.term -> FStar_Pprint.document Prims.list Prims.list) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (id, e1::e2::[]) when
        let uu___ = FStar_Ident.string_of_id id in uu___ = "\\/" ->
        (* recurse into the left operand; the right operand is a leaf *)
        let uu___ = p_tmDisjunction e1 in
        let uu___1 = let uu___2 = p_tmConjunction e2 in [uu___2] in
        FStar_Compiler_List.op_At uu___ uu___1
    | uu___ -> let uu___1 = p_tmConjunction e in [uu___1]
(* Flattens a left-associated chain of "/\" applications into a list of
   documents, one per conjunct; each conjunct is printed with [p_tmTuple].
   A term that is not a "/\" application is a single conjunct. *)
and (p_tmConjunction :
  FStar_Parser_AST.term -> FStar_Pprint.document Prims.list) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (id, e1::e2::[]) when
        let uu___ = FStar_Ident.string_of_id id in uu___ = "/\\" ->
        (* recurse into the left operand; the right operand is a leaf *)
        let uu___ = p_tmConjunction e1 in
        let uu___1 = let uu___2 = p_tmTuple e2 in [uu___2] in
        FStar_Compiler_List.op_At uu___ uu___1
    | uu___ -> let uu___1 = p_tmTuple e in [uu___1]
and (p_tmTuple : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Print a tuple-level term, attaching any source comments that fall
     inside its range via [with_comment]. *)
  fun t -> with_comment p_tmTuple' t t.FStar_Parser_AST.range
(* Prints a tuple constructor application "e1, e2, ..." as a
   comma-and-break separated sequence; anything that is not a fully
   explicit tuple construction falls through to the "=" precedence
   layer [p_tmEq]. *)
and (p_tmTuple' : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Construct (lid, args) when
        (is_tuple_constructor lid) && (all1_explicit args) ->
        let uu___ = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in
        FStar_Pprint.separate_map uu___
          (fun uu___1 -> match uu___1 with | (e1, uu___2) -> p_tmEq e1) args
    | uu___ -> p_tmEq e
and (paren_if_gt :
  Prims.int -> Prims.int -> FStar_Pprint.document -> FStar_Pprint.document) =
  (* Wraps [doc] in a grouped pair of parentheses exactly when its own
     precedence level [mine] is greater than the surrounding level [curr];
     a level less than or equal to the context needs no parentheses. *)
  fun curr ->
    fun mine ->
      fun doc ->
        if mine <= curr
        then doc
        else
          FStar_Pprint.group
            (FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen
               (FStar_Pprint.op_Hat_Hat doc FStar_Pprint.rparen))
(* Entry point of the "equality layer" printer: starts [p_tmEqWith']
   at the maximum precedence level among ":=", "|>" and the infix
   operator classes 0a/0b/0c/0d/1/2, with [p_X] as the printer used
   once all equality-layer operators have been consumed. *)
and (p_tmEqWith :
  (FStar_Parser_AST.term -> FStar_Pprint.document) ->
    FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun p_X ->
    fun e ->
      let n =
        max_level
          (FStar_Compiler_List.op_At [colon_equals; pipe_right]
             operatorInfix0ad12) in
      p_tmEqWith' p_X n e
(* Precedence-directed printer for the equality layer.  [curr] is the
   precedence level of the surrounding context; operands are printed at
   the [left]/[right] levels given by [levels] and the whole application
   is parenthesized via [paren_if_gt] when needed. *)
and (p_tmEqWith' :
  (FStar_Parser_AST.term -> FStar_Pprint.document) ->
    Prims.int -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun p_X ->
    fun curr ->
      fun e ->
        match e.FStar_Parser_AST.tm with
        (* infix operators of classes 0a..2, "=" and "|>" — but not the
           implication arrows, which are handled at the formula layer *)
        | FStar_Parser_AST.Op (op, e1::e2::[]) when
            (let uu___ =
               (let uu___1 = FStar_Ident.string_of_id op in uu___1 = "==>")
                 ||
                 (let uu___1 = FStar_Ident.string_of_id op in uu___1 = "<==>") in
             Prims.op_Negation uu___) &&
              (((is_operatorInfix0ad12 op) ||
                  (let uu___ = FStar_Ident.string_of_id op in uu___ = "="))
                 || (let uu___ = FStar_Ident.string_of_id op in uu___ = "|>"))
            ->
            let op1 = FStar_Ident.string_of_id op in
            let uu___ = levels op1 in
            (match uu___ with
             | (left, mine, right) ->
                 let uu___1 =
                   let uu___2 = FStar_Compiler_Effect.op_Less_Bar str op1 in
                   let uu___3 = p_tmEqWith' p_X left e1 in
                   let uu___4 = p_tmEqWith' p_X right e2 in
                   infix0 uu___2 uu___3 uu___4 in
                 paren_if_gt curr mine uu___1)
        (* assignment "e1 := e2" is rendered as "e1 : = e2" pieces glued
           with a breakable point before "=" *)
        | FStar_Parser_AST.Op (id, e1::e2::[]) when
            let uu___ = FStar_Ident.string_of_id id in uu___ = ":=" ->
            let uu___ =
              let uu___1 = p_tmEqWith p_X e1 in
              let uu___2 =
                let uu___3 =
                  let uu___4 =
                    let uu___5 = p_tmEqWith p_X e2 in
                    op_Hat_Slash_Plus_Hat FStar_Pprint.equals uu___5 in
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___4 in
                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
            FStar_Pprint.group uu___
        (* unary minus: operand printed at the operator's own level *)
        | FStar_Parser_AST.Op (id, e1::[]) when
            let uu___ = FStar_Ident.string_of_id id in uu___ = "-" ->
            let uu___ = levels "-" in
            (match uu___ with
             | (left, mine, right) ->
                 let uu___1 = p_tmEqWith' p_X mine e1 in
                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.minus uu___1)
        (* not an equality-layer operator: descend to the no-eq layer *)
        | uu___ -> p_tmNoEqWith p_X e
(* Entry point of the "no-equality layer" printer: starts [p_tmNoEqWith']
   (outside any tuple) at the maximum level among "::", "&" and the
   infix operator classes 3/4. *)
and (p_tmNoEqWith :
  (FStar_Parser_AST.term -> FStar_Pprint.document) ->
    FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun p_X ->
    fun e ->
      let n = max_level [colon_colon; amp; opinfix3; opinfix4] in
      p_tmNoEqWith' false p_X n e
(* Precedence-directed printer for operators below the equality layer:
   list cons "::", dependent sums "&", tuples "*", operator classes 3/4,
   records and "~".  [inside_tuple] tracks whether we are already under a
   "*" so that nested tuple components are not re-parenthesized. *)
and (p_tmNoEqWith' :
  Prims.bool ->
    (FStar_Parser_AST.term -> FStar_Pprint.document) ->
      Prims.int -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun inside_tuple ->
    fun p_X ->
      fun curr ->
        fun e ->
          match e.FStar_Parser_AST.tm with
          (* cons cell that is not part of a list literal: print as "::" *)
          | FStar_Parser_AST.Construct (lid, (e1, uu___)::(e2, uu___1)::[])
              when
              (FStar_Ident.lid_equals lid FStar_Parser_Const.cons_lid) &&
                (let uu___2 = is_list e in Prims.op_Negation uu___2)
              ->
              let op = "::" in
              let uu___2 = levels op in
              (match uu___2 with
               | (left, mine, right) ->
                   let uu___3 =
                     let uu___4 = str op in
                     let uu___5 = p_tmNoEqWith' false p_X left e1 in
                     let uu___6 = p_tmNoEqWith' false p_X right e2 in
                     infix0 uu___4 uu___5 uu___6 in
                   paren_if_gt curr mine uu___3)
          (* dependent sum: "b1 & b2 & ... & res" *)
          | FStar_Parser_AST.Sum (binders, res) ->
              let op = "&" in
              let uu___ = levels op in
              (match uu___ with
               | (left, mine, right) ->
                   (* each component is followed by " & " with a break *)
                   let p_dsumfst bt =
                     match bt with
                     | FStar_Pervasives.Inl b ->
                         let uu___1 = p_binder false b in
                         let uu___2 =
                           let uu___3 =
                             let uu___4 = str op in
                             FStar_Pprint.op_Hat_Hat uu___4 break1 in
                           FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
                         FStar_Pprint.op_Hat_Hat uu___1 uu___2
                     | FStar_Pervasives.Inr t ->
                         let uu___1 = p_tmNoEqWith' false p_X left t in
                         let uu___2 =
                           let uu___3 =
                             let uu___4 = str op in
                             FStar_Pprint.op_Hat_Hat uu___4 break1 in
                           FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in
                         FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
                   let uu___1 =
                     let uu___2 = FStar_Pprint.concat_map p_dsumfst binders in
                     let uu___3 = p_tmNoEqWith' false p_X right res in
                     FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
                   paren_if_gt curr mine uu___1)
          (* tuple type "e1 * e2" when tuples are being unfolded; an inner
             tuple ("inside_tuple" set) is printed without parentheses *)
          | FStar_Parser_AST.Op (id, e1::e2::[]) when
              (let uu___ = FStar_Ident.string_of_id id in uu___ = "*") &&
                (FStar_Compiler_Effect.op_Bang unfold_tuples)
              ->
              let op = "*" in
              let uu___ = levels op in
              (match uu___ with
               | (left, mine, right) ->
                   if inside_tuple
                   then
                     let uu___1 = str op in
                     let uu___2 = p_tmNoEqWith' true p_X left e1 in
                     let uu___3 = p_tmNoEqWith' true p_X right e2 in
                     infix0 uu___1 uu___2 uu___3
                   else
                     (let uu___2 =
                        let uu___3 = str op in
                        let uu___4 = p_tmNoEqWith' true p_X left e1 in
                        let uu___5 = p_tmNoEqWith' true p_X right e2 in
                        infix0 uu___3 uu___4 uu___5 in
                      paren_if_gt curr mine uu___2))
          (* generic infix operators of classes 3 and 4 *)
          | FStar_Parser_AST.Op (op, e1::e2::[]) when is_operatorInfix34 op
              ->
              let op1 = FStar_Ident.string_of_id op in
              let uu___ = levels op1 in
              (match uu___ with
               | (left, mine, right) ->
                   let uu___1 =
                     let uu___2 = str op1 in
                     let uu___3 = p_tmNoEqWith' false p_X left e1 in
                     let uu___4 = p_tmNoEqWith' false p_X right e2 in
                     infix0 uu___2 uu___3 uu___4 in
                   paren_if_gt curr mine uu___1)
          (* record literal "{ e with f1 = ...; ... }" *)
          | FStar_Parser_AST.Record (with_opt, record_fields) ->
              let uu___ =
                let uu___1 =
                  default_or_map FStar_Pprint.empty p_with_clause with_opt in
                let uu___2 =
                  let uu___3 =
                    FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in
                  separate_map_last uu___3 p_simpleDef record_fields in
                FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
              braces_with_nesting uu___
          (* logical negation "~e1" *)
          | FStar_Parser_AST.Op (id, e1::[]) when
              let uu___ = FStar_Ident.string_of_id id in uu___ = "~" ->
              let uu___ =
                let uu___1 = str "~" in
                let uu___2 = p_atomicTerm e1 in
                FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
              FStar_Pprint.group uu___
          (* a parenthesized "*" inside a tuple keeps the existing
             parenthesization decision via paren_if_gt *)
          | FStar_Parser_AST.Paren p when inside_tuple ->
              (match p.FStar_Parser_AST.tm with
               | FStar_Parser_AST.Op (id, e1::e2::[]) when
                   let uu___ = FStar_Ident.string_of_id id in uu___ = "*" ->
                   let op = "*" in
                   let uu___ = levels op in
                   (match uu___ with
                    | (left, mine, right) ->
                        let uu___1 =
                          let uu___2 = str op in
                          let uu___3 = p_tmNoEqWith' true p_X left e1 in
                          let uu___4 = p_tmNoEqWith' true p_X right e2 in
                          infix0 uu___2 uu___3 uu___4 in
                        paren_if_gt curr mine uu___1)
               | uu___ -> p_X e)
          | uu___ -> p_X e
and (p_tmEqNoRefinement : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Equality layer with application terms as leaves (refinements skipped). *)
  fun t -> p_tmEqWith p_appTerm t
and (p_tmEq : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Equality layer with refinement types allowed as leaves. *)
  fun t -> p_tmEqWith p_tmRefinement t
and (p_tmNoEq : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* No-equality layer with refinement types allowed as leaves. *)
  fun t -> p_tmNoEqWith p_tmRefinement t
(* Prints refinement-level terms: a named type ascription "x : t", a
   refined binder "x:t{phi}", or falls back to application terms. *)
and (p_tmRefinement : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.NamedTyp (lid, e1) ->
        (* "lid : e1" with breakable points around the colon *)
        let uu___ =
          let uu___1 = p_lident lid in
          let uu___2 =
            let uu___3 = p_appTerm e1 in
            FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___3 in
          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.Refine (b, phi) -> p_refinedBinder b phi
    | uu___ -> p_appTerm e
and (p_with_clause : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Prints the record-update head "<e> with " that precedes the field
     assignments of a record literal. *)
  fun e ->
    let head_doc = p_appTerm e in
    let with_kw =
      FStar_Pprint.op_Hat_Hat FStar_Pprint.space
        (FStar_Pprint.op_Hat_Hat (str "with") break1) in
    FStar_Pprint.op_Hat_Hat head_doc with_kw
(* Prints a refined binder "b{phi}".  Only annotated and plain-variable
   binders can legally carry a refinement; a variable without annotation
   is given a Wild type.  The remaining binder forms indicate an internal
   invariant violation and fail loudly. *)
and (p_refinedBinder :
  FStar_Parser_AST.binder -> FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun b ->
    fun phi ->
      match b.FStar_Parser_AST.b with
      | FStar_Parser_AST.Annotated (lid, t) ->
          let uu___ = p_lident lid in
          p_refinement b.FStar_Parser_AST.aqual
            b.FStar_Parser_AST.battributes uu___ t phi
      | FStar_Parser_AST.Variable lid ->
          (* unannotated variable: synthesize a Wild type at its range *)
          let uu___ = p_lident lid in
          let uu___1 =
            let uu___2 = FStar_Ident.range_of_id lid in
            FStar_Parser_AST.mk_term FStar_Parser_AST.Wild uu___2
              FStar_Parser_AST.Type_level in
          p_refinement b.FStar_Parser_AST.aqual
            b.FStar_Parser_AST.battributes uu___ uu___1 phi
      | FStar_Parser_AST.TAnnotated uu___ -> failwith "Is this still used ?"
      | FStar_Parser_AST.TVariable uu___ ->
          let uu___1 =
            let uu___2 = FStar_Parser_AST.binder_to_string b in
            FStar_Compiler_Util.format1
              "Impossible: a refined binder ought to be annotated (%s)"
              uu___2 in
          failwith uu___1
      | FStar_Parser_AST.NoName uu___ ->
          let uu___1 =
            let uu___2 = FStar_Parser_AST.binder_to_string b in
            FStar_Compiler_Util.format1
              "Impossible: a refined binder ought to be annotated (%s)"
              uu___2 in
          failwith uu___1
(* Prints a single record-field definition "lid = e" as a group with
   breakable points around "=".  [ps] is forwarded to the term printer
   (it controls trailing-separator/comment placement downstream). *)
and (p_simpleDef :
  Prims.bool ->
    (FStar_Ident.lid * FStar_Parser_AST.term) -> FStar_Pprint.document)
  =
  fun ps ->
    fun uu___ ->
      match uu___ with
      | (lid, e) ->
          let uu___1 =
            let uu___2 = p_qlident lid in
            let uu___3 =
              let uu___4 = p_noSeqTermAndComment ps false e in
              FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.equals uu___4 in
            FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in
          FStar_Pprint.group uu___1
(* Prints application terms: infix backtick applications "e1 `f` e2",
   general head/argument applications, and data-constructor applications;
   everything else descends to indexing terms. *)
and (p_appTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.App uu___ when is_general_application e ->
        let uu___1 = head_and_args e in
        (match uu___1 with
         | (head, args) ->
             (match args with
              (* binary application marked Infix: render as e1 `head` e2 *)
              | e1::e2::[] when
                  (FStar_Pervasives_Native.snd e1) = FStar_Parser_AST.Infix
                  ->
                  let uu___2 = p_argTerm e1 in
                  let uu___3 =
                    let uu___4 =
                      let uu___5 =
                        let uu___6 = str "`" in
                        let uu___7 =
                          let uu___8 = p_indexingTerm head in
                          let uu___9 = str "`" in
                          FStar_Pprint.op_Hat_Hat uu___8 uu___9 in
                        FStar_Pprint.op_Hat_Hat uu___6 uu___7 in
                      FStar_Pprint.group uu___5 in
                    let uu___5 = p_argTerm e2 in
                    FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in
                  FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3
              (* ordinary application: head followed by flowed arguments *)
              | uu___2 ->
                  let uu___3 =
                    let uu___4 = p_indexingTerm head in (uu___4, args) in
                  (match uu___3 with
                   | (head_doc, args1) ->
                       let uu___4 =
                         let uu___5 =
                           FStar_Pprint.op_Hat_Hat head_doc
                             FStar_Pprint.space in
                         soft_surround_map_or_flow (Prims.of_int (2))
                           Prims.int_zero head_doc uu___5 break1
                           FStar_Pprint.empty p_argTerm args1 in
                       FStar_Pprint.group uu___4)))
    (* data-constructor application (dependent tuples are handled by the
       atomic-term printer, hence the negated guard) *)
    | FStar_Parser_AST.Construct (lid, args) when
        (is_general_construction e) &&
          (let uu___ = (is_dtuple_constructor lid) && (all1_explicit args) in
           Prims.op_Negation uu___)
        ->
        (match args with
         | [] -> p_quident lid
         | arg::[] ->
             let uu___ =
               let uu___1 = p_quident lid in
               let uu___2 = p_argTerm arg in
               FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
             FStar_Pprint.group uu___
         | hd::tl ->
             (* first argument on the head line, the rest jump-indented *)
             let uu___ =
               let uu___1 =
                 let uu___2 =
                   let uu___3 = p_quident lid in
                   let uu___4 = p_argTerm hd in prefix2 uu___3 uu___4 in
                 FStar_Pprint.group uu___2 in
               let uu___2 =
                 let uu___3 = FStar_Pprint.separate_map break1 p_argTerm tl in
                 jump2 uu___3 in
               FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
             FStar_Pprint.group uu___)
    | uu___ -> p_indexingTerm e
(* Prints one application argument together with its implicitness marker:
   universe applications "u#...", F#-style type applications "<e>" (with a
   warning, since output may be wrong), implicit "#e", attributed implicit
   "#[t]e", and plain/infix arguments as indexing terms. *)
and (p_argTerm :
  (FStar_Parser_AST.term * FStar_Parser_AST.imp) -> FStar_Pprint.document) =
  fun arg_imp ->
    match arg_imp with
    | (u, FStar_Parser_AST.UnivApp) -> p_universe u
    | (e, FStar_Parser_AST.FsTypApp) ->
        (* not expected from the F* parser; warn but still print "<e>" *)
        (FStar_Errors.log_issue e.FStar_Parser_AST.range
           (FStar_Errors_Codes.Warning_UnexpectedFsTypApp,
             "Unexpected FsTypApp, output might not be formatted correctly.");
         (let uu___1 = p_indexingTerm e in
          FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one
            FStar_Pprint.langle uu___1 FStar_Pprint.rangle))
    | (e, FStar_Parser_AST.Hash) ->
        let uu___ = str "#" in
        let uu___1 = p_indexingTerm e in FStar_Pprint.op_Hat_Hat uu___ uu___1
    | (e, FStar_Parser_AST.HashBrace t) ->
        (* "#[t]e": attribute term t in brackets before the argument *)
        let uu___ = str "#[" in
        let uu___1 =
          let uu___2 = p_indexingTerm t in
          let uu___3 =
            let uu___4 = str "]" in
            let uu___5 = p_indexingTerm e in
            FStar_Pprint.op_Hat_Hat uu___4 uu___5 in
          FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | (e, FStar_Parser_AST.Infix) -> p_indexingTerm e
    | (e, FStar_Parser_AST.Nothing) -> p_indexingTerm e
(* Prints indexing operators ".()" and ".[]" as "e1.(e2)" / "e1.[e2]";
   [exit] is the printer used when the term is not an indexing operation. *)
and (p_indexingTerm_aux :
  (FStar_Parser_AST.term -> FStar_Pprint.document) ->
    FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun exit ->
    fun e ->
      match e.FStar_Parser_AST.tm with
      | FStar_Parser_AST.Op (id, e1::e2::[]) when
          let uu___ = FStar_Ident.string_of_id id in uu___ = ".()" ->
          let uu___ =
            let uu___1 = p_indexingTerm_aux p_atomicTermNotQUident e1 in
            let uu___2 =
              let uu___3 =
                let uu___4 = p_term false false e2 in
                soft_parens_with_nesting uu___4 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in
            FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
          FStar_Pprint.group uu___
      | FStar_Parser_AST.Op (id, e1::e2::[]) when
          let uu___ = FStar_Ident.string_of_id id in uu___ = ".[]" ->
          let uu___ =
            let uu___1 = p_indexingTerm_aux p_atomicTermNotQUident e1 in
            let uu___2 =
              let uu___3 =
                let uu___4 = p_term false false e2 in
                soft_brackets_with_nesting uu___4 in
              FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in
            FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
          FStar_Pprint.group uu___
      | uu___ -> exit e
and (p_indexingTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =
  (* Indexing layer with atomic terms as the fallthrough. *)
  fun t -> p_indexingTerm_aux p_atomicTerm t
(* Prints atomic terms that may start with a qualified identifier:
   "Lid.(e)" let-open, bare names, nullary constructions, and general
   prefix operators applied to an atomic operand. *)
and (p_atomicTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.LetOpen (lid, e1) ->
        let uu___ = p_quident lid in
        let uu___1 =
          let uu___2 =
            let uu___3 = p_term false false e1 in
            soft_parens_with_nesting uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    | FStar_Parser_AST.Name lid -> p_quident lid
    | FStar_Parser_AST.Construct (lid, []) when is_general_construction e ->
        p_quident lid
    | FStar_Parser_AST.Op (op, e1::[]) when is_general_prefix_op op ->
        let uu___ = let uu___1 = FStar_Ident.string_of_id op in str uu___1 in
        let uu___1 = p_atomicTerm e1 in FStar_Pprint.op_Hat_Hat uu___ uu___1
    | uu___ -> p_atomicTermNotQUident e
(* Prints atomic terms that cannot start with a qualified uppercase
   identifier: wildcards, assert/assume, constants, True/False, prefix
   operators, sections "( op )", dependent tuples "(| ... |)" and
   projections "e.f". *)
and (p_atomicTermNotQUident : FStar_Parser_AST.term -> FStar_Pprint.document)
  =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Wild -> FStar_Pprint.underscore
    | FStar_Parser_AST.Var lid when
        FStar_Ident.lid_equals lid FStar_Parser_Const.assert_lid ->
        str "assert"
    | FStar_Parser_AST.Var lid when
        FStar_Ident.lid_equals lid FStar_Parser_Const.assume_lid ->
        str "assume"
    | FStar_Parser_AST.Tvar tv -> p_tvar tv
    | FStar_Parser_AST.Const c -> p_constant c
    | FStar_Parser_AST.Name lid when
        FStar_Ident.lid_equals lid FStar_Parser_Const.true_lid -> str "True"
    | FStar_Parser_AST.Name lid when
        FStar_Ident.lid_equals lid FStar_Parser_Const.false_lid ->
        str "False"
    | FStar_Parser_AST.Op (op, e1::[]) when is_general_prefix_op op ->
        let uu___ = let uu___1 = FStar_Ident.string_of_id op in str uu___1 in
        let uu___1 = p_atomicTermNotQUident e1 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    (* operator section: "( op )" with spaces inside the parentheses *)
    | FStar_Parser_AST.Op (op, []) ->
        let uu___ =
          let uu___1 =
            let uu___2 =
              let uu___3 = FStar_Ident.string_of_id op in str uu___3 in
            let uu___3 =
              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.rparen in
            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in
        FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___
    (* dependent tuple "(| e1, e2, ... |)" *)
    | FStar_Parser_AST.Construct (lid, args) when
        (is_dtuple_constructor lid) && (all1_explicit args) ->
        let uu___ =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen FStar_Pprint.bar in
        let uu___1 =
          let uu___2 = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in
          FStar_Pprint.separate_map uu___2
            (fun uu___3 -> match uu___3 with | (e1, uu___4) -> p_tmEq e1)
            args in
        let uu___2 =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rparen in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one uu___ uu___1
          uu___2
    (* record/field projection "e1.lid" *)
    | FStar_Parser_AST.Project (e1, lid) ->
        let uu___ =
          let uu___1 = p_atomicTermNotQUident e1 in
          let uu___2 =
            let uu___3 = p_qlident lid in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in
          FStar_Pprint.prefix (Prims.of_int (2)) Prims.int_zero uu___1 uu___2 in
        FStar_Pprint.group uu___
    | uu___ -> p_projectionLHS e
(* Last resort of the atomic-term printer: variables, projectors "C?.f",
   discriminators "C?", parenthesized terms (re-attaching any leading
   comment), array/list/ref-set literals, labeled terms, lex orderings —
   and, for every remaining non-atomic term form, a fallback that prints
   the full term wrapped in soft parentheses. *)
and (p_projectionLHS : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    match e.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Var lid -> p_qlident lid
    (* projector "Constr?.field" *)
    | FStar_Parser_AST.Projector (constr_lid, field_lid) ->
        let uu___ = p_quident constr_lid in
        let uu___1 =
          let uu___2 =
            let uu___3 = p_lident field_lid in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.qmark uu___2 in
        FStar_Pprint.op_Hat_Hat uu___ uu___1
    (* discriminator "Constr?" *)
    | FStar_Parser_AST.Discrim constr_lid ->
        let uu___ = p_quident constr_lid in
        FStar_Pprint.op_Hat_Hat uu___ FStar_Pprint.qmark
    | FStar_Parser_AST.Paren e1 ->
        (* print the inner term; if a comment was collected for it, emit
           the comment before the parenthesized document *)
        let uu___ = p_term_sep false false e1 in
        (match uu___ with
         | (comm, t) ->
             let doc = soft_parens_with_nesting t in
             if comm = FStar_Pprint.empty
             then doc
             else
               (let uu___2 =
                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in
                FStar_Pprint.op_Hat_Hat comm uu___2))
    (* array literal "[| e1; e2; ... |]" *)
    | uu___ when is_array e ->
        let es = extract_from_list e in
        let uu___1 =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbracket FStar_Pprint.bar in
        let uu___2 =
          let uu___3 = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in
          separate_map_or_flow_last uu___3
            (fun ps -> p_noSeqTermAndComment ps false) es in
        let uu___3 =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rbracket in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero uu___1 uu___2
          uu___3
    (* list literal "[e1; e2; ...]" *)
    | uu___ when is_list e ->
        let uu___1 =
          let uu___2 = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in
          let uu___3 = extract_from_list e in
          separate_map_or_flow_last uu___2
            (fun ps -> p_noSeqTermAndComment ps false) uu___3 in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero
          FStar_Pprint.lbracket uu___1 FStar_Pprint.rbracket
    (* reference set "!{e1, e2, ...}" *)
    | uu___ when is_ref_set e ->
        let es = extract_from_ref_set e in
        let uu___1 =
          FStar_Pprint.op_Hat_Hat FStar_Pprint.bang FStar_Pprint.lbrace in
        let uu___2 =
          let uu___3 = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in
          separate_map_or_flow uu___3 p_appTerm es in
        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero uu___1 uu___2
          FStar_Pprint.rbrace
    (* labeled term: the label is re-emitted as a "(*...*)" comment *)
    | FStar_Parser_AST.Labeled (e1, s, b) ->
        let uu___ = str (Prims.op_Hat "(*" (Prims.op_Hat s "*)")) in
        let uu___1 = p_term false false e1 in
        FStar_Pprint.op_Hat_Slash_Hat uu___ uu___1
    (* an operator applied to an arity the printer has no rule for *)
    | FStar_Parser_AST.Op (op, args) when
        let uu___ = handleable_op op args in Prims.op_Negation uu___ ->
        let uu___ =
          let uu___1 =
            let uu___2 = FStar_Ident.string_of_id op in
            let uu___3 =
              let uu___4 =
                let uu___5 =
                  FStar_Compiler_Util.string_of_int
                    (FStar_Compiler_List.length args) in
                Prims.op_Hat uu___5
                  " arguments couldn't be handled by the pretty printer" in
              Prims.op_Hat " with " uu___4 in
            Prims.op_Hat uu___2 uu___3 in
          Prims.op_Hat "Operation " uu___1 in
        failwith uu___
    | FStar_Parser_AST.Uvar id ->
        failwith "Unexpected universe variable out of universe context"
    (* All remaining term forms are not atomic: print the full term and
       wrap it in soft parentheses. *)
    | FStar_Parser_AST.Wild ->
        let uu___ = p_term false false e in soft_parens_with_nesting uu___
    | FStar_Parser_AST.Const uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Op uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Tvar uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Var uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Name uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Construct uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Abs uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.App uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Let uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.LetOperator uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.LetOpen uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.LetOpenRecord uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Seq uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Bind uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.If uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Match uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.TryWith uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Ascribed uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Record uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Project uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Product uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Sum uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.QForall uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.QExists uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Refine uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.NamedTyp uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Requires uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Ensures uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Decreases uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Attributes uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Quote uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.VQuote uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Antiquote uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.CalcProof uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.ElimExists uu___ ->
        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1
    (* lexicographic ordering "%[e1; e2; ...]" *)
    | FStar_Parser_AST.LexList l ->
        let uu___ =
          let uu___1 = str "%" in
          let uu___2 = p_term_list false false l in
          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.WFOrder (rel, e1) -> p_dec_wf false false rel e1
(* Pretty-prints a literal constant.  Integer literals carry an optional
   signedness/width suffix ("uy", "s", "ul", "L", "sz", ...); a negative
   integer literal is additionally wrapped in parentheses so the leading
   "-" cannot be parsed as a binary operator. *)
and (p_constant : FStar_Const.sconst -> FStar_Pprint.document) =
  fun uu___ ->
    match uu___ with
    | FStar_Const.Const_effect -> str "Effect"
    | FStar_Const.Const_unit -> str "()"
    | FStar_Const.Const_bool b -> FStar_Pprint.doc_of_bool b
    | FStar_Const.Const_real r -> str (Prims.op_Hat r "R")
    | FStar_Const.Const_char x -> p_char_literal x
    | FStar_Const.Const_string (s, uu___1) -> p_string_literal s
    | FStar_Const.Const_int (repr, sign_width_opt) ->
        (* unsigned literals get a "u" before the width letter *)
        let signedness uu___1 =
          match uu___1 with
          | FStar_Const.Unsigned -> str "u"
          | FStar_Const.Signed -> FStar_Pprint.empty in
        let width uu___1 =
          match uu___1 with
          | FStar_Const.Int8 -> str "y"
          | FStar_Const.Int16 -> str "s"
          | FStar_Const.Int32 -> str "l"
          | FStar_Const.Int64 -> str "L" in
        (* size_t literals use the "sz" suffix regardless of signedness *)
        let suffix uu___1 =
          match uu___1 with
          | (s, w) ->
              (match (s, w) with
               | (uu___2, FStar_Const.Sizet) -> str "sz"
               | uu___2 ->
                   let uu___3 = signedness s in
                   let uu___4 = width w in
                   FStar_Pprint.op_Hat_Hat uu___3 uu___4) in
        let ending = default_or_map FStar_Pprint.empty suffix sign_width_opt in
        let uu___1 = str repr in
        let result = FStar_Pprint.op_Hat_Hat uu___1 ending in
        let ( ^^ ) = FStar_Pprint.op_Hat_Hat in
        (* use structural (=) rather than physical (==) char comparison —
           the old (==) only worked because chars are immediates — and
           guard against an empty repr instead of raising Invalid_argument *)
        if (repr <> "") && ((String.get repr 0) = '-')
        then str "(" ^^ result ^^ str ")" else result
    | FStar_Const.Const_range_of -> str "range_of"
    | FStar_Const.Const_set_range_of -> str "set_range_of"
    | FStar_Const.Const_range r ->
        let uu___1 = FStar_Compiler_Range.string_of_range r in str uu___1
    | FStar_Const.Const_reify uu___1 -> str "reify"
    (* reflection on an effect name: "Lid?.reflect" *)
    | FStar_Const.Const_reflect lid ->
        let uu___1 = p_quident lid in
        let uu___2 =
          let uu___3 =
            let uu___4 = str "reflect" in
            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___4 in
          FStar_Pprint.op_Hat_Hat FStar_Pprint.qmark uu___3 in
        FStar_Pprint.op_Hat_Hat uu___1 uu___2
and (p_universe : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun u ->
    (* A universe annotation is "u#" glued directly onto the atomic
       universe term that follows it. *)
    FStar_Pprint.op_Hat_Hat (str "u#") (p_atomicUniverse u)
and (p_universeFrom : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun u ->
    (* Print a universe-level expression: a sum (u1 + u2), an application
       of [max], or an atomic universe. Any other term in universe
       position is an error. *)
    match u.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Op (id, u1::u2::[]) when
        let uu___ = FStar_Ident.string_of_id id in uu___ = "+" ->
        (* u1 + u2, grouped so the layout may break around the '+'. *)
        let uu___ =
          let uu___1 = p_universeFrom u1 in
          let uu___2 =
            let uu___3 = p_universeFrom u2 in
            FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.plus uu___3 in
          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in
        FStar_Pprint.group uu___
    | FStar_Parser_AST.App uu___ ->
        let uu___1 = head_and_args u in
        (match uu___1 with
         | (head, args) ->
             (match head.FStar_Parser_AST.tm with
              | FStar_Parser_AST.Var maybe_max_lid when
                  FStar_Ident.lid_equals maybe_max_lid
                    FStar_Parser_Const.max_lid
                  ->
                  (* max u1 ... un: the head followed by each argument
                     printed as an atomic universe. *)
                  let uu___2 =
                    let uu___3 = p_qlident FStar_Parser_Const.max_lid in
                    let uu___4 =
                      FStar_Pprint.separate_map FStar_Pprint.space
                        (fun uu___5 ->
                           match uu___5 with
                           | (u1, uu___6) -> p_atomicUniverse u1) args in
                    op_Hat_Slash_Plus_Hat uu___3 uu___4 in
                  FStar_Pprint.group uu___2
              | uu___2 ->
                  (* Only [max] may be applied in universe position. *)
                  let uu___3 =
                    let uu___4 = FStar_Parser_AST.term_to_string u in
                    FStar_Compiler_Util.format1
                      "Invalid term in universe context %s" uu___4 in
                  failwith uu___3))
    | uu___ -> p_atomicUniverse u
and (p_atomicUniverse : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun u ->
    (* Print a universe term in atomic position: wildcard, integer level,
       universe variable, or a parenthesized compound universe. *)
    match u.FStar_Parser_AST.tm with
    | FStar_Parser_AST.Wild -> FStar_Pprint.underscore
    | FStar_Parser_AST.Const (FStar_Const.Const_int (r, sw)) ->
        p_constant (FStar_Const.Const_int (r, sw))
    | FStar_Parser_AST.Uvar id ->
        let uu___ = FStar_Ident.string_of_id id in str uu___
    | FStar_Parser_AST.Paren u1 ->
        let uu___ = p_universeFrom u1 in soft_parens_with_nesting uu___
    (* Applications and sums are not atomic: re-print them wrapped in
       parentheses. *)
    | FStar_Parser_AST.App uu___ ->
        let uu___1 = p_universeFrom u in soft_parens_with_nesting uu___1
    | FStar_Parser_AST.Op (id, uu___::uu___1::[]) when
        let uu___2 = FStar_Ident.string_of_id id in uu___2 = "+" ->
        let uu___2 = p_universeFrom u in soft_parens_with_nesting uu___2
    | uu___ ->
        let uu___1 =
          let uu___2 = FStar_Parser_AST.term_to_string u in
          FStar_Compiler_Util.format1 "Invalid term in universe context %s"
            uu___2 in
        failwith uu___1
(* Render a term with tuple unfolding disabled for the duration.
   FIX: the [unfold_tuples] flag is now restored even when [p_term]
   raises, so a pretty-printing failure cannot leave the global flag
   stuck at [false]. *)
let (term_to_document : FStar_Parser_AST.term -> FStar_Pprint.document) =
  fun e ->
    let old_unfold_tuples = FStar_Compiler_Effect.op_Bang unfold_tuples in
    FStar_Compiler_Effect.op_Colon_Equals unfold_tuples false;
    (let res =
       try p_term false false e
       with ex ->
         (FStar_Compiler_Effect.op_Colon_Equals unfold_tuples
            old_unfold_tuples;
          raise ex) in
     FStar_Compiler_Effect.op_Colon_Equals unfold_tuples old_unfold_tuples;
     res)
(* Render only the signature of a declaration (via [p_justSig]). *)
let (signature_to_document : FStar_Parser_AST.decl -> FStar_Pprint.document)
  = fun e -> p_justSig e
(* Render a complete declaration. *)
let (decl_to_document : FStar_Parser_AST.decl -> FStar_Pprint.document) =
  fun e -> p_decl e
(* Render a (possibly disjunctive) pattern. *)
let (pat_to_document : FStar_Parser_AST.pattern -> FStar_Pprint.document) =
  fun p -> p_disjunctivePattern p
(* Render a binder. NOTE(review): the [true] flag is forwarded to
   [p_binder]; presumably "treat as atomic" — confirm against
   [p_binder]'s definition. *)
let (binder_to_document : FStar_Parser_AST.binder -> FStar_Pprint.document) =
  fun b -> p_binder true b
(* Render a whole module or interface: one document per declaration,
   separated by hard line breaks. Both constructors are handled
   identically, so the declaration list is extracted first. *)
let (modul_to_document : FStar_Parser_AST.modul -> FStar_Pprint.document) =
  fun m ->
    let decls =
      match m with
      | FStar_Parser_AST.Module (_, ds) -> ds
      | FStar_Parser_AST.Interface (_, ds, _) -> ds in
    FStar_Pprint.separate FStar_Pprint.hardline
      (FStar_Compiler_List.map decl_to_document decls)
(* Print a list of comments on separate lines; their source ranges are
   ignored here. *)
let (comments_to_document :
  (Prims.string * FStar_Compiler_Range.range) Prims.list ->
    FStar_Pprint.document)
  =
  fun comments ->
    let print_one (comment, _range) = str comment in
    FStar_Pprint.separate_map FStar_Pprint.hardline print_one comments
(* Compute placement metadata for a declaration: its source range,
   whether it has printed qualifiers, and whether it has attributes. *)
let (extract_decl_range : FStar_Parser_AST.decl -> decl_meta) =
  fun d ->
    let has_qs =
      match ((d.FStar_Parser_AST.quals), (d.FStar_Parser_AST.d)) with
      (* A lone [Assumption] qualifier on an [assume] does not count —
         presumably because it is implied by the keyword; confirm
         against the decl printer. *)
      | ((FStar_Parser_AST.Assumption)::[], FStar_Parser_AST.Assume
         (id, uu___)) -> false
      | ([], uu___) -> false
      | uu___ -> true in
    {
      r = (d.FStar_Parser_AST.drange);
      has_qs;
      has_attrs =
        (Prims.op_Negation
           (FStar_Compiler_List.isEmpty d.FStar_Parser_AST.attrs))
    }
(* Print a list of declarations, interleaving the given comments at
   their source positions. Returns the document together with the
   comments that could not be placed. Uses the global [comment_stack]
   as mutable working state; it is cleared before returning. *)
let (decls_with_comments_to_document :
  FStar_Parser_AST.decl Prims.list ->
    (Prims.string * FStar_Compiler_Range.range) Prims.list ->
      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)
        Prims.list))
  =
  fun decls ->
    fun comments ->
      match decls with
      | [] -> (FStar_Pprint.empty, comments)
      | d::ds ->
          let uu___ = ((d :: ds), (d.FStar_Parser_AST.drange)) in
          (match uu___ with
           | (decls1, first_range) ->
               (FStar_Compiler_Effect.op_Colon_Equals comment_stack comments;
                (* First flush any comments that occur before the first
                   declaration. *)
                (let initial_comment =
                   let uu___2 =
                     FStar_Compiler_Range.start_of_range first_range in
                   place_comments_until_pos Prims.int_zero Prims.int_one
                     uu___2 dummy_meta FStar_Pprint.empty false true in
                 let doc =
                   separate_map_with_comments FStar_Pprint.empty
                     FStar_Pprint.empty p_decl decls1 extract_decl_range in
                 (* Whatever remains on the stack is handed back to the
                    caller. *)
                 let comments1 = FStar_Compiler_Effect.op_Bang comment_stack in
                 FStar_Compiler_Effect.op_Colon_Equals comment_stack [];
                 (let uu___3 = FStar_Pprint.op_Hat_Hat initial_comment doc in
                  (uu___3, comments1)))))
(* Like [decls_with_comments_to_document], but starting from a whole
   module or interface. *)
let (modul_with_comments_to_document :
  FStar_Parser_AST.modul ->
    (Prims.string * FStar_Compiler_Range.range) Prims.list ->
      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)
        Prims.list))
  =
  fun m ->
    fun comments ->
      match m with
      | FStar_Parser_AST.Module (_, decls) ->
          decls_with_comments_to_document decls comments
      | FStar_Parser_AST.Interface (_, decls, _) ->
          decls_with_comments_to_document decls comments
(* Single-declaration convenience wrapper around
   [decls_with_comments_to_document]. *)
let (decl_with_comments_to_document :
  FStar_Parser_AST.decl ->
    (Prims.string * FStar_Compiler_Range.range) Prims.list ->
      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)
        Prims.list))
  = fun d comments -> decls_with_comments_to_document [d] comments


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_Utf8.ml
================================================
(*
   Originally part of the ulex package with the following license:

   Copyright 2005 by Alain Frisch.

   Permission is hereby granted, free of charge, to any person obtaining
   a copy of this software and associated documentation files (the
   "Software"), to deal in the Software without restriction, including
   without limitation the rights to use, copy, modify, merge, publish,
   distribute, sublicense, and/or sell copies of the Software, and to
   permit persons to whom the Software is furnished to do so, subject to
   the following conditions:

   The above copyright notice and this permission notice shall be
   included in all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
   NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
   LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
   OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
   WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*)


exception MalFormed

(* cf http://www.faqs.org/rfcs/rfc3629.html *)

(* width.(b) is the total byte length of a UTF-8 sequence whose first
   byte is b, or -1 when b cannot start a sequence (continuation bytes
   0x80-0xBF and invalid leads 0xF8-0xFF). *)
let width = Array.make 256 (-1)
let () =
  for i = 0 to 127 do width.(i) <- 1 done;
  for i = 192 to 223 do width.(i) <- 2 done;
  for i = 224 to 239 do width.(i) <- 3 done;
  for i = 240 to 247 do width.(i) <- 4 done

(* Decode the UTF-8 code point starting at byte [i] of [s].
   Raises MalFormed on an invalid lead byte, a bad continuation byte,
   or a code point in the surrogate range. *)
let next s i =
  match s.[i] with
    | '\000'..'\127' as c ->
        Char.code c
    | '\192'..'\223' as c ->
        let n1 = Char.code c in
        let n2 = Char.code s.[i+1] in
        if (n2 lsr 6 != 0b10) then raise MalFormed;
        ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f)
    | '\224'..'\239' as c ->
        let n1 = Char.code c in
        let n2 = Char.code s.[i+1] in
        let n3 = Char.code s.[i+2] in
        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then raise MalFormed;
        let p =
          ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f)
        in
        (* BUGFIX: the UTF-16 surrogate range is U+D800..U+DFFF; the upper
           bound was previously written 0xdf00, which let the ill-formed
           code points U+DF01..U+DFFF through (RFC 3629 forbids them). *)
        if (p >= 0xd800) && (p <= 0xdfff) then raise MalFormed;
        p
    | '\240'..'\247' as c ->
        let n1 = Char.code c in
        let n2 = Char.code s.[i+1] in
        let n3 = Char.code s.[i+2] in
        let n4 = Char.code s.[i+3] in
        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10)
        then raise MalFormed;
        ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor
        ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f)
    | _ -> raise MalFormed


(* With this implementation, a truncated code point will result
   in Stream.Failure, not in MalFormed. *)

(* Decode the next UTF-8 code point from a char stream.
   Raises MalFormed on a bad lead or continuation byte, and
   Stream.Failure if the stream ends (including mid-sequence).
   FIX: the 3-byte branch now rejects surrogate code points, matching
   [next] above and RFC 3629. *)
let from_stream s =
  match Stream.next s with
    | '\000'..'\127' as c ->
        Char.code c
    | '\192'..'\223' as c ->
        let n1 = Char.code c in
        let n2 = Char.code (Stream.next s) in
        if (n2 lsr 6 != 0b10) then raise MalFormed;
        ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f)
    | '\224'..'\239' as c ->
        let n1 = Char.code c in
        let n2 = Char.code (Stream.next s) in
        let n3 = Char.code (Stream.next s) in
        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then raise MalFormed;
        let p =
          ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f)
        in
        if (p >= 0xd800) && (p <= 0xdfff) then raise MalFormed;
        p
    | '\240'..'\247' as c ->
        let n1 = Char.code c in
        let n2 = Char.code (Stream.next s) in
        let n3 = Char.code (Stream.next s) in
        let n4 = Char.code (Stream.next s) in
        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10)
        then raise MalFormed;
        ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor
        ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f)
    | _ -> raise MalFormed



(* Number of code points encoded in the [bytes] bytes of [s] starting
   at [pos]. Raises MalFormed if a sequence is invalid or runs past
   the end of the region. *)
let compute_len s pos bytes =
  let limit = pos + bytes in
  let rec count acc i =
    if i = limit then acc
    else if i > limit then raise MalFormed
    else begin
      let w = width.(Char.code s.[i]) in
      if w <= 0 then raise MalFormed;
      count (acc + 1) (i + w)
    end
  in
  count 0 pos

(* Decode [n] code points of [s] starting at byte [spos] into array [a]
   starting at index [apos]. *)
let rec blit_to_int s spos a apos n =
  if n > 0 then begin
    a.(apos) <- next s spos;
    let w = width.(Char.code s.[spos]) in
    blit_to_int s (spos + w) a (apos + 1) (n - 1)
  end

(* Decode [bytes] bytes of [s] starting at [pos] into a fresh array of
   code points. *)
let to_int_array s pos bytes =
  let count = compute_len s pos bytes in
  let out = Array.make count 0 in
  blit_to_int s pos out 0 count;
  out

(**************************)

(* Number of bytes needed to encode code point [p] in UTF-8. *)
let width_code_point p =
  match p with
  | _ when p <= 0x7f -> 1
  | _ when p <= 0x7ff -> 2
  | _ when p <= 0xffff -> 3
  | _ when p <= 0x10ffff -> 4
  | _ -> raise MalFormed

(* Append the UTF-8 encoding of code point [p] to buffer [b].
   Raises MalFormed for surrogates (checked in the 3-byte range) and
   for values above U+10FFFF. *)
let store b p =
  let byte x = Buffer.add_char b (Char.chr x) in
  if p <= 0x7f then byte p
  else if p <= 0x7ff then begin
    byte (0xc0 lor (p lsr 6));
    byte (0x80 lor (p land 0x3f))
  end
  else if p <= 0xffff then begin
    if p >= 0xd800 && p < 0xe000 then raise MalFormed;
    byte (0xe0 lor (p lsr 12));
    byte (0x80 lor ((p lsr 6) land 0x3f));
    byte (0x80 lor (p land 0x3f))
  end
  else if p <= 0x10ffff then begin
    byte (0xf0 lor (p lsr 18));
    byte (0x80 lor ((p lsr 12) land 0x3f));
    byte (0x80 lor ((p lsr 6) land 0x3f));
    byte (0x80 lor (p land 0x3f))
  end
  else raise MalFormed


(* Encode [len] code points of [a], starting at index [apos], into a
   UTF-8 string. *)
let from_int_array a apos len =
  let b = Buffer.create (len * 4) in
  for k = 0 to len - 1 do
    store b a.(apos + k)
  done;
  Buffer.contents b

(* Lift a char stream into a stream of decoded code points; the stream
   ends (None) when the underlying stream is exhausted. *)
let stream_from_char_stream s =
  Stream.from
    (fun _ ->
       match from_stream s with
       | code -> Some code
       | exception Stream.Failure -> None)


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Parser_Util.ml
================================================
open FStar_Compiler_Range
open Lexing

(* This brings into scope enough the translation of F# type names into the
 * corresponding OCaml type names; the reason for that is that we massage
 * parse.fsy (using sed) into parse.mly; but, we don't rename types. *)
include FStar_BaseTypes
(* F# type-name aliases used by the parse.fsy -> parse.mly translation. *)
type single = float
type decimal = int
type bytes = byte array

(* Placeholder for F#'s parseState — presumably unused on the OCaml
   side. *)
let parseState = ()

(* Convert a Lexing.position into an F* position: the line number and
   the column (char offset minus beginning-of-line offset). *)
let pos_of_lexpos (p:position) =
  let line = Z.of_int p.pos_lnum in
  let col = Z.of_int (p.pos_cnum - p.pos_bol) in
  mk_pos line col

(* Build a source range spanning lexer positions [p1] to [p2]; the file
   name is taken from [p1]. *)
let mksyn_range (p1:position) p2 =
  let start_pos = pos_of_lexpos p1 in
  let end_pos = pos_of_lexpos p2 in
  mk_range p1.pos_fname start_pos end_pos

(* Range of the token currently matched by [lexbuf]. *)
let getLexerRange (lexbuf:lexbuf) =
  mksyn_range lexbuf.lex_start_p lexbuf.lex_curr_p

(* Range of the production currently being reduced (ocamlyacc's
   symbol_start/symbol_end). The unit arguments below mimic the F#
   parseState-based API shape. *)
let lhs () =
  mksyn_range (Parsing.symbol_start_pos ()) (Parsing.symbol_end_pos ())

(* Range of the [n]-th item on the right-hand side of the current
   production. *)
let rhs () n =
  mksyn_range (Parsing.rhs_start_pos n) (Parsing.rhs_end_pos n)

(* Start position of the [n]-th right-hand-side item. *)
let rhspos () n =
  pos_of_lexpos (Parsing.rhs_start_pos n)

(* Range spanning right-hand-side items [n] through [m]. *)
let rhs2 () n m =
  mksyn_range (Parsing.rhs_start_pos n) (Parsing.rhs_end_pos m)

(* An arbitrary exception paired with the source range where it arose. *)
exception WrappedError of exn * range
(* Raised once an error has already been reported. *)
exception ReportedError
(* Raised to abort processing altogether. *)
exception StopProcessing

(* Default warning handler, in effect until a real one is installed. *)
let warningHandler = ref (fun (e:exn) ->
                          FStar_Compiler_Util.print_string "no warning handler installed\n" ;
                          FStar_Compiler_Util.print_any e; ())
(* Default error handler. FIX: the message previously said "no warning
   handler installed" — a copy-paste from [warningHandler]. *)
let errorHandler = ref (fun (e:exn) ->
                        FStar_Compiler_Util.print_string "no error handler installed\n" ;
                        FStar_Compiler_Util.print_any e; ())
let errorAndWarningCount = ref 0
(* Report an error/warning: control-flow exceptions are re-raised,
   anything else goes to the installed handler. Both bump the shared
   counter. *)
let errorR  exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !errorHandler exn
let warning exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !warningHandler exn

(* Comments collected by the lexer, most recent first. *)
let comments : (string * FStar_Compiler_Range.range) list ref = ref []
let add_comment c = comments := c :: !comments
(* Return all collected comments and reset the accumulator. *)
let flush_comments () =
  let collected = !comments in
  comments := [];
  collected


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Pervasives.ml
================================================
(* The polymorphic identity function. *)
let id : 'a . 'a -> 'a = fun x -> x
(* Sum type: a value of type ['a] (left) or ['b] (right). *)
type ('a, 'b) either =
  | Inl of 'a
  | Inr of 'b


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Pervasives_Native.ml
================================================
(* Re-declare Stdlib's [option] under a local name so that [@@deriving]
   can attach yojson/show serializers to it. *)
type 'a option' = 'a option =
  | None
  | Some of 'a[@@deriving yojson,show]

(* Re-export [option] itself, with the same derived functions. *)
type 'a option = 'a option' =
  | None
  | Some of 'a[@@deriving yojson,show]

let fst = Stdlib.fst
let snd = Stdlib.snd


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Pprint.ml
================================================
(*
   Copyright 2016 Microsoft Research

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*)

(*  prettyprint.fsti's OCaml implementation is just a thin wrapper around
    Francois Pottier's pprint package. *)
include PPrint

(* Primitive document constructors; [doc_of_char] takes the character
   code as an int. *)
(* FIXME(adl) also print the char in a comment if it's representable *)
let doc_of_char c = PPrint.OCaml.char (Char.chr c)
let doc_of_string = PPrint.string
let doc_of_bool b = PPrint.string (string_of_bool b)
(* A one-element buffer list holding the empty document. *)
let blank_buffer_doc = [ ("", PPrint.empty) ]

(* The wrappers below adapt PPrint functions to the arbitrary-precision
   Z.t integers used by extracted F* code, converting with Z.to_int. *)
let substring s ofs len =
    PPrint.substring s (Z.to_int ofs) (Z.to_int len)

let fancystring s apparent_length =
    PPrint.fancystring s (Z.to_int apparent_length)

let fancysubstring s ofs len apparent_length =
    PPrint.fancysubstring  s (Z.to_int ofs) (Z.to_int len) (Z.to_int apparent_length)

let blank n = PPrint.blank (Z.to_int n)

let break_ n = PPrint.break (Z.to_int n)

(* Document concatenation: ^^ is plain concatenation, ^/^ inserts a
   breakable space between the operands. *)
let op_Hat_Hat = PPrint.(^^)
let op_Hat_Slash_Hat = PPrint.(^/^)

let nest j doc = PPrint.nest (Z.to_int j) doc

(* Arrow tokens used throughout the printers. *)
let long_left_arrow = PPrint.string "<--"
let larrow = PPrint.string "<-"
let rarrow = PPrint.string "->"

let repeat n doc = PPrint.repeat (Z.to_int n) doc

let hang n doc = PPrint.hang (Z.to_int n) doc

(* Z-converting wrappers over PPrint's indentation/boxing combinators;
   [n] is the indentation amount and [b] the break width. *)
let prefix n b left right =
    PPrint.prefix (Z.to_int n) (Z.to_int b) left right

let jump n b right =
    PPrint.jump (Z.to_int n) (Z.to_int b) right

let infix n b middle left right =
    PPrint.infix (Z.to_int n) (Z.to_int b) middle left right

let surround n b opening contents closing =
    PPrint.surround (Z.to_int n) (Z.to_int b) opening contents closing

let soft_surround n b opening contents closing =
    PPrint.soft_surround (Z.to_int n) (Z.to_int b) opening contents closing

let surround_separate n b void_ opening sep closing docs =
    PPrint.surround_separate (Z.to_int n) (Z.to_int b) void_ opening sep closing docs

let surround_separate_map n b void_ opening sep closing f xs =
    PPrint.surround_separate_map (Z.to_int n) (Z.to_int b) void_ opening sep closing f xs

(* Wrap up ToBuffer.pretty. *)
let pretty_string rfrac width doc =
    let buf = Buffer.create 0 in
    PPrint.ToBuffer.pretty rfrac (Z.to_int width) buf doc;
    Buffer.contents buf

(* Wrap up ToChannel.pretty *)
let pretty_out_channel rfrac width doc ch =
    PPrint.ToChannel.pretty rfrac (Z.to_int width) ch doc;
    flush ch


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_Sedlexing.ml
================================================
(**
A custom version of Sedlexing enhanced with
lc, bol and fname position tracking and
specialized for UTF-8 string inputs
(the parser driver always reads whole files)
**)

exception Error

module L = Lexing
type pos = L.position

(* Lexer buffer over a fully decoded UTF-8 input. *)
type lexbuf = {
  buf: int array;        (* all code points of the input *)
  len: int;              (* number of code points in [buf] *)

  mutable cur: int;      (* index of the next code point to read *)
  mutable cur_p: pos;    (* source position corresponding to [cur] *)
  mutable start: int;    (* start index of the current lexeme *)
  mutable start_p: pos;  (* source position corresponding to [start] *)

  mutable mark: int;     (* saved backtracking point *)
  mutable mark_p: pos;   (* position saved with the mark *)
  mutable mark_val: int; (* action value saved with the mark *)
}

(* Raw accessors used by the generated lexers. *)
let get_buf lb = lb.buf
let get_cur lb = lb.cur
let get_start lb = lb.start

(* N.B. the offsets are for interactive mode:
   we want to be able to interpret a fragment as if it was part
   of a larger file and report absolute error positions *)
(* Create a lexbuf from UTF-8 string [s], reporting positions as file
   [fn] with line offset [loffset] and character offset [coffset]. *)
let create (s:string) fn loffset coffset =
  let a = FStar_Parser_Utf8.to_int_array s 0 (String.length s) in
  let start_p = {
    L.pos_fname = fn;
    L.pos_cnum = coffset;
    L.pos_bol  = 0;
    L.pos_lnum = loffset; }
  in {
    buf = a;
    len = Array.length a;

    cur = 0;
    cur_p = start_p;

    start = 0;
    start_p = start_p;

    mark = 0;
    mark_p = start_p;
    mark_val = 0;
  }

let current_pos b = b.cur_p

(* Begin a new lexeme at the current point; also resets the mark. *)
let start b =
  b.mark <- b.cur;
  b.mark_val <- (-1);
  b.mark_p <- b.cur_p;
  b.start <- b.cur;
  b.start_p <- b.cur_p

(* Record a backtracking point carrying action value [i]. *)
let mark b i =
  b.mark <- b.cur;
  b.mark_p <- b.cur_p;
  b.mark_val <- i

(* Return to the last mark; yields the action value saved there. *)
let backtrack b =
  b.cur <- b.mark;
  b.cur_p <- b.mark_p;
  b.mark_val

(* Return the next code point (as a Uchar) and advance the cursor, or
   None at end of input. *)
let next b =
  if b.cur = b.len then None
  else
    let c = b.buf.(b.cur) in
    (b.cur <- b.cur + 1;
    b.cur_p <- {b.cur_p with L.pos_cnum = b.cur_p.L.pos_cnum + 1}; Some (Uchar.of_int c))

(* Record a newline: bump the line number and set the beginning-of-line
   marker to the current character offset. *)
let new_line b =
  b.cur_p <- { b.cur_p with
    L.pos_lnum = b.cur_p.L.pos_lnum + 1;
    L.pos_bol = b.cur_p.L.pos_cnum;
  }

(* Positions delimiting the current lexeme. *)
let range b = (b.start_p, b.cur_p)

(* Current lexeme as an array of code points. *)
let ulexeme lexbuf =
  Array.sub lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start)

(* Undo all input consumed since [start] was last called. *)
let rollback b =
  b.cur <- b.start;
  b.cur_p <- b.start_p

(* Current lexeme re-encoded as a UTF-8 string. *)
let lexeme lexbuf =
  FStar_Parser_Utf8.from_int_array lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start)

(* Remaining input from absolute index [pos], or "" when out of range. *)
let lookahead b pos =
  if b.len <= pos then ""
  else FStar_Parser_Utf8.from_int_array b.buf pos (b.len - pos)

let source_file b =
  b.cur_p.L.pos_fname

let current_line b =
  b.cur_p.Lexing.pos_lnum

(* Since sedlex 2.4, we need to expose Sedlexing.__private_next_int
   (see #2343)

   From https://github.com/ocaml-community/sedlex/blob/268c553f474457574e22701679d68f66aa771551/src/lib/sedlexing.mli#L154-L161
   [next] and [__private__next_int] have the same doc description,
   the only difference is the return type *)
(* Like [next], but returns the code point as an int, with -1 for end
   of input. *)
let __private__next_int b =
  match next b with
  | Some v -> Uchar.to_int v
  | None -> -1


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_String.ml
================================================
(* A string of [i] copies of code point [c]. *)
let make i c = BatUTF8.init (Z.to_int i) (fun _ -> BatUChar.chr c)
let strcat s t = s ^ t
let op_Hat s t =  strcat s t

(* restore pre-2.11 BatString.nsplit behavior,
   see https://github.com/ocaml-batteries-team/batteries-included/issues/845
   (splitting the empty string yields [] rather than [""]) *)
let batstring_nsplit s t =
  match s with
  | "" -> []
  | _ -> BatString.split_on_string t s

(* Split [s] at every separator code point in [seps]: each separator is
   applied in turn to every fragment produced so far. *)
let split seps s =
  List.fold_left
    (fun fragments sep ->
       let usep = BatUTF8.init 1 (fun _ -> BatUChar.chr sep) in
       BatList.flatten
         (BatList.map (fun frag -> batstring_nsplit frag usep) fragments))
    [s] seps
(* Byte-wise string comparison, result lifted to Z. *)
let compare x y = Z.of_int (BatString.compare x y)
let concat = BatString.concat
(* Length in Unicode code points, not bytes. *)
let length s = Z.of_int (BatUTF8.length s)

(* Code-point-indexed substring: [j] characters starting at index [i]. *)
let substring s i j =
  BatUTF8.init (Z.to_int j) (fun k -> BatUTF8.get s (k + Z.to_int i))

(* Code point at index [i]. *)
let get s i = BatUChar.code (BatUTF8.get s (Z.to_int i))
let lowercase = BatString.lowercase_ascii
let uppercase = BatString.uppercase_ascii
let escaped = BatString.escaped
(* Conversions between strings and lists of code points. *)
let list_of_string s = BatList.init (BatUTF8.length s) (fun i -> BatUChar.code (BatUTF8.get s i))
let string_of_list l = BatUTF8.init (BatList.length l) (fun i -> BatUChar.chr (BatList.at l i))


================================================
FILE: engine/backends/fstar/fstar-surface-ast/FStar_VConfig.ml
================================================
open Prims
(* Verification configuration: per-definition counterparts of F*'s
   verification-related command-line options (fuel bounds, SMT encoding
   switches, Z3 options, quake/retry, hint reuse). *)
type vconfig =
  {
  initial_fuel: Prims.int ;
  max_fuel: Prims.int ;
  initial_ifuel: Prims.int ;
  max_ifuel: Prims.int ;
  detail_errors: Prims.bool ;
  detail_hint_replay: Prims.bool ;
  no_smt: Prims.bool ;
  quake_lo: Prims.int ;
  quake_hi: Prims.int ;
  quake_keep: Prims.bool ;
  retry: Prims.bool ;
  smtencoding_elim_box: Prims.bool ;
  smtencoding_nl_arith_repr: Prims.string ;
  smtencoding_l_arith_repr: Prims.string ;
  smtencoding_valid_intro: Prims.bool ;
  smtencoding_valid_elim: Prims.bool ;
  tcnorm: Prims.bool ;
  no_plugins: Prims.bool ;
  no_tactics: Prims.bool ;
  z3cliopt: Prims.string Prims.list ;
  z3smtopt: Prims.string Prims.list ;
  z3refresh: Prims.bool ;
  z3rlimit: Prims.int ;
  z3rlimit_factor: Prims.int ;
  z3seed: Prims.int ;
  trivial_pre_for_unannotated_effectful_fns: Prims.bool ;
  reuse_hint_for: Prims.string FStar_Pervasives_Native.option }
let (__proj__Mkvconfig__item__initial_fuel : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        initial_fuel
let (__proj__Mkvconfig__item__max_fuel : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        max_fuel
let (__proj__Mkvconfig__item__initial_ifuel : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        initial_ifuel
let (__proj__Mkvconfig__item__max_ifuel : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        max_ifuel
let (__proj__Mkvconfig__item__detail_errors : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        detail_errors
let (__proj__Mkvconfig__item__detail_hint_replay : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        detail_hint_replay
let (__proj__Mkvconfig__item__no_smt : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        no_smt
let (__proj__Mkvconfig__item__quake_lo : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        quake_lo
let (__proj__Mkvconfig__item__quake_hi : vconfig -> Prims.int) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        quake_hi
let (__proj__Mkvconfig__item__quake_keep : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        quake_keep
let (__proj__Mkvconfig__item__retry : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} -> retry
let (__proj__Mkvconfig__item__smtencoding_elim_box : vconfig -> Prims.bool) =
  fun projectee ->
    match projectee with
    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;
        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;
        smtencoding_elim_box; smtencoding_nl_arith_repr;
        smtencoding_l_arith_repr; smtencoding_valid_intro;
        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;
        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;
        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->
        smtencoding_elim_box
let (__proj__Mkvconfig__item__smtencoding_nl_arith_repr :
  vconfig -> Prims.string) =
  (* Generated record projector: extracts [smtencoding_nl_arith_repr]. *)
  fun projectee ->
    match projectee with
    | { smtencoding_nl_arith_repr;_} -> smtencoding_nl_arith_repr
let (__proj__Mkvconfig__item__smtencoding_l_arith_repr :
  vconfig -> Prims.string) =
  (* Generated record projector: extracts [smtencoding_l_arith_repr]. *)
  fun projectee ->
    match projectee with
    | { smtencoding_l_arith_repr;_} -> smtencoding_l_arith_repr
let (__proj__Mkvconfig__item__smtencoding_valid_intro :
  vconfig -> Prims.bool) =
  (* Generated record projector: extracts [smtencoding_valid_intro]. *)
  fun projectee ->
    match projectee with
    | { smtencoding_valid_intro;_} -> smtencoding_valid_intro
let (__proj__Mkvconfig__item__smtencoding_valid_elim : vconfig -> Prims.bool)
  =
  (* Generated record projector: extracts [smtencoding_valid_elim]. *)
  fun projectee ->
    match projectee with
    | { smtencoding_valid_elim;_} -> smtencoding_valid_elim
let (__proj__Mkvconfig__item__tcnorm : vconfig -> Prims.bool) =
  (* Generated record projector: extracts [tcnorm] from a [vconfig]. *)
  fun projectee -> match projectee with { tcnorm;_} -> tcnorm
let (__proj__Mkvconfig__item__no_plugins : vconfig -> Prims.bool) =
  (* Generated record projector: extracts [no_plugins] from a [vconfig]. *)
  fun projectee -> match projectee with { no_plugins;_} -> no_plugins
let (__proj__Mkvconfig__item__no_tactics : vconfig -> Prims.bool) =
  (* Generated record projector: extracts [no_tactics] from a [vconfig]. *)
  fun projectee -> match projectee with { no_tactics;_} -> no_tactics
let (__proj__Mkvconfig__item__z3cliopt : vconfig -> Prims.string Prims.list)
  =
  (* Generated record projector: extracts [z3cliopt] from a [vconfig]. *)
  fun projectee -> match projectee with { z3cliopt;_} -> z3cliopt
let (__proj__Mkvconfig__item__z3smtopt : vconfig -> Prims.string Prims.list)
  =
  (* Generated record projector: extracts [z3smtopt] from a [vconfig]. *)
  fun projectee -> match projectee with { z3smtopt;_} -> z3smtopt
let (__proj__Mkvconfig__item__z3refresh : vconfig -> Prims.bool) =
  (* Generated record projector: extracts [z3refresh] from a [vconfig]. *)
  fun projectee -> match projectee with { z3refresh;_} -> z3refresh
let (__proj__Mkvconfig__item__z3rlimit : vconfig -> Prims.int) =
  (* Generated record projector: extracts [z3rlimit] from a [vconfig]. *)
  fun projectee -> match projectee with { z3rlimit;_} -> z3rlimit
let (__proj__Mkvconfig__item__z3rlimit_factor : vconfig -> Prims.int) =
  (* Generated record projector: extracts [z3rlimit_factor]. *)
  fun projectee -> match projectee with { z3rlimit_factor;_} -> z3rlimit_factor
let (__proj__Mkvconfig__item__z3seed : vconfig -> Prims.int) =
  (* Generated record projector: extracts [z3seed] from a [vconfig]. *)
  fun projectee -> match projectee with { z3seed;_} -> z3seed
let (__proj__Mkvconfig__item__trivial_pre_for_unannotated_effectful_fns :
  vconfig -> Prims.bool) =
  (* Generated record projector: extracts
     [trivial_pre_for_unannotated_effectful_fns]. *)
  fun projectee ->
    match projectee with
    | { trivial_pre_for_unannotated_effectful_fns;_} ->
        trivial_pre_for_unannotated_effectful_fns
let (__proj__Mkvconfig__item__reuse_hint_for :
  vconfig -> Prims.string FStar_Pervasives_Native.option) =
  (* Generated record projector: extracts [reuse_hint_for]. *)
  fun projectee ->
    match projectee with { reuse_hint_for;_} -> reuse_hint_for

================================================
FILE: engine/backends/fstar/fstar-surface-ast/README
================================================
These files were extracted from https://github.com/FStarLang/FStar.


================================================
FILE: engine/backends/fstar/fstar-surface-ast/dune
================================================
; Library of OCaml sources vendored from the F* compiler (see README),
; giving the hax engine direct access to F*'s surface AST.
(library
 (name fstar_surface_ast)
 (package hax-engine)
 (libraries batteries stdint ppxlib menhirLib pprint base)
 ; Unwrapped so the vendored modules keep their original FStar_* names.
 (wrapped false)
 (preprocess
  (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)))

(env
 (_
  (flags
   ; Vendored code is warning-heavy: demote all warnings from errors,
   ; but keep warning 8 (non-exhaustive match) fatal.
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/backends/fstar/fstar-surface-ast/prims.ml
================================================
(* F*'s [Prims.int], backed by the local [Z] module (z.ml), which stores
   integers in decimal-string form. *)
type int = Z.t[@printer Z.pp_print][@@deriving show]
let of_int = Z.of_int
let int_zero = Z.zero
let int_one = Z.one
let parse_int = Z.of_string
let to_string = Z.to_string

(* JSON (de)serialization for [int]: encode via the decimal string form,
   reusing the serializers that ppx derives for a plain string alias. *)
type tmp = string [@@deriving yojson]
let int_to_yojson n = tmp_to_yojson (to_string n)
let int_of_yojson json =
  match tmp_of_yojson json with
  | Ok s -> Ok (parse_int s)
  | Error msg -> Error msg

(* Two-step aliases: deriving directly on [type bool = bool] would be a
   cyclic type abbreviation, so each shadowed builtin goes through a primed
   copy whose derived printers/serializers delegate to the originals. *)
type bool' = bool
[@@deriving yojson,show]
type bool = bool'
[@@deriving yojson,show]

type string' = string[@@deriving yojson,show]
type string = string'[@@deriving yojson,show]

(* Boolean negation, [Prims.op_Negation]. *)
let op_Negation x = not x

(* Arithmetic and comparisons on [Prims.int], delegated to the local [Z]
   module; these shadow the Stdlib integer operators. *)
let ( + )     = Z.add
let ( - )     = Z.sub
let ( * )     = Z.mul
let ( / )     = Z.ediv
let ( <= )    = Z.leq
let ( >= )    = Z.geq
let ( < )     = Z.lt
let ( > )     = Z.gt
let ( mod )   = Z.erem
let ( ~- )    = Z.neg
let abs       = Z.abs

type nonrec exn = exn
(* String concatenation, [Prims.op_Hat]. *)
let op_Hat x y = x ^ y

(* Same two-step aliasing trick as [bool]/[string] above, for [list]. *)
type 'a list' = 'a list[@@deriving yojson,show]
type 'a list = 'a list'[@@deriving yojson,show]

(* F*'s refined integer types erase to plain [int] here. *)
type nat = int
type pos = int
(* RHS is Stdlib's [string_of_bool], captured before any further shadowing;
   [string_of_int] prints the string-backed [Z.t]. *)
let string_of_bool = string_of_bool
let string_of_int = to_string


================================================
FILE: engine/backends/fstar/fstar-surface-ast/z.ml
================================================
(* A minimal stand-in for the [Z] (zarith) API used by the vendored F*
   sources: an integer is represented by its decimal string. *)
type t = String.t [@@deriving show]

let to_t = Base.Int.of_string
let of_t = Base.Int.to_string

(* NOTE(review): this is *lexicographic* string comparison, which disagrees
   with numeric order (e.g. "10" < "9") — confirm callers only rely on
   equality of canonical representations. *)
let compare = String.compare
(* [pp] is generated by [@@deriving show] on [t]. *)
let pp_print = pp
let hash = Base.String.hash


let to_int: String.t -> Base.Int.t = Base.Int.of_string
let of_int: Base.Int.t -> String.t = Base.Int.to_string


let zero: String.t = "0"
let one: String.t = "1"
let of_string x = x
let to_string x = x

(* Private helpers (hidden from the module's interface by the anonymous
   [open struct]): lift native-int functions to the string encoding. *)
open struct
    let map (fn : int -> int) : string -> string =
      fun s -> Base.Int.of_string s |> fn |> Base.Int.to_string

    let map2 (fn : int -> int -> int) : string -> string -> string =
      fun a b ->
        fn (Base.Int.of_string a) (Base.Int.of_string b) |> Base.Int.to_string

    let map2' (fn : int -> int -> 'a) : string -> string -> 'a =
      fun a b -> fn (Base.Int.of_string a) (Base.Int.of_string b)
    end

(* Arithmetic over the string encoding.  NOTE(review): every operation
   round-trips through native [Base.Int], so this "Z" is *not* arbitrary
   precision — values outside the native int range fail to parse or
   overflow silently.  Confirm callers only feed machine-sized integers. *)
let add = map2 ( + )
let sub = map2 ( - )
let mul = map2 ( * )
let ediv = map2 ( / )
let leq = map2' ( <= )
let geq = map2' ( >= )
let lt = map2' ( < )
let gt = map2' ( > )
(* Remainder via Base's [%] operator. *)
let erem = map2 Base.Int.( % )
let neg = map Base.Int.neg
(* The RHS [abs] is Stdlib.abs on native ints. *)
let abs = map abs
(* Shift amounts are plain native ints. *)
let shift_left: string -> Base.Int.t -> string = fun x i -> Base.Int.shift_left (Base.Int.of_string x) i |> Base.Int.to_string
let shift_right: string -> Base.Int.t -> string = fun x i -> Base.Int.shift_right (Base.Int.of_string x) i |> Base.Int.to_string


================================================
FILE: engine/backends/fstar/fstar_ast.ml
================================================
open Hax_engine.Utils
open Base
module Util = FStar_Parser_Util
module AST = FStar_Parser_AST
module Const = FStar_Const
module Range = FStar_Compiler_Range
module Char = FStar_Char
module Ident = FStar_Ident

(* All generated AST nodes share this placeholder source range. *)
let dummyRange = Range.dummyRange
(* Builds an F* identifier (with the dummy range) from a raw string. *)
let id ident = Ident.mk_ident (ident, dummyRange)
(* [id_prime x] is the identifier [x']. *)
let id_prime (ident : Ident.ident) = id (ident.idText ^ "'")

(* Builds a qualified lident from a path: every module segment gets its
   first letter capitalized, and the reserved final name "new" is escaped
   to "new_". *)
let lid path =
  let prefix = List.drop_last_exn path and final = List.last_exn path in
  let final = if String.equal final "new" then "new_" else final in
  let prefix = List.map prefix ~f:(map_first_letter String.uppercase) in
  Ident.lid_of_path (prefix @ [ final ]) dummyRange

(* Wraps a single identifier into a (one-segment) lident. *)
let lid_of_id id = Ident.lid_of_ids [ id ]
(* Wraps a raw term node with the dummy range, at expression level. *)
let term (tm : AST.term') = AST.{ tm; range = dummyRange; level = Expr }
(* Fresh identifier via F*'s gensym. *)
let generate_fresh_ident () = Ident.gen dummyRange

(* Wraps a raw declaration into a located [decl], routed to the interface
   side ([`Intf]) when [fsti] is set, otherwise to the implementation side
   ([`Impl]). *)
let decl ?(fsti = true) ?(quals = []) ?(attrs = []) (d : AST.decl') =
  let located = AST.{ d; drange = dummyRange; quals; attrs } in
  if fsti then `Intf located else `Impl located

(* Singleton-list convenience wrapper around [decl]. *)
let decls ?(fsti = true) ?(quals = []) ?(attrs = []) x =
  [ decl ~fsti ~quals ~attrs x ]

(* Wraps a raw pattern node with the dummy range. *)
let pat (pat : AST.pattern') = AST.{ pat; prange = dummyRange }

module Attrs = struct
  (* The [no_method] constant from F*'s Parser.Const. *)
  let no_method = term @@ AST.Var FStar_Parser_Const.no_method_lid
end

(* F*'s typeclass-resolution marker terms. *)
let tcresolve = term @@ AST.Var FStar_Parser_Const.tcresolve_lid
let solve = term @@ AST.Var FStar_Parser_Const.solve_lid

(* A pattern (named when [var] is given, wildcard otherwise) carrying the
   [tcresolve] meta-attribute. *)
let pat_var_tcresolve (var : string option) =
  let meta = Some (AST.Meta tcresolve) in
  pat
    (match var with
    | Some name -> AST.PatVar (id name, meta, [])
    | None -> AST.PatWild (meta, []))

(* Pattern application. *)
let pat_app name l = pat @@ AST.PatApp (name, l)

(* The wildcard pattern [_]. *)
let wild = pat @@ AST.PatWild (None, [])

(* Builds a lambda over [args], or returns [body] unchanged when there is
   nothing to abstract over. *)
let mk_e_abs args body =
  match args with [] -> body | _ :: _ -> term (AST.Abs (args, body))

(* Applies [base] to [args], each passed explicitly (no implicit marker). *)
let mk_e_app base args =
  let explicit = List.map args ~f:(fun arg -> (arg, AST.Nothing)) in
  AST.mkApp base explicit dummyRange

(* Applies [base] to arguments that already carry their qualifiers. *)
let mk_app base args = AST.mkApp base args dummyRange

(* The unit literal [()]. *)
let unit = term AST.(Const Const_unit)

(* The term [FStar.Tactics.Typeclasses.solve]. *)
let tc_solve =
  term
  @@ AST.Var (FStar_Parser_Const.fstar_tactics_lid' [ "Typeclasses"; "solve" ])

(* Builds a binder with the dummy range; implicit by default. *)
let mk_binder ?(aqual : FStar_Parser_AST.arg_qualifier option = Some Implicit) b
    =
  AST.{ b; brange = dummyRange; blevel = Un; aqual; battributes = [] }

(* Explicit (non-implicit) binder. *)
let mk_e_binder b = mk_binder ~aqual:None b
(* A [Name] term from a path. *)
let term_of_lid path = term @@ AST.Name (lid path)

(* Turns a type into an explicit binder, annotated when [name] is given. *)
let binder_of_term ?name (t : AST.term) : AST.binder =
  mk_e_binder
    (match name with
    | Some n -> AST.Annotated (n, t)
    | None -> AST.NoName t)

(* Arrow type [i1 -> ... -> ik -> output]. *)
let mk_e_arrow inputs output =
  term @@ AST.Product (List.map inputs ~f:binder_of_term, output)

(* Same as [mk_e_arrow], but takes the whole list [i1; ...; ik; output]. *)
let mk_e_arrow' types =
  mk_e_arrow (List.drop_last_exn types) (List.last_exn types)

(* Builds the refinement type [x:typ{phi x}]; [phi] receives a [Var] term
   referring to the freshly bound [x]. *)
let mk_refined (x : string) (typ : AST.term) (phi : x:AST.term -> AST.term) =
  let x = id x in
  let x_bd = mk_e_binder @@ AST.Annotated (x, typ) in
  term @@ AST.Refine (x_bd, phi (term @@ AST.Var (lid_of_id x)))

(* The types [Type0] and [eqtype], as terms. *)
let type0_term = AST.Name (lid [ "Type0" ]) |> term
let eqtype_term = AST.Name (lid [ "eqtype" ]) |> term

(* Runs the F* parser on [s] (wrapped into a parser input by [f]), turning
   parse errors into [Failure] exceptions. *)
let parse_string f s =
  let open FStar_Parser_ParseIt in
  let fragment =
    {
      frag_fname = "";
      frag_line = Z.of_int 1;
      frag_col = Z.of_int 0;
      frag_text = s;
    }
  in
  match parse (f fragment) with
  | ParseError (_, err, _) ->
      failwith ("string_of_term: got error [" ^ err ^ "] on input: [" ^ s ^ "]")
  | result -> result

(* Parses a string as a single F* term. *)
let term_of_string s =
  match parse_string (fun frag -> Fragment frag) s with
  | Term t -> t
  | _ -> failwith "parse failed"

(* Parses a string as a list of top-level F* declarations. *)
let decls_of_string s =
  match parse_string (fun frag -> Toplevel frag) s with
  | ASTFragment (Inr l, _) -> List.map l ~f:(fun i -> `Impl i)
  | _ -> failwith "parse failed"

(* Like [decls_of_string], but expects exactly one declaration. *)
let decl_of_string s =
  match decls_of_string s with [ d ] -> d | _ -> failwith "decl_of_string"

(* [ascribe t e] is the type ascription of [e] by [t]. *)
let ascribe t e = term @@ AST.Ascribed (e, t, None, false)
(* Logical implication [p ==> q]. *)
let implies p q = AST.Op (id "==>", [ p; q ]) |> term


================================================
FILE: engine/backends/fstar/fstar_backend.ml
================================================
open Hax_engine
open Utils
open Base

(* Instantiates the generic backend functor for F*.  The feature record
   starts from [Off] and switches on exactly the features the F* printer
   can handle. *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Monadic_binding
      include On.Slice
      include On.Macro
      include On.Construct_base
      include On.Quote
      include On.Dyn
      include On.Unsafe
    end)
    (struct
      let backend = Diagnostics.Backend.FStar
    end)

(* Functor embedding an AST whose feature set [FA] (with every listed
   feature off) into the F* backend's input language.  The [with type]
   constraints enumerate the features that must have been eliminated by
   earlier phases.  Fix: the [mutable_reference] constraint was stated
   twice; the duplicate is removed. *)
module SubtypeToInputLanguage
    (FA :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type continue = Features.Off.continue
         and type break = Features.Off.break
         and type mutable_pointer = Features.Off.mutable_pointer
         and type mutable_variable = Features.Off.mutable_variable
         and type reference = Features.Off.reference
         and type raw_pointer = Features.Off.raw_pointer
         and type early_exit = Features.Off.early_exit
         and type question_mark = Features.Off.question_mark
         and type as_pattern = Features.Off.as_pattern
         and type lifetime = Features.Off.lifetime
         and type monadic_action = Features.Off.monadic_action
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type loop = Features.Off.loop
         and type block = Features.Off.block
         and type for_loop = Features.Off.for_loop
         and type while_loop = Features.Off.while_loop
         and type for_index_loop = Features.Off.for_index_loop
         and type state_passing_loop = Features.Off.state_passing_loop
         and type fold_like_loop = Features.Off.fold_like_loop
         and type match_guard = Features.Off.match_guard
         and type trait_item_default = Features.Off.trait_item_default) =
struct
  module FB = InputLanguage

  include
    Subtype.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Features.SUBTYPE.Id
        include Features.SUBTYPE.On.Monadic_binding
        include Features.SUBTYPE.On.Construct_base
        include Features.SUBTYPE.On.Slice
        include Features.SUBTYPE.On.Macro
        include Features.SUBTYPE.On.Quote
        include Features.SUBTYPE.On.Dyn
        include Features.SUBTYPE.On.Unsafe
      end)

  (* Items not expressible in the backend's input language are rejected
     with a [NotInBackendLang] diagnostic. *)
  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))
end

(* The hax AST specialized to the F* input language. *)
module AST = Ast.Make (InputLanguage)

module BackendOptions = struct
  (* Engine options specific to the F* backend. *)
  type t = Hax_engine.Types.f_star_options_for__null
end

open Ast

(* Naming policy for F*: how Rust identifiers are rendered and which names
   must be avoided. *)
module FStarNamePolicy = struct
  include Concrete_ident.DefaultNamePolicy

  [@@@ocamlformat "disable"]

  (* Anonymous (tuple-struct) fields become [_0], [_1], ... *)
  let anonymous_field_transform index = "_" ^ index

  (* F* keywords and other identifiers that rendered names must not collide
     with.  Fix: the duplicate trailing entries "match", "if", "let" and
     "and" (already present earlier in the list) are removed. *)
  let reserved_words = Hash_set.of_list (module String) ["attributes";"noeq";"unopteq";"and";"assert";"assume";"begin";"by";"calc";"class";"default";"decreases";"b2t";"effect";"eliminate";"else";"end";"ensures";"exception";"exists";"false";"friend";"forall";"fun";"λ";"function";"if";"in";"include";"inline";"inline_for_extraction";"instance";"introduce";"irreducible";"let";"logic";"match";"returns";"as";"module";"new";"new_effect";"layered_effect";"polymonadic_bind";"polymonadic_subcomp";"noextract";"of";"open";"opaque";"private";"quote";"range_of";"rec";"reifiable";"reify";"reflectable";"requires";"set_range_of";"sub_effect";"synth";"then";"total";"true";"try";"type";"unfold";"unfoldable";"val";"when";"with";"_";"__SOURCE_FILE__";"__LINE__";"string"]
end

(* Identifier rendering, AST utilities and visitors, all specialized to the
   F* input language. *)
module RenderId = Concrete_ident.MakeRenderAPI (FStarNamePolicy)
module U = Ast_utils.Make (InputLanguage)
module Visitors = Ast_visitors.Make (InputLanguage)
open AST
(* Shorthand for the F* AST construction helpers. *)
module F = Fstar_ast
module Destruct = Ast_destruct.Make (InputLanguage)

module Context = struct
  (* Printing context threaded through the printer for one module. *)
  type t = {
    current_namespace : string list;  (* namespace of the module being printed *)
    items : item list;  (* items available to the printer — presumably the whole translation unit; confirm against callers *)
    interface_mode : bool;  (* whether .fsti interface declarations are emitted *)
    line_width : int;  (* maximum line width handed to the pretty-printer *)
  }
end

(** Converts a namespace to a module name: each segment gets its first
    letter capitalized and segments are joined with ".". *)
let module_name (ns : string list) : string =
  ns
  |> List.map ~f:(map_first_letter String.uppercase)
  |> String.concat ~sep:"."

(** Set to true when extracting core_models (HAX_CORE_MODELS_EXTRACTION_MODE set
    to 'on') *)
let hax_core_models_extraction =
  match Sys.getenv "HAX_CORE_MODELS_EXTRACTION_MODE" with
  | Some "on" -> true
  | _ -> false

module Make
    (Attrs : Attrs.WITH_ITEMS)
    (Ctx : sig
      val ctx : Context.t
    end) =
struct
  open Ctx

  module StringToFStar = struct
    (* Wraps a parser [f] so that any exception it raises is re-raised as a
       hax [FStarParseError] diagnostic located at [span]; [kind] names the
       syntactic category being parsed, for the error message. *)
    let catch_parsing_error (type a b) kind span (f : a -> b) x =
      try f x
      with err ->
        let details =
          "While parsing a " ^ kind ^ ", error: "
          ^ Base.Error.to_string_hum (Base.Error.of_exn err)
        in
        let kind = Types.FStarParseError { fstar_snippet = ""; details } in
        Error.raise { span; kind }

    (* Parses a string into an F* term, with diagnostic-friendly errors. *)
    let term span = catch_parsing_error "term" span F.term_of_string
  end

  (* Renders a PPrint document at the context's configured line width. *)
  let doc_to_string : PPrint.document -> string =
    FStar_Pprint.pretty_string 1.0 (Z.of_int ctx.line_width)

  (* Stringify an F* term / pattern / declaration via F*'s own
     document printers. *)
  let term_to_string : F.AST.term -> string =
    FStar_Parser_ToDocument.term_to_document >> doc_to_string

  let pat_to_string : F.AST.pattern -> string =
    FStar_Parser_ToDocument.pat_to_document >> doc_to_string

  let decl_to_string : F.AST.decl -> string =
    FStar_Parser_ToDocument.decl_to_document >> doc_to_string

  (* Maps a hax primitive identifier to its F* lident.  [Deref] has no F*
     counterpart and must have been eliminated before printing. *)
  let pprim_ident (span : span) (id : primitive_ident) =
    match id with
    | Deref -> Error.assertion_failure span "pprim_ident Deref"
    | Cast -> F.lid [ "cast" ]
    | LogicalOp And -> F.lid [ "Prims"; "op_AmpAmp" ]
    | LogicalOp Or -> F.lid [ "Prims"; "op_BarBar" ]

  let pnegative = function true -> "-" | false -> ""

  (* A trivial [Clone] implementation record — cloning is the identity and
     the pre/post-conditions are [True] — parsed from an F* snippet. *)
  let dummy_clone_impl =
    StringToFStar.term Span.default
      {fstar|{
        f_clone = (fun x -> x);
        f_clone_pre = (fun _ -> True);
        f_clone_post = (fun _ _ -> True);
      }|fstar}

  (* Renders a literal as an F* constant.  Integers and floats have no
     constant form in this backend — they are printed as expressions by
     [pliteral_as_expr] — so reaching them here is an error. *)
  let rec pliteral_as_const span (e : literal) =
    match e with
    | Bool b -> F.Const.Const_bool b
    | String s -> F.Const.Const_string (s, F.dummyRange)
    | Char c -> F.Const.Const_char (Char.to_int c)
    | Int _ ->
        Error.unimplemented
          ~details:
            "Integers cannot be printed as constants, they can only be printed \
             as expressions."
          span
    | Float _ ->
        Error.unimplemented
          ~details:
            "Floats cannot be printed as constants, they can only be printed \
             as expressions."
          span

  (* Renders a literal occurring in pattern position as an F* pattern.
     Machine integers are matched through the [MkInt] constructor. *)
  let rec pliteral_as_pat span (e : literal) =
    match e with
    | Int { value; negative; _ } ->
        let mkint =
          F.pat
          @@ F.AST.PatName (F.lid [ "Rust_primitives"; "Integers"; "MkInt" ])
        in
        let int_pat =
          F.pat
          @@ F.AST.PatConst
               (F.Const.Const_int (pnegative negative ^ value, None))
        in
        F.pat_app mkint [ int_pat ]
    | Float _ ->
        Error.unimplemented ~issue_id:464
          ~details:"Pattern matching on floats is not yet supported." span
    | _ -> F.pat @@ F.AST.PatConst (pliteral_as_const span e)

  (* Renders a literal as an F* expression.  Machine integers become
     [mk_<kind> n] applications, floats go through [mk_float], and the
     remaining literals fall back to their constant form. *)
  let pliteral_as_expr span (e : literal) =
    let const c = F.AST.Const c |> F.term in
    let int_lit value negative =
      const (F.Const.Const_int (pnegative negative ^ value, None))
    in
    match e with
    | Int { value; kind = { size; signedness }; negative } ->
        (* Build the constructor name "mk_{i,u}{8,...,size}". *)
        let sign = match signedness with Signed -> "i" | Unsigned -> "u" in
        let width =
          match size with
          | S8 -> "8"
          | S16 -> "16"
          | S32 -> "32"
          | S64 -> "64"
          | S128 -> "128"
          | SSize -> "size"
        in
        let mk = F.term @@ F.AST.Name (F.lid [ "mk_" ^ sign ^ width ]) in
        F.mk_e_app mk [ int_lit value negative ]
    | Float { value; negative; _ } ->
        F.mk_e_app
          (F.term_of_lid [ "mk_float" ])
          [
            const
              (F.Const.Const_string (pnegative negative ^ value, F.dummyRange));
          ]
    | _ -> const @@ pliteral_as_const span e

  (* Renders a concrete identifier, dropping the module path when the
     identifier lives in the module currently being printed. *)
  let pconcrete_ident (id : concrete_ident) =
    let rendered = RenderId.render id in
    if [%eq: string list] ctx.current_namespace rendered.path then
      F.lid [ rendered.name ]
    else F.lid (rendered.path @ [ rendered.name ])

  (* Renders a global identifier to an F* lident.  Tuples map onto
     [FStar.Pervasives.tupleN]/[MktupleN], which exist only up to arity 14.
     NOTE: match-arm order matters — the guarded tuple cases must be tried
     from most to least specific. *)
  let rec pglobal_ident (span : span) (id : global_ident) =
    match id with
    | `Concrete cid -> pconcrete_ident cid
    | `Primitive prim_id -> pprim_ident span prim_id
    | `TupleType 0 -> F.lid [ "prims"; "unit" ]
    | `TupleCons n when n <= 1 ->
        Error.assertion_failure span
          ("Got a [TupleCons " ^ string_of_int n ^ "]")
    | `TupleType n when n <= 14 ->
        F.lid [ "FStar"; "Pervasives"; "tuple" ^ string_of_int n ]
    | `TupleCons n when n <= 14 ->
        F.lid [ "FStar"; "Pervasives"; "Mktuple" ^ string_of_int n ]
    | `TupleType n | `TupleCons n ->
        (* Arity > 14: not representable in F*. *)
        let reason = "F* doesn't support tuple of size greater than 14" in
        Error.raise
          {
            kind = UnsupportedTupleSize { tuple_size = Int64.of_int n; reason };
            span;
          }
    | `TupleField _ | `Projector _ ->
        (* Field/projector idents are printed by [pfield_ident] instead. *)
        Error.assertion_failure span
          ("pglobal_ident: expected to be handled somewhere else: "
         ^ show_global_ident id)

  (* Renders a local identifier to a string.  Binders whose name starts with
     "impl " are rewritten to a stable hashed name [impl_<hash>]. *)
  let plocal_ident_str (e : Local_ident.t) =
    let e =
      match String.chop_prefix ~prefix:"impl " e.name with
      | Some rest ->
          { e with name = "impl_" ^ Int.to_string ([%hash: string] rest) }
      | None -> e
    in
    RenderId.local_ident e

  let plocal_ident e = F.id (plocal_ident_str e)

  (* Resolves a field name to an F* lident.  Tuple fields map to the
     positional projectors [_1], [_2], ... (1-indexed on the F* side). *)
  let pfield_ident span (f : global_ident) : F.Ident.lident =
    match f with
    | `Concrete cid | `Projector (`Concrete cid) -> pconcrete_ident cid
    | `Projector (`TupleField (n, _len)) | `TupleField (n, _len) ->
        F.lid [ "_" ^ Int.to_string (n + 1) ]
    | _ ->
        Error.assertion_failure span
          ("pfield_ident: not a valid field name in F* backend: "
         ^ show_global_ident f)

  (* [Some n] when the rendered name of [id] is an integer (i.e. the field
     is positional), [None] otherwise. *)
  let index_of_field_concrete id =
    match Int.of_string (RenderId.render id).name with
    | n -> Some n
    | exception _ -> None

  let index_of_field = function
    | `Concrete id -> index_of_field_concrete id
    | `TupleField (nth, _) -> Some nth
    | _ -> None

  let is_field_an_index f = Option.is_some (index_of_field f)

  (* Lookup table from hax/Rust operation identifiers to F* operator
     symbols, paired with their arity; used to print applications of these
     functions in infix/prefix form. *)
  let operators =
    let c = Global_ident.of_name ~value:true in
    [
      (c Rust_primitives__hax__array_of_list, (3, ".[]<-"));
      (c Core__ops__index__Index__index, (2, ".[]"));
      (c Core__ops__bit__Not__not, (1, "~."));
      (c Rust_primitives__hax__machine_int__not, (1, "~."));
      (c Rust_primitives__hax__machine_int__add, (2, "+!"));
      (c Rust_primitives__hax__machine_int__sub, (2, "-!"));
      (c Rust_primitives__hax__machine_int__div, (2, "/!"));
      (c Rust_primitives__hax__machine_int__mul, (2, "*!"));
      (c Rust_primitives__hax__machine_int__rem, (2, "%!"));
      (c Rust_primitives__hax__machine_int__shl, (2, "<>!"));
      (c Rust_primitives__hax__machine_int__bitxor, (2, "^."));
      (c Rust_primitives__hax__machine_int__bitor, (2, "|."));
      (c Rust_primitives__hax__machine_int__bitand, (2, "&."));
      (c Core__cmp__PartialEq__eq, (2, "=."));
      (c Rust_primitives__hax__machine_int__eq, (2, "=."));
      (c Core__cmp__PartialEq__ne, (2, "<>."));
      (c Rust_primitives__hax__machine_int__ne, (2, "<>."));
      (c Rust_primitives__hax__machine_int__le, (2, "<=."));
      (c Rust_primitives__hax__machine_int__lt, (2, "<."));
      (c Rust_primitives__hax__machine_int__gt, (2, ">."));
      (c Rust_primitives__hax__machine_int__ge, (2, ">=."));
      (`Primitive (LogicalOp And), (2, "&&"));
      (`Primitive (LogicalOp Or), (2, "||"));
      (c Rust_primitives__hax__int__add, (2, "+"));
      (c Rust_primitives__hax__int__sub, (2, "-"));
      (c Rust_primitives__hax__int__mul, (2, "*"));
      (c Rust_primitives__hax__int__div, (2, "/"));
      (c Rust_primitives__hax__int__rem, (2, "%"));
      (c Rust_primitives__hax__int__neg, (1, "-"));
      (c Rust_primitives__hax__int__ge, (2, ">="));
      (c Rust_primitives__hax__int__le, (2, "<="));
      (c Rust_primitives__hax__int__gt, (2, ">"));
      (c Rust_primitives__hax__int__lt, (2, "<"));
      (c Rust_primitives__hax__int__ne, (2, "<>"));
      (c Rust_primitives__hax__int__eq, (2, "="));
      (c Hax_lib__prop__constructors__and, (2, "/\\"));
      (c Hax_lib__prop__constructors__or, (2, "\\/"));
      (c Hax_lib__prop__constructors__not, (1, "~"));
      (c Hax_lib__prop__constructors__eq, (2, "=="));
      (c Hax_lib__prop__constructors__ne, (2, "=!="));
      (c Hax_lib__prop__constructors__implies, (2, "==>"));
    ]
    |> Map.of_alist_exn (module Global_ident)

  (* Renders a hax type to an F* term.  Head of the mutually recursive
     printer chain (see the [and] definitions below). *)
  let rec pty span (t : ty) =
    match t with
    | TBool -> F.term_of_lid [ "bool" ]
    | TChar -> F.term_of_lid [ "FStar"; "Char"; "char" ]
    | TInt k -> F.term_of_lid [ show_int_kind k ]
    | TStr -> F.term_of_lid [ "string" ]
    | TSlice { ty; _ } ->
        F.mk_e_app (F.term_of_lid [ "t_Slice" ]) [ pty span ty ]
    | TApp { ident = `TupleType 0 as ident; args = [] } ->
        F.term @@ F.AST.Name (pglobal_ident span ident)
    | TApp { ident = `TupleType 1; args = [ GType ty ] } -> pty span ty
    (* Tuples of arity >= 2 print as the infix product [a & b & ...]. *)
    | TApp { ident = `TupleType n; args } when n >= 2 -> (
        let args =
          List.filter_map
            ~f:(function GType t -> Some (pty span t) | _ -> None)
            args
        in
        let mk_star a b = F.term @@ F.AST.Op (F.id "&", [ a; b ]) in
        match args with
        | hd :: tl ->
            F.term @@ F.AST.Paren (List.fold_left ~init:hd ~f:mk_star tl)
        | _ -> Error.assertion_failure span "Tuple type: bad arity")
    | TApp { ident; args } ->
        let base = F.term @@ F.AST.Name (pglobal_ident span ident) in
        let args = List.map ~f:(pgeneric_value span) args in
        F.mk_e_app base args
    | TArrow (inputs, output) ->
        F.mk_e_arrow (List.map ~f:(pty span) inputs) (pty span output)
    | TFloat _ -> F.term_of_lid [ "float" ]
    | TArray { typ; length } ->
        F.mk_e_app (F.term_of_lid [ "t_Array" ]) [ pty span typ; pexpr length ]
    | TParam i -> F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident i)
    (* Associated types on [Self] print as the bare item name; otherwise we
       project out of the printed impl expression when one is available. *)
    | TAssociatedType { impl = { kind = Self; _ }; item } ->
        F.term @@ F.AST.Var (F.lid [ (RenderId.render item).name ])
    | TAssociatedType { impl; item } -> (
        match pimpl_expr span impl with
        | Some impl ->
            F.term @@ F.AST.Project (impl, F.lid [ (RenderId.render item).name ])
        | None -> F.term @@ F.AST.Wild)
    | TOpaque s -> F.term @@ F.AST.Wild
    | TDyn { goals; _ } ->
        let traits = List.map ~f:(pdyn_trait_goal span) goals in
        let dyn = F.AST.Var (F.lid [ "dyn" ]) |> F.term in
        let length =
          F.AST.Const
            (FStar_Const.Const_int (List.length goals |> Int.to_string, None))
          |> F.term
        in
        F.mk_e_app dyn (length :: traits)
    (* Refutation case: all remaining feature-gated constructors are off. *)
    | _ -> .

  and pdyn_trait_goal span (goal : dyn_trait_goal) =
    (* Prints a `dyn` trait goal as the lambda `fun z -> Trait z args...`.
       NOTE: the hard-coded binder name "z" may shadow an existing `z`. *)
    let binder_name = "z" in
    let binder_pat = F.pat @@ F.AST.PatVar (F.id binder_name, None, []) in
    let trait_head = F.term @@ F.AST.Var (pconcrete_ident goal.trait) in
    let self_arg = F.term @@ F.AST.Var (F.lid [ binder_name ]) in
    let trait_args =
      self_arg :: List.map ~f:(pgeneric_value span) goal.non_self_args
    in
    F.mk_e_abs [ binder_pat ] (F.mk_e_app trait_head trait_args)

  and pimpl_expr span (ie : impl_expr) =
    (* Prints an impl expression (the witness for a trait bound) as an F*
       term.  Returns [None] when the impl-expr kind cannot be printed; the
       caller falls back to a wildcard in that case.  The "unstable" branches
       below are only taken when the `hax_core_models_extraction` flag is
       set. *)
    let some = Option.some in
    let hax_unstable_impl_exprs = hax_core_models_extraction in
    match ie.kind with
    | Concrete tr -> c_trait_goal span tr |> some
    | LocalBound { id } ->
        (* A bound already in scope: refer to it by its local identifier. *)
        let local_ident =
          Local_ident.{ name = id; id = Local_ident.mk_id Expr 0 }
        in
        F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident local_ident) |> some
    | ImplApp { impl; _ } when not hax_unstable_impl_exprs ->
        (* Stable mode: drop the arguments and print the head impl only. *)
        pimpl_expr span impl
    | Parent { impl; ident }
      when hax_unstable_impl_exprs && [%matches? Self _] impl.kind ->
        (* Parent of `Self`: refer to the super-trait field directly. *)
        let trait = "_super_" ^ ident.name in
        F.term_of_lid [ trait ] |> some
    | Parent { impl; ident } when hax_unstable_impl_exprs ->
        (* Parent of an arbitrary impl: project its super-trait field. *)
        let* impl = pimpl_expr span impl in
        let trait = "_super_" ^ ident.name in
        F.term @@ F.AST.Project (impl, F.lid [ trait ]) |> some
    | ImplApp { impl; args = [] } when hax_unstable_impl_exprs ->
        pimpl_expr span impl
    | ImplApp { impl; args } when hax_unstable_impl_exprs ->
        (* Apply the printed impl to its printed arguments; fail (None) if
           any argument is itself unprintable. *)
        let* impl = pimpl_expr span impl in
        let* args = List.map ~f:(pimpl_expr span) args |> Option.all in
        F.mk_e_app impl args |> some
    | Projection _ when hax_unstable_impl_exprs ->
        F.term_of_lid [ "_Projection" ] |> some
    | Dyn _ when hax_unstable_impl_exprs -> F.term_of_lid [ "_Dyn" ] |> some
    | Builtin _ when hax_unstable_impl_exprs ->
        F.term_of_lid [ "_Builtin" ] |> some
    | _ -> None

  and c_trait_goal span trait_goal =
    (* A trait goal `Trait<Args>` becomes the application `Trait args...`. *)
    let head = F.term @@ F.AST.Name (pconcrete_ident trait_goal.trait) in
    let printed_args = List.map ~f:(pgeneric_value span) trait_goal.args in
    F.mk_e_app head printed_args

  and pgeneric_value span (g : generic_value) =
    (* Prints one generic argument: a type or a const expression.  Lifetimes
       are statically impossible in this backend (refutation case). *)
    match g with
    | GConst e -> pexpr e
    | GType ty -> pty span ty
    | GLifetime _ -> .

  (* Entry point for pattern printing: top-level ("shallow") position, where
     disjunctive patterns are still allowed. *)
  and ppat (p : pat) = ppat' true p

  and ppat' (shallow : bool) (p : pat) =
    (* Prints a pattern.  [shallow] is true only at the top level of a
       pattern: disjunctive (`|`) patterns are legal there and nowhere else
       (nested ones were removed by the `HoistDisjunctions` phase). *)
    let ppat = ppat' false in
    match p.p with
    | PWild -> F.wild
    | PAscription { typ; pat = { p = PBinding _; _ } as pat } ->
        (* Only bindings get a type ascription; other ascriptions are
           dropped by the next branch. *)
        F.pat @@ F.AST.PatAscribed (ppat pat, (pty p.span typ, None))
    | PAscription { pat; _ } -> ppat pat
    | PBinding
        {
          mut = Immutable;
          mode = _;
          subpat = None;
          var;
          typ = _ (* we skip type annot here *);
        } ->
        F.pat @@ F.AST.PatVar (plocal_ident var, None, [])
    | POr { subpats } when shallow ->
        F.pat @@ F.AST.PatOr (List.map ~f:ppat subpats)
    | POr _ ->
        Error.assertion_failure p.span
          "Nested disjuntive patterns should have been eliminated by phase \
           `HoistDisjunctions` (see PR #830)."
    | PArray { args } -> F.pat @@ F.AST.PatList (List.map ~f:ppat args)
    | PConstruct { constructor = `TupleCons 0; fields = [] } ->
        (* The unit pattern. *)
        F.pat @@ F.AST.PatConst F.Const.Const_unit
    | PConstruct { constructor = `TupleCons 1; fields = [ { pat } ] } ->
        (* 1-tuples are transparent. *)
        ppat pat
    | PConstruct { constructor = `TupleCons n; fields } ->
        F.pat
        @@ F.AST.PatTuple (List.map ~f:(fun { pat } -> ppat pat) fields, false)
    | PConstruct { constructor; fields; is_record; is_struct } ->
        let pat_rec () =
          F.pat @@ F.AST.PatRecord (List.map ~f:pfield_pat fields)
        in
        (* Struct records print as bare record patterns; enum record
           variants print as the constructor applied to a record pattern. *)
        if is_struct && is_record then pat_rec ()
        else
          let pat_name =
            F.pat @@ F.AST.PatName (pglobal_ident p.span constructor)
          in
          F.pat_app pat_name
          @@
          if is_record then [ pat_rec () ]
          else List.map ~f:(fun { field; pat } -> ppat pat) fields
    | PConstant { lit } -> pliteral_as_pat p.span lit
    | _ -> .

  and pfield_pat ({ field; pat } : field_pat) =
    (* A record-pattern field: the rendered field name paired with its
       printed subpattern.  The subpattern is printed first to preserve the
       original (right-to-left tuple) evaluation order. *)
    let printed_pat = ppat pat in
    let printed_field = pglobal_ident pat.span field in
    (printed_field, printed_pat)

  and pexpr (e : expr) =
    (* Wrapper around [pexpr_unwrapped] that degrades gracefully: if printing
       raises a span-free diagnostic error, emit the string literal
       "failure" instead of aborting the whole item. *)
    try pexpr_unwrapped e
    with Diagnostics.SpanFreeError.Exn _ ->
      (* let typ = *)
      (* try pty e.span e.typ *)
      (* with Diagnostics.SpanFreeError _ -> U.hax_failure_typ *)
      (* in *)
      F.term @@ F.AST.Const (F.Const.Const_string ("failure", F.dummyRange))

  and fun_application ~span f args ~trait_generic_args ~generic_args =
    (* Applies the printed head [f] to, in order: the trait's generic
       arguments (implicit, followed by a `_ by tcresolve` hole [F.solve]),
       the function's own generic arguments, then the explicit value
       arguments. *)
    let pgeneric_args ?qualifier =
      let qualifier_or default = Option.value ~default qualifier in
      (* By default, type arguments are implicit (`#`) and const arguments
         explicit, unless the caller forces a qualifier. *)
      List.map ~f:(function
        | GConst const -> (pexpr const, qualifier_or F.AST.Nothing)
        | GLifetime _ -> .
        | GType ty -> (pty span ty, qualifier_or F.AST.Hash))
    in
    let args = List.map ~f:(pexpr &&& Fn.const F.AST.Nothing) args in
    let trait_generic_args =
      Option.map
        ~f:
          (pgeneric_args ~qualifier:F.AST.Hash
          >> Fn.flip ( @ ) [ (F.solve, F.AST.Hash) ])
        trait_generic_args
      |> Option.value ~default:[]
    in
    F.mk_app f (trait_generic_args @ pgeneric_args generic_args @ args)

  and pexpr_unwrapped (e : expr) =
    (* Prints an expression to an F* term.  May raise
       [Diagnostics.SpanFreeError.Exn]; [pexpr] wraps this with a fallback.
       Branch order matters: specialized `App` shapes (props, projectors,
       operators, integer literals) must precede the generic `App` case. *)
    match e.e with
    | Literal l -> pliteral_as_expr e.span l
    | LocalVar local_ident ->
        F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident local_ident)
    | GlobalVar (`TupleCons 0)
    | Construct { constructor = `TupleCons 0; fields = [] } ->
        (* The unit value. *)
        F.AST.unit_const F.dummyRange
    | GlobalVar global_ident ->
        F.term @@ F.AST.Var (pglobal_ident e.span @@ global_ident)
    | App { f = { e = GlobalVar f; _ }; args = [ x ] }
      when Global_ident.eq_name Hax_lib__prop__constructors__from_bool f ->
        (* `Prop::from_bool b` becomes `b2t b`. *)
        let x = pexpr x in
        F.mk_e_app (F.term_of_lid [ "b2t" ]) [ x ]
    | App
        {
          f = { e = GlobalVar f; _ };
          args = [ { e = Closure { params = [ x ]; body = phi; _ }; _ } ];
        }
      when Global_ident.eq_name Hax_lib__prop__constructors__forall f ->
        (* `forall (fun x -> phi)` becomes an F* universal quantifier over
           the closure's (annotated) binder. *)
        let phi = pexpr phi in
        let binders =
          let b = Destruct.pat_PBinding x |> Option.value_exn in
          [
            F.AST.
              {
                b = F.AST.Annotated (plocal_ident b.var, pty x.span b.typ);
                brange = F.dummyRange;
                blevel = Un;
                aqual = None;
                battributes = [];
              };
          ]
        in
        F.term @@ F.AST.QForall (binders, ([], []), phi)
    | App
        {
          f = { e = GlobalVar f; _ };
          args = [ { e = Closure { params = [ x ]; body = phi; _ }; _ } ];
        }
      when Global_ident.eq_name Hax_lib__prop__constructors__exists f ->
        (* Same as above, for the existential quantifier. *)
        let phi = pexpr phi in
        let binders =
          let b = Destruct.pat_PBinding x |> Option.value_exn in
          [
            F.AST.
              {
                b = F.AST.Annotated (plocal_ident b.var, pty x.span b.typ);
                brange = F.dummyRange;
                blevel = Un;
                aqual = None;
                battributes = [];
              };
          ]
        in
        F.term @@ F.AST.QExists (binders, ([], []), phi)
    | App
        {
          f = { e = GlobalVar (`Projector (`TupleField (_, 1))) };
          args = [ arg ];
        } ->
        (* Projecting out of a 1-tuple is the identity. *)
        pexpr arg
    | App
        {
          f = { e = GlobalVar (`Projector (`TupleField (n, len))) };
          args = [ arg ];
        } ->
        (* F* tuple fields are named `_1`, `_2`, ... (1-indexed). *)
        F.term
        @@ F.AST.Project (pexpr arg, F.lid [ "_" ^ string_of_int (n + 1) ])
    | App { f = { e = GlobalVar (`Projector (`Concrete cid)) }; args = [ arg ] }
      ->
        F.term @@ F.AST.Project (pexpr arg, pconcrete_ident cid)
    | App { f = { e = GlobalVar x }; args } when Map.mem operators x ->
        (* Known operators print infix with their F* spelling. *)
        let arity, op = Map.find_exn operators x in
        if List.length args <> arity then
          Error.assertion_failure e.span
            ("pexpr: bad arity for operator application (" ^ op ^ ")");
        F.term @@ F.AST.Op (F.Ident.id_of_text op, List.map ~f:pexpr args)
    | App
        {
          f = { e = GlobalVar f; _ };
          args = [ { e = Literal (String s); _ } ];
          generic_args = _;
        }
      when Global_ident.eq_name Hax_lib__int__Impl_7___unsafe_from_str f ->
        (* `unsafe_from_str "..."`: check that the string is a (possibly
           negative) decimal integer, then emit it as an integer constant. *)
        (match
           String.chop_prefix ~prefix:"-" s
           |> Option.value ~default:s
           |> String.filter ~f:([%matches? '0' .. '9'] >> not)
         with
        | "" -> ()
        | s ->
            Error.assertion_failure e.span
            @@ "pexpr: expected a integer, found the following non-digit \
                chars: '" ^ s ^ "'");
        F.AST.Const (F.Const.Const_int (s, None)) |> F.term
    | App { f; args; generic_args; bounds_impls = _; trait } ->
        (* Generic application: delegate to [fun_application]. *)
        let trait_generic_args = Option.map ~f:snd trait in
        fun_application (pexpr f) args ~span:e.span ~trait_generic_args
          ~generic_args
    | If { cond; then_; else_ } ->
        (* A missing else-branch prints as unit. *)
        F.term
        @@ F.AST.If
             ( pexpr cond,
               None,
               None,
               pexpr then_,
               Option.value_map else_ ~default:F.unit ~f:pexpr )
    | Array l ->
        (* An array literal becomes
           `let list = [x; ...] in
            assert_norm (List.Tot.length list == len); array_of_list len list`
           so that F* can discharge the length obligation by normalization. *)
        let len = List.length l in
        let body = F.AST.mkConsList F.dummyRange (List.map ~f:pexpr l) in
        let array_of_list =
          let id =
            Concrete_ident.of_name ~value:true
              Rust_primitives__hax__array_of_list
          in
          F.term @@ F.AST.Name (pconcrete_ident id)
        in
        let list_ident = F.id "list" in
        let list = F.term_of_lid [ "list" ] in
        let assert_norm =
          F.term_of_lid [ "FStar"; "Pervasives"; "assert_norm" ]
        in
        let equality = F.term_of_lid [ "Prims"; "eq2" ] in
        let length = F.term_of_lid [ "List"; "Tot"; "length" ] in
        let length = F.mk_e_app length [ list ] in
        let len =
          F.term @@ F.AST.Const (F.Const.Const_int (Int.to_string len, None))
        in
        let array = F.mk_e_app array_of_list [ len; list ] in
        let formula = F.mk_e_app equality [ length; len ] in
        let assertion = F.mk_e_app assert_norm [ formula ] in
        let pat = F.AST.PatVar (list_ident, None, []) |> F.pat in
        let pat =
          (* An empty literal needs an explicit `Prims.list ty` ascription,
             since the element type cannot be inferred from the elements. *)
          match l with
          | [] ->
              let list_ty =
                let prims_list = F.term_of_lid [ "Prims"; "list" ] in
                let inner_typ =
                  match e.typ with
                  | TArray { typ; _ } -> pty e.span typ
                  | _ ->
                      Error.assertion_failure e.span
                        "Malformed type for array literal"
                in
                F.mk_e_app prims_list [ inner_typ ]
              in
              F.pat @@ F.AST.PatAscribed (pat, (list_ty, None))
          | _ -> pat
        in
        F.term
        @@ F.AST.Let
             ( NoLetQualifier,
               [ (None, (pat, body)) ],
               F.term @@ F.AST.Seq (assertion, array) )
    | Let { lhs; rhs; body; monadic = Some (monad, _) } ->
        (* Monadic let: printed with the F* let-operators `let|`, `let?`
           or `let!`, depending on the monad. *)
        let p =
          F.pat @@ F.AST.PatAscribed (ppat lhs, (pty lhs.span lhs.typ, None))
        in
        let op =
          "let"
          ^
          match monad with
          | MResult _ -> "|"
          | MOption -> "?"
          | MException _ -> "!"
        in
        F.term @@ F.AST.LetOperator ([ (F.id op, p, pexpr rhs) ], pexpr body)
    | Let { lhs; rhs; body; monadic = None } ->
        let rec ascribe_tuple_components pattern =
          match pattern with
          | { p = PConstruct { constructor = `TupleCons n1; fields; _ }; _ }
            when n1 > 1 ->
              (* F* type inference works better if the ascription is on each component instead of the whole tuple. *)
              F.pat
              @@ F.AST.PatTuple
                   ( List.map
                       ~f:(fun { pat } -> ascribe_tuple_components pat)
                       fields,
                     false )
          | _ ->
              (* TODO: temp patch that remove annotation when we see an associated type *)
              if [%matches? TAssociatedType _] @@ U.remove_tuple1 pattern.typ
              then ppat pattern
              else
                F.pat
                @@ F.AST.PatAscribed
                     (ppat pattern, (pty pattern.span pattern.typ, None))
        in
        F.term
        @@ F.AST.Let
             ( NoLetQualifier,
               [ (None, (ascribe_tuple_components lhs, pexpr rhs)) ],
               pexpr body )
    | EffectAction _ -> .
    | Match { scrutinee; arms } ->
        F.term
        @@ F.AST.Match (pexpr scrutinee, None, None, List.map ~f:parm arms)
    | Ascription { e; typ } ->
        F.term @@ F.AST.Ascribed (pexpr e, pty e.span typ, None, false)
    | Construct { constructor = `TupleCons 1; fields = [ (_, e') ]; base } ->
        (* 1-tuples are transparent. *)
        pexpr e'
    | Construct { constructor = `TupleCons n; fields; base = None } ->
        F.AST.mkTuple (List.map ~f:(snd >> pexpr) fields) F.dummyRange
    | Construct
        { is_record = true; is_struct = true; constructor; fields; base } ->
        (* Struct with named fields: a plain record literal, optionally with
           a `with`-base. *)
        F.term
        @@ F.AST.Record
             ( Option.map ~f:(fst >> pexpr) base,
               List.map
                 ~f:(fun (f, e) -> (pfield_ident e.span f, pexpr e))
                 fields )
    | Construct { is_record = false; constructor; fields; base } ->
        if [%matches? Some _] base then
          Diagnostics.failure ~context:(Backend FStar) ~span:e.span
            (AssertionFailure { details = "non-record type with base present" });
        F.mk_e_app (F.term @@ F.AST.Name (pglobal_ident e.span constructor))
        @@ List.map ~f:(snd >> pexpr) fields
    | Construct { is_record = true; constructor; fields; base } ->
        (* Enum variant with named fields: the constructor applied to a
           record literal. *)
        let r =
          F.term
          @@ F.AST.Record
               ( Option.map ~f:(fst >> pexpr) base,
                 List.map
                   ~f:(fun (f, e') -> (pglobal_ident e.span f, pexpr e'))
                   fields )
        in
        F.mk_e_app
          (F.term @@ F.AST.Name (pglobal_ident e.span constructor))
          [ r ]
    | Closure { params; body } ->
        (* Each parameter is replaced by a fresh variable (named from the
           binding, or `temp_i` for non-binding patterns); the original
           pattern is then re-bound via a `let` at the top of the body. *)
        let params =
          List.mapi
            ~f:(fun i p ->
              match p.p with
              | PBinding { var; subpat = None; _ } -> (var, p)
              | _ ->
                  ( Local_ident.
                      { name = "temp_" ^ Int.to_string i; id = mk_id Expr (-1) },
                    p ))
            params
        in
        let body =
          let f (lid, (pat : pat)) =
            let rhs = { e = LocalVar lid; span = pat.span; typ = pat.typ } in
            U.make_let pat rhs
          in
          List.fold_right ~init:body ~f params
        in
        let mk_pat ((lid, pat) : local_ident * pat) =
          ppat (U.make_var_pat lid pat.typ pat.span)
        in
        F.mk_e_abs (List.map ~f:mk_pat params) (pexpr body)
    | Return { e } ->
        F.term @@ F.AST.App (F.term_of_lid [ "RETURN_STMT" ], pexpr e, Nothing)
    | MacroInvokation { macro; args; witness } ->
        Error.raise
        @@ {
             kind = UnsupportedMacro { id = [%show: global_ident] macro };
             span = e.span;
           }
    | Quote quote -> pquote e.span quote |> StringToFStar.term e.span
    | _ -> .

  (** Renders a `quote` (a mix of verbatim text and AST fragments) to a
      single string. *)
  and pquote span { contents; _ } =
    let render_fragment fragment =
      match fragment with
      | Verbatim code -> code
      | Expr e -> term_to_string (pexpr e)
      | Pattern p -> pat_to_string (ppat p)
      | Typ t -> term_to_string (pty span t)
    in
    contents |> List.map ~f:render_fragment |> String.concat

  (* Prints a match arm as (pattern, no when-guard, body).  The body is
     printed first to preserve the original right-to-left tuple evaluation
     order. *)
  and parm { arm = { arm_pat; body } } =
    let printed_body = pexpr body in
    let printed_pat = ppat arm_pat in
    (printed_pat, None, printed_body)

  (** Helpers for building F* binders out of hax generics and types. *)
  module FStarBinder = struct
    (* How the binder is passed at call sites: implicitly (`#`), via
       typeclass resolution, or explicitly. *)
    type kind = Implicit | Tcresolve | Explicit
    type t = { kind : kind; ident : F.Ident.ident; typ : F.AST.term }

    let make_explicit x = { x with kind = Explicit }

    (* Makes implicit binders explicit; typeclass binders are kept as-is. *)
    let implicit_to_explicit x =
      if [%matches? Tcresolve] x.kind then x else make_explicit x

    (* Type parameters become implicit `Type0` binders; const generics
       become explicit binders at their printed type.  Lifetimes are not
       supported. *)
    let of_generic_param span (p : generic_param) : t =
      let ident = plocal_ident p.ident in
      match p.kind with
      | GPLifetime _ -> Error.assertion_failure span "pgeneric_param:LIFETIME"
      | GPType -> { kind = Implicit; typ = F.type0_term; ident }
      | GPConst { typ } -> { kind = Explicit; typ = pty span typ; ident }

    (* Turns one generic constraint into a binder, when possible:
       - a trait constraint becomes a typeclass-resolved binder;
       - a projection constraint on a local bound becomes an implicit
         binder of a refined unit type carrying `impl.Item == typ`;
       - any other constraint is dropped (None). *)
    let of_generic_constraint span (nth : int) (c : generic_constraint) =
      match c with
      | GCLifetime _ -> .
      | GCType { goal; name } ->
          let typ = c_trait_goal span goal in
          Some { kind = Tcresolve; ident = F.id name; typ }
      | GCProjection { impl = { kind = LocalBound { id }; _ }; assoc_item; typ }
        ->
          let proj =
            F.term
            @@ F.AST.Project
                 (F.term @@ F.AST.Var (F.lid [ id ]), pconcrete_ident assoc_item)
          in
          let typ =
            F.mk_refined "_" (F.term_of_string "unit") (fun ~x ->
                F.term
                @@ F.AST.Op (FStar_Ident.id_of_text "==", [ proj; pty span typ ]))
          in
          Some { kind = Implicit; typ; ident = FStar_Ident.id_of_text "_" }
      | _ -> None

    (* All binders of a generics clause: parameters first, then constraints.
       Trait constraints are sorted before projection constraints so the
       latter can refer to the former. *)
    let of_generics span generics : t list =
      List.map ~f:(of_generic_param span) generics.params
      @ (generics.constraints
        |> List.sort ~compare:(fun c1 c2 ->
               match (c1, c2) with
               | GCType _, GCProjection _ -> -1
               | GCProjection _, GCType _ -> 1
               | _ -> 0)
        |> List.filter_mapi ~f:(of_generic_constraint span))

    (* Makes an explicit, positionally-named binder `xN` from a hax type. *)
    let of_typ span (nth : int) typ : t =
      let ident = F.id ("x" ^ Int.to_string nth) in
      { kind = Explicit; ident; typ = pty span typ }

    (** Makes an F* binder from a name and an F* type *)
    let of_named_fstar_typ span name typ : t =
      let ident = plocal_ident name in
      { kind = Explicit; ident; typ }

    (** Makes an F* binder from a name and an hax type *)
    let of_named_typ span name = pty span >> of_named_fstar_typ span name

    (* Renders a binder as a type-ascribed pattern; typeclass binders carry
       the `tcresolve` meta attribute so F* solves them automatically. *)
    let to_pattern (x : t) : F.AST.pattern =
      let subpat =
        match x.kind with
        | Tcresolve ->
            let tcresolve =
              Some
                (F.AST.Meta
                   (F.term @@ F.AST.Var FStar_Parser_Const.tcresolve_lid))
            in
            F.pat @@ F.AST.PatVar (x.ident, tcresolve, [])
        | _ ->
            let aqual =
              match x.kind with Implicit -> Some F.AST.Implicit | _ -> None
            in
            F.pat @@ F.AST.PatVar (x.ident, aqual, [])
      in
      F.pat @@ F.AST.PatAscribed (subpat, (x.typ, None))

    let to_typ (x : t) : F.AST.term = x.typ
    let to_ident (x : t) : F.Ident.ident = x.ident

    (* Renders a binder as a term (a variable reference). *)
    let to_term (x : t) : F.AST.term =
      F.term @@ F.AST.Var (FStar_Ident.lid_of_ns_and_id [] (to_ident x))

    (* The application qualifier matching the binder kind (`#`-implicit for
       Implicit/Tcresolve, nothing for Explicit). *)
    let to_imp (x : t) : F.AST.imp =
      match x.kind with Tcresolve | Implicit -> Hash | Explicit -> Nothing

    let to_qualified_term : t -> F.AST.term * F.AST.imp = to_term &&& to_imp

    let to_qualifier (x : t) : F.AST.arg_qualifier option =
      match x.kind with
      | Tcresolve -> Some TypeClassArg
      | Implicit -> Some Implicit
      | Explicit -> None

    (* Renders as an F* binder AST node (as used in `Product` types). *)
    let to_binder (x : t) : F.AST.binder =
      F.AST.
        {
          b = F.AST.Annotated (x.ident, x.typ);
          brange = F.dummyRange;
          blevel = Un;
          aqual = to_qualifier x;
          battributes = [];
        }
  end

  (** Prints the type witnessed by a generic constraint (its trait goal).
      Lifetime and projection constraints are not printable here: fail with
      an explicit diagnostic rather than a bare [Match_failure].  (The
      original match omitted [GCProjection], a live constructor handled in
      [FStarBinder.of_generic_constraint], making this function partial.) *)
  let rec pgeneric_constraint_type span (c : generic_constraint) =
    match c with
    | GCLifetime _ ->
        Error.assertion_failure span "pgeneric_constraint_bd:LIFETIME"
    | GCType { goal; name = _ } -> c_trait_goal span goal
    | GCProjection _ ->
        (* Previously unhandled: crashed with an opaque [Match_failure]. *)
        Error.assertion_failure span "pgeneric_constraint_bd:PROJECTION"

  (* Prints [ty], wrapping it in a refinement `x:ty{...}` whenever the
     attributes carry an associated refinement (over [free_variables]). *)
  let pmaybe_refined_ty span (free_variables : string list) (attrs : attrs)
      (binder_name : string) (ty : ty) : F.AST.term =
    let refinement_opt =
      Attrs.associated_refinement_in_type span free_variables attrs
    in
    match refinement_opt with
    | None -> pty span ty
    | Some refinement ->
        F.mk_refined binder_name (pty span ty) (fun ~x -> pexpr refinement)

  (* Wraps a printed type [typ] into an effectful F* computation type
     (`Prims.Pure`, `Prims.Tot` or `Lemma`), attaching any `requires`,
     `ensures`, `decreases` and `SMTPat` clauses found in [attrs]. *)
  let add_clauses_effect_type ~self ~no_tot_abbrev (attrs : attrs) typ :
      F.AST.typ =
    (* Looks up the clause of kind [kind] in [attrs], prints it and wraps it
       with [f].  [keep_last_args] is forwarded to the attribute lookup;
       [map_expr] pre-processes the clause expression. *)
    let attr_term ?keep_last_args ?map_expr kind f =
      (* A clause on a method with a `self` produces a function whose first argument is `self_`.
         `subst_self` will substitute that first argument `self_` into the provided local identifier `self`.
      *)
      let subst_self : (expr -> expr) option =
        (* If `self` was present on the original function.  *)
        let* self = self in
        (* Lookup the pre/post/decreases function, get the first argument: that is `self`. *)
        let* self' =
          let* _, params, _ = Attrs.associated_fn kind attrs in
          let* first_param = List.hd params in
          let* { var; _ } = Destruct.pat_PBinding first_param.pat in
          Some var
        in
        let f id = if [%eq: local_ident] self' id then self else id in
        Some ((U.Mappers.rename_local_idents f)#visit_expr ())
      in
      Attrs.associated_expr ?keep_last_args kind attrs
      |> Option.map
           ~f:
             (Option.value ~default:Fn.id subst_self
             >> Option.value ~default:Fn.id map_expr
             >> pexpr >> f >> F.term)
    in
    (* Unwraps `Hax_lib::any_to_unit e` down to `e` wherever it appears in a
       clause expression. *)
    let extract_any_to_unit_payload =
      let visitor =
        object
          inherit [_] U.Visitors.map as super

          method! visit_expr () e =
            match e.e with
            | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }
              when Global_ident.eq_name Hax_lib__any_to_unit f ->
                e
            | _ -> super#visit_expr () e
        end
      in
      visitor#visit_expr ()
    in
    let decreases =
      attr_term Decreases ~map_expr:extract_any_to_unit_payload (fun t ->
          F.AST.Decreases (t, None))
    in
    let smtpat =
      let smt_pat = F.term_of_lid [ "SMTPat" ] in
      attr_term SMTPat ~map_expr:extract_any_to_unit_payload (fun t ->
          let payload = F.mk_e_app smt_pat [ t ] in
          (F.AST.mkConsList F.dummyRange [ payload ]).tm)
    in
    let is_lemma = Attrs.lemma attrs in
    (* The pre/post pair, each defaulting to a trivial clause; [None] when
       neither is needed, so the type stays a plain `Tot`. *)
    let prepost_bundle =
      let trivial_pre = F.term_of_lid [ "Prims"; "l_True" ] in
      let trivial_post =
        if is_lemma then trivial_pre
        else F.mk_e_abs [ F.pat @@ F.AST.PatWild (None, []) ] trivial_pre
      in
      let pre = attr_term Requires (fun t -> F.AST.Requires (t, None)) in
      let post =
        (* A lemma's `ensures` takes no result argument; otherwise keep the
           closure's last (result) argument. *)
        let keep_last_args = if is_lemma then 0 else 1 in
        attr_term ~keep_last_args Ensures (fun t -> F.AST.Ensures (t, None))
      in
      if is_lemma || no_tot_abbrev || Option.is_some pre || Option.is_some post
      then
        Some
          ( Option.value ~default:trivial_pre pre,
            Option.value ~default:trivial_post post )
      else None
    in
    let args =
      (Option.map ~f:(fun (req, ens) -> [ req; ens ]) prepost_bundle
      |> Option.value ~default:[])
      @ Option.to_list decreases @ Option.to_list smtpat
    in
    match args with
    | [] -> typ
    | _ ->
        let mk namespace eff = F.term_of_lid (namespace @ [ eff ]) in
        let prims = mk [ "Prims" ] in
        let eff =
          if Option.is_some prepost_bundle then
            if is_lemma then mk [] "Lemma" else prims "Pure"
          else prims "Tot"
        in
        (* `Lemma` does not take the result type as its first argument. *)
        F.mk_e_app eff (if is_lemma then List.drop args 1 else typ :: args)

  (** Prints doc comments out of a list of attributes *)
  let pdoc_comments attrs =
    (* Keep only doc-comment attributes, render each to F* syntax (`///`
       per line, or a `(** ... *)` block) and tag it as verbatim interface
       text. *)
    List.filter_map attrs ~f:(fun (attr : attr) ->
        match attr.kind with
        | DocComment { kind; body } ->
            let rendered =
              match kind with
              | DCKLine ->
                  String.split_lines body
                  |> List.map ~f:(fun line -> "///" ^ line)
                  |> String.concat_lines
              | DCKBlock -> "(**" ^ body ^ "*)"
            in
            Some (`VerbatimIntf (rendered, `NoNewline))
        | _ -> None)

  (* Prints an item, prefixing it with its doc comments.  Any span-free
     diagnostic raised while printing degrades to a `Comment` carrying the
     error and the last AST seen, instead of aborting extraction. *)
  let rec pitem (e : item) :
      [> `Impl of F.AST.decl
      | `Intf of F.AST.decl
      | `VerbatimImpl of string * [ `NoNewline | `Newline ]
      | `VerbatimIntf of string * [ `NoNewline | `Newline ]
      | `Comment of string ]
      list =
    try
      match pitem_unwrapped e with
      | [] -> []
      | printed ->
          (* Emit doc comments only when the item itself produced output. *)
          pdoc_comments e.attrs @ printed
    with Diagnostics.SpanFreeError.Exn err ->
      let payload = Diagnostics.SpanFreeError.payload err in
      let message = [%show: Diagnostics.Context.t * Diagnostics.kind] payload in
      let last_ast = U.LiftToFullAst.item e |> Print_rust.pitem_str in
      [
        `Comment
          ("item error backend: " ^ message ^ "\n\nLast AST:\n" ^ last_ast);
      ]

  and pitem_unwrapped (e : item) :
      [> `Impl of F.AST.decl
      | `Intf of F.AST.decl
      | `VerbatimImpl of string * [ `NoNewline | `Newline ]
      | `VerbatimIntf of string * [ `NoNewline | `Newline ]
      | `Comment of string ]
      list =
    let is_erased = Attrs.is_erased e.attrs in
    let erased_impl name ty attrs binders =
      let name' = F.id_prime name in
      let pat = F.AST.PatVar (name, None, []) in
      let term = F.term @@ F.AST.Var (F.lid_of_id @@ name') in
      let pat, term =
        match binders with
        | [] -> (pat, term)
        | _ ->
            ( F.AST.PatApp
                (F.pat pat, List.map ~f:FStarBinder.to_pattern binders),
              List.fold_left binders ~init:term ~f:(fun term binder ->
                  let binder_term, binder_imp =
                    FStarBinder.to_qualified_term binder
                  in
                  F.term @@ F.AST.App (term, binder_term, binder_imp)) )
      in
      [
        F.decl ~quals:[ Assumption ] ~fsti:false ~attrs
        @@ F.AST.Assume (name', ty);
        F.decl
          ~quals:
            (if ctx.interface_mode then []
             else [ Unfold_for_unification_and_vcgen ])
          ~fsti:false
        @@ F.AST.TopLevelLet (NoLetQualifier, [ (F.pat @@ pat, term) ]);
      ]
    in
    match e.v with
    | Alias { name; item } ->
        (* These should come from bundled items (in the case of cyclic module dependencies).
           We make use of this f* feature: https://github.com/FStarLang/FStar/pull/3369 *)
        let bundle = (RenderId.render item).path |> module_name in
        [
          `VerbatimImpl
            ( Printf.sprintf "include %s {%s as %s}" bundle
                (RenderId.render item).name (RenderId.render name).name,
              `Newline );
        ]
    | Fn { name; generics; body; params } ->
        let name = F.id @@ (RenderId.render name).name in
        let pat = F.pat @@ F.AST.PatVar (name, None, []) in
        let generics = FStarBinder.of_generics e.span generics in
        let pat_args =
          List.map ~f:FStarBinder.to_pattern generics
          @ List.map
              ~f:(fun { pat; typ_span; typ } ->
                let span = Option.value ~default:e.span typ_span in
                F.pat @@ F.AST.PatAscribed (ppat pat, (pty span typ, None)))
              params
        in
        let pat = F.pat @@ F.AST.PatApp (pat, pat_args) in
        let qualifier = F.AST.(NoLetQualifier) in
        let impl =
          F.decl ~fsti:false
          @@ F.AST.TopLevelLet (qualifier, [ (pat, pexpr body) ])
        in
        let is_const = List.is_empty params in
        let ty =
          add_clauses_effect_type
            ~self:
              (let* hd = List.hd params in
               let* { var; _ } = Destruct.pat_PBinding hd.pat in
               let*? () = String.equal var.name "self" in
               Some var)
            ~no_tot_abbrev:(ctx.interface_mode && not is_const)
            e.attrs (pty body.span body.typ)
        in
        let arrow_typ =
          F.term
          @@ F.AST.Product
               ( List.map ~f:FStarBinder.to_binder generics
                 @ List.mapi
                     ~f:(fun i { pat; typ_span; typ } ->
                       let name =
                         match pat.p with
                         | PBinding { var; _ } ->
                             Some (RenderId.local_ident var)
                         | _ ->
                             (* TODO: this might generate bad code,
                                see
                                https://github.com/hacspec/hax/issues/402
                             *)
                             None
                       in
                       let name = Option.map ~f:F.id name in
                       let span = Option.value ~default:e.span typ_span in
                       pty span typ |> F.binder_of_term ?name)
                     params,
                 ty )
        in
        let pat = F.pat @@ F.AST.PatAscribed (pat, (ty, None)) in
        let full =
          F.decl @@ F.AST.TopLevelLet (qualifier, [ (pat, pexpr body) ])
        in

        let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in

        let erased = erased_impl name arrow_typ [] generics in
        let impl, full =
          if is_erased then (erased, erased) else ([ impl ], [ full ])
        in
        if ctx.interface_mode && ((not is_const) || is_erased) then intf :: impl
        else full
    | TyAlias { name; generics; ty } ->
        let pat =
          F.pat @@ F.AST.PatVar (F.id @@ (RenderId.render name).name, None, [])
        in
        let ty, quals =
          (* Adds a refinement if a refinement attribute is detected *)
          match Attrs.associated_expr ~keep_last_args:1 Ensures e.attrs with
          | Some { e = Closure { params = [ binder ]; body; _ }; _ } ->
              let binder, _ =
                U.Expect.pbinding_simple binder |> Option.value_exn
              in
              let ty =
                F.mk_refined (plocal_ident_str binder) (pty e.span ty)
                  (fun ~x -> pexpr body)
              in
              (ty, [])
          | _ -> (pty e.span ty, [ F.AST.Unfold_for_unification_and_vcgen ])
        in
        F.decls ~quals
        @@ F.AST.TopLevelLet
             ( NoLetQualifier,
               [
                 ( F.pat
                   @@ F.AST.PatApp
                        ( pat,
                          FStarBinder.(
                            of_generics e.span generics
                            |> List.map ~f:to_pattern) ),
                   ty );
               ] )
    | Type { name; generics; _ } when is_erased ->
        let generics =
          FStarBinder.of_generics e.span generics
          |> List.map ~f:FStarBinder.implicit_to_explicit
        in
        let ty = F.eqtype_term in
        let arrow_typ =
          F.term
          @@ F.AST.Product (List.map ~f:FStarBinder.to_binder generics, ty)
        in
        let name = F.id @@ (RenderId.render name).name in
        let erased = erased_impl name arrow_typ [] generics in
        let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in
        if ctx.interface_mode then intf :: erased else erased
    | Type
        {
          name;
          generics;
          variants = [ { arguments; is_record = true; _ } ];
          is_struct = true;
        } ->
        F.decls
        @@ F.AST.Tycon
             ( false,
               false,
               [
                 F.AST.TyconRecord
                   ( F.id @@ (RenderId.render name).name,
                     FStarBinder.of_generics e.span generics
                     |> List.map ~f:FStarBinder.implicit_to_explicit
                     |> List.map ~f:FStarBinder.to_binder,
                     None,
                     [],
                     List.map
                       ~f:(fun (prev, (field, ty, attrs)) ->
                         let fname : string = (RenderId.render field).name in
                         let fvars =
                           List.map prev ~f:(fun (field, _, _) ->
                               (RenderId.render field).name)
                         in
                         ( F.id fname,
                           None,
                           [],
                           pmaybe_refined_ty e.span fvars attrs fname ty ))
                       (inits arguments) );
               ] )
    | Type { name; generics; variants; _ } ->
        let self =
          F.mk_e_app
            (F.term_of_lid [ (RenderId.render name).name ])
            (List.map
               ~f:FStarBinder.(of_generic_param e.span >> to_ident)
               generics.params
            |> List.map ~f:(fun id -> F.term @@ F.AST.Name (F.lid_of_id id)))
        in

        let constructors =
          List.map
            ~f:(fun { name; arguments; is_record; _ } ->
              ( F.id (RenderId.render name).name,
                Some
                  (let field_indexes =
                     List.map ~f:(fst3 >> index_of_field_concrete) arguments
                   in
                   if is_record then
                     F.AST.VpRecord
                       ( List.map
                           ~f:(fun (field, ty, attrs) ->
                             let fname : string =
                               (RenderId.render field).name
                             in
                             (F.id fname, None, [], pty e.span ty))
                           arguments,
                         Some self )
                   else
                     F.AST.VpArbitrary
                       (F.term
                       @@ F.AST.Product
                            ( List.map
                                ~f:(fun (_, ty, _) ->
                                  F.mk_e_binder @@ F.AST.NoName (pty e.span ty))
                                arguments,
                              self ))),
                [] ))
            variants
        in
        F.decls
        @@ F.AST.Tycon
             ( false,
               false,
               [
                 F.AST.TyconVariant
                   ( F.id @@ (RenderId.render name).name,
                     FStarBinder.of_generics e.span generics
                     |> List.map ~f:FStarBinder.implicit_to_explicit
                     |> List.map ~f:FStarBinder.to_binder,
                     None,
                     constructors );
               ] )
    | IMacroInvokation { macro; argument; span } -> (
        let open Hacspeclib_macro_parser in
        let unsupported_macro () =
          Error.raise
          @@ {
               kind = UnsupportedMacro { id = [%show: concrete_ident] macro };
               span = e.span;
             }
        in
        match RenderId.render macro with
        | { path = "hacspec_lib" :: _; name } -> (
            let unwrap r =
              match r with
              | Ok r -> r
              | Error details ->
                  let macro_id = [%show: concrete_ident] macro in
                  Error.raise
                    {
                      kind = ErrorParsingMacroInvocation { macro_id; details };
                      span = e.span;
                    }
            in
            let mk_typ_name name = "t_" ^ String.lowercase name in
            match name with
            | "public_nat_mod" ->
                let o = PublicNatMod.parse argument |> unwrap in
                (F.decls_of_string @@ "unfold type " ^ mk_typ_name o.type_name
               ^ "  = nat_mod 0x" ^ o.modulo_value)
                @ F.decls_of_string @@ "unfold type "
                ^ mk_typ_name o.type_of_canvas
                ^ "  = lseq pub_uint8 "
                ^ string_of_int o.bit_size_of_field
            | "bytes" ->
                let o = Bytes.parse argument |> unwrap in
                F.decls_of_string @@ "unfold type " ^ mk_typ_name o.bytes_name
                ^ "  = lseq uint8 " ^ o.size
            | "public_bytes" ->
                let o = Bytes.parse argument |> unwrap in
                F.decls_of_string @@ "unfold type " ^ mk_typ_name o.bytes_name
                ^ "  = lseq uint8 " ^ o.size
            | "array" ->
                let o = Array.parse argument |> unwrap in
                let typ =
                  match o.typ with
                  | "U32" -> "uint32"
                  | "U16" -> "uint16"
                  | "U8" -> "uint8"
                  | usize -> "uint_size"
                in
                let size = o.size in
                let array_def =
                  F.decls_of_string @@ "unfold type " ^ mk_typ_name o.array_name
                  ^ "  = lseq " ^ typ ^ " " ^ size
                in
                let index_def =
                  match o.index_typ with
                  | Some index ->
                      F.decls_of_string @@ "unfold type "
                      ^ mk_typ_name (o.array_name ^ "_idx")
                      ^ " = nat_mod " ^ size
                  | None -> []
                in
                array_def @ index_def
            | "unsigned_public_integer" ->
                let o = UnsignedPublicInteger.parse argument |> unwrap in
                F.decls_of_string @@ "unfold type " ^ mk_typ_name o.integer_name
                ^ "  = lseq uint8 ("
                ^ (Int.to_string @@ ((o.bits + 7) / 8))
                ^ ")"
            | _ -> unsupported_macro ())
        | _ -> unsupported_macro ())
    | Trait { name; generics; items } ->
        let name_str = (RenderId.render name).name in
        let name_id = F.id @@ name_str in
        let fields =
          List.concat_map
            ~f:(fun i ->
              let name = (RenderId.render i.ti_ident).name in
              let generics = FStarBinder.of_generics i.ti_span i.ti_generics in
              let bds = generics |> List.map ~f:FStarBinder.to_binder in
              let fields =
                match i.ti_v with
                | TIType bounds ->
                    let t = F.type0_term in
                    (* let constraints = *)
                    (*   List.map *)
                    (*     ~f:(fun implements -> *)
                    (*       { typ = TApp { ident = i.ti_ident } }) *)
                    (*     bounds *)
                    (* in *)
                    ( F.id name,
                      None,
                      [ F.term @@ F.AST.Var FStar_Parser_Const.no_method_lid ],
                      t )
                    :: List.map
                         ~f:(fun
                             { goal = { trait; args }; name = impl_ident_name }
                           ->
                           let base =
                             F.term @@ F.AST.Name (pconcrete_ident trait)
                           in
                           let args =
                             List.map ~f:(pgeneric_value e.span) args
                           in
                           ( F.id (name ^ "_" ^ impl_ident_name),
                             (* Dodgy concatenation *)
                             None,
                             [],
                             F.mk_e_app base args ))
                         bounds
                | TIFn ty
                  when Attrs.find_unique_attr i.ti_attrs ~f:(function
                         | TraitMethodNoPrePost -> Some ()
                         | _ -> None)
                       |> Option.is_some ->
                    let weakest =
                      let h kind =
                        Attrs.associated_fns kind i.ti_attrs
                        |> List.hd
                        |> Option.map ~f:(fun attr ->
                               ( attr,
                                 [%eq: Attr_payloads.AssocRole.t] kind Requires
                               ))
                      in
                      Option.first_some (h Ensures) (h Requires)
                      |> Option.map
                           ~f:(fun ((generics, params, expr), is_req) ->
                             let dummy_self =
                               List.find generics.params
                                 ~f:[%matches? { kind = GPType _; _ }]
                               |> Option.value_or_thunk ~default:(fun () ->
                                      Error.assertion_failure i.ti_span
                                        ("Expected a first generic of type \
                                          `Self`. Instead generics params \
                                          are: "
                                        ^ [%show: generic_param list]
                                            generics.params))
                               |> fun x -> x.ident
                             in
                             let self =
                               Local_ident.{ name = "Self"; id = mk_id Typ 0 }
                             in
                             let renamer =
                               let f (id : local_ident) =
                                 if [%eq: string] dummy_self.name id.name then
                                   self
                                 else id
                               in
                               U.Mappers.rename_local_idents f
                             in
                             let generics =
                               renamer#visit_generics () generics
                             in
                             let params =
                               List.map ~f:(renamer#visit_param ()) params
                             in
                             let expr = renamer#visit_expr () expr in
                             (generics, params, expr, is_req))
                    in
                    let ty =
                      let variables =
                        let idents_visitor = U.Reducers.collect_local_idents in
                        idents_visitor#visit_trait_item () i
                        :: (Option.map
                              ~f:(fun (generics, params, expr, _) ->
                                [
                                  idents_visitor#visit_generics () generics;
                                  idents_visitor#visit_expr () expr;
                                ]
                                @ List.map
                                    ~f:(idents_visitor#visit_param ())
                                    params)
                              weakest
                           |> Option.value ~default:[])
                        |> Set.union_list (module Local_ident)
                        |> Set.to_list |> ref
                      in
                      let mk_fresh prefix =
                        let v = U.fresh_local_ident_in !variables prefix in
                        variables := v :: !variables;
                        v
                      in
                      let bindings = ref [] in
                      let f (p : param) =
                        let name =
                          match p.pat.p with
                          | PBinding { var; _ } -> var
                          | _ ->
                              let name = mk_fresh "x" in
                              let ({ span; typ; _ } : pat) = p.pat in
                              let expr = { e = LocalVar name; span; typ } in
                              bindings := (p.pat, expr) :: !bindings;
                              name
                        in
                        FStarBinder.of_named_typ p.pat.span name p.typ
                      in
                      weakest
                      |> Option.map ~f:(fun (generics, binders, expr, is_req) ->
                             (generics, List.map ~f binders, expr, is_req))
                      |> Option.map
                           ~f:(fun (generics, binders, (expr : expr), is_req) ->
                             let result_ident = mk_fresh "pred" in
                             let result_bd =
                               FStarBinder.of_named_fstar_typ expr.span
                                 result_ident F.type0_term
                             in
                             let expr = U.make_lets !bindings expr in
                             let expr = pexpr expr in
                             let result =
                               F.term
                               @@ F.AST.Var
                                    (plocal_ident result_ident |> F.lid_of_id)
                             in
                             let result =
                               F.AST.Refine
                                 ( FStarBinder.to_binder result_bd,
                                   (if is_req then Fn.flip else Fn.id)
                                     F.implies result expr )
                               |> F.term
                             in
                             F.AST.Product
                               ( List.map ~f:FStarBinder.to_binder binders,
                                 result )
                             |> F.term)
                      |> Option.value_or_thunk ~default:(fun _ ->
                             let ty = pty e.span ty in
                             match ty.tm with
                             | F.AST.Product (inputs, _) ->
                                 {
                                   ty with
                                   tm = F.AST.Product (inputs, F.type0_term);
                                 }
                             | _ -> F.type0_term)
                    in

                    let ty =
                      F.term
                      @@ F.AST.Product
                           (generics |> List.map ~f:FStarBinder.to_binder, ty)
                    in
                    [ (F.id name, None, [], ty) ]
                | TIFn (TArrow (inputs, output)) ->
                    let inputs =
                      List.mapi ~f:(FStarBinder.of_typ e.span) inputs
                    in
                    let inputs = generics @ inputs in
                    let output = pty e.span output in
                    let ty_pre_post =
                      let inputs =
                        List.map ~f:FStarBinder.to_qualified_term inputs
                      in
                      let add_pre n = n ^ "_pre" in
                      let pre_name_str =
                        (RenderId.render
                           (Concrete_ident.with_suffix `Pre i.ti_ident))
                          .name
                      in
                      let pre =
                        F.mk_app (F.term_of_lid [ pre_name_str ]) inputs
                      in
                      let result = F.term_of_lid [ "result" ] in
                      let add_post n = n ^ "_post" in
                      let post_name_str =
                        (RenderId.render
                           (Concrete_ident.with_suffix `Post i.ti_ident))
                          .name
                      in
                      let post =
                        F.mk_app
                          (F.term_of_lid [ post_name_str ])
                          (inputs @ [ (result, Nothing) ])
                      in
                      let post =
                        F.mk_e_abs
                          [ F.pat @@ F.AST.PatVar (F.id "result", None, []) ]
                          post
                      in
                      F.mk_e_app
                        (F.term_of_lid [ "Prims"; "Pure" ])
                        [ output; pre; post ]
                    in
                    let inputs = List.map ~f:FStarBinder.to_binder inputs in
                    let ty = F.term @@ F.AST.Product (inputs, ty_pre_post) in
                    [ (F.id name, None, [], ty) ]
                | TIFn non_arrow_ty ->
                    let inputs = generics in
                    let output = pty e.span non_arrow_ty in
                    let inputs = List.map ~f:FStarBinder.to_binder inputs in
                    let ty = F.term @@ F.AST.Product (inputs, output) in
                    [ (F.id name, None, [], ty) ]
                | _ -> .
              in
              List.map ~f:Fn.id
                (* ~f:(fun (n, q, a, ty) -> (n, q, a, F.mk_e_app bds ty)) *)
                fields)
            items
        in
        let constraints_fields : FStar_Parser_AST.tycon_record =
          generics.constraints
          |> List.filter_map ~f:(fun c ->
                 match c with
                 | GCType { goal = bound; name = id } ->
                     let name = "_super_" ^ id in
                     let typ = pgeneric_constraint_type e.span c in
                     Some (F.id name, None, [ F.Attrs.no_method ], typ)
                 | GCProjection _ ->
                     (* TODO: Not yet implemented, see https://github.com/hacspec/hax/issues/785 *)
                     None
                 | _ -> .)
        in
        let fields : FStar_Parser_AST.tycon_record =
          constraints_fields @ fields
        in
        let fields : FStar_Parser_AST.tycon_record =
          if List.is_empty fields then
            let marker_field = "__marker_trait_" ^ name_str in
            [ (F.id marker_field, None, [], pty e.span U.unit_typ) ]
          else fields
        in
        (* Binders are explicit on class definitions *)
        let bds =
          List.map
            ~f:
              FStarBinder.(
                of_generic_param e.span >> implicit_to_explicit >> to_binder)
            generics.params
        in
        let tcdef = F.AST.TyconRecord (name_id, bds, None, [], fields) in
        let d = F.AST.Tycon (false, true, [ tcdef ]) in
        (* This helps f* in type class resolution *)
        let constraints_export =
          constraints_fields
          |> List.map ~f:(fun (super_name, _, _, typ) ->
                 let super_name = FStar_Ident.string_of_id super_name in
                 let tc_name = FStar_Ident.string_of_id name_id in
                 let typ = FStar_Parser_AST.term_to_string typ in
                 let binders = FStar_Parser_AST.binders_to_string ") (" bds in
                 let tc_instance =
                   name_id
                   :: FStar_Parser_AST.idents_of_binders bds
                        FStar_Compiler_Range.dummyRange
                   |> List.map ~f:FStar_Ident.string_of_id
                   |> String.concat ~sep:" "
                 in
                 `VerbatimIntf
                   ( "[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun ("
                     ^ binders ^ ") {|i: " ^ tc_instance ^ "|} -> i."
                     ^ super_name,
                     `Newline ))
        in
        `Intf { d; drange = F.dummyRange; quals = []; attrs = [] }
        :: constraints_export
    | Impl
        {
          generics;
          self_ty = _;
          of_trait = trait, generic_args;
          items;
          parent_bounds;
        } ->
        let name = (RenderId.render e.ident).name |> F.id in
        let pat = F.pat @@ F.AST.PatVar (name, None, []) in
        let generics = FStarBinder.of_generics e.span generics in
        let pat =
          F.pat
          @@ F.AST.PatApp (pat, List.map ~f:FStarBinder.to_pattern generics)
        in
        let typ =
          F.mk_e_app
            (F.term @@ F.AST.Name (pconcrete_ident trait))
            (List.map ~f:(pgeneric_value e.span) generic_args)
        in
        let pat = F.pat @@ F.AST.PatAscribed (pat, (typ, None)) in
        let fields =
          List.concat_map
            ~f:(fun { ii_span; ii_generics; ii_v; ii_ident } ->
              let name = (RenderId.render ii_ident).name in
              let ii_generics =
                {
                  ii_generics with
                  constraints =
                    List.filter ~f:[%matches? GCType _] ii_generics.constraints;
                }
              in
              match ii_v with
              | IIFn { body; params } ->
                  let pats =
                    FStarBinder.(
                      of_generics ii_span ii_generics |> List.map ~f:to_pattern)
                    @ List.map
                        ~f:(fun { pat; typ_span; typ } ->
                          let span = Option.value ~default:ii_span typ_span in
                          F.pat
                          @@ F.AST.PatAscribed (ppat pat, (pty span typ, None)))
                        params
                  in
                  [ (F.lid [ name ], F.mk_e_abs pats (pexpr body)) ]
              | IIType { typ; parent_bounds } ->
                  (F.lid [ name ], pty ii_span typ)
                  :: List.map
                       ~f:(fun (_impl_expr, impl_ident) ->
                         (F.lid [ name ^ "_" ^ impl_ident.name ], F.tc_solve))
                       parent_bounds)
            items
        in
        let parent_bounds_fields =
          List.map
            ~f:(fun (_impl_expr, impl_ident) ->
              (F.lid [ "_super_" ^ impl_ident.name ], F.tc_solve))
            parent_bounds
        in
        let fields = parent_bounds_fields @ fields in
        let fields =
          if List.is_empty fields then
            [
              ( F.lid
                  [
                    "__marker_trait_"
                    ^ List.last_exn
                        (FStar_Ident.path_of_lid (pconcrete_ident trait));
                  ],
                pexpr (U.unit_expr e.span) );
            ]
          else fields
        in
        let body = F.term @@ F.AST.Record (None, fields) in
        let tcinst = F.term @@ F.AST.Var FStar_Parser_Const.tcinstance_lid in
        let has_type =
          List.exists items ~f:(fun { ii_v; _ } ->
              match ii_v with IIType _ -> true | _ -> false)
        in
        let let_impl = F.AST.TopLevelLet (NoLetQualifier, [ (pat, body) ]) in
        let generics_binders = List.map ~f:FStarBinder.to_binder generics in
        let val_type = F.term @@ F.AST.Product (generics_binders, typ) in
        let v = F.AST.Val (name, val_type) in
        let intf = F.decls ~fsti:true ~attrs:[ tcinst ] v in
        let impl =
          if is_erased then erased_impl name val_type [ tcinst ] generics
          else
            F.decls
              ~fsti:(ctx.interface_mode && has_type)
              ~attrs:[ tcinst ] let_impl
        in
        let is_auto_clone =
          List.exists
            ~f:(function
              | { kind = Tool { path = "automatically_derived"; _ }; _ } -> true
              | _ -> false)
            e.attrs
          && Concrete_ident.eq_name Core__clone__Clone trait
        in
        let intf = if has_type && not is_erased then [] else intf in
        if is_erased && is_auto_clone then
          F.decls ~fsti:ctx.interface_mode
            (F.AST.TopLevelLet (NoLetQualifier, [ (pat, dummy_clone_impl) ]))
        else if ctx.interface_mode then intf @ impl
        else impl
    | Quote { quote; _ } ->
        let fstar_opts =
          Attrs.find_unique_attr e.attrs ~f:(function
            | ItemQuote q -> Some q.fstar_options
            | _ -> None)
          |> Option.value_or_thunk ~default:(fun _ ->
                 Error.assertion_failure e.span
                   "Malformed `Quote` item: could not find a ItemQuote payload")
          |> Option.value ~default:Types.{ intf = false; impl = true }
        in
        let payload = (pquote e.span quote, `Newline) in
        if ctx.interface_mode then
          (if fstar_opts.intf then [ `VerbatimIntf payload ] else [])
          @ if fstar_opts.impl then [ `VerbatimImpl payload ] else []
        else [ `VerbatimImpl payload ]
    | HaxError details ->
        [
          `Comment
            ("item error backend: " ^ details ^ "\n\nLast AST:\n"
            ^ (U.LiftToFullAst.item e |> Print_rust.pitem_str));
        ]
    | Use _ (* TODO: Not Yet Implemented *) | NotImplementedYet -> []
    | _ -> .
end

(** Output signature of the F* printer: turns one hax item into a list of
    outputs destined for either the implementation file ([`Impl]) or the
    interface file ([`Intf]). *)
module type S = sig
  (* Pretty-prints a single parsed F* declaration. *)
  val decl_to_string : F.AST.decl -> string

  (* Translates one item into parsed F* declarations, raw verbatim strings
     (paired with a marker telling whether a newline should follow), or a
     plain comment. *)
  val pitem :
    item ->
    [> `Impl of F.AST.decl
    | `Intf of F.AST.decl
    | `VerbatimImpl of string * [ `NoNewline | `Newline ]
    | `VerbatimIntf of string * [ `NoNewline | `Newline ]
    | `Comment of string ]
    list
end

(* Instantiates the printer functor [Make] with the attribute helpers [M] and
   the printing context [ctx], packed as a first-class module of type [S]. *)
let make (module M : Attrs.WITH_ITEMS) ctx =
  (module Make
            (M)
            (struct
              let ctx = ctx
            end) : S)

(** Renders a single [item] into tagged strings for the [.fst] implementation
    file ([`Impl]) and — when interface mode applies to this item — the
    [.fsti] interface file ([`Intf]). Each string carries a marker telling
    whether a newline should follow it. *)
let strings_of_item (bo : BackendOptions.t) m items (item : item) :
    ([> `Impl of string | `Intf of string ] * [ `NoNewline | `Newline ]) list =
  (* Find the inclusion clause applying to this item. The clause list is
     reversed first, so among several matching clauses the one given last
     takes precedence. *)
  let interface_mode' : Types.inclusion_kind =
    List.rev bo.interfaces
    |> List.find ~f:(fun (clause : Types.inclusion_clause) ->
           let namespace = clause.namespace in
           (* match anything under that **module** namespace *)
           let namespace =
             {
               namespace with
               chunks = namespace.chunks @ [ Glob One; Glob Many ];
             }
           in
           Concrete_ident.matches_namespace namespace item.ident)
    |> Option.map ~f:(fun (clause : Types.inclusion_clause) -> clause.kind)
    |> Option.value ~default:(Types.Excluded : Types.inclusion_kind)
  in
  let interface_mode =
    not ([%matches? (Types.Excluded : Types.inclusion_kind)] interface_mode')
  in
  (* Instantiate the printer with a context specific to this item. *)
  let (module Print) =
    make m
      {
        current_namespace = (RenderId.render item.ident).path;
        interface_mode;
        items;
        line_width = bo.line_width;
      }
  in
  let mk_impl i = `Impl i in
  (* Outside interface mode, interface declarations are demoted to the
     implementation file. *)
  let mk_intf = if interface_mode then fun i -> `Intf i else fun i -> `Impl i in
  (* With an [Included None'] clause, implementation strings are dropped
     entirely (see the final filter). *)
  let no_impl =
    [%matches? (Types.Included None' : Types.inclusion_kind)] interface_mode'
  in
  Print.pitem item
  |> List.concat_map ~f:(function
       | `Impl i -> [ (mk_impl (Print.decl_to_string i), `Newline) ]
       | `Intf i -> [ (mk_intf (Print.decl_to_string i), `Newline) ]
       | `VerbatimIntf (s, nl) -> [ (mk_intf s, nl) ]
       | `VerbatimImpl (s, nl) -> [ (`Impl s, nl) ]
       | `Comment s ->
           (* In interface mode, comments are duplicated in both files. *)
           let s = "(* " ^ s ^ " *)" in
           if interface_mode then [ (`Impl s, `Newline); (`Intf s, `Newline) ]
           else [ (`Impl s, `Newline) ])
  |> List.filter ~f:(function `Impl _, _ when no_impl -> false | _ -> true)

(* Position of an item relative to its recursive bundle: [NonRec] for items
   outside any bundle, [FirstMutRec] for the first item of a (mutually)
   recursive bundle (it receives the [rec] qualifier), [MutRec] for the
   following ones (their leading [let]/[type] keyword becomes [and]). *)
type rec_prefix = NonRec | FirstMutRec | MutRec

(** Renders all the [items] of one module into the pair
    (implementation file contents, interface file contents) for module
    [mod_name]. [bundles] groups (mutually) recursive items so that
    [rec]/[and] keywords can be inserted. *)
let string_of_items ~mod_name ~bundles (bo : BackendOptions.t) m items :
    string * string =
  (* Visitor collecting the concrete identifier of every trait mentioned in a
     trait goal inside an item. *)
  let collect_trait_goal_idents =
    object
      inherit [_] Visitors.reduce as super
      inherit [_] U.Sets.Concrete_ident.monoid as _m

      method! visit_trait_goal (_env : unit) x =
        Set.singleton (module Concrete_ident) x.trait
    end
  in
  (* Synthesize a `let _ = ... ()` prelude that `open`s the modules this one
     implicitly depends on through typeclass instances, so F* can resolve
     them. The current module and some specially-handled library modules are
     excluded. Empty when there is no such dependency. *)
  let header =
    let lines =
      List.map ~f:(collect_trait_goal_idents#visit_item ()) items
      |> Set.union_list (module Concrete_ident)
      |> Set.map
           (module String)
           ~f:(fun i -> (RenderId.render i).path |> module_name)
      |> Fn.flip Set.remove mod_name
      |> Set.to_list
      |> List.filter ~f:(fun m ->
             (* Special treatment for modules handled specifically in our F* libraries *)
             String.is_prefix ~prefix:"Core_models." m |> not
             && String.is_prefix ~prefix:"Alloc." m |> not
             && String.equal "Hax_lib.Int" m |> not)
      |> List.map ~f:(fun mod_path -> "let open " ^ mod_path ^ " in")
    in
    match lines with
    | [] -> ""
    | _ ->
        "let _ ="
        ^ ([
             "(* This module has implicit dependencies, here we make them \
              explicit. *)";
             "(* The implicit dependencies arise from typeclasses instances. *)";
           ]
           @ lines @ [ "()" ]
          |> List.map ~f:(( ^ ) "\n  ")
          |> String.concat ~sep:"")
        ^ "\n\n"
  in
  (* Apply [f] to a rendered string; [map_intf] controls whether interface
     strings are transformed as well. *)
  let map_string ~f ?(map_intf = true) (str, space) =
    ( (match str with
      | `Impl s -> `Impl (f s)
      | `Intf s -> `Intf (if map_intf then f s else s)),
      space )
  in
  (* Textual keyword substitution applied to implementation strings only. *)
  let replace_in_strs ~pattern ~with_ =
    List.map
      ~f:
        (map_string ~map_intf:false ~f:(fun str ->
             String.substr_replace_first ~pattern ~with_ str))
  in

  (* Each of these bundles contains recursive items (mutually if the bundle has more than one element).
     We know that these items will already be grouped together but we need to add the `rec` qualifier
     to the first one (in the case of functions). And to replace the `let`/`type` keyword by `and`
     for the other elements coming after. *)
  let first_in_bundles = Array.create (List.length bundles) None in
  (* Classifies [it]: first item of its bundle, a later member, or not part
     of any bundle. Mutates [first_in_bundles] to remember the first item
     seen for each bundle. *)
  let get_recursivity_prefix it =
    match
      List.findi bundles ~f:(fun _ bundle ->
          List.mem bundle it ~equal:[%eq: item])
    with
    | Some (i, _) -> (
        match first_in_bundles.(i) with
        | Some first_it when [%eq: item] first_it it -> FirstMutRec
        | Some _ -> MutRec
        | None ->
            first_in_bundles.(i) <- Some it;
            FirstMutRec)
    | None -> NonRec
  in
  (* Render each item, patching in `let rec`/`and` keywords for recursive
     bundles, then drop empty strings. *)
  let strings its =
    List.concat_map
      ~f:(fun item ->
        let recursivity_prefix = get_recursivity_prefix item in
        let strs = strings_of_item bo m items item in
        match (recursivity_prefix, item.v) with
        | FirstMutRec, Fn _ ->
            replace_in_strs ~pattern:"let" ~with_:"let rec" strs
        | MutRec, Fn _ -> replace_in_strs ~pattern:"let" ~with_:"and" strs
        | MutRec, Type _ -> replace_in_strs ~pattern:"type" ~with_:"and" strs
        | _ -> strs)
      its
    |> List.map ~f:(map_string ~f:String.strip)
    |> List.filter
         ~f:(fst >> ( function `Impl s | `Intf s -> String.is_empty s ) >> not)
  in
  (* Keep only the strings selected by [filter] and join them, honoring the
     per-string newline markers (no extra newline after the last one).
     Returns "" when nothing is selected, and prepends [header] otherwise. *)
  let string_for filter =
    let l =
      List.filter_map
        ~f:(fun (s, space) ->
          let* s = filter s in
          Some (s, space))
        (strings items)
    in
    let n = List.length l - 1 in
    let lines =
      List.mapi
        ~f:(fun i (s, space) ->
          s
          ^ if [%matches? `NoNewline] space || [%eq: int] i n then "" else "\n")
        l
    in
    match lines with [] -> "" | _ -> header ^ String.concat ~sep:"\n" lines
  in
  (* Expand the panic-freedom marker emitted by the engine into an F* admit. *)
  let replace =
    String.substr_replace_all ~pattern:"_hax_panic_freedom_admit_"
      ~with_:"admit () (* Panic freedom *)"
  in
  ( string_for (function `Impl s -> Some (replace s) | _ -> None),
    string_for (function `Intf s -> Some (replace s) | _ -> None) )

(** Header lines placed after the [module] line of every generated F* file: a
    [#set-options] pragma built from the backend options, followed by the
    default [open]s. [mod_name] is not used by the current implementation;
    it is kept for interface stability. *)
let fstar_headers (bo : BackendOptions.t) (mod_name : string) =
  let set_options =
    Printf.sprintf {|#set-options "--fuel %Ld --ifuel %Ld --z3rlimit %Ld"|}
      bo.fuel bo.ifuel bo.z3rlimit
  in
  (* The library opened depends on whether we extract against the hax core
     models or against Rust_primitives. *)
  let library_open =
    if hax_core_models_extraction then "open Rust_primitives"
    else "open Core_models"
  in
  String.concat ~sep:"\n" [ set_options; "open FStar.Mul"; library_open ]

(** Rewrites `unsize x` to `x <: τ` when `τ` is in the allowlist described by
    `unsize_identity_typ` *)
let unsize_as_identity =
  (* Tells if a unsize should be treated as identity by type *)
  let rec unsize_identity_typ = function
    | TArray _ -> true
    (* References are transparent: look through to the referee type. *)
    | TRef { typ; _ } -> unsize_identity_typ typ
    | _ -> false
  in
  let visitor =
    object
      inherit [_] U.Visitors.map as super

      method! visit_expr () e =
        match e.e with
        (* A single-argument application of `Rust_primitives::unsize` whose
           argument has an allowlisted type... *)
        | App { f = { e = GlobalVar f; _ }; args = [ x ]; _ }
          when Global_ident.eq_name Rust_primitives__unsize f
               && unsize_identity_typ x.typ ->
            (* ...is replaced by an ascription of the (recursively rewritten)
               argument to the application's type. *)
            let x = super#visit_expr () x in
            { e with e = Ascription { e = x; typ = e.typ } }
        | _ -> super#visit_expr () e
    end
  in
  visitor#visit_item ()

(** Translate as F* (the "legacy" printer): groups [items] by namespace, and
    renders each namespace into an [.fst] file and, when non-empty, an
    [.fsti] file. *)
let translate_as_fstar m (bo : BackendOptions.t) ~(bundles : AST.item list list)
    (items : AST.item list) : Types.file list =
  U.group_items_by_namespace items
  |> Map.to_alist
  |> List.filter_map ~f:(fun (_, items) ->
         (* The rendered path of the first item of the group names the
            module; empty groups are dropped. *)
         let* first_item = List.hd items in
         Some ((RenderId.render first_item.ident).path, items))
  |> List.concat_map ~f:(fun (ns, items) ->
         let mod_name = module_name ns in
         let impl, intf = string_of_items ~mod_name ~bundles bo m items in
         (* Builds the [Types.file] with extension [ext] holding [body],
            unless [body] is empty (then no file is produced). *)
         let make ~ext body =
           if String.is_empty body then None
           else
             Some
               Types.
                 {
                   path = mod_name ^ "." ^ ext;
                   contents =
                     "module " ^ mod_name ^ "\n" ^ fstar_headers bo mod_name
                     ^ "\n\n" ^ body ^ "\n";
                   sourcemap = None;
                 }
         in
         List.filter_map ~f:Fn.id
           [ make ~ext:"fst" impl; make ~ext:"fsti" intf ])

(** Entry point for this backend: when the environment variable below is set,
    the (not yet implemented) experimental Rust printer would be used;
    otherwise translation goes through the legacy F* printer. *)
let translate =
  match Sys.getenv "HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER_INSTEAD_OF_FSTAR" with
  | Some _ -> failwith "todo"
  | None -> translate_as_fstar

open Phase_utils

(* Dependency graph over the full Rust AST. NOTE(review): unused in the
   visible code; presumably kept for the disabled hax-core extraction path in
   [apply_phases] below — confirm before removing. *)
module DepGraphR = Dependencies.Make (Features.Rust)

(** The phase pipeline lowering full Rust AST items into the fragment accepted
    by the F* backend (cf. [SubtypeToInputLanguage]). The order of phases is
    significant: later phases rely on invariants established by earlier ones.
    Assembled by the [ppx_functor_application] rewriter. *)
module TransformToInputLanguage =
  [%functor_application
    Phases.Reject.RawOrMutPointer(Features.Rust)
  |> Phases.Reject_impl_type_method
  |> Phases.Rewrite_local_self
  |> Phases.Transform_hax_lib_inline
  |> Phases.Specialize
  |> Phases.Drop_sized_trait
  |> Phases.Simplify_question_marks
  |> Phases.And_mut_defsite
  |> Phases.Reconstruct_asserts
  |> Phases.Reconstruct_for_loops
  |> Phases.Reconstruct_while_loops
  |> Phases.Direct_and_mut
  |> Phases.Reject.Arbitrary_lhs
  |> Phases.Drop_blocks
  |> Phases.Drop_match_guards
  |> Phases.Drop_references
  |> Phases.Explicit_conversions
  |> Phases.Trivialize_assign_lhs
  |> Side_effect_utils.Hoist
  |> Phases.Hoist_disjunctive_patterns
  |> Phases.Simplify_match_return
  |> Phases.Local_mutation
  |> Phases.Rewrite_control_flow
  |> Phases.Drop_return_break_continue
  |> Phases.Functionalize_loops
  |> Phases.Reject.Question_mark
  |> Phases.Reject.As_pattern
  |> Phases.Traits_specs
  |> Phases.Simplify_hoisting
  |> Phases.Newtype_as_refinement
  |> Phases.Reject.Trait_item_default
  |> Phases.Bundle_cycles
  |> Phases.Reorder_fields
  |> Phases.Sort_items_namespace_wise
  |> SubtypeToInputLanguage
  |> Identity
  ]
  [@ocamlformat "disable"]

(** Post-processing applied after the phase pipeline: rewrite identity
    `unsize`s into type ascriptions, then add explicit type ascriptions. *)
let post_process_items =
  (* NOTE: this used to apply [unsize_as_identity] twice in a row; a single
     pass suffices since the visitor already recurses into sub-expressions. *)
  List.map ~f:unsize_as_identity >> List.map ~f:U.Mappers.add_typ_ascription

(** Runs the full phase pipeline on [items], then the backend's
    post-processing. [bo] is part of the interface but currently unused. *)
let apply_phases (bo : BackendOptions.t) (items : Ast.Rust.item list) :
    AST.item list =
  (* A disabled "hax core extraction" mode used to restrict [items] to the
     transitive dependencies of [Core_names.names] when the environment
     variable HAX_CORE_EXTRACTION_MODE was set to "on". *)
  items |> TransformToInputLanguage.ditems |> post_process_items


================================================
FILE: engine/backends/fstar/fstar_backend.mli
================================================
open Hax_engine.Backend
include T with type BackendOptions.t = Hax_engine.Types.f_star_options_for__null

(** Post-processing applied to backend items after the phase pipeline
    (identity-`unsize` rewriting and type-ascription insertion); exposed so
    other consumers can reuse it. *)
val post_process_items : AST.item list -> AST.item list


================================================
FILE: engine/backends/lean/dune
================================================
; Build rules for the Lean backend library of the hax engine.
(library
 (name lean_backend)
 (package hax-engine)
 (wrapped false)
 (libraries hax_engine base)
 ; Preprocessors shared with the other backend libraries.
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; Disable all compiler warnings for this directory.
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/lean/lean_backend.ml
================================================
open Hax_engine
open Utils
open Base

(* The Lean backend: starts from all features off, with the following
   features switched on. *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Monadic_binding
      include On.Slice
      include On.Macro
      include On.Construct_base
      include On.Quote
      include On.Dyn
      include On.Unsafe
      include On.Trait_item_default
      include On.As_pattern
    end)
    (struct
      (* NOTE(review): this is the Lean backend but it registers itself under
         the F* diagnostics tag — looks copy-pasted from the F* backend;
         confirm whether a dedicated Lean tag should be used instead. *)
      let backend = Diagnostics.Backend.FStar
    end)

(** Coerces any feature set [FA] satisfying the constraints below into the
    Lean backend's input language. The constraints list exactly which
    features must be off (resp. on) for this backend. The previous version
    repeated the [mutable_reference] constraint twice; the duplicate has been
    removed. *)
module SubtypeToInputLanguage
    (FA :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type continue = Features.Off.continue
         and type break = Features.Off.break
         and type mutable_pointer = Features.Off.mutable_pointer
         and type mutable_variable = Features.Off.mutable_variable
         and type reference = Features.Off.reference
         and type raw_pointer = Features.Off.raw_pointer
         and type early_exit = Features.Off.early_exit
         and type question_mark = Features.Off.question_mark
         and type as_pattern = Features.On.as_pattern
         and type lifetime = Features.Off.lifetime
         and type monadic_action = Features.Off.monadic_action
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type loop = Features.Off.loop
         and type block = Features.Off.block
         and type for_loop = Features.Off.for_loop
         and type while_loop = Features.Off.while_loop
         and type for_index_loop = Features.Off.for_index_loop
         and type state_passing_loop = Features.Off.state_passing_loop
         and type fold_like_loop = Features.Off.fold_like_loop
         and type match_guard = Features.Off.match_guard
         and type trait_item_default = Features.On.trait_item_default) =
struct
  module FB = InputLanguage

  include
    Subtype.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Features.SUBTYPE.Id
        include Features.SUBTYPE.On.Monadic_binding
        include Features.SUBTYPE.On.Construct_base
        include Features.SUBTYPE.On.Slice
        include Features.SUBTYPE.On.Macro
        include Features.SUBTYPE.On.Quote
        include Features.SUBTYPE.On.Dyn
        include Features.SUBTYPE.On.Unsafe
      end)

  (* Items not expressible in the backend's input language are rejected. *)
  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))
end

(* AST specialized to this backend's input language. *)
module AST = Ast.Make (InputLanguage)

module BackendOptions = struct
  (* NOTE(review): the Lean backend currently reuses the F* option type —
     presumably a placeholder; confirm whether Lean-specific options are
     intended. *)
  type t = Hax_engine.Types.f_star_options_for__null
end

open Ast
module U = Ast_utils.Make (InputLanguage)
module Visitors = Ast_visitors.Make (InputLanguage)
open AST

(* Printing context for the Lean printer. Field meanings are inferred from
   their names; the record is not consumed anywhere in this file. *)
module Context = struct
  type t = {
    current_namespace : string list;
    items : item list;
    interface_mode : bool;
    line_width : int;
  }
end

open Phase_utils

(* NOTE(review): unused in the visible code of this backend; presumably kept
   for parity with the F* backend — confirm before removing. *)
module DepGraphR = Dependencies.Make (Features.Rust)

(** The phase pipeline lowering full Rust AST items into the fragment accepted
    by the Lean backend. Phase order is significant; assembled by the
    [ppx_functor_application] rewriter. *)
module TransformToInputLanguage =
  [%functor_application
    Phases.Reject.RawOrMutPointer(Features.Rust)
  |> Phases.Rewrite_local_self
  |> Phases.Transform_hax_lib_inline
  |> Phases.Specialize
  |> Phases.Drop_sized_trait
  |> Phases.Simplify_question_marks
  |> Phases.And_mut_defsite
  |> Phases.Reconstruct_asserts
  |> Phases.Reconstruct_for_loops
  |> Phases.Reconstruct_while_loops
  |> Phases.Direct_and_mut
  |> Phases.Reject.Arbitrary_lhs
  |> Phases.Drop_blocks
  |> Phases.Drop_match_guards
  |> Phases.Drop_references
  |> Phases.Trivialize_assign_lhs
  |> Side_effect_utils.Hoist
  |> Phases.Hoist_disjunctive_patterns
  |> Phases.Simplify_match_return
  |> Phases.Local_mutation
  |> Phases.Rewrite_control_flow
  |> Phases.Drop_return_break_continue
  |> Phases.Functionalize_loops
  |> Phases.Reject.Question_mark
  |> Phases.Traits_specs
  |> Phases.Simplify_hoisting
  |> Phases.Newtype_as_refinement
  |> Phases.Reorder_fields
  |> Phases.Sort_items
  |> SubtypeToInputLanguage
  |> Identity
  ]
  [@ocamlformat "disable"]

(** Lowers full Rust AST items into the Lean backend's input language by
    running the phase pipeline. *)
let apply_phases (items : Ast.Rust.item list) : AST.item list =
  items |> TransformToInputLanguage.ditems


================================================
FILE: engine/backends/proverif/dune
================================================
; Build rules for the ProVerif backend library of the hax engine.
(library
 (name proverif_backend)
 (package hax-engine)
 (wrapped false)
 (libraries hax_engine base hacspeclib_macro_parser)
 ; Preprocessors shared with the other backend libraries.
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_inline
   ppx_functor_application
   ppx_matches)))

; Disable all compiler warnings for this directory.
(env
 (_
  (flags
   (:standard -w -A))))


================================================
FILE: engine/backends/proverif/proverif_backend.ml
================================================
open Hax_engine
open Utils
open Base

(* The ProVerif backend: starts from all features off, with the following
   features switched on. *)
include
  Backend.Make
    (struct
      open Features
      include Off
      include On.Macro
      include On.Question_mark
      include On.Early_exit
      include On.Slice
      include On.Quote
      include On.Construct_base
    end)
    (struct
      let backend = Diagnostics.Backend.ProVerif
    end)

(** Gates the full feature set down to what the ProVerif backend supports.
    Unlike the [Subtype]-based coercions of other backends, this uses
    [Feature_gate] with an explicit [reject] for every unsupported feature,
    so unsupported constructs yield a diagnostic rather than a type error.
    The previous version bound [continue] to [reject] twice; the duplicate
    has been removed. *)
module SubtypeToInputLanguage
    (FA :
      Features.T
        with type early_exit = Features.On.early_exit
         and type slice = Features.On.slice
         and type question_mark = Features.On.question_mark
         and type macro = Features.On.macro
         and type quote = Features.On.quote
         and type construct_base = Features.On.construct_base) =
struct
  module FB = InputLanguage

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        (* Every feature below is not representable in ProVerif and is
           rejected with a [NotInBackendLang] diagnostic. *)
        let continue = reject
        let loop = reject
        let for_loop = reject
        let while_loop = reject
        let for_index_loop = reject
        let state_passing_loop = reject
        let break = reject
        let mutable_variable = reject
        let mutable_reference = reject
        let mutable_pointer = reject
        let reference = reject
        let raw_pointer = reject
        let as_pattern = reject
        let nontrivial_lhs = reject
        let arbitrary_lhs = reject
        let lifetime = reject
        let monadic_action = reject
        let monadic_binding = reject
        let fold_like_loop = reject
        let block = reject
        let dyn = reject
        let match_guard = reject
        let trait_item_default = reject
        let unsafe = reject
        let metadata = Phase_reject.make_metadata (NotInBackendLang ProVerif)
      end)

  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))
end

(* Engine options specific to the ProVerif backend. *)
module BackendOptions = struct
  type t = Hax_engine.Types.pro_verif_options
end

open Ast

(** Name policy used when rendering identifiers for ProVerif: the default
    policy plus ProVerif's reserved keywords, which must be avoided. *)
module ProVerifNamePolicy = struct
  include Concrete_ident.DefaultNamePolicy

  [@@@ocamlformat "disable"]

  (* ProVerif keywords; kept on one line on purpose (ocamlformat is disabled
     above). *)
  let reserved_words = Hash_set.of_list (module String) [
  "among"; "axiom"; "channel"; "choice"; "clauses"; "const"; "def"; "diff"; "do"; "elimtrue"; "else"; "equation"; "equivalence"; "event"; "expand"; "fail"; "for"; "forall"; "foreach"; "free"; "fun"; "get"; "if"; "implementation"; "in"; "inj-event"; "insert"; "lemma"; "let"; "letfun"; "letproba"; "new"; "noninterf"; "noselect"; "not"; "nounif"; "or"; "otherwise"; "out"; "param"; "phase"; "pred"; "proba"; "process"; "proof"; "public vars"; "putbegin"; "query"; "reduc"; "restriction"; "secret"; "select"; "set"; "suchthat"; "sync"; "table"; "then"; "type"; "weaksecret"; "yield"
  ]
end

module U = Ast_utils.Make (InputLanguage)
(* Renders concrete identifiers according to the ProVerif name policy. *)
module RenderId = Concrete_ident.MakeRenderAPI (ProVerifNamePolicy)
open AST

(** Options handed to the printer functor below. *)
module type OPTS = sig
  val options : Hax_engine.Types.pro_verif_options
end

(** Interface of the ProVerif printer: a development is printed in three
    sections (names suggest: preamble, data-type declarations, letfun
    definitions). *)
module type MAKE = sig
  module Preamble : sig
    val print : item list -> string
  end

  module DataTypes : sig
    val print : item list -> string
  end

  module Letfuns : sig
    val print : item list -> string
  end
end

module Make (Options : OPTS) : MAKE = struct
  module Print = struct
    module GenericPrint =
      Deprecated_generic_printer.Make (InputLanguage) (RenderId)

    open Deprecated_generic_printer_base.Make (InputLanguage)
    open PPrint

    (* Builds an indented, grouped block wrapped by [f] (typically [parens]). *)
    let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group

    (* TODO: Give definitions for core / known library functions, cf issues #447, #448 *)
    (* Translation tables mapping known Rust names to hand-written ProVerif
       printers; all currently empty. *)
    let library_functions :
        (Concrete_ident_generated.t * (AST.expr list -> document)) list =
      []

    let library_constructors :
        (Concrete_ident_generated.t
        * ((global_ident * AST.expr) list -> document))
        list =
      []

    let library_constructor_patterns :
        (Concrete_ident_generated.t * (field_pat list -> document)) list =
      []

    let library_types : (Concrete_ident_generated.t * document) list = []

    (* Holds when a table entry's key denotes the same global identifier as
       [name]. *)
    let assoc_known_name name (known_name, _) =
      Global_ident.eq_name known_name name

    (* Looks [name] up in one of the translation tables above. *)
    let translate_known_name name ~dict =
      List.find ~f:(assoc_known_name name) dict

    class print aux =
      object (print)
        inherit GenericPrint.print as super

        (* Backend-specific utilities *)

        method pv_event_def name =
          string "event" ^^ space ^^ name ^^ dot ^^ hardline
        (** Prints a ProVerif event definition (without arguments). *)

        method pv_event_emission name =
          string "event" ^^ space ^^ name ^^ semi ^^ hardline
        (** Prints a ProVerif event emission process term (no arguments). *)

        (* ProVerif syntax *)
        method pv_comment content =
          string "(*" ^^ space ^^ content ^^ space ^^ string "*)" ^^ hardline
        (** Prints a ProVerif comment and ends the line. *)

        method pv_const name typ =
          string "const" ^^ space ^^ name ^^ colon ^^ space ^^ typ ^^ dot
        (** Prints a ProVerif constant declaration of the given type (both
            provided as documents). *)

        method pv_constructor ?(is_data = false) ?(is_typeconverter = false)
            name arg_types typ =
          let options = if is_data then [ string "data" ] else [] in
          let options =
            if is_typeconverter then string "typeConverter" :: options
            else options
          in
          (* Only emit the option list when at least one option is set: the
             previous version printed a bare ` []` after the return type even
             without options. *)
          let options =
            match options with
            | [] -> empty
            | _ ->
                space ^^ string "["
                ^^ separate (comma ^^ space) options
                ^^ string "]"
          in
          string "fun" ^^ space
          ^^ align
               (name
               ^^ iblock parens (separate (comma ^^ break 1) arg_types)
               ^^ hardline ^^ colon ^^ space ^^ typ ^^ options ^^ dot)
        (** Prints a ProVerif constructor (`fun` declaration); [is_data] and
            [is_typeconverter] select the corresponding ProVerif options. *)

        method pv_type name = string "type" ^^ space ^^ name ^^ dot ^^ hardline
        (** Prints a ProVerif type definition. *)

        method pv_letfun name args body =
          string "letfun" ^^ space
          ^^ align
               (name
               ^^ iblock parens (separate (comma ^^ break 1) args)
               ^^ space ^^ equals ^^ hardline ^^ body ^^ dot)
        (** Prints a ProVerif letfun definition. *)

        method pv_letfun_call name args =
          name ^^ iblock parens (separate (comma ^^ break 1) args)
        (** Prints a ProVerif letfun call. *)

        (* Helpers building the conventional names derived from a type name:
           its default-value constant, its default letfun and its error
           letfun. *)
        method default_value type_name = type_name ^^ string "_default_value"
        method default_letfun_name type_name = type_name ^^ string "_default"
        method error_letfun_name type_name = type_name ^^ string "_err"

        (* Name of the generated accessor for [field_name], prefixed by the
           owning type's rendered name. *)
        method field_accessor_prefix field_name prefix =
          string "accessor" ^^ underscore ^^ prefix ^^ underscore
          ^^ print#concrete_ident field_name

        (* Prints one match arm in ProVerif's let/else style:
           - a wildcard arm prints only its body;
           - an `Err _` arm becomes a call to the error letfun of the arms'
             common type;
           - any other arm becomes `let <pat> = <scrutinee> in <body>`
             (constant patterns are printed as `(=lit)`). *)
        method match_arm arms_typ scrutinee { arm_pat; body } =
          let body = print#expr_at Arm_body body in
          match arm_pat with
          | { p = PWild; _ } -> body
          | { p = PConstruct { constructor; _ } }
            when Global_ident.eq_name Core__result__Result__Err constructor ->
              print#pv_letfun_call
                (print#error_letfun_name (print#ty AlreadyPar arms_typ))
                []
          | _ ->
              let pat =
                match arm_pat with
                | { p = PConstant { lit } } ->
                    iblock parens (string "=" ^^ print#literal Pat lit)
                | _ -> print#pat_at Arm_pat arm_pat |> group
              in
              let scrutinee = print#expr_at Expr_Match_scrutinee scrutinee in
              string "let" ^^ space ^^ pat ^^ string " = " ^^ scrutinee
              ^^ string " in " ^^ body

        (* Counter used to produce fresh wildcard names. *)
        val mutable wildcard_index = 0

        method wildcard =
          wildcard_index <- wildcard_index + 1;
          string "wildcard" ^^ OCaml.int wildcard_index
        (** Produces a fresh wildcard identifier (`wildcard1`, `wildcard2`, ...). *)

        method typed_wildcard = print#wildcard ^^ string ": bitstring"
        (** A fresh wildcard annotated with ProVerif's `bitstring` type. *)

        method tuple_elem_pat' :
            Deprecated_generic_printer_base.par_state -> pat' fn =
          (* Tuple element patterns print bindings as `name: type`; anything
             else falls back to the generic pattern printer. The previous
             version also computed an unused [wrap_parens] closure. *)
          fun ctx -> function
            | PBinding { var; typ; _ } ->
                print#local_ident var ^^ colon ^^ space ^^ print#ty ctx typ
            | p -> print#pat' ctx p

        method tuple_elem_pat :
            Deprecated_generic_printer_base.par_state -> pat fn =
          fun ctx { p; span; _ } ->
            print#with_span ~span (fun _ -> print#tuple_elem_pat' ctx p)
        (** Like [tuple_elem_pat'], but attaches the pattern's span. *)

        method tuple_elem_pat_at = print#par_state >> print#tuple_elem_pat

        (* Overridden methods *)

        (* Backend-specific pattern printing: `Option`/`Result` constructors
           are special-cased, other known constructors go through the
           translation tables, wildcards get fresh typed names. *)
        method! pat' : Deprecated_generic_printer_base.par_state -> pat' fn =
          fun ctx ->
            (* NOTE(review): [wrap_parens] is computed but never used below. *)
            let wrap_parens =
              group
              >>
              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock parens
            in
            fun pat ->
              match pat with
              | PConstant { lit } -> string "=" ^^ print#literal Pat lit
              | PConstruct { constructor; fields }
                when Global_ident.eq_name Core__option__Option__None constructor
                ->
                  string "None()"
              | PConstruct { constructor; fields }
              (* The `Some` constructor in ProVerif expects a
                 bitstring argument, so we use the appropriate
                 `_to_bitstring` type converter on the inner
                 expression. *)
                when Global_ident.eq_name Core__option__Option__Some constructor
                ->
                  let inner_field = List.hd_exn fields in
                  let inner_field_type_doc =
                    print#ty AlreadyPar inner_field.pat.typ
                  in
                  let inner_field_doc = print#pat ctx inner_field.pat in
                  let inner_block =
                    match inner_field.pat.typ with
                    | TApp { ident = `TupleType _ }
                    (* Tuple types should be translated without conversion from bitstring *)
                      ->
                        iblock parens inner_field_doc
                    | _ ->
                        iblock parens
                          (inner_field_type_doc ^^ string "_to_bitstring"
                          ^^ iblock parens inner_field_doc)
                  in
                  string "Some" ^^ inner_block
              | PConstruct { constructor; fields }
              (* We replace applications of the `Ok` constructor
                 with their contents. *)
                when Global_ident.eq_name Core__result__Result__Ok constructor
                ->
                  let inner_field = List.hd_exn fields in
                  let inner_field_type_doc =
                    print#ty AlreadyPar inner_field.pat.typ
                  in
                  let inner_field_doc = print#pat ctx inner_field.pat in
                  inner_field_doc
              | PConstruct { constructor; fields } -> (
                  match
                    translate_known_name constructor
                      ~dict:library_constructor_patterns
                  with
                  | Some (_, translation) -> translation fields
                  | None -> super#pat' ctx pat)
              | PWild ->
                  print#typed_wildcard
                  (* NOTE: Wildcard translation without collisions? *)
              | _ -> super#pat' ctx pat

        (* ProVerif built-ins: Rust bool maps to `bool`; every integer type
           maps to `nat`, regardless of width or signedness. *)
        method! ty_bool = string "bool"
        method! ty_int _ = string "nat"

        (* In parameter position a wildcard pattern gets a fresh generated
           name; everywhere else, defer to the parenthesization-aware
           pattern printer. *)
        method! pat_at : Deprecated_generic_printer_base.ast_position -> pat fn
            =
          fun pos pat ->
            match (pos, pat) with
            | Param_pat, { p = PWild; _ } -> print#wildcard
            | _ -> print#pat (print#par_state pos) pat

        (* Tuple patterns print each element through [tuple_elem_pat_at] (so
           bindings get `name: type` form) before the generic tuple layout. *)
        method! pat_construct_tuple : pat list fn =
          List.map ~f:(print#tuple_elem_pat_at Pat_ConstructTuple)
          >> print#doc_construct_tuple

        (* Prints a function application `f(arg1, ..., argn)`. *)
        method! expr_app f args _generic_args =
          let args =
            separate_map
              (comma ^^ break 1)
              (print#expr_at Expr_App_arg >> group)
              args
          in
          let f =
            match f with
            | { e = GlobalVar name; _ } -> (
                match name with
                | `Projector (`Concrete i) | `Concrete i ->
                    print#concrete_ident i |> group
                | _ -> super#expr_at Expr_App_f f |> group)
            (* The previous version only matched [GlobalVar] heads and would
               raise [Match_failure] on any other application head; fall back
               to the generic printer instead. *)
            | _ -> super#expr_at Expr_App_f f |> group
          in
          f ^^ iblock parens args

        (* Backend-specific expression printing: identity-like calls (clone,
           unsize, deref) are erased, logical operators and casts are
           special-cased, `Option`/`Result` construction is mapped to the
           ProVerif encodings, matches become let/else chains, and the rest
           defers to the generic printer. NOTE(review): in
           [maps_to_identity], the [fn_name] parameter is unused — the test
           reads [name] from the enclosing scope instead. *)
        method! expr' : Deprecated_generic_printer_base.par_state -> expr' fn =
          fun ctx e ->
            let wrap_parens =
              group
              >>
              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock parens
            in
            match e with
            | QuestionMark { e; return_typ; _ } -> print#expr ctx e
            (* Translate known functions *)
            | App { f = { e = GlobalVar name; _ }; args } -> (
                let maps_to_identity fn_name =
                  Global_ident.eq_name Core__clone__Clone__clone name
                  || Global_ident.eq_name Rust_primitives__unsize name
                  || Global_ident.eq_name Core__ops__deref__Deref__deref name
                in
                match name with
                | `Primitive p -> (
                    match p with
                    | LogicalOp And ->
                        print#expr NeedsPar (List.hd_exn args)
                        ^^ space ^^ string "&&" ^^ space
                        ^^ print#expr NeedsPar (List.nth_exn args 1)
                    | LogicalOp Or ->
                        print#expr NeedsPar (List.hd_exn args)
                        ^^ space ^^ string "||" ^^ space
                        ^^ print#expr NeedsPar (List.nth_exn args 1)
                    | Cast -> print#expr NeedsPar (List.hd_exn args)
                    | _ -> empty)
                | _ -> (
                    if maps_to_identity name then
                      print#expr ctx (List.hd_exn args)
                    else
                      match
                        translate_known_name name ~dict:library_functions
                      with
                      | Some (name, translation) -> translation args
                      | None -> (
                          match name with
                          | `Projector (`Concrete name) -> (
                              (* A projector should always have an argument. *)
                              let arg = Option.value_exn (List.hd args) in
                              match arg.typ with
                              | TApp { ident = `Concrete concrete_ident; _ } ->
                                  let base_name =
                                    print#concrete_ident concrete_ident
                                  in
                                  print#field_accessor_prefix name base_name
                                  ^^ iblock parens
                                       (separate_map
                                          (comma ^^ break 1)
                                          (fun arg -> print#expr AlreadyPar arg)
                                          args)
                              | _ -> super#expr' ctx e)
                          | _ -> super#expr' ctx e)))
            | Construct { constructor; fields; _ }
              when Global_ident.eq_name Core__option__Option__None constructor
              ->
                string "None()"
            | Construct { constructor; fields; _ }
              when Global_ident.eq_name Core__option__Option__Some constructor
              ->
                let inner_expr = snd (Option.value_exn (List.hd fields)) in
                let inner_expr_type_doc = print#ty AlreadyPar inner_expr.typ in
                let inner_expr_doc = super#expr ctx inner_expr in
                string "Some"
                ^^ iblock parens
                     (inner_expr_type_doc ^^ string "_to_bitstring"
                     ^^ iblock parens inner_expr_doc)
            (* Translate known constructors *)
            | Construct { constructor; fields } -> (
                match
                  translate_known_name constructor ~dict:library_constructors
                with
                | Some (name, translation) -> translation fields
                | None -> super#expr' ctx e)
            | Match { scrutinee; arms } ->
                let first_arm = Option.value_exn (List.hd arms) in
                let arms_typ = first_arm.arm.body.typ in
                separate_map
                  (hardline ^^ string "else ")
                  (fun { arm; span } -> print#match_arm arms_typ scrutinee arm)
                  arms
            | If { cond; then_; else_ } -> (
                let if_then =
                  (string "if" ^//^ nest 2 (print#expr_at Expr_If_cond cond))
                  ^/^ string "then"
                  ^//^ (print#expr_at Expr_If_then then_ |> parens |> nest 1)
                in
                match else_ with
                | None -> if_then
                | Some else_ ->
                    if_then ^^ break 1 ^^ string "else" ^^ space
                    ^^ (print#expr_at Expr_If_else else_ |> iblock parens)
                    |> wrap_parens)
            | Let { monadic; lhs; rhs; body } ->
                (Option.map
                   ~f:(fun monad -> print#expr_monadic_let ~monad)
                   monadic
                |> Option.value ~default:print#expr_let)
                  ~lhs ~rhs body
                |> wrap_parens
            | _ -> super#expr' ctx e

        method! concrete_ident = print#concrete_ident' ~under_current_ns:false

        method! item_unwrapped item =
          let assume_item =
            List.rev Options.options.assume_items
            |> List.find ~f:(fun (clause : Types.inclusion_clause) ->
                   let namespace = clause.namespace in
                   Concrete_ident.matches_namespace namespace item.ident)
            |> Option.map ~f:(fun (clause : Types.inclusion_clause) ->
                   match clause.kind with Types.Excluded -> false | _ -> true)
            |> Option.value ~default:false
          in
          let fun_and_reduc base_name constructor =
            let constructor_name = print#concrete_ident constructor.name in
            let field_prefix = print#concrete_ident base_name in
            let fun_args = constructor.arguments in
            let fun_args_full =
              separate_map
                (comma ^^ break 1)
                (fun (x, y, _z) ->
                  print#concrete_ident x ^^ string ": "
                  ^^ print#ty_at Param_typ y)
                fun_args
            in
            let fun_args_names =
              separate_map
                (comma ^^ break 1)
                (fst3 >> fun x -> print#concrete_ident x)
                fun_args
            in
            let fun_args_types =
              List.map ~f:(snd3 >> print#ty_at Param_typ) fun_args
            in
            let fun_line =
              print#pv_constructor ~is_data:true constructor_name fun_args_types
                (print#concrete_ident base_name)
            in
            let reduc_line =
              string "reduc forall " ^^ iblock Fn.id fun_args_full ^^ semi
            in
            let build_accessor (ident, ty, attr) =
              print#field_accessor_prefix ident field_prefix
              ^^ iblock parens (constructor_name ^^ iblock parens fun_args_names)
              ^^ blank 1 ^^ equals ^^ blank 1 ^^ print#concrete_ident ident
            in
            let reduc_lines =
              separate_map (dot ^^ hardline)
                (fun arg ->
                  reduc_line ^^ nest 4 (hardline ^^ build_accessor arg))
                fun_args
            in
            fun_line ^^ hardline ^^ reduc_lines
            ^^ if reduc_lines == empty then empty else dot
          in
          match item.v with
          (* `fn`s with empty parameter lists are really Rust consts. *)
          | Fn { name; body; params = [] } ->
              let const_typ =
                match body.typ with
                (* ProVerif does not allow `nat` constants. *)
                | TInt _ -> string "bitstring"
                | _ -> print#ty_at Item_Fn_body body.typ
              in
              print#pv_const (print#concrete_ident name) const_typ
          | Fn { name; generics; body; params } ->
              let as_constructor : attrs -> bool =
                Attr_payloads.payloads
                >> List.exists ~f:(fst >> [%matches? Types.PVConstructor])
              in
              let as_handwritten : attrs -> bool =
                Attr_payloads.payloads
                >> List.exists ~f:(fst >> [%matches? Types.PVHandwritten])
              in
              if as_constructor item.attrs then
                let arg_types =
                  List.map ~f:(fun p -> print#ty_at Param_typ p.typ) params
                in
                let return_typ = print#ty_at Item_Fn_body body.typ in
                print#pv_comment (string "marked as constructor")
                ^^ print#pv_constructor ~is_data:true
                     (print#concrete_ident name)
                     arg_types return_typ
              else
                let comment =
                  if assume_item then
                    print#pv_comment
                      (string "REPLACE by body of type: "
                      ^^ print#ty_at Item_Fn_body body.typ)
                  else if as_handwritten item.attrs then
                    print#pv_comment (string "REPLACE by handwritten model")
                  else empty
                in
                let reached_event_name =
                  string "Reached" ^^ underscore ^^ print#concrete_ident name
                in
                let exit_event_name =
                  string "Exit" ^^ underscore ^^ print#concrete_ident name
                in
                let body =
                  if assume_item || as_handwritten item.attrs then
                    let body_type = print#ty_at Item_Fn_body body.typ in
                    print#pv_letfun_call
                      (print#default_letfun_name body_type)
                      []
                  else print#expr_at Item_Fn_body body
                in
                comment
                ^^ print#pv_letfun
                     (print#concrete_ident name)
                     (List.map ~f:print#param params)
                     body
          | Type { name; generics; variants; is_struct } ->
              let type_name_doc = print#concrete_ident name in
              let type_line = print#pv_type type_name_doc in
              let to_bitstring_converter_line =
                print#pv_constructor ~is_typeconverter:true
                  (type_name_doc ^^ string "_to_bitstring")
                  [ type_name_doc ] (string "bitstring")
              in
              let from_bitstring_converter_line =
                print#pv_constructor ~is_typeconverter:true
                  (type_name_doc ^^ string "_from_bitstring")
                  [ string "bitstring" ]
                  type_name_doc
              in
              let default_line =
                let const_name = print#default_value type_name_doc in
                print#pv_const const_name type_name_doc
                ^^ hardline
                ^^ print#pv_letfun
                     (print#default_letfun_name type_name_doc)
                     [] const_name
              in
              let err_line =
                print#pv_letfun
                  (print#error_letfun_name type_name_doc)
                  []
                  (string "let x = construct_fail() in "
                  ^^ print#default_value type_name_doc)
              in
              let default_lines =
                type_line ^^ hardline ^^ to_bitstring_converter_line ^^ hardline
                ^^ from_bitstring_converter_line ^^ hardline ^^ default_line
                ^^ hardline ^^ err_line ^^ hardline
              in
              let destructor_lines =
                if is_struct then
                  let struct_constructor = List.hd variants in
                  match struct_constructor with
                  | None -> empty
                  | Some constructor -> fun_and_reduc name constructor
                else
                  separate_map hardline
                    (fun variant -> fun_and_reduc name variant)
                    variants
              in
              if
                Attrs.find_unique_attr item.attrs
                  ~f:
                    ([%eq: Types.ha_payload] Erased >> Fn.flip Option.some_if ())
                |> Option.is_some
              then default_lines
              else default_lines ^^ destructor_lines
          | Quote { quote; _ } -> print#quote quote
          | _ -> empty

        (* Prints a ProVerif let-binding: `let <pat> = (<rhs>) in <body>`. *)
        method! expr_let : lhs:pat -> rhs:expr -> expr fn =
          fun ~lhs ~rhs body ->
            let pat_doc = iblock Fn.id (print#pat_at Expr_Let_lhs lhs) in
            let rhs_doc = iblock parens (group (print#expr_at Expr_Let_rhs rhs)) in
            let body_doc = group (print#expr_at Expr_Let_body body) in
            string "let" ^^ space ^^ pat_doc ^^ space ^^ equals ^^ space
            ^^ rhs_doc ^^ space ^^ string "in" ^^ hardline ^^ body_doc

        (* Renders a concrete identifier. Inside the current namespace only the
           bare name is printed; otherwise the namespace path segments are
           joined with `__` and the name is appended after a final `__`. *)
        method! concrete_ident' ~(under_current_ns : bool) : concrete_ident fn =
          fun id ->
            let base = print#name_of_concrete_ident id in
            if under_current_ns then base
            else
              let sep = underscore ^^ underscore in
              let path = print#namespace_of_concrete_ident id in
              separate_map sep utf8string path ^^ sep ^^ base

        (* Prints a constructor application `C(arg1, arg2, ...)`. Record field
           names are intentionally dropped: ProVerif constructors are purely
           positional. *)
        method! doc_construct_inductive :
            is_record:bool ->
            is_struct:bool ->
            constructor:concrete_ident ->
            base:document option ->
            (global_ident * document) list fn =
          fun ~is_record ~is_struct:_ ~constructor ~base:_ args ->
            if is_record then
              print#concrete_ident constructor
              ^^ iblock parens
                   (separate_map
                      (break 0 ^^ comma)
                      (* The field name is unused on purpose: only the printed
                         value is emitted (fixes an unused-binder warning). *)
                      (fun (_, body) -> iblock Fn.id body |> group)
                      args)
            else
              print#concrete_ident constructor
              ^^ iblock parens (separate_map (comma ^^ break 1) snd args)

        (* Encodes generic arguments as a `_of_<v1>_<v2>...` suffix appended to
           a type name; an empty argument list produces nothing. *)
        method! generic_values : generic_value list fn =
          fun values ->
            match values with
            | [] -> empty
            | _ ->
                string "_of" ^^ underscore
                ^^ separate_map underscore print#generic_value values

        (* A type application prints as the head identifier followed by the
           `_of_...` suffix produced by [generic_values]. *)
        method! ty_app f args =
          print#concrete_ident f ^^ print#generic_values args

        (* Tuples are erased: every tuple type becomes `bitstring`. *)
        method! ty_tuple _ _ = string "bitstring"

        (* Sanitizes synthesized `impl ...` local identifiers (impl-trait
           binders) into valid ProVerif names: the `impl ` prefix becomes
           `impl_`, and spaces and `+` are replaced by `_`. Other identifiers
           use the default printer. *)
        method! local_ident e =
          match String.chop_prefix ~prefix:"impl " e.name with
          | None -> super#local_ident e
          | Some rest ->
              let sanitized =
                rest
                |> String.tr ~target:' ' ~replacement:'_'
                |> String.tr ~target:'+' ~replacement:'_'
              in
              string ("impl_" ^ sanitized)

        (* Expression-level special cases for ProVerif:
           - a call to `Into::into` prints as `<ty>_from_bitstring(<arg>)`;
           - `never_to_any` prints as the failure letfun `<ty>_err()`;
           - expressions of type `Result<_, _>` are unwrapped: `Ok e` prints
             as `e`, and `Err _` prints as `<ty>_err()` (note that [ty]
             translates `Result<T, E>` to the translation of `T`). *)
        method! expr ctx e =
          match e.e with
          | App { f = { e = GlobalVar name; _ }; args }
            when Global_ident.eq_name Core__convert__Into__into name ->
              print#ty ctx e.typ ^^ string "_from_bitstring"
              ^^ iblock parens (print#expr ctx (List.hd_exn args))
          | App { f = { e = GlobalVar name; _ }; args }
            when Global_ident.eq_name Rust_primitives__hax__never_to_any name ->
              print#ty ctx e.typ ^^ string "_err()"
          | _ -> (
              match e.typ with
              | TApp { ident }
                when Global_ident.eq_name Core__result__Result ident -> (
                  match e.e with
                  | Construct { constructor; fields }
                    when Global_ident.eq_name Core__result__Result__Ok
                           constructor ->
                      (* `Ok e`: print the payload only. *)
                      let inner_expr =
                        snd (Option.value_exn (List.hd fields))
                      in
                      let inner_expr_doc = super#expr ctx inner_expr in
                      inner_expr_doc
                  | Construct { constructor; _ }
                    when Global_ident.eq_name Core__result__Result__Err
                           constructor ->
                      print#ty ctx e.typ ^^ string "_err()"
                  | _ -> super#expr ctx e (* non-constructor Result value:
                                             fall back to the default *))
              | _ -> super#expr ctx e)

        (* Type translation into ProVerif types:
           - `Vec<_>` collapses to `bitstring`;
           - `Option<_>` maps to the preamble's untyped `Option`;
           - `Result<T, E>` maps to the translation of `T` alone (errors are
             modeled via the `_err()` letfuns, see [expr]);
           - other type applications fall back to the default printer;
           - anything else is erased to `bitstring`. *)
        method! ty : Deprecated_generic_printer_base.par_state -> ty fn =
          fun ctx ty ->
            match ty with
            | TBool -> print#ty_bool
            | TParam i -> print#local_ident i
            | TInt kind -> print#ty_int kind
            (* Translate known types, no args at the moment *)
            | TApp { ident; args = _ }
              when Global_ident.eq_name Alloc__vec__Vec ident ->
                string "bitstring"
            | TApp { ident; args = _ }
              when Global_ident.eq_name Core__option__Option ident ->
                string "Option"
            | TApp { ident; args }
              when Global_ident.eq_name Core__result__Result ident -> (
                (* `Result<T, E>`: translate the first generic argument. *)
                match List.hd_exn args with
                | GType typ -> print#ty ctx typ
                | GConst e -> print#expr ctx e
                | _ -> empty (* Do not translate lifetimes *))
            | TApp _ -> super#ty ctx ty
            | _ -> string "bitstring"
      end

    (* Auxiliary information threaded through the printer: either the list of
       the crate's functions, or nothing (used e.g. when printing the data
       type section, which needs no function information). *)
    type proverif_aux_info = CrateFns of AST.item list | NoAuxInfo

    include Api (struct
      type aux_info = proverif_aux_info

      let new_print aux = (new print aux :> print_object)
    end)
  end

  (* Keeps only function-like items: `Fn` items and quotes whose origin was a
     function item. *)
  let filter_crate_functions (items : AST.item list) =
    let is_fn_like item =
      match item.v with
      | Fn _ -> true
      | Quote { origin = { item_kind = `Fn; _ }; _ } -> true
      | _ -> false
    in
    List.filter ~f:is_fn_like items

  (* [true] iff the attributes carry a `ProcessRead` payload. *)
  let is_process_read (attrs : attrs) : bool =
    Attr_payloads.payloads attrs
    |> List.exists ~f:(fun (payload, _) ->
           [%matches? Types.ProcessRead] payload)

  (* [true] iff the attributes carry a `ProcessWrite` payload. *)
  let is_process_write (attrs : attrs) : bool =
    Attr_payloads.payloads attrs
    |> List.exists ~f:(fun (payload, _) ->
           [%matches? Types.ProcessWrite] payload)

  (* [true] iff the attributes carry a `ProcessInit` payload. *)
  let is_process_init (attrs : attrs) : bool =
    Attr_payloads.payloads attrs
    |> List.exists ~f:(fun (payload, _) ->
           [%matches? Types.ProcessInit] payload)

  (* An item is a process when any of the three process-role attributes
     (read, write or init) is present. *)
  let is_process item =
    let attrs = item.attrs in
    is_process_read attrs || is_process_write attrs || is_process_init attrs

  (* A section printer: renders a list of items into a string of ProVerif
     source code. *)
  module type Subprinter = sig
    val print : AST.item list -> string
  end

  (* Builds a [Subprinter] that prefixes the section's output with a boxed
     banner comment and appends two trailing newlines. *)
  module MkSubprinter (Section : sig
    val banner : string
    val preamble : AST.item list -> string
    val contents : AST.item list -> string
  end) =
  struct
    let hline = "(*****************************************)\n"

    let banner =
      String.concat [ hline; "(* "; Section.banner; " *)\n"; hline; "\n" ]

    let print items =
      String.concat
        [ banner; Section.preamble items; Section.contents items; "\n\n" ]
  end

  (* Fixed ProVerif preamble shared by every extraction: the public channel
     `c`, the `construct_fail` failure machinery, the untyped `Option` type,
     and default / error letfuns for the builtin `bitstring`, `nat` and
     `bool` types. *)
  module Preamble = MkSubprinter (struct
    let banner = "Preamble"

    let preamble items =
      "channel c.\n\n\
       fun construct_fail() : bitstring\n\
       reduc construct_fail() = fail.\n\n\
       type Option.\n\
       fun Some(bitstring): Option [data].\n\
       fun None(): Option [data].\n\
       letfun Option_err() = let x = construct_fail() in None().\n\n\
       const empty: bitstring.\n\
       letfun bitstring_default() = empty.\n\
       letfun bitstring_err() = let x = construct_fail() in \
       bitstring_default().\n\n\
       letfun nat_default() = 0.\n\
       fun nat_to_bitstring(nat): bitstring.\n\
       letfun nat_err() = let x = construct_fail() in nat_default().\n\n\
       letfun bool_default() = false.\n"

    let contents items = ""
  end)

  (* Prints the crate's type definitions (and type-originated quotes). *)
  module DataTypes = MkSubprinter (struct
    let banner = "Types and Constructors"
    let preamble items = ""

    (* Keeps only type-like items: `Type` items and quotes whose origin was a
       type item. *)
    let filter_data_types items =
      List.filter
        ~f:(fun item ->
          match item.v with
          | Type _ -> true
          | Quote { origin = { item_kind = `Type; _ }; _ } -> true
          | _ -> false)
        items

    let contents items = items |> filter_data_types |> Print.items NoAuxInfo |> fst
  end)

  (* Prints the crate's functions as ProVerif letfuns: pure functions first,
     then the ones marked as processes. *)
  module Letfuns = MkSubprinter (struct
    let banner = "Functions"
    let preamble items = ""

    let contents items =
      (* Compute the function list once: the original recomputed
         [filter_crate_functions items] three times. It is used both for
         partitioning and as auxiliary info for the printer. *)
      let crate_fns = filter_crate_functions items in
      let process_letfuns, pure_letfuns =
        List.partition_tf ~f:is_process crate_fns
      in
      let aux = CrateFns crate_fns in
      let pure_letfuns_print, _ = Print.items aux pure_letfuns in
      let process_letfuns_print, _ = Print.items aux process_letfuns in
      pure_letfuns_print ^ process_letfuns_print
  end)
end

(* Backend entry point: instantiates the printer with the CLI options and
   emits a single ProVerif library file `lib.pvl` made of the preamble, the
   data-type section and the letfun section. The module argument [m] and
   [bundles] are unused by this backend. *)
let translate m (bo : BackendOptions.t) ~(bundles : AST.item list list)
    (items : AST.item list) : Types.file list =
  let (module M : MAKE) =
    (module Make (struct
      let options = bo
    end))
  in
  let lib_contents =
    M.Preamble.print items ^ M.DataTypes.print items ^ M.Letfuns.print items
  in
  let lib_file =
    Types.{ path = "lib.pvl"; contents = lib_contents; sourcemap = None }
  in
  [ lib_file ]

open Phase_utils

(* Dependency graphs over this backend's input language and over full Rust. *)
module DepGraph = Dependencies.Make (InputLanguage)
module DepGraphR = Dependencies.Make (Features.Rust)

(* The pipeline of rewriting phases that lowers full Rust down to the
   fragment accepted by this backend. NOTE(review): the order is significant —
   each phase relies on invariants established by the previous ones; do not
   reorder without checking the phase preconditions. *)
module TransformToInputLanguage =
  [%functor_application
  Phases.Reject.Unsafe(Features.Rust)
  |> Phases.Reject.RawOrMutPointer
  |> Phases.Transform_hax_lib_inline
  |> Phases.Simplify_question_marks
  |> Phases.And_mut_defsite
  |> Phases.Reconstruct_for_loops
  |> Phases.Direct_and_mut
  |> Phases.Reject.Arbitrary_lhs
  |> Phases.Drop_blocks
  |> Phases.Drop_references
  |> Phases.Trivialize_assign_lhs
  |> Side_effect_utils.Hoist
  |> Phases.Simplify_match_return
  |> Phases.Local_mutation
  |> Phases.Reject.Continue
  |> Phases.Reject.Dyn
  |> Phases.Reorder_fields
  |> Phases.Bundle_cycles
  |> Phases.Sort_items_namespace_wise
  |> SubtypeToInputLanguage
  |> Identity
  ]
  [@ocamlformat "disable"]

(* Runs the full phase pipeline over the extracted Rust items. The options
   argument is currently unused but kept for interface uniformity across
   backends. *)
let apply_phases (bo : BackendOptions.t) (items : Ast.Rust.item list) :
    AST.item list =
  items |> TransformToInputLanguage.ditems


================================================
FILE: engine/backends/proverif/proverif_backend.mli
================================================
(* Public interface of the ProVerif backend: a backend [T] whose options are
   the ProVerif-specific CLI options. *)
open Hax_engine.Backend
include T with type BackendOptions.t = Hax_engine.Types.pro_verif_options


================================================
FILE: engine/bin/dune
================================================
; The `lib` library bundles the engine and every backend so that both the
; native driver and the (optional) javascript driver link against a single
; module. `wrapped false` keeps `Lib` reachable without a prefix.
(library
 (name lib)
 (modules lib)
 (wrapped false)
 (libraries
  hax_engine
  fstar_backend
  lean_backend
  coq_backend
  ssprove_backend
  easycrypt_backend
  proverif_backend
  logs
  core)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_deriving.show
   ppx_deriving.eq
   ppx_matches
   ppx_string)))

; Native entry point, installed as the `hax-engine` binary.
(executable
 (public_name hax-engine)
 (name native_driver)
 (modules native_driver)
 (libraries lib))

; The following line is commented: by default, we don't want to
; generate javascript.

; (include dune-js)

; Development flags: keep debug info (-g), demote all warnings-as-errors
; (-warn-error -A) except non-exhaustive matches (+8), and silence
; warning 33 (unused open).
(env
 (_
  (flags
   (:standard -g -warn-error -A -warn-error +8 -w -33))))


================================================
FILE: engine/bin/dune-js
================================================
; Javascript build of the engine (opt-in — see the commented `include` in
; engine/bin/dune). Compiled with js_of_ocaml, linking the stubs for the
; threads, stdint and unix C primitives.
(executable
 (optional)
 (name js_driver)
 (modes js)
 (modules js_driver)
 (js_of_ocaml
  (javascript_files js_stubs/mutex.js js_stubs/stdint.js js_stubs/unix.js))
 (libraries js_of_ocaml lib))


================================================
FILE: engine/bin/js_driver.ml
================================================
open Base
open Js_of_ocaml

(* Strings are slow with js_of_ocaml. Thus, parsing a string into a
   `Yojson.Safe.t` is extremly slow using yojson itself. Instead, I
   wrote a very simple and stupid `yojson_of_string_via_js` that (1)
   parses the json out of a JS string into a JS object (2) make a
   yojson AST. This is something like x100 faster. Without this hack,
   the performance is too bad to be bearable. *)
let yojson_of_string_via_js (s : string) : Yojson.Safe.t =
  (* JS-side parser: `JSON.parse` the string, then recursively map the
     resulting JS value onto Yojson constructors passed in as callbacks.
     NOTE(review): every JS `number` is routed through `mkBigint` (the
     `isInt` branch after the `return` is unreachable) — presumably to avoid
     float precision issues; confirm before relying on `mkInt`/`mkFloat`. *)
  let f =
    Js.Unsafe.js_expr
      {js|
(function (mkNull, mkBool, mkBigint, mkInt, mkFloat, mkString, mkDict, mkArray){
  function isInt(n) {
    return n % 1 === 0;
  }
  function f(x){
    let t = typeof x;
    if (t === 'undefined' || x === null) {
      return mkNull;
    } else if (t === 'boolean') {
      return mkBool(x);
    } else if (t === 'object') {
      if (x instanceof Array) {
        return mkArray(x.map(f));
      } else {
        let data = Object.entries(x).map(function(o) {
          let key = o[0];
          let val = f(o[1]);
          return [key, val];
        });
        return mkDict(data);
      }
    } else if (t === 'number') {
      return mkBigint(x.toString());
      if (isInt(x)) {
        return mkInt(x);
      } else {
        return mkFloat(x);
      }
    } else if (t === 'bigint') {
      return mkBigint(x.toString());
    } else if (t === 'string') {
      return mkString(x);
    } else {
      throw ("Cannot deal with " + JSON.stringify(x));
    }
  };
  return (function(str){
    let json = JSON.parse(str);
    let result = f(json);
    return result;
  });
})
|js}
  in
  let open Js in
  let open Unsafe in
  (* Wraps an OCaml function as a one-argument JS callback, coercing its
     argument to the expected JS type. *)
  let wrap (type a) (f : a t -> Yojson.Safe.t) =
    inject (callback (fun x -> f (coerce x)))
  in
  let to_list x = to_array x |> Array.to_list in
  (* Instantiate the JS parser with one constructor callback per Yojson
     case, in the positional order expected by the JS function above. *)
  let fn =
    fun_call f
      [|
        inject `Null;
        wrap (fun x -> `Bool (to_bool x));
        wrap (fun x -> `Intlit (to_bytestring x));
        wrap (fun x -> `Int (float_of_number x |> Float.to_int));
        wrap (fun x -> `Float (float_of_number x));
        wrap (fun x -> `String (to_bytestring x));
        wrap (fun x ->
            `Assoc
              (to_list x
              |> List.map ~f:(fun x ->
                     match to_list x with
                     | [ key; json ] -> (to_string key, Stdlib.Obj.magic json)
                     | _ -> failwith "Assoc")));
        wrap (fun x -> `List (to_list x));
      |]
  in
  fun_call fn [| string s |> coerce |] |> Obj.magic

(* Wire the engine's IO to stdin/stdout (one JSON message per line) and run
   the engine's main loop. *)
let _ =
  Hax_engine.Hax_io.init
    (module struct
      (* Each incoming message is one line of JSON on stdin. *)
      let read_json () =
        let line = Stdio.In_channel.input_line Stdio.In_channel.stdin in
        Option.map ~f:yojson_of_string_via_js line

      (* Each outgoing message is serialized to stdout, newline-terminated
         and flushed immediately so the host process sees it. *)
      let write_json msg =
        let open Stdio.Out_channel in
        Yojson.Safe.to_channel stdout msg;
        output_char stdout '\n';
        flush stdout
    end);
  Lib.main ()


================================================
FILE: engine/bin/js_stubs/mutex.js
================================================
// js_of_ocaml stubs for the OCaml threads/Mutex/Condition C primitives.
// The engine is effectively single-threaded when compiled to javascript, so
// every primitive is a no-op returning 0 (the OCaml unit value). The
// `//Provides:` lines are js_of_ocaml linker directives — keep them as-is.
// Whether to log.
var v_log = false;
function ll(s) { if (v_log) console.log(s); }

//Provides: caml_condition_broadcast
function caml_condition_broadcast() {
  return 0;
}

//Provides: caml_condition_new
function caml_condition_new() {
  return 0;
}

//Provides: caml_condition_signal
function caml_condition_signal() {
  return 0;
}

//Provides: caml_condition_wait
function caml_condition_wait() {
  return 0;
}

//Provides: caml_thread_initialize
function caml_thread_initialize() {
  return 0;
}

//Provides: caml_thread_new
function caml_thread_new() {
  return 0;
}

//Provides: caml_thread_self
// Returns a dummy thread value (an OCaml block) rather than unit.
function caml_thread_self() {
  return [0,0];
}

//Provides: caml_thread_uncaught_exception
function caml_thread_uncaught_exception() {
  return 0;
}

//Provides: caml_thread_yield
function caml_thread_yield() {
  return 0;
}

//Provides: caml_mutex_lock
function caml_mutex_lock() {
  return 0;
}

//Provides: caml_mutex_new
function caml_mutex_new() {
  return 0;
}

//Provides: caml_mutex_unlock
function caml_mutex_unlock() {
  return 0;
}

//Provides: caml_thread_cleanup
function caml_thread_cleanup() {
  return 0;
}

//Provides: caml_thread_exit
function caml_thread_exit() {
  return 0;
}

//Provides: caml_thread_id
function caml_thread_id() {
  return 0;
}


================================================
FILE: engine/bin/js_stubs/stdint.js
================================================

// js_of_ocaml stubs for the `stdint` library's C primitives. These code
// paths are never exercised by the engine under javascript, so every stub
// below is an empty function; the `// Provides:` lines are js_of_ocaml
// linker directives and must be kept verbatim.
// Provides: int8_of_nativeint
function int8_of_nativeint(){};
// Provides: int8_of_float
function int8_of_float(){};
// Provides: int8_of_int16
function int8_of_int16(){};
// Provides: int8_of_int24
function int8_of_int24(){};
// Provides: int8_of_int32
function int8_of_int32(){};
// Provides: int8_of_int40
function int8_of_int40(){};
// Provides: int8_of_int48
function int8_of_int48(){};
// Provides: int8_of_int56
function int8_of_int56(){};
// Provides: int8_of_int64
function int8_of_int64(){};
// Provides: int8_of_int128
function int8_of_int128(){};
// Provides: int8_of_uint8
function int8_of_uint8(){};
// Provides: int8_of_uint16
function int8_of_uint16(){};
// Provides: int8_of_uint24
function int8_of_uint24(){};
// Provides: int8_of_uint32
function int8_of_uint32(){};
// Provides: int8_of_uint40
function int8_of_uint40(){};
// Provides: int8_of_uint48
function int8_of_uint48(){};
// Provides: int8_of_uint56
function int8_of_uint56(){};
// Provides: int8_of_uint64
function int8_of_uint64(){};
// Provides: int8_of_uint128
function int8_of_uint128(){};
// Provides: nativeint_of_int8
function nativeint_of_int8(){};
// Provides: float_of_int8
function float_of_int8(){};
// Provides: int16_of_int8
function int16_of_int8(){};
// Provides: int24_of_int8
function int24_of_int8(){};
// Provides: int32_of_int8
function int32_of_int8(){};
// Provides: int40_of_int8
function int40_of_int8(){};
// Provides: int48_of_int8
function int48_of_int8(){};
// Provides: int56_of_int8
function int56_of_int8(){};
// Provides: int64_of_int8
function int64_of_int8(){};
// Provides: int128_of_int8
function int128_of_int8(){};
// Provides: uint8_of_int8
function uint8_of_int8(){};
// Provides: uint16_of_int8
function uint16_of_int8(){};
// Provides: uint24_of_int8
function uint24_of_int8(){};
// Provides: uint32_of_int8
function uint32_of_int8(){};
// Provides: uint40_of_int8
function uint40_of_int8(){};
// Provides: uint48_of_int8
function uint48_of_int8(){};
// Provides: uint56_of_int8
function uint56_of_int8(){};
// Provides: uint64_of_int8
function uint64_of_int8(){};
// Provides: uint128_of_int8
function uint128_of_int8(){};
// Provides: int8_bits_of_float
function int8_bits_of_float(){};
// Provides: int8_float_of_bits
function int8_float_of_bits(){};
// Provides: int16_of_nativeint
function int16_of_nativeint(){};
// Provides: int16_of_float
function int16_of_float(){};
// Provides: int16_of_int24
function int16_of_int24(){};
// Provides: int16_of_int32
function int16_of_int32(){};
// Provides: int16_of_int40
function int16_of_int40(){};
// Provides: int16_of_int48
function int16_of_int48(){};
// Provides: int16_of_int56
function int16_of_int56(){};
// Provides: int16_of_int64
function int16_of_int64(){};
// Provides: int16_of_int128
function int16_of_int128(){};
// Provides: int16_of_uint8
function int16_of_uint8(){};
// Provides: int16_of_uint16
function int16_of_uint16(){};
// Provides: int16_of_uint24
function int16_of_uint24(){};
// Provides: int16_of_uint32
function int16_of_uint32(){};
// Provides: int16_of_uint40
function int16_of_uint40(){};
// Provides: int16_of_uint48
function int16_of_uint48(){};
// Provides: int16_of_uint56
function int16_of_uint56(){};
// Provides: int16_of_uint64
function int16_of_uint64(){};
// Provides: int16_of_uint128
function int16_of_uint128(){};
// Provides: nativeint_of_int16
function nativeint_of_int16(){};
// Provides: float_of_int16
function float_of_int16(){};
// Provides: int24_of_int16
function int24_of_int16(){};
// Provides: int32_of_int16
function int32_of_int16(){};
// Provides: int40_of_int16
function int40_of_int16(){};
// Provides: int48_of_int16
function int48_of_int16(){};
// Provides: int56_of_int16
function int56_of_int16(){};
// Provides: int64_of_int16
function int64_of_int16(){};
// Provides: int128_of_int16
function int128_of_int16(){};
// Provides: uint8_of_int16
function uint8_of_int16(){};
// Provides: uint16_of_int16
function uint16_of_int16(){};
// Provides: uint24_of_int16
function uint24_of_int16(){};
// Provides: uint32_of_int16
function uint32_of_int16(){};
// Provides: uint40_of_int16
function uint40_of_int16(){};
// Provides: uint48_of_int16
function uint48_of_int16(){};
// Provides: uint56_of_int16
function uint56_of_int16(){};
// Provides: uint64_of_int16
function uint64_of_int16(){};
// Provides: uint128_of_int16
function uint128_of_int16(){};
// Provides: int16_bits_of_float
function int16_bits_of_float(){};
// Provides: int16_float_of_bits
function int16_float_of_bits(){};
// Provides: int24_of_nativeint
function int24_of_nativeint(){};
// Provides: int24_of_float
function int24_of_float(){};
// Provides: int24_of_int32
function int24_of_int32(){};
// Provides: int24_of_int40
function int24_of_int40(){};
// Provides: int24_of_int48
function int24_of_int48(){};
// Provides: int24_of_int56
function int24_of_int56(){};
// Provides: int24_of_int64
function int24_of_int64(){};
// Provides: int24_of_int128
function int24_of_int128(){};
// Provides: int24_of_uint8
function int24_of_uint8(){};
// Provides: int24_of_uint16
function int24_of_uint16(){};
// Provides: int24_of_uint24
function int24_of_uint24(){};
// Provides: int24_of_uint32
function int24_of_uint32(){};
// Provides: int24_of_uint40
function int24_of_uint40(){};
// Provides: int24_of_uint48
function int24_of_uint48(){};
// Provides: int24_of_uint56
function int24_of_uint56(){};
// Provides: int24_of_uint64
function int24_of_uint64(){};
// Provides: int24_of_uint128
function int24_of_uint128(){};
// Provides: nativeint_of_int24
function nativeint_of_int24(){};
// Provides: float_of_int24
function float_of_int24(){};
// Provides: int32_of_int24
function int32_of_int24(){};
// Provides: int40_of_int24
function int40_of_int24(){};
// Provides: int48_of_int24
function int48_of_int24(){};
// Provides: int56_of_int24
function int56_of_int24(){};
// Provides: int64_of_int24
function int64_of_int24(){};
// Provides: int128_of_int24
function int128_of_int24(){};
// Provides: uint8_of_int24
function uint8_of_int24(){};
// Provides: uint16_of_int24
function uint16_of_int24(){};
// Provides: uint24_of_int24
function uint24_of_int24(){};
// Provides: uint32_of_int24
function uint32_of_int24(){};
// Provides: uint40_of_int24
function uint40_of_int24(){};
// Provides: uint48_of_int24
function uint48_of_int24(){};
// Provides: uint56_of_int24
function uint56_of_int24(){};
// Provides: uint64_of_int24
function uint64_of_int24(){};
// Provides: uint128_of_int24
function uint128_of_int24(){};
// Provides: int32_of_int40
function int32_of_int40(){};
// Provides: int32_of_int48
function int32_of_int48(){};
// Provides: int32_of_int56
function int32_of_int56(){};
// Provides: int32_of_int64
function int32_of_int64(){};
// Provides: int32_of_int128
function int32_of_int128(){};
// Provides: int32_of_uint8
function int32_of_uint8(){};
// Provides: int32_of_uint16
function int32_of_uint16(){};
// Provides: int32_of_uint24
function int32_of_uint24(){};
// Provides: int32_of_uint32
function int32_of_uint32(){};
// Provides: int32_of_uint40
function int32_of_uint40(){};
// Provides: int32_of_uint48
function int32_of_uint48(){};
// Provides: int32_of_uint56
function int32_of_uint56(){};
// Provides: int32_of_uint64
function int32_of_uint64(){};
// Provides: int32_of_uint128
function int32_of_uint128(){};
// Provides: int40_of_int32
function int40_of_int32(){};
// Provides: int48_of_int32
function int48_of_int32(){};
// Provides: int56_of_int32
function int56_of_int32(){};
// Provides: int64_of_int32
function int64_of_int32(){};
// Provides: int128_of_int32
function int128_of_int32(){};
// Provides: uint8_of_int32
function uint8_of_int32(){};
// Provides: uint16_of_int32
function uint16_of_int32(){};
// Provides: uint24_of_int32
function uint24_of_int32(){};
// Provides: uint32_of_int32
function uint32_of_int32(){};
// Provides: uint40_of_int32
function uint40_of_int32(){};
// Provides: uint48_of_int32
function uint48_of_int32(){};
// Provides: uint56_of_int32
function uint56_of_int32(){};
// Provides: uint64_of_int32
function uint64_of_int32(){};
// Provides: uint128_of_int32
function uint128_of_int32(){};
// Provides: int64_of_int40
function int64_of_int40(){};
// Provides: int64_of_int48
function int64_of_int48(){};
// Provides: int64_of_int56
function int64_of_int56(){};
// Provides: int64_of_int128
function int64_of_int128(){};
// Provides: int64_of_uint8
function int64_of_uint8(){};
// Provides: int64_of_uint16
function int64_of_uint16(){};
// Provides: int64_of_uint24
function int64_of_uint24(){};
// Provides: int64_of_uint32
function int64_of_uint32(){};
// Provides: int64_of_uint40
function int64_of_uint40(){};
// Provides: int64_of_uint48
function int64_of_uint48(){};
// Provides: int64_of_uint56
function int64_of_uint56(){};
// Provides: int64_of_uint64
function int64_of_uint64(){};
// Provides: int64_of_uint128
function int64_of_uint128(){};
// Provides: int40_of_int64
function int40_of_int64(){};
// Provides: int48_of_int64
function int48_of_int64(){};
// Provides: int56_of_int64
function int56_of_int64(){};
// Provides: int128_of_int64
function int128_of_int64(){};
// Provides: uint8_of_int64
function uint8_of_int64(){};
// Provides: uint16_of_int64
function uint16_of_int64(){};
// Provides: uint24_of_int64
function uint24_of_int64(){};
// Provides: uint32_of_int64
function uint32_of_int64(){};
// Provides: uint40_of_int64
function uint40_of_int64(){};
// Provides: uint48_of_int64
function uint48_of_int64(){};
// Provides: uint56_of_int64
function uint56_of_int64(){};
// Provides: uint64_of_int64
function uint64_of_int64(){};
// Provides: uint128_of_int64
function uint128_of_int64(){};
// Provides: int40_mul
function int40_mul(){};
// Provides: int40_div
function int40_div(){};
// Provides: uint40_xor
function uint40_xor(){};
// Provides: int40_shift_right
function int40_shift_right(){};
// Provides: uint40_shift_right
function uint40_shift_right(){};
// Provides: int40_of_int
function int40_of_int(){};
// Provides: int40_of_nativeint
function int40_of_nativeint(){};
// Provides: int40_of_float
function int40_of_float(){};
// Provides: int40_of_int48
function int40_of_int48(){};
// Provides: int40_of_int56
function int40_of_int56(){};
// Provides: int40_of_int128
function int40_of_int128(){};
// Provides: int40_of_uint8
function int40_of_uint8(){};
// Provides: int40_of_uint16
function int40_of_uint16(){};
// Provides: int40_of_uint24
function int40_of_uint24(){};
// Provides: int40_of_uint32
function int40_of_uint32(){};
// Provides: int40_of_uint40
function int40_of_uint40(){};
// Provides: int40_of_uint48
function int40_of_uint48(){};
// Provides: int40_of_uint56
function int40_of_uint56(){};
// Provides: int40_of_uint64
function int40_of_uint64(){};
// Provides: int40_of_uint128
function int40_of_uint128(){};
// Provides: int_of_int40
function int_of_int40(){};
// Provides: nativeint_of_int40
function nativeint_of_int40(){};
// Provides: float_of_int40
function float_of_int40(){};
// Provides: int48_of_int40
function int48_of_int40(){};
// Provides: int56_of_int40
function int56_of_int40(){};
// Provides: int128_of_int40
function int128_of_int40(){};
// Provides: uint8_of_int40
function uint8_of_int40(){};
// Provides: uint16_of_int40
function uint16_of_int40(){};
// Provides: uint24_of_int40
function uint24_of_int40(){};
// Provides: uint32_of_int40
function uint32_of_int40(){};
// ---------------------------------------------------------------------------
// No-op stubs for the external primitives of the OCaml `stdint` library
// (int40/int48/int56/int128 and the uint variants). Each `// Provides:` line
// is a js_of_ocaml linker annotation registering the function below it as
// the implementation of that primitive; the empty bodies mean these
// operations are presumably never exercised at runtime in the JS build and
// only need to exist so linking succeeds — TODO confirm.
// ---------------------------------------------------------------------------
// Provides: uint40_of_int40
function uint40_of_int40(){};
// Provides: uint48_of_int40
function uint48_of_int40(){};
// Provides: uint56_of_int40
function uint56_of_int40(){};
// Provides: uint64_of_int40
function uint64_of_int40(){};
// Provides: uint128_of_int40
function uint128_of_int40(){};
// Provides: int40_max_int
function int40_max_int(){};
// Provides: int40_min_int
function int40_min_int(){};
// ---- int48: arithmetic, conversions, bounds ----
// Provides: int48_mul
function int48_mul(){};
// Provides: int48_div
function int48_div(){};
// Provides: uint48_xor
function uint48_xor(){};
// Provides: int48_shift_right
function int48_shift_right(){};
// Provides: uint48_shift_right
function uint48_shift_right(){};
// Provides: int48_of_int
function int48_of_int(){};
// Provides: int48_of_nativeint
function int48_of_nativeint(){};
// Provides: int48_of_float
function int48_of_float(){};
// Provides: int48_of_int56
function int48_of_int56(){};
// Provides: int48_of_int128
function int48_of_int128(){};
// Provides: int48_of_uint8
function int48_of_uint8(){};
// Provides: int48_of_uint16
function int48_of_uint16(){};
// Provides: int48_of_uint24
function int48_of_uint24(){};
// Provides: int48_of_uint32
function int48_of_uint32(){};
// Provides: int48_of_uint40
function int48_of_uint40(){};
// Provides: int48_of_uint48
function int48_of_uint48(){};
// Provides: int48_of_uint56
function int48_of_uint56(){};
// Provides: int48_of_uint64
function int48_of_uint64(){};
// Provides: int48_of_uint128
function int48_of_uint128(){};
// Provides: int_of_int48
function int_of_int48(){};
// Provides: nativeint_of_int48
function nativeint_of_int48(){};
// Provides: float_of_int48
function float_of_int48(){};
// Provides: int56_of_int48
function int56_of_int48(){};
// Provides: int128_of_int48
function int128_of_int48(){};
// Provides: uint8_of_int48
function uint8_of_int48(){};
// Provides: uint16_of_int48
function uint16_of_int48(){};
// Provides: uint24_of_int48
function uint24_of_int48(){};
// Provides: uint32_of_int48
function uint32_of_int48(){};
// Provides: uint40_of_int48
function uint40_of_int48(){};
// Provides: uint48_of_int48
function uint48_of_int48(){};
// Provides: uint56_of_int48
function uint56_of_int48(){};
// Provides: uint64_of_int48
function uint64_of_int48(){};
// Provides: uint128_of_int48
function uint128_of_int48(){};
// Provides: int48_max_int
function int48_max_int(){};
// Provides: int48_min_int
function int48_min_int(){};
// ---- int56: arithmetic, conversions, bounds ----
// Provides: int56_mul
function int56_mul(){};
// Provides: int56_div
function int56_div(){};
// Provides: uint56_xor
function uint56_xor(){};
// Provides: int56_shift_right
function int56_shift_right(){};
// Provides: uint56_shift_right
function uint56_shift_right(){};
// Provides: int56_of_int
function int56_of_int(){};
// Provides: int56_of_nativeint
function int56_of_nativeint(){};
// Provides: int56_of_float
function int56_of_float(){};
// Provides: int56_of_int128
function int56_of_int128(){};
// Provides: int56_of_uint8
function int56_of_uint8(){};
// Provides: int56_of_uint16
function int56_of_uint16(){};
// Provides: int56_of_uint24
function int56_of_uint24(){};
// Provides: int56_of_uint32
function int56_of_uint32(){};
// Provides: int56_of_uint40
function int56_of_uint40(){};
// Provides: int56_of_uint48
function int56_of_uint48(){};
// Provides: int56_of_uint56
function int56_of_uint56(){};
// Provides: int56_of_uint64
function int56_of_uint64(){};
// Provides: int56_of_uint128
function int56_of_uint128(){};
// Provides: int_of_int56
function int_of_int56(){};
// Provides: nativeint_of_int56
function nativeint_of_int56(){};
// Provides: float_of_int56
function float_of_int56(){};
// Provides: int128_of_int56
function int128_of_int56(){};
// Provides: uint8_of_int56
function uint8_of_int56(){};
// Provides: uint16_of_int56
function uint16_of_int56(){};
// Provides: uint24_of_int56
function uint24_of_int56(){};
// Provides: uint32_of_int56
function uint32_of_int56(){};
// Provides: uint40_of_int56
function uint40_of_int56(){};
// Provides: uint48_of_int56
function uint48_of_int56(){};
// Provides: uint56_of_int56
function uint56_of_int56(){};
// Provides: uint64_of_int56
function uint64_of_int56(){};
// Provides: uint128_of_int56
function uint128_of_int56(){};
// Provides: int56_max_int
function int56_max_int(){};
// Provides: int56_min_int
function int56_min_int(){};
// ---- int128: full arithmetic/bitwise set, conversions, bounds, custom ops ----
// Provides: int128_add
function int128_add(){};
// Provides: int128_sub
function int128_sub(){};
// Provides: int128_mul
function int128_mul(){};
// Provides: int128_div
function int128_div(){};
// Provides: int128_mod
function int128_mod(){};
// Provides: int128_and
function int128_and(){};
// Provides: int128_or
function int128_or(){};
// Provides: int128_xor
function int128_xor(){};
// Provides: int128_shift_left
function int128_shift_left(){};
// Provides: int128_shift_right
function int128_shift_right(){};
// Provides: int128_abs
function int128_abs(){};
// Provides: int128_neg
function int128_neg(){};
// Provides: int128_of_int
function int128_of_int(){};
// Provides: int128_of_nativeint
function int128_of_nativeint(){};
// Provides: int128_of_float
function int128_of_float(){};
// Provides: int128_of_uint8
function int128_of_uint8(){};
// Provides: int128_of_uint16
function int128_of_uint16(){};
// Provides: int128_of_uint24
function int128_of_uint24(){};
// Provides: int128_of_uint32
function int128_of_uint32(){};
// Provides: int128_of_uint40
function int128_of_uint40(){};
// Provides: int128_of_uint48
function int128_of_uint48(){};
// Provides: int128_of_uint56
function int128_of_uint56(){};
// Provides: int128_of_uint64
function int128_of_uint64(){};
// Provides: int128_of_uint128
function int128_of_uint128(){};
// Provides: int_of_int128
function int_of_int128(){};
// Provides: nativeint_of_int128
function nativeint_of_int128(){};
// Provides: float_of_int128
function float_of_int128(){};
// Provides: uint8_of_int128
function uint8_of_int128(){};
// Provides: uint16_of_int128
function uint16_of_int128(){};
// Provides: uint24_of_int128
function uint24_of_int128(){};
// Provides: uint32_of_int128
function uint32_of_int128(){};
// Provides: uint40_of_int128
function uint40_of_int128(){};
// Provides: uint48_of_int128
function uint48_of_int128(){};
// Provides: uint56_of_int128
function uint56_of_int128(){};
// Provides: uint64_of_int128
function uint64_of_int128(){};
// Provides: uint128_of_int128
function uint128_of_int128(){};
// Provides: int128_max_int
function int128_max_int(){};
// Provides: int128_min_int
function int128_min_int(){};
// Provides: int128_init_custom_ops
function int128_init_custom_ops(){};
// ---- uint8: conversions ----
// Provides: uint8_of_nativeint
function uint8_of_nativeint(){};
// Provides: uint8_of_float
function uint8_of_float(){};
// Provides: uint8_of_uint16
function uint8_of_uint16(){};
// Provides: uint8_of_uint24
function uint8_of_uint24(){};
// Provides: uint8_of_uint32
function uint8_of_uint32(){};
// Provides: uint8_of_uint40
function uint8_of_uint40(){};
// Provides: uint8_of_uint48
function uint8_of_uint48(){};
// Provides: uint8_of_uint56
function uint8_of_uint56(){};
// Provides: uint8_of_uint64
function uint8_of_uint64(){};
// Provides: uint8_of_uint128
function uint8_of_uint128(){};
// Provides: nativeint_of_uint8
function nativeint_of_uint8(){};
// Provides: float_of_uint8
function float_of_uint8(){};
// Provides: uint16_of_uint8
function uint16_of_uint8(){};
// Provides: uint24_of_uint8
function uint24_of_uint8(){};
// Provides: uint32_of_uint8
function uint32_of_uint8(){};
// Provides: uint40_of_uint8
function uint40_of_uint8(){};
// Provides: uint48_of_uint8
function uint48_of_uint8(){};
// Provides: uint56_of_uint8
function uint56_of_uint8(){};
// Provides: uint64_of_uint8
function uint64_of_uint8(){};
// Provides: uint128_of_uint8
function uint128_of_uint8(){};
// ---- uint16: conversions ----
// Provides: uint16_of_nativeint
function uint16_of_nativeint(){};
// Provides: uint16_of_float
function uint16_of_float(){};
// Provides: uint16_of_uint24
function uint16_of_uint24(){};
// Provides: uint16_of_uint32
function uint16_of_uint32(){};
// Provides: uint16_of_uint40
function uint16_of_uint40(){};
// Provides: uint16_of_uint48
function uint16_of_uint48(){};
// Provides: uint16_of_uint56
function uint16_of_uint56(){};
// Provides: uint16_of_uint64
function uint16_of_uint64(){};
// Provides: uint16_of_uint128
function uint16_of_uint128(){};
// Provides: nativeint_of_uint16
function nativeint_of_uint16(){};
// Provides: float_of_uint16
function float_of_uint16(){};
// Provides: uint24_of_uint16
function uint24_of_uint16(){};
// Provides: uint32_of_uint16
function uint32_of_uint16(){};
// Provides: uint40_of_uint16
function uint40_of_uint16(){};
// Provides: uint48_of_uint16
function uint48_of_uint16(){};
// Provides: uint56_of_uint16
function uint56_of_uint16(){};
// Provides: uint64_of_uint16
function uint64_of_uint16(){};
// Provides: uint128_of_uint16
function uint128_of_uint16(){};
// ---- uint24: conversions ----
// Provides: uint24_of_nativeint
function uint24_of_nativeint(){};
// Provides: uint24_of_float
function uint24_of_float(){};
// Provides: uint24_of_uint32
function uint24_of_uint32(){};
// Provides: uint24_of_uint40
function uint24_of_uint40(){};
// Provides: uint24_of_uint48
function uint24_of_uint48(){};
// Provides: uint24_of_uint56
function uint24_of_uint56(){};
// Provides: uint24_of_uint64
function uint24_of_uint64(){};
// Provides: uint24_of_uint128
function uint24_of_uint128(){};
// Provides: nativeint_of_uint24
function nativeint_of_uint24(){};
// Provides: float_of_uint24
function float_of_uint24(){};
// Provides: uint32_of_uint24
function uint32_of_uint24(){};
// Provides: uint40_of_uint24
function uint40_of_uint24(){};
// Provides: uint48_of_uint24
function uint48_of_uint24(){};
// Provides: uint56_of_uint24
function uint56_of_uint24(){};
// Provides: uint64_of_uint24
function uint64_of_uint24(){};
// Provides: uint128_of_uint24
function uint128_of_uint24(){};
// ---- uint32: full arithmetic/bitwise set, conversions, bounds, custom ops ----
// Provides: uint32_add
function uint32_add(){};
// Provides: uint32_sub
function uint32_sub(){};
// Provides: uint32_mul
function uint32_mul(){};
// Provides: uint32_div
function uint32_div(){};
// Provides: uint32_mod
function uint32_mod(){};
// Provides: uint32_and
function uint32_and(){};
// Provides: uint32_or
function uint32_or(){};
// Provides: uint32_xor
function uint32_xor(){};
// Provides: uint32_shift_left
function uint32_shift_left(){};
// Provides: uint32_shift_right
function uint32_shift_right(){};
// Provides: uint32_neg
function uint32_neg(){};
// Provides: uint32_of_int
function uint32_of_int(){};
// Provides: uint32_of_nativeint
function uint32_of_nativeint(){};
// Provides: uint32_of_float
function uint32_of_float(){};
// Provides: uint32_of_uint40
function uint32_of_uint40(){};
// Provides: uint32_of_uint48
function uint32_of_uint48(){};
// Provides: uint32_of_uint56
function uint32_of_uint56(){};
// Provides: uint32_of_uint64
function uint32_of_uint64(){};
// Provides: uint32_of_uint128
function uint32_of_uint128(){};
// Provides: int_of_uint32
function int_of_uint32(){};
// Provides: nativeint_of_uint32
function nativeint_of_uint32(){};
// Provides: float_of_uint32
function float_of_uint32(){};
// Provides: uint40_of_uint32
function uint40_of_uint32(){};
// Provides: uint48_of_uint32
function uint48_of_uint32(){};
// Provides: uint56_of_uint32
function uint56_of_uint32(){};
// Provides: uint64_of_uint32
function uint64_of_uint32(){};
// Provides: uint128_of_uint32
function uint128_of_uint32(){};
// Provides: uint32_max_int
function uint32_max_int(){};
// Provides: uint32_init_custom_ops
function uint32_init_custom_ops(){};
// ---- uint64: full arithmetic/bitwise set, conversions, bounds, custom ops ----
// Provides: uint64_add
function uint64_add(){};
// Provides: uint64_sub
function uint64_sub(){};
// Provides: uint64_mul
function uint64_mul(){};
// Provides: uint64_div
function uint64_div(){};
// Provides: uint64_mod
function uint64_mod(){};
// Provides: uint64_and
function uint64_and(){};
// Provides: uint64_or
function uint64_or(){};
// Provides: uint64_xor
function uint64_xor(){};
// Provides: uint64_shift_left
function uint64_shift_left(){};
// Provides: uint64_shift_right
function uint64_shift_right(){};
// Provides: uint64_neg
function uint64_neg(){};
// Provides: uint64_of_int
function uint64_of_int(){};
// Provides: uint64_of_nativeint
function uint64_of_nativeint(){};
// Provides: uint64_of_float
function uint64_of_float(){};
// Provides: uint64_of_uint40
function uint64_of_uint40(){};
// Provides: uint64_of_uint48
function uint64_of_uint48(){};
// Provides: uint64_of_uint56
function uint64_of_uint56(){};
// Provides: uint64_of_uint128
function uint64_of_uint128(){};
// Provides: int_of_uint64
function int_of_uint64(){};
// Provides: nativeint_of_uint64
function nativeint_of_uint64(){};
// Provides: float_of_uint64
function float_of_uint64(){};
// Provides: uint40_of_uint64
function uint40_of_uint64(){};
// Provides: uint48_of_uint64
function uint48_of_uint64(){};
// Provides: uint56_of_uint64
function uint56_of_uint64(){};
// Provides: uint128_of_uint64
function uint128_of_uint64(){};
// Provides: uint64_max_int
function uint64_max_int(){};
// Provides: uint64_init_custom_ops
function uint64_init_custom_ops(){};
// ---- uint40: arithmetic and conversions ----
// Provides: uint40_mul
function uint40_mul(){};
// Provides: uint40_div
function uint40_div(){};
// Provides: uint40_neg
function uint40_neg(){};
// Provides: uint40_of_int
function uint40_of_int(){};
// Provides: uint40_of_nativeint
function uint40_of_nativeint(){};
// Provides: uint40_of_float
function uint40_of_float(){};
// Provides: uint40_of_uint48
function uint40_of_uint48(){};
// Provides: uint40_of_uint56
function uint40_of_uint56(){};
// Provides: uint40_of_uint128
function uint40_of_uint128(){};
// Provides: int_of_uint40
function int_of_uint40(){};
// Provides: nativeint_of_uint40
function nativeint_of_uint40(){};
// Provides: float_of_uint40
function float_of_uint40(){};
// Provides: uint48_of_uint40
function uint48_of_uint40(){};
// Provides: uint56_of_uint40
function uint56_of_uint40(){};
// Provides: uint128_of_uint40
function uint128_of_uint40(){};
// ---- uint48: arithmetic and conversions ----
// Provides: uint48_mul
function uint48_mul(){};
// Provides: uint48_div
function uint48_div(){};
// Provides: uint48_neg
function uint48_neg(){};
// Provides: uint48_of_int
function uint48_of_int(){};
// Provides: uint48_of_nativeint
function uint48_of_nativeint(){};
// Provides: uint48_of_float
function uint48_of_float(){};
// Provides: uint48_of_uint56
function uint48_of_uint56(){};
// Provides: uint48_of_uint128
function uint48_of_uint128(){};
// Provides: int_of_uint48
function int_of_uint48(){};
// Provides: nativeint_of_uint48
function nativeint_of_uint48(){};
// Provides: float_of_uint48
function float_of_uint48(){};
// Provides: uint56_of_uint48
function uint56_of_uint48(){};
// Provides: uint128_of_uint48
function uint128_of_uint48(){};
// ---- uint56: arithmetic and conversions ----
// Provides: uint56_mul
function uint56_mul(){};
// Provides: uint56_div
function uint56_div(){};
// Provides: uint56_neg
function uint56_neg(){};
// Provides: uint56_of_int
function uint56_of_int(){};
// Provides: uint56_of_nativeint
function uint56_of_nativeint(){};
// Provides: uint56_of_float
function uint56_of_float(){};
// Provides: uint56_of_uint128
function uint56_of_uint128(){};
// Provides: int_of_uint56
function int_of_uint56(){};
// Provides: nativeint_of_uint56
function nativeint_of_uint56(){};
// Provides: float_of_uint56
function float_of_uint56(){};
// Provides: uint128_of_uint56
function uint128_of_uint56(){};
// ---- uint128: full arithmetic/bitwise set, conversions, bounds, custom ops ----
// Provides: uint128_add
function uint128_add(){};
// Provides: uint128_sub
function uint128_sub(){};
// Provides: uint128_mul
function uint128_mul(){};
// Provides: uint128_div
function uint128_div(){};
// Provides: uint128_mod
function uint128_mod(){};
// Provides: uint128_and
function uint128_and(){};
// Provides: uint128_or
function uint128_or(){};
// Provides: uint128_xor
function uint128_xor(){};
// Provides: uint128_shift_left
function uint128_shift_left(){};
// Provides: uint128_shift_right
function uint128_shift_right(){};
// Provides: uint128_of_int
function uint128_of_int(){};
// Provides: uint128_of_nativeint
function uint128_of_nativeint(){};
// Provides: uint128_of_float
function uint128_of_float(){};
// Provides: int_of_uint128
function int_of_uint128(){};
// Provides: nativeint_of_uint128
function nativeint_of_uint128(){};
// Provides: float_of_uint128
function float_of_uint128(){};
// Provides: uint128_max_int
function uint128_max_int(){};
// Provides: uint128_init_custom_ops
function uint128_init_custom_ops(){};


================================================
FILE: engine/bin/js_stubs/unix.js
================================================
// //Provides: unix_ll
// function unix_ll(s, args) { 
//   // if (unix_ll.log) joo_global_object.console.warn(s, args); 
//   // if (unix_ll.trap) throw new Error("unix trap: '"+ s + "' not implemented");
// }
// // unix_ll.log = true;       // whether to log calls
// // unix_ll.trap = false;     // whether to halt on calls

// //Provides: caml_raise_unix_error
// //Requires: caml_named_value, caml_raise_with_arg, caml_new_string
// function caml_raise_unix_error(msg) {
//   var tag = caml_named_value("Unix.Unix_error");
//   // var util = require('util');
//   // console.log(util.inspect(chan, {showHidden: false, depth: null}));
//   caml_raise_with_arg (tag, caml_new_string (msg));
// }

// //Provides: unix_access
// //Requires: unix_ll
// function unix_access() {
//   unix_ll("unix_access", arguments);
//   return 0;
// }

// //Provides: unix_alarm
// //Requires: unix_ll
// function unix_alarm() {
//   unix_ll("unix_alarm", arguments);
//   return 0;
// }

// //Provides: unix_bind
// //Requires: unix_ll
// function unix_bind() {
//   unix_ll("unix_bind", arguments);
//   return 0;
// }

// //Provides: unix_close
// //Requires: unix_ll
// function unix_close() {
//   unix_ll("unix_close", arguments);
//   return 0;
// }

// //Provides: unix_connect
// //Requires: unix_ll
// function unix_connect() {
//   unix_ll("unix_connect", arguments);
//   return 0;
// }

// //Provides: unix_dup
// //Requires: unix_ll
// function unix_dup() {
//   unix_ll("unix_dup", arguments);
//   return 0;
// }

// //Provides: unix_dup2
// //Requires: unix_ll
// function unix_dup2() {
//   unix_ll("unix_dup2", arguments);
//   return 0;
// }

// //Provides: unix_environment
// //Requires: unix_ll
// function unix_environment() {
//   unix_ll("unix_environment", arguments);
//   return 0;
// }

// //Provides: unix_error_message
// //Requires: unix_ll
// function unix_error_message() {
//   unix_ll("unix_error_message", arguments);
//   return 0;
// }

// //Provides: unix_execve
// //Requires: unix_ll
// function unix_execve() {
//   unix_ll("unix_execve", arguments);
//   return 0;
// }

// //Provides: unix_execvp
// //Requires: unix_ll
// function unix_execvp() {
//   unix_ll("unix_execvp", arguments);
//   return 0;
// }

// //Provides: unix_execvpe
// //Requires: unix_ll
// function unix_execvpe() {
//   unix_ll("unix_execvpe", arguments);
//   return 0;
// }

// //Provides: unix_getcwd
// //Requires: unix_ll
// function unix_getcwd() {
//   unix_ll("unix_getcwd", arguments);
//   return 0;
// }

// //Provides: unix_fork
// //Requires: unix_ll
// function unix_fork() {
//   unix_ll("unix_fork", arguments);
//   return 0;
// }

// //Provides: unix_getpid
// //Requires: unix_ll
// function unix_getpid() {
//   unix_ll("unix_getpid", arguments);
//   return 0;
// }

// //Provides: unix_getpwnam
// //Requires: unix_ll
// function unix_getpwnam() {
//   unix_ll("unix_getpwnam", arguments);
//   return 0;
// }

// //Provides: unix_getsockname
// //Requires: unix_ll
// function unix_getsockname() {
//   unix_ll("unix_getsockname", arguments);
//   return 0;
// }

// //Provides: unix_kill
// //Requires: unix_ll
// function unix_kill() {
//   unix_ll("unix_kill", arguments);
//   return 0;
// }

// //Provides: unix_listen
// //Requires: unix_ll
// function unix_listen() {
//   unix_ll("unix_listen", arguments);
//   return 0;
// }

// //Provides: unix_pipe
// //Requires: unix_ll
// function unix_pipe() {
//   unix_ll("unix_pipe", arguments);
//   return 0;
// }

// //Provides: unix_read
// //Requires: unix_ll
// function unix_read() {
//   unix_ll("unix_read", arguments);
//   return 0;
// }

// //Provides: unix_opendir
// //Requires: unix_ll
// function unix_opendir(dir) {
//   unix_ll("unix_opendir", arguments);

//   // caml_raise_unix_error("opendir", arguments);
//   return [];
// }

// //Provides: unix_readdir
// //Requires: unix_ll, caml_raise_constant, caml_global_data
// function unix_readdir(dir) {
//   unix_ll("unix_readdir", arguments);

//   // caml_raise_unix_error("readdir", arguments);
//   caml_raise_constant(caml_global_data.End_of_file);
//   return [];
// }

// //Provides: unix_closedir
// //Requires: unix_ll
// function unix_closedir() {
//   unix_ll("unix_closedir", arguments);
//   return [];
// }

// //Provides: unix_select
// //Requires: unix_ll
// function unix_select() {
//   unix_ll("unix_select", arguments);
//   return 0;
// }

// //Provides: unix_set_close_on_exec
// //Requires: unix_ll
// function unix_set_close_on_exec() {
//   unix_ll("unix_set_close_on_exec", arguments);
//   return 0;
// }

// //Provides: unix_set_nonblock
// //Requires: unix_ll
// function unix_set_nonblock() {
//   unix_ll("unix_set_nonblock", arguments);
//   return 0;
// }

// //Provides: unix_sleep
// //Requires: unix_ll
// function unix_sleep() {
//   unix_ll("unix_sleep", arguments);
//   return 0;
// }

// //Provides: unix_socket
// //Requires: unix_ll
// function unix_socket() {
//   unix_ll("unix_socket", arguments);
//   return 0;
// }

// //Provides: unix_string_of_inet_addr
// //Requires: unix_ll
// function unix_string_of_inet_addr() {
//   unix_ll("unix_string_of_inet_addr", arguments);
//   return 0;
// }

// //Provides: unix_times
// //Requires: unix_ll
// function unix_times() {
//   unix_ll("unix_times", arguments);
//   return 0;
// }

// //Provides: unix_wait
// //Requires: unix_ll
// function unix_wait() {
//   unix_ll("unix_wait", arguments);
//   return 0;
// }

// //Provides: unix_waitpid
// //Requires: unix_ll
// function unix_waitpid() {
//   unix_ll("unix_waitpid", arguments);
//   return 0;
// }

// // Provides: unix_accept
// // Requires: unix_ll
// function unix_accept()                 { unix_ll("unix_accept", arguments); }
// // Provides: unix_chdir
// // Requires: unix_ll
// function unix_chdir()                  { unix_ll("unix_chdir", arguments); }
// // Provides: unix_chmod
// // Requires: unix_ll
// function unix_chmod()                  { unix_ll("unix_chmod", arguments); }
// // Provides: unix_chown
// // Requires: unix_ll
// function unix_chown()                  { unix_ll("unix_chown", arguments); }
// // Provides: unix_chroot
// // Requires: unix_ll
// function unix_chroot()                 { unix_ll("unix_chroot", arguments); }
// // Provides: unix_clear_close_on_exec
// // Requires: unix_ll
// function unix_clear_close_on_exec()    { unix_ll("unix_clear_close_on_exec", arguments); }
// // Provides: unix_clear_nonblock
// // Requires: unix_ll
// function unix_clear_nonblock()         { unix_ll("unix_clear_nonblock", arguments); }
// // Provides: unix_environment_unsafe
// // Requires: unix_ll
// function unix_environment_unsafe()     { unix_ll("unix_environment_unsafe", arguments); }
// // Provides: unix_execv
// // Requires: unix_ll
// function unix_execv()                  { unix_ll("unix_execv", arguments); }
// // Provides: unix_fchmod
// // Requires: unix_ll
// function unix_fchmod()                 { unix_ll("unix_fchmod", arguments); }
// // Provides: unix_fchown
// // Requires: unix_ll
// function unix_fchown()                 { unix_ll("unix_fchown", arguments); }
// // Provides: unix_fstat
// // Requires: unix_ll
// function unix_fstat()                 { unix_ll("unix_fstat", arguments); }
// // Provides: unix_fstat_64
// // Requires: unix_ll
// function unix_fstat_64()              { unix_ll("unix_fstat_64", arguments); }
// // Provides: unix_ftruncate
// // Requires: unix_ll
// function unix_ftruncate()             { unix_ll("unix_ftruncate", arguments); }
// // Provides: unix_ftruncate_64
// // Requires: unix_ll
// function unix_ftruncate_64()          { unix_ll("unix_ftruncate_64", arguments); }
// // Provides: unix_getaddrinfo
// // Requires: unix_ll
// function unix_getaddrinfo()           { unix_ll("unix_getaddrinfo", arguments); }
// // Provides: unix_getegid
// // Requires: unix_ll
// function unix_getegid()               { unix_ll("unix_getegid", arguments); }
// // Provides: unix_geteuid
// // Requires: unix_ll
// function unix_geteuid()               { unix_ll("unix_geteuid", arguments); }
// // Provides: unix_getgid
// // Requires: unix_ll
// function unix_getgid()                { unix_ll("unix_getgid", arguments); }
// // Provides: unix_getgrgid
// // Requires: unix_ll
// function unix_getgrgid()              { unix_ll("unix_getgrgid", arguments); }
// // Provides: unix_getgrnam
// // Requires: unix_ll
// function unix_getgrnam()              { unix_ll("unix_getgrnam", arguments); }
// // Provides: unix_getgroups
// // Requires: unix_ll
// function unix_getgroups()             { unix_ll("unix_getgroups", arguments); }
// // Provides: unix_gethostbyaddr
// // Requires: unix_ll
// function unix_gethostbyaddr()         { unix_ll("unix_gethostbyaddr", arguments); }
// // Provides: unix_gethostbyname
// // Requires: unix_ll
// function unix_gethostbyname()         { unix_ll("unix_gethostbyname", arguments); }
// // Provides: unix_gethostname
// // Requires: unix_ll
// function unix_gethostname()           { unix_ll("unix_gethostname", arguments); }
// // Provides: unix_getitimer
// // Requires: unix_ll
// function unix_getitimer()             { unix_ll("unix_getitimer", arguments); }
// // Provides: unix_getlogin
// // Requires: unix_ll
// function unix_getlogin()              { unix_ll("unix_getlogin", arguments); }
// // Provides: unix_getnameinfo
// // Requires: unix_ll
// function unix_getnameinfo()           { unix_ll("unix_getnameinfo", arguments); }
// // Provides: unix_getpeername
// // Requires: unix_ll
// function unix_getpeername()           { unix_ll("unix_getpeername", arguments); }
// // Provides: unix_getppid
// // Requires: unix_ll
// function unix_getppid()               { unix_ll("unix_getppid", arguments); }
// // Provides: unix_getprotobyname
// // Requires: unix_ll
// function unix_getprotobyname()        { unix_ll("unix_getprotobyname", arguments); }
// // Provides: unix_getprotobynumber
// // Requires: unix_ll
// function unix_getprotobynumber()      { unix_ll("unix_getprotobynumber", arguments); }
// // Provides: unix_getservbyname
// // Requires: unix_ll
// function unix_getservbyname()         { unix_ll("unix_getservbyname", arguments); }
// // Provides: unix_getservbyport
// // Requires: unix_ll
// function unix_getservbyport()         { unix_ll("unix_getservbyport", arguments); }
// // Provides: unix_getsockopt
// // Requires: unix_ll
// function unix_getsockopt()            { unix_ll("unix_getsockopt", arguments); }
// // Provides: unix_initgroups
// // Requires: unix_ll
// function unix_initgroups()            { unix_ll("unix_initgroups", arguments); }
// // Provides: unix_link
// // Requires: unix_ll
// function unix_link()                  { unix_ll("unix_link", arguments); }
// // Provides: unix_lockf
// // Requires: unix_ll
// function unix_lockf()                 { unix_ll("unix_lockf", arguments); }
// // Provides: unix_lseek
// // Requires: unix_ll
// function unix_lseek()                 { unix_ll("unix_lseek", arguments); }
// // Provides: unix_lseek_64
// // Requires: unix_ll
// function unix_lseek_64()              { unix_ll("unix_lseek_64", arguments); }
// // Provides: unix_mkfifo
// // Requires: unix_ll
// function unix_mkfifo()                { unix_ll("unix_mkfifo", arguments); }
// // Provides: unix_nice
// // Requires: unix_ll
// function unix_nice()                  { unix_ll("unix_nice", arguments); }
// // Provides: unix_open
// // Requires: unix_ll
// function unix_open()                  { unix_ll("unix_open", arguments); }
// // Provides: unix_putenv
// // Requires: unix_ll
// function unix_putenv()                { unix_ll("unix_putenv", arguments); }
// // Provides: unix_recv
// // Requires: unix_ll
// function unix_recv()                  { unix_ll("unix_recv", arguments); }
// // Provides: unix_recvfrom
// // Requires: unix_ll
// function unix_recvfrom()              { unix_ll("unix_recvfrom", arguments); }
// // Provides: unix_rename
// // Requires: unix_ll
// function unix_rename()                { unix_ll("unix_rename", arguments); }
// // Provides: unix_rewinddir
// // Requires: unix_ll
// function unix_rewinddir()             { unix_ll("unix_rewinddir", arguments); }
// // Provides: unix_send
// // Requires: unix_ll
// function unix_send()                  { unix_ll("unix_send", arguments); }
// // Provides: unix_sendto
// // Requires: unix_ll
// function unix_sendto()                { unix_ll("unix_sendto", arguments); }
// // Provides: unix_setgid
// // Requires: unix_ll
// function unix_setgid()                { unix_ll("unix_setgid", arguments); }
// // Provides: unix_setgroups
// // Requires: unix_ll
// function unix_setgroups()             { unix_ll("unix_setgroups", arguments); }
// // Provides: unix_setitimer
// // Requires: unix_ll
// function unix_setitimer()             { unix_ll("unix_setitimer", arguments); }
// // Provides: unix_setsid
// // Requires: unix_ll
// function unix_setsid()                { unix_ll("unix_setsid", arguments); }
// // Provides: unix_setsockopt
// // Requires: unix_ll
// function unix_setsockopt()            { unix_ll("unix_setsockopt", arguments); }
// // Provides: unix_setuid
// // Requires: unix_ll
// function unix_setuid()                { unix_ll("unix_setuid", arguments); }
// // Provides: unix_shutdown
// // Requires: unix_ll
// function unix_shutdown()              { unix_ll("unix_shutdown", arguments); }
// // Provides: unix_sigpending
// // Requires: unix_ll
// function unix_sigpending()            { unix_ll("unix_sigpending", arguments); }
// // Provides: unix_sigprocmask
// // Requires: unix_ll
// function unix_sigprocmask()           { unix_ll("unix_sigprocmask", arguments); }
// // Provides: unix_sigsuspend
// // Requires: unix_ll
// function unix_sigsuspend()            { unix_ll("unix_sigsuspend", arguments); }
// // Provides: unix_single_write
// // Requires: unix_ll
// function unix_single_write()          { unix_ll("unix_single_write", arguments); }
// // Provides: unix_socketpair
// // Requires: unix_ll
// function unix_socketpair()            { unix_ll("unix_socketpair", arguments); }
// // Provides: unix_tcdrain
// // Requires: unix_ll
// function unix_tcdrain()               { unix_ll("unix_tcdrain", arguments); }
// // Provides: unix_tcflow
// // Requires: unix_ll
// function unix_tcflow()                { unix_ll("unix_tcflow", arguments); }
// // Provides: unix_tcflush
// // Requires: unix_ll
// function unix_tcflush()               { unix_ll("unix_tcflush", arguments); }
// // Provides: unix_tcgetattr
// // Requires: unix_ll
// function unix_tcgetattr()             { unix_ll("unix_tcgetattr", arguments); }
// // Provides: unix_tcsendbreak
// // Requires: unix_ll
// function unix_tcsendbreak()           { unix_ll("unix_tcsendbreak", arguments); }
// // Provides: unix_tcsetattr
// // Requires: unix_ll
// function unix_tcsetattr()             { unix_ll("unix_tcsetattr", arguments); }
// // Provides: unix_truncate
// // Requires: unix_ll
// function unix_truncate()              { unix_ll("unix_truncate", arguments); }
// // Provides: unix_truncate_64
// // Requires: unix_ll
// function unix_truncate_64()           { unix_ll("unix_truncate_64", arguments); }
// // Provides: unix_umask
// // Requires: unix_ll
// function unix_umask()                 { unix_ll("unix_umask", arguments); }
// // Provides: unix_utimes
// // Requires: unix_ll
// function unix_utimes()                { unix_ll("unix_utimes", arguments); }
// // Provides: unix_write
// // Requires: unix_ll
// function unix_write()                 { unix_ll("unix_write", arguments); }
// // Provides: unix_exit
// // Requires: unix_ll
// function unix_exit()                  { unix_ll("unix_exit", arguments); }
// // Provides: unix_spawn
// // Requires: unix_ll
// function unix_spawn()                 { unix_ll("unix_spawn", arguments); }
// // Provides: unix_fsync
// // Requires: unix_ll
// function unix_fsync()                 { unix_ll("unix_fsync", arguments); }
// // Provides: unix_inchannel_of_filedescr
// // Requires: unix_ll
// function unix_inchannel_of_filedescr()  { unix_ll("unix_inchannel_of_filedescr", arguments); }
// // Provides: unix_outchannel_of_filedescr
// // Requires: unix_ll
// function unix_outchannel_of_filedescr() { unix_ll("unix_outchannel_of_filedescr", arguments); }
// NOTE(review): the `Provides:` annotation for this stub was commented out
// (`// // Provides: ...`), so — unlike the four sibling stubs below — the
// js_of_ocaml linker would not register this function as the primitive
// `caml_mutex_try_lock`. Restored the annotation so it resolves like the
// others. (The old `// // Requires: unix_ll` line is dropped: the body does
// not call `unix_ll`, and `unix_ll` itself is commented out above.)
// Provides: caml_mutex_try_lock
function caml_mutex_try_lock()       {  }
// No-op stubs for thread/unix primitives referenced by the engine's bytecode.
// Each `// Provides:` line registers the function below it with the
// js_of_ocaml linker; empty bodies suffice because these primitives are
// presumably never exercised at runtime in the JS build — TODO confirm.
// Provides: caml_thread_join
function caml_thread_join()          {  }
// Provides: caml_thread_sigmask
function caml_thread_sigmask()       {  }
// Provides: caml_unix_map_file_bytecode
function caml_unix_map_file_bytecode() {  }
// Provides: caml_wait_signal
function caml_wait_signal()          {  }


================================================
FILE: engine/bin/lib.ml
================================================
open Hax_engine
open Base
open Stdio
open Utils

(** Initialize the [Logs] library from [options]: a [verbose] value of [0]
    disables logging entirely, [1] enables [Info]-level messages, and any
    other value enables [Debug]-level messages. Also installs the default
    format reporter. *)
let setup_logs (options : Types.engine_options) =
  let verbosity = options.backend.verbose in
  let level : Logs.level option =
    if verbosity = 0 then None
    else if verbosity = 1 then Some Logs.Info
    else Some Logs.Debug
  in
  Logs.set_level level;
  Logs.set_reporter (Logs.format_reporter ())

(* Dependency utilities (inclusion-clause filtering, associated-items
   lookup) instantiated over the full Rust feature set, i.e. the AST as it
   comes out of the THIR import, before any phase restricts it. *)
module Deps = Dependencies.Make (Features.Rust)

(* Error reporter whose diagnostics are attributed to the THIR-import
   context. *)
module Error : Phase_utils.ERROR = Phase_utils.MakeError (struct
  let ctx = Diagnostics.Context.ThirImport
end)

(* Helpers for reading hax attribute payloads (e.g. an item's inclusion
   status), reporting failures through [Error]. *)
module Attrs = Attr_payloads.MakeBase (Error)

(** Imports a list of THIR items into the Rust-featured hax AST, honoring
    the inclusion clauses: the most precise clause matching an item decides
    whether its body is dropped ([SignatureOnly]); filtered-out items are
    removed, then the associated items of kept items are pulled back in.
    Per-item diagnostics produced during import are reported for the items
    actually kept. *)
let import_thir_items (include_clauses : Types.inclusion_clause list)
    (items : Types.item_for__thir_body list) : Ast.Rust.item list =
  let imported_items =
    List.map
      ~f:(fun item ->
        let ident = Concrete_ident.(of_def_id ~value:true item.owner_id) in
        let most_precise_clause =
          (* Computes the include clause that apply to `item`, if any *)
          List.filter
            ~f:(fun clause ->
              Concrete_ident.matches_namespace clause.Types.namespace ident)
            include_clauses
          |> List.last
        in
        let type_only =
          (* Shall we drop the body? *)
          Option.map
            ~f:(fun clause -> [%matches? Types.SignatureOnly] clause.kind)
            most_precise_clause
          |> Option.value ~default:false
        in
        Import_thir.import_item ~type_only item)
      items
    |> List.map ~f:snd
  in
  Logs.info (fun m -> m "Items translated");
  let items = List.concat_map ~f:fst imported_items in
  (* Given an item, the associated items its attributes refer to. *)
  let associated_items =
    let assoc_items = Deps.uid_associated_items items in
    fun (item : Deps.AST.item) -> assoc_items item.attrs
  in
  (* Build a map from idents to error reports *)
  let ident_to_reports =
    List.concat_map
      ~f:(fun (items, reports) ->
        List.map ~f:(fun (item : Ast.Rust.item) -> (item.ident, reports)) items)
      imported_items
    |> Map.of_alist_exn (module Concrete_ident)
  in
  let items = Deps.filter_by_inclusion_clauses include_clauses items in
  (* Re-add the associated items of the kept items, avoiding duplicates. *)
  let items =
    items
    @ (List.concat_map ~f:associated_items items
      |> List.filter ~f:(List.mem ~equal:[%eq: Deps.AST.item] items >> not))
  in
  (* Keep only items whose attributes mark them as included. *)
  let items =
    List.filter
      ~f:(fun i ->
        match Attrs.status i.attrs with Included _ -> true | _ -> false)
      items
  in
  (* Tell the other side which item spans were actually processed. *)
  Hax_io.write
    (ItemProcessed (List.filter_map ~f:(fun i -> Span.owner_hint i.span) items));
  (* Extract error reports for the items we actually extract *)
  let reports =
    List.concat_map
      ~f:(fun (item : Ast.Rust.item) ->
        Map.find_exn ident_to_reports item.ident)
      items
    |> List.dedup_and_sort ~compare:Diagnostics.compare
  in
  (* Report every error *)
  List.iter ~f:Diagnostics.Core.report reports;
  items

(** Runs the whole pipeline for the backend selected in [options]: THIR
    import, phase application, then backend translation. Returns the
    produced files along with all collected diagnostics. *)
let run (options : Types.engine_options) : Types.output =
  setup_logs options;
  if options.backend.debug_engine |> Option.is_some then
    Phase_utils.DebugBindPhase.enable ();
  (* Backend-generic driver; instantiated below with the concrete backend
     module and its options. *)
  let run (type options_type)
      (module M : Backend.T with type BackendOptions.t = options_type)
      (backend_options : options_type) : Types.file list =
    let open M in
    Concrete_ident.ImplInfoStore.init
      (Concrete_ident_generated.impl_infos @ options.impl_infos);
    let include_clauses =
      options.backend.translation_options.include_namespaces
    in
    let input =
      match options.input with
      | Types.FullDef _ ->
          failwith "Internal error: the ocaml engine does not support FullDef"
      | Types.Legacy i -> i
    in
    let items =
      Profiling.profile ThirImport (List.length input) (fun _ ->
          import_thir_items include_clauses input)
    in
    (* Optionally drop type aliases, per the `extract_type_aliases` option. *)
    let items =
      if options.backend.extract_type_aliases then items
      else
        List.filter
          ~f:(function { v = TyAlias _; _ } -> false | _ -> true)
          items
    in
    Logs.info (fun m ->
        m "Applying phase for backend %s"
          ([%show: Diagnostics.Backend.t] M.backend));
    let items = apply_phases backend_options items in
    let with_items = Attrs.with_items items in
    (* Mutually recursive item groups, computed on the backend's input
       language. *)
    let bundles, _ =
      let module DepGraph = Dependencies.Make (InputLanguage) in
      DepGraph.recursive_bundles items
    in
    (* Drop items whose attributes request a late skip. *)
    let items =
      List.filter items ~f:(fun (i : AST.item) ->
          Attrs.late_skip i.attrs |> not)
    in
    Logs.info (fun m ->
        m "Translating items with backend %s"
          ([%show: Diagnostics.Backend.t] M.backend));
    let items =
      Profiling.profile (Backend M.backend) (List.length items) (fun _ ->
          translate with_items backend_options items ~bundles)
    in
    items
  in
  (* [Diagnostics.try_] captures diagnostics raised during the run; [files]
     is [None] on failure. *)
  let diagnostics, files =
    Diagnostics.try_ (fun () ->
        match options.backend.backend with
        | ProVerif opts -> run (module Proverif_backend) opts
        | Fstar opts -> run (module Fstar_backend) opts
        | Coq -> run (module Coq_backend) ()
        | Ssprove -> run (module Ssprove_backend) ()
        | Easycrypt -> run (module Easycrypt_backend) ()
        | GenerateRustEngineNames ->
            failwith
              "The OCaml hax engine should never be called with \
               `GenerateRustEngineNames`, it is an rust engine only internal \
               command."
        | backend ->
            failwith
              ("The OCaml hax engine should never be called with backend `"
              ^ [%show: Types.backend_for__null] backend
              ^ "`. This backend uses the newer rust engine. Please report \
                 this issue on our GitHub repository: \
                 https://github.com/cryspen/hax."))
  in
  {
    diagnostics = List.map ~f:Diagnostics.to_thir_diagnostic diagnostics;
    files = Option.value ~default:[] files;
    debug_json = [];
  }

(** Shallow parses a `id_table::Node` (or a raw `T`) JSON.

    The expected shape is a two-element list [`[table; value]`] where
    [table] is a list of [`[id; {variant: contents}]`] pairs; ids are
    either JSON ints or int literals, parsed as [int64]. *)
let parse_id_table_node (json : Yojson.Safe.t) :
    (int64 * Yojson.Safe.t) list * Yojson.Safe.t =
  (* Accepts both `Int and `Intlit encodings of an unsigned 64-bit id. *)
  let as_uint64 : Yojson.Safe.t -> int64 option = function
    | `Intlit repr -> Some (Int64.of_string repr)
    | `Int n -> Some (Int.to_int64 n)
    | _ -> None
  in
  let parse_entry = function
    | `List [ raw_id; `Assoc [ (_, contents) ] ] ->
        let id =
          as_uint64 raw_id
          |> Option.value_exn ~message:"parse_id_table_node: id: expected int64"
        in
        (id, contents)
    | _ -> failwith "parse_id_table_node: expected a list of size two"
  in
  match json with
  | `List [ `List entries; value ] -> (List.map ~f:parse_entry entries, value)
  | `List [ _; _ ] ->
      failwith "parse_id_table_node: `map` is supposed to be a list"
  | _ -> failwith "parse_id_table_node: expected a tuple at top-level"

(** Reads the id-table message from [Hax_io], fills [Types.cache_map] with
    its entries, and returns the payload JSON. When [check_version] is set
    and the payload's `hax_version` field differs from this engine's
    version, prints an explanation and exits with status 1. *)
let load_table ?(check_version = true) : Yojson.Safe.t =
  let table, json =
    Hax_io.read_json () |> Option.value_exn |> parse_id_table_node
  in
  (if check_version then
     let version =
       (* Missing or ill-typed field degrades to "unknown" (still a
          mismatch against the real version below). *)
       try Yojson.Safe.Util.(member "hax_version" json |> to_string)
       with _ -> "unknown"
     in
     if String.equal version Types.hax_version |> not then (
       prerr_endline
         [%string
           {|
The versions of `hax-engine` and of `cargo-hax` are different:
  - `hax-engine` version: %{Types.hax_version}
  - `cargo-hax`  version: %{version}

Please reinstall hax.
|}];
       Stdlib.exit 1));
  (* Populate the global cache; duplicate ids are a programming error. *)
  table
  |> List.iter ~f:(fun (id, json) ->
         Hashtbl.add_exn Types.cache_map ~key:id ~data:(`JSON json));
  json

(** Reads the engine options from the input channel (with version check)
    and records whether profiling should be enabled. *)
let parse_options () =
  let options =
    load_table ~check_version:true |> [%of_yojson: Types.engine_options]
  in
  Profiling.enabled := options.backend.profile;
  options

(** Exports the debug-bind-phase records and sends each one as a
    [DebugString] message through [Hax_io]. *)
let send_debug_strings () =
  Phase_utils.DebugBindPhase.export ()
  |> List.iter ~f:(fun json -> Hax_io.write (DebugString json))

(** Entrypoint of the engine. Assumes `Hax_io.init` was called.

    Parses the engine options, runs the full pipeline, then writes every
    diagnostic and produced file back through [Hax_io]. Unexpected
    exceptions are re-raised with their original backtrace after logging. *)
let engine () =
  let options = Profiling.profile (Other "parse_options") 1 parse_options in
  Printexc.record_backtrace true;
  let result =
    try Ok (run options) with
    | Hax_engine.Diagnostics.SpanFreeError.Exn exn ->
        (* A span-free error escaping this far is a bug: wrap it in a
           message asking users to report it. (Message fixed: "Uncatched"
           is not an English word.) *)
        Error
          ( Failure
              ("Uncaught hax exception (please report, this should not \
                appear): "
              ^ [%show: Hax_engine.Diagnostics.SpanFreeError.t] exn),
            Printexc.get_raw_backtrace () )
    | e -> Error (e, Printexc.get_raw_backtrace ())
  in
  match result with
  | Ok results ->
      List.iter
        ~f:(fun diag -> Diagnostic diag |> Hax_io.write)
        results.diagnostics;
      List.iter ~f:(fun file -> File file |> Hax_io.write) results.files;

      send_debug_strings ();
      Hax_io.close ();

      Logs.info (fun m -> m "Exiting Hax engine (success)")
  | Error (exn, bt) ->
      Logs.info (fun m -> m "Exiting Hax engine (with an unexpected failure)");
      Printexc.raise_with_backtrace exn bt

(* AST exporters towards the Rust engine, one per feature set we may need
   to serialize. *)
module ExportFullAst = Export_ast.Make (Features.Full)
module ExportRustAst = Export_ast.Make (Features.Rust)
module ExportLeanAst = Export_ast.Make (Lean_backend.InputLanguage)

(** Answers a single query from the Rust engine: import THIR items, apply
    a list of named phases, or pretty-print through the F* backend (the
    only backend this driver can print for). *)
let driver_for_rust_engine_inner (query : Rust_engine_types.query) :
    Rust_engine_types.response =
  Profiling.enabled := query.profiling;
  if query.debug_bind_phase then Phase_utils.DebugBindPhase.enable ();
  match query.kind with
  | Types.ImportThir { input; translation_options } ->
      let imported_items =
        import_thir_items translation_options.include_namespaces input
      in
      let rust_ast_items =
        List.concat_map ~f:ExportRustAst.ditem imported_items
      in
      Rust_engine_types.ImportThir { output = rust_ast_items }
  | Types.ApplyPhases { input; phases } ->
      let items = List.concat_map ~f:Import_ast.ditem input in
      (* Resolves each phase by name (crashing on unknown names) and chains
         them into a single composite phase. *)
      let module Phase =
        (val List.map
               ~f:(fun name ->
                 Untyped_phases.phase_of_name name |> Option.value_exn)
               phases
             |> Untyped_phases.bind_list)
      in
      let items = Phase.ditems items in
      let output = List.concat_map ~f:ExportFullAst.ditem items in
      Rust_engine_types.ApplyPhases { output }
  | Types.Print { printer = Fstar backend_options; input } ->
      let open Fstar_backend in
      let items = List.concat_map ~f:Import_ast.ditem input in

      (* NOTE(review): unchecked cast from the imported AST to F*'s input
         AST; assumes both share the same runtime representation —
         confirm against Import_ast's output feature set. *)
      let items : AST.item list = Stdlib.Obj.magic items in
      let items = post_process_items items in
      let with_items = Attrs.with_items items in
      let bundles, _ =
        let module DepGraph = Dependencies.Make (InputLanguage) in
        DepGraph.recursive_bundles items
      in
      (* Drop items whose attributes request a late skip. *)
      let items =
        List.filter items ~f:(fun (i : AST.item) ->
            Attrs.late_skip i.attrs |> not)
      in
      Logs.info (fun m ->
          m "Translating items with backend %s"
            ([%show: Diagnostics.Backend.t] Fstar_backend.backend));
      let files =
        Profiling.profile (Backend Fstar_backend.backend) (List.length items)
          (fun _ -> translate with_items backend_options items ~bundles)
      in
      (* Files are streamed out directly; the response only signals success. *)
      List.iter ~f:(fun file -> File file |> Hax_io.write) files;
      Rust_engine_types.PrintOk
  | Types.Print _ ->
      failwith
        "Using the Ocaml engine for Printing only is reserved to the F* backend"

(** Entry point for interacting with the Rust hax engine: reads one query
    (skipping the hax-version check — presumably handled on the Rust side;
    NOTE(review): confirm), answers it, then flushes debug strings, the
    response and a final [Exit] message. *)
let driver_for_rust_engine () : unit =
  let query : Rust_engine_types.query =
    let json = load_table ~check_version:false in
    [%of_yojson: Rust_engine_types.query] json
  in
  Concrete_ident.ImplInfoStore.init
    (Concrete_ident_generated.impl_infos @ query.impl_infos);
  let response = driver_for_rust_engine_inner query in
  send_debug_strings ();
  Hax_io.write_json ([%yojson_of: Rust_engine_types.response] response);
  Hax_io.write_json ([%yojson_of: Types.from_engine] Exit)


================================================
FILE: engine/bin/lib.mli
================================================
(** Entrypoint of the standalone OCaml engine: reads the engine options and
    items over [Hax_io], runs the pipeline and writes back the produced
    files and diagnostics. Assumes [Hax_io.init] was called. *)
val engine : unit -> unit

(** Entrypoint used when the OCaml engine serves the Rust engine: answers a
    single query read over [Hax_io]. *)
val driver_for_rust_engine : unit -> unit


================================================
FILE: engine/bin/native_driver.ml
================================================
open Hax_engine
open Base

(* Native entry point: wires [Hax_io] to stdin/stdout (one JSON value per
   message; outputs are newline-terminated and flushed), then dispatches
   on argv to either the rust-engine driver or the standalone engine. *)
let _ =
  Hax_io.init
    (module struct
      (* Lazily-consumed stream of JSON values read from stdin. *)
      let stdin_json_stream =
        ref (Yojson.Safe.seq_from_channel In_channel.stdin)

      let read_json () =
        match Stdlib.Seq.uncons !stdin_json_stream with
        | Some (json, stream) ->
            stdin_json_stream := stream;
            Some json
        | None -> None

      let write_json msg =
        let open Stdio.Out_channel in
        Yojson.Safe.to_channel stdout msg;
        output_char stdout '\n';
        flush stdout
    end);
  match Sys.get_argv () with
  | [| _; "driver_rust_engine" |] -> Lib.driver_for_rust_engine ()
  | _ -> Lib.engine ()


================================================
FILE: engine/default.nix
================================================
# Builds the OCaml hax engine. Two OCaml dependencies that are not packaged
# in nixpkgs (non_empty_list and ppx_matches) are fetched and built here too.
{ ocamlPackages, fetchzip, hax-rust-frontend, hax-engine-names-extract, rustc
, nodejs, jq, closurecompiler, gnused, lib, removeReferencesTo, fetchFromGitHub
}:
let
  non_empty_list = ocamlPackages.buildDunePackage rec {
    pname = "non_empty_list";
    version = "0.1";
    src = fetchzip {
      url =
        "https://github.com/johnyob/ocaml-non-empty-list/archive/refs/tags/${version}.zip";
      sha256 = "sha256-BJlEi0yG2DRT5vuU9ulucMD5MPFt9duWgcNO1YsigiA=";
    };
    buildInputs = with ocamlPackages; [ base ppxlib ppx_deriving ];
    duneVersion = "3";
    minimalOCamlVersion = "4.08";
    doCheck = false;
  };
  ppx_matches = ocamlPackages.buildDunePackage rec {
    pname = "ppx_matches";
    version = "0.1";

    src = fetchzip {
      url =
        "https://github.com/wrbs/ppx_matches/archive/refs/tags/${version}.zip";
      sha256 = "sha256-nAmWF8MgW0odKkRiFcHGsvJyIxNHaZpnOlNPsef89Fo=";
    };

    buildInputs = [ ocamlPackages.ppxlib ];
    duneVersion = "3";
    minimalOCamlVersion = "4.04";
    doCheck = false;
  };
  hax-engine = ocamlPackages.buildDunePackage {
    pname = "hax-engine";
    version = "0.0.1";
    duneVersion = "3";
    # Only keep the sources dune actually consumes.
    src = lib.sourceFilesBySuffices ./. [
      ".ml"
      ".mli"
      ".js"
      "dune"
      "dune-js"
      "dune-project"
      "sh"
      "rs"
      "mld"
    ];
    buildInputs = with ocamlPackages;
      [
        base
        ppx_yojson_conv
        yojson
        ppx_sexp_conv
        ppx_hash
        pprint
        non_empty_list
        ppx_deriving_yojson
        ppx_matches
        ppx_let
        ppx_enumerate
        cmdliner
        angstrom
        ppx_string
        logs
        core
        stdio
        re
        js_of_ocaml
        ocamlgraph
      ] ++
      # F* dependencies
      [ batteries menhirLib ppx_deriving ppxlib sedlex stdint ];
    nativeBuildInputs = [
      rustc
      hax-rust-frontend
      hax-engine-names-extract
      nodejs
      ocamlPackages.js_of_ocaml-compiler
      jq
      removeReferencesTo
    ];
    strictDeps = true;
    # Scrub references to the OCaml compiler so it does not end up in the
    # binary output's runtime closure.
    installPhase = ''
      dune install --prefix=$bin --libdir=$lib/lib/ocaml/${ocamlPackages.ocaml.version}/site-lib/
      find "$bin" -type f -exec remove-references-to -t ${ocamlPackages.ocaml} '{}' +
    '';

    outputs = [ "out" "bin" "lib" ];
    passthru = {
      # `hax-engine.docs`: the odoc HTML documentation.
      docs = hax-engine.overrideAttrs (old: {
        name = "hax-engine-docs";
        nativeBuildInputs = old.nativeBuildInputs ++ [ ocamlPackages.odoc ];
        buildPhase = "dune build @doc";
        installPhase = "cp -rf _build/default/_doc/_html $out";
        outputs = [ "out" ];
      });
      # `hax-engine.js`: js_of_ocaml build of the engine, as a single
      # executable node script.
      js = hax-engine.overrideAttrs (old: {
        name = "hax-engine.js";
        nativeBuildInputs = old.nativeBuildInputs ++ [ closurecompiler gnused ];
        outputs = [ "out" ];
        buildPhase = ''
          # Enable JS build
          sed -i "s/; (include dune-js)/(include dune-js)/g" bin/dune
          # Compile JS target
          dune build bin/js_driver.bc.js
          # Optimize the size of the JS file
          closure-compiler --js _build/default/bin/js_driver.bc.js --js_output_file hax-engine.js
          # Add a shebang & make executable
          sed -i '1 i #!/usr/bin/env node' hax-engine.js
          chmod +x hax-engine.js
        '';
        checkPhase = "true";
        installPhase = "cp hax-engine.js $out";
      });
    };
  };
in hax-engine.overrideAttrs (_: { name = "hax-engine"; })


================================================
FILE: engine/doc/dune
================================================
; Odoc documentation stanza: publishes index.mld as the landing page of the
; hax-engine package documentation.
(documentation
 (package hax-engine)
 (mld_files index))


================================================
FILE: engine/doc/index.mld
================================================
{0 Hax Engine}

The engine of hax is written in OCaml, and has the following structure:
{ul {- the {!module-Hax_engine} library (located in `/engine/lib`)}
    {- the {!module-Native_driver} binary (located in `/engine/bin`)}
    {- the backends (located in `/engine/backends`):
       {ul {- {!module-Fstar_backend}}
           {- {!module-Coq_ast}}
           {- {!module-Easycrypt_ast}}
       }
    }
    {- utilities and PPXs:
       {ul {- {!module-Hacspeclib_macro_parser}}
           {- {!module-Ppx_functor_application}}
           {- {!module-Ppx_generate_features}}
           {- {!module-Ppx_inline}}
        }
    }
}



================================================
FILE: engine/dune-project
================================================
; Dune project metadata for the hax engine. The checked-in
; `hax-engine.opam` is generated from the (package ...) stanza below —
; edit dependencies here, not in the opam file.
(lang dune 3.0)

(name hax-engine)

(version 0.3.6)

(generate_opam_files true)

(source
 (github hacspec/hax))

(authors "Hax Authors")

(maintainers "Hax Authors")

(license "Apache-2.0")

(documentation https://hacspec.org/hax/)

(package
 (name hax-engine)
 (synopsis "The engine of hax, a Rust verification tool")
 (description "Hax is divided in two: a frontend (written in Rust) and an engine (written in OCaml). This is the engine.")
 (depends
        ocaml
        dune
        (base (>= "0.16.2"))
        core
        yojson
        non_empty_list
        pprint
        ppx_deriving_yojson
        ppx_yojson_conv
        ppx_sexp_conv
        ppx_compare
        ppx_hash
        ppx_deriving
        cmdliner
        angstrom
        re
        ppx_matches
        ppx_string
        logs
        (ocamlgraph (>= "2.2.0"))

        ; JS build of the engine
        js_of_ocaml-compiler
        js_of_ocaml
        js_of_ocaml-ppx
        
        ; F*-specific dependencies
        batteries
        stdint
        ppxlib
        menhirLib
        sedlex
        )
 (tags
  (topics rust verification)))



================================================
FILE: engine/hax-engine.opam
================================================
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.3.6"
synopsis: "The engine of hax, a Rust verification tool"
description:
  "Hax is divided in two: a frontend (written in Rust) and an engine (written in OCaml). This is the engine."
maintainer: ["Hax Authors"]
authors: ["Hax Authors"]
license: "Apache-2.0"
tags: ["topics" "rust" "verification"]
homepage: "https://github.com/hacspec/hax"
doc: "https://hacspec.org/hax/"
bug-reports: "https://github.com/hacspec/hax/issues"
depends: [
  "ocaml"
  "dune" {>= "3.0"}
  "base" {>= "0.16.2"}
  "core"
  "yojson"
  "non_empty_list"
  "pprint"
  "ppx_deriving_yojson"
  "ppx_yojson_conv"
  "ppx_sexp_conv"
  "ppx_compare"
  "ppx_hash"
  "ppx_deriving"
  "cmdliner"
  "angstrom"
  "re"
  "ppx_matches"
  "ppx_string"
  "logs"
  "ocamlgraph" {>= "2.2.0"}
  "js_of_ocaml-compiler"
  "js_of_ocaml"
  "js_of_ocaml-ppx"
  "batteries"
  "stdint"
  "ppxlib"
  "menhirLib"
  "sedlex"
  "odoc" {with-doc}
]
build: [
  ["dune" "subst"] {dev}
  [
    "dune"
    "build"
    "-p"
    name
    "-j"
    jobs
    "@install"
    "@runtest" {with-test}
    "@doc" {with-doc}
  ]
]
dev-repo: "git+https://github.com/hacspec/hax.git"
depexts: [
  ["nodejs"] {}
]


================================================
FILE: engine/hax-engine.opam.template
================================================
depexts: [
  ["nodejs"] {}
]


================================================
FILE: engine/lib/analyses/function_dependency.ml
================================================
open! Prelude

(** Computes, for every function item, the set of concrete identifiers its
    body mentions — a (coarse) call/use dependency map keyed by the
    function's stringified identifier. *)
module%inlined_contents Make (F : Features.T) = struct
  module FA = F
  module A = Ast.Make (F)
  module U = Ast_utils.Make (F)
  open Ast

  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)
  type analysis_data = concrete_ident list Map.M(String).t
  type id_order = int

  (* Folds all (name, body) function pairs into a map from the stringified
     name to the concrete idents collected from the body. *)
  let analyse (items : A.item list) : analysis_data =
    let temp_list = List.concat_map ~f:U.functions_of_item items in
    List.fold_left
      ~init:(Map.empty (module String))
      ~f:(fun y (name, body) ->
        Map.set y
          ~key:([%show: Concrete_ident.View.t] (Concrete_ident.to_view name))
          ~data:
            (Set.to_list
               (U.Reducers.collect_concrete_idents#visit_expr () body)))
      temp_list
end


================================================
FILE: engine/lib/analyses/mutable_variables.ml
================================================
open! Prelude

(** Analyses mutable local variables: for every function item, which
    mutable variables its body declares (with a deterministic numbering)
    and which mutable variables it reaches through the functions it
    depends on (per the [Function_dependency] pre-analysis). *)
module%inlined_contents Make (F : Features.T) = struct
  module FA = F
  module A = Ast.Make (F)
  module U = Ast_utils.Make (F)
  module Visitors = Ast_visitors.Make (F)
  open Ast

  (* Order in which a mutable variable declaration was encountered. *)
  type id_order = int

  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)
  type pre_data = concrete_ident list Map.M(String).t

  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)
  type analysis_data =
    (Local_ident.t list * (U.TypedLocalIdent.t * id_order) list)
    (* external mut_vars and new variables (e.g. needs def / local) *)
    Map.M(String).t

  let id_to_string = Concrete_ident.to_view >> [%show: Concrete_ident.View.t]

  (* Sets whose elements are either a payload ([Data]) or a local
     identifier, plus the monoid structure driving the reduce visitor in
     [analyse_expr]. *)
  module LocalIdentOrData (Ty : sig
    type ty [@@deriving compare, sexp]
  end) =
  struct
    module W = struct
      module T = struct
        type t = Data of Ty.ty | Identifier of Local_ident.t
        [@@deriving compare, sexp]
      end

      include T
      module C = Base.Comparator.Make (T)
      include C
    end

    include W
    include Set.M (W)

    class type ['s] monoid = object
      method zero : 's
      method plus : 's -> 's -> 's
    end

    (* Componentwise product of two monoids. *)
    class ['s, 't] prod_monoid (fst : 's monoid)
      (snd : 't monoid) (* : ['s * 't] monoid *) =
      object
        method fst = fst
        method snd = snd
        method zero : 's * 't = (fst#zero, snd#zero)

        method plus : 's * 't -> 's * 't -> 's * 't =
          fst#plus *** snd#plus >> uncurry ( *** )
      end

    (* Sets of [W.t] under union. *)
    class set_monoid : [(T.t, W.comparator_witness) Set.t] monoid =
      object
        method zero = Set.empty (module W)
        method plus = Set.union
      end

    (* Maps from local idents to lists, merged by list concatenation. *)
    class ['a] map_monoid :
      [(Local_ident.t, 'a list, Local_ident.comparator_witness) Map.t] monoid =
      object
        method zero = Map.empty (module Local_ident)

        method plus =
          let combine ~key:_ = ( @ ) in
          Map.merge_skewed ~combine
      end

    (* Walks [expr] with [env] binding let-bound variables to the [W.t]s
       that flowed into them; returns the [W.t]s flowing out of [expr] and
       the updated environment. Global/concrete idents contribute the
       mutable variables recorded for them in [data]. *)
    let analyse_expr (data : analysis_data)
        (env : W.t list Map.M(Local_ident).t) (expr : A.expr) :
        W.t list * W.t list Map.M(Local_ident).t =
      let mut_var_set, new_env =
        (object
           inherit [_] Visitors.reduce as super

           inherit
             [_, _] prod_monoid
               (object
                  inherit set_monoid
               end)
               (object
                  inherit [_] map_monoid
               end) as m

           (* method! visit_PBinding env mut _ var _typ subpat = *)
           (*   m#plus *)
           (*     (m#plus *)
           (*        (match mut with *)
           (*         | Mutable _ -> *)
           (*           (Set.empty (module W), Map.singleton (module LocalIdent) var ([Identifier var])) *)
           (*         | _ -> m#zero) *)
           (*        (Option.value_map subpat ~default:m#zero *)
           (*           ~f:(fst >> super#visit_pat env))) *)
           (*     (Option.value_map (Map.find env var) ~default:m#zero ~f:(fun x -> (Set.of_list (module W) x, Map.empty (module LocalIdent)))) *)

           (* `let` extends the environment: every variable bound by the
              pattern maps to whatever flowed out of the right-hand side. *)
           method! visit_expr' env e =
             match e with
             | Let { lhs = pat; rhs = expr; body; _ } ->
                 let new_set, new_env = super#visit_expr env expr in
                 m#plus
                   (super#visit_expr
                      (m#snd#plus (m#snd#plus env new_env)
                         (Map.of_alist_exn
                            (module Local_ident)
                            (List.map
                               ~f:(fun v -> (v, Set.to_list new_set))
                               (Set.to_list (U.Reducers.variables_of_pat pat)))))
                      body)
                   (new_set, m#snd#zero)
             | _ -> super#visit_expr' env e

           method! visit_local_ident (env : W.t list Map.M(Local_ident).t) ident
               =
             Option.value_map (Map.find env ident) ~default:m#zero ~f:(fun x ->
                 (Set.of_list (module W) x, m#snd#zero))

           (* NO-OP? *)
           method! visit_global_ident (env : W.t list Map.M(Local_ident).t)
               (x : Global_ident.t) =
             match x with
             | `Concrete cid ->
                 Option.value_map ~default:m#zero
                   ~f:(fun (x, _) ->
                     ( Set.of_list
                         (module W)
                         (List.map ~f:(fun x -> W.Identifier x) x),
                       m#snd#zero ))
                   (Map.find data (id_to_string cid))
             | _ -> super#visit_global_ident env x

           method! visit_concrete_ident (_env : W.t list Map.M(Local_ident).t)
               (cid : Concrete_ident.t) =
             Option.value_map ~default:m#zero
               ~f:(fun (x, _) ->
                 ( Set.of_list
                     (module W)
                     (List.map ~f:(fun x -> W.Identifier x) x),
                   m#snd#zero ))
               (Map.find data (id_to_string cid))
        end)
          #visit_expr
          env expr
      in
      (Set.to_list mut_var_set, new_env)
  end

  (* Numbers every function's mutable variables (threading the counter
     across functions), then augments each entry with the mutable
     variables of already-processed functions it depends on. *)
  let rec analyse (func_dep : pre_data) (items : A.item list) : analysis_data =
    let (mut_var_list, _) :
        (concrete_ident * (U.TypedLocalIdent.t * id_order) list) list * _ =
      List.fold_left ~init:([], 0)
        ~f:(fun (y, count) (name, body) ->
          let items, count = analyse_function_body body count in
          (y @ [ (name, items) ], count))
        (List.concat_map ~f:U.functions_of_item items)
    in
    let mut_map (* Concrete_ident *) :
        (Local_ident.t list * (U.TypedLocalIdent.t * id_order) list)
        Map.M(String).t =
      List.fold_left
        ~init:(Map.empty (module String (* Concrete_ident *)))
        ~f:(fun y (x_name, x_items) ->
          Map.set y ~key:(id_to_string x_name)
            ~data:
              ( List.map ~f:(fst >> fst) x_items
                @ Option.value_map ~default:[]
                    ~f:
                      (List.filter_map ~f:(id_to_string >> Map.find y)
                      >> List.concat_map ~f:fst)
                    (Map.find func_dep (id_to_string x_name)),
                x_items ))
        mut_var_list
    in
    mut_map

  (* Collects every mutably-bound pattern variable (with its type) in [x],
     numbered starting at [i]; returns the next fresh number as well. *)
  and analyse_function_body (x : A.expr) (i : id_order) :
      (U.TypedLocalIdent.t * id_order) list * id_order =
    let mut_var_list =
      Set.to_list
        ((object (self)
            inherit [_] Visitors.reduce as super
            inherit [_] U.Sets.TypedLocalIdent.monoid as m

            method! visit_pat' () pat' =
              match pat' with
              | PBinding { mut; var; typ; subpat; _ } ->
                  m#plus
                    (match mut with
                    | Mutable _ ->
                        Set.singleton (module U.TypedLocalIdent) (var, typ)
                    | Immutable -> Set.empty (module U.TypedLocalIdent))
                    (Option.value_map subpat ~default:m#zero
                       ~f:(fst >> self#visit_pat ()))
              | _ -> super#visit_pat' () pat'
         end)
           #visit_expr
           () x)
    in
    number_list mut_var_list i

  (* State monad *)
  and number_list (l : 'a list) (i : int) : ('a * int) list * int =
    List.fold_left ~init:([], i) ~f:(fun (y, i) x -> (y @ [ (x, i) ], i + 1)) l
end


================================================
FILE: engine/lib/analyses.ml
================================================
(* Re-exports each analysis functor under a short name. *)
module Function_dependency = Function_dependency.Make
module Mutable_variables = Mutable_variables.Make


================================================
FILE: engine/lib/ast.ml
================================================
open! Prelude

(* Pieces of syntax not modeled precisely yet (e.g. the `region` of a
   reference type below): kept as their raw textual form. *)
type todo = string [@@deriving show, yojson, hash, compare, sexp, hash, eq]
type span = Span.t [@@deriving show, yojson, hash, compare, sexp, hash, eq]

type concrete_ident = Concrete_ident.t
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(* Primitives that are not ordinary global functions: dereference, casts
   and the logical operators. *)
type logical_op = And | Or

and primitive_ident = Deref | Cast | LogicalOp of logical_op
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(** Identifiers of globals: a concrete item, a primitive, one of the
    structural tuple identifiers (indexed by arity/field), or a field
    projector. *)
module Global_ident = struct
  module T = struct
    type t =
      [ `Concrete of concrete_ident
      | `Primitive of primitive_ident
      | `TupleType of int
      | `TupleCons of int
      | `TupleField of int * int
      | `Projector of [ `Concrete of concrete_ident | `TupleField of int * int ]
      ]
    [@@deriving show, yojson, hash, compare, sexp, hash, eq]
  end

  module M = struct
    include Base.Comparator.Make (T)
    include T
  end

  include M
  module Map = Map.M (M)

  (* Builds a `Concrete identifier from a statically-known name. *)
  let of_name ~value n = `Concrete (Concrete_ident.of_name ~value n)

  (* Does [x] denote exactly the concrete name [name]? (Always false for
     non-`Concrete identifiers.) *)
  let eq_name name (x : t) : bool =
    match x with `Concrete x -> Concrete_ident.eq_name name x | _ -> false

  let to_string : t -> string = [%show: t]
end

type global_ident = Global_ident.t
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(* An attribute: either a tool attribute (path + raw token string) or a
   doc comment (line or block). *)
type attr_kind =
  | Tool of { path : string; tokens : string }
  | DocComment of { kind : doc_comment_kind; body : string }

and attr = { kind : attr_kind; span : span }
and doc_comment_kind = DCKLine | DCKBlock
and attrs = attr list [@@deriving show, yojson, hash, compare, sexp, hash, eq]

type local_ident = Local_ident.t
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(* Integer bit sizes; [SSize] is the pointer-sized `usize`/`isize` width. *)
type size = S8 | S16 | S32 | S64 | S128 | SSize
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(** Bit width of a size, when statically known ([SSize] has none). *)
let int_of_size (size : size) : int option =
  match size with
  | S8 -> Some 8
  | S16 -> Some 16
  | S32 -> Some 32
  | S64 -> Some 64
  | S128 -> Some 128
  | SSize -> None

(** Same as [int_of_size], rendered in decimal. *)
let string_of_size (size : size) : string option =
  Option.map ~f:Int.to_string (int_of_size size)

type signedness = Signed | Unsigned
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(* An integer type, e.g. `u8` is [{ size = S8; signedness = Unsigned }]. *)
type int_kind = { size : size; signedness : signedness }
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(** Renders an int kind the way Rust spells it: ["i8"], ["u32"],
    ["usize"], ... *)
let show_int_kind { size; signedness } =
  let sign = match signedness with Signed -> "i" | Unsigned -> "u" in
  let width =
    match int_of_size size with
    | Some bits -> Int.to_string bits
    | None -> "size"
  in
  sign ^ width

(* The floating-point types of Rust. *)
type float_kind = F16 | F32 | F64 | F128
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(** Renders a float kind the way Rust spells it: ["f16"], ["f32"],
    ["f64"] or ["f128"]. *)
let show_float_kind (kind : float_kind) : string =
  "f"
  ^ (match kind with F16 -> "16" | F32 -> "32" | F64 -> "64" | F128 -> "128")

(* Literal values. Numeric literals keep their textual [value]; the sign
   is carried separately in [negative]. *)
type literal =
  | String of string
  | Char of char
  | Int of { value : string; negative : bool; kind : int_kind }
  | Float of { value : string; negative : bool; kind : float_kind }
  | Bool of bool
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

(* Mutability, parameterized by the feature witness required to allow
   mutation. *)
type 'mut_witness mutability = Mutable of 'mut_witness | Immutable
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

type item_kind =
  [ `Fn
  | `TyAlias
  | `Type
  | `IMacroInvokation
  | `Trait
  | `Impl
  | `Alias
  | `Use
  | `Quote
  | `HaxError
  | `NotImplementedYet ]
[@@deriving show, yojson, hash, compare, sexp, hash, eq]
(** Describes the (shallow) kind of an item. *)

(* Where a quote item is placed relative to its origin item. *)
type item_quote_origin_position = [ `Before | `After | `Replace ]
[@@deriving show, yojson, hash, compare, sexp, hash, eq]

type item_quote_origin = {
  item_kind : item_kind;
  item_ident : concrete_ident;
  position : item_quote_origin_position;
}
[@@deriving show, yojson, hash, compare, sexp, hash, eq]
(** From where does a quote item comes from? *)

(** The hax AST, parametrized over a set of features [F]. *)
module Make =
functor
  (F : Features.T)
  ->
  struct
    type safety_kind = Safe | Unsafe of F.unsafe
    [@@deriving show, yojson, hash, compare, sexp, eq]

    type borrow_kind = Shared | Unique | Mut of F.mutable_reference
    [@@deriving show, yojson, hash, compare, sexp, eq]

    type binding_mode = ByValue | ByRef of (borrow_kind * F.reference)
    [@@deriving show, yojson, hash, compare, sexp, eq]

    type ty =
      | TBool
      | TChar
      | TInt of int_kind
      | TFloat of float_kind
      | TStr
      | TApp of { ident : global_ident; args : generic_value list }
      | TArray of { typ : ty; length : expr }
      | TSlice of { witness : F.slice; ty : ty }
      | TRawPointer of { witness : F.raw_pointer } (* todo *)
      | TRef of {
          witness : F.reference;
          region : todo;
          typ : ty;
          mut : F.mutable_reference mutability;
        }
      | TParam of local_ident
      | TArrow of ty list * ty
      | TAssociatedType of { impl : impl_expr; item : concrete_ident }
      | TOpaque of concrete_ident
      | TDyn of { witness : F.dyn; goals : dyn_trait_goal list }

    and generic_value =
      | GLifetime of { lt : todo; witness : F.lifetime }
      | GType of ty
      | GConst of expr

    (** A witness that [goal] holds, together with how it was established. *)
    and impl_expr = { kind : impl_expr_kind; goal : trait_goal }

    and impl_expr_kind =
      | Self
      | Concrete of trait_goal
      | LocalBound of { id : string }
      | Parent of { impl : impl_expr; ident : impl_ident }
      | Projection of {
          impl : impl_expr;
          item : concrete_ident;
          ident : impl_ident;
        }
      | ImplApp of { impl : impl_expr; args : impl_expr list }
      | Dyn
      | Builtin of trait_goal

    and trait_goal = { trait : concrete_ident; args : generic_value list }
    (** A fully applied trait: [Foo] (or `SomeTy: Foo`). An `impl_expr` "inhabits" a `trait_goal`. *)

    and dyn_trait_goal = {
      trait : concrete_ident;
      non_self_args : generic_value list;
    }
    (** A dyn trait: [Foo<_, T0, ..., Tn>]. The generic arguments are known but
        the actual type implementing the trait is known only dynamically. *)

    and impl_ident = { goal : trait_goal; name : string }
    (** An impl identifier [{goal; name}] can be:
        - An in-scope variable [name] that inhabits [goal].
        - A field of some other impl expression [i]: [i.name] inhabits [goal].
          This corresponds to parent bounds or associated type bounds.
        - An argument that introduces a variable [name] that inhabits [goal]. *)

    and projection_predicate = {
      impl : impl_expr;
      assoc_item : concrete_ident;
      typ : ty;
    }
    (** Expresses a constraint over an associated type. For instance:
        [fn f<T: Foo<S = String>>(...)] (provided the trait `Foo`
        has an associated type `S`). *)

    (* TODO: ADD SPAN! *)
    and pat' =
      | PWild
      | PAscription of { typ : ty; typ_span : span; pat : pat }
      | PConstruct of {
          constructor : global_ident;
          is_record : bool; (* are fields named? *)
          is_struct : bool; (* a struct has one constructor *)
          fields : field_pat list;
        }
      (* An or-pattern, e.g. `p | q`.
         Invariant: `List.length subpats >= 2`. *)
      | POr of { subpats : pat list }
      | PArray of { args : pat list }
      | PDeref of { subpat : pat; witness : F.reference }
      | PConstant of { lit : literal }
      | PBinding of {
          mut : F.mutable_variable mutability;
          mode : binding_mode;
          var : local_ident;
          typ : ty;
          subpat : (pat * F.as_pattern) option;
        }

    and pat = { p : pat'; span : span; typ : ty }
    and field_pat = { field : global_ident; pat : pat }

    (* This marker describes what control flow is present in a loop.
       It is added by phase `DropReturnBreakContinue` and the
       information is used in `FunctionalizeLoops`. We need it because
       we replace the control flow nodes of the AST by some encoding
       in the `ControlFlow` enum. *)
    and cf_kind = BreakOnly | BreakOrReturn

    and expr' =
      (* pure fragment *)
      | If of { cond : expr; then_ : expr; else_ : expr option }
      | App of {
          f : expr;
          args : expr list (* ; f_span: span *);
          generic_args : generic_value list;
          bounds_impls : impl_expr list;
          trait : (impl_expr * generic_value list) option;
        }
      | Literal of literal
      | Array of expr list
      | Construct of {
          constructor : global_ident;
          is_record : bool; (* are fields named? *)
          is_struct : bool; (* a struct has one constructor *)
          fields : (global_ident * expr) list;
          base : (expr * F.construct_base) option;
        }
      | Match of { scrutinee : expr; arms : arm list }
      | Let of {
          monadic : (supported_monads * F.monadic_binding) option;
          lhs : pat;
          rhs : expr;
          body : expr;
        }
      | Block of { e : expr; safety_mode : safety_kind; witness : F.block }
        (* Corresponds to `{e}`: this is important for places *)
      | LocalVar of local_ident
      | GlobalVar of global_ident
      | Ascription of { e : expr; typ : ty }
      (* Macro *)
      | MacroInvokation of {
          macro : global_ident;
          args : string;
          witness : F.macro;
        }
      (* Mut *)
      | Assign of { lhs : lhs; e : expr; witness : F.mutable_variable }
      (* Loop *)
      | Loop of {
          body : expr;
          kind : loop_kind;
          state : loop_state option;
          control_flow : (cf_kind * F.fold_like_loop) option;
          label : string option;
          witness : F.loop;
        }
      (* ControlFlow *)
      | Break of {
          e : expr;
          acc : (expr * F.state_passing_loop) option;
          label : string option;
          witness : F.break * F.loop;
        }
      | Return of { e : expr; witness : F.early_exit }
      | QuestionMark of { e : expr; return_typ : ty; witness : F.question_mark }
          (** The expression `e?`. In opposition to Rust, no implicit coercion
              is applied on the (potential) error payload of `e`. Coercion
              should be made explicit within `e`. *)
      | Continue of {
          acc : (expr * F.state_passing_loop) option;
          label : string option;
          witness : F.continue * F.loop;
        }
      (* Mem *)
      | Borrow of { kind : borrow_kind; e : expr; witness : F.reference }
      (* Raw borrow *)
      | AddressOf of {
          mut : F.mutable_pointer mutability;
          e : expr;
          witness : F.raw_pointer;
        }
      | Closure of { params : pat list; body : expr; captures : expr list }
      | EffectAction of { action : F.monadic_action; argument : expr }
      | Quote of quote
          (** A quotation is an inlined piece of backend code interleaved with
              Rust code *)

    and expr = { e : expr'; span : span; typ : ty }
    and quote = { contents : quote_content list; witness : F.quote }

    and quote_content =
      | Verbatim of string
      | Expr of expr
      | Pattern of pat
      | Typ of ty

    and supported_monads =
      | MException of ty
          (** a exception monad, which we use to handle early returns *)
      | MResult of ty  (** the [Result] monad *)
      | MOption  (** the [Option] monad *)

    and loop_kind =
      | UnconditionalLoop
      | WhileLoop of { condition : expr; witness : F.while_loop }
      | ForLoop of { pat : pat; it : expr; witness : F.for_loop }
      | ForIndexLoop of {
          start : expr;
          end_ : expr;
          var : local_ident;
          var_typ : ty;
          witness : F.for_index_loop;
        }

    and loop_state = { init : expr; bpat : pat; witness : F.state_passing_loop }

    (* | WhileLoop of { *)
    (*     condition: expr; *)
    (*     witness : F.while_loop; *)
    (*   } *)

    (* TODO: LHS should be places or "compositions" of places, see [assignee expression] in https://doc.rust-lang.org/reference/expressions.html#place-expressions-and-value-expressions (issue #222) *)
    and lhs =
      | LhsLocalVar of { var : Local_ident.t; typ : ty }
      | LhsVecRef of { e : lhs; typ : ty; witness : F.nontrivial_lhs }
      | LhsArbitraryExpr of { e : expr; witness : F.arbitrary_lhs }
      | LhsFieldAccessor of {
          e : lhs;
          typ : ty;
          (* TODO: change type (see #316) *)
          field : global_ident;
          witness : F.nontrivial_lhs;
        }
      | LhsArrayAccessor of {
          e : lhs;
          typ : ty;
          index : expr;
          witness : F.nontrivial_lhs;
        }

    (* A guard is a condition on a pattern like: *)
    (* match x {.. if guard => .., ..}*)
    and guard = { guard : guard'; span : span }

    (* Only if-let guards are supported for now but other variants like regular if *)
    (* could be added later (regular if guards are for now desugared as IfLet) *)
    and guard' = IfLet of { lhs : pat; rhs : expr; witness : F.match_guard }

    (* OCaml + visitors is not happy with `pat`... hence `arm_pat`... *)
    and arm' = { arm_pat : pat; body : expr; guard : guard option }

    and arm = { arm : arm'; span : span }
    [@@deriving show, yojson, hash, compare, sexp, eq]

    type generic_param = {
      ident : local_ident;
      span : span;
      attrs : attrs;
      kind : generic_param_kind;
    }

    and generic_param_kind =
      | GPLifetime of { witness : F.lifetime }
      | GPType
      | GPConst of { typ : ty }

    and generic_constraint =
      | GCLifetime of todo * F.lifetime
      | GCType of impl_ident
      | GCProjection of projection_predicate
          (** Trait or lifetime constraints. For instance, the bounds `A` and
              `B` in `fn f<T: A + B>()`. *)
    [@@deriving show, yojson, hash, compare, sexp, eq]

    type param = { pat : pat; typ : ty; typ_span : span option; attrs : attrs }

    and generics = {
      params : generic_param list;
      constraints : generic_constraint list;
    }

    and variant = {
      name : concrete_ident;
      arguments : (concrete_ident * ty * attrs) list;
      is_record : bool;
      attrs : attrs;
    }

    and item' =
      (* Todo: topological sort, rec bundles *)
      | Fn of {
          name : concrete_ident;
          generics : generics;
          body : expr;
          params : param list;
          safety : safety_kind;
        }
      | TyAlias of { name : concrete_ident; generics : generics; ty : ty }
      | Type of {
          name : concrete_ident;
          generics : generics;
          variants : variant list;
          is_struct : bool;
        }
      | IMacroInvokation of {
          macro : concrete_ident;
          argument : string;
          span : span;
          witness : F.macro;
        }
      | Trait of {
          name : concrete_ident;
          generics : generics;
          items : trait_item list;
          safety : safety_kind;
        }
      | Impl of {
          generics : generics;
          self_ty : ty;
          of_trait : concrete_ident * generic_value list;
          items : impl_item list;
          parent_bounds : (impl_expr * impl_ident) list;
          safety : safety_kind;
        }
      | Alias of { name : concrete_ident; item : concrete_ident }
          (** `Alias {name; item}` is basically a `use item as name;`
               where `name` is the renamed ident. *)
      | Use of {
          path : string list;
          is_external : bool;
          rename : string option;
        }
      | Quote of { quote : quote; origin : item_quote_origin }
      | HaxError of string
      | NotImplementedYet

    and item = { v : item'; span : span; ident : concrete_ident; attrs : attrs }

    and impl_item' =
      | IIType of { typ : ty; parent_bounds : (impl_expr * impl_ident) list }
      | IIFn of { body : expr; params : param list }

    and impl_item = {
      ii_span : span;
      ii_generics : generics;
      ii_v : impl_item';
      ii_ident : concrete_ident;
      ii_attrs : attrs;
    }

    and trait_item' =
      | TIType of impl_ident list
      | TIFn of ty
      | TIDefault of {
          params : param list;
          body : expr;
          witness : F.trait_item_default;
        }

    and trait_item = {
      (* TODO: why do I need to prefix by `ti_` here? I guess visitors fail or something *)
      ti_span : span;
      ti_generics : generics;
      ti_v : trait_item';
      ti_ident : concrete_ident;
      ti_attrs : attrs;
    }
    [@@deriving show, yojson, hash, compare, sexp, eq]

    (* A module is a flat list of items. *)
    type modul = item list

    (** Builds an [item] that carries an error message (kind [HaxError]). *)
    let make_hax_error_item (span : span) (ident : Concrete_ident.t)
        (s : string) : item =
      { v = HaxError s; span; ident; attrs = [] }

    (* module F = F *)
  end

(* The minimal interface an instantiated AST module must expose. *)
module type T = sig
  type expr [@@deriving show, compare, yojson]
  type item' [@@deriving show, compare, yojson]

  type item = {
    v : item';
    span : span;
    ident : Concrete_ident.t;
    attrs : attrs;
  }
  [@@deriving show, compare, yojson]

  (* Builds an item wrapping an error message. *)
  val make_hax_error_item : span -> Concrete_ident.t -> string -> item
end

(* Instantiations of the AST over the two feature sets. *)
module Rust = Make (Features.Rust)
module Full = Make (Features.Full)


================================================
FILE: engine/lib/ast_builder.ml
================================================
open! Prelude
open! Ast

(** Helpers to build AST nodes, on top of the generated builders. *)
module Make (F : Features.T) = struct
  module AST = Ast.Make (F)
  open AST

  open struct
    module Gen = Ast_builder_generated.Make (F)
  end

  module type SPAN = Gen.SPAN

  include Gen.Explicit

  (* Builders that require no span information. *)
  module NoSpan = struct
    let ty_tuple (types : ty list) : ty =
      let ident = `TupleType (List.length types) in
      let args = List.map ~f:(fun typ -> GType typ) types in
      TApp { ident; args }

    (* A one-element tuple type is just the type itself. *)
    let ty_tuple_or_id : ty list -> ty = function
      | [ ty ] -> ty
      | types -> ty_tuple types

    (** This gives the type of a value in the `ControlFlow` enum *)
    let ty_cf ~(continue_type : ty) ~(break_type : ty) : ty =
      TApp
        {
          ident =
            Global_ident.of_name ~value:false
              Core__ops__control_flow__ControlFlow;
          args = [ GType break_type; GType continue_type ];
        }

    (** This gives the type of a value encoded in the `ControlFlow` enum. In
        case a `return_type` is provided the encoding is nested: `return v` is
        `Break (Break v)` `break v` is `Break (Continue (v, acc))` *)
    let ty_cf_return ~(acc_type : ty) ~(break_type : ty)
        ~(return_type : ty option) : ty =
      let break_type = ty_tuple [ break_type; acc_type ] in
      match return_type with
      | Some ret_ty ->
          let break_type = ty_cf ~break_type:ret_ty ~continue_type:break_type in
          ty_cf ~break_type ~continue_type:acc_type
      | None -> ty_cf ~break_type ~continue_type:acc_type
  end

  include NoSpan

  (* Builders that take the span as an explicit argument. *)
  module Explicit = struct
    let ty_unit : ty = TApp { ident = `TupleType 0; args = [] }
    let expr_unit = expr_GlobalVar (`TupleCons 0) ~typ:ty_unit

    let expr_tuple ~(span : span) (tuple : expr list) =
      let len = List.length tuple in
      (* Tuple fields are `TupleField (index, arity). *)
      let fields = List.mapi ~f:(fun i x -> (`TupleField (i, len), x)) tuple in
      let typ = NoSpan.ty_tuple @@ List.map ~f:(fun { typ; _ } -> typ) tuple in
      expr_Construct ~span ~typ ~constructor:(`TupleCons len) ~is_record:false
        ~is_struct:true ~fields ~base:None

    let pat_PBinding ~typ = pat_PBinding ~inner_typ:typ ~typ

    let arm ~span arm_pat ?(guard = None) body =
      { arm = { arm_pat; body; guard }; span }

    (* Builds the pattern `ControlFlow::Break(pat)` or
       `ControlFlow::Continue(pat)`. *)
    let pat_Constructor_CF ~(span : span) ~(typ : ty)
        (cf : [ `Break | `Continue ]) (pat : pat) =
      match cf with
      | `Break ->
          {
            p =
              PConstruct
                {
                  constructor =
                    Global_ident.of_name ~value:true
                      Core__ops__control_flow__ControlFlow__Break;
                  fields =
                    [
                      {
                        field =
                          Global_ident.of_name ~value:true
                            Core__ops__control_flow__ControlFlow__Break__0;
                        pat;
                      };
                    ];
                  is_record = false;
                  is_struct = false;
                };
            typ;
            span;
          }
      | `Continue ->
          {
            p =
              PConstruct
                {
                  constructor =
                    Global_ident.of_name ~value:true
                      Core__ops__control_flow__ControlFlow__Continue;
                  fields =
                    [
                      {
                        field =
                          Global_ident.of_name ~value:true
                            Core__ops__control_flow__ControlFlow__Continue__0;
                        pat;
                      };
                    ];
                  is_record = false;
                  is_struct = false;
                };
            typ;
            span;
          }

    (* Builds a (tuple-like) constructor application. *)
    let call_Constructor' (constructor : global_ident) is_struct
        (args : expr list) span ret_typ =
      let mk_field =
        let len = List.length args in
        (* `TupleField takes (index, arity) — see [expr_tuple] above; the
           varying index comes first. *)
        fun n -> `TupleField (n, len)
      in
      let fields = List.mapi ~f:(fun i arg -> (mk_field i, arg)) args in
      {
        e =
          Construct
            { constructor; is_record = false; is_struct; fields; base = None };
        typ = ret_typ;
        span;
      }

    (* Same as [call_Constructor'], from a constructor name. *)
    let call_Constructor (constructor_name : Concrete_ident.name)
        (is_struct : bool) (args : expr list) span ret_typ =
      call_Constructor'
        (`Concrete (Concrete_ident.of_name ~value:true constructor_name))
        is_struct args span ret_typ

    (* Builds `ControlFlow::Break(e)` or `ControlFlow::Continue(e)`. *)
    let expr'_Constructor_CF ~(span : span) ~(break_type : ty)
        ?(continue_type : ty = ty_unit) (cf : [ `Break | `Continue ]) (e : expr)
        =
      let typ = NoSpan.ty_cf ~continue_type ~break_type in
      match cf with
      | `Break ->
          call_Constructor Core__ops__control_flow__ControlFlow__Break false
            [ e ] span typ
      | `Continue ->
          call_Constructor Core__ops__control_flow__ControlFlow__Continue false
            [ e ] span typ

    (** We use the following encoding of return, break and continue in the
        `ControlFlow` enum: Return e -> Break (Break e) Break e -> Break
        ((Continue(e, acc))) Continue -> Continue(acc)

        In case there is no return we simplify to: Break e -> (Break (e, acc))
        Continue -> (Continue (acc)) *)
    let expr_Constructor_CF ~(span : span) ~(break_type : ty option)
        ~(return_type : ty option) ~(acc : expr) ?(e : expr = expr_unit ~span)
        (cf : [ `Return | `Break | `Continue ]) =
      let break_type = Option.value ~default:ty_unit break_type in
      match cf with
      | `Return ->
          let continue_type = NoSpan.ty_tuple [ break_type; acc.typ ] in
          let inner =
            expr'_Constructor_CF ~break_type:e.typ ~continue_type ~span `Break e
          in
          expr'_Constructor_CF ~span ~break_type:inner.typ
            ~continue_type:acc.typ `Break inner
      | `Break ->
          let tuple = expr_tuple ~span [ e; acc ] in
          let inner =
            match return_type with
            | Some ret_typ ->
                expr'_Constructor_CF ~span ~break_type:ret_typ
                  ~continue_type:tuple.typ `Continue tuple
            | None -> tuple
          in
          expr'_Constructor_CF ~span ~break_type:inner.typ
            ~continue_type:acc.typ `Break inner
      | `Continue ->
          let break_type =
            let tuple_type = NoSpan.ty_tuple [ break_type; acc.typ ] in
            match return_type with
            | Some ret_typ ->
                NoSpan.ty_cf ~break_type:ret_typ ~continue_type:tuple_type
            | None -> tuple_type
          in
          expr'_Constructor_CF ~span ~break_type ~continue_type:acc.typ
            `Continue acc
  end

  include Explicit

  (* Builders where the span is provided once, by the [Span] module. *)
  module Make0 (Span : Gen.SPAN) = struct
    open! Span
    include Gen.Make (Span)
    include NoSpan

    let pat_PBinding = Explicit.pat_PBinding ~span
    let expr_unit = expr_unit ~span
    let expr_tuple = expr_tuple ~span
    let pat_Constructor_CF = pat_Constructor_CF ~span
    let expr'_Constructor_CF = expr'_Constructor_CF ~span
    let expr_Constructor_CF = expr_Constructor_CF ~span
    let arm ?(guard = None) = arm ~span ?guard
  end

  module type S = module type of Make0 (struct
    (* This [failwith] is OK: this module is never actually used for computation. It is useful only for typing. *)
    let span = failwith "type only module: this will never be computed"
  end)

  module Make (Span : sig
    val span : span
  end) : S =
    Make0 (Span)

  (* First-class-module variant of [Make]. *)
  let make : span -> (module S) =
   fun span : (module S) ->
    (module Make0 (struct
      let span = span
    end))
end


================================================
FILE: engine/lib/ast_destruct.ml
================================================
open! Prelude
open! Ast

module Make (F : Features.T) = struct
  include Ast_destruct_generated.Make (F)

  (* Expect a list of a statically known length; return its elements as a
     tuple, or [None] when the length does not match. *)
  let list_0 l = match l with [] -> Some () | _ -> None
  let list_1 l = match l with [ x1 ] -> Some x1 | _ -> None
  let list_2 l = match l with [ x1; x2 ] -> Some (x1, x2) | _ -> None
  let list_3 l = match l with [ x1; x2; x3 ] -> Some (x1, x2, x3) | _ -> None

  let list_4 l =
    match l with [ x1; x2; x3; x4 ] -> Some (x1, x2, x3, x4) | _ -> None

  let list_5 l =
    match l with [ x1; x2; x3; x4; x5 ] -> Some (x1, x2, x3, x4, x5) | _ -> None
end


================================================
FILE: engine/lib/ast_utils.ml
================================================
open! Prelude
open Ast

(* Whether a visitor is currently traversing expression or type positions
   (see [Mappers.rename_global_idents], which switches level in [visit_ty]). *)
type visit_level = ExprLevel | TypeLevel

(* A local identifier paired with its type. Comparison, equality and sexp
   conversion look only at the identifier, never at the type. *)
module TypedLocalIdent (Ty : sig
  type ty [@@deriving show, yojson]
end) =
struct
  module T = struct
    type t = Local_ident.t * Ty.ty [@@deriving show, yojson]

    let sexp_of_t ((ident, _) : t) = Local_ident.sexp_of_t ident
    let compare ((ia, _) : t) ((ib, _) : t) = [%compare: Local_ident.t] ia ib
    let equal ((ia, _) : t) ((ib, _) : t) = [%eq: Local_ident.t] ia ib
  end

  include Base.Comparator.Make (T)
  include T
end

(* A list without duplicates, represented as a plain list behind an abstract
   type. *)
module UniqueList (T : sig
  type t [@@deriving eq, show, yojson]
  type comparator_witness
end) : sig
  type t [@@deriving eq, show, yojson]

  val without : T.t -> t -> t
  val cons : T.t -> t -> t
  val to_list : t -> T.t list
  val from_set : (T.t, T.comparator_witness) Set.t -> t
  val empty : t
  val is_empty : t -> bool
  val singleton : T.t -> t
end = struct
  type t = T.t list [@@deriving eq, show, yojson]

  let without x l = List.filter l ~f:(fun y -> not ([%eq: T.t] x y))
  let cons x xs = x :: xs
  let to_list l = l
  let from_set = Set.to_list
  let empty = []
  let is_empty = function [] -> true | _ -> false
  let singleton x = [ x ]
end

module Make (F : Features.T) = struct
  module AST = Ast.Make (F)
  open AST
  module TypedLocalIdent = TypedLocalIdent (AST)
  module Visitors = Ast_visitors.Make (F)
  module M = Ast_builder.Make (F)
  module D = Ast_destruct.Make (F)

  (* Partial destructors: each returns [Some _] when the node has the expected
     head constructor, [None] otherwise. *)
  module Expect = struct
    let mut_borrow (e : expr) : expr option =
      match e.e with
      | Borrow { kind = Mut _; e = inner; _ } -> Some inner
      | _ -> None

    let borrow (e : expr) : expr option =
      match e.e with Borrow { e = inner; _ } -> Some inner | _ -> None

    let block (e : expr) : expr option =
      match e.e with Block { e = inner; _ } -> Some inner | _ -> None

    let deref (e : expr) : expr option =
      match e.e with
      | App { f = { e = GlobalVar (`Primitive Deref); _ }; args = [ arg ]; _ }
        ->
          Some arg
      | _ -> None

    let closure (e : expr) : (pat list * expr) option =
      match e.e with
      | Closure { params; body; _ } -> Some (params, body)
      | _ -> None

    let app (e : expr) :
        (expr
        * expr list
        * generic_value list
        * impl_expr option
        * impl_expr list)
        option =
      match e.e with
      | App { f; args; generic_args; trait; bounds_impls } ->
          (* TODO: propagate full trait *)
          let impl = Option.map ~f:fst trait in
          Some (f, args, generic_args, impl, bounds_impls)
      | _ -> None

    let pbinding_simple (p : pat) : (local_ident * ty) option =
      match p.p with
      | PBinding { mut = Immutable; mode = _; var; typ; subpat = None } ->
          Some (var, typ)
      | _ -> None

    (* One-argument application of the concrete function [f]. *)
    let concrete_app1 (f : Concrete_ident.name) (e : expr) : expr option =
      match e.e with
      | App
          {
            f = { e = GlobalVar (`Concrete f'); _ };
            args = [ arg ];
            generic_args = _;
            trait = _;
            _ (* TODO: see issue #328 *);
          }
        when Concrete_ident.eq_name f f' ->
          Some arg
      | _ -> None

    let deref_mut_app = concrete_app1 Core__ops__deref__DerefMut__deref_mut

    let local_var (e : expr) : local_ident option =
      match e.e with LocalVar v -> Some v | _ -> None

    let arrow (typ : ty) : (ty list * ty) option =
      match typ with
      | TArrow (inputs, output) -> Some (inputs, output)
      | _ -> None

    let mut_ref (typ : ty) : ty option =
      match typ with
      | TRef { mut = Mutable _; typ = inner; _ } -> Some inner
      | _ -> None

    let concrete_app' (e : expr') : concrete_ident option =
      match e with
      | App { f = { e = GlobalVar (`Concrete c); _ }; _ } -> Some c
      | _ -> None
  end

  (* Set modules for the various identifier kinds, each exposing a [monoid]
     class (zero = empty set, plus = union) for use with reduce visitors. *)
  module Sets = struct
    module Global_ident = struct
      include Set.M (Global_ident)

      class ['s] monoid =
        object
          method private zero = Set.empty (module Global_ident)
          method private plus = Set.union
        end
    end

    module Concrete_ident = struct
      include Set.M (Concrete_ident)

      class ['s] monoid =
        object
          method private zero = Set.empty (module Concrete_ident)
          method private plus = Set.union
        end
    end

    module Local_ident = struct
      include Set.M (Local_ident)

      class ['s] monoid =
        object
          method private zero = Set.empty (module Local_ident)
          method private plus = Set.union
        end
    end

    module TypedLocalIdent = struct
      include Set.M (TypedLocalIdent)

      (* [show]/[pp] render the set as the list of its elements. *)
      let show (x : t) : string =
        [%show: TypedLocalIdent.t list] @@ Set.to_list x

      let pp (fmt : Stdlib.Format.formatter) (s : t) : unit =
        Stdlib.Format.pp_print_string fmt @@ show s

      class ['s] monoid =
        object
          method private zero = Set.empty (module TypedLocalIdent)
          method private plus = Set.union
        end
    end
  end

  (* Lists the functions defined by an item: a [Fn] yields its own body, an
     [Impl] yields the bodies of its [IIFn] members; anything else, nothing. *)
  let functions_of_item (x : item) : (concrete_ident * expr) list =
    match x.v with
    | Fn { name; body; _ } -> [ (name, body) ]
    | Impl { items; _ } ->
        List.filter_map items ~f:(fun item ->
            match item.ii_v with
            | IIFn { body; _ } -> Some (item.ii_ident, body)
            | _ -> None)
    | _ -> []

  (* AST-to-AST transformations, implemented as map visitors. *)
  module Mappers = struct
    (* Gives every span a fresh id (see [Span.refresh_id]). *)
    let regenerate_span_ids =
      object
        inherit [_] Visitors.map
        method! visit_span () = Span.refresh_id
      end

    (* Rewrites dereferences of borrows `*(&e)` into `e`, recursively. *)
    let normalize_borrow_mut =
      object
        inherit [_] Visitors.map as super

        method! visit_expr () e =
          let rec expr e =
            match e.e with
            | App
                {
                  f = { e = GlobalVar (`Primitive Deref); _ };
                  args = [ { e = Borrow { e = sub; _ }; _ } ];
                  generic_args = _;
                  trait = _;
                  _ (* TODO: see issue #328 *);
                } ->
                expr sub
            | _ -> super#visit_expr () e
          in
          expr e
      end

    (* Replaces every [Fn] body by the unit value, keeping signatures. *)
    let drop_bodies =
      object
        inherit [_] Visitors.map as super

        method! visit_item' () item' =
          match item' with
          | Fn { name; generics; body; params; safety } ->
              Fn
                {
                  name;
                  generics;
                  body = { body with e = GlobalVar (`TupleCons 0) };
                  params;
                  safety;
                }
          | _ -> super#visit_item' () item'
      end

    (* Substitutes local variables according to [map]; variables absent from
       [map] are left untouched. *)
    let replace_local_variables (map : (local_ident, expr, _) Map.t) =
      object
        inherit [_] Visitors.map as super

        method! visit_expr () e =
          match e.e with
          | LocalVar var -> Map.find map var |> Option.value ~default:e
          | _ -> super#visit_expr () e
      end

    (** [replace_local_variable var replacement] returns a visitor that maps any
        type of the AST replacing every occurence of the expression
        [LocalVar var] by [replacement]. *)
    let replace_local_variable (var : local_ident) (replacement : expr) =
      replace_local_variables
        (Map.of_alist_exn (module Local_ident) [ (var, replacement) ])

    (* Applies [f] to every local identifier. *)
    let rename_local_idents (f : local_ident -> local_ident) =
      object
        inherit [_] Visitors.map as _super
        method! visit_local_ident () ident = f ident
      end

    (* Applies [f] to every global identifier; [f] also receives the current
       visit level (expression or type position). *)
    let rename_global_idents (f : visit_level -> global_ident -> global_ident) =
      object
        inherit [_] Visitors.map as super
        method! visit_global_ident (lvl : visit_level) ident = f lvl ident
        method! visit_ty _ t = super#visit_ty TypeLevel t
      end

    (* Applies [f] to every concrete identifier, including those occurring
       inside global identifiers and projectors. *)
    let rename_concrete_idents
        (f : visit_level -> Concrete_ident.t -> Concrete_ident.t) =
      object
        inherit [_] Visitors.map as super
        method! visit_concrete_ident (lvl : visit_level) ident = f lvl ident

        method! visit_global_ident lvl (x : Global_ident.t) =
          match x with
          | `Concrete x -> `Concrete (f lvl x)
          | `Projector (`Concrete x) -> `Projector (`Concrete (f lvl x))
          | _ -> super#visit_global_ident lvl x

        method! visit_ty _ t = super#visit_ty TypeLevel t
      end

    (* [rename_global_idents] specialized to items, starting at [ExprLevel]. *)
    let rename_global_idents_item
        (f : visit_level -> global_ident -> global_ident) : item -> item =
      (rename_global_idents f)#visit_item ExprLevel

    (** Add type ascription nodes in nested function calls. This helps type
        inference in the presence of associated types in backends that don't
        support them well (F* for instance). *)
    let add_typ_ascription =
      let is_app = Expect.concrete_app' >> Option.is_some in
      let o =
        object
          inherit [_] Visitors.map as super

          method! visit_expr' (ascribe_app : bool) e =
            (* Enable type ascription of underlying function
               application. In the F* backend, we're annotating every
               [Let] bindings, thus if we're facing a [Let], we turn
               off application ascription. Similarly, if we're facing
               an Ascription, we turn off application ascription. *)
            let ascribe_app =
              (ascribe_app || is_app e)
              && not ([%matches? Let _ | Ascription _] e)
            in
            super#visit_expr' ascribe_app e

          method! visit_expr (ascribe_app : bool) e =
            (* Do not ascribe expressions of type [Prop]. *)
            let ascribe_app =
              ascribe_app
              && not
                   (match e.typ with
                   | TApp { ident; _ } ->
                       Global_ident.eq_name Hax_lib__prop__Prop ident
                   | _ -> false)
            in
            let e = super#visit_expr ascribe_app e in
            (* Wraps [e] in an [Ascription] node, unless it already is one. *)
            let ascribe (e : expr) =
              if [%matches? Ascription _] e.e then e
              else { e with e = Ascription { e; typ = e.typ } }
            in
            match e.e with
            | App
                {
                  f = { e = GlobalVar (`Primitive Cast); _ } as f;
                  args = [ arg ];
                  generic_args;
                  trait;
                  bounds_impls;
                } ->
                ascribe
                  {
                    e with
                    e =
                      App
                        {
                          f;
                          args = [ ascribe arg ];
                          generic_args;
                          trait;
                          bounds_impls;
                        };
                  }
            (* Match scrutinees need to be ascribed as well
               (see https://github.com/hacspec/hax/issues/1207).*)
            | Match { scrutinee; arms } ->
                { e with e = Match { scrutinee = ascribe scrutinee; arms } }
            | _ ->
                (* Ascribe the return type of a function application & constructors *)
                if (ascribe_app && is_app e.e) || [%matches? Construct _] e.e
                then ascribe e
                else e
        end
      in
      o#visit_item false
  end

  module Reducers = struct
    (** Collects every local identifier occurring in the visited AST node. *)
    let collect_local_idents =
      object
        inherit [_] Visitors.reduce as _super
        inherit [_] Sets.Local_ident.monoid as _m
        method! visit_local_ident () x = Set.singleton (module Local_ident) x
      end

    include struct
      open struct
        (* The environment is the list of identifiers currently in scope. *)
        type env = Local_ident.t list

        (* [id_shadows ~env id] holds when the first identifier in [env]
           bearing [id]'s name is a *distinct* identifier — i.e. [id] is a
           hygienic ("fake") shadowing of that binder. *)
        let id_shadows ~(env : env) (id : Local_ident.t) =
          List.find env ~f:(fun x -> String.equal x.name id.name)
          |> Option.value ~default:id
          |> [%equal: Local_ident.t] id
          |> not

        let ( ++ ) = Set.union

        (* Visits [x] with [vars] pushed onto [env], additionally reporting
           shadowings occurring among [vars] themselves. *)
        let shadows' (type a) ~env vars (x : a) next =
          (* account for shadowing within `vars` *)
          List.filter ~f:(id_shadows ~env:vars) (List.rev vars)
          |> Set.of_list (module Local_ident)
          |> Set.union (next (vars @ env) x)

        (* Like [shadows'], with the newly bound variables given as the
           variables of a list of patterns. *)
        let shadows (type a) ~(env : env) (pats : pat list) (x : a)
            (next : env -> a -> Sets.Local_ident.t) =
          let vars =
            List.map pats ~f:(collect_local_idents#visit_pat ())
            |> Set.(union_list (module Local_ident) >> to_list)
          in
          shadows' ~env vars x next
      end

      (** Rust macros are hygienic: even if a macro introduces a name that
          already exists in scope, the compiler will not shadow it. Instead, it
          will track and differentiate the two, even if those have the same
          name. `collect_ambiguous_local_idents` is a visitor that collects such
          "fake" shadowings. *)
      let collect_ambiguous_local_idents =
        object (self)
          inherit [_] Visitors.reduce as super
          inherit [_] Sets.Local_ident.monoid as _m

          method! visit_arm' env { arm_pat; body; guard } =
            (* The arm pattern binds in the body; with an if-let guard, the
               guard's pattern additionally binds in the body. *)
            match guard with
            | None -> shadows ~env [ arm_pat ] body super#visit_expr
            | Some { guard = IfLet { lhs; rhs; _ }; _ } ->
                shadows ~env [ arm_pat ] rhs super#visit_expr
                ++ shadows ~env [ arm_pat; lhs ] body super#visit_expr

          method! visit_expr' env e =
            match e with
            | Let { monadic = _; lhs; rhs; body } ->
                (* [lhs] scopes over [body] only, not over [rhs]. *)
                super#visit_expr env rhs
                ++ shadows ~env [ lhs ] body super#visit_expr
            | Loop { kind; state; body; _ } ->
                let empty = Set.empty (module Local_ident) |> Fn.(id &&& id) in
                (* For each loop kind, compute the pair (identifiers the kind
                   introduces, result of visiting its sub-expressions). *)
                let ikind, ukind =
                  match kind with
                  | UnconditionalLoop -> empty
                  | WhileLoop { condition; _ } ->
                      ( collect_local_idents#visit_expr () condition,
                        super#visit_expr env condition )
                  | ForLoop { pat; it; _ } ->
                      ( collect_local_idents#visit_pat () pat,
                        super#visit_expr env it )
                  | ForIndexLoop { start; end_; var; _ } ->
                      ( Set.singleton (module Local_ident) var,
                        super#visit_expr (var :: env) start
                        ++ super#visit_expr (var :: env) end_ )
                in
                (* Likewise for the loop state; its initializer sees the
                   identifiers introduced by the loop kind. *)
                let istate, ustate =
                  match state with
                  | Some { init; bpat; _ } ->
                      ( collect_local_idents#visit_pat () bpat,
                        super#visit_expr (Set.to_list ikind @ env) init )
                  | _ -> empty
                in
                let intro = ikind ++ istate |> Set.to_list in
                ukind ++ ustate ++ shadows' ~env intro body super#visit_expr
            | Closure { params; body; _ } ->
                shadows ~env params body super#visit_expr
            | _ -> super#visit_expr' env e

          method! visit_impl_item' env ii =
            match ii with
            | IIFn { body; params } -> self#visit_function_like env body params
            | _ -> super#visit_impl_item' env ii

          method! visit_item' env i =
            match i with
            | Fn { body; params; _ } -> self#visit_function_like env body params
            | _ -> super#visit_item' env i

          (* Function parameters bind in the function body. *)
          method visit_function_like env body params =
            let f p = p.pat in
            shadows ~env (List.map ~f params) body super#visit_expr

          (* Report [id] itself when it is an ambiguous shadowing. *)
          method! visit_local_ident env id =
            Set.(if id_shadows ~env id then Fn.flip singleton id else empty)
              (module Local_ident)
        end

      (** Rust macros are hygienic: even if a macro introduces a name that
          already exists in scope, the compiler will not shadow it. Instead, it
          will track and differentiate the two, even if those have the same
          name. `disambiguate_local_idents item` renames every instance of such
          a "fake" shadowing in `item`. See PR #368 for an example. *)
      let disambiguate_local_idents (item : item) =
        let ambiguous = collect_ambiguous_local_idents#visit_item [] item in
        let local_vars = collect_local_idents#visit_item () item |> ref in
        (* Produces a fresh name [id.name ^ "_" ^ n], with [n] one above the
           largest such numeric suffix already present in [env]. *)
        let refresh env (id : Local_ident.t) : string =
          let extract_suffix (id' : Local_ident.t) =
            String.chop_prefix ~prefix:(id.name ^ "_") id'.name
            |> Option.bind ~f:string_to_int
          in
          let suffix =
            Set.filter_map (module Int) env ~f:extract_suffix
            |> Set.max_elt |> Option.value ~default:0 |> ( + ) 1
          in
          id.name ^ "_" ^ Int.to_string suffix
        in
        (* Map each ambiguous identifier to a fresh one, registering every
           fresh name so later renamings cannot collide with it. *)
        let new_names =
          ambiguous |> Set.to_list
          |> List.map ~f:(fun (var : Local_ident.t) ->
                 let var' = { var with name = refresh !local_vars var } in
                 local_vars := Set.add !local_vars var';
                 (var, var'))
          |> Map.of_alist_exn (module Local_ident)
        in
        let rename var = Map.find new_names var |> Option.value ~default:var in
        (Mappers.rename_local_idents rename)#visit_item () item
    end

    (** Collects every global identifier occurring in the visited AST node. *)
    let collect_global_idents =
      object
        inherit [_] Visitors.reduce as _super
        inherit [_] Sets.Global_ident.monoid as _m

        method! visit_global_ident (_env : unit) (x : Global_ident.t) =
          Set.singleton (module Global_ident) x
      end

    (** Collects every concrete identifier occurring in the visited AST node,
        both bare [Concrete_ident.t]s and [`Concrete] global identifiers. *)
    let collect_concrete_idents =
      object
        inherit [_] Visitors.reduce as super
        inherit [_] Sets.Concrete_ident.monoid as _m

        method! visit_global_ident (_env : unit) (x : Global_ident.t) =
          match x with
          | `Concrete x -> Set.singleton (module Concrete_ident) x
          | _ -> super#visit_global_ident () x

        method! visit_concrete_ident (_env : unit) (x : Concrete_ident.t) =
          Set.singleton (module Concrete_ident) x
      end

    (** The set of variables bound by a pattern (its [PBinding] nodes). *)
    let variables_of_pat (p : pat) : Sets.Local_ident.t =
      (object
         inherit [_] Visitors.reduce as super
         inherit [_] Sets.Local_ident.monoid as m

         method! visit_pat' env pat' =
           match pat' with
           | PBinding { var; subpat; _ } ->
               m#plus
                 (Set.singleton (module Local_ident) var)
                 (Option.value_map subpat ~default:m#zero
                    ~f:(fst >> super#visit_pat env))
           | _ -> super#visit_pat' env pat'
      end)
        #visit_pat
        () p

    (** The variables bound by a parameter's pattern. *)
    let variables_of_param (p : param) : Local_ident.t list =
      variables_of_pat p.pat |> Set.to_list

    (** The union of the variables bound by a list of patterns. *)
    let variables_of_pats : pat list -> Sets.Local_ident.t =
      List.map ~f:variables_of_pat >> Set.union_list (module Local_ident)

    (** Removes from [mut_vars] every entry whose identifier is in [vars]. *)
    let without_vars (mut_vars : Sets.TypedLocalIdent.t)
        (vars : Sets.Local_ident.t) =
      Set.filter mut_vars ~f:(fst >> Set.mem vars >> not)

    (** Removes from [mut_vars] the variables bound by the given patterns. *)
    let without_pats_vars (mut_vars : Sets.TypedLocalIdent.t) :
        pat list -> Sets.TypedLocalIdent.t =
      variables_of_pats >> without_vars mut_vars

    (** Removes from [mut_vars] the variables bound by [pat]. *)
    let without_pat_vars (mut_vars : Sets.TypedLocalIdent.t) (pat : pat) :
        Sets.TypedLocalIdent.t =
      without_pats_vars mut_vars [ pat ]

    (** Computes the (typed) variables assigned to but not bound within the
        visited expression — the mutations escaping to the enclosing scope.
        [fv_of_arbitrary_lhs] handles backend-specific arbitrary LHSs. *)
    let free_assigned_variables
        (fv_of_arbitrary_lhs :
          F.arbitrary_lhs -> expr -> Sets.TypedLocalIdent.t) =
      object (self)
        inherit [_] Visitors.reduce as super
        inherit [_] Sets.TypedLocalIdent.monoid as m

        (* TODO: loop state *)

        method! visit_expr' () e =
          match e with
          | Assign { lhs; e; _ } ->
              (* Walk the LHS down to the root variable being assigned,
                 collecting variables from index sub-expressions on the way. *)
              let rec visit_lhs lhs =
                match lhs with
                | LhsLocalVar { var; _ } ->
                    Set.singleton (module TypedLocalIdent) (var, e.typ)
                | LhsFieldAccessor { e; _ } | LhsVecRef { e; _ } -> visit_lhs e
                | LhsArrayAccessor { e; index; _ } ->
                    Set.union (self#visit_expr () index) (visit_lhs e)
                | LhsArbitraryExpr { witness; e } ->
                    fv_of_arbitrary_lhs witness e
              in
              visit_lhs lhs
          | Match { scrutinee; arms } ->
              List.fold_left ~init:(self#visit_expr () scrutinee) ~f:Set.union
              @@ List.map ~f:(fun arm -> self#visit_arm () arm) arms
          | Let { lhs = pat; rhs = expr; body; _ } ->
              (* Variables bound by [pat] are local to [body]. *)
              Set.union (self#visit_expr () expr)
              @@ without_pat_vars (self#visit_expr () body) pat
          | Closure { params; body; _ } ->
              without_pats_vars (self#visit_expr () body) params
          | Loop { body; kind; state; _ } ->
              (* Variables introduced by the loop itself are not free in
                 its body. *)
              let vars =
                (match kind with
                | UnconditionalLoop -> []
                | WhileLoop _ -> []
                | ForLoop { pat = _not_mutable; _ } -> []
                | ForIndexLoop { var = _not_mutable; _ } -> [])
                @ (state
                  |> Option.map ~f:(fun { bpat; _ } -> variables_of_pat bpat)
                  |> Option.to_list)
                |> Set.union_list (module Local_ident)
              in
              m#plus
                (self#visit_loop_kind () kind)
                (m#plus
                   (Option.map ~f:(self#visit_loop_state ()) state
                   |> Option.value ~default:m#zero)
                   (without_vars (self#visit_expr () body) vars))
          | _ -> super#visit_expr' () e

        method! visit_arm' () { arm_pat; body; guard } =
          match guard with
          | Some { guard = IfLet { lhs; rhs; _ }; _ } ->
              let rhs_vars =
                without_pat_vars (self#visit_expr () rhs) arm_pat
              in
              let body_vars =
                without_pats_vars (self#visit_expr () body) [ arm_pat; lhs ]
              in
              Set.union rhs_vars body_vars
          | None -> without_pat_vars (self#visit_expr () body) arm_pat
      end

    (** List-concatenation monoid, for reduce visitors accumulating lists. *)
    class ['s] expr_list_monoid =
      object
        method private zero = []
        method private plus = List.append
      end

    (** Collects the payloads of [Break]s belonging to the *current* loop;
        nested loops are deliberately not traversed. *)
    let collect_break_payloads =
      object (self)
        inherit [_] Visitors.reduce as super
        inherit [_] expr_list_monoid as _m

        method! visit_expr' () e =
          match e with
          | Break { e; _ } -> self#plus (self#visit_expr () e) [ e ]
          | Loop _ ->
              (* Do *NOT* visit sub nodes *)
              self#zero
          | _ -> super#visit_expr' () e
      end

    (** Collects every attribute occurring in the visited AST node. *)
    let collect_attrs =
      object (_self)
        inherit [_] Visitors.reduce
        inherit [_] expr_list_monoid
        method! visit_attrs () attrs = attrs
      end
  end

  (** Produces a local identifier which is locally fresh with respect to
      variables [{vars}]. *)
  let fresh_local_ident_in (vars : local_ident list) (prefix : string) :
      Local_ident.t =
    (* Numeric suffixes already taken for [prefix] among [vars]; a bare
       occurrence of [prefix] itself counts as suffix 0. *)
    let used_suffixes =
      List.filter_map vars ~f:(fun ({ name; _ } : local_ident) ->
          match String.chop_prefix ~prefix name with
          | Some "" -> Some "0"
          | rest -> rest)
      |> List.filter_map ~f:Stdlib.int_of_string_opt
    in
    (* One above the largest used suffix; 0 (i.e. bare [prefix]) when none. *)
    let next = 1 + List.fold used_suffixes ~init:(-1) ~f:Int.max in
    let free_suffix = if Int.equal next 0 then "" else Int.to_string next in
    {
      name = prefix ^ free_suffix;
      id =
        (* TODO: freshness is local and name-only here... *)
        Local_ident.mk_id Expr (-1);
    }

  (** Produces a local identifier which is locally fresh with respect to
      expressions [{exprs}]. *)
  let fresh_local_ident_in_expr (exprs : expr list) (prefix : string) :
      Local_ident.t =
    (* Gather every local identifier occurring in [exprs], then delegate. *)
    let vars =
      List.map exprs ~f:(Reducers.collect_local_idents#visit_expr ())
      |> Set.union_list (module Local_ident)
      |> Set.to_list
    in
    fresh_local_ident_in vars prefix

  (** Rust's never type [!], encoded as the marker type
      [Rust_primitives::hax::Never]. *)
  let never_typ : ty =
    TApp
      {
        ident =
          `Concrete
            (Concrete_ident.of_name ~value:false Rust_primitives__hax__Never);
        args = [];
      }

  (** Recognizes the encoding produced by [never_typ]. *)
  let is_never_typ (t : ty) : bool =
    match t with
    | TApp { ident; _ } ->
        Global_ident.eq_name Rust_primitives__hax__Never ident
    | _ -> false

  (** The unit type, encoded as the 0-ary tuple type. *)
  let unit_typ : ty = TApp { ident = `TupleType 0; args = [] }

  (** The unit value [()] at [span], encoded as the 0-ary tuple constructor. *)
  let unit_expr span : expr =
    { typ = unit_typ; span; e = GlobalVar (`TupleCons 0) }

  (* TODO: Those tuple1 things are wrong! Tuples of size one exists in Rust! e.g. `(123,)` *)
  (** Recursively unwraps 1-ary tuple wrappers around a pattern. *)
  let rec remove_tuple1_pat (p : pat) : pat =
    match p.p with
    | PConstruct { constructor = `TupleType 1; fields = [ { pat; _ } ]; _ } ->
        remove_tuple1_pat pat
    | _ -> p

  (** Recursively unwraps 1-ary tuple types. *)
  let rec remove_tuple1 (t : ty) : ty =
    match t with
    | TApp { ident = `TupleType 1; args = [ GType t ] } -> remove_tuple1 t
    | _ -> t

  (** Removes one [Rust_primitives::unsize] coercion at the head of [e],
      if present; otherwise returns [e] unchanged. *)
  let remove_unsize (e : expr) : expr =
    match e.e with
    | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }
      when Global_ident.eq_name Rust_primitives__unsize f ->
        e
    | _ -> e

  (** See [beta_reduce_closure]'s documentation. Returns [None] when [e] is
      not an application of a closure with simple binders. *)
  let beta_reduce_closure_opt (e : expr) : expr option =
    let* f, args, _, _, _ = Expect.app e in
    let* pats, body = Expect.closure f in
    (* Only simple (plain, non-nested) binding patterns can be substituted. *)
    let* vars = List.map ~f:Expect.pbinding_simple pats |> sequence in
    let vars = List.map ~f:fst vars in
    let replacements =
      List.zip_exn vars args |> Map.of_alist_exn (module Local_ident)
    in
    Some ((Mappers.replace_local_variables replacements)#visit_expr () body)

  (** Reduces a [(|x1, ..., xN| body)(e1, ..., eN)] to
      [body[x1/e1, ..., xN/eN]]. This assumes the arities are right:
      [(|x, y| ...)(e1)]. *)
  let beta_reduce_closure (e : expr) : expr =
    match beta_reduce_closure_opt e with Some reduced -> reduced | None -> e

  (** Is [t] the unit type, up to 1-ary tuple unwrapping? *)
  let is_unit_typ (t : ty) : bool =
    match remove_tuple1 t with
    | TApp { ident = `TupleType 0; _ } -> true
    | _ -> false

  (** Syntactic check: does pattern [p] exactly mirror expression [e]?
      Used to elide trivial lets such as [let x = rhs in x] (see [make_let]).
      1-ary tuple constructors on the expression side are looked through. *)
  let rec pat_is_expr (p : pat) (e : expr) =
    match (p.p, e.e) with
    | _, Construct { constructor = `TupleCons 1; fields = [ (_, e) ]; _ } ->
        pat_is_expr p e
    | PBinding { subpat = None; var = pv; _ }, LocalVar ev ->
        [%eq: local_ident] pv ev
    | ( PConstruct { constructor = pn; fields = pargs; _ },
        Construct { constructor = en; fields = eargs; base = None; _ } )
      when [%eq: global_ident] pn en -> (
        (* Same constructor on both sides: compare fields pairwise. *)
        match List.zip pargs eargs with
        | Ok zip ->
            List.for_all
              ~f:(fun (x, y) ->
                [%eq: global_ident] x.field (fst y) && pat_is_expr x.pat (snd y))
              zip
        | Unequal_lengths -> false)
    | _ -> false

  (** Builds [let lhs = rhs in body], simplified to just [rhs] when [body]
      syntactically mirrors [lhs] (eta-reduction of lets). *)
  let make_let (lhs : pat) (rhs : expr) (body : expr) =
    if pat_is_expr lhs body then rhs
    else { body with e = Let { monadic = None; lhs; rhs; body } }

  (** Folds [make_let] over a list of bindings, the first binding ending up
      outermost. *)
  let make_lets (lbs : (pat * expr) list) (body : expr) =
    List.fold_right lbs ~init:body ~f:(fun (pat, expr) acc ->
        make_let pat expr acc)

  (** An immutable, by-value binding pattern for variable [var] of type
      [typ]. *)
  let make_var_pat (var : local_ident) (typ : ty) (span : span) : pat =
    {
      p = PBinding { mut = Immutable; mode = ByValue; var; typ; subpat = None };
      span;
      typ;
    }

  (** Structural equality on types, ignoring spans. *)
  let ty_equality (a : ty) (b : ty) : bool =
    (* Normalize every span to the default one before comparing. *)
    let strip_spans =
      object
        inherit [_] Visitors.map
        method! visit_span _ = function _ -> Span.default
      end
    in
    [%eq: ty] (strip_spans#visit_ty () a) (strip_spans#visit_ty () b)

  (** [let_of_binding (var, rhs) body] builds [let var = rhs in body]. *)
  let let_of_binding ((var, rhs) : local_ident * expr) (body : expr) : expr =
    make_let (make_var_pat var rhs.typ rhs.span) rhs body

  (** Folds [let_of_binding] over a list of bindings, first binding
      outermost. *)
  let lets_of_bindings (bindings : (local_ident * expr) list) (body : expr) :
      expr =
    List.fold_right ~init:body ~f:let_of_binding bindings

  (** Builds the tuple type over [tuple], even when [tuple] is a singleton. *)
  let make_tuple_typ' (tuple : ty list) : ty =
    let args = List.map tuple ~f:(fun typ -> GType typ) in
    TApp { ident = `TupleType (List.length tuple); args }

  (** Like [make_tuple_typ'], but a singleton list is just the type itself. *)
  let make_tuple_typ (tuple : ty list) : ty =
    match tuple with [ ty ] -> ty | _ -> make_tuple_typ' tuple

  (** A wildcard parameter of unit type, carrying no attributes. *)
  let make_unit_param (span : span) : param =
    let typ = unit_typ in
    let pat = M.pat_PWild ~typ ~span in
    { pat; typ; typ_span = None; attrs = [] }

  (** Sequences two expressions: [let _ = e1 in e2]. *)
  let make_seq (e1 : expr) (e2 : expr) : expr =
    make_let (M.pat_PWild ~typ:e1.typ ~span:e1.span) e1 e2

  (** Builds the [nth] field (0-indexed) of a [len]-ary tuple pattern.
      Tuple fields are [(index, arity)] pairs and indices are 0-based
      everywhere in this module (see [make_tuple_pat'] and
      [tuple_projector]), so [nth] is passed through unchanged. *)
  let make_tuple_field_pat (len : int) (nth : int) (pat : pat) : field_pat =
    { field = `TupleField (nth, len); pat }

  (** Builds a tuple pattern out of explicit field patterns; a singleton is
      collapsed to its sole sub-pattern. *)
  let make_tuple_pat'' span (tuple : field_pat list) : pat =
    match tuple with
    | [ { pat; _ } ] -> pat
    | _ ->
        let len = List.length tuple in
        {
          p =
            PConstruct
              {
                constructor = `TupleCons len;
                is_record = false;
                is_struct = true;
                fields = tuple;
              };
          typ = make_tuple_typ @@ List.map ~f:(fun { pat; _ } -> pat.typ) tuple;
          span;
        }

  (** Builds a tuple pattern from sub-patterns, numbering fields from 0 and
      spanning the union of the sub-patterns' spans. *)
  let make_tuple_pat' (pats : pat list) : pat =
    let len = List.length pats in
    let span = Span.union_list @@ List.map ~f:(fun p -> p.span) pats in
    List.mapi ~f:(fun i pat -> { field = `TupleField (i, len); pat }) pats
    |> make_tuple_pat'' span

  (** Like [make_tuple_pat'], but a singleton list is the pattern itself. *)
  let make_tuple_pat : pat list -> pat = function
    | [ pat ] -> pat
    | pats -> make_tuple_pat' pats

  (** Builds the tuple expression over [tuple], numbering fields from 0, even
      when [tuple] is a singleton. *)
  let make_tuple_expr' ~(span : span) (tuple : expr list) : expr =
    let len = List.length tuple in
    {
      e =
        Construct
          {
            constructor = `TupleCons len;
            is_record = false;
            is_struct = true;
            fields =
              List.mapi ~f:(fun i x -> (`TupleField (i, len), x)) @@ tuple;
            base = None;
          };
      typ = make_tuple_typ @@ List.map ~f:(fun { typ; _ } -> typ) tuple;
      span;
    }

  (** Like [make_tuple_expr'], but a singleton list is the expression
      itself. *)
  let make_tuple_expr ~(span : span) : expr list -> expr = function
    | [ e ] -> e
    | es -> make_tuple_expr' ~span es

  (* maybe we should just drop Construct in favor of a
     [Record] thing, and put everything which is not a Record
       into an App. This would simplify stuff quite much. Maybe not
       for LHS things. *)
  (** Builds a [Construct] node applying [constructor] to [args], labelling
      each argument with its positional tuple field. *)
  let call_Constructor' (constructor : global_ident) is_struct
      (args : expr list) span ret_typ =
    let mk_field =
      let len = List.length args in
      (* Tuple fields are [(index, arity)] pairs — consistent with
         [make_tuple_expr'] and [tuple_projector]. *)
      fun n -> `TupleField (n, len)
    in
    let fields = List.mapi ~f:(fun i arg -> (mk_field i, arg)) args in
    {
      e =
        Construct
          { constructor; is_record = false; is_struct; fields; base = None };
      typ = ret_typ;
      span;
    }

  (** Like [call_Constructor'], resolving the constructor from its
      [Concrete_ident] name. *)
  let call_Constructor (constructor_name : Concrete_ident.name)
      (is_struct : bool) (args : expr list) span ret_typ =
    call_Constructor'
      (`Concrete (Concrete_ident.of_name ~value:true constructor_name))
      is_struct args span ret_typ

  (** Builds an application of the global function [f] to [args],
      synthesizing an arrow type for [f] from the argument types and
      [ret_typ]. [?impl] (with [?impl_generic_args]) records the trait
      implementation the call goes through, if any. *)
  let call' ?impl f ?(generic_args = []) ?(impl_generic_args = [])
      (args : expr list) span ret_typ =
    let typ = TArrow (List.map ~f:(fun arg -> arg.typ) args, ret_typ) in
    let e = GlobalVar f in
    {
      e =
        App
          {
            f = { e; typ; span };
            args;
            generic_args;
            bounds_impls = [];
            trait = Option.map ~f:(fun impl -> (impl, impl_generic_args)) impl;
          };
      typ = ret_typ;
      span;
    }

  (** Like [call'], resolving the function from its [Concrete_ident] name. *)
  let call ?(generic_args = []) ?(impl_generic_args = []) ?impl
      (f_name : Concrete_ident.name) (args : expr list) span ret_typ =
    call' ?impl ~generic_args ~impl_generic_args
      (`Concrete (Concrete_ident.of_name ~value:true f_name))
      args span ret_typ

  (** Builds a closure expression; a parameterless closure gets a single
      wildcard parameter of unit type. *)
  let make_closure (params : pat list) (body : expr) (span : span) : expr =
    let params =
      if List.is_empty params then [ M.pat_PWild ~typ:unit_typ ~span ]
      else params
    in
    let typ = TArrow (List.map params ~f:(fun p -> p.typ), body.typ) in
    { e = Closure { params; body; captures = [] }; typ; span }

  (** A string literal expression of type [TStr]. *)
  let string_lit span (s : string) : expr =
    { span; typ = TStr; e = Literal (String s) }

  (** Builders and destructors for the [Rust_primitives::hax::failure]
      markers hax inserts when extraction of a fragment fails. *)
  module HaxFailure = struct
    module Build = struct
      (* A failure pattern carrying [msg] as its string payload. *)
      let pat span (typ : ty) (msg : string) : pat =
        let (module M) = M.make span in
        let constructor =
          Global_ident.of_name ~value:true Rust_primitives__hax__Failure__Ctor
        in
        let pat = M.pat_PConstant ~typ ~lit:(String msg) in
        let fields = [ { field = constructor; pat } ] in
        M.pat_PConstruct ~typ ~is_record:false ~is_struct:true ~constructor
          ~fields

      (* A call to [Rust_primitives::hax::failure], carrying the error
         message and the printed AST of the offending fragment. *)
      let expr span (typ : ty) (error : string) (ast : string) =
        let args = List.map ~f:(string_lit span) [ error; ast ] in
        call Rust_primitives__hax__failure args span typ

      (* The [Failure] type applied to [payload] as a const generic
         argument.
         NOTE(review): the literal is built with [~typ:TBool] although the
         payload is a [String] — confirm this mismatch is intentional. *)
      let ty (payload : string) =
        let ident =
          `Concrete
            (Concrete_ident.of_name ~value:false Rust_primitives__hax__Failure)
        in
        let (module M) = M.make (Span.dummy ()) in
        let payload = M.expr_Literal ~typ:TBool (String payload) in
        TApp { ident; args = [ GConst payload ] }
    end

    open struct
      (* Extracts the payload of a string literal expression, if any. *)
      let destruct_str_lit e =
        let* l = D.expr_Literal e in
        match l with String s -> Some s | _ -> None
    end

    module Destruct = struct
      (* Inverse of [Build.pat]: recovers the failure message. *)
      let pat (p : pat) : string option =
        let* p = D.pat_PConstruct p in
        let*? () =
          Global_ident.eq_name Rust_primitives__hax__Failure__Ctor p.constructor
        in
        let* { pat; _ } = D.list_1 p.fields in
        let* s = D.pat_PConstant pat in
        match s.lit with String s -> Some s | _ -> None

      (* Inverse of [Build.expr]: recovers [(error, ast)]. *)
      let expr (e : expr) : (string * string) option =
        let* app = D.expr_App e in
        let* id = D.expr_GlobalVar app.f in
        let*? _ = Global_ident.eq_name Rust_primitives__hax__failure id in
        let* x, y = D.list_2 app.args in

        let* x = destruct_str_lit x in
        let* y = destruct_str_lit y in
        Some (x, y)

      (* Inverse of [Build.ty]: recovers the payload string. *)
      let ty (t : ty) : string option =
        match t with
        | TApp { ident; args = [ GConst payload ] }
          when Global_ident.eq_name Rust_primitives__hax__Failure ident ->
            destruct_str_lit payload
        | _ -> None
    end
  end

  (** Builds a failure expression from a diagnostic context/kind and the
      textual rendering [ast] of the offending fragment; [ast] is whitespace
      normalized and truncated to 200 characters. *)
  let hax_failure_expr' span (typ : ty) (context, kind) (ast : string) =
    let ast =
      (* Remove consecutive white spaces *)
      String.split ~on:' ' ast
      |> List.filter ~f:(String.is_empty >> not)
      |> String.concat ~sep:" "
    in
    let ast =
      (* Keep the embedded AST snippet short. *)
      if String.length ast > 200 then String.sub ~pos:0 ~len:200 ast ^ "..."
      else ast
    in
    let error = Diagnostics.pretty_print_context_kind context kind in
    HaxFailure.Build.expr span typ error ast

  (** Like [hax_failure_expr'], rendering the offending expression itself. *)
  let hax_failure_expr span (typ : ty) (context, kind) (expr0 : Ast.Full.expr) =
    hax_failure_expr' span typ (context, kind) (Print_rust.pexpr_str expr0)

  (** Coercions from the current AST to the full-featured [Ast.Full] AST.
      NOTE(review): these use [Obj.magic], presumably sound because the
      feature parameters do not affect the runtime representation of AST
      values — confirm before relying on it elsewhere. *)
  module LiftToFullAst = struct
    let expr : AST.expr -> Ast.Full.expr = Stdlib.Obj.magic
    let ty : AST.ty -> Ast.Full.ty = Stdlib.Obj.magic
    let item : AST.item -> Ast.Full.item = Stdlib.Obj.magic
  end

  module Debug : sig
    val expr : ?label:string -> AST.expr -> unit
    (** Prints an expression pretty-printed as Rust, with its full AST encoded
        as JSON, available as a file, so that one can `jless` or `jq` into it.
    *)

    val item' : ?label:string -> AST.item -> string
    (** Like [item], but returns the message instead of printing it. *)

    val item : ?label:string -> AST.item -> unit
  end = struct
    let expr ?(label = "") (e : AST.expr) : unit =
      (* Dump the JSON-encoded AST to a temp file for later inspection. *)
      let path = tempfile_path ~suffix:".json" in
      Core.Out_channel.write_all path
        ~data:([%yojson_of: AST.expr] e |> Yojson.Safe.pretty_to_string);
      let e = LiftToFullAst.expr e in
      "```rust " ^ label ^ "\n" ^ Print_rust.pexpr_str e
      ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path
      ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)"
      |> Stdio.prerr_endline

    let item' ?(label = "") (e : AST.item) : string =
      (* Same as [expr], for items; builds the message without printing. *)
      let path = tempfile_path ~suffix:".json" in
      Core.Out_channel.write_all path
        ~data:([%yojson_of: AST.item] e |> Yojson.Safe.pretty_to_string);
      let e = LiftToFullAst.item e in
      "```rust " ^ label ^ "\n" ^ Print_rust.pitem_str e
      ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path
      ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)"

    let item ?(label = "") (e : AST.item) =
      item' ~label e |> Stdio.prerr_endline
  end

  (** If [e] is a box-introducing call ([Box::new] or hax's [box_new]),
      applies [next] to its argument; otherwise returns [e] unchanged. *)
  let unbox_expr' (next : expr -> expr) (e : expr) : expr =
    match e.e with
    | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }
      when Global_ident.eq_name Alloc__boxed__Impl__new f
           || Global_ident.eq_name Rust_primitives__hax__box_new f ->
        next e
    | _ -> e

  (** If [e] is a dereference, applies [next] to the dereferenced
      expression; otherwise returns [e] unchanged. *)
  let underef_expr' (next : expr -> expr) (e : expr) : expr =
    match e.e with
    | App
        {
          f = { e = GlobalVar (`Primitive Ast.Deref); _ };
          args = [ e ];
          generic_args = _;
          bounds_impls = _;
          trait = _;
        } ->
        next e
    | _ -> e

  (** Peels every reference type constructor off [t]. *)
  let rec unref_ty (t : ty) : ty =
    match t with
    | TRef { typ; _ } -> unref_ty typ
    | _ -> t

  (** Recursively removes every box introduction at the head of [e]. *)
  let rec unbox_expr e = unbox_expr' unbox_expr e

  (** Removes one dereference, then unboxes underneath.
      NOTE(review): the continuation here is [unbox_expr], not a recursive
      [underef_expr], so nested derefs are not stripped — confirm this is
      intentional. *)
  let underef_expr e = underef_expr' unbox_expr e

  (** Recursively removes every box introduction and dereference at the head
      of [e]. *)
  let rec unbox_underef_expr e =
    (unbox_expr' unbox_underef_expr >> underef_expr' unbox_underef_expr) e

  (* extracts a `param` out of a `generic_param` if it's a const
     generic, otherwise returns `None`` *)
  let param_of_generic_const_param (g : generic_param) : param option =
    let* typ = match g.kind with GPConst { typ } -> Some typ | _ -> None in
    let ({ span; ident = var; _ } : generic_param) = g in
    (* Bind the const generic's identifier immutably, by value. *)
    let pat =
      let mode, mut, subpat = (ByValue, Immutable, None) in
      { p = PBinding { mut; mode; var; typ; subpat }; span; typ }
    in
    Some { pat; typ; typ_span = Some span; attrs = [] }

  (** The polymorphic-variant kind tag of an item. *)
  let kind_of_item (item : item) : item_kind =
    match item.v with
    | Fn _ -> `Fn
    | TyAlias _ -> `TyAlias
    | Type _ -> `Type
    | IMacroInvokation _ -> `IMacroInvokation
    | Trait _ -> `Trait
    | Impl _ -> `Impl
    | Alias _ -> `Alias
    | Use _ -> `Use
    | Quote _ -> `Quote
    | HaxError _ -> `HaxError
    | NotImplementedYet -> `NotImplementedYet

  (** Converts an assignment left-hand side back into the expression it
      denotes (field accesses become projector applications, array accesses
      become [Index::index] calls). *)
  let rec expr_of_lhs (span : span) (lhs : lhs) : expr =
    match lhs with
    | LhsLocalVar { var; typ } -> { e = LocalVar var; typ; span }
    | LhsVecRef { e; _ } -> expr_of_lhs span e
    | LhsFieldAccessor { e; typ; field; _ } ->
        (* A field access is an application of the field's projector. *)
        let e = expr_of_lhs span e in
        let f = { e = GlobalVar field; typ = TArrow ([ e.typ ], typ); span } in
        {
          e =
            App
              {
                f;
                args = [ e ];
                generic_args = [];
                bounds_impls = [];
                trait = None (* TODO: see issue #328 *);
              };
          typ;
          span;
        }
    | LhsArrayAccessor { e; typ; index; _ } ->
        let args = [ expr_of_lhs span e; index ] in
        call Core__ops__index__Index__index args span typ
    | LhsArbitraryExpr { e; _ } -> e

  (* module Box = struct *)
  (*   module Ty = struct *)
  (*     let destruct (t : ty) : ty option = *)
  (*       match t with *)
  (*       | TApp { ident = `Concrete box; args = [ GType sub; _alloc ] } *)
  (*         when Concrete_ident.eq_name Alloc__boxed__Box box -> *)
  (*           Some sub *)
  (*       | _ -> None *)

  (*     let alloc_ty = *)
  (*       TApp *)
  (*         { *)
  (*           ident = `Concrete (Concrete_ident.of_name Type Alloc__alloc__Global); *)
  (*           args = []; *)
  (*         } *)

  (*     let make (t : ty) : ty = *)
  (*       let ident = `Concrete (Concrete_ident.of_name Type Alloc__boxed__Box) in *)
  (*       TApp { ident; args = [ GType t; GType alloc_ty ] } *)
  (*   end *)

  (*   module Expr = struct *)
  (*     let destruct (e : expr) : expr option = *)
  (*       match e.e with *)
  (*       | App { f = { e = GlobalVar (`Primitive Box); _ }; args = [ arg ] } -> *)
  (*           Some arg *)
  (*       | _ -> None *)

  (*     let make (e : expr) : expr = *)
  (*       let boxed_ty = Ty.make e.typ in *)
  (*       let f_ty = TArrow ([ e.typ ], boxed_ty) in *)
  (*       let f = { e with typ = f_ty; e = GlobalVar (`Primitive Box) } in *)
  (*       { e with typ = boxed_ty; e = App { f; args = [ e ] } } *)
  (*   end *)
  (* end *)

  (** Splits a chain of nested lets into its list of bindings (each paired
      with the type of the corresponding let expression) and the innermost
      body. *)
  let rec collect_let_bindings' (e : expr) : (pat * expr * ty) list * expr =
    match e.e with
    | Let { monadic = _; lhs; rhs; body } ->
        let bindings, body = collect_let_bindings' body in
        ((lhs, rhs, e.typ) :: bindings, body)
    | _ -> ([], e)

  (** Like [collect_let_bindings'], dropping the per-let types after
      asserting they all agree; the common type is pushed onto the returned
      body. *)
  let collect_let_bindings (e : expr) : (pat * expr) list * expr =
    let bindings, body = collect_let_bindings' e in
    let types = List.map ~f:thd3 bindings in
    (* All nested let expressions should share a single type. *)
    assert (
      match (List.drop_last types, types) with
      | Some init, _ :: tl ->
          List.zip_exn init tl |> List.for_all ~f:(uncurry [%eq: ty])
      | _ -> true);
    (* TODO: injecting the type of the lets in the body is bad.
       We should stay closer to Rust's inference.
       Here, we lose a bit of information.
    *)
    let body =
      { body with typ = List.hd types |> Option.value ~default:body.typ }
    in
    (List.map ~f:(fun (p, e, _) -> (p, e)) bindings, body)

  (** Applies [f] to the innermost body of a chain of nested [Let]s,
      preserving all the bindings along the way. *)
  let rec map_body_of_nested_lets (f : expr -> expr) (e : expr) : expr =
    match e.e with
    | Let { monadic; lhs; rhs; body } ->
        let body = map_body_of_nested_lets f body in
        { e with e = Let { monadic; lhs; rhs; body } }
    | _ -> f e

  (** The projector function extracting the [nth] component (0-indexed) of a
      [len]-ary tuple of type [tuple_typ]. *)
  let tuple_projector span (tuple_typ : ty) (len : int) (nth : int)
      (type_at_nth : ty) : expr =
    {
      span;
      (* TODO: require a span here *)
      typ = TArrow ([ tuple_typ ], type_at_nth);
      e = GlobalVar (`Projector (`TupleField (nth, len)));
    }

  (** Applies the [nth] tuple projector (see [tuple_projector]) to [tuple]. *)
  let project_tuple (tuple : expr) (len : int) (nth : int) (type_at_nth : ty) :
      expr =
    {
      span = tuple.span;
      typ = type_at_nth;
      e =
        App
          {
            f = tuple_projector tuple.span tuple.typ len nth type_at_nth;
            args = [ tuple ];
            generic_args = [] (* TODO: see issue #328 *);
            bounds_impls = [];
            trait = None (* TODO: see issue #328 *);
          };
    }

  (** Concatenates the generics [g1] and [g2], making sure lifetimes appear
      first *)
  let concat_generics (g1 : generics) (g2 : generics) : generics =
    let all_params = g1.params @ g2.params in
    (* [partition_tf] is stable: relative order inside each group is kept. *)
    let lifetimes, others =
      List.partition_tf all_params ~f:(fun p -> [%matches? GPLifetime _] p.kind)
    in
    {
      params = lifetimes @ others;
      constraints = g1.constraints @ g2.constraints;
    }

  module Place = struct
    (** A place (memory location): the restricted shape of expressions that
        can appear as assignment targets. *)
    type t = { place : place'; span : span; typ : ty }

    and place' =
      | LocalVar of Local_ident.t
      | Deref of expr
      | VecRef of t
      | IndexProjection of { place : t; index : expr }
      | FieldProjection of { place : t; projector : global_ident }
    [@@deriving show]

    let deref_mut_allowed (t : ty) : bool =
      match t with
      | TApp { ident; _ } -> Global_ident.eq_name Alloc__vec__Vec ident
      | _ -> false

    let rec of_expr (e : expr) : t option =
      let wrap place = Some { place; span = e.span; typ = e.typ } in
      match e.e with
      | App { f = { e = GlobalVar (`Primitive Deref); _ }; args = [ e ]; _ }
        -> (
          match of_expr e with
          | Some { place = IndexProjection _; _ } as value -> value
          | _ -> wrap @@ Deref e)
      | LocalVar i -> wrap @@ LocalVar i
      | App
          {
            f = { e = GlobalVar (`Projector _ as projector); _ };
            args = [ place ];
            generic_args = _;
            bounds_impls = _;
            trait = _;
          (* TODO: see issue #328 *)
          } ->
          let* place = of_expr place in
          wrap @@ FieldProjection { place; projector }
      | App
          {
            f = { e = GlobalVar f; _ };
            args = [ place; index ];
            generic_args = _;
            bounds_impls = _;
            trait = _;
          (* TODO: see issue #328 *)
          }
        when Global_ident.eq_name Core__ops__index__Index__index f ->
          let* place = of_expr place in
          let place = IndexProjection { place; index } in
          Some { place; span = e.span; typ = e.typ }
      | App
          {
            f = { e = GlobalVar f; _ };
            args = [ place; index ];
            generic_args = _;
            bounds_impls = _;
            trait = _;
          (* TODO: see issue #328 *)
          }
        when Global_ident.eq_name Core__ops__index__IndexMut__index_mut f ->
          (* Note that here, we allow any type to be `index_mut`ed:
             Hax translates that to `Rust_primitives.Hax.update_at`.
             This will typecheck IFF there is an implementation.
          *)
          let* typ = Expect.mut_ref e.typ in
          let* place = Expect.mut_borrow place in
          let* place = of_expr place in
          let place = IndexProjection { place; index } in
          Some { place; span = e.span; typ }
      | _ -> None

    let rec to_expr (p : t) : expr =
      match p.place with
      | LocalVar v ->
          let e : expr' = LocalVar v in
          { e; typ = p.typ; span = p.span }
      | VecRef inner ->
          let e = to_expr inner in
          call Alloc__vec__Impl_1__as_slice [ e ] p.span p.typ
      | Deref e -> call' (`Primitive Deref) [ e ] p.span p.typ
      | FieldProjection { place; projector } ->
          let e = to_expr place in
          call' projector [ e ] p.span p.typ
      | IndexProjection { place; index } ->
          let e = to_expr place in
          call Core__ops__index__Index__index [ e; index ] p.span p.typ

    let expect_deref_mut (p : t) : t option =
      match p.place with
      | Deref e ->
          let visible_ty = e.typ in
          let* e = Expect.deref_mut_app e in
          let* e = Expect.mut_borrow e in
          let res = of_expr e in
          let f : t -> t =
           fun p ->
            match (unref_ty visible_ty, unref_ty p.typ) with
            | (TSlice _ as typ), TApp { ident; _ }
              when Global_ident.eq_name Alloc__vec__Vec ident ->
                { p with place = VecRef p; typ }
            | _ -> p
          in
          Option.map res ~f
      | _ -> None

    let expect_allowed_deref_mut (p : t) : t option =
      let* p = expect_deref_mut p in
      if deref_mut_allowed p.typ then Some p else None

    let skip_allowed_deref_mut (p : t) : t =
      Option.value ~default:p (expect_deref_mut p)
  end

  (** Groups [items] by the module path of their identifier, preserving the
      relative order of items within each group. *)
  let group_items_by_namespace (items : item list) :
      item list Concrete_ident.View.ModPath.Map.t =
    let h = Hashtbl.create (module Concrete_ident.View.ModPath) in
    List.iter items ~f:(fun item ->
        let ns = (Concrete_ident.to_view item.ident).mod_path in
        let items = Hashtbl.find_or_add h ns ~default:(fun _ -> ref []) in
        (* Prepend (O(1)) instead of appending (O(n), quadratic overall); the
           accumulated lists are reversed below to restore input order. *)
        items := item :: !items);
    Map.of_iteri_exn
      (module Concrete_ident.View.ModPath)
      ~iteri:(Hashtbl.map h ~f:(fun l -> List.rev !l) |> Hashtbl.iteri)
end


================================================
FILE: engine/lib/attr_payloads.ml
================================================
open! Prelude
open Ast

(** Parses one attribute: returns [Some (payload, span)] when [attr] is a
    [_hax::json] tool attribute carrying a hax payload, [None] otherwise.
    Exits the whole process when the payload exists but cannot be decoded:
    that indicates a version mismatch between `hax-lib` and the engine. *)
let payload (attr : attr) : (Types.ha_payload * span) option =
  match attr.kind with
  | Tool { path; tokens } when [%eq: string] path "_hax::json" -> (
      (* The tokens are a JSON string literal containing JSON itself
         (double-encoded). *)
      match Yojson.Safe.from_string tokens with
      | `String s -> (
          match
            Yojson.Safe.from_string s |> Types.safe_ha_payload_of_yojson
          with
          | Error _ ->
              Stdlib.prerr_endline
                [%string
                  {|
The hax engine could not parse a hax attribute.
This means that the crate being extracted and the version of hax engine are incompatible.
Please make sure the `hax-lib` dependency of the extracted crate matches hax-engine's version (%{Types.hax_version}).
|}];
              Stdlib.exit 1
          | Ok value -> Some (value, attr.span))
      | x ->
          Stdlib.failwith
          @@ "Attr_payloads: payloads: expected a string while parsing JSON, \
              got "
          ^ Yojson.Safe.pretty_to_string x
          (* Bug fix: a leading space was missing, yielding "got <json>instead". *)
          ^ " instead")
  | _ -> None

(** Parses every [_hax::json] attribute of [attrs], dropping the others. *)
let payloads (attrs : attrs) : (Types.ha_payload * span) list =
  List.filter_map attrs ~f:payload

(** Creates an attribute out of a [payload], double-encoding it as JSON (a
    JSON string literal whose contents are the serialized payload). *)
let to_attr (payload : Types.ha_payload) (span : span) : attr =
  let serialized =
    [%yojson_of: Types.ha_payload] payload |> Yojson.Safe.to_string
  in
  let tokens = Yojson.Safe.to_string (`String serialized) in
  { kind = Tool { path = "_hax::json"; tokens }; span }

(* Unique identifiers (uids) used to link decorations (e.g. `ensures`
   expressions) to the items they decorate. *)
module UId = struct
  module T = struct
    type t = UId of string [@@deriving show, yojson, compare, sexp, eq]
  end

  module M = struct
    include Base.Comparator.Make (T)
    include T
  end

  include M
  module Map = Map.M (M)

  (* Converts the raw uid produced by the Rust frontend. *)
  let of_raw (uid : Types.ha_uid) : t = UId uid.uid
end

(* The role an associated item plays with respect to the item it decorates:
   precondition, postcondition, termination measure, refinement, etc. *)
module AssocRole = struct
  module T = struct
    type t =
      | Requires
      | Ensures
      | Decreases
      | SMTPat
      | Refine
      | ProcessRead
      | ProcessWrite
      | ProcessInit
      | ProtocolMessages
      | ItemQuote
    [@@deriving show, yojson, compare, sexp, eq]
  end

  module M = struct
    include Base.Comparator.Make (T)
    include T
  end

  include M
  module Map = Map.M (M)

  (* Converts the raw role produced by the Rust frontend. *)
  let of_raw : Types.ha_assoc_role -> t = function
    | Requires -> Requires
    | Ensures -> Ensures
    | Decreases -> Decreases
    | SMTPat -> SMTPat
    | Refine -> Refine
    | ItemQuote -> ItemQuote
    | ProcessRead -> ProcessRead
    | ProcessWrite -> ProcessWrite
    | ProcessInit -> ProcessInit
    | ProtocolMessages -> ProtocolMessages
end

(* Attribute-querying helpers that need only an error reporter (no AST
   functor). *)
module MakeBase (Error : Phase_utils.ERROR) = struct
  (* Given a predicate, finds an attribute that is not supposed to occur
     more than once. Returns `None` if no such attribute was found. *)
  let find_unique_attr (attrs : attrs) ~(f : Types.ha_payload -> 'a option) :
      'a option =
    match
      payloads attrs
      |> List.filter_map ~f:(fun (x, span) ->
             Option.map ~f:(fun x -> (x, span)) (f x))
    with
    | [ (attr, _) ] -> Some attr
    | [] -> None
    (* On duplicates we currently keep the first match silently; see the TODO
       below for the intended error. *)
    | (attr, _first) :: (_, _second) :: _ -> Some attr
  (* TODO: when parent attributes are handled correctly (see issue #288) revive the error below *)
  (* Error.assertion_failure (Span.union first second) *)
  (*   "This attribute is supposed to be unique" *)

  (* we should have multi span errors, basically make somethings really close to Rustc diagnostics! *)

  (* Extracts the item status; defaults to [Included { late_skip = false }]
     when no status attribute is present. *)
  let status : attrs -> Types.ha_item_status =
    let f = function Types.ItemStatus is -> Some is | _ -> None in
    let default : Types.ha_item_status = Types.Included { late_skip = false } in
    find_unique_attr ~f >> Option.value ~default

  (** Extracts an `Order` attribute if it exists. *)
  let order : attrs -> int option =
    let f = function Types.Order n -> Some n | _ -> None in
    find_unique_attr ~f

  (* Whether the item's status is [Included] with [late_skip = true]. *)
  let late_skip : attrs -> bool =
    status >> [%matches? Types.Included { late_skip = true }]

  (* Whether the item carries an [Erased] attribute. *)
  let is_erased : attrs -> bool =
    find_unique_attr
      ~f:([%eq: Types.ha_payload] Erased >> Fn.flip Option.some_if ())
    >> Option.is_some

  (* Extracts the unique identifier attribute of an item, if any. *)
  let uid : attrs -> UId.t option =
    let f = function Types.Uid uid -> Some (UId.of_raw uid) | _ -> None in
    find_unique_attr ~f

  (* Whether the item is marked as a lemma. *)
  let lemma : attrs -> bool =
    payloads >> List.exists ~f:(fst >> [%matches? Types.Lemma])

  (* User code can be *decorated* (e.g. attributes `ensures` or
     `refine`). A decoration is attached to a user code via an
     `AssociatedItem` attribute, that specifies an unique identifier
     (uid) and a role (Ensure, Decreases, Refine...) *)
  let raw_associated_item : attrs -> (AssocRole.t * UId.t) list =
    payloads >> List.map ~f:fst
    >> List.filter_map ~f:(function
         | Types.AssociatedItem { role; item } ->
             Some (AssocRole.of_raw role, UId.of_raw item)
         | _ -> None)
end

(* Full attribute-querying API: on top of {!MakeBase}, resolves
   `AssociatedItem` attributes to actual items of the crate. *)
module Make (F : Features.T) (Error : Phase_utils.ERROR) = struct
  module AST = Ast.Make (F)
  module U = Ast_utils.Make (F)
  open AST
  include MakeBase (Error)

  let attrs_field (i : item) = i.attrs

  (* TODO: Maybe rename me `graph` or something? *)
  module type WITH_ITEMS = sig
    val item_uid_map : item UId.Map.t
    val try_item_of_uid : UId.t -> item option
    val item_of_uid : UId.t -> item
    val associated_items_per_roles : attrs -> item list AssocRole.Map.t
    val associated_item : AssocRole.t -> attrs -> item option

    val associated_fn :
      AssocRole.t -> attrs -> (generics * param list * expr) option

    val associated_expr :
      ?keep_last_args:int -> AssocRole.t -> attrs -> expr option

    val associated_items : AssocRole.t -> attrs -> item list

    val associated_fns :
      AssocRole.t -> attrs -> (generics * param list * expr) list

    val associated_exprs :
      ?keep_last_args:int -> AssocRole.t -> attrs -> expr list

    val expect_fn : item -> generics * param list * expr

    val expect_expr :
      ?keep_last_args:int -> generics * param list * expr -> expr

    val associated_refinement_in_type :
      span -> string list -> attrs -> expr option
    (** For type, there is a special treatment. The name of fields are global
        identifiers, and thus are subject to rewriting by [Concrete_ident] at
        the moment of printing. In contrast, in the refinement `fn` item
        generated by the proc-macros, the arguments are local identifiers, and
        thus are rewrited in a different manner.

        Thus, [associated_refinement_in_type] takes a list of [free_variables]:
        those are already formatted strings as printed by the backend. Then, we
        rewrite identities in the refinement formula to match exactly this print
        policy, using *final* local identifiers (see `Local_ident.make_final`).
    *)

    include module type of MakeBase (Error)
  end

  module WithItems (I : sig
    val items : item list
  end) : WITH_ITEMS = struct
    include MakeBase (Error)

    (* Builds a map from an association list; on a duplicate key, delegates to
       [dup] with the key and all values bound to it. *)
    let map_of_alist (type a b cmp) (m : (a, cmp) Comparator.Module.t)
        (l : (a * b) list) ~(dup : a -> b list -> (a, b, cmp) Map.t) :
        (a, b, cmp) Map.t =
      let (module M) = m in
      let equal x y = Int.equal (M.comparator.compare x y) 0 in
      match Map.of_alist m l with
      | `Ok map -> map
      | `Duplicate_key key ->
          List.filter ~f:(fst >> equal key) l |> List.map ~f:snd |> dup key

    (* Useful for looking up decorations *)
    let item_uid_map : item UId.Map.t =
      let f item = uid item.attrs |> Option.map ~f:(fun id -> (id, item)) in
      let l = List.filter_map ~f I.items in
      (* Two items sharing a uid is a hard error. *)
      let dup uid items =
        let span = List.map ~f:(fun i -> i.span) items |> Span.union_list in
        Error.assertion_failure span
        @@ "Two or more items share the same UID "
        ^ [%show: UId.t] uid
      in
      map_of_alist (module UId) l ~dup

    let try_item_of_uid (uid : UId.t) : item option = Map.find item_uid_map uid

    (* Like [try_item_of_uid] but fails with an assertion error when the uid
       is unknown. *)
    let item_of_uid (uid : UId.t) : item =
      try_item_of_uid uid
      |> Option.value_or_thunk ~default:(fun () ->
             Error.assertion_failure (Span.dummy ())
             @@ "Could not find item with UID "
             ^ [%show: UId.t] uid)

    (* Resolves every `AssociatedItem` attribute and groups the resulting
       items by role. *)
    let associated_items_per_roles : attrs -> item list AssocRole.Map.t =
      raw_associated_item
      >> List.map ~f:(map_snd item_of_uid)
      >> Map.of_alist_multi (module AssocRole)

    (* [None] on empty, [Some v] on singleton, [failure ()] otherwise. *)
    let expect_singleton failure = function
      | [] -> None
      | [ v ] -> Some v
      | _ -> failure ()
    (* Error.assertion_failure span message *)

    let span_of_attrs =
      List.map ~f:(fun (i : attr) -> i.span) >> Span.union_list

    let find_or_empty role list = Map.find list role |> Option.value ~default:[]

    let associated_items (role : AssocRole.t) (attrs : attrs) : item list =
      associated_items_per_roles attrs |> find_or_empty role

    (* At most one associated item is allowed per role. *)
    let associated_item (role : AssocRole.t) (attrs : attrs) : item option =
      associated_items role attrs
      |> expect_singleton (fun _ ->
             let span = span_of_attrs attrs in
             Error.assertion_failure span
             @@ "Found more than one "
             ^ [%show: AssocRole.t] role
             ^ " for this item. Only one is allowed.")

    (* Destructs a `fn` item into (generics, params, body); assertion error
       on any other item kind. *)
    let expect_fn = function
      | { v = Fn { generics; params; body; _ }; _ } -> (generics, params, body)
      | { span; _ } ->
          Error.assertion_failure span
            "this associated item was expected to be a `fn` item"

    (* Turns a destructed `fn` into an expression: all parameters except the
       last [keep_last_args] ones are dropped; the kept ones (if any) become a
       closure. A negative [keep_last_args] keeps all parameters. *)
    let expect_expr ?(keep_last_args = 0) (_generics, params, body) =
      let n =
        if keep_last_args < 0 then 0 else List.length params - keep_last_args
      in
      let params = List.drop params n |> List.map ~f:(fun p -> p.pat) in
      match params with
      | [] -> body
      | _ -> { body with e = Closure { params; body; captures = [] } }

    let associated_fn (role : AssocRole.t) :
        attrs -> (generics * param list * expr) option =
      associated_item role >> Option.map ~f:expect_fn

    let associated_fns (role : AssocRole.t) :
        attrs -> (generics * param list * expr) list =
      associated_items role >> List.map ~f:expect_fn

    (** Looks up an associated expression, optionally keeping `keep_last_args`
        last arguments. If keep_last_args is negative, then all arguments are
        kept. *)
    let associated_expr ?(keep_last_args = 0) (role : AssocRole.t) :
        attrs -> expr option =
      associated_fn role >> Option.map ~f:(expect_expr ~keep_last_args)

    let associated_exprs ?(keep_last_args = 0) (role : AssocRole.t) :
        attrs -> expr list =
      associated_fns role >> List.map ~f:(expect_expr ~keep_last_args)

    (* See the documentation in the WITH_ITEMS signature above. *)
    let associated_refinement_in_type span (free_variables : string list) :
        attrs -> expr option =
      associated_fn Refine
      >> Option.map ~f:(fun (_, params, body) ->
             (* Pair each bound variable of the `fn` params with the
                corresponding backend-printed free variable name. *)
             let substs =
               let x =
                 List.concat_map ~f:U.Reducers.variables_of_param params
               in
               let y = List.map ~f:Local_ident.make_final free_variables in
               List.zip_opt x y
               |> Option.value_or_thunk ~default:(fun _ ->
                      let details =
                        "associated_refinement_in_type: zip two lists of \
                         different lenghts\n" ^ "\n - params: "
                        ^ [%show: param list] params
                        ^ "\n - free_variables: "
                        ^ [%show: string list] free_variables
                      in
                      Error.assertion_failure span details)
             in
             (* Rewrite every local identifier of the body through [substs]. *)
             let v =
               U.Mappers.rename_local_idents (fun i ->
                   match List.find ~f:(fst >> [%eq: local_ident] i) substs with
                   | None -> i
                   | Some (_, i) -> i)
             in
             v#visit_expr () body)
  end

  (* Packs [WithItems] applied to [items] as a first-class module. *)
  let with_items (items : item list) : (module WITH_ITEMS) =
    (module WithItems (struct
      let items = items
    end))
end


================================================
FILE: engine/lib/backend.ml
================================================
open! Prelude
open Ast

(* Type of the options a backend accepts. *)
module type BACKEND_OPTIONS = sig
  type t
end

(* Options for backends that take no configuration. *)
module UnitBackendOptions = struct
  type t = unit
end

(* Signature every hax backend implements. *)
module type T = sig
  module InputLanguage : Features.T
  module AST : module type of Ast.Make (InputLanguage)

  module U : sig
    module Mappers : sig
      val rename_global_idents_item :
        (Ast_utils.visit_level -> global_ident -> global_ident) ->
        AST.item ->
        AST.item
    end
  end

  module Error : Phase_utils.ERROR
  module BackendOptions : BACKEND_OPTIONS
  module Attrs : module type of Attr_payloads.Make (InputLanguage) (Error)

  (* Runs the backend's phase pipeline over the (Rust-featured) input AST. *)
  val apply_phases : BackendOptions.t -> Ast.Rust.item list -> AST.item list

  (* Translates the phased items into backend source files. *)
  val translate :
    (module Attrs.WITH_ITEMS) ->
    BackendOptions.t ->
    bundles:AST.item list list ->
    AST.item list ->
    Types.file list

  val backend : Diagnostics.Backend.t
end

(* Minimal metadata identifying a backend (used for diagnostics). *)
module type BackendMetadata = sig
  val backend : Diagnostics.Backend.t
end

(* Base functor shared by backends: instantiates the AST and utilities for
   the backend's input language and provides error-reporting helpers. *)
module Make (InputLanguage : Features.T) (M : BackendMetadata) = struct
  module InputLanguage = InputLanguage
  module AST = Ast.Make (InputLanguage)
  module U = Ast_utils.Make (InputLanguage)
  include M

  module Error = struct
    type t = { kind : Diagnostics.kind; span : Ast.span } [@@deriving show, eq]

    (* Emits [err] as a diagnostic attributed to this backend and raises. *)
    let raise err =
      let context = Diagnostics.Context.Backend M.backend in
      let kind = err.kind in
      let span = Span.to_thir err.span in
      Diagnostics.SpanFreeError.raise ~span (Span.owner_hint err.span) context
        kind

    (* Raises an [Unimplemented] diagnostic, optionally pointing at a GitHub
       issue number and/or free-form details. *)
    let unimplemented ?issue_id ?details span =
      raise
        {
          kind =
            Unimplemented
              { issue_id = Option.map ~f:MyInt64.of_int issue_id; details };
          span;
        }

    let assertion_failure span details =
      raise { kind = AssertionFailure { details }; span }
  end

  module Attrs = Attr_payloads.Make (InputLanguage) (Error)
  [@@ocaml.deprecated
    "Use more precise errors: Error.unimplemented, Error.assertion_failure or \
     a raw Error.t (with Error.raise)"]

  (* Legacy failure helper; kept for old call sites (see the message). *)
  let failwith ?(span = Span.default) msg =
    Error.unimplemented
      ~details:
        ("[TODO: this error uses failwith, and thus leads to bad error \
          messages, please update it using [Diagnostics.*] helpers] " ^ msg)
      span
end


================================================
FILE: engine/lib/concrete_ident/concrete_ident.ml
================================================
open! Prelude
module View = Concrete_ident_view

module Fresh_module : sig
  (** This module provides a way of generating fresh modules paths. This can be
      used to reorganize locally definitions; the main motivation for this is
      recursive bundles, where we move definitions from multiple modules to one
      fresh module. This is fine because we re-expose all the original
      definitions. *)

  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]

  val fresh : label:string -> Explicit_def_id.t list -> t
  (** [fresh ~label hints] creates a fresh module out of the non-empty list of
      explicit definition identifiers hints [hints] and out of a label [label].

      The new module will have a unique path, close to [hints], and containing
      the label [label]. *)

  val register : fresh_module:t -> Explicit_def_id.t -> unit
  (** [register ~fresh_module id] declares that [id] belongs to [fresh_module].
  *)

  val get_path_hints : t -> Explicit_def_id.t list
  (** List path hints for a fresh module. *)

  val to_mod_path : t -> View.ModPath.t
  (** Compute a module path for a fresh module. *)

  val to_rust_ast : t -> Rust_engine_types.fresh_module
  val from_rust_ast : Rust_engine_types.fresh_module -> t
end = struct
  open View

  type t = { id : int; hints : Explicit_def_id.t list; label : string }
  [@@deriving show, yojson, hash, compare, sexp, hash, eq]

  (* Global counter allocating unique fresh-module ids. *)
  let id_state = ref 0
  (* Maps a fresh-module id to (module paths of registered ids, memoized
     final path). *)
  let map_state : _ Hashtbl.t = Hashtbl.create (module Int)

  let fresh ~label hints =
    id_state := !id_state + 1;
    assert (List.is_empty hints |> not);
    { id = !id_state; hints; label }

  let register ~(fresh_module : t) (did : Explicit_def_id.t) =
    let default = (Set.empty (module ModPath), None) in
    let f (set, opt) = (Set.add set (View.of_def_id did).mod_path, opt) in
    Hashtbl.update map_state fresh_module.id ~f:(Option.value ~default >> f)

  (** [compute_path_chunks fresh_module] returns
      [(mod_path, mod_name, suffixes)]. [suffixes] are optional suffixes to add
      to [mod_name] so that the resulting path is unique. *)
  let compute_path_chunks (m : t) =
    let mod_paths = List.map ~f:(fun d -> (of_def_id d).mod_path) m.hints in
    (* Longest common prefix of the hint paths: the new module lives there. *)
    let base = List.longest_prefix ~eq:DisambiguatedString.equal mod_paths in
    assert (List.is_empty base |> not);
    let module_names =
      List.filter ~f:(List.length >> ( < ) (List.length base)) mod_paths
      |> List.filter_map ~f:List.last
      |> List.dedup_and_sort ~compare:[%compare: DisambiguatedString.t]
    in
    (* A hash of the (deduplicated) hints is the suffix of last resort. *)
    let hash =
      List.dedup_and_sort ~compare:[%compare: Explicit_def_id.t] m.hints
      |> [%hash: Explicit_def_id.t list] |> Int.to_string
      |> DisambiguatedString.pure
    in
    let label = DisambiguatedString.pure m.label in
    (base, label, module_names @ [ hash ])

  (* Every module path in use: Rust definitions' paths plus the paths already
     allocated to fresh modules. *)
  let all_paths () =
    let rust_ones =
      Explicit_def_id.State.list_all ()
      |> List.map ~f:(fun x -> (of_def_id x).mod_path)
    in
    let fresh_ones : ModPath.t list =
      Hashtbl.data map_state |> List.filter_map ~f:snd
    in
    rust_ones @ fresh_ones

  (* Chooses the shortest suffixed module name that does not collide with any
     existing sibling of [mod_path]. *)
  let compute_path (m : t) =
    let mod_path, mod_name, suffixes = compute_path_chunks m in
    let existing_names =
      all_paths ()
      |> List.filter_map ~f:last_init
      |> List.filter ~f:(fst >> [%eq: ModPath.t] mod_path)
      |> List.map ~f:snd
      |> List.map ~f:(fun m -> m.DisambiguatedString.data)
      |> Set.of_list (module String)
    in
    let mod_name =
      (* Candidate names: mod_name, then mod_name + growing suffix lists. *)
      List.mapi ~f:(fun n _ -> mod_name :: List.take suffixes n) suffixes
      |> List.map ~f:(List.map ~f:(fun m -> m.DisambiguatedString.data))
      |> List.map ~f:(String.concat ~sep:"_")
      |> List.find ~f:(Set.mem existing_names >> not)
      |> Option.value_exn
           ~message:
             "Broken invariant: in fresh modules the suffix is supposed to be \
              crafted so that it is unique."
      |> DisambiguatedString.pure
    in
    mod_path @ [ mod_name ]

  (* Memoized wrapper around [compute_path], cached in [map_state]. *)
  let to_mod_path m =
    Hashtbl.update_and_return map_state m.id
      ~f:
        ( Option.value ~default:(Set.empty (module ModPath), None)
        >> fun (paths, alloc) ->
          ( paths,
            alloc
            |> Option.value_or_thunk ~default:(fun () -> compute_path m)
            |> Option.some ) )
    |> snd |> Option.value_exn

  let get_path_hints { hints; _ } = hints

  let to_rust_ast ({ id; hints; label } : t) : Rust_engine_types.fresh_module =
    {
      id = Int.to_string id;
      hints = List.map ~f:Explicit_def_id.to_rust_ast hints;
      label;
    }

  let from_rust_ast ({ id; hints; label } : Rust_engine_types.fresh_module) : t
      =
    {
      id = Int.of_string id;
      hints = List.map ~f:Explicit_def_id.from_rust_ast hints;
      label;
    }
end

type reserved_suffix = [ `Cast | `Pre | `Post ]
[@@deriving show, yojson, hash, compare, sexp, hash, eq]
(** A concrete identifier can have a reserved suffix: this is useful to derive
    new identifiers from existing identifiers. *)

(* Underlying representation of a concrete identifier: an explicit def-id,
   possibly relocated into a fresh module, possibly carrying a reserved
   suffix. *)
module T = struct
  type t = {
    def_id : Explicit_def_id.t;
    moved : Fresh_module.t option;
    suffix : reserved_suffix option;
  }
  [@@deriving show, yojson, hash, compare, sexp, hash, eq]
end

include T
include Comparator.Make (T)

(* The derived printer, exposed under an explicit name for debugging. *)
let to_debug_string = T.show

(** Creates a fresh module labelled [label] out of a list of identifiers: the
    path hints are the identifiers' def-ids together with the hints of the
    fresh modules they were already moved into, if any. *)
let fresh_module ~label idents =
  let hints =
    List.concat_map idents ~f:(fun { def_id; moved; _ } ->
        let moved_hints =
          match moved with
          | Some fresh_module -> Fresh_module.get_path_hints fresh_module
          | None -> []
        in
        def_id :: moved_hints)
  in
  Fresh_module.fresh ~label hints

(* Interning cache: every identifier built goes through [cached], which
   records it in a global set and returns it unchanged. *)
module Cache = struct
  let state = Hash_set.create (module T)

  let cached (i : t) : t =
    Hash_set.add state i;
    i
end

(* Assembles a concrete identifier from its three components. *)
let make (def_id : Explicit_def_id.t) (moved : Fresh_module.t option)
    (suffix : reserved_suffix option) : t =
  { def_id; moved; suffix }

(** Imports a Rust [Types.def_id] as a (cached) concrete identifier. [value]
    states whether the def-id occurs in value position. *)
let of_def_id ?(suffix : reserved_suffix option = None) ~(value : bool)
    (def_id : Types.def_id) =
  (* A DefId is a constructor when it's a value and points to a variant, a
     union or a struct. *)
  let constructor =
    value
    &&
    match (def_id.contents.value.kind : Types.def_kind) with
    | Variant | Union | Struct -> true
    | _ -> false
  in
  make (Explicit_def_id.of_def_id_exn ~constructor def_id) None suffix
  |> Cache.cached

(* Relocates identifier [i] into [fresh_module], registering the move in the
   fresh module's state. *)
let move_to_fresh_module (fresh_module : Fresh_module.t) (i : t) =
  Fresh_module.register ~fresh_module i.def_id;
  let moved = Some fresh_module in
  Cache.cached { i with moved }

(* Derives a new identifier from [i] by attaching the reserved [suffix]. *)
let with_suffix (suffix : reserved_suffix) (i : t) : t =
  let suffix = Some suffix in
  { i with suffix }

(* How to render the pieces of an identifier view into backend-specific
   strings. *)
module type VIEW_RENDERER = sig
  (* Renders one module-path segment. *)
  val render_module : View.DisambiguatedString.t -> string

  (* Renders the relative path of an identifier within [namespace]. *)
  val render_name :
    namespace:View.ModPath.t -> View.RelPath.Chunk.t list -> string

  (* Assembles a rendered path and name into the final printed string. *)
  val finalize : Concrete_ident_render_sig.rendered -> string
end

(** Computes the view of [ident]; when the identifier was moved into a fresh
    module, that module's path replaces the original one. *)
let to_view (ident : t) : Concrete_ident_view.t =
  let Concrete_ident_view.{ mod_path; rel_path } =
    View.of_def_id ident.def_id
  in
  let mod_path =
    match ident.moved with
    | Some fresh_module -> Fresh_module.to_mod_path fresh_module
    | None -> mod_path
  in
  { mod_path; rel_path }

(** Stateful store that maps [def_id]s to implementation information (which
    trait is implemented? for which type? under which constraints?) *)
module ImplInfoStore = struct
  include Explicit_def_id.ImplInfoStore

  (* Shadows the included [lookup_raw] so that it can be called directly on a
     concrete identifier rather than an explicit def-id. *)
  let lookup_raw (impl : t) : Types.impl_infos option = lookup_raw impl.def_id
end

(* Given a view renderer, produces stateful identifier-to-string rendering
   with per-module memoization and collision avoidance. *)
module MakeToString (R : VIEW_RENDERER) = struct
  open Concrete_ident_render_sig

  (** For each module namespace, we store two different pieces of data:
      - a set of rendered names in this namespace
      - a memoization map from full identifiers to rendered names

      If an identifier was already rendered, we just use this already rendered
      name.

      Otherwise, when we print a name under a fresh module, we take a look at
      the set: if there is already an identifier in the fresh module with the
      exact same rendered name, then we have a collision, and we need to
      generate a fresh name.

      To generate a fresh name, we use the set of rendered names. *)
  let per_module :
      (string list, string Hash_set.t * (t, string) Hashtbl.t) Hashtbl.t =
    Hashtbl.create
      (module struct
        type t = string list [@@deriving hash, compare, sexp, eq]
      end)

  (* Renders identifier [i] as a (path, name) pair, memoizing the result. *)
  let render (i : t) : rendered =
    let Concrete_ident_view.{ mod_path; rel_path } = to_view i in
    (* The `core` crate is rendered under the name `core_models`. *)
    let mod_path =
      Concrete_ident_view.ModPath.rename_crate "core" "core_models" mod_path
    in

    let path = List.map ~f:R.render_module mod_path in
    (* Retrieve the various maps. *)
    let name_set, memo =
      Hashtbl.find_or_add per_module
        ~default:(fun _ ->
          (Hash_set.create (module String), Hashtbl.create (module T)))
        path
    in
    (* If we rendered [i] already in the past, just use that. *)
    let name =
      match Hashtbl.find memo i with
      | Some name -> name
      | None ->
          let name = R.render_name ~namespace:mod_path rel_path in
          (* Reserved suffixes (`Pre`/`Post`/`Cast`) are appended textually. *)
          let name =
            match i.suffix with
            | Some suffix -> (
                name ^ "_"
                ^
                match suffix with
                | `Pre -> "pre"
                | `Post -> "post"
                | `Cast -> "cast_to_repr")
            | _ -> name
          in
          let moved_into_fresh_ns = Option.is_some i.moved in
          (* Escape the `__from__` separator inside names moved to a fresh
             namespace so disambiguated names below stay unambiguous. *)
          let name =
            if moved_into_fresh_ns then
              let escape_sep =
                let re = Re.Pcre.regexp "__(e*)from__" in
                let f group = "__e" ^ Re.Group.get group 1 ^ "from__" in
                Re.replace ~all:true re ~f
              in
              escape_sep name
            else name
          in
          let is_assoc_or_field (rel_path : View.RelPath.t) : bool =
            match List.last rel_path with
            | Some (`AssociatedItem (_, (`Trait _ | `Impl (_, `Trait, _))))
            | Some (`Field _) ->
                true
            | _ -> false
          in
          let name =
            if
              Hash_set.mem name_set name && moved_into_fresh_ns
              && (not << is_assoc_or_field) rel_path
              (* If this rel_path already exists in a fresh namespace,
                 then we have a duplicate and we should disambiguate.
                 Unless for associated items which correspond to trait
                 methods which may be repeated (with their implementations),
                 and for fields (which are repeated by accessors). *)
            then
              let path : View.ModPath.t = (View.of_def_id i.def_id).mod_path in
              let path = List.map ~f:R.render_module path in
              (* Generates the list of all prefixes of reversed `path` *)
              List.folding_map ~init:[] (List.rev path) ~f:(fun acc chunk ->
                  let acc = chunk :: acc in
                  (acc, acc))
              (* We want to try small prefixes first *)
              |> List.map ~f:List.rev
              (* We generate a fake path with module ancestors *)
              |> List.map ~f:(fun path ->
                     name ^ "__from__"
                     ^ String.concat ~sep:"__"
                         path (* This might shadow, we should escape *))
                 (* Find the shortest name that doesn't exist already *)
              |> List.find ~f:(Hash_set.mem name_set >> not)
              |> Option.value ~default:(name ^ ([%hash: t] i |> Int.to_string))
            else name
          in
          (* Update the maps and hashtables *)
          let _ = Hash_set.add name_set name in
          let _ = Hashtbl.add memo ~key:i ~data:name in
          name
    in
    { path; name }

  (* Renders [i] and assembles the final string via the renderer. *)
  let show (i : t) : string =
    let { path; name } = render i in
    R.finalize { path; name }
end

(* Rendering signatures specialized to concrete identifiers. *)
module RenderSig = Concrete_ident_render_sig.Make (T)
include RenderSig

(* Backend-specific naming policy: reserved words, constructor prefixes... *)
module type NAME_POLICY = Concrete_ident_render_sig.NAME_POLICY

(** [MakeRenderAPI (NP)] builds a complete rendering API (printing, name
    mangling, local identifiers) for concrete identifiers, parameterized by a
    backend-specific naming policy [NP] (reserved words, constructor/field
    prefixes, etc.). *)
module MakeRenderAPI (NP : NAME_POLICY) : RENDER_API = struct
  open Concrete_ident_render_sig

  let is_reserved_word : string -> bool = Hash_set.mem NP.reserved_words

  module R : VIEW_RENDERER = struct
    (* If [s] ends in [_<int>], append ["_"] so that it cannot collide with
       the [<name>_<disambiguator>] encoding produced below. *)
    let disambiguator_escape s =
      match split_str ~on:"_" s |> List.rev with
      | hd :: _ :: _ when Int.of_string_opt hd |> Option.is_some -> s ^ "_"
      | _ -> s

    let render_disambiguated View.DisambiguatedString.{ disambiguator; data } =
      if Int64.equal Int64.zero disambiguator then disambiguator_escape data
      else data ^ "_" ^ Int64.to_string disambiguator

    let render_module = render_disambiguated

    (* Small AST of names: makes explicit which parts need escaping and which
       prefix policies apply where, before flattening everything to a
       string. *)
    module NameAst = struct
      module Separator = struct
        let separator = "__"
        let concat x y = x ^ separator ^ y

        (* Escape [_e*_] sequences so the separator [__] stays unambiguous:
           [__] becomes [_e_], [_e_] becomes [_ee_], etc. *)
        let escape =
          let re = Re.Pcre.regexp "_(e*)_" in
          let f group = "_e" ^ Re.Group.get group 1 ^ "_" in
          Re.replace ~all:true re ~f
      end

      module Prefixes : sig
        type t = private string [@@deriving eq, show]

        val allowed : t list
        (** List of allowed reserved prefixes. *)

        val mk : string -> t
        (** Creates a prefix, if it is valid. *)

        val escape : string -> string
        (** Escapes reserved prefixes in a string *)
      end = struct
        type t = string [@@deriving eq, show]

        (* Every reserved prefix used by [render_chunk] below must be listed
           here, otherwise [mk] fails at rendering time. *)
        let allowed =
          [
            "impl";
            "anon_const";
            "inline_const";
            "foreign";
            "use";
            (* [global_asm] is emitted by [render_chunk] for global-asm
               items; it must be registered here, otherwise rendering such an
               item would hit the [failwith] in [mk]. *)
            "global_asm";
            "opaque";
            "closure";
            "t";
            "v";
            "f";
            "i";
            "discriminant";
          ]
          @ (List.filter_map ~f:Fn.id
               [
                 NP.struct_constructor_prefix;
                 NP.enum_constructor_prefix;
                 NP.union_constructor_prefix;
               ]
            |> List.dedup_and_sort ~compare:String.compare)

        let mem = List.mem ~equal:[%eq: string] allowed

        let mk s =
          if mem s then s
          else
            failwith ("broken invariant: [" ^ s ^ "] is not an allowed prefix")

        let escape_char = "e"

        let () =
          assert (
            (* Make sure there is no prefix `Cs` such that `C ^ "s"` is a prefix as well. *)
            List.for_all allowed ~f:(fun s -> not (mem (first_letter s ^ s))))

        let () = assert (mem "e" |> not)

        (* Escape a leading underscore and any reserved prefix, so that
           user-written names can never be mistaken for generated ones. *)
        let rec escape (s : string) : string =
          match String.lsplit2 ~on:'_' s with
          | Some ("", rest) -> "e_" ^ escape rest
          | Some (prefix, rest)
            when List.mem ~equal:[%equal: string] allowed prefix ->
              first_letter prefix ^ prefix ^ "_" ^ escape rest
          | _ -> s
      end

      type policy = {
        prefix : Prefixes.t;
        disable_when : [ `SameCase ] list;
        mode : [ `Global | `Local | `Both ];
      }
      [@@deriving eq, show]

      type t =
        | Concat of (t * t)  (** Concatenate two names *)
        | Policy of (policy * t)
        | TrustedString of string  (** A string that is already escaped *)
        | UnsafeString of string  (** A string that needs escaping *)
        | Empty
      [@@deriving eq, show]

      (* Finds the right-most policy whose [mode] is [`Global] or [`Both]:
         that policy is applied to the whole rendered name by [render]. *)
      let rec global_policy ast : _ =
        let filter =
          Option.filter ~f:(fun p -> [%matches? `Global | `Both] p.mode)
        in
        let ( <|> ) v f = match v with Some v -> Some v | None -> f () in
        match ast with
        | Policy (policy, contents) ->
            global_policy contents |> filter <|> fun _ ->
            policy |> Option.some |> filter
        | Concat (l, r) ->
            global_policy r |> filter <|> fun _ -> global_policy l |> filter
        | _ -> None

      let escape_unsafe_string = Prefixes.escape >> Separator.escape

      (* Attach [policy]'s prefix to [escaped], unless the policy disables
         itself (e.g. [`SameCase]) — except that a leftmost reserved word is
         always prefixed, since it would otherwise clash with the target
         language. *)
      let apply_policy (leftmost : bool) (policy : policy) (escaped : string) =
        let prefix = (policy.prefix :> string) in
        let disable =
          List.exists policy.disable_when ~f:(function `SameCase ->
              let first_upper = first_letter >> is_uppercase in
              Bool.equal (first_upper prefix) (first_upper escaped))
        in
        if (not disable) || (leftmost && is_reserved_word escaped) then
          prefix ^ "_" ^ escaped
        else escaped

      (* One normalization step: drop [Empty] leaves and empty policies. *)
      let rec norm' = function
        | Concat (Empty, x) | Concat (x, Empty) -> x
        | Policy (_, Empty) -> Empty
        | Policy (p, x) -> Policy (p, norm' x)
        | Concat (x, y) -> Concat (norm' x, norm' y)
        | x -> x

      (* Iterate [norm'] until a fixpoint is reached. *)
      let rec norm x =
        let x' = norm' x in
        if [%eq: t] x x' then x else norm x'

      let concat_list =
        List.fold ~f:(fun l r -> Concat (l, r)) ~init:Empty >> norm

      let rec render' leftmost ast =
        match ast with
        | Concat (a, b) ->
            Separator.concat (render' leftmost a) (render' false b)
        | Policy (policy, a) when [%matches? `Global] policy.mode ->
            render' leftmost a
        | Policy (policy, a) ->
            render' leftmost a |> apply_policy leftmost policy
        | TrustedString s -> s
        | UnsafeString s -> escape_unsafe_string s
        | Empty -> ""

      let render ast =
        let policy = global_policy ast in
        let policy =
          Option.map ~f:(apply_policy true) policy
          |> Option.value ~default:Fn.id
        in
        let rendered = norm ast |> render' true |> policy in
        if is_reserved_word rendered then rendered ^ "_escape_reserved_word"
        else rendered
    end

    (** [pretty_impl_name ~namespace impl_infos] computes a pretty impl name
        given impl information and a namespace. A pretty name can be computed
        when:
        - (1) the impl, (2) the type and (3) the trait implemented all live in
          the same namespace
        - the impl block has no generics
        - the type implemented is simple enough to be represented as a string
          (see module {!Thir_simple_types}) *)
    let pretty_impl_name ~namespace (impl_infos : Types.impl_infos) =
      let* ty = Thir_simple_types.to_string ~namespace impl_infos.typ in
      let*? _no_generics = List.is_empty impl_infos.generics.params in
      match impl_infos.trait_ref with
      | None -> Some ty
      | Some { value = { def_id = trait; generic_args = [ _self ]; _ }; _ } ->
          let* trait = Explicit_def_id.of_def_id trait in
          let trait = View.of_def_id trait in
          let*? _same_ns = [%eq: View.ModPath.t] namespace trait.mod_path in
          let* trait =
            match trait.rel_path with
            | [ `Trait (n, _) ] when Int64.equal Int64.zero n.disambiguator ->
                Some n.data
            | _ -> None
          in
          (* Escape [_for_] sequences in the trait name so that the final
             [<trait>_for_<ty>] name can be decoded unambiguously. *)
          let trait =
            let re = Re.Pcre.regexp "_((?:e_)*)for_" in
            let f group = "_e_" ^ Re.Group.get group 1 ^ "for_" in
            Re.replace ~all:true re ~f trait
          in
          Some (trait ^ "_for_" ^ ty)
      | _ -> None

    (** Produces a name for an impl block, only if it is necessary (e.g. the
        disambiguator is non-null) *)
    let impl_name ~namespace ?(always = false) disambiguator
        (impl_infos : Types.impl_infos option) =
      let pretty = impl_infos |> Option.bind ~f:(pretty_impl_name ~namespace) in
      let*? _ = always || Int64.equal Int64.zero disambiguator |> not in
      match pretty with
      | Some pretty -> Some pretty
      | None ->
          if Int64.equal Int64.zero disambiguator then None
          else Some (Int64.to_string disambiguator)

    (** Renders one chunk *)
    let render_chunk ~namespace ~final (chunk : View.RelPath.Chunk.t) :
        NameAst.t =
      let prefix ?(global = false) ?(disable_when = []) s contents =
        NameAst.Policy
          ( {
              prefix = NameAst.Prefixes.mk s;
              mode = (if global then `Both else `Local);
              disable_when;
            },
            contents )
      in
      let prefix_d s d = prefix s (NameAst.UnsafeString (Int64.to_string d)) in
      let dstr s = NameAst.UnsafeString (render_disambiguated s) in
      let render_impl_name ?(always = false) disambiguator impl_infos =
        match impl_name ~namespace ~always disambiguator impl_infos with
        | Some name -> prefix "impl" (UnsafeString name)
        | None -> TrustedString "impl"
      in
      match chunk with
      | `AnonConst d ->
          prefix ~global:true ~disable_when:[ `SameCase ] "anon_const"
            (NameAst.UnsafeString (Int64.to_string d))
      | `InlineConst d ->
          prefix ~global:true ~disable_when:[ `SameCase ] "inline_const"
            (NameAst.UnsafeString (Int64.to_string d))
      | `Use d -> prefix_d "use" d
      | `Foreign d -> prefix_d "foreign" d
      | `GlobalAsm d -> prefix_d "global_asm" d
      | `Closure d -> prefix_d "closure" d
      | `Opaque d -> prefix_d "opaque" d
      (* The name of a trait impl *)
      | `Impl (d, _, impl_infos) -> render_impl_name d impl_infos
      (* Print the name of an associated item in a inherent impl *)
      | `AssociatedItem
          ((`Type n | `Const n | `Fn n), `Impl (d, `Inherent, impl_infos)) ->
          let impl = render_impl_name ~always:true d impl_infos in
          Concat (impl, dstr n)
      (* Print the name of an item defined inside an associated item of a trait impl *)
      (* `Impl of
         'disambiguator
         * [ `Inherent | `Trait ]
         * Types.impl_infos option*)
      | `AssociatedItem
          ((`Type n | `Const n | `Fn n), `Impl (d, `Trait, impl_infos))
        when not final ->
          Concat
            (prefix "f" (dstr n), render_impl_name ~always:true d impl_infos)
      (* Print the name of an associated item in a trait impl *)
      | `AssociatedItem
          ((`Type n | `Const n | `Fn n), `Impl (d, `Trait, impl_infos)) ->
          if NP.prefix_associated_item_with_trait_name then
            Concat
              (render_impl_name ~always:true d impl_infos, prefix "f" (dstr n))
          else prefix "f" (dstr n)
      | `AssociatedItem ((`Type n | `Const n | `Fn n), `Trait (trait_name, _))
        ->
          if NP.prefix_associated_item_with_trait_name then
            Concat (dstr trait_name, prefix "f" (dstr n))
          else prefix "f" (dstr n)
      (* The constructor of a struct *)
      | `Constructor (cons, parent) -> (
          let cons = render_disambiguated cons in
          let include_type, prefix_s, type_name =
            match parent with
            | `Struct n ->
                ( NP.prefix_struct_constructors_with_type,
                  NP.struct_constructor_prefix,
                  n )
            | `Enum n ->
                ( NP.prefix_enum_constructors_with_type,
                  NP.enum_constructor_prefix,
                  n )
            | `Union n ->
                ( NP.prefix_union_constructors_with_type,
                  NP.union_constructor_prefix,
                  n )
          in
          let cons =
            if include_type then render_disambiguated type_name ^ "_" ^ cons
            else cons
          in
          match prefix_s with
          | Some prefix_s ->
              prefix ~global:true ~disable_when:[ `SameCase ] prefix_s
                (UnsafeString cons)
          | _ -> UnsafeString cons)
      (* Anonymous fields *)
      | `Field ({ data; disambiguator }, _)
        when Option.is_some (Int.of_string_opt data)
             && Int64.equal disambiguator Int64.zero ->
          TrustedString (NP.anonymous_field_transform data)
      (* Named fields *)
      | `Field (n, `Constructor (cons, (`Struct typ | `Union typ | `Enum typ)))
        ->
          let n = render_disambiguated n in
          let n =
            match NP.named_field_prefix with
            | Some `ConstructorName -> render_disambiguated cons ^ "_" ^ n
            | Some `TypeName -> render_disambiguated typ ^ "_" ^ n
            | _ -> n
          in
          prefix "f" (UnsafeString n)
      (* Anything function-like *)
      | `Macro n | `Static n | `Fn n | `Const n ->
          prefix "v" ~disable_when:[ `SameCase ] (dstr n)
      (* Anything type-like *)
      | `ExternCrate n
      | `Trait (n, _)
      | `ForeignTy n
      | `TraitAlias n
      | `Mod n
      | `Struct n
      | `Union n
      | `TyAlias n
      | `Enum n ->
          prefix "t" (dstr n)

    (* Render a full relative path: each chunk knows whether it is the final
       one (this matters for associated items of trait impls). *)
    let render_name ~namespace (rel_path : View.RelPath.t) =
      let l = List.length rel_path in
      let rel_path =
        List.mapi
          ~f:(fun i -> render_chunk ~final:(i = l - 1) ~namespace)
          rel_path
        |> NameAst.concat_list
      in
      NameAst.render rel_path

    (* Capitalize module path segments and join everything with dots. *)
    let finalize { path; name } =
      let path = List.map ~f:(map_first_letter String.uppercase) path in
      String.concat ~sep:"."
        (path @ if String.is_empty name then [] else [ name ])
  end

  include MakeToString (R)

  let pp fmt = T.show >> Stdlib.Format.pp_print_string fmt

  let show id =
    let { path; name } = render id in
    (path @ if String.is_empty name then [] else [ name ])
    |> String.concat ~sep:"::"

  let local_ident (li : Local_ident.t) : string =
    if Local_ident.is_final li then li.name
    else
      R.render_name ~namespace:[]
        [
          `Fn
            View.DisambiguatedString.
              { disambiguator = Int64.zero; data = li.name };
        ]
end

(* A statically-known identifier, drawn from the generated list of names the
   engine needs to refer to. *)
type name = Concrete_ident_generated.t
[@@deriving show, yojson, compare, sexp, eq, hash]

let of_name ~value = Concrete_ident_generated.def_id_of >> of_def_id ~value

(* [eq_name name id] holds iff [id] denotes the statically-known [name]:
   both their crates and their (disambiguated) paths must coincide. *)
let eq_name name id =
  let expected = (Concrete_ident_generated.def_id_of name).contents.value in
  let actual = Explicit_def_id.to_def_id id.def_id in
  String.equal expected.krate actual.krate
  && [%eq: Types.disambiguated_def_path_item list] expected.path actual.path

(** Default, target-agnostic naming policy: no reserved words, identity
    transform for anonymous fields, and ["C"] prefixes for constructors. Only
    enum variants embed their type's name. *)
module DefaultNamePolicy : NAME_POLICY = struct
  let reserved_words = Hash_set.create (module String)
  let anonymous_field_transform = Fn.id

  (* Note: a dead [prefix__constructors_with_type] binding (typo'd with a
     double underscore, absent from [NAME_POLICY]) used to live here; it was
     removed as it could never be read through the signature. *)
  let prefix_struct_constructors_with_type = false
  let prefix_enum_constructors_with_type = true
  let prefix_union_constructors_with_type = false
  let struct_constructor_prefix = Some "C"
  let enum_constructor_prefix = Some "C"
  let union_constructor_prefix = Some "C"
  let named_field_prefix = None
  let prefix_associated_item_with_trait_name = false
end

module DefaultViewAPI = MakeRenderAPI (DefaultNamePolicy)

(* Rewrite every name-carrying string of the identifier's path with [f].
   This is a hack reserved for Import_thir (see the [@@alert] in the
   interface). *)
let map_path_strings ~(f : string -> string) (did : t) : t =
  let constructor = Explicit_def_id.is_constructor did.def_id in
  let contents : Types.def_id_contents =
    Explicit_def_id.to_def_id did.def_id
  in
  (* Apply [f] to the payload of one path chunk, leaving non-name chunks
     (impl, closure, ...) untouched. *)
  let rename_chunk (chunk : Types.disambiguated_def_path_item) =
    let data =
      match chunk.data with
      | Types.TypeNs s -> Types.TypeNs (f s)
      | ValueNs s -> ValueNs (f s)
      | MacroNs s -> MacroNs (f s)
      | LifetimeNs s -> LifetimeNs (f s)
      | other -> other
    in
    { chunk with data }
  in
  let contents =
    { contents with path = List.map ~f:rename_chunk contents.path }
  in
  let def_id =
    Explicit_def_id.of_def_id_exn ~constructor
      { contents = { value = contents; id = Base.Int64.zero } }
  in
  (* [moved] and [suffix] are reset: the rewritten id is a fresh identifier. *)
  { def_id; moved = None; suffix = None }

let is_constructor (did : t) : bool = Explicit_def_id.is_constructor did.def_id

(* Whether the identifier denotes an anonymous associated type. *)
let is_anon_assoc_ty (did : t) : bool =
  did.def_id |> Explicit_def_id.is_anon_assoc_ty

(* Glob-match a namespace pattern against the identifier's crate + path.
   [Glob One] consumes exactly one segment; [Glob Many] consumes any number
   (including zero). Non-name path chunks (impl, closure, ...) appear as
   [None] and can only be consumed by globs. *)
let matches_namespace (ns : Types.namespace) (did : t) : bool =
  let contents = Explicit_def_id.to_def_id did.def_id in
  let path : string option list =
    Some contents.krate
    :: List.map contents.path
         ~f:(fun (chunk : Types.disambiguated_def_path_item) ->
           match chunk.data with
           | TypeNs s | ValueNs s | MacroNs s | LifetimeNs s -> Some s
           | _ -> None)
  in
  let rec matches (pattern : Types.namespace_chunk list)
      (path : string option list) =
    match (pattern, path) with
    | [], [] -> true
    | Exact expected :: pattern, Some actual :: path ->
        [%equal: string] expected actual && matches pattern path
    | Glob One :: pattern, _ :: path -> matches pattern path
    | Glob Many :: pattern, [] -> matches pattern []
    (* [Glob Many] either matches nothing (try [pattern'] on the whole
       [path]) or swallows one segment and stays in place. *)
    | Glob Many :: pattern', _ :: path' ->
        matches pattern' path || matches pattern path'
    | _ -> false
  in
  matches ns.chunks path

(* Convert a concrete identifier to its Rust-engine AST counterpart,
   translating the polymorphic-variant suffix into the generated type. *)
let to_rust_ast ({ def_id; moved; suffix } : t) : Rust_engine_types.concrete_id
    =
  let suffix =
    Option.map suffix ~f:(function
      | `Cast -> Rust_engine_types.Cast
      | `Pre -> Rust_engine_types.Pre
      | `Post -> Rust_engine_types.Post)
  in
  {
    def_id = Explicit_def_id.to_rust_ast def_id;
    moved = Option.map moved ~f:Fresh_module.to_rust_ast;
    suffix;
  }

(* Inverse of [to_rust_ast]: rebuild a concrete identifier from its
   Rust-engine AST representation. *)
let from_rust_ast ({ def_id; moved; suffix } : Rust_engine_types.concrete_id) :
    t =
  {
    def_id = Explicit_def_id.from_rust_ast def_id;
    moved = Option.map moved ~f:Fresh_module.from_rust_ast;
    suffix =
      Option.map suffix ~f:(function
        | Rust_engine_types.Cast -> `Cast
        | Pre -> `Pre
        | Post -> `Post);
  }


================================================
FILE: engine/lib/concrete_ident/concrete_ident.mli
================================================
(** This module provides the global concrete identifiers. *)

module Fresh_module : sig
  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]
  (** A type representing a fresh module. Below, we define two functions:
      - [fresh] creates a new fresh module
      - [move_to_fresh_module] creates a new and always fresh identifier by
        "moving" an existing identifier under the given fresh module *)
end

module View : module type of Concrete_ident_view

module T : sig
  type t [@@deriving show, yojson, compare, sexp, eq, hash]
  (** A concrete identifier. *)
end

include module type of T with type t = T.t

type reserved_suffix = [ `Cast | `Pre | `Post ]
[@@deriving show, yojson, hash, compare, sexp, hash, eq]
(** A concrete identifier can have a reserved suffix: this is useful to derive
    new identifiers from existing identifiers. *)

val of_def_id :
  ?suffix:reserved_suffix option -> value:bool -> Types.def_id -> t
(** [of_def_id ?suffix ~value def_id] creates a concrete identifier out of a
    Rust identifier [def_id]. [value] is a flag that decides whether [def_id]
    refers to a value or not.

    [value] is important only for constructors: i.e. the identifier for the type
    of a struct should be created with [value] set to false while the identifier
    for the constructor of a struct should be created with [value] set to true.
    For more information, please read the documentation of module
    {!Explicit_def_id}. *)

type name = Concrete_ident_generated.t
[@@deriving show, yojson, compare, sexp, eq, hash]
(** An enumeration of static concrete identifiers useful inside the engine. *)

val of_name : value:bool -> name -> t
(** Creates an identifier given a name. [value] has the same meaning as in
    function {!of_def_id}. *)

val eq_name : name -> t -> bool
(** [eq_name name identifier] is true whenever [identifier] is [name]. *)

val to_debug_string : t -> string
(** Format an identifier as a (ppx) debug string. The default debug pretty
    prints the identifier. *)

val fresh_module : label:string -> t list -> Fresh_module.t
(** [fresh_module ~label hints] creates a fresh module given a non-empty list of
    existing identifiers and a label. The generated module name will be unique,
    will be close to the identifiers found in [hints], and will include the
    label. *)

val move_to_fresh_module : Fresh_module.t -> t -> t
(** Creates a fresh identifier under a given fresh module and given an existing
    identifier. *)

val with_suffix : reserved_suffix -> t -> t
(** Creates an identifier out of an existing one, adding a suffix. *)

val to_view : t -> Concrete_ident_view.t
(** Compute a view for a given identifier. *)

val map_path_strings : f:(string -> string) -> t -> t
[@@alert unsafe "This function should be only used in Import_thir!"]
(** This function maps any string found in the inner representation of hax. This
    is a hack for Import_thir so that we can generically produce identifiers for
    any integer type, please do not use it elsewhere. *)

val is_constructor : t -> bool
(** Returns true if the ident represents a constructor. *)

val is_anon_assoc_ty : t -> bool
(** Returns true if the ident represents an anonymous associated type. *)

type comparator_witness

val comparator : (t, comparator_witness) Base.Comparator.comparator

module RenderSig : module type of Concrete_ident_render_sig.Make (T)

module type RENDER_API = RenderSig.RENDER_API
module type NAME_POLICY = Concrete_ident_render_sig.NAME_POLICY

module DefaultNamePolicy : NAME_POLICY
module MakeRenderAPI (NP : NAME_POLICY) : RenderSig.RENDER_API
module DefaultViewAPI : RenderSig.RENDER_API

module ImplInfoStore : sig
  val init : (Types.def_id * Types.impl_infos) list -> unit

  val lookup_raw : t -> Types.impl_infos option
  (** Lookup the (raw[1]) implementation information given a concrete ident.
      Returns `Some _` if and only if the supplied identifier points to an
      `Impl`.

      [1]: those are raw THIR types.

      {b WARNING}: due to
      {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up
      certain identifiers generated by the engine, this function may return
      [None] even though the supplied identifier points to an [Impl] block. *)
end

val matches_namespace : Types.namespace -> t -> bool
val to_rust_ast : t -> Rust_engine_types.concrete_id
val from_rust_ast : Rust_engine_types.concrete_id -> t


================================================
FILE: engine/lib/concrete_ident/concrete_ident_render_sig.ml
================================================
open! Prelude

type rendered = { path : string list; name : string }

module type NAME_POLICY = sig
  val reserved_words : string Hash_set.t
  (** List of all words that have a special meaning in the target language, and
      that should thus be escaped. *)

  val anonymous_field_transform : string -> string
  (** Transformation applied to anonymous tuple fields (i.e. [x.1]) *)

  val named_field_prefix : [ `ConstructorName | `TypeName ] option
  (** Should fields be prefixed? *)

  val prefix_struct_constructors_with_type : bool
  (** Whether struct constructor names embed the name of their type. *)

  val prefix_enum_constructors_with_type : bool
  (** Whether enum variant names embed the name of their enum. *)

  val prefix_union_constructors_with_type : bool
  (** Whether union constructor names embed the name of their type. *)

  val struct_constructor_prefix : string option
  (** Optional reserved prefix (e.g. ["C"]) for struct constructors. *)

  val enum_constructor_prefix : string option
  (** Optional reserved prefix for enum variant constructors. *)

  val union_constructor_prefix : string option
  (** Optional reserved prefix for union constructors. *)

  val prefix_associated_item_with_trait_name : bool
  (** Whether associated items are prefixed with their trait's name. *)
end

(* [Make (T)] packages the signature of a rendering API over identifiers of
   type [T.t]. *)
module Make (T : sig
  type t
end) =
struct
  open T

  module type RENDER_API = sig
    val show : t -> string
    (** Render an identifier as a [::]-separated string. *)

    val pp : Formatter.t -> t -> unit
    (** Formatter version of [show]. *)

    val render : t -> rendered
    (** Render an identifier into a module path and a local name. *)

    val local_ident : Local_ident.t -> string
    (** Render a local (non-global) identifier. *)
  end
end


================================================
FILE: engine/lib/concrete_ident/concrete_ident_types.ml
================================================
open Prelude

(** An [ExplicitDefId.t] is a Rust [Types.def_id] tagged with some disambiguation metadata.
    
    Rust raw [Types.def_id] can be ambiguous: consider the following Rust code:
    ```rust
    struct S;
    fn f() -> S { S }
    ```
    Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refer to the exact same identifier `mycrate::S`.
    Yet, they denote two very different objects: a type versus a constructor.

    [ExplicitDefId.t] clears up this ambiguity, making constructors and types two separate things.

    Also, an [ExplicitDefId.t] always points to an item: an [ExplicitDefId.t] is never pointing to a crate alone.
*)
module type ExplicitDefId = sig
  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]
  (** Representation of explicit definition identifiers. *)

  val of_def_id : ?constructor:bool -> Types.def_id -> t option
  (** Smart constructor for [t]. Creates an explicit def id out of a raw Rust
      definition identifier [Types.def_id].

      When [of_def_id] is called with [id] a [Types.def_id], if the [kind] of
      [id] is either [Struct] or [Union], then [constructor] is mandatory.
      Otherwise, the argument [constructor] should be [true] only if [id] is a
      variant.

      This function returns [Some] only when those conditions are met. *)

  val make_exn : ?constructor:bool -> Types.def_id -> t
  (** Exception-throwing variant of [of_def_id]. This should be used when we
      know statically that the conditions described in the documentation of
      [of_def_id] are met.

      For instance, with static [Types.def_id]s or in [Import_thir]. *)

  val is_constructor : t -> bool
  (** Checks whether a definition identifier [id] points to a constructor.

      [is_constructor id] returns [true] when:
      - the kind of [id] is [Struct] or [Union] and the identifier was tagged as
        a constructor;
      - the kind of [id] is [Variant].

      Otherwise, [is_constructor id] returns [false]. *)

  val parent : t -> t option
  (** Looks up the parent of a definition identifier. Note that the parent of
      the identifier of a field is always a constructor.

      Also, a top-level item (e.g. `my_crate::some_item`) has no parent: recall
      that [t] represent only items, not crates. *)

  val parents : t -> t list
  (** Ordered list of parents for an identifier [id], starting with [id], up to
      the top-most parent identifier. *)

  val to_def_id : t -> Types.def_id_contents
  (** Destructor for [t]. *)

  module State : sig
    val list_all : unit -> t list
    (** List all identifiers the engine dealt with so far. Beware, this function
        is stateful. *)
  end
end

(* Basic types shared by identifier views. *)
module ViewTypes = struct
  (* A disambiguator, mirroring rustc's: distinguishes same-named siblings. *)
  type disambiguator = Int64.t
  [@@deriving show, hash, compare, sexp, hash, eq, map]

  module DisambiguatedString = struct
    (* A string together with its disambiguator (0 means "first/only"). *)
    type t = { disambiguator : disambiguator; data : string }
    [@@deriving show, hash, compare, sexp, hash, eq, map]
  end
end


================================================
FILE: engine/lib/concrete_ident/concrete_ident_view.ml
================================================
open! Prelude
include Concrete_ident_view_types

(** Rust paths obey structural invariants (e.g. a function always ends in a
    `ValueNs _` chunk); this function reports (on stderr) and raises when a
    path violates one of them. *)
let broken_invariant (type t) msg (did : Explicit_def_id.t) : t =
  let message =
    String.concat ~sep:""
      [
        "Explicit_def_id: an invariant has been broken. Expected ";
        msg;
        ".\n\ndid=";
        [%show: Explicit_def_id.t] did;
      ]
  in
  (* Print before raising so the context survives even if the exception is
     caught and rethrown without its message. *)
  Stdio.prerr_endline message;
  failwith message

(** Helper module asserting various structural properties about a DefId. *)
module Assert = struct
  (* The identifier must have a parent (i.e. it is not a top-level item). *)
  let parent did =
    Explicit_def_id.parent did
    |> Option.value_or_thunk ~default:(fun _ ->
           broken_invariant "the Explicit_def_id to have a parent" did)

  (* Shared implementation of the [*_ns] assertions below: fetch the last
     path chunk of [did], project its payload with [extract], and fail with
     the usual [broken_invariant] message (mentioning [kind]) otherwise.
     The error strings are identical to the ones previously produced by the
     three hand-written variants. *)
  let expect_last_ns ~kind ~extract (did : Explicit_def_id.t) =
    let fail () =
      broken_invariant ("last path chunk to exist and be of type " ^ kind) did
    in
    match List.last (Explicit_def_id.to_def_id did).path with
    | Some { data; disambiguator } -> (
        match extract data with
        | Some data -> DisambiguatedString.{ data; disambiguator }
        | None -> fail ())
    | None -> fail ()

  (* The last path chunk must be a [TypeNs]. *)
  let type_ns (did : Explicit_def_id.t) =
    expect_last_ns ~kind:"TypeNs"
      ~extract:(function Types.TypeNs data -> Some data | _ -> None)
      did

  (* The last path chunk must be a [MacroNs]. *)
  let macro_ns (did : Explicit_def_id.t) =
    expect_last_ns ~kind:"MacroNs"
      ~extract:(function Types.MacroNs data -> Some data | _ -> None)
      did

  (* The last path chunk must be a [ValueNs]. *)
  let value_ns (did : Explicit_def_id.t) =
    expect_last_ns ~kind:"ValueNs"
      ~extract:(function Types.ValueNs data -> Some data | _ -> None)
      did
end

(** [poly ~into_n ~into_d did] classifies the item denoted by [did] into a
    [RelPath.Chunk.poly], according to its [def_kind], checking the path
    invariants along the way. [into_n] (resp. [into_d]) injects a
    disambiguated name (resp. a raw disambiguator), together with the
    [Explicit_def_id.t] it was found on, into the result. *)
let rec poly :
    'n 'd.
    into_n:(Explicit_def_id.t -> DisambiguatedString.t -> 'n) ->
    into_d:(Explicit_def_id.t -> Int64.t -> 'd) ->
    Explicit_def_id.t ->
    ('n, 'd) RelPath.Chunk.poly =
 fun ~into_n ~into_d did ->
  let poly = poly ~into_n ~into_d in
  let mk_associated_item kind : ('n, 'd) RelPath.Chunk.poly =
    `AssociatedItem
      ( kind,
        match Assert.parent did |> poly with
        | (`Impl _ | `Trait _) as p -> p
        | _ -> broken_invariant "Impl or Trait" (Assert.parent did) )
  in
  let assert_type_ns did = Assert.type_ns did |> into_n did in
  let assert_value_ns did = Assert.value_ns did |> into_n did in
  let assert_macro_ns did = Assert.macro_ns did |> into_n did in
  let result =
    match (Explicit_def_id.to_def_id did).kind with
    | (Ctor (Struct, _) | Struct) when Explicit_def_id.is_constructor did ->
        let name = assert_type_ns did in
        `Constructor (name, `Struct name)
    | Variant | Ctor _ ->
        let parent = Assert.parent did in
        let name = assert_type_ns did in
        `Constructor
          ( name,
            match poly parent with
            | (`Enum _ | `Struct _ | `Union _) as p -> p
            | _ -> broken_invariant "Enum, Struct or Union" parent )
    | Fn -> `Fn (assert_value_ns did)
    | Const -> `Const (assert_value_ns did)
    | AssocFn -> `Fn (assert_value_ns did) |> mk_associated_item
    | AssocConst -> `Const (assert_value_ns did) |> mk_associated_item
    | AssocTy -> `Type (assert_type_ns did) |> mk_associated_item
    | TyAlias -> `TyAlias (assert_type_ns did)
    | Field ->
        (* The parent of a field is always a constructor. *)
        let constructor =
          let parent = Assert.parent did in
          match parent |> poly with
          | `Constructor _ as p -> p
          | _ -> broken_invariant "Constructor" parent
        in
        `Field (assert_value_ns did, constructor)
    | Trait -> `Trait (assert_type_ns did, None)
    | TraitAlias -> `Trait (assert_type_ns did, Some `Alias)
    | Macro _ -> `Macro (assert_macro_ns did)
    | Union -> `Union (assert_type_ns did)
    | Enum -> `Enum (assert_type_ns did)
    | Struct -> `Struct (assert_type_ns did)
    | AnonConst ->
        `AnonConst
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = AnonConst; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be AnonConst" did)
    | Closure ->
        (* Fix: closures were previously mis-tagged as [`AnonConst], which
           made the dedicated [`Closure] chunk (and its "closure" prefix in
           the renderer) unreachable. The pattern and error message below
           already expected a [Closure] path chunk. *)
        `Closure
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = Closure; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be Closure" did)
    | Impl { of_trait } ->
        `Impl
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = Impl; disambiguator } ->
              ( into_d did disambiguator,
                (if of_trait then `Trait else `Inherent),
                Explicit_def_id.ImplInfoStore.lookup_raw did )
          | _ -> broken_invariant "last path chunk to be Impl" did)
    | OpaqueTy ->
        `Opaque
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = OpaqueTy; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be Opaque" did)
    | Use ->
        `Use
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = Use; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be Use" did)
    | ForeignMod ->
        `Foreign
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = ForeignMod; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be ForeignMod" did)
    | ForeignTy -> `ForeignTy (assert_type_ns did)
    | ExternCrate -> `ExternCrate (assert_type_ns did)
    | Static _ -> `Static (assert_value_ns did)
    | Mod -> `Mod (assert_type_ns did)
    | GlobalAsm ->
        `GlobalAsm
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = GlobalAsm; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be GlobalAsm" did)
    | InlineConst ->
        (* Inline consts share the [AnonConst] path-chunk encoding. *)
        `InlineConst
          (match List.last_exn (Explicit_def_id.to_def_id did).path with
          | { data = AnonConst; disambiguator } -> into_d did disambiguator
          | _ -> broken_invariant "last path chunk to be AnonConst" did)
    | TyParam | ConstParam | PromotedConst | LifetimeParam
    | SyntheticCoroutineBody ->
        (* It should be impossible for such items to ever be referenced by anyting in hax. *)
        broken_invariant
          "non (TyParam | ConstParam | InlineConst | PromotedConst | \
           LifetimeParam | SyntheticCoroutineBody) identifier"
          did
  in
  result

(** Computes the relational chunk naming an identifier. The
    [Explicit_def_id.t] context that [poly] threads through is discarded:
    only names and disambiguators are kept. *)
let view_name : Explicit_def_id.t -> RelPath.Chunk.t =
  let drop_did _did x = x in
  poly ~into_n:drop_did ~into_d:drop_did

(** Same as [view_name], but every name and disambiguator in the chunk is
    paired with the [Explicit_def_id.t] it originates from. *)
let view_name_did : Explicit_def_id.t -> _ RelPath.Chunk.poly =
  let tag did x = (did, x) in
  poly ~into_n:tag ~into_d:tag

(** Computes the view of an explicit definition identifier: the longest
    all-modules prefix becomes [mod_path], the rest becomes [rel_path]. *)
let of_def_id (did : Explicit_def_id.t) : t =
  (* We distinguish between:
     - a chain of identifiers that have a relation with each other (e.g. if `k::E::C` is a constructor and `k::E` a enum)
     - a chain of identifiers that have no relation (e.g. `k::f` and `k::f::g` are both functions).
  *)
  (* This distinguishing is implemented by `poly` (or `view_name_did` and `view_name`) *)
  (* From `poly`, we can inspect the root of the chain of identifiers, e.g. `k::E` is the root of `k::E::C`. *)
  let ns_chunks, rel_path =
    (* [find] walks from [did] towards the crate root, accumulating relational
       chunks in [name_chunks]. It stops when [did] and all its parents are
       modules: those remaining parents (reversed, outermost first) form the
       module path. *)
    let rec find name_chunks (did : Explicit_def_id.t) =
      let is_mod did =
        [%matches? (Types.Mod : Types.def_kind)]
          (Explicit_def_id.to_def_id did).kind
      in
      (let*? _did_is_a_mod = is_mod did in
       let parents = Explicit_def_id.parents did in
       let*? _parents_all_mods = List.for_all ~f:is_mod parents in
       Some (List.rev parents, name_chunks))
      |> Option.value_or_thunk ~default:(fun _ ->
             (* [did] is not (only) a module prefix: compute its relational
                chunk, then continue from the parent of that chunk's root
                (e.g. for `k::E::C`, the root is `k::E`, so we continue from
                `k`). *)
             let view = view_name_did did in
             let did =
               view |> RelPath.Chunk.map_poly fst fst |> RelPath.Chunk.root
             in
             let name_chunks =
               RelPath.Chunk.map_poly snd snd view :: name_chunks
             in
             match Explicit_def_id.parent did with
             | None -> ([], name_chunks)
             | Some did -> find name_chunks did)
    in
    find [] did
  in
  (* The module path starts with the crate name (with a zero disambiguator),
     followed by the name of each module of [ns_chunks]. *)
  let mod_path : DisambiguatedString.t list =
    { data = (Explicit_def_id.to_def_id did).krate; disambiguator = Int64.zero }
    :: List.map
         ~f:(fun (m : Explicit_def_id.t) ->
           match (Explicit_def_id.to_def_id m).path |> List.last_exn with
           | Types.{ disambiguator; data = TypeNs data } ->
               DisambiguatedString.{ data; disambiguator }
           | _ ->
               broken_invariant
                 "A `Mod` identifier must a `TypeNs` as its last path" m)
         ns_chunks
  in
  (* This is a hack: we remove a prefix that we add in
     https://github.com/cryspen/hax/blob/02d67770f2626e4bb27fc2a1ba9cfe612819d4a8/hax-lib/macros/src/implementation.rs#L897 *)
  let mod_path =
    List.filter mod_path ~f:(fun ds ->
        String.is_prefix ds.data ~prefix:"hax__autogenerated_refinement_" |> not)
  in
  { rel_path; mod_path }


================================================
FILE: engine/lib/concrete_ident/concrete_ident_view.mli
================================================
(* Public interface of the concrete-identifier view: re-exports the view types
   ([DisambiguatedString], [ModPath], [RelPath], [t], ...) and exposes the view
   computation. *)
include module type of Concrete_ident_view_types

val of_def_id : Explicit_def_id.t -> t
(** Computes a view for an explicit definition identifier. *)


================================================
FILE: engine/lib/concrete_ident/concrete_ident_view_types.ml
================================================
open! Prelude

(** This module defines the view over concrete identifiers.

    Hax manipulates concrete identifiers (that is global identifiers referring
    to concrete Rust items -- not built-in operators) as raw Rust identifiers
    augmented with some metadata.

    Rust represents identifiers as a crate and a path. Each chunk of the path
    roughly corresponds to a level of nesting in Rust. The path lacks
    information about definition kinds.

    There are two kinds of nesting for items.
    - Comfort: e.g. the user decides to embed a struct within a function to work
      with it locally.
    - Relational: e.g. an associated method has to be under a trait, or a field
      has to be under a constructor.

    This module provides a view to those paths: a path in the view is a list of
    smaller relational paths. For instance, consider the following piece of
    code:

    {@rust[
      mod a {
          impl MyTrait for MyType {
              fn assoc_fn() {
                  struct LocalStruct {
                      field: u8,
                  };
              }
          }
      }
    ]}

    Here, the raw Rust definition identifier of [field] is roughly
    [my_crate::a::{impl}::assoc_fn::LocalStruct::field] (the impl block itself
    is nameless).

    The view for [field] looks like:
    [{ { path: ["my_crate"; "a"], name_path: [ `AssociatedItem ("assoc_fn", `Impl
     0); `Field ("field", `Constructor ("LocalStruct", `Struct "LocalStruct")) ]
     } }] *)

type disambiguator = Int64.t
[@@deriving show, hash, compare, sexp, hash, eq, map]
(** An [Int64.t] disambiguator: this is given by Rust. *)

(** A string with a disambiguator. *)
module DisambiguatedString = struct
  module T = struct
    type t = { disambiguator : disambiguator; data : string }
    [@@deriving show, hash, compare, sexp, hash, eq, map]
  end

  include T
  include Base.Comparator.Make (T)

  (* Wraps a plain string with the zero disambiguator. *)
  let pure data = { disambiguator = Int64.zero; data }
end

(** A "module and crate"-only path. This is the longest `mod` suffix of a
    definition identifier path. This is a list of disambiguated strings. *)
module ModPath = struct
  module T = struct
    open struct
      module T = struct
        type t = DisambiguatedString.t list
        [@@deriving show, hash, compare, sexp, hash, eq]
      end
    end

    include T
    include Base.Comparator.Make (T)
  end

  include T
  module Map = Map.M (T)

  (** If the crate name (the head of [mod_path]) equals [original_name],
      replaces it with [new_name]; otherwise the path is returned unchanged. *)
  let rename_crate (original_name : string) (new_name : string) (mod_path : t) :
      t =
    match mod_path with
    | crate :: rest when String.equal crate.data original_name ->
        { crate with data = new_name } :: rest
    | _ -> mod_path
end

(** A relational path is a path composed of relational chunks. *)
module RelPath = struct
  (** A relational chunk is a short path describing "mandatory" nestings between
      items: e.g. a field below a struct, an enum below an enum variants, etc.

      The types defined by this module are indexed by two other types: ['name]
      and ['disambiguator]. This helps for instrumenting the view to perform
      additional operations: see [collect_either], [collect] and [root]. *)
  module Chunk = struct
    type 'name type_definition =
      [ `Enum of 'name | `Struct of 'name | `Union of 'name ]
    (** A type can be an enum, a struct or a union. A type is standalone: it has
        no mandatory parent item. *)

    and 'name constructor = [ `Constructor of 'name * 'name type_definition ]
    (** A constructor always has a parent type definition. *)

    and 'name maybe_associated = [ `Fn of 'name | `Const of 'name ]
    [@@deriving show, hash, compare, sexp, hash, eq, map]
    (** Helper type for function and constants: those exist both as associated
        in an impl block or a trait, and as standalone. *)

    type 'name associated = [ 'name maybe_associated | `Type of 'name ]
    (** An associated item. This is pulled out of [`AssociatedItem] below:
        otherwise, some PPX is broken... *)

    and ('name, 'disambiguator) assoc_parent =
      [ `Impl of
        'disambiguator * [ `Inherent | `Trait ] * Types.impl_infos option
      | `Trait of 'name * [ `Alias ] option ]
    [@@deriving show, hash, compare, sexp, hash, eq, map]
    (** The parent of an associated item can be an impl or a trait. *)

    type ('name, 'disambiguator) poly =
      [ 'name type_definition
      | 'name constructor
      | 'name maybe_associated
      | ('name, 'disambiguator) assoc_parent
      | `Use of 'disambiguator
      | `AnonConst of 'disambiguator
      | `InlineConst of 'disambiguator
        (** This is e.g.: {[
            const {
                fn f() {}
            }
          ]} 
          Here, `f` is under an `InlineConst`.
          *)
      | `TraitAlias of 'name
      | `Foreign of 'disambiguator
      | `ForeignTy of 'name
      | `TyAlias of 'name
      | `ExternCrate of 'name
      | `Opaque of 'disambiguator
        (** This is e.g.: {[
          fn f() -> impl Clone {}
          fn g() {
            f();
          }
        ]} 
        Here, the type of `f()` is an opaque type (the anonymous type hidden
        behind [impl Clone]), named only by a disambiguator.
        *)
      | `Static of 'name
      | `Macro of 'name
      | `AssociatedItem of
        'name associated * ('name, 'disambiguator) assoc_parent
      | `Mod of 'name
      | `GlobalAsm of 'disambiguator
      | `Field of 'name * 'name constructor
      | `Closure of 'disambiguator
        (** We usually never refer to closure: in THIR, we inline closures.
            However, items can be placed under closures, thus it is present
            here. See #1450 for more details. *) ]
    [@@deriving show, hash, compare, sexp, hash, eq, map]
    (** [poly] is the (polymorphic) type for a relational chunk: it defines what
        is a chunk. *)

    type t = (DisambiguatedString.t, disambiguator) poly
    [@@deriving show, hash, compare, sexp, hash, eq]
    (** [t] is the natural instantiation of [poly]. *)

    (** Transforms a [t] into a [poly] with annotated strings instead of just
        disambiguators. This adds names to the disambiguator-only constructs
        defined in [poly].

        Fixed: the default label for foreign modules was misspelled
        ["foregin"]; it now reads ["foreign"].

        NOTE(review): the disambiguator-only constructs without a dedicated
        optional argument ([`Use], [`InlineConst], [`Opaque], [`Closure]) all
        fall back to the [impl] label below — confirm this is intended. *)
    let add_strings ?(impl = "impl") ?(anon_const = "anon_const")
        ?(foreign = "foreign") ?(global_asm = "global_asm") (n : t) :
        (DisambiguatedString.t, DisambiguatedString.t) poly =
      (* Every disambiguator first receives the [impl] label; the cases below
         then patch the constructs that have their own label. *)
      let f disambiguator =
        DisambiguatedString.{ disambiguator; data = impl }
      in
      match map_poly Fn.id f n with
      | `AnonConst o -> `AnonConst { o with data = anon_const }
      | `Foreign o -> `Foreign { o with data = foreign }
      | `GlobalAsm o -> `GlobalAsm { o with data = global_asm }
      | n -> n

    (** Erases names from a [t]. *)
    let only_disambiguators : t -> (disambiguator, disambiguator) poly =
      map_poly DisambiguatedString.(fun ds -> ds.disambiguator) Fn.id

    (** Collects all the data of a [t], from the child to the parent. *)
    let rec collect_either :
        'n 'd. ('n, 'd) poly -> [ `N of 'n | `D of 'd ] list = function
      | `Opaque n
      | `GlobalAsm n
      | `AnonConst n
      | `InlineConst n
      | `Impl (n, _, _)
      | `Use n
      | `Closure n
      | `Foreign n ->
          [ `D n ]
      | `Static n
      | `Macro n
      | `Enum n
      | `Struct n
      | `Union n
      | `TyAlias n
      | `TraitAlias n
      | `Fn n
      | `Const n
      | `Trait (n, _)
      | `ExternCrate n
      | `Mod n
      | `ForeignTy n ->
          [ `N n ]
      | `AssociatedItem ((`Fn a | `Const a | `Type a), b) ->
          `N a :: collect_either (b :> _ poly)
      | `Constructor (a, b) -> `N a :: collect_either (b :> _ poly)
      | `Field (a, b) -> `N a :: collect_either (b :> _ poly)

    (** Same as [collect_either], but works on a [poly] whose ['name] and
        ['disambiguator] happen to be the same type. *)
    let collect : 'a. ('a, 'a) poly -> 'a list =
     fun n -> collect_either n |> List.map ~f:(function `D v | `N v -> v)

    (** Find the root of a [poly]. *)
    let root : 'a. ('a, 'a) poly -> 'a = fun x -> collect x |> List.last_exn
  end

  type t = Chunk.t list [@@deriving show, hash, compare, sexp, hash, eq]
end

type t = { mod_path : ModPath.t; rel_path : RelPath.t }
[@@deriving show, hash, compare, sexp, hash, eq]
(** A view: the module path of an identifier together with its relational path.
    Invariant: [rel_path] is non-empty. *)


================================================
FILE: engine/lib/concrete_ident/explicit_def_id.ml
================================================
open! Prelude

module T = struct
  type t = { is_constructor : bool; def_id : Types.def_id_contents }
  [@@deriving show, yojson, sexp]

  (* Canonical representation used by equality, comparison and hashing: the
     constructor flag, the crate and the path. Fields of [def_id] absent from
     [repr] (parent, kind, ...) are ignored by [equal]/[compare]/[hash]. *)
  type repr = bool * string * Types.disambiguated_def_path_item list
  [@@deriving hash, compare, eq]

  let to_repr { is_constructor; def_id } =
    (is_constructor, def_id.krate, def_id.path)

  let hash t = hash_repr (to_repr t)
  let hash_fold_t s t = hash_fold_repr s (to_repr t)
  let equal a b = equal_repr (to_repr a) (to_repr b)
  let compare a b = compare_repr (to_repr a) (to_repr b)
end

include T

(** Helpers for dealing with Rust raw [Types.def_id]s *)
module H = struct
  (* Unwraps the node wrapper around a [def_id]'s contents. *)
  let contents (did : Types.def_id) = did.contents.value

  (** Helper to get the parent of a [Types.def_id_contents] *)
  let parent (did : Types.def_id_contents) : Types.def_id_contents option =
    match did.parent with None -> None | Some p -> Some (contents p)
end

(** A pure, def_id_contents version of [of_def_id]. This is not exposed
    publicly. *)
let pure_of_def_id ?constructor (def_id : Types.def_id_contents) : t option =
  (* Reject crate roots (empty path): a [t] always points to an item. *)
  let* _not_crate_root = def_id.path |> List.last in
  let path_without_ctor =
    (* Get rid of extra [Ctor] *)
    let* init, last = last_init def_id.path in
    let*? _ = [%matches? (Types.Ctor : Types.def_path_item)] last.data in
    Some init
  in
  (* When a trailing [Ctor] chunk was dropped, also skip one level of parent
     so the parent chain stays consistent with the shortened path. *)
  let parent = def_id.parent in
  let parent =
    if Option.is_some path_without_ctor then
      let* parent = parent in
      (H.contents parent).parent
    else parent
  in
  let path = Option.value path_without_ctor ~default:def_id.path in
  let def_id = { def_id with parent; path } in
  (* An explicit [Ctor] chunk forces the identifier to be a constructor. *)
  let constructor =
    if Option.is_some path_without_ctor then Some true else constructor
  in
  (* For [Struct]/[Union] kinds the caller must state whether the identifier
     denotes the type or its constructor: refuse to guess. *)
  let*? _constructor_provided_if_union_or_struct =
    not
      (Option.is_none constructor
      && [%matches? (Union | Struct : Types.def_kind)] def_id.kind)
  in
  let is_constructor =
    [%matches? (Variant : Types.def_kind)] def_id.kind
    || [%matches? Some true] constructor
  in
  Some { is_constructor; def_id }

module State = struct
  (* Global registry of every explicit def-id constructed so far. *)
  let state = Hash_set.create (module T)

  (* Builds the identifier and, on success, records it in [state]. *)
  let of_def_id' ?constructor def_id_contents =
    pure_of_def_id ?constructor def_id_contents
    |> Option.map ~f:(fun did ->
           Hash_set.add state did;
           did)

  let of_def_id ?constructor def_id =
    of_def_id' ?constructor (H.contents def_id)

  let list_all () = Hash_set.to_list state
end

(* Public smart constructor; registers the identifier in [State] as a side
   effect. *)
let of_def_id = State.of_def_id

(** Same as [of_def_id], but raises if the conversion is not possible. *)
let of_def_id_exn ?constructor def_id =
  Option.value_exn (of_def_id ?constructor def_id)

(** Parent of an identifier, if any. Crate roots are not valid [t]s, so a
    top-level item has no parent. The parent of a [Field] is tagged as a
    constructor. *)
let parent (did : t) : t option =
  match H.parent did.def_id with
  | Some p when not (List.is_empty p.path) ->
      let constructor = [%matches? (Field : Types.def_kind)] did.def_id.kind in
      State.of_def_id' ~constructor p
  | _ -> None

(** [did] followed by all of its ancestors, innermost first. *)
let rec parents (did : t) =
  match parent did with None -> [ did ] | Some p -> did :: parents p

(* Projections out of [t]. *)
let to_def_id { def_id; _ } = def_id
let is_constructor { is_constructor; _ } = is_constructor

(** Is the last path chunk of [did] an anonymous associated type? *)
let is_anon_assoc_ty did =
  match
    (List.last (to_def_id did).path : Types.disambiguated_def_path_item option)
  with
  | Some { data = AnonAssocTy _; _ } -> true
  | _ -> false

(** Stateful store that maps [def_id]s to implementation information (which
    trait is implemented? for which type? under which constraints?) *)
module ImplInfoStore = struct
  (* [None] until [init] is called. *)
  let state : (Types.def_id_contents, Types.impl_infos) Hashtbl.t option ref =
    ref None

  module T = struct
    type t = Types.def_id_contents [@@deriving show, compare, sexp, eq, hash]
  end

  let init (impl_infos : (Types.def_id * Types.impl_infos) list) =
    let table =
      impl_infos
      |> List.map ~f:(fun ((id : Types.def_id), infos) ->
             (id.contents.value, infos))
      |> Hashtbl.of_alist_multi (module T)
      (* A def-id may appear several times: keep the head of the accumulated
         list for each key. *)
      |> Hashtbl.map ~f:List.hd_exn
    in
    state := Some table

  let get_state () =
    match !state with
    | None -> failwith "ImplInfoStore was not initialized"
    | Some state -> state

  (** Given a [id] of type [def_id], [find id] will return [Some impl_info] when
      [id] is an (non-inherent[1]) impl. [impl_info] contains information about
      the trait being implemented and for which type.

      [1]:
      https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations
  *)
  let find k = Hashtbl.find (get_state ()) k

  let lookup_raw (impl_def_id : t) : Types.impl_infos option =
    find (to_def_id impl_def_id)
end

module ToRustAST = struct
  module A = Types
  module B = Rust_engine_types

  (* Recursively converts a THIR def-id (including its parent chain) into the
     Rust-engine AST representation. *)
  let rec def_id_contents_to_rust_ast
      ({ krate; path; parent; kind; _ } : A.def_id_contents) : B.def_id =
    let convert_parent (o : A.def_id) =
      def_id_contents_to_rust_ast o.contents.value
    in
    { krate; path; parent = Option.map ~f:convert_parent parent; kind }

  let to_rust_ast ({ is_constructor; def_id } : t) : B.explicit_def_id =
    { is_constructor; def_id = def_id_contents_to_rust_ast def_id }
end

module FromRustAST = struct
  module A = Rust_engine_types
  module B = Types

  (* Inverse of [ToRustAST]: rebuilds a THIR def-id. Fields that the
     Rust-engine representation does not carry (node id, index, locality) are
     filled in with dummy values. *)
  let rec def_id_contents_to_rust_ast
      ({ krate; path; parent; kind; _ } : A.def_id) : B.def_id_contents =
    let wrap (o : A.def_id) : B.def_id =
      let contents : B.node_for__def_id_contents =
        { value = def_id_contents_to_rust_ast o; id = Int64.zero }
      in
      { contents }
    in
    {
      krate;
      path;
      parent = Option.map ~f:wrap parent;
      kind;
      index = (Int64.zero, Int64.zero, None);
      is_local = false;
    }

  let to_rust_ast ({ is_constructor; def_id } : A.explicit_def_id) : t =
    { is_constructor; def_id = def_id_contents_to_rust_ast def_id }
end

(* Flat re-exports of the Rust-engine AST conversion helpers. *)
let def_id_to_rust_ast = ToRustAST.def_id_contents_to_rust_ast
let def_id_from_rust_ast = FromRustAST.def_id_contents_to_rust_ast
let to_rust_ast = ToRustAST.to_rust_ast
let from_rust_ast = FromRustAST.to_rust_ast


================================================
FILE: engine/lib/concrete_ident/explicit_def_id.mli
================================================
open! Prelude

(** An [ExplicitDefId.t] is a Rust [Types.def_id] tagged with some disambiguation metadata.
    Explicit definition identifiers are used internally by the concrete names of hax.

    Rust raw [Types.def_id] can be ambiguous: consider the following Rust code:
    ```rust
    struct S;
    fn f() -> S { S }
    ```
    Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refer to the exact same identifier `mycrate::S`.
    Yet, they denote two very different objects: a type versus a constructor.

    [ExplicitDefId.t] clears up this ambiguity, making constructors and types two separate things.

    Also, an [ExplicitDefId.t] always points to an item: an [ExplicitDefId.t] is never pointing to a crate alone.
*)

type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]
(** Representation of explicit definition identifiers. *)

val of_def_id : ?constructor:bool -> Types.def_id -> t option
(** Smart constructor for [t]. Creates an explicit def id out of a raw Rust
    definition identifier [Types.def_id].

    When [of_def_id] is called with [id] a [Types.def_id], if the [kind] of [id]
    is either [Struct] or [Union], then [constructor] is mandatory. Otherwise,
    the argument [constructor] should be [true] only if [id] is a variant.

    [of_def_id] shall not be called on a Rust identifier pointing to a crate
    root.

    This function returns [Some] only when those conditions are met. *)

val of_def_id_exn : ?constructor:bool -> Types.def_id -> t
(** Exception-throwing variant of [of_def_id]. This should be used when we know
    statically that the conditions described in the documentation of [of_def_id]
    are met.

    For instance, with static [Types.def_id]s or in [Import_thir]. *)

val is_constructor : t -> bool
(** Checks whether a definition identifier [id] points to a constructor.

    [is_constructor id] returns [true] when:
    - the kind of [id] is [Struct] or [Union] and the identifier was tagged as a
      constructor;
    - the kind of [id] is [Variant]. Otherwise, [is_constructor id] returns
      [false]. *)

val is_anon_assoc_ty : t -> bool
(** Returns true if the ident represents an anonymous associated type. *)

val parent : t -> t option
(** Looks up the parent of a definition identifier. Note that the parent of the
    identifier of a field is always a constructor.

    Also, a top-level item (e.g. `my_crate::some_item`) has no parent: recall
    that [t] represents only items, not crates. *)

val parents : t -> t list
(** Ordered list of parents for an identifier [id], starting with [id], up to
    the top-most parent identifier. *)

val to_def_id : t -> Types.def_id_contents
(** Destructor for [t]. *)

module State : sig
  val list_all : unit -> t list
  (** List all identifiers the engine dealt with so far. Beware, this function
      is stateful. *)
end

module ImplInfoStore : sig
  val init : (Types.def_id * Types.impl_infos) list -> unit

  val lookup_raw : t -> Types.impl_infos option
  (** Lookup the (raw[1]) implementation information given a concrete ident.
      Returns `Some _` if and only if the supplied identifier points to an
      `Impl`.

      [1]: those are raw THIR types.

      {b WARNING}: due to
      {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up
      certain identifiers generated by the engine, this function may return
      [None] even though the supplied identifier points to an [Impl] block. *)
end

val def_id_to_rust_ast : Types.def_id_contents -> Types.def_id_inner
val def_id_from_rust_ast : Types.def_id_inner -> Types.def_id_contents
val to_rust_ast : t -> Rust_engine_types.explicit_def_id
val from_rust_ast : Rust_engine_types.explicit_def_id -> t


================================================
FILE: engine/lib/concrete_ident/impl_infos.ml
================================================
open! Prelude

type t = {
  trait_goal : Ast.Rust.trait_goal option;
      (** The trait implemented by the [impl] block or [None] if the [impl]
          block is an
          {{:https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations}
           inherent [impl]}. *)
  typ : Ast.Rust.ty;  (** The type implemented by the [impl] block. *)
  clauses : Ast.Rust.trait_goal list;
      (** The clauses that constrain this [impl] block. *)
}
(** Metadata of an [impl] block, imported into hax's AST (see [lookup]). *)

(** Lookup the implementation information given a concrete ident. Returns
    [Some _] if and only if the supplied identifier points to an [Impl].

    {b WARNING}: due to
    {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up
    certain identifiers generated by the engine, this function may return [None]
    even though the supplied identifier points to an [Impl] block. *)
let lookup span (impl : Concrete_ident.t) : t option =
  match Concrete_ident.ImplInfoStore.lookup_raw impl with
  | None -> None
  | Some Types.{ generics = _; clauses; typ; trait_ref } ->
      let trait_goal =
        Option.map trait_ref ~f:(Import_thir.import_trait_ref span)
      in
      let typ = Import_thir.import_ty span typ in
      let clauses =
        (* Each clause carries its own span, which shadows the [span]
           parameter on purpose. Only [GCType] constraints are kept. *)
        List.filter_mapi clauses ~f:(fun i ((binder : Types.clause), span) ->
            Import_thir.import_clause span i binder)
        |> List.filter_map ~f:(fun (c : Ast.Rust.generic_constraint) ->
               match c with GCType i -> Some i.goal | _ -> None)
      in
      Some { trait_goal; typ; clauses }


================================================
FILE: engine/lib/concrete_ident/thir_simple_types.ml
================================================
open! Prelude
module View = Concrete_ident_view

(** Interprets a type as a "simple type". A simple type is a type for which, in
    a given scope, we can give a non-ambiguous string identifier.

    This is useful for naming local impls.

    Examples of "simple" types:
    - primitive types (e.g. u8, u16)
    - enums/structs/unions defined in [namespace], when:

    + all their generic arguments are instantiated to a simple type

    - a reference to a simple type
    - a slice to a simple type
    - a tuple of simple types of arity zero (e.g. no ADTs of non-zero arity) *)
let to_string ~(namespace : View.ModPath.t) :
    Types.node_for__ty_kind -> string option =
  (* Escapes occurrences of the [_of_] separator inside a name so that the
     separator inserted by [apply] below stays unambiguous: [_of_] becomes
     [_e_of_], [_e_of_] becomes [_e_e_of_], etc. (Not idempotent!) *)
  let escape =
    let re = Re.Pcre.regexp "_((?:e_)*)of_" in
    let f group = "_e_" ^ Re.Group.get group 1 ^ "of_" in
    Re.replace ~all:true re ~f
  in
  (* Names an ADT when it is a plain (zero-disambiguator) enum/struct/union
     defined directly in [namespace]. The returned name is already escaped. *)
  let adt def_id =
    let* def_id = Explicit_def_id.of_def_id ~constructor:false def_id in
    let view = View.of_def_id def_id in
    let* () =
      [%equal: View.ModPath.t] view.mod_path namespace |> some_if_true
    in
    let* last = expect_singleton view.rel_path in
    let* name =
      match last with
      | (`Struct d | `Union d | `Enum d)
        when Int64.(equal (of_int 0) d.disambiguator) ->
          Some d.data
      | _ -> None
    in
    escape name |> Option.some
  in
  (* Nullary simple types: primitives, the unit tuple, and ADTs applied to no
     generic arguments. *)
  let arity0 (ty : Types.node_for__ty_kind) =
    match ty.Types.value with
    | Bool -> Some "bool"
    | Char -> Some "char"
    | Str -> Some "str"
    | Never -> Some "never"
    | Int Isize -> Some "isize"
    | Int I8 -> Some "i8"
    | Int I16 -> Some "i16"
    | Int I32 -> Some "i32"
    | Int I64 -> Some "i64"
    | Int I128 -> Some "i128"
    | Uint Usize -> Some "usize"
    | Uint U8 -> Some "u8"
    | Uint U16 -> Some "u16"
    | Uint U32 -> Some "u32"
    | Uint U64 -> Some "u64"
    | Uint U128 -> Some "u128"
    | Float F32 -> Some "f32"
    | Float F64 -> Some "f64"
    | Tuple { value = { generic_args = []; _ }; _ } -> Some "unit"
    | Adt { value = { def_id; generic_args = []; _ }; _ } ->
        (* [adt] already escapes the name: do not escape a second time. The
           previous [Option.map ~f:escape (adt def_id)] double-escaped,
           turning [_of_] into [_e_e_of_] here versus [_e_of_] in [arity1],
           yielding inconsistent names for type names containing [_of_]. *)
        adt def_id
    | _ -> None
  in
  let apply left right = left ^ "_of_" ^ right in
  (* Unary simple types: slices, references and one-argument ADTs rendered as
     [head_of_argument]; tuples of nullary simple types; or any nullary
     simple type. *)
  let rec arity1 (ty : Types.node_for__ty_kind) =
    match ty.value with
    | Slice { value = { generic_args = [ Type sub ]; _ }; _ } ->
        arity1 sub |> Option.map ~f:(apply "slice")
    | Ref (_, sub, _) -> arity1 sub |> Option.map ~f:(apply "ref")
    | Adt { value = { def_id; generic_args = [ Type arg ]; _ }; _ } ->
        let* adt = adt def_id in
        let* arg = arity1 arg in
        Some (apply adt arg)
    | Tuple { value = { generic_args; _ }; _ } ->
        let* l =
          List.map
            ~f:(fun (arg : Types.generic_arg) ->
              match arg with Type ty -> arity0 ty | _ -> None)
            generic_args
          |> Option.all
        in
        Some ("tuple_" ^ String.concat ~sep:"_" l)
    | _ -> arity0 ty
  in
  arity1


================================================
FILE: engine/lib/dependencies.ml
================================================
open! Prelude

module Make (F : Features.T) = struct
  module AST = Ast.Make (F)
  module U = Ast_utils.Make (F)
  open Ast
  open AST

  (** Get the identifier of an item (its [ident] field). *)
  let ident_of (item : item) : Concrete_ident.t = item.ident

  (** Get all the identifiers declared under an item. This includes the
      identifier of the item itself, but also of any sub-item: for instance,
      associated items within an impl. *)
  let idents_of (item : item) : Concrete_ident.t list =
    (* A field is "anonymous" when its name parses as an integer.
       NOTE(review): presumably tuple-struct positional fields ("0", "1", ...);
       the number is read from the last component of the view's [mod_path] —
       confirm fields are viewed this way. *)
    let is_field_anonymous ident =
      match List.last (Concrete_ident.to_view ident).mod_path with
      | Some { data = n; _ } -> Option.is_some (Int.of_string_opt n)
      | _ -> false
    in
    ident_of item
    ::
    (match item.v with
    | Type { variants; _ } ->
        List.concat_map
          ~f:(fun variant ->
            (* Keep named fields only; anonymous (numeric) fields are
               filtered out. *)
            let fields =
              List.map ~f:fst3 variant.arguments
              |> List.filter ~f:(not << is_field_anonymous)
            in

            variant.name :: fields)
          variants
    | Trait { items; _ } -> List.map ~f:(fun item -> item.ti_ident) items
    | Impl { items; _ } -> List.map ~f:(fun item -> item.ii_ident) items
    | _ -> (* No sub items *) [])

  (* A namespace is the module path of an identifier's view. *)
  module Namespace = struct
    include Concrete_ident.View.ModPath
    module Set = Set.M (Concrete_ident.View.ModPath)

    let of_concrete_ident ci : t = (Concrete_ident.to_view ci).mod_path

    (* Renders a namespace, e.g. ["a"; "b"] as "a::b". *)
    let to_string ?(sep = "::") : t -> string =
     fun ns ->
      List.map ns ~f:(fun (o : Concrete_ident_view.DisambiguatedString.t) ->
          o.data)
      |> String.concat ~sep
  end

  (* Error-reporting machinery scoped to the dependency-analysis phase. *)
  module Error : Phase_utils.ERROR = Phase_utils.MakeError (struct
    let ctx = Diagnostics.Context.Dependencies
  end)

  module Attrs = Attr_payloads.Make (F) (Error)

  (* Given all crate items, builds a function from an item's attributes to the
     items associated with it through UIDs. NOTE(review): relies on
     [raw_associated_item] and [try_item_of_uid] from [Attrs.WithItems] —
     presumably resolving UID attribute payloads back to items; confirm
     against [Attr_payloads]. *)
  let uid_associated_items (items : item list) : attrs -> item list =
    let open Attrs.WithItems (struct
      let items = items
    end) in
    raw_associated_item >> List.filter_map ~f:(snd >> try_item_of_uid)

  module ItemGraph = struct
    (* Dependency graph over concrete identifiers. *)
    module G = Graph.Persistent.Digraph.Concrete (Concrete_ident)

    (* Same graph structure over [int] vertices. NOTE(review): [empty] is
       wrapped as a function, presumably because one of the functors below
       requires [empty : unit -> t] rather than a constant — confirm against
       ocamlgraph's signatures. *)
    module GInt = struct
      include Graph.Persistent.Digraph.Concrete (Int)

      let empty () = empty
    end

    module Topological = Graph.Topological.Make_stable (GInt)
    module Map_G_GInt = Graph.Gmap.Edge (G) (GInt)
    module Oper = Graph.Oper.P (G)

    (* Collects every concrete identifier a single item refers to, by running
       the [collect_concrete_idents] reducer over the relevant sub-structures
       of each item kind. Note that [@] is shadowed to mean set union here,
       and that the item's own name is deliberately skipped ([name = _]). *)
    let vertices_of_item (i : item) : G.V.t list =
      let ( @ ) = Set.union in
      let v = U.Reducers.collect_concrete_idents in
      let concat_map f =
        List.map ~f >> Set.union_list (module Concrete_ident)
      in
      let set =
        match i.v with
        | Fn { name = _; generics; body; params; _ } ->
            v#visit_generics () generics
            @ v#visit_expr () body
            @ concat_map (v#visit_param ()) params
        | TyAlias { name = _; generics; ty } ->
            v#visit_generics () generics @ v#visit_ty () ty
        | Type { name = _; generics; variants; is_struct = (_ : bool) } ->
            v#visit_generics () generics
            @ concat_map (v#visit_variant ()) variants
        | IMacroInvokation { macro; argument = (_ : string); span; witness = _ }
          ->
            v#visit_concrete_ident () macro @ v#visit_span () span
        | Trait { name = _; generics; items; safety = _ } ->
            v#visit_generics () generics
            @ concat_map (v#visit_trait_item ()) items
        | Impl { generics; self_ty; of_trait; items; parent_bounds; safety = _ }
          ->
            v#visit_generics () generics
            @ v#visit_ty () self_ty
            @ v#visit_concrete_ident () (fst of_trait)
            @ concat_map (v#visit_generic_value ()) (snd of_trait)
            @ concat_map (v#visit_impl_item ()) items
            @ concat_map
                (fun (ie, ii) ->
                  v#visit_impl_expr () ie @ v#visit_impl_ident () ii)
                parent_bounds
        | Alias { name = _; item } -> v#visit_concrete_ident () item
        | Use _ | Quote _ | HaxError _ | NotImplementedYet ->
            Set.empty (module Concrete_ident)
      in
      set |> Set.to_list

    (* Builds the edge list of the dependency graph: one edge from each item
       to every identifier it mentions, plus edges to its UID-associated
       items. *)
    let vertices_of_items ~uid_associated_items (items : item list) : G.E.t list
        =
      let edges_of_item i =
        let attrs = U.Reducers.collect_attrs#visit_item () i in
        let assoc =
          List.map ~f:(fun i -> i.ident) (uid_associated_items attrs)
        in
        List.map (vertices_of_item i @ assoc) ~f:(fun dep -> (i.ident, dep))
      in
      List.concat_map ~f:edges_of_item items

    (* Builds the dependency graph for [items]: every item is a vertex (even
       isolated ones), and edges come from [vertices_of_items]. UID
       associations are resolved against [original_items]. *)
    let of_items ~original_items (items : item list) : G.t =
      let g =
        List.fold items ~init:G.empty ~f:(fun g item ->
            G.add_vertex g (ident_of item))
      in
      let uid_associated_items = uid_associated_items original_items in
      vertices_of_items ~uid_associated_items items
      |> List.fold ~init:g ~f:(fun g (src, dst) -> G.add_edge g src dst)

    (* Depth-first reachability from [selection] in [g]; the [reached] set
       doubles as the visited set and the result. Selected identifiers that
       are not vertices of [g] are ignored. *)
    let transitive_dependencies_of (g : G.t) (selection : Concrete_ident.t list)
        : Concrete_ident.t Hash_set.t =
      let reached = Hash_set.create (module Concrete_ident) in
      let rec visit v =
        if not (Hash_set.mem reached v) then (
          Hash_set.add reached v;
          G.iter_succ visit g v)
      in
      selection |> List.filter ~f:(G.mem_vertex g) |> List.iter ~f:visit;
      reached

    (* Restricts [items] to those reachable (transitively) from [selection]
       in the dependency graph. *)
    let transitive_dependencies_of_items ~original_items (items : item list)
        ?(graph = of_items ~original_items items)
        (selection : Concrete_ident.t list) : item list =
      let reachable = transitive_dependencies_of graph selection in
      List.filter items ~f:(fun item -> Hash_set.mem reachable (ident_of item))

    module MutRec = struct
      module Bundle = struct
        (* A bundle: a group of items that are mutually recursive with each
           other. *)
        type t = concrete_ident list

        let namespaces_of : t -> Namespace.Set.t =
          List.map ~f:Namespace.of_concrete_ident
          >> Set.of_list (module Namespace)

        (* Does the whole bundle live in a single namespace? *)
        let homogeneous_namespace (ns : t) : bool =
          Set.length (namespaces_of ns) <= 1
      end

      type t = {
        mut_rec_bundles : Bundle.t list;
        non_mut_rec : concrete_ident list;
      }

      module SCC = Graph.Components.Make (G)

      (* Partitions the strongly connected components of [g]: a singleton
         component without a self-edge is not recursive at all; any other
         component is a mutually-recursive bundle. *)
      let of_graph (g : G.t) : t =
        let is_mut_rec_with_itself x = G.mem_edge g x x in
        let mut_rec_bundles, non_mut_rec =
          SCC.scc_list g
          |> List.partition_map ~f:(function
               | [] -> failwith "scc_list returned empty cluster"
               | [ x ] when is_mut_rec_with_itself x |> not -> Second x
               | bundle -> First bundle)
        in
        { mut_rec_bundles; non_mut_rec }

      let all_homogeneous_namespace (g : G.t) =
        List.for_all ~f:Bundle.homogeneous_namespace
          (of_graph g).mut_rec_bundles
    end

    (* Bundling of items whose *modules* are cyclically dependent. *)
    module CyclicDep = struct
      module Bundle = struct
        type t = Concrete_ident.t list

        (* Undirected graph over identifiers; connected components group
           identifiers that are (transitively) linked. *)
        module G = Graph.Persistent.Graph.Concrete (Concrete_ident)
        module CC = Graph.Components.Undirected (G)

        let cycles g = CC.components_list g
      end

      (* This is a solution that bundles together everything that belongs to the same module SCC.
         It results in bundles that are much bigger than they could be but is a simple solution
         to the problem described in https://github.com/hacspec/hax/issues/995#issuecomment-2411114404 *)
      let of_mod_sccs (items : item list)
          (mod_graph_cycles : Namespace.Set.t list) : Bundle.t list =
        let item_names = List.map items ~f:(fun x -> x.ident) in
        (* Only module SCCs spanning more than one module need bundling. *)
        let cycles =
          List.filter mod_graph_cycles ~f:(fun set -> Set.length set > 1)
        in
        (* One bundle per cyclic module group: all items living in any of
           the group's namespaces. *)
        let bundles =
          List.map cycles ~f:(fun set ->
              List.filter item_names ~f:(fun item ->
                  Set.mem set (Namespace.of_concrete_ident item)))
        in
        bundles
    end

    (* Graphviz (dot) output for the item graph: vertices are items, grouped
       into dot subgraphs by namespace. [open]ing the functor application
       brings [output_graph] into scope for [print] below. *)
    open Graph.Graphviz.Dot (struct
      include G

      let graph_attributes _ = []
      let default_vertex_attributes _ = []

      (* Quote the full identifier so dot accepts arbitrary characters. *)
      let vertex_name i = "\"" ^ Concrete_ident.show i ^ "\""

      let vertex_attributes i =
        [ `Label (Concrete_ident.DefaultViewAPI.render i).name ]

      (* Place each vertex in a subgraph named after its namespace. *)
      let get_subgraph i =
        let ns = Namespace.of_concrete_ident i in
        let sg_name = Namespace.to_string ~sep:"__" ns in
        let label = Namespace.to_string ~sep:"::" ns in
        let open Graph.Graphviz.DotAttributes in
        Some { sg_name; sg_attributes = [ `Label label ]; sg_parent = None }

      let default_edge_attributes _ = []
      let edge_attributes _ = []
    end)

    (* Writes the dot rendering of the dependency graph of [items] to [oc]. *)
    let print oc items = output_graph oc (of_items ~original_items:items items)
  end

  (* Module-level (namespace-level) dependency graph, obtained by projecting
     the item dependency graph onto namespaces. *)
  module ModGraph = struct
    module G = Graph.Persistent.Digraph.Concrete (Namespace)

    (* Builds the namespace graph of [items]: there is an edge ns1 -> ns2
       whenever some item of ns1 depends on some item of ns2. *)
    let of_items (items : item list) : G.t =
      let ig = ItemGraph.of_items ~original_items:items items in
      (* Every namespace hosting an item is a vertex, even if isolated. *)
      let vertices =
        List.fold items ~init:G.empty ~f:(fun g item ->
            G.add_vertex g (Namespace.of_concrete_ident item.ident))
      in
      (* Group items by namespace, project each item's successors to their
         namespaces, deduplicate, then add all resulting edges. *)
      List.map ~f:(ident_of >> (Namespace.of_concrete_ident &&& Fn.id)) items
      |> Map.of_alist_multi (module Namespace)
      |> Map.map
           ~f:
             (List.concat_map
                ~f:
                  (ItemGraph.G.succ ig
                  >> List.map ~f:Namespace.of_concrete_ident)
             >> Set.of_list (module Namespace)
             >> Set.to_list)
      |> Map.to_alist
      |> List.concat_map ~f:(fun (x, ys) -> List.map ~f:(fun y -> (x, y)) ys)
      |> List.fold ~init:vertices ~f:(G.add_edge >> uncurry)

    module SCC = Graph.Components.Make (G)

    (* Strongly connected components of the namespace graph, as sets. *)
    let cycles g : Namespace.Set.t list =
      SCC.scc_list g |> List.map ~f:(Set.of_list (module Namespace))

    (** Returns the namespaces in topological order *)
    let order g : Namespace.t list =
      let module ModTopo = Graph.Topological.Make_stable (G) in
      ModTopo.fold List.cons g []

    open Graph.Graphviz.Dot (struct
      include G

      let graph_attributes _ = []
      let default_vertex_attributes _ = []
      let vertex_name ns = "\"" ^ Namespace.to_string ns ^ "\""
      let vertex_attributes _ = []
      let get_subgraph _ = None
      let default_edge_attributes _ = []
      let edge_attributes _ = []
    end)

    (* Dot output restricted to the "complicated" namespaces: only those
       involved in a non-trivial SCC, and only the edges between them. *)
    let print oc items =
      let g = of_items items in
      let complicated_ones =
        SCC.scc_list g
        |> List.concat_map ~f:(function [] | [ _ ] -> [] | bundle -> bundle)
      in
      let g =
        List.concat_map
          ~f:(fun ns ->
            List.map
              ~f:(fun y -> (ns, y))
              (G.succ g ns
              |> List.filter
                   ~f:(List.mem ~equal:[%equal: Namespace.t] complicated_ones)))
          complicated_ones
        |> List.fold ~init:G.empty ~f:(G.add_edge >> uncurry)
      in
      output_graph oc g
  end

  (* Renders a list of identifiers as a comma-separated string, for
     debug/log output. *)
  let ident_list_to_string idents =
    String.concat ~sep:", "
      (List.map ~f:Concrete_ident.DefaultViewAPI.show idents)

  (* Topologically sorts [items] by dependency order (dependencies first),
     preserving the original order among independent items, grouping
     mutually recursive items together, and keeping `Quote` items adjacent
     to their origin item. The result is a permutation of the input
     (checked by the final assertion). *)
  let sort (items : item list) : item list =
    (* Mirror the graph so that edges point from dependency to dependent,
       which is the order the topological fold should emit. *)
    let g =
      ItemGraph.of_items ~original_items:items items |> ItemGraph.Oper.mirror
    in
    (* Re-index vertices by the items' original positions so that the
       stable topological sort can break ties by input order. *)
    let stable_g =
      let to_index =
        items
        |> List.mapi ~f:(fun i item -> (item.ident, i))
        |> Map.of_alist_exn (module Concrete_ident)
        |> Map.find
      in
      ItemGraph.Map_G_GInt.filter_map
        (to_index *** to_index >> uncurry Option.both)
        g
    in
    (* Make sure every item has a vertex, even with no edges. *)
    let stable_g =
      List.foldi items ~init:stable_g ~f:(fun i g _ ->
          ItemGraph.GInt.add_vertex g i)
    in
    let items' =
      let items_array = Array.of_list items in
      let lookup (index : int) = items_array.(index) in
      ItemGraph.Topological.fold List.cons stable_g [] |> List.map ~f:lookup
    in
    (* Stable topological sort doesn't guarantee to group cycles together.
       We make this correction to ensure mutually recursive items are grouped. *)
    let items' =
      let cycles =
        ItemGraph.MutRec.SCC.scc_list g
        |> List.filter ~f:(fun cycle -> List.length cycle > 1)
      in
      (* TODO: This can be optimized by using a set or a map
         to avoid traversing all cycles at each iteration. *)
      List.fold items' ~init:[] ~f:(fun acc item ->
          match
            List.find cycles ~f:(fun cycle ->
                List.mem cycle item.ident ~equal:[%eq: concrete_ident])
          with
          (* The item's whole cycle was already emitted: emit nothing. *)
          | Some _
            when List.exists acc ~f:(fun els ->
                     List.mem els item ~equal:[%eq: item]) ->
              [] :: acc
          (* First item of a cycle encountered: emit the whole cycle. *)
          | Some cycle ->
              List.map cycle ~f:(fun ident ->
                  List.find_exn items ~f:(fun item ->
                      [%eq: concrete_ident] item.ident ident))
              :: acc
          | None -> [ item ] :: acc)
      |> List.concat
    in
    (* Quote items must be placed right before or after their origin *)
    let items' =
      let before_quotes, after_quotes, _ =
        List.partition3_map items' ~f:(fun item ->
            match item.v with
            | Quote { origin; _ } -> (
                match origin.position with
                | `Before -> `Fst (origin, item)
                | `After -> `Snd (origin, item)
                | `Replace -> `Trd ())
            | _ -> `Trd ())
      in
      (* Removes the quote from its current position and re-inserts it
         right before (or after) its origin item. *)
      let move_quote before origin quote_item =
        List.concat_map ~f:(fun item ->
            if [%eq: concrete_ident] origin.item_ident item.ident then
              if before then [ quote_item; item ] else [ item; quote_item ]
            else if [%eq: concrete_ident] quote_item.ident item.ident then []
            else [ item ])
      in
      (* Reversed so that several `Before` quotes of one origin keep their
         relative order after successive insertions. *)
      let before_quotes = List.rev before_quotes in
      let items' =
        List.fold before_quotes ~init:items'
          ~f:(fun items' (origin, quote_item) ->
            move_quote true origin quote_item items')
      in
      List.fold after_quotes ~init:items' ~f:(fun items' (origin, quote_item) ->
          move_quote false origin quote_item items')
    in

    (* Sanity check: sorting must neither drop nor duplicate identifiers. *)
    assert (
      let of_list =
        List.map ~f:ident_of >> Set.of_list (module Concrete_ident)
      in
      let items = of_list items in
      let items' = of_list items' in
      Set.equal items items');
    items'

  (** Sort within each namespaces: items are first grouped by namespace, then
      sorted topologically. *)
  let sort_namespace_wise (items : item list) : item list =
    (* Sort each namespace's items independently. *)
    let sorted_by_namespace =
      U.group_items_by_namespace items
      |> Map.data
      |> List.map ~f:(fun items -> sort items)
    in
    (* Emit namespaces in topological order of the module graph, looking up
       each namespace's (already sorted) group by membership.
       NOTE(review): this lookup is linear per namespace and assumes each
       group belongs to exactly one namespace. *)
    let sorted_namespaces = ModGraph.order (ModGraph.of_items items) in
    List.concat_map sorted_namespaces ~f:(fun namespace ->
        List.find sorted_by_namespace ~f:(fun items ->
            List.exists items ~f:(fun item ->
                Namespace.equal
                  (Namespace.of_concrete_ident item.ident)
                  namespace))
        |> Option.value ~default:[])

  (* Applies the user's inclusion/exclusion clauses to [items], in order.
     Returns the retained items together with the set of identifiers whose
     bodies should be dropped (signature-only / shallow inclusion). *)
  let filter_by_inclusion_clauses' ~original_items
      (clauses : Types.inclusion_clause list) (items : item list) :
      item list * Concrete_ident.t Hash_set.t =
    let graph = ItemGraph.of_items ~original_items items in
    let of_list = Set.of_list (module Concrete_ident) in
    (* Start from the full set of item identifiers. *)
    let selection = List.map ~f:ident_of items |> of_list in
    (* Closes a set of identifiers under transitive dependencies. *)
    let deps_of =
      let to_set = Hash_set.to_list >> of_list in
      Set.to_list >> ItemGraph.transitive_dependencies_of graph >> to_set
    in
    let show_ident_set =
      Set.to_list
      >> List.map ~f:Concrete_ident.DefaultViewAPI.show
      >> List.map ~f:(fun s -> " - " ^ s)
      >> String.concat ~sep:"\n"
    in
    (* Renders a clause back into its CLI surface syntax, for logging. *)
    let show_inclusion_clause Types.{ kind; namespace } =
      (match kind with
      | Excluded -> "-"
      | SignatureOnly -> "+:"
      | Included deps_kind -> (
          match deps_kind with
          | Transitive -> "+"
          | Shallow -> "+~"
          | None' -> "+!"))
      ^ "["
      ^ (List.map
           ~f:(function Glob One -> "*" | Glob Many -> "**" | Exact s -> s)
           namespace.chunks
        |> String.concat ~sep:"::")
      ^ "]"
    in
    (* Looks up the `ItemStatus` attribute (if any) attached to an item:
       items marked as included by hax-lib must never be filtered out. *)
    let hax_lib_include =
      let id_to_include =
        Hashtbl.of_alist_exn
          (module Concrete_ident)
          (List.map
             ~f:(fun it ->
               ( it.ident,
                 Attrs.find_unique_attr
                   ~f:(function Types.ItemStatus is -> Some is | _ -> None)
                   it.attrs ))
             items)
      in
      Hashtbl.find id_to_include >> Option.join
    in

    let items_drop_body = Hash_set.create (module Concrete_ident) in
    (* Folds one clause into the current selection. *)
    let apply_clause selection' (clause : Types.inclusion_clause) =
      let matches = Concrete_ident.matches_namespace clause.Types.namespace in
      let matched0 = Set.filter ~f:matches selection in
      (* Per clause kind: whether to pull in dependencies, and whether the
         pulled-in items keep only their signatures. *)
      let with_deps, drop_bodies =
        match clause.kind with
        | Included Transitive -> (true, false)
        | Included Shallow -> (true, true)
        | Included None' -> (false, false)
        | SignatureOnly -> (false, true)
        | Excluded -> (false, false)
      in
      let matched = matched0 |> if with_deps then deps_of else Fn.id in
      if drop_bodies then (
        (* Drop bodies of the dependencies, but keep the bodies of the
           directly matched items themselves. *)
        Set.iter ~f:(Hash_set.add items_drop_body) matched;
        Set.iter ~f:(Hash_set.remove items_drop_body) matched0);
      Logs.info (fun m ->
          m "The clause [%s] will %s the following Rust items:\n%s"
            (show_inclusion_clause clause)
            (match clause.kind with Excluded -> "remove" | _ -> "add")
          @@ show_ident_set matched);
      let set_op =
        match clause.kind with
        | Included _ | SignatureOnly -> Set.union
        | Excluded -> Set.diff
      in
      let result = set_op selection' matched in
      (* Re-add items force-included via hax-lib attributes. *)
      let forced_include =
        selection'
        |> Set.filter
             ~f:
               (hax_lib_include
               >> [%eq: Types.ha_item_status option]
                    (Some (Included { late_skip = false })))
      in
      Set.union forced_include result
    in
    let selection = List.fold ~init:selection ~f:apply_clause clauses in
    Logs.info (fun m ->
        m "The following Rust items are going to be extracted:\n%s"
        @@ show_ident_set selection);
    (List.filter ~f:(ident_of >> Set.mem selection) items, items_drop_body)

  (* Filters [items] according to [clauses]. The clauses are applied twice:
     a first pass determines which items are kept and which keep only their
     signatures; the bodies of the latter are then stripped and the clauses
     are re-applied, since dropping bodies can shrink the dependency set. *)
  let filter_by_inclusion_clauses (clauses : Types.inclusion_clause list)
      (items : item list) : item list =
    let apply = filter_by_inclusion_clauses' ~original_items:items clauses in
    let selection =
      let kept, drop_body_set = apply items in
      (* when one includes only shallow dependencies, we just remove bodies *)
      let stripped =
        List.map kept ~f:(fun item ->
            if Hash_set.mem drop_body_set (ident_of item) then
              U.Mappers.drop_bodies#visit_item () item
            else item)
      in
      let kept', _ = apply stripped in
      Set.of_list (module Concrete_ident) (List.map ~f:ident_of kept')
    in
    List.filter items ~f:(fun item -> Set.mem selection (ident_of item))

  (* Moves every item of [bundle] into a fresh "bundle" module, and emits
     alias items at the original locations so that external references keep
     resolving. Returns the renamed items followed by the aliases. *)
  let fresh_module_for (bundle : item list) =
    let fresh_module =
      Concrete_ident.fresh_module ~label:"bundle" (List.map ~f:ident_of bundle)
    in
    (* For each (kept) item, pair it with the renaming of each of its
       identifiers into the fresh module. *)
    let renamings =
      bundle
      (* Exclude `Use` items: we exclude those from bundling since they are only
         user hints. `Use` items don't have proper identifiers, and those
         identifiers are never referenced by other Rust items. *)
      |> List.filter ~f:(function { v = Use _; _ } -> false | _ -> true)
      (* Exclude `NotImplementedYet` items *)
      |> List.filter ~f:(function
           | { v = NotImplementedYet; _ } -> false
           | _ -> true)
      |> List.concat_map ~f:(fun item ->
             List.map
               ~f:(fun id ->
                 ( item,
                   (id, Concrete_ident.move_to_fresh_module fresh_module id) ))
               (idents_of item))
    in
    (* One alias item per renaming, except for identifiers that backends
       cannot (or need not) alias. *)
    let aliases =
      let inspect_view_last id =
        List.last (Concrete_ident.to_view id).rel_path
      in
      List.filter_map renamings ~f:(fun (origin_item, (from_id, to_id)) ->
          (* Aliases inherit only the `late_skip` attributes of their origin. *)
          let attrs =
            List.filter
              ~f:(fun att -> Attrs.late_skip [ att ])
              origin_item.attrs
          in
          let v = Alias { name = from_id; item = to_id } in
          match origin_item.v with
          (* We don't want to aliases for constructors of structs with named fields because
             they can't be imported in F*. Ideally this should be handled by the backend. *)
          | Type { variants; is_struct = true; _ }
            when List.for_all variants ~f:(fun variant -> variant.is_record)
                 && Concrete_ident.is_constructor from_id ->
              None
          (* We don't need aliases for fields of types. *)
          | Type _ when [%matches? Some (`Field _)] (inspect_view_last from_id)
            ->
              None
          (* We don't need aliases for methods of trait impls. *)
          | Impl _
            when [%matches? Some (`AssociatedItem _)]
                   (inspect_view_last from_id) ->
              None
          | Quote _ -> None
          | _ -> Some { attrs; span = origin_item.span; ident = from_id; v })
    in
    (* Rewrites every identifier occurrence inside the bundle's items
       according to the renaming map. *)
    let rename =
      let renamings = List.map ~f:snd renamings in
      let renamings =
        match Map.of_alist (module Concrete_ident) renamings with
        | `Duplicate_key dup ->
            failwith
              [%string
                "Fatal error: in dependency analysis, we construct a renaming \
                 key-value list with a guarantee of unicity in keys. However, \
                 we found the following key twice:\n\
                 %{[%show: concrete_ident] dup}"]
        | `Ok value -> value
      in
      let renamer _lvl i = Map.find renamings i |> Option.value ~default:i in
      (U.Mappers.rename_concrete_idents renamer)#visit_item ExprLevel
    in
    List.map ~f:rename bundle @ aliases

  (* Breaks module-level dependency cycles: items belonging to a group of
     mutually dependent modules are all moved into one fresh bundle module;
     items of acyclic modules are returned unchanged. *)
  let bundle_cyclic_modules (items : item list) : item list =
    (* [module_level_scc] is a list of set of strongly connected modules. *)
    let module_level_scc = ModGraph.cycles (ModGraph.of_items items) in
    let items_per_ns =
      Map.of_alist_multi
        (module Namespace)
        (List.map items ~f:(fun i -> (Namespace.of_concrete_ident i.ident, i)))
    in
    let items_of_ns ns = Map.find items_per_ns ns |> Option.value ~default:[] in
    List.concat_map module_level_scc ~f:(fun nss ->
        let scc_items = List.concat_map (Set.to_list nss) ~f:items_of_ns in
        (* Only components spanning several modules need a fresh bundle. *)
        if Set.length nss > 1 then fresh_module_for scc_items else scc_items)

  (* Splits [items] into (1) the groups of mutually recursive items and
     (2) the remaining, non-recursive items, based on the SCCs of the item
     dependency graph. *)
  let recursive_bundles (items : item list) : item list list * item list =
    let g = ItemGraph.of_items ~original_items:items items in
    let bundles = ItemGraph.MutRec.of_graph g in
    (* Resolve identifiers back to items, silently dropping unknown ones. *)
    let items_of idents =
      List.filter_map idents ~f:(fun ident ->
          List.find items ~f:(fun i -> [%equal: Concrete_ident.t] i.ident ident))
    in
    (List.map bundles.mut_rec_bundles ~f:items_of, items_of bundles.non_mut_rec)
end


================================================
FILE: engine/lib/dependencies.mli
================================================
module Make (F : Features.T) : sig
  module AST : module type of Ast.Make (F)

  val uid_associated_items : AST.item list -> Ast.attrs -> AST.item list
  (** Given all known items and a set of attributes, returns the items that
      the attributes associate to (presumably via hax's unique-identifier
      attributes — see the implementation). *)

  val bundle_cyclic_modules : AST.item list -> AST.item list
  (** Breaks module-level dependency cycles: the items of mutually dependent
      modules are moved into fresh "bundle" modules, with aliases left at the
      original locations. *)

  val sort_namespace_wise : AST.item list -> AST.item list
  (** Sort within each namespaces: items are first grouped by namespace, then
      sorted topologically. *)

  val sort : AST.item list -> AST.item list
  (** Sort items regardless of their namespaces. *)

  val recursive_bundles : AST.item list -> AST.item list list * AST.item list
  (** Splits items into groups of mutually recursive items and the remaining
      non-recursive items. *)

  val filter_by_inclusion_clauses :
    Types.inclusion_clause list -> AST.item list -> AST.item list
  (** Filters items according to user-supplied inclusion/exclusion clauses,
      applied in order. *)
end


================================================
FILE: engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml
================================================
open! Prelude
open! Ast

module Make (F : Features.T) (View : Concrete_ident.RENDER_API) = struct
  open Deprecated_generic_printer_base
  open Deprecated_generic_printer_base.Make (F)

  module Class = struct
    module U = Ast_utils.Make (F)
    open! AST
    open PPrint

    (* Builds an indented block: groups the document, indents it by 2,
       appends a break, applies the wrapping combinator [f] (e.g. [parens]
       or [braces]), and groups the result. *)
    let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group

    class print =
      object (print)
        inherit print_base as super
        method printer_name = "Generic"

        (* For each AST position, says whether the context already delimits
           the sub-node ([AlreadyPar]) or whether the printer must add its
           own delimiters ([NeedsPar]). *)
        method par_state : ast_position -> par_state =
          function
          | Lhs_LhsArrayAccessor | Ty_Tuple | Ty_TSlice | Ty_TArray_length
          | Expr_If_cond | Expr_If_then | Expr_If_else | Expr_Array
          | Expr_Assign | Expr_Closure_param | Expr_Closure_body
          | Expr_Ascription_e | Expr_Let_lhs | Expr_Let_rhs | Expr_Let_body
          | Expr_App_arg | Expr_ConstructTuple | Pat_ConstructTuple | Pat_PArray
          | Pat_Ascription_pat | Param_pat | Item_Fn_body | GenericParam_GPConst
            ->
              AlreadyPar
          | _ -> NeedsPar

        (* The path (module chunks) of an identifier, per the view [View]. *)
        method namespace_of_concrete_ident : concrete_ident -> string list =
          fun i ->
            let rendered = View.render i in
            rendered.path

        (* Prints an identifier, `::`-separated; when [under_current_ns] the
           path is omitted and only the name is printed. *)
        method concrete_ident' ~(under_current_ns : bool) : concrete_ident fn =
          fun id ->
            let id = View.render id in
            let chunks =
              if under_current_ns then [ id.name ] else id.path @ [ id.name ]
            in
            separate_map (colon ^^ colon) utf8string chunks

        (* Prints only the final name of an identifier, without its path. *)
        method name_of_concrete_ident : concrete_ident fn =
          fun id -> (View.render id).name |> utf8string

        (* Mutability markers are not rendered by this generic printer. *)
        method mutability : 'a. 'a mutability fn = fun _ -> empty

        method primitive_ident : primitive_ident fn =
          function
          | Deref -> string "deref"
          | Cast -> string "cast"
          | LogicalOp And -> string "and"
          | LogicalOp Or -> string "or"

        method local_ident : local_ident fn = View.local_ident >> utf8string

        method literal : literal_ctx -> literal fn =
          (* TODO : escape *)
          fun _ctx -> function
            | String s -> utf8string s |> dquotes
            | Char c -> char c |> bquotes
            | Int { value; negative; _ } ->
                string value |> precede (if negative then minus else empty)
            | Float { value; kind; negative } ->
                string value
                |> precede (if negative then minus else empty)
                |> terminate (string (show_float_kind kind))
            | Bool b -> OCaml.bool b

        method generic_value : generic_value fn =
          function
          | GLifetime _ -> string "Lifetime"
          | GType ty -> print#ty_at GenericValue_GType ty
          | GConst expr -> print#expr_at GenericValue_GConst expr

        (* Left-hand sides of assignments. *)
        method lhs : lhs fn =
          function
          | LhsLocalVar { var; _ } -> print#local_ident var
          | LhsVecRef { e; _ } -> print#lhs e
          | LhsArbitraryExpr { e; _ } -> print#expr_at Lhs_LhsArbitraryExpr e
          | LhsFieldAccessor { e; field; _ } ->
              print#lhs e |> parens
              |> terminate (dot ^^ print#global_ident_projector field)
          | LhsArrayAccessor { e; index; _ } ->
              print#lhs e |> parens
              |> terminate (print#expr_at Lhs_LhsArrayAccessor index |> brackets)

        method ty_bool : document = string "bool"
        method ty_char : document = string "char"
        method ty_str : document = string "str"

        (* Integer types are printed Rust-style: i8/u8 … isize/usize. *)
        method ty_int : int_kind fn =
          fun { size; signedness } ->
            let signedness = match signedness with Signed -> "i" | _ -> "u" in
            let size =
              match int_of_size size with
              | Some n -> OCaml.int n
              | None -> string "size"
            in
            string signedness ^^ size

        method ty_float : float_kind fn = show_float_kind >> string

        (* Generic arguments, printed `<a, b, …>`; nothing for an empty list. *)
        method generic_values : generic_value list fn =
          function
          | [] -> empty
          | values -> separate_map comma print#generic_value values |> angles

        (* Type application: `Name<args…>`. *)
        method ty_app : concrete_ident -> generic_value list fn =
          fun f args -> print#concrete_ident f ^^ print#generic_values args

        method ty_tuple : int -> ty list fn =
          fun _n ->
            separate_map (comma ^^ break 1) (print#ty_at Ty_Tuple)
            >> iblock parens

        (* Main type printer; `TApp` is delegated to the base class. *)
        method! ty : par_state -> ty fn =
          fun ctx ty ->
            match ty with
            | TBool -> string "bool"
            | TChar -> string "char"
            | TInt kind -> print#ty_int kind
            | TFloat kind -> print#ty_float kind
            | TStr -> string "String"
            | TArrow (inputs, output) ->
                separate_map (string "->") (print#ty_at Ty_TArrow)
                  (inputs @ [ output ])
                |> parens
                |> precede (string "arrow!")
            | TRef { typ; mut; _ } ->
                ampersand ^^ print#mutability mut ^^ print#ty_at Ty_TRef typ
            | TParam i -> print#local_ident i
            | TSlice { ty; _ } -> print#ty_at Ty_TSlice ty |> brackets
            | TRawPointer _ -> string "raw_pointer!()"
            | TArray { typ; length } ->
                print#ty_at Ty_TArray_length typ
                ^/^ semi
                ^/^ print#expr_at Ty_TArray_length length
                |> brackets
            | TAssociatedType _ -> string "assoc_type!()"
            | TOpaque _ -> string "opaque_type!()"
            | TApp _ -> super#ty ctx ty
            | TDyn _ -> empty (* TODO *)

        (* Main expression printer. [ctx] says whether the surrounding
           context already delimits the expression; [wrap_parens] adds a
           braced block only when it does not. `App` and `Construct` are
           delegated to the base class. *)
        method! expr' : par_state -> expr' fn =
          fun ctx e ->
            let wrap_parens =
              group
              >>
              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces
            in
            match e with
            | If { cond; then_; else_ } ->
                let if_then =
                  (string "if" ^//^ nest 2 (print#expr_at Expr_If_cond cond))
                  ^/^ string "then"
                  ^//^ (print#expr_at Expr_If_then then_ |> braces |> nest 1)
                in
                (match else_ with
                | None -> if_then
                | Some else_ ->
                    if_then ^^ break 1 ^^ string "else" ^^ space
                    ^^ (print#expr_at Expr_If_else else_ |> iblock braces))
                |> wrap_parens
            | Match { scrutinee; arms } ->
                let header =
                  string "match" ^^ space
                  ^^ (print#expr_at Expr_Match_scrutinee scrutinee
                     |> terminate space |> iblock Fn.id)
                  |> group
                in
                (* Each arm on its own line, prefixed with `| `. *)
                let arms =
                  separate_map hardline
                    (print#arm >> group >> nest 2
                    >> precede (bar ^^ space)
                    >> group)
                    arms
                in
                header ^^ iblock braces arms
            | Let { monadic; lhs; rhs; body } ->
                (* Monadic lets use [expr_monadic_let]; plain lets [expr_let]. *)
                (Option.map
                   ~f:(fun monad -> print#expr_monadic_let ~monad)
                   monadic
                |> Option.value ~default:print#expr_let)
                  ~lhs ~rhs body
                |> wrap_parens
            | Literal l -> print#literal Expr l
            | Block { e; safety_mode; _ } -> (
                let e = lbrace ^/^ nest 2 (print#expr ctx e) ^/^ rbrace in
                match safety_mode with
                | Safe -> e
                | Unsafe _ -> !^"unsafe " ^^ e)
            | Array l ->
                separate_map comma (print#expr_at Expr_Array) l
                |> group |> brackets
            | LocalVar i -> print#local_ident i
            | GlobalVar (`Concrete i) -> print#concrete_ident i
            | GlobalVar (`Primitive p) -> print#primitive_ident p
            | GlobalVar (`TupleCons 0) -> print#expr_construct_tuple []
            | GlobalVar
                (`TupleType _ | `TupleField _ | `Projector _ | `TupleCons _) ->
                print#assertion_failure "GlobalVar"
            | Assign { lhs; e; _ } ->
                group (print#lhs lhs)
                ^^ space ^^ equals
                ^/^ group (print#expr_at Expr_Assign e)
                ^^ semi
            | Loop _ -> string "todo loop;"
            | Break _ -> string "todo break;"
            | Return _ -> string "todo return;"
            | Continue _ -> string "todo continue;"
            | QuestionMark { e; _ } ->
                print#expr_at Expr_QuestionMark e |> terminate qmark
            | Borrow { kind; e; _ } ->
                string (match kind with Mut _ -> "&mut " | _ -> "&")
                ^^ print#expr_at Expr_Borrow e
            | AddressOf _ -> string "todo address of;"
            | Closure { params; body; _ } ->
                separate_map comma (print#pat_at Expr_Closure_param) params
                |> group |> enclose bar bar
                |> terminate (print#expr_at Expr_Closure_body body |> group)
                |> wrap_parens
            | Ascription { e; typ } ->
                print#expr_at Expr_Ascription_e e
                ^^ string "as"
                ^/^ print#ty_at Expr_Ascription_typ typ
                |> wrap_parens
            | MacroInvokation _ -> print#assertion_failure "MacroInvokation"
            | EffectAction _ -> print#assertion_failure "EffectAction"
            | Quote quote -> print#quote quote
            | App _ | Construct _ -> super#expr' ctx e

        (* Prints a quote: verbatim fragments interleaved with spliced
           expressions, patterns and types. *)
        method quote { contents; _ } =
          List.map
            ~f:(function
              | Verbatim code -> string code
              | Expr e -> print#expr_at Expr_Quote e
              | Pattern p -> print#pat_at Expr_Quote p
              | Typ p -> print#ty_at Expr_Quote p)
            contents
          |> concat

        (* The generic printer ignores the monad and prints a plain let. *)
        method expr_monadic_let :
            monad:supported_monads * F.monadic_binding ->
            lhs:pat ->
            rhs:expr ->
            expr fn =
          fun ~monad:_ ~lhs ~rhs body -> print#expr_let ~lhs ~rhs body

        method expr_let : lhs:pat -> rhs:expr -> expr fn =
          fun ~lhs ~rhs body ->
            string "let"
            ^/^ iblock Fn.id (print#pat_at Expr_Let_lhs lhs)
            ^/^ equals
            ^/^ iblock Fn.id (print#expr_at Expr_Let_rhs rhs)
            ^^ semi
            ^/^ (print#expr_at Expr_Let_body body |> group)

        (* Tuple projection `e.nth`; the tuple's size is not rendered. *)
        method tuple_projection : size:int -> nth:int -> expr fn =
          fun ~size:_ ~nth e ->
            print#expr_at Expr_TupleProjection e
            |> terminate (dot ^^ OCaml.int nth)

        method field_projection : concrete_ident -> expr fn =
          fun i e ->
            print#expr_at Expr_FieldProjection e
            |> terminate (dot ^^ print#name_of_concrete_ident i)

        (* Function application `f(args…)`; generic args are not rendered. *)
        method expr_app : expr -> expr list -> generic_value list fn =
          fun f args _generic_args ->
            let args =
              separate_map
                (comma ^^ break 1)
                (print#expr_at Expr_App_arg >> group)
                args
            in
            let f = print#expr_at Expr_App_f f |> group in
            f ^^ iblock parens args

        method doc_construct_tuple : document list fn =
          separate (comma ^^ break 1) >> iblock parens

        method expr_construct_tuple : expr list fn =
          List.map ~f:(print#expr_at Expr_ConstructTuple)
          >> print#doc_construct_tuple

        method pat_construct_tuple : pat list fn =
          List.map ~f:(print#pat_at Pat_ConstructTuple)
          >> print#doc_construct_tuple

        (* Only concrete projectors are printable here. *)
        method global_ident_projector : global_ident fn =
          function
          | `Projector (`Concrete i) | `Concrete i -> print#concrete_ident i
          | _ ->
              print#assertion_failure "global_ident_projector: not a projector"

        method doc_construct_inductive :
            is_record:bool ->
            is_struct:bool ->
            constructor:concrete_ident ->
            base:document option ->
            (global_ident * document) list fn =
          fun ~is_record ~is_struct:_ ~constructor ~base:_ args ->
            if is_record then
              print#concrete_ident constructor
              ^^ space
              ^^ iblock parens
                   (separate_map (break 0)
                      (fun (field, body) ->
                        (print#global_ident_projector field
                        |> terminate comma |> group)
                        ^^ colon ^^ space ^^ iblock Fn.id body)
                      args)
            else
              print#concrete_ident constructor
              ^^ space
              ^^ iblock parens (separate_map (break 0) snd args)

        method expr_construct_inductive :
            is_record:bool ->
            is_struct:bool ->
            constructor:concrete_ident ->
            base:(expr * F.construct_base) option ->
            (global_ident * expr) list fn =
          fun ~is_record ~is_struct ~constructor ~base ->
            let base =
              Option.map
                ~f:(fst >> print#expr_at Expr_ConcreteInductive_base)
                base
            in
            List.map ~f:(print#expr_at Expr_ConcreteInductive_field |> map_snd)
            >> print#doc_construct_inductive ~is_record ~is_struct ~constructor
                 ~base

        method attr : attr fn = fun _ -> empty

        (* Overrides the base [pat'] with the pattern cases the default
           implementation does not handle; [PConstruct] and [PConstant] are
           delegated back to [super]. *)
        method! pat' : par_state -> pat' fn =
          fun ctx ->
            (* Group the document and, when the context demands delimiters,
               wrap it.
               NOTE(review): despite the name, [NeedsPar] wraps in *braces*
               here — confirm this is intended for this printer. *)
            let wrap_parens =
              group
              >>
              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces
            in
            function
            | PWild -> underscore
            | PAscription { typ; typ_span; pat } ->
                print#pat_ascription ~typ ~typ_span pat |> wrap_parens
            | PBinding { mut; mode; var; typ = _; subpat } -> (
                (* `&`/`mut ` prefixes, the variable, then an optional
                   `@ subpattern`. *)
                let p =
                  (match mode with ByRef _ -> string "&" | _ -> empty)
                  ^^ (match mut with Mutable _ -> string "mut " | _ -> empty)
                  ^^ print#local_ident var
                in
                match subpat with
                | Some (subpat, _) ->
                    p ^^ space ^^ at ^^ space
                    ^^ print#pat_at Pat_PBinding_subpat subpat
                    |> wrap_parens
                | None -> p)
            | PArray { args } ->
                (* `[p1, p2, ...]`, one comma-terminated group per element. *)
                separate_map (break 0)
                  (print#pat_at Pat_PArray >> terminate comma >> group)
                  args
                |> iblock brackets
            | PDeref { subpat; _ } ->
                ampersand ^^ print#pat_at Pat_PDeref subpat
            | (PConstruct _ | PConstant _) as pat -> super#pat' ctx pat
            | POr { subpats } ->
                (* `p1 | p2 | ...` *)
                separate_map (bar ^^ break 1) (print#pat_at Pat_Or) subpats

        (* `pat : typ`, with the type printed under its own span so that
           annotations point at the ascription's type position. *)
        method pat_ascription : typ:ty -> typ_span:span -> pat fn =
          fun ~typ ~typ_span pat ->
            let typ_doc =
              print#with_span ~span:typ_span (fun () ->
                  print#ty_at Pat_Ascription_typ typ)
            in
            print#pat_at Pat_Ascription_pat pat ^^ colon ^^ typ_doc

        (* Print an expression's kind directly, without the span bookkeeping
           that [expr] performs. *)
        method expr_unwrapped : par_state -> expr fn =
          fun ctx e -> print#expr' ctx e.e

        (* `attrs pat : typ`; the type is wrapped in its own span when one is
           available, so annotations can point at it. *)
        method param : param fn =
          fun { pat; typ; typ_span; attrs } ->
            let typ_doc =
              match typ_span with
              | None -> print#ty_at Param_typ typ
              | Some span ->
                  print#with_span ~span (fun _ -> print#ty_at Param_typ typ)
            in
            print#attrs attrs ^^ print#pat_at Param_pat pat ^^ space ^^ colon
            ^^ space ^^ typ_doc

        (* Items: only functions and quotes are printed; everything else
           renders as a placeholder string. *)
        method item' : item' fn =
          function
          | Fn { name; generics; body; params; safety } ->
              (* `unsafe? fn name<generics>(params) { body }` *)
              let safety_doc =
                match safety with Safe -> empty | Unsafe _ -> !^"unsafe "
              in
              let params_doc =
                separate_map (comma ^^ break 1) print#param params
                |> iblock parens
              in
              let generics_doc = print#generic_params generics.params in
              safety_doc ^^ !^"fn" ^^ space ^^ print#concrete_ident name
              ^^ generics_doc ^^ params_doc
              ^^ iblock braces (print#expr_at Item_Fn_body body)
          | Quote { quote; _ } -> print#quote quote
          | _ -> string "item not implemented"

        (* One generic parameter: an optional `const ` prefix, the attributes,
           the identifier, and an optional `: bound` suffix. *)
        method generic_param' : generic_param fn =
          fun { ident; attrs; kind; _ } ->
            (* Rename `_` parameters so the output stays well-formed. *)
            let printable_ident =
              if String.(ident.name = "_") then
                { ident with name = "Anonymous" }
              else ident
            in
            let prefix =
              match kind with
              | GPConst _ -> string "const" ^^ space
              | _ -> empty
            in
            let suffix =
              match kind with
              | GPType -> empty
              (* Lifetimes are erased to a placeholder bound. *)
              | GPLifetime _ -> space ^^ colon ^^ space ^^ string "'unk"
              | GPConst { typ } ->
                  space ^^ colon ^^ space
                  ^^ print#ty_at GenericParam_GPConst typ
            in
            prefix ^^ print#attrs attrs
            ^^ print#local_ident printable_ident
            ^^ suffix

        (* `<p1,p2,...>` — comma-separated, grouped, in angle brackets. *)
        method generic_params : generic_param list fn =
          fun params ->
            separate_map comma print#generic_param params |> group |> angles

        (* One match arm: `pat (if let lhs = rhs)? => body,` *)
        method arm' : arm' fn =
          fun { arm_pat; body; guard } ->
            let pat = print#pat_at Arm_pat arm_pat |> group in
            let guard_doc =
              match guard with
              | None -> empty
              | Some { guard = IfLet { lhs; rhs; _ }; _ } ->
                  string " if let " ^^ print#pat_at Arm_pat lhs
                  ^^ string " = " ^^ print#expr_at Arm_body rhs
            in
            pat ^^ guard_doc ^^ string " => "
            ^^ print#expr_at Arm_body body ^^ comma
      end
  end

  include Class

  include Api (struct
    type aux_info = unit

    let new_print () = (new Class.print :> print_object)
  end)
end


================================================
FILE: engine/lib/deprecated_generic_printer/deprecated_generic_printer.mli
================================================
(** Deprecated generic pretty-printer over the hax AST, parametrized by a
    feature set [F] and a name-rendering view. *)
module Make (F : Features.T) (View : Concrete_ident.RENDER_API) : sig
  open Deprecated_generic_printer_base.Make(F)

  (* The string-and-annotation producing entry points ([items], [item], ...). *)
  include API

  (* The underlying printer class, exposed so it can be further extended. *)
  class print : print_class
end


================================================
FILE: engine/lib/deprecated_generic_printer/deprecated_generic_printer_base.ml
================================================
open! Prelude
open! Ast
open PPrint

(** Generic printer for the {!module:Ast} ASTs. It uses the [PPrint] library,
    and additionally computes {!Annotation.t}. *)

(** Identifies a position in the AST. This is useful for figuring out whether
    we should wrap a chunk of AST in parentheses, or not, or for implementing
    special treatment of some sub-trees if they occur in a certain context.

    Constructors read [Parent_child]: the node being printed occurs in the
    [child] slot of a [Parent] node. *)
type ast_position =
  | GenericValue_GType
  | GenericValue_GConst
  | Lhs_LhsArbitraryExpr
  | Lhs_LhsArrayAccessor
  | Ty_TArrow
  | Ty_TRef
  | Ty_Tuple
  | Ty_TSlice
  | Ty_TArray_typ
  | Ty_TArray_length
  | Expr_If_cond
  | Expr_If_then
  | Expr_If_else
  | Expr_Array
  | Expr_Assign
  | Expr_Closure_param
  | Expr_Closure_body
  | Expr_Ascription_e
  | Expr_Ascription_typ
  | Expr_Let_lhs
  | Expr_Let_rhs
  | Expr_Let_body
  | Expr_Quote
  | Expr_Match_scrutinee
  | Expr_QuestionMark
  | Expr_Borrow
  | Expr_TupleProjection
  | Expr_ConstructTuple
  | Expr_FieldProjection
  | Expr_App_f
  | Expr_App_arg
  | Expr_ConcreteInductive_base
  | Expr_ConcreteInductive_field
  | Pat_PBinding_subpat
  | Pat_PDeref
  | Pat_PArray
  | Pat_ConstructTuple
  | Pat_ConcreteInductive
  | Pat_Ascription_pat
  | Pat_Ascription_typ
  | Pat_Or
  | Param_pat
  | Param_typ
  | GenericParam_GPType
  | GenericParam_GPConst
  | Arm_pat
  | Arm_body
  | Item_Fn_body
[@@warning "-37"] (* silence unused-constructor warnings *)

(** Maps a position in the rendered output back to a source span. *)
module Annotation = struct
  (* A (line, column) position in the rendered output. *)
  type loc = { line : int; col : int } [@@deriving show, yojson, eq]
  type t = loc * span [@@deriving show, yojson, eq]
end

(** A rendered string together with the annotations collected while
    rendering it. *)
type annot_str = string * Annotation.t list [@@deriving show, yojson, eq]

(** When printing a chunk of AST, should we wrap parentheses ({!NeedsPar}) or
    not ({!AlreadyPar})? *)
type par_state = NeedsPar | AlreadyPar

(** The context of a literal in the AST, does it appear in a pattern ({!Pat}) or
    in an expression ({!Expr})?*)
type literal_ctx = Pat | Expr

module Make (F : Features.T) = struct
  module AST = Ast.Make (F)
  module U = Ast_utils.Make (F)
  open Ast.Make (F)

  type 't fn = 't -> document

  (** Raw generic printers base class. Those are useful for building a printer,
      not for consuming printers. Consumers should use the {!module:Api}
      functor. *)
  class virtual print_base =
    object (print)
      (* Span of the AST node currently being printed; read by [spanned_doc]. *)
      val mutable current_span = Span.default

      (* Annotations accumulated while rendering; see [spanned_doc]. *)
      val mutable span_data : Annotation.t list = []

      (* Namespace of the item currently being printed, if any. *)
      val mutable current_namespace : string list option = None
      method get_span_data () = span_data

      (* Build [f ()]'s document with [span] as the ambient span, restoring
         the previous span afterwards. The resulting document records its
         output position when rendered. *)
      method with_span ~span f =
        let prev_span = current_span in
        current_span <- span;
        let doc = f () |> print#spanned_doc |> custom in
        current_span <- prev_span;
        doc

      (* Wrap [doc] in a custom PPrint document that, at pretty-render time,
         pushes the (output line/column, span) pair onto [span_data]. The
         compact renderer records nothing. *)
      method spanned_doc (doc : document) : custom =
        let span = current_span in
        object
          method requirement : requirement = requirement doc

          method pretty : output -> state -> int -> bool -> unit =
            fun o s i b ->
              span_data <-
                ({ line = s.line; col = s.column }, span) :: span_data;
              pretty o s i b doc

          method compact : output -> unit = fun o -> compact o doc
        end

      method concrete_ident : concrete_ident fn =
        fun id ->
          let current_ns = print#get_current_namespace () in
          let id_ns = print#namespace_of_concrete_ident id in
          print#concrete_ident'
            ~under_current_ns:
              ([%equal: string list option] current_ns (Some id_ns))
            id
      (** Print a concrete identifier.

          Differentiates between encounters of the identifier in its own
          namespace or a foreign namespace.*)

      (* Abort printing with a span-free [AssertionFailure] error attached to
         the current span. *)
      method assertion_failure : 'any. string -> 'any =
        fun details ->
          let span = Span.to_thir current_span in
          let kind = Types.AssertionFailure { details } in
          let ctx = Diagnostics.Context.GenericPrinter print#printer_name in
          Diagnostics.SpanFreeError.raise ~span
            (Span.owner_hint current_span)
            ctx kind

      method set_current_namespace ns = current_namespace <- ns
      method get_current_namespace () = current_namespace

      (* `*_at` variants: print a node at a given AST position, letting
         [par_state] decide whether it needs wrapping. *)
      method expr_at : ast_position -> expr fn = print#par_state >> print#expr
      method ty_at : ast_position -> ty fn = print#par_state >> print#ty
      method pat_at : ast_position -> pat fn = print#par_state >> print#pat

      (* Print a pattern under its own span. *)
      method pat : par_state -> pat fn =
        fun ctx { p; span; _ } ->
          print#with_span ~span (fun _ -> print#pat' ctx p)

      method item_unwrapped : item fn = fun { v; _ } -> print#item' v

      (* Print a generic parameter under its own span. *)
      method generic_param : generic_param fn =
        fun ({ span; _ } as p) ->
          print#with_span ~span (fun _ -> print#generic_param' p)

      (* Print a match arm under its own span. *)
      method arm : arm fn =
        fun { arm; span } -> print#with_span ~span (fun _ -> print#arm' arm)

      (* Default [ty]: only [TApp] is handled here; subclasses must handle
         the rest. *)
      method ty : par_state -> ty fn =
        fun _ctx ty ->
          match ty with
          | TApp { ident = `Concrete ident; args } ->
              print#ty_app ident args |> group
          | TApp
              {
                ident =
                  `Primitive _ | `TupleCons _ | `TupleField _ | `Projector _;
                _;
              } ->
              print#assertion_failure "TApp not concrete"
          | TApp { ident = `TupleType n; args } ->
              (* Tuple types: keep only the type arguments and check arity. *)
              let args =
                List.filter_map
                  ~f:(function GType t -> Some t | _ -> None)
                  args
              in
              if [%equal: int] (List.length args) n |> not then
                print#assertion_failure "malformed ty app tuple";
              print#ty_tuple n args
          | TApp _ -> .
          | _ ->
              print#assertion_failure "default ty is only implemented for TApp"

      (* Default [expr']: only [App] and [Construct] are handled here. *)
      method expr' : par_state -> expr' fn =
        fun _ctx e ->
          match e with
          | App { f = { e = GlobalVar i; _ } as f; args; generic_args; _ } -> (
              let expect_one_arg where =
                match args with
                | [ arg ] -> arg
                | _ -> print#assertion_failure @@ "Expected one arg at " ^ where
              in
              match i with
              | `Concrete _ | `Primitive _ -> print#expr_app f args generic_args
              | `TupleType _ | `TupleCons _ | `TupleField _ ->
                  print#assertion_failure "App: unexpected tuple"
              | `Projector (`TupleField (nth, size)) ->
                  (* `expr.nth` on a tuple of [size] components. *)
                  let arg = expect_one_arg "projector tuple field" in
                  print#tuple_projection ~size ~nth arg
              | `Projector (`Concrete i) ->
                  let arg = expect_one_arg "projector concrete" in
                  print#field_projection i arg)
          | App { f; args; generic_args; _ } ->
              print#expr_app f args generic_args
          | Construct { constructor; fields; base; is_record; is_struct } -> (
              match constructor with
              | `Concrete constructor ->
                  print#expr_construct_inductive ~is_record ~is_struct
                    ~constructor ~base fields
              | `TupleCons _ ->
                  List.map ~f:snd fields |> print#expr_construct_tuple
              | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->
                  print#assertion_failure "Construct unexpected constructors")
          | App _ | Construct _ -> .
          | _ ->
              print#assertion_failure
                "default expr' is only implemented for App and Construct"

      (* Default [pat']: only [PConstant] and [PConstruct] are handled here. *)
      method pat' : par_state -> pat' fn =
        fun _ -> function
          | PConstant { lit } -> print#literal Pat lit
          | PConstruct { constructor; is_record; is_struct; fields } -> (
              match constructor with
              | `Concrete constructor ->
                  print#doc_construct_inductive ~is_record ~is_struct
                    ~constructor ~base:None
                    (List.map
                       ~f:(fun fp ->
                         (fp.field, print#pat_at Pat_ConcreteInductive fp.pat))
                       fields)
              | `TupleCons _ ->
                  List.map ~f:(fun fp -> fp.pat) fields
                  |> print#pat_construct_tuple
              | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->
                  print#assertion_failure "todo err")
          | _ ->
              print#assertion_failure
                "default pat' is only implemented for PConstant and PConstruct"

      (* Print an expression under its own span. On printer failure, replace
         the expression with a hax-failure node and print that instead. *)
      method expr : par_state -> expr fn =
        fun ctx e ->
          let span = e.span in
          print#with_span ~span (fun _ ->
              try print#expr_unwrapped ctx e
              with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
                U.hax_failure_expr span e.typ (context, kind)
                  (U.LiftToFullAst.expr e)
                (* TODO: if the printer is extremely broken, this results in a stack overflow *)
                |> print#expr ctx)

      (* Print an item, setting the current namespace first. On printer
         failure, print a hax-error item carrying the error message and the
         Rust rendering of the item. *)
      method item : item fn =
        fun i ->
          print#set_current_namespace
            (print#namespace_of_concrete_ident i.ident |> Option.some);
          try print#item_unwrapped i
          with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
            let error = Diagnostics.pretty_print_context_kind context kind in
            (* NOTE(review): [Obj.magic] coerces between feature
               instantiations of [item]; relies on identical runtime
               representation — confirm. *)
            let cast_item : item -> Ast.Full.item = Stdlib.Obj.magic in
            let ast = cast_item i |> Print_rust.pitem_str in
            let msg = error ^ "\nLast available AST for this item:\n\n" ^ ast in
            (* TODO: if the printer is extremely broken, this results in a stack overflow *)
            make_hax_error_item i.span i.ident msg |> print#item

      method items : item list fn = separate_map (twice hardline) print#item
      (** Print given list of items, separating them by two newlines each.*)

      method attrs : attrs fn = separate_map hardline print#attr
      (** Print given list of attributes, separating them by one newline each.*)
    end

  type print_object =
    < printer_name : string
    ; get_span_data : unit -> Annotation.t list
    ; ty : par_state -> ty fn
    ; pat : par_state -> pat fn
    ; arm : arm fn
    ; expr : par_state -> expr fn
    ; item : item fn
    ; items : item list fn >
  (** In the end, a printer *object* should be of the type {!print_object}. *)

  (** Full interface a concrete printer must implement: {!print_base} plus
      every leaf printing method ([ty_*], [expr_*], [pat_*], ...). *)
  class type print_class = object
    inherit print_base
    method printer_name : string
    method get_span_data : unit -> Annotation.t list

    method namespace_of_concrete_ident : concrete_ident -> string list
    (** The namespace a concrete identifier was defined in. *)

    (* Decide whether a node at a given position needs wrapping. *)
    method par_state : ast_position -> par_state
    method concrete_ident' : under_current_ns:bool -> concrete_ident fn
    method concrete_ident : concrete_ident fn
    method name_of_concrete_ident : concrete_ident fn
    method mutability : 'a. 'a mutability fn
    method primitive_ident : primitive_ident fn
    method local_ident : local_ident fn
    method literal : literal_ctx -> literal fn
    method generic_value : generic_value fn
    method lhs : lhs fn

    (* Leaf type printers. *)
    method ty_bool : document
    method ty_char : document
    method ty_str : document
    method ty_int : int_kind fn
    method ty_float : float_kind fn
    method generic_values : generic_value list fn
    method ty_app : concrete_ident -> generic_value list fn
    method ty_tuple : int -> ty list fn
    method ty : par_state -> ty fn
    method expr' : par_state -> expr' fn

    method expr_monadic_let :
      monad:supported_monads * F.monadic_binding ->
      lhs:pat ->
      rhs:expr ->
      expr fn

    method expr_let : lhs:pat -> rhs:expr -> expr fn
    method tuple_projection : size:int -> nth:int -> expr fn
    method field_projection : concrete_ident -> expr fn
    method expr_app : expr -> expr list -> generic_value list fn
    method doc_construct_tuple : document list fn
    method expr_construct_tuple : expr list fn
    method pat_construct_tuple : pat list fn
    method global_ident_projector : global_ident fn

    (* Layout of an inductive construction, shared by expressions and
       patterns ([doc_*] works on already-printed documents). *)
    method doc_construct_inductive :
      is_record:bool ->
      is_struct:bool ->
      constructor:concrete_ident ->
      base:document option ->
      (global_ident * document) list fn

    method expr_construct_inductive :
      is_record:bool ->
      is_struct:bool ->
      constructor:concrete_ident ->
      base:(expr * F.construct_base) option ->
      (global_ident * expr) list fn

    method attr : attr fn
    method attrs : attrs fn
    method pat' : par_state -> pat' fn
    method pat_ascription : typ:ty -> typ_span:span -> pat fn
    method pat : par_state -> pat fn
    method expr_unwrapped : par_state -> expr fn
    method param : param fn
    method item' : item' fn
    method item_unwrapped : item fn
    method generic_param' : generic_param fn
    method generic_param : generic_param fn
    method generic_params : generic_param list fn
    method arm' : arm' fn
    method arm : arm fn
    method expr : par_state -> expr fn
    method item : item fn
    method quote : quote fn
    method items : item list fn
  end

  (** Consumer-facing API: render AST fragments to strings plus span
      annotations, given some auxiliary information [aux_info]. *)
  module type API = sig
    type aux_info

    val items : aux_info -> item list -> annot_str
    val item : aux_info -> item -> annot_str
    val expr : aux_info -> expr -> annot_str
    val pat : aux_info -> pat -> annot_str
    val ty : aux_info -> ty -> annot_str
  end

  (** Builds the consumer API out of a printer-object factory. *)
  module Api (NewPrint : sig
    type aux_info

    val new_print : aux_info -> print_object
  end) =
  struct
    type aux_info = NewPrint.aux_info

    (* Run one printer entry point and render the resulting document at
       width 80 (ribbon ratio 1.0); return the rendered string together
       with the span annotations collected during rendering. *)
    let mk (f : print_object -> 'a -> PPrint.document) (aux : aux_info) (x : 'a)
        : annot_str =
      let printer = NewPrint.new_print aux in
      let buffer = Buffer.create 0 in
      PPrint.ToBuffer.pretty 1.0 80 buffer (f printer x);
      (Buffer.contents buffer, printer#get_span_data ())

    let items : aux_info -> item list -> annot_str = mk (fun p -> p#items)
    let item : aux_info -> item -> annot_str = mk (fun p -> p#item)
    let expr : aux_info -> expr -> annot_str = mk (fun p -> p#expr AlreadyPar)
    let pat : aux_info -> pat -> annot_str = mk (fun p -> p#pat AlreadyPar)
    let ty : aux_info -> ty -> annot_str = mk (fun p -> p#ty AlreadyPar)
  end
end


================================================
FILE: engine/lib/diagnostics.ml
================================================
open! Prelude
module T = Types

module Backend = struct
  (** Verification backends hax can target. *)
  type t = Coq | SSProve | FStar | EasyCrypt | ProVerif
  [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]
end

module Phase = struct
  module Rejection = struct
    (** Reasons a rejection phase may refuse an input. *)
    type t =
      | NotInBackendLang of Backend.t
      | CoercionForUntypedPhase of string
      | ArbitraryLhs
      | Continue
      | Break
      | QuestionMark
      | RawOrMutPointer
      | EarlyExit
      | AsPattern
      | Dyn
      | TraitItemDefault
      | Unsafe
    [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]

    (* Display name used when rendering phase names, e.g. "not_in_<backend>". *)
    let display = function
      | NotInBackendLang backend -> "not_in_" ^ [%show: Backend.t] backend
      | x -> [%show: t] x
  end

  (** All names for phases defined in `lib/phases_*` are generated automatically
  *)
  type%add_phase_names t = Identity | HoistSideEffects | Reject of Rejection.t
  [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]

  (* Display name for a phase; rejections render as "reject_<reason>". *)
  let display = function
    | Reject rejection -> "reject_" ^ Rejection.display rejection
    | x -> [%show: t] x
end

module Context = struct
  (** Where in the engine a diagnostic was emitted: a rewriting phase, a
      backend, or one of the fixed engine stages. *)
  type t =
    | Phase of Phase.t
    | Backend of Backend.t
    | ThirImport
    | Dependencies
    | DebugPrintRust
    | GenericPrinter of string
    | Other of string
  [@@deriving show, eq, yojson, compare]

  (* Human-readable name for a context, shown in rendered diagnostics. *)
  let display = function
    | Phase p -> Phase.display p
    | Backend backend -> [%show: Backend.t] backend ^ " backend"
    | ThirImport -> "AST import"
    | DebugPrintRust -> "Rust debug printer"
    (* Fixed typo in the user-facing label (was "Dependenciy analysis"). *)
    | Dependencies -> "Dependency analysis"
    | GenericPrinter kind -> kind ^ " generic printer"
    | Other s -> "Other (" ^ s ^ ")"
end

(* Re-exported THIR types; comparison goes through the printed
   representation since these imported types only derive [show]/[eq]. *)
type kind = T.kind [@@deriving show, eq]

let compare_kind (a : kind) (b : kind) =
  [%compare: string] ([%show: kind] a) ([%show: kind] b)

type thir_span = T.span [@@deriving show, eq]

let compare_thir_span (a : thir_span) (b : thir_span) =
  [%compare: string] ([%show: thir_span] a) ([%show: thir_span] b)

type thir_def_id = T.def_id [@@deriving show, eq]

let compare_thir_def_id (a : thir_def_id) (b : thir_def_id) =
  [%compare: string] ([%show: thir_def_id] a) ([%show: thir_def_id] b)

(** A diagnostic: what happened ([kind]), where in the engine it was detected
    ([context]), and which source spans it points at. *)
type t = {
  context : Context.t;
  kind : kind;
  span : thir_span list;
  owner_id : thir_def_id option; (* item owning the spans, when known *)
}
[@@deriving show, eq, compare]

(* Field-by-field translation to the wire diagnostic type; only the context
   needs rendering to a string. *)
let to_thir_diagnostic (d : t) : Types.diagnostics =
  let { context; kind; span; owner_id } = d in
  { kind; context = Context.display context; span; owner_id }

(** Ask `cargo-hax` to pretty print a diagnostic *)
let ask_diagnostic_pretty_print diag : string =
  let extract = function
    | Types.PrettyPrintedDiagnostic s -> Some s
    | _ -> None
  in
  Hax_io.request (PrettyPrintDiagnostic diag)
    ~expected:"PrettyPrintedDiagnostic" extract

(* Render a diagnostic via cargo-hax. *)
let pretty_print (d : t) : string =
  ask_diagnostic_pretty_print (to_thir_diagnostic d)

(* Render a (context, kind) pair with a dummy span and no owner. *)
let pretty_print_context_kind : Context.t -> kind -> string =
 fun context kind ->
  pretty_print
    { context; kind; span = Span.to_thir (Span.dummy ()); owner_id = None }

(** Error collection: diagnostics accumulate in a mutable list; fatal ones
    additionally abort the computation via a private exception. *)
module Core : sig
  val raise_fatal_error : 'never. t -> 'never
  val report : t -> unit
  val try_ : 'x. (unit -> 'x) -> t list * 'x option
  val capture : 'a. (unit -> 'a) -> 'a * t list
end = struct
  (* a mutable state for collecting errors *)
  let state = ref []

  (* Append at the end, preserving report order. *)
  let report e = state := !state @ [ e ]

  exception Error

  (* Record the error, then abort with the internal [Error] exception. *)
  let raise_fatal_error e =
    report e;
    raise Error

  (* Run [f]; [None] if it aborted fatally. Returns the whole error state
     collected so far, not only the errors raised by [f]. *)
  let try_ f =
    let result = try Some (f ()) with Error -> None in
    (!state, result)

  (* Run [f] against a fresh error buffer and return its result together
     with the errors it reported; the previous buffer is restored.
     NOTE(review): if [f] raises, the previous buffer is NOT restored —
     confirm whether this is intended. *)
  let capture (type a) (f : unit -> a) : a * t list =
    let previous_state = !state in
    state := [];
    let result =
      let x = f () in
      (x, !state)
    in
    state := previous_state;
    result
end

include Core

(* Report and raise a fatal error located at [span]. *)
let failure ~context ~span kind =
  Core.raise_fatal_error
    { context; kind; span = Span.to_thir span; owner_id = Span.owner_hint span }

(** Exception-based errors carrying only a context and a kind; the span (if
    any) is supplied at raise time for reporting purposes. The payload type
    is private so these can only be raised through {!raise}. *)
module SpanFreeError : sig
  type t = private Data of Context.t * kind [@@deriving show]

  exception Exn of t

  val payload : t -> Context.t * kind

  val raise :
    ?span:T.span list -> Types.def_id option -> Context.t -> kind -> 'a
end = struct
  type t = Data of Context.t * kind [@@deriving show]

  exception Exn of t

  let payload (Data (ctx, kind)) = (ctx, kind)

  (* Raise without adding the error to the global report list. *)
  let raise_without_reporting (ctx : Context.t) (kind : kind) =
    raise (Exn (Data (ctx, kind)))

  (* Report the diagnostic globally, then raise the exception. *)
  let raise ?(span = []) (owner_id : Types.def_id option) (ctx : Context.t)
      (kind : kind) =
    report { span; kind; context = ctx; owner_id };
    raise_without_reporting ctx kind
end


================================================
FILE: engine/lib/dune
================================================
; Main library of the hax engine. All OCaml sources in this directory tree
; are compiled into it (see `include_subdirs` below).
(library
 (public_name hax-engine)
 (name hax_engine)
 ;  (modules
 ;   types
 ;   concrete_ident
 ;   concrete_ident_view
 ;   concrete_ident_defid
 ;   prelude
 ;   concrete_ident_view_types
 ;   concrete_ident_generated
 ;   concrete_ident_render_sig
 ;   local_ident
 ;   thir_simple_types
 ;   concrete_ident_fresh_ns
 ;   utils)
 (libraries
  yojson
  non_empty_list
  pprint
  ppx_deriving_yojson.runtime
  cmdliner
  fstar_surface_ast
  base
  core
  logs
  re
  sourcemaps
  ocamlgraph)
 (preprocessor_deps
  ; `ppx_inline` is used on the `Subtype` module, thus we need it at PPX time
  (file subtype.ml)
  (source_tree phases))
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_string
   ppx_inline
   ppx_phases_index
   ppx_generate_features
   ppx_functor_application
   ppx_enumerate
   ppx_deriving.map
   ppx_matches)))

(include_subdirs unqualified)

; Hash of the build universe; the generated rules below that depend on this
; alias are re-run whenever the universe changes.
(rule
 (alias universe-hash)
 (target universe-hash)
 (deps
  (:universe_hash ../utils/universe-hash.sh)
  (universe))
 (action
  (with-stdout-to
   universe-hash
   (run bash %{universe_hash}))))

; The next four rules derive boilerplate modules from `ast.ml` by piping it
; through the `generate_from_ast` binary.
(rule
 (target ast_visitors.ml)
 (deps
  (:ast ast.ml))
 (action
  (with-stdout-to
   ast_visitors.ml
   (with-stdin-from
    %{ast}
    (run generate_from_ast visitors)))))

(rule
 (target generated_generic_printer_base.ml)
 (deps
  (:ast ast.ml))
 (action
  (with-stdout-to
   generated_generic_printer_base.ml
   (with-stdin-from
    %{ast}
    (run generate_from_ast printer)))))

(rule
 (target ast_destruct_generated.ml)
 (deps
  (:ast ast.ml))
 (action
  (with-stdout-to
   ast_destruct_generated.ml
   (with-stdin-from
    %{ast}
    (run generate_from_ast ast_destruct)))))

(rule
 (target ast_builder_generated.ml)
 (deps
  (:ast ast.ml))
 (action
  (with-stdout-to
   ast_builder_generated.ml
   (with-stdin-from
    %{ast}
    (run generate_from_ast ast_builder)))))

; Table of known names, produced by the names-extract binary (overridable
; through the HAX_ENGINE_NAMES_EXTRACT_BINARY environment variable).
(rule
 (target concrete_ident_generated.ml)
 (deps
  (alias universe-hash)
  (env_var HAX_ENGINE_NAMES_EXTRACT_BINARY))
 (action
  (with-stdout-to
   concrete_ident_generated.ml
   (run %{env:HAX_ENGINE_NAMES_EXTRACT_BINARY=hax-engine-names-extract}))))

; `types.ml` is generated from the JSON schemas exported by cargo-hax,
; converted to OCaml by the `ocaml_of_json_schema.js` script.
(rule
 (target types.ml)
 (deps
  (alias universe-hash)
  (env_var HAX_JSON_SCHEMA_EXPORTER_BINARY)
  (:ocaml_of_json_schema
   ../utils/ocaml_of_json_schema/ocaml_of_json_schema.js))
 (action
  (with-stdout-to
   types.ml
   (pipe-stdout
    (run %{env:HAX_JSON_SCHEMA_EXPORTER_BINARY=hax-export-json-schemas})
    (run node %{ocaml_of_json_schema} - -)))))

; Compilation flags: debug info, warnings as errors, minus a few suppressed
; warning numbers.
(env
 (_
  (flags
   (:standard -g -warn-error "+A" -w "-17-30-56-32"))))


================================================
FILE: engine/lib/export_ast.ml
================================================
open! Prelude

let deprecated_node s = failwith ("Deprecated AST node:" ^ s)

(* Placeholder for data that has no translation (yet). *)
type missing_type = unit

(* [B] is the target AST: the Rust engine's generated types. *)
module B = Rust_engine_types

(* Parse [payload] as a serialized [Types.error_node]; if parsing fails, wrap
   the raw payload in a fresh "OCamlEngineError" node carrying a single
   diagnostic located at [span]. *)
let to_error_node (span : Ast.span) (payload : string) : Types.error_node =
  try [%of_yojson: Types.error_node] (Yojson.Safe.from_string payload)
  with _ ->
    let diagnostic : Types.diagnostic =
      let node : Types.fragment = Unknown "OCamlEngineError" in
      let info : B.diagnostic_info =
        {
          context = Import;
          kind = OcamlEngineErrorPayload payload;
          span = Span.to_rust_ast_span span;
        }
      in
      { node; info }
    in

    { fragment = Unknown "OCamlEngineError"; diagnostics = [ diagnostic ] }

module Make (FA : Features.T) = struct
  open Ast
  module A = Ast.Make (FA)
  module U = Ast_utils.Make (FA)

  (* Translate safety: the input's Unsafe carries a feature witness; drop it. *)
  let dsafety_kind (safety : A.safety_kind) : B.safety_kind =
    match safety with
    | Safe -> B.Safe
    | Unsafe _witness -> B.Unsafe

  (* Translate a type, assuming it is not a hax-failure marker — [dty] below
     handles that case first. *)
  let rec dty_no_error (span : Ast.span) (ty : A.ty) : B.ty =
    Newtypety
      (match ty with
      | TBool -> Primitive Bool
      | TChar -> Primitive Char
      | TInt k -> Primitive (Int (dint_kind k))
      | TFloat k -> Primitive (Float (dfloat_kind k))
      | TStr -> Primitive Str
      | TApp { ident; args } ->
          B.App
            {
              head = dglobal_ident ident;
              args = List.map ~f:(dgeneric_value span) args;
            }
      | TArray { typ; length } ->
          Array { ty = dty span typ; length = dexpr length }
      | TSlice { witness = _; ty } -> Slice (dty span ty)
      | TRef { witness = _; typ; mut; region = _ } ->
          (* Regions carry no information on the target side. *)
          Ref
            {
              inner = dty span typ;
              mutable' = (match mut with Mutable _ -> true | _ -> false);
              region = B.EmptyStructregion2;
            }
      | TParam local_ident -> Param (dlocal_ident local_ident)
      | TArrow (inputs, output) ->
          Arrow
            { inputs = List.map ~f:(dty span) inputs; output = dty span output }
      | TAssociatedType { impl; item } ->
          AssociatedType
            { impl_ = dimpl_expr span impl; item = dconcrete_ident item }
      | TOpaque ident -> Opaque (dconcrete_ident ident)
      | TRawPointer { witness = _ } -> RawPointer
      | TDyn { witness = _; goals } ->
          Dyn (List.map ~f:(ddyn_trait_goal span) goals))

  (* Translate a type: hax-failure markers become explicit error nodes;
     everything else is translated structurally by [dty_no_error]. *)
  and dty (span : Ast.span) (ty : A.ty) : B.ty =
    match U.HaxFailure.Destruct.ty ty with
    | None -> dty_no_error span ty
    | Some payload -> Newtypety (Error (to_error_node span payload))

  (* Integer kinds are structurally identical on both sides; rebuild them
     constructor by constructor. *)
  and dint_kind (ik : int_kind) : B.int_kind =
    let size : B.int_size =
      match ik.size with
      | S8 -> B.S8
      | S16 -> B.S16
      | S32 -> B.S32
      | S64 -> B.S64
      | S128 -> B.S128
      | SSize -> B.SSize
    in
    let signedness =
      match ik.signedness with Signed -> B.Signed | Unsigned -> B.Unsigned
    in
    { size; signedness }

  (* Identity mapping between the two float-kind enumerations. *)
  and dfloat_kind (fk : float_kind) : B.float_kind =
    match fk with
    | F16 -> B.F16
    | F32 -> B.F32
    | F64 -> B.F64
    | F128 -> B.F128

  (* Translate a global identifier. Concrete idents keep their structure;
     tuple-related idents are encoded with stringified lengths/fields;
     primitives are mapped to the corresponding `Rust_primitives__hax__*`
     names. *)
  and dglobal_ident (gi : global_ident) : B.global_id =
    let concrete c : B.global_id =
      Types.Newtypeglobal_id (B.Concrete (Concrete_ident.to_rust_ast c))
    in
    let of_name n = concrete (Concrete_ident.of_name ~value:true n) in
    match gi with
    | `Concrete c | `Projector (`Concrete c) -> concrete c
    | `TupleType length ->
        Types.Newtypeglobal_id (Tuple (Type { length = Int.to_string length }))
    | `TupleCons length ->
        Types.Newtypeglobal_id
          (Tuple (Constructor { length = Int.to_string length }))
    | `Projector (`TupleField (field, length)) | `TupleField (field, length) ->
        let field, length = (Int.to_string field, Int.to_string length) in
        Types.Newtypeglobal_id (Tuple (Field { length; field }))
    | `Primitive Deref -> of_name Rust_primitives__hax__deref_op
    | `Primitive Cast -> of_name Rust_primitives__hax__cast_op
    | `Primitive (LogicalOp And) -> of_name Rust_primitives__hax__logical_op_and
    | `Primitive (LogicalOp Or) -> of_name Rust_primitives__hax__logical_op_or

  (* Only the name survives the translation of local identifiers. *)
  and dlocal_ident (li : local_ident) : B.local_id =
    B.Newtypelocal_id (B.Newtypesymbol li.name)

  (* A concrete ident is a particular case of a global ident. *)
  and dconcrete_ident (ci : concrete_ident) : B.global_id =
    `Concrete ci |> dglobal_ident

  (* Translate a dyn-trait goal (note the trailing underscore on the target
     field name). *)
  and ddyn_trait_goal span (r : A.dyn_trait_goal) : B.dyn_trait_goal =
    let trait_ = dconcrete_ident r.trait in
    let non_self_args = List.map ~f:(dgeneric_value span) r.non_self_args in
    { non_self_args; trait_ }

  (* Translate a trait goal: trait ident plus translated generic args. *)
  and dtrait_goal span (r : A.trait_goal) : B.trait_goal =
    let trait_ = dconcrete_ident r.trait in
    let args = List.map ~f:(dgeneric_value span) r.args in
    { args; trait_ }

  (* Translate an impl identifier: its goal and its symbolic name. *)
  and dimpl_ident span (r : A.impl_ident) : B.impl_ident =
    let name = B.Newtypesymbol r.name in
    { goal = dtrait_goal span r.goal; name }

  (* Translate a projection predicate: associated item, impl expr and type. *)
  and dprojection_predicate span (r : A.projection_predicate) :
      B.projection_predicate =
    let assoc_item = dconcrete_ident r.assoc_item in
    let impl_ = dimpl_expr span r.impl in
    let ty = dty span r.typ in
    { assoc_item; impl_; ty }

  (* Translate an impl expression: its goal and its kind. *)
  and dimpl_expr span (i : A.impl_expr) : B.impl_expr =
    let kind = dimpl_expr_kind span i.kind in
    { goal = dtrait_goal span i.goal; kind }

  (* Translate impl-expression kinds. [Self] becomes [Self_] on the target
     side; the other constructors are translated structurally. *)
  and dimpl_expr_kind span (i : A.impl_expr_kind) : B.impl_expr_kind =
    match i with
    | A.Self -> B.Self_
    | A.Concrete tr -> B.Concrete (dtrait_goal span tr)
    | A.LocalBound { id } -> B.LocalBound { id = B.Newtypesymbol id }
    | A.Parent { impl; ident } ->
        B.Parent
          { impl_ = dimpl_expr span impl; ident = dimpl_ident span ident }
    | A.Projection { impl; item; ident } ->
        B.Projection
          {
            impl_ = dimpl_expr span impl;
            item = dconcrete_ident item;
            ident = dimpl_ident span ident;
          }
    | A.ImplApp { impl; args } ->
        B.ImplApp
          {
            impl_ = dimpl_expr span impl;
            args = List.map ~f:(dimpl_expr span) args;
          }
    | A.Dyn -> B.Dyn
    | A.Builtin tr -> B.Builtin (dtrait_goal span tr)

  and dgeneric_value span : A.generic_value -> B.generic_value = function
    (* Lifetimes carry no payload in the target AST. *)
    | GLifetime _ -> B.Lifetime
    | GType t -> B.Ty (dty span t)
    | GConst e -> B.Expr (dexpr e)

  and dborrow_kind : A.borrow_kind -> B.borrow_kind = function
    | Shared -> B.Shared
    | Unique -> B.Unique
    (* The feature witness only exists in the source AST; drop it. *)
    | Mut _witness -> B.Mut

  and dmetadata ?(attrs = []) (span : span) : B.metadata =
    (* Attributes default to the empty list when a node carries none. *)
    let attributes = List.map ~f:dattr attrs in
    { span = dspan span; attributes }

  and dattr (a : attr) : B.attribute =
    (* Hax-specific payloads take precedence: if [Attr_payloads.payload]
       recognizes the attribute, emit a [B.Hax] node; otherwise translate
       the raw attribute kind (tool attributes and doc comments). *)
    let kind : B.attribute_kind =
      match Attr_payloads.payload a with
      | Some (payload, _) -> B.Hax payload
      | None -> (
          match a.kind with
          | Tool { path; tokens } -> B.Tool { path; tokens }
          | DocComment { kind; body } ->
              (* Line (`///`) vs block (`/** */`) doc comments. *)
              let kind =
                match kind with DCKLine -> B.Line | DCKBlock -> Block
              in
              B.DocComment { kind; body })
    in
    { kind; span = dspan a.span }

  and dpat (p : A.pat) : B.pat =
    (* Patterns recognized as hax failures become explicit error nodes;
       everything else is translated structurally by [dpat']. *)
    let kind : B.pat_kind =
      match U.HaxFailure.Destruct.pat p with
      | None -> dpat' p.span p.p
      | Some s -> Error (to_error_node p.span s)
    in
    { ty = dty p.span p.typ; meta = dmetadata p.span; kind }

  and dpat' span (pat : A.pat') : B.pat_kind =
    (* Structural translation of pattern nodes. Renames across ASTs:
       [subpats]/[subpat] become [sub_pats]/[sub_pat], and the [mut]
       feature witness becomes the plain boolean [mutable']. *)
    match pat with
    | PWild -> Wild
    | PAscription { typ; typ_span; pat } ->
        Ascription
          { pat = dpat pat; ty = { span = dspan typ_span; ty = dty span typ } }
    | PConstruct { constructor; is_record; is_struct; fields } ->
        Construct
          {
            constructor = dglobal_ident constructor;
            is_record;
            is_struct;
            fields =
              List.map
                ~f:(fun { field; pat } -> (dglobal_ident field, dpat pat))
                fields;
          }
    | POr { subpats } -> Or { sub_pats = List.map ~f:dpat subpats }
    | PArray { args } -> Array { args = List.map ~f:dpat args }
    | PDeref { subpat; witness = _ } -> Deref { sub_pat = dpat subpat }
    | PConstant { lit } -> Constant { lit = dliteral lit }
    | PBinding { mut; mode; var; typ = _; subpat } ->
        (* [typ] is dropped: the type already lives on the enclosing
           [pat] node (see [dpat]). *)
        let mutable' : bool = match mut with Mutable _ -> true | _ -> false in
        Binding
          {
            mutable';
            mode = dbinding_mode mode;
            var = dlocal_ident var;
            sub_pat = Option.map ~f:(fun (p, _) -> dpat p) subpat;
          }

  and dspan (span : span) : B.span =
    (* Span conversion is fully delegated to the [Span] helper module. *)
    Span.to_rust_ast_span span

  and dbinding_mode : A.binding_mode -> B.binding_mode = function
    | ByValue -> B.ByValue
    (* Only the borrow kind survives; the feature witness is dropped. *)
    | ByRef (kind, _witness) -> B.ByRef (dborrow_kind kind)

  and dexpr (e : A.expr) : B.expr =
    (* Expressions recognized as hax failures become explicit error
       nodes; everything else is translated structurally by [dexpr']. *)
    let kind : B.expr_kind =
      match U.HaxFailure.Destruct.expr e with
      | None -> dexpr' e.span e.e
      | Some (s, _) -> Error (to_error_node e.span s)
    in
    { meta = dmetadata e.span; ty = dty e.span e.typ; kind }

  and dexpr' span (expr : A.expr') : B.expr_kind =
    (* Structural translation of expression nodes. Feature witnesses are
       dropped; fields renamed across ASTs: [cond]/[then_] become
       [condition]/[then'], [f] becomes [head], [e] becomes [body]/[value]
       depending on the node. Nodes with no counterpart in the target AST
       are routed through [deprecated_node]. *)
    match expr with
    | If { cond; then_; else_ } ->
        If
          {
            condition = dexpr cond;
            then' = dexpr then_;
            else_ = Option.map ~f:dexpr else_;
          }
    | App { f; args; generic_args; bounds_impls; trait } ->
        App
          {
            head = dexpr f;
            args = List.map ~f:dexpr args;
            generic_args = List.map ~f:(dgeneric_value span) generic_args;
            bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls;
            (* [trait] is the (optional) impl + generic arguments the
               method is called through. *)
            trait_ =
              Option.map
                ~f:(fun (impl, args) ->
                  (dimpl_expr span impl, List.map ~f:(dgeneric_value span) args))
                trait;
          }
    | Literal lit -> Literal (dliteral lit)
    | Array exprs -> Array (List.map ~f:dexpr exprs)
    | Construct { constructor; is_record; is_struct; fields; base } ->
        Construct
          {
            constructor = dglobal_ident constructor;
            fields =
              List.map ~f:(fun (id, e) -> (dglobal_ident id, dexpr e)) fields;
            base = Option.map ~f:(fun (e, _) -> dexpr e) base;
            is_record;
            is_struct;
          }
    | Match { scrutinee; arms } ->
        Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }
    | Let { monadic = _; lhs; rhs; body } ->
        Let { lhs = dpat lhs; rhs = dexpr rhs; body = dexpr body }
    | Block { e; safety_mode; witness = _ } ->
        Block { body = dexpr e; safety_mode = dsafety_kind safety_mode }
    | LocalVar id -> LocalId (dlocal_ident id)
    | GlobalVar id -> GlobalId (dglobal_ident id)
    | Ascription { e; typ } -> Ascription { e = dexpr e; ty = dty span typ }
    (* No target counterpart: reported via [deprecated_node]. *)
    | MacroInvokation _ -> deprecated_node "MacroInvokation"
    | Assign { lhs; e; witness = _ } ->
        Assign { lhs = dlhs span lhs; value = dexpr e }
    | Loop { body; kind; state; control_flow; label; witness = _ } ->
        Loop
          {
            body = dexpr body;
            kind = dloop_kind span kind;
            state = Option.map ~f:dloop_state state;
            control_flow =
              Option.map ~f:(fun (k, _) -> dcontrol_flow_kind k) control_flow;
            label = Option.map ~f:(fun s -> B.Newtypesymbol s) label;
          }
    | Break { e; acc; label; witness = _ } ->
        Break
          {
            value = dexpr e;
            label = Option.map ~f:(fun s -> B.Newtypesymbol s) label;
            (* [acc] carries the loop-state accumulator, if any. *)
            state = Option.map ~f:(fst >> dexpr) acc;
          }
    | Return { e; witness = _ } -> Return { value = dexpr e }
    (* No target counterpart: reported via [deprecated_node]. *)
    | QuestionMark _ -> deprecated_node "QuestionMark"
    | Continue { acc; label; witness = _ } ->
        Continue
          {
            label = Option.map ~f:(fun s -> B.Newtypesymbol s) label;
            state = Option.map ~f:(fst >> dexpr) acc;
          }
    | Borrow { kind; e; witness = _ } ->
        Borrow
          {
            inner = dexpr e;
            (* Shared/unique borrows collapse to [mutable' = false]. *)
            mutable' = (match kind with Mut _ -> true | _ -> false);
          }
    | AddressOf { mut; e; witness = _ } ->
        AddressOf
          {
            inner = dexpr e;
            mutable' = (match mut with Mutable _ -> true | _ -> false);
          }
    | Closure { params; body; captures } ->
        Closure
          {
            params = List.map ~f:dpat params;
            body = dexpr body;
            captures = List.map ~f:dexpr captures;
          }
    (* No target counterpart: reported via [deprecated_node]. *)
    | EffectAction _ -> deprecated_node "EffectAction"
    | Quote q -> Quote { contents = dquote span q }

  and dcontrol_flow_kind : A.cf_kind -> B.control_flow_kind = function
    (* One-to-one mapping between the two control-flow enumerations. *)
    | BreakOnly -> B.BreakOnly
    | BreakOrReturn -> B.BreakOrReturn

  and dliteral (lit : Ast.literal) : B.literal =
    (* Numeric and string payloads are stored as symbols in the target. *)
    match lit with
    | String s -> B.String (Newtypesymbol s)
    | Char c -> B.Char c
    | Int { value; negative; kind } ->
        let kind = dint_kind kind in
        B.Int { value = Newtypesymbol value; negative; kind }
    | Float { value; negative; kind } ->
        let kind = dfloat_kind kind in
        B.Float { value = Newtypesymbol value; negative; kind }
    | Bool b -> B.Bool b

  and dquote span ({ contents; _ } : A.quote) : B.quote =
    (* A quote is a list of fragments: verbatim text or an embedded
       expression, pattern or type. *)
    let translate_fragment = function
      | A.Verbatim code -> B.Verbatim code
      | A.Expr e -> B.Expr (dexpr e)
      | A.Pattern p -> B.Pattern (dpat p)
      | A.Typ t -> B.Ty (dty span t)
    in
    Newtypequote (List.map ~f:translate_fragment contents)

  and ditem_quote_origin (iqo : item_quote_origin) : B.item_quote_origin =
    (* Translates where a quoted item comes from: which item it is
       attached to, that item's kind, and whether the quote goes before,
       after, or instead of the item. *)
    {
      item_ident = dconcrete_ident iqo.item_ident;
      item_kind =
        (match iqo.item_kind with
        | `Fn -> B.Fn
        | `TyAlias -> B.TyAlias
        | `Type -> B.Type
        | `IMacroInvokation -> B.MacroInvocation
        | `Trait -> B.Trait
        | `Impl -> B.Impl
        | `Alias -> B.Alias
        | `Use -> B.Use
        | `Quote -> B.Quote
        | `HaxError -> B.HaxError
        | `NotImplementedYet -> B.NotImplementedYet);
      position =
        (match iqo.position with
        | `Before -> B.Before
        | `After -> B.After
        | `Replace -> B.Replace);
    }

  and dloop_kind span : A.loop_kind -> B.loop_kind = function
    | A.UnconditionalLoop -> B.UnconditionalLoop
    | A.WhileLoop { condition; witness = _ } ->
        B.WhileLoop { condition = dexpr condition }
    | A.ForLoop { it; pat; witness = _ } ->
        (* [it] is renamed to [iterator] in the target AST. *)
        B.ForLoop { pat = dpat pat; iterator = dexpr it }
    | A.ForIndexLoop { start; end_; var; var_typ; witness = _ } ->
        B.ForIndexLoop
          {
            var = dlocal_ident var;
            var_ty = dty span var_typ;
            start = dexpr start;
            end' = dexpr end_;
          }

  and dloop_state (s : A.loop_state) : B.loop_state =
    (* [bpat] binds the loop state within the body; [init] seeds it. *)
    let init = dexpr s.init in
    { body_pat = dpat s.bpat; init }

  and darm (arm : A.arm) : B.arm =
    (* The arm payload lives under [arm.arm]; the span on the wrapper. *)
    {
      pat = dpat arm.arm.arm_pat;
      guard = Option.map ~f:dguard arm.arm.guard;
      body = dexpr arm.arm.body;
      meta = dmetadata arm.span;
    }

  and dguard (g : A.guard) : B.guard =
    (* Translate the guard kind and attach its span metadata. *)
    { meta = dmetadata g.span; kind = dguard' g.guard }

  and dguard' : A.guard' -> B.guard_kind = function
    (* [if let] is currently the only guard form. *)
    | IfLet { lhs; rhs; witness = _ } ->
        B.IfLet { lhs = dpat lhs; rhs = dexpr rhs }

  and dlhs span (lhs : A.lhs) : B.lhs =
    (* Left-hand sides of assignments: a local variable, a (possibly
       nested) field or array accessor, or an arbitrary expression. The
       recursion on [e] handles nesting like [x.f[i].g = ...]. *)
    match lhs with
    | A.LhsLocalVar { var; typ } ->
        B.LocalVar { var = dlocal_ident var; ty = dty span typ }
    | A.LhsVecRef { e; typ; _ } ->
        B.VecRef { e = dlhs span e; ty = dty span typ }
    | A.LhsArbitraryExpr { e; witness = _ } -> B.ArbitraryExpr (dexpr e)
    | A.LhsFieldAccessor { e; field; typ; witness = _ } ->
        B.FieldAccessor
          { e = dlhs span e; field = dglobal_ident field; ty = dty span typ }
    | A.LhsArrayAccessor { e; index; typ; witness = _ } ->
        B.ArrayAccessor
          { e = dlhs span e; index = dexpr index; ty = dty span typ }

  let dgeneric_param (p : A.generic_param) : B.generic_param =
    (* Translate the parameter kind; const parameters carry their type. *)
    let kind : B.generic_param_kind =
      match p.kind with
      | GPLifetime { witness = _ } -> Lifetime
      | GPType -> Type
      | GPConst { typ } -> Const { ty = dty p.span typ }
    in
    {
      kind;
      meta = dmetadata ~attrs:p.attrs p.span;
      ident = dlocal_ident p.ident;
    }

  let dgeneric_constraint span : A.generic_constraint -> B.generic_constraint =
    (* Lifetimes pass through as-is; trait bounds and projection
       equalities are translated structurally. *)
    function
    | GCLifetime (lf, _witness) -> Lifetime lf
    | GCType impl_ident -> TypeClass (dimpl_ident span impl_ident)
    | GCProjection projection ->
        Equality (dprojection_predicate span projection)

  let dgenerics span (g : A.generics) : B.generics =
    (* Generics are parameters plus the constraints imposed on them. *)
    let params = List.map ~f:dgeneric_param g.params in
    let constraints = List.map ~f:(dgeneric_constraint span) g.constraints in
    { params; constraints }

  let dparam span (p : A.param) : B.param =
    (* A function parameter: pattern, type, optional type span, attrs. *)
    {
      pat = dpat p.pat;
      ty = dty span p.typ;
      ty_span = Option.map ~f:dspan p.typ_span;
      attributes = List.map ~f:dattr p.attrs;
    }

  let dvariant span (v : A.variant) : B.variant =
    (* Each argument of a variant is a triple: field identifier, type,
       and attributes. *)
    let darg (id, t, attrs) =
      (dconcrete_ident id, dty span t, List.map ~f:dattr attrs)
    in
    {
      name = dconcrete_ident v.name;
      arguments = List.map ~f:darg v.arguments;
      is_record = v.is_record;
      attributes = List.map ~f:dattr v.attrs;
    }

  let dtrait_item' span : A.trait_item' -> B.trait_item_kind = function
    (* Associated types, method signatures, and default methods. *)
    | TIType idents -> Type (List.map ~f:(dimpl_ident span) idents)
    | TIFn t -> Fn (dty span t)
    | TIDefault { params; body; witness = _ } ->
        let params = List.map ~f:(dparam span) params in
        Default { params; body = dexpr body }

  let dtrait_item (ti : A.trait_item) : B.trait_item =
    (* Trait items are spanned by [ti_span] and documented by [ti_attrs]. *)
    let span = ti.ti_span in
    {
      ident = dconcrete_ident ti.ti_ident;
      generics = dgenerics span ti.ti_generics;
      kind = dtrait_item' span ti.ti_v;
      meta = dmetadata ~attrs:ti.ti_attrs span;
    }

  let dimpl_item' span : A.impl_item' -> B.impl_item_kind = function
    | IIType { typ; parent_bounds } ->
        (* Associated types also record the impls of their parent
           bounds, paired with the bound identifiers. *)
        let parent_bounds =
          List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds
        in
        Type { ty = dty span typ; parent_bounds }
    | IIFn { body; params } ->
        Fn { params = List.map ~f:(dparam span) params; body = dexpr body }

  let dimpl_item (ii : A.impl_item) : B.impl_item =
    (* Impl items are spanned by [ii_span] and documented by [ii_attrs]. *)
    let span = ii.ii_span in
    {
      ident = dconcrete_ident ii.ii_ident;
      generics = dgenerics span ii.ii_generics;
      kind = dimpl_item' span ii.ii_v;
      meta = dmetadata ~attrs:ii.ii_attrs span;
    }

  let ditem' (span : Ast.span) (item : A.item') : B.item_kind =
    (* Structural translation of top-level items. [IMacroInvokation] has
       no counterpart in the target AST and is routed through
       [deprecated_node]; [HaxError] items become explicit error nodes. *)
    match item with
    | A.Fn { name; generics; body; params; safety } ->
        B.Fn
          {
            name = dconcrete_ident name;
            generics = dgenerics span generics;
            body = dexpr body;
            params = List.map ~f:(dparam span) params;
            safety = dsafety_kind safety;
          }
    | A.Type { name; generics; variants; is_struct } ->
        B.Type
          {
            name = dconcrete_ident name;
            generics = dgenerics span generics;
            variants = List.map ~f:(dvariant span) variants;
            is_struct;
          }
    | A.TyAlias { name; generics; ty } ->
        B.TyAlias
          {
            name = dconcrete_ident name;
            generics = dgenerics span generics;
            ty = dty span ty;
          }
    | A.IMacroInvokation _ -> deprecated_node "IMacroInvokation"
    | A.Trait { name; generics; items; safety } ->
        B.Trait
          {
            name = dconcrete_ident name;
            generics = dgenerics span generics;
            items = List.map ~f:dtrait_item items;
            safety = dsafety_kind safety;
          }
    | A.Impl
        {
          generics;
          self_ty;
          of_trait = trait_id, trait_generics;
          items;
          parent_bounds;
          _;
        } ->
        B.Impl
          {
            generics = dgenerics span generics;
            self_ty = dty span self_ty;
            (* [of_trait] pairs the implemented trait with its generic
               arguments. *)
            of_trait =
              ( dconcrete_ident trait_id,
                List.map ~f:(dgeneric_value span) trait_generics );
            items = List.map ~f:dimpl_item items;
            parent_bounds =
              List.map
                ~f:(fun (impl, ident) ->
                  (dimpl_expr span impl, dimpl_ident span ident))
                parent_bounds;
          }
    | A.Alias { name; item } ->
        B.Alias { name = dconcrete_ident name; item = dconcrete_ident item }
    | A.Use { path; is_external; rename } -> B.Use { path; is_external; rename }
    | A.Quote { quote; origin } ->
        B.Quote
          { quote = dquote span quote; origin = ditem_quote_origin origin }
    | A.NotImplementedYet -> B.NotImplementedYet
    | A.HaxError s -> Error (to_error_node span s)

  let ditem (i : A.item) : B.item list =
    (* Items translate one-to-one; the list return type matches the
       phase interface, hence the singleton. *)
    let item : B.item =
      {
        ident = dconcrete_ident i.ident;
        kind = ditem' i.span i.v;
        meta = dmetadata ~attrs:i.attrs i.span;
      }
    in
    [ item ]
end


================================================
FILE: engine/lib/feature_gate.ml
================================================
open! Prelude

module DefaultSubtype = struct
  (* Default subtype for phases that simply reject every gated feature:
     mapping any gated node raises [E], carrying the span at which the
     rejected feature occurred. *)
  type error = Err of Span.t [@@deriving show, yojson, eq]

  exception E of error

  (* [reject span x] unconditionally rejects the node [x] by raising
     [E (Err span)]. The return type is fully polymorphic because this
     function never returns. *)
  let reject (type a b) : Span.t -> a -> b = fun span _ -> raise @@ E (Err span)

  include Features.SUBTYPE.Id

  (* Renders a rejection as a user-facing explanation naming the
     offending feature. (Fixed subject-verb agreement: "has been found",
     not "have been found".) *)
  let explain : error -> Features.Enumeration.t -> string =
   fun _ feat ->
    "a node of kind ["
    ^ [%show: Features.Enumeration.t] feat
    ^ "] has been found in the AST"
end

module Make
    (FA : Features.T)
    (FB : Features.T)
    (S0 : sig
            include Features.SUBTYPE.T

            type error [@@deriving show, yojson, eq]

            exception E of error

            val explain : error -> Features.Enumeration.t -> string
            val metadata : Phase_utils.Metadata.t
          end
          with module A = FA
           and module B = FB) =
struct
  let metadata = S0.metadata

  (* Wrap every subtyping function of [S0] so that a feature rejection
     (raised as [S0.E]) is converted into a proper diagnostic: an
     [ExplicitRejection] attributed to the current phase and to the span
     of the offending node. *)
  module S =
    Features.SUBTYPE.Map
      (S0)
      (struct
        let map (type a b) (f : Span.t -> a -> b)
            (feature_kind : Features.Enumeration.t) (span : Span.t) (x : a) : b
            =
          try f span x
          with S0.E err ->
            let thir_span = Span.to_thir span in
            let kind : Diagnostics.kind =
              ExplicitRejection
                { reason = S0.explain err feature_kind; issue_id = None }
            in
            let context : Diagnostics.Context.t =
              Phase S0.metadata.current_phase
            in
            Diagnostics.SpanFreeError.raise ~span:thir_span
              (Span.owner_hint span) context kind
      end)

  include Subtype.Make (FA) (FB) (S)
  module FA = FA
end


================================================
FILE: engine/lib/features.ml
================================================
(* Exhaustive list of the AST features gated by the engine. The
   [declare_features] ppx derives, for each feature, the machinery used
   by [Features.T] instances ([On]/[Off] witnesses, enumeration, ...). *)
[%%declare_features
loop,
for_loop,
for_index_loop,
while_loop,
state_passing_loop,
fold_like_loop,
continue,
break,
mutable_variable,
mutable_reference,
mutable_pointer,
reference,
slice,
raw_pointer,
early_exit,
question_mark,
macro,
as_pattern,
nontrivial_lhs,
arbitrary_lhs,
lifetime,
construct_base,
monadic_action,
monadic_binding,
quote,
block,
dyn,
match_guard,
trait_item_default,
unsafe]

(* [Full] is the feature set with every feature enabled. *)
module Full = On

(* [Rust] is the feature set of the imported Rust AST: everything on,
   except features only introduced by later phases (specialized loop
   forms, monadic constructs, quotes).
   NOTE(review): while/for loops are off here — presumably the import
   represents them via the plain [loop] feature; confirm against the
   importer. *)
module Rust = struct
  include On
  include Off.While_loop
  include Off.For_loop
  include Off.For_index_loop
  include Off.Question_mark
  include Off.Monadic_action
  include Off.Monadic_binding
  include Off.State_passing_loop
  include Off.Fold_like_loop
  include Off.Quote
end

(* Compile-time check that [Full] and [Rust] both satisfy the feature
   signature [T]; the anonymous bindings are otherwise unused. *)
module _ = struct
  module _ : T = Full
  module _ : T = Rust
end


================================================
FILE: engine/lib/generic_printer/generic_printer.ml
================================================
open! Prelude
open! Ast
open! PPrint
module LazyDoc = Generated_generic_printer_base.LazyDoc
open LazyDoc

module Annotation = struct
  (* A (line, column) location in the generated output. *)
  type loc = { line : int; col : int } [@@deriving show, yojson, eq]

  (* An annotation records which source [span] produced the text at a
     given output location. *)
  type t = loc * span [@@deriving show, yojson, eq]

  (* Order annotations by output position: line first, then column. *)
  let compare ((a, _) : t) ((b, _) : t) : int =
    let line = Int.compare a.line b.line in
    if Int.equal line 0 then Int.compare a.col b.col else line

  (** Converts a list of annotation and a string to a list of annotated string
  *)
  let split_with_string (s : string) (annots : t list) =
    (* [lines_position i] is the character offset of the [i]-th newline
       of [s]; used to turn a (line, col) location into a flat offset. *)
    let lines_position =
      String.to_list s
      |> List.filter_mapi ~f:(fun i ch ->
             match ch with '\n' -> Some i | _ -> None)
      |> List.to_array |> Array.get
    in
    let annots = List.sort ~compare annots in
    let init = ({ line = 0; col = 0 }, None) in
    (* Walk consecutive annotation pairs, producing one slice per gap;
       each slice inherits the span of the annotation that opened it. *)
    let slices =
      List.folding_map
        ~f:(fun (start, start_span) (end_, end_span) ->
          let span = Option.value ~default:end_span start_span in
          ((end_, Some end_span), (span, start, end_)))
        ~init annots
    in
    List.map slices ~f:(fun (span, start, end_) ->
        let pos = lines_position start.line + start.col in
        let len = lines_position end_.line + end_.col - pos in
        (span, String.sub s ~pos ~len))

  (* Converts one annotation into a sourcemap mapping, provided the span
     points at a real local file; otherwise returns [None]. *)
  let to_mapping ((loc, span) : t) : Sourcemaps.Source_maps.mapping option =
    let real_path (x : Types.file_name) =
      match x with
      | Real (LocalPath p) | Real (Remapped { local_path = Some p; _ }) ->
          Some p
      | _ -> None
    in
    let loc_to_loc ({ line; col } : loc) : Sourcemaps.Location.t =
      { line; col }
    in
    (* THIR locations are 1-based strings; convert to 0-based ints. *)
    let to_loc ({ col; line } : Types.loc) : loc =
      { col = Int.of_string col; line = Int.of_string line - 1 }
    in
    let* span =
      Span.to_thir span
      |> List.find ~f:(fun (s : Types.span) ->
             real_path s.filename |> Option.is_some)
    in
    let* src_filename = real_path span.filename in
    let src_start = to_loc span.lo |> loc_to_loc in
    let src_end = to_loc span.hi |> loc_to_loc in
    let dst_start = loc_to_loc loc in
    Some
      Sourcemaps.Source_maps.
        {
          src = { start = src_start; end_ = Some src_end };
          gen = { start = dst_start; end_ = None };
          source = src_filename;
          name = None;
        }
end

module AnnotatedString = struct
  (* A rendered string together with the span annotations produced while
     printing it. *)
  type t = string * Annotation.t list [@@deriving show, yojson, eq]

  let to_string = fst

  let to_spanned_strings ((s, annots) : t) : (Ast.span * string) list =
    Annotation.split_with_string s annots

  (** Lifts a string to an annotated list *)
  let pure (s : string) : t = (s, [])

  (** Concatenate two annotated strings *)
  let concat (x : t) (y : t) : t =
    let (xs, xl), (ys, yl) = (x, y) in
    (* Position (line, col) of the end of [xs]: [ys]'s annotations must
       be shifted by this amount. *)
    let last_x =
      let lines = String.split ~on:'\n' xs in
      let last_line = List.last lines |> Option.value ~default:"" in
      let col, line = (String.length last_line, List.length lines) in
      Annotation.{ col; line }
    in
    let yl =
      let f ({ line; col } : Annotation.loc) : Annotation.loc =
        {
          line = line + last_x.line;
          (* Only locations on [ys]'s first line (col 0 start) need a
             column shift; later lines keep their columns. *)
          col = (match col with 0 -> col + last_x.col | _ -> col);
        }
      in
      List.map ~f:(f *** Fn.id) yl
    in
    (xs ^ ys, xl @ yl)

  (* Builds a JSON-serializable sourcemap from the annotations, then
     repackages it into the [Types] record shape. *)
  let to_sourcemap : t -> Types.source_map =
    snd >> List.filter_map ~f:Annotation.to_mapping >> Sourcemaps.Source_maps.mk
    >>
    fun ({ mappings; sourceRoot; sources; sourcesContent; names; version; file } :
          Sourcemaps.Source_maps.t)
    ->
    Types.
      { mappings; sourceRoot; sources; sourcesContent; names; version; file }
end

(** Helper class that brings imperative span *)
class span_helper : object
  method span_data : Annotation.t list
  (** Get the span annotation accumulated while printing *)

  method with_span : span:span -> (unit -> document) -> document
  (** Runs the printer `f` under a node of span `span` *)

  method current_span : span
  (** Get the current span *)
end =
  object (self)
    (* Mutable state: the span currently being printed and the
       annotations recorded so far (most recent first). *)
    val mutable current_span = Span.default
    val mutable span_data : Annotation.t list = []
    method span_data = span_data
    method current_span = current_span

    method with_span ~(span : span) (f : unit -> document) : document =
      (* Save/restore the ambient span around [f]; the resulting doc is
         wrapped so that rendering it records where it lands. *)
      let prev_span = current_span in
      current_span <- span;
      let doc = f () |> self#spanned_doc |> custom in
      current_span <- prev_span;
      doc

    method private spanned_doc (doc : document) : custom =
      let span = current_span in
      object
        method requirement : requirement = requirement doc

        method pretty : output -> state -> int -> bool -> unit =
          fun o s i b ->
            (* Record the output position this doc is printed at, then
               defer to the wrapped document. *)
            span_data <- ({ line = s.line; col = s.column }, span) :: span_data;
            pretty o s i b doc

        method compact : output -> unit = fun o -> compact o doc
      end
  end

module Make (F : Features.T) = struct
  module AST = Ast.Make (F)
  open Ast.Make (F)
  module Gen = Generated_generic_printer_base.Make (F)

  type printer = (Annotation.t list, PPrint.document) Gen.object_type
  type finalized_printer = (unit, string * Annotation.t list) Gen.object_type

  let finalize (new_printer : unit -> printer) : finalized_printer =
    (* Instantiate a fresh printer, render the document to a string
       (ribbon 1.0, width 80), and pair it with the span annotations
       collected during rendering. *)
    Gen.map (fun apply ->
        let printer = new_printer () in
        let doc = apply printer in
        let buf = Buffer.create 0 in
        PPrint.ToBuffer.pretty 1.0 80 buf doc;
        (Buffer.contents buf, printer#span_data))

  class virtual base =
    object (self)
      inherit Gen.base as super
      inherit span_helper
      val mutable current_namespace : string list option = None

      method private catch_exn (handle : string -> document)
          (f : unit -> document) : document =
        (* Like [catch_exn'], but pretty-prints the diagnostic context
           and kind into a string before handing it to [handle]. *)
        self#catch_exn'
          (fun context kind ->
            Diagnostics.pretty_print_context_kind context kind |> handle)
          f

      method private catch_exn'
          (handle : Diagnostics.Context.t -> Diagnostics.kind -> document)
          (f : unit -> document) : document =
        (* Run the printer [f], converting span-free diagnostic
           exceptions into a document via [handle] instead of aborting. *)
        try f ()
        with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
          handle context kind

      (** {2:specialize-expr Printer settings} *)

      method virtual printer_name : string
      (** The name of this printer, used to label diagnostics (see
          [assertion_failure]). *)

      val concrete_ident_view : (module Concrete_ident.RENDER_API) =
        (module Concrete_ident.DefaultViewAPI)
      (** The concrete ident view to be used *)
      (** {2:specialize-expr Utility functions} *)

      method assertion_failure : 'any. string -> 'any =
        fun details ->
          let span = Span.to_thir self#current_span in
          let kind = Types.AssertionFailure { details } in
          let ctx = Diagnostics.Context.GenericPrinter self#printer_name in
          Diagnostics.SpanFreeError.raise ~span
            (Span.owner_hint self#current_span)
            ctx kind
      (** An assertion failed *)

      method unreachable : 'any. unit -> 'any =
        self#assertion_failure "Unreachable"
      (** Mark a path as unreachable *)

      method local_ident (id : local_ident) : document =
        let module View = (val concrete_ident_view) in
        View.local_ident
          (* Names of the form "impl ..." are not valid backend
             identifiers: replace them by a hash-based "impl_<h>" name. *)
          (match String.chop_prefix ~prefix:"impl " id.name with
          | Some _ ->
              let name = "impl_" ^ Int.to_string ([%hash: string] id.name) in
              { id with name }
          | _ -> id)
        |> string
      (** {2:specialize-expr Printers for special types} *)

      method concrete_ident ~local (id : Concrete_ident_render_sig.rendered) :
          document =
        string
          (if local then id.name
           else
             String.concat ~sep:self#module_path_separator
               (id.path @ [ id.name ]))
      (** [concrete_ident ~local id] prints a name without path if [local] is
          true, otherwise it prints the full path, separated by
          `module_path_separator`. *)

      method quote ~contents ~witness:_ : document =
        (* A quote is printed as the concatenation of its fragments. *)
        List.map ~f:(fun doc -> doc#p) contents |> concat

      (* Each quote fragment kind delegates to its lazy document. *)
      method quote_content_Verbatim v = string v
      method quote_content_Expr e = e#p
      method quote_content_Pattern p = p#p
      method quote_content_Typ t = t#p

      (** {2:specialize-expr Specialized printers for [expr]} *)

      method virtual expr'_App_constant
          : super:expr ->
            constant:concrete_ident lazy_doc ->
            generics:generic_value lazy_doc list ->
            document
      (** [expr'_App_constant ~super ~constant ~generics] prints the constant
          [e] with generics [generics]. [super] is the unspecialized [expr]. *)

      method virtual expr'_App_application
          : super:expr ->
            f:expr lazy_doc ->
            args:expr lazy_doc list ->
            generics:generic_value lazy_doc list ->
            document
      (** [expr'_App_application ~super ~f ~args ~generics] prints the function
          application [e<...generics>(...args)]. [super] is the unspecialized
          [expr]. *)

      method virtual expr'_App_tuple_projection
          : super:expr -> size:int -> nth:int -> e:expr lazy_doc -> document
      (** [expr'_App_tuple_projection ~super ~size ~nth ~e] prints the
          projection of the [nth] component of the tuple [e] of size [size].
          [super] is the unspecialized [expr]. *)

      method virtual expr'_App_field_projection
          : super:expr ->
            field:concrete_ident lazy_doc ->
            e:expr lazy_doc ->
            document
      (** [expr'_App_field_projection ~super ~field ~e] prints the projection of
          the field [field] in the expression [e]. [super] is the unspecialized
          [expr]. *)

      method virtual expr'_Construct_inductive
          : super:expr ->
            constructor:concrete_ident lazy_doc ->
            is_record:bool ->
            is_struct:bool ->
            fields:(global_ident lazy_doc * expr lazy_doc) list ->
            base:(expr lazy_doc * F.construct_base) lazy_doc option ->
            document
      (** [expr'_Construct_inductive ~super ~is_record ~is_struct ~constructor
           ~base ~fields] prints the construction of an inductive with base
          [base] and fields [fields]. [super] is the unspecialized [expr]. TODO
          doc is_record is_struct *)

      method virtual expr'_Construct_tuple
          : super:expr -> components:expr lazy_doc list -> document

      method virtual expr'_GlobalVar_concrete
          : super:expr -> concrete_ident lazy_doc -> document

      method virtual expr'_GlobalVar_primitive
          : super:expr -> primitive_ident -> document

      (** {2:specialize-pat Specialized printers for [pat]} *)

      method virtual pat'_PConstruct_inductive
          : super:pat ->
            constructor:concrete_ident lazy_doc ->
            is_record:bool ->
            is_struct:bool ->
            fields:(global_ident lazy_doc * pat lazy_doc) list ->
            document

      method virtual pat'_PConstruct_tuple
          : super:pat -> components:pat lazy_doc list -> document

      (** {2:specialize-lhs Specialized printers for [lhs]} *)

      method virtual lhs_LhsFieldAccessor_field
          : e:lhs lazy_doc ->
            typ:ty lazy_doc ->
            field:concrete_ident lazy_doc ->
            witness:F.nontrivial_lhs ->
            document

      method virtual lhs_LhsFieldAccessor_tuple
          : e:lhs lazy_doc ->
            typ:ty lazy_doc ->
            nth:int ->
            size:int ->
            witness:F.nontrivial_lhs ->
            document

      (** {2:specialize-ty Specialized printers for [ty]} *)

      method virtual ty_TApp_tuple : types:ty list -> document
      (** [ty_TApp_tuple ~types] prints a tuple type with compounds types
          [types]. *)

      method virtual ty_TApp_application
          : typ:concrete_ident lazy_doc ->
            generics:generic_value lazy_doc list ->
            document
      (** [ty_TApp_application ~typ ~generics] prints the type
          [typ<...generics>]. *)

      (** {2:specialize-ty Specialized printers for [item]} *)

      method virtual item'_Type_struct
          : super:item ->
            type_name:concrete_ident lazy_doc ->
            constructor_name:concrete_ident lazy_doc ->
            generics:generics lazy_doc ->
            tuple_struct:bool ->
            arguments:
              (concrete_ident lazy_doc * ty lazy_doc * attr list lazy_doc) list ->
            document
      (** [item'_Type_struct ~super ~type_name ~constructor_name ~generics
           ~tuple_struct ~arguments] prints the struct definition
          [struct name arguments]. `tuple_struct` says whether we are
          dealing with a tuple struct (e.g. [struct Foo(T1, T2)]) or a named
          struct (e.g. [struct Foo {field: T1, other: T2}])?

          `type_name` is the identifier of the type itself, while
          `constructor_name` is the identifier of the constructor of the struct.
          Depending on the naming policy, those can be rendered as the same name
          or not. *)

      method virtual item'_Type_enum
          : super:item ->
            name:concrete_ident lazy_doc ->
            generics:generics lazy_doc ->
            variants:variant lazy_doc list ->
            document
      (** [item'_Type_enum ~super ~name ~generics ~variants] prints the enum
          type [enum name { ... }]. *)

      method virtual item'_Enum_Variant
          : name:concrete_ident lazy_doc ->
            arguments:
              (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list ->
            is_record:bool ->
            attrs:attrs lazy_doc ->
            document
      (** [item'_Enum_Variant] prints a variant of an enum. *)

      (** {2:common-nodes Printers for common nodes} *)

      method virtual common_array : document list -> document
      (** [common_array values] is a default for printing array-like nodes:
          array patterns, array expressions. *)

      (** {2:defaults Default printers} **)

      method module_path_separator = "::"
      (** [module_path_separator] is the default separator for paths: ["::"].
          Backends whose target language uses a different separator (e.g.
          ["."]) override this method. *)

      (* Array patterns reuse the generic array printer. *)
      method pat'_PArray ~super:_ ~args =
        self#common_array (List.map ~f:(fun elem -> elem#p) args)

      (* Array expressions reuse the generic array printer. *)
      method expr'_Array ~super:_ args =
        self#common_array (List.map ~f:(fun elem -> elem#p) args)

      (* Or-patterns are rendered as sub-patterns joined by [|], allowing a
         line break before each separator. *)
      method pat'_POr ~super:_ ~subpats =
        separate_map (break 1 ^^ char '|' ^^ space) (fun sub -> sub#p) subpats

      (**/**)
      (* This section is about defining or overriding
         `_do_not_override_` methods. This is internal logic, whence this
         is excluded from documentation (with the nice and user friendly
         `(**/**)` ocamldoc syntax) *)

      (* Dispatches a field-accessor left-hand side to the tuple or the
         named-field printer. The accessor is expected to be wrapped in
         [`Projector]; anything else is a malformed AST. *)
      method _do_not_override_lhs_LhsFieldAccessor ~e ~typ ~field ~witness =
        let field =
          match field with
          | `Projector field -> field
          | _ ->
              self#assertion_failure
              @@ "LhsFieldAccessor: field not a [`Projector] "
        in
        match field with
        | `TupleField (nth, size) ->
            (* Assignment to the [nth] component of a [size]-tuple. *)
            self#lhs_LhsFieldAccessor_tuple ~e ~typ ~nth ~size ~witness
        | `Concrete field ->
            (* Assignment to a named field: wrap the identifier lazily so the
               backend decides how to render it. *)
            let field : concrete_ident lazy_doc =
              self#_do_not_override_lazy_of_concrete_ident
                AstPos_lhs_LhsFieldAccessor_field field
            in
            self#lhs_LhsFieldAccessor_field ~e ~typ ~field ~witness

      (* Dispatches an application node [f args] to the specialized printers:
         constants (zero-arity concrete functions), plain applications, tuple
         projections and field projections. The head [f] must be a global
         variable; tuples are never applied directly. *)
      method _do_not_override_expr'_App ~super ~f ~args ~generic_args
          ~bounds_impls ~trait =
        let _ = (super, f, args, generic_args, bounds_impls, trait) in
        match f#v with
        | { e = GlobalVar i; _ } -> (
            (* Projections are unary: enforce that here, once. *)
            let expect_one_arg where =
              match args with
              | [ arg ] -> arg
              | _ -> self#assertion_failure @@ "Expected one arg at " ^ where
            in
            match i with
            | `Concrete _ | `Primitive _ -> (
                match (args, i) with
                | [], `Concrete i ->
                    (* A zero-arity concrete function is a constant. *)
                    let constant =
                      self#_do_not_override_lazy_of_concrete_ident
                        AstPos_expr'_App_f i
                    in
                    self#expr'_App_constant ~super ~constant
                      ~generics:generic_args
                | [], _ -> self#assertion_failure "Primitive app of arity 0"
                | _ ->
                    self#expr'_App_application ~super ~f ~args
                      ~generics:generic_args)
            | `TupleType _ | `TupleCons _ | `TupleField _ ->
                self#assertion_failure "App: unexpected tuple"
            | `Projector (`TupleField (nth, size)) ->
                let e = expect_one_arg "projector tuple field" in
                self#expr'_App_tuple_projection ~super ~size ~nth ~e
            | `Projector (`Concrete field) ->
                let e = expect_one_arg "projector concrete" in
                let field =
                  self#_do_not_override_lazy_of_concrete_ident
                    AstPos_expr'_App_f field
                in
                self#expr'_App_field_projection ~super ~field ~e)
        (* Fixed diagnostic: this branch fires when the head of the
           application is not a [GlobalVar], not on a zero-arity primitive. *)
        | _ -> self#assertion_failure "App: expected [f] to be a [GlobalVar]"

      (* Dispatches a constructor application to the inductive or the tuple
         printer. Only concrete constructors and tuple constructors are legal
         here; every other global-ident shape is a malformed AST. *)
      method _do_not_override_expr'_Construct ~super ~constructor ~is_record
          ~is_struct ~fields ~base =
        match constructor with
        | `Concrete constructor ->
            let constructor =
              self#_do_not_override_lazy_of_concrete_ident
                AstPos_expr'_Construct_constructor constructor
            in
            let fields =
              List.map
                ~f:(fun field ->
                  let name, expr = field#v in
                  (* NOTE(review): this reuses the [pat'_PConstruct] AST
                     position for an *expression* field name — presumably a
                     copy-paste; confirm whether a dedicated
                     [AstPos_expr'_Construct_*] position was intended. *)
                  ( self#_do_not_override_lazy_of_global_ident
                      Generated_generic_printer_base
                      .AstPos_pat'_PConstruct_constructor name,
                    expr ))
                fields
            in
            self#expr'_Construct_inductive ~super ~constructor ~is_record
              ~is_struct ~fields ~base
        | `TupleCons _ ->
            (* Tuples keep only the field values; names are positional. *)
            let components = List.map ~f:(fun field -> snd field#v) fields in
            self#expr'_Construct_tuple ~super ~components
        | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->
            self#assertion_failure "Construct unexpected constructors"

      (* Renders a reference to a global variable. [`TupleCons 0] denotes the
         unit value, which is re-routed through the (empty) tuple
         construction printer. *)
      method _do_not_override_expr'_GlobalVar ~super global_ident =
        match global_ident with
        | `Concrete concrete ->
            let concrete =
              self#_do_not_override_lazy_of_concrete_ident
                AstPos_expr'_GlobalVar_x0 concrete
            in
            self#expr'_GlobalVar_concrete ~super concrete
        | `Primitive primitive ->
            self#expr'_GlobalVar_primitive ~super primitive
        | `TupleCons 0 ->
            (* Unit: print as a zero-component tuple construction. *)
            self#_do_not_override_expr'_Construct ~super
              ~constructor:global_ident ~is_record:false ~is_struct:false
              ~fields:[] ~base:None
        | _ ->
            self#assertion_failure
            @@ "GlobalVar: expected a concrete or primitive global ident, got:"
            ^ [%show: global_ident] global_ident

      (* Dispatches a constructor *pattern* to the inductive or the tuple
         printer, mirroring [_do_not_override_expr'_Construct] for the
         pattern side of the AST. *)
      method _do_not_override_pat'_PConstruct ~super ~constructor ~is_record
          ~is_struct ~fields =
        match constructor with
        | `Concrete constructor ->
            let constructor =
              self#_do_not_override_lazy_of_concrete_ident
                AstPos_pat'_PConstruct_constructor constructor
            in
            let fields =
              List.map
                ~f:(fun field ->
                  (* Each field carries both a name and a sub-pattern; wrap
                     both lazily for the backend. *)
                  let { field; pat } = field#v in
                  let field =
                    self#_do_not_override_lazy_of_global_ident
                      Generated_generic_printer_base
                      .AstPos_pat'_PConstruct_fields field
                  in
                  let pat =
                    self#_do_not_override_lazy_of_pat
                      Generated_generic_printer_base
                      .AstPos_pat'_PConstruct_fields pat
                  in
                  (field, pat))
                fields
            in
            self#pat'_PConstruct_inductive ~super ~constructor ~is_record
              ~is_struct ~fields
        | `TupleCons _ ->
            (* Tuple patterns: only the sub-patterns matter, positionally. *)
            let components =
              List.map
                ~f:(fun field ->
                  self#_do_not_override_lazy_of_pat AstPos_field_pat__pat
                    field#v.pat)
                fields
            in
            self#pat'_PConstruct_tuple ~super ~components
        | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->
            self#assertion_failure "Construct unexpected constructors"

      (* Dispatches a type application to either the named-type printer
         (concrete head) or the tuple-type printer ([`TupleType]). *)
      method _do_not_override_ty_TApp ~ident ~args =
        match ident with
        | `Concrete ident ->
            let typ =
              self#_do_not_override_lazy_of_concrete_ident AstPos_ty_TApp_args
                ident
            in
            self#ty_TApp_application ~typ ~generics:args |> group
        | `Primitive _ | `TupleCons _ | `TupleField _ | `Projector _ ->
            self#assertion_failure "TApp not concrete"
        | `TupleType size ->
            (* Keep only type arguments; const/lifetime generics are dropped
               for tuples. *)
            let types =
              List.filter_map
                ~f:(fun garg ->
                  match garg#v with GType t -> Some t | _ -> None)
                args
            in
            (* NOTE(review): the well-formedness check compares the length of
               the *unfiltered* [args] against [size], while [types] may be
               shorter if non-[GType] generics were present — confirm whether
               [List.length types] was intended. *)
            if [%equal: int] (List.length args) size |> not then
              self#assertion_failure "malformed [ty.TApp] tuple";
            self#ty_TApp_tuple ~types

      (* Dispatches a type definition: a struct is represented in the AST as a
         single-variant enum, so [is_struct] with exactly one variant goes to
         the struct printer, and anything else to the enum printer. *)
      method _do_not_override_item'_Type ~super ~name ~generics ~variants
          ~is_struct =
        let generics, _, _ = generics#v in
        if is_struct then
          match variants with
          | [ variant ] ->
              let variant_arguments =
                List.map
                  ~f:(fun (ident, typ, attrs) ->
                    ( self#_do_not_override_lazy_of_concrete_ident
                        AstPos_variant__arguments ident,
                      self#_do_not_override_lazy_of_ty AstPos_variant__arguments
                        typ,
                      self#_do_not_override_lazy_of_attrs AstPos_variant__attrs
                        attrs ))
                  variant#v.arguments
              in
              let constructor_name =
                self#_do_not_override_lazy_of_concrete_ident
                  AstPos_variant__name variant#v.name
              in
              (* A non-record single variant means a tuple struct. *)
              self#item'_Type_struct ~super ~type_name:name ~constructor_name
                ~generics ~tuple_struct:(not variant#v.is_record)
                ~arguments:variant_arguments
          | _ -> self#unreachable ()
        else self#item'_Type_enum ~super ~name ~generics ~variants

      (* Variants are printed by forwarding directly to the user-overridable
         [item'_Enum_Variant]; the explicit type annotation pins down the
         labelled signature. *)
      method _do_not_override_variant :
          name:concrete_ident lazy_doc ->
          arguments:
            (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list ->
          is_record:bool ->
          attrs:attrs lazy_doc ->
          document =
        self#item'_Enum_Variant

      (* Wraps a local identifier in a [lazy_doc] so rendering is deferred to
         the backend's [local_ident] printer. *)
      method _do_not_override_lazy_of_local_ident ast_position
          (id : local_ident) =
        lazy_doc (fun (id : local_ident) -> self#local_ident id) ast_position id

      (* Wraps a concrete identifier in a [lazy_doc]. At rendering time the
         identifier is resolved through the current ident view, and [local]
         records whether it lives in the namespace of the item being printed
         (so backends may print a short name instead of a full path). *)
      method _do_not_override_lazy_of_concrete_ident ast_position
          (id : concrete_ident) : concrete_ident lazy_doc =
        lazy_doc
          (fun (id : concrete_ident) ->
            let module View = (val concrete_ident_view) in
            let id = View.render id in
            let ns_path = Option.value ~default:[] current_namespace in
            let local = [%eq: string list] ns_path id.path in
            self#concrete_ident ~local id)
          ast_position id

      (* Wraps a global identifier in a [lazy_doc]. Concrete identifiers
         (possibly under a projector) delegate to the concrete-ident printer;
         tuple-field projectors get a placeholder rendering. Other shapes are
         malformed at this point. *)
      method _do_not_override_lazy_of_global_ident ast_position
          (id : global_ident) : global_ident lazy_doc =
        lazy_doc
          (fun (id : global_ident) ->
            match id with
            | `Concrete cid | `Projector (`Concrete cid) ->
                (self#_do_not_override_lazy_of_concrete_ident ast_position cid)
                  #p
            | `TupleField (i, j) ->
                (* Placeholder rendering ("tuple_field i j"); there is no
                   dedicated overridable printer for a bare tuple-field ident
                   (cf. [lhs_LhsFieldAccessor_tuple] for the LHS case). *)
                !^"tuple_field" ^^ space
                ^^ !^(Int.to_string i)
                ^^ space
                ^^ !^(Int.to_string j)
            | _ ->
                self#assertion_failure
                  ("_do_not_override_lazy_of_global_ident: expected [`Concrete \
                    _] got ["
                  ^ [%show: global_ident] id
                  ^ "]"))
          ast_position id

      (* Overridden to record the namespace of the item being printed before
         delegating: [current_namespace] is what
         [_do_not_override_lazy_of_concrete_ident] later compares against to
         decide whether an identifier is local. *)
      method! _do_not_override_lazy_of_item ast_position (value : item) :
          item lazy_doc =
        let module View = (val concrete_ident_view) in
        current_namespace <- Some (View.render value.ident).path;
        super#_do_not_override_lazy_of_item ast_position value

      (* Wraps a [generics] node in a [lazy_doc] carrying a triple: the lazy
         rendering of the whole generics clause plus the individual lazy
         params and constraints, so backends can re-render pieces
         independently. Printing the triple prints the first component. *)
      method _do_not_override_lazy_of_generics ast_position (value : generics) :
          (generics lazy_doc
          * generic_param lazy_doc list
          * generic_constraint lazy_doc list)
          lazy_doc =
        let params =
          List.map
            ~f:(fun x ->
              self#_do_not_override_lazy_of_generic_param
                AstPos_generics__params x)
            value.params
        in
        let constraints =
          List.map
            ~f:(fun x ->
              self#_do_not_override_lazy_of_generic_constraint
                AstPos_generics__constraints x)
            value.constraints
        in
        lazy_doc
          (fun (lazy_doc, _, _) -> lazy_doc#p)
          ast_position
          ( lazy_doc
              (fun (value : generics) ->
                self#wrap_generics ast_position value
                  (self#generics ~params ~constraints))
              ast_position value,
            params,
            constraints )

      (**/**)
    end
end


================================================
FILE: engine/lib/generic_printer/generic_printer_template.generate.js
================================================
#!/usr/bin/env node

// This script regenerates `generic_printer_template.ml`

const {readFileSync, writeFileSync} = require('fs');
const {execSync} = require('child_process');

const GENERIC_PRINTER_DIR = `lib/generic_printer`;
const GENERIC_PRINTER = `${GENERIC_PRINTER_DIR}/generic_printer.ml`;
const TEMPLATE = `${GENERIC_PRINTER_DIR}/generic_printer_template.ml`;

// Utility function to format an OCaml module
let fmt = path => execSync(`ocamlformat -i ${path}`);

// Go to the root of the engine
require('process').chdir(`${execSync('git rev-parse --show-toplevel').toString().trim()}/engine`);


// Prints the signature of module `Generic_printer` (using `ocaml-print-intf`)
// NOTE(review): `[2]` keeps the text after the *second* `class virtual base`
// occurrence — verify this still matches the printed interface layout.
let mli = execSync(`dune exec -- ocaml-print-intf ${GENERIC_PRINTER}`).toString().split('class virtual base')[2];

writeFileSync('/tmp/exported.mli', mli);

// Parse every virtual method declaration out of the printed interface.
// Fixed: the capture groups must be *named* (`name`, `sig`) — the bare
// `(?.*)` form is a regex syntax error and `v.groups` below relies on them.
let virtual_methods = [...mli.matchAll(/^( +)method (private )?virtual +(?<name>.*) +:(?<sig>.*(\n \1.*)*)/gm)];

let output = [];
for(let v of virtual_methods) {
    let {name, sig} = v.groups;
    // Return type: last arrow component, last dotted component.
    let out = sig.trim().split('->').slice(-1)[0].trim().split('.').slice(-1)[0];
    // Turn each labelled argument `~label:type` into an ignored binder.
    let args = sig.trim().split('->').map((s, i) => {
        let chunks = s.trim().split(':').reverse();
        if(chunks.length > 2 || chunks.length == 0) {
            throw "Chunks: bad length";
        }
        let [type, name] = chunks;
        name = name ? '~'+name+':_' : '_x'+(i + 1);
        return {type, name};
    }).map(n => n.name).slice(0, -1).join(' ');
    
    output.push(`method ${name} ${args} = default_${out}_for "${name}"`);
}

{
    // Splice the generated stubs between the BEGIN/END GENERATED markers.
    let [before, _, after] = readFileSync(TEMPLATE).toString().split(/(?=\(\* (?:BEGIN|END) GENERATED \*\))/);
    writeFileSync(TEMPLATE, before + '\n(* BEGIN GENERATED *)\n' + output.join('\n') + '\n' + after);
}

fmt(TEMPLATE);


================================================
FILE: engine/lib/generic_printer/generic_printer_template.ml
================================================
open! Prelude
open! Ast
open! PPrint

module Make
    (F : Features.T)
    (Default : sig
      val default : string -> string
    end) =
struct
  module AST = Ast.Make (F)
  open Ast.Make (F)
  module Base = Generic_printer.Make (F)
  open PPrint

  (* Placeholder message used for every printer method that a concrete
     backend has not yet implemented. *)
  let default_string_for name =
    "TODO: please implement the method `" ^ name ^ "`"

  (* The same placeholder, as a PPrint document. *)
  let default_document_for name = string (default_string_for name)

  (* A printer in which every visitable AST node falls back to a TODO
     placeholder document. Backends start from this class and override
     methods one by one. *)
  class printer =
    object
      inherit Base.base

      (* The methods between the BEGIN/END GENERATED markers are produced by
         [generic_printer_template.generate.js]; regenerate rather than edit
         when the printer interface changes. *)
      (* BEGIN GENERATED *)
      method arm ~arm:_ ~span:_ = default_document_for "arm"

      method arm' ~super:_ ~arm_pat:_ ~body:_ ~guard:_ =
        default_document_for "arm'"

      method attrs _x1 = default_document_for "attrs"

      method binding_mode_ByRef _x1 _x2 =
        default_document_for "binding_mode_ByRef"

      method binding_mode_ByValue = default_document_for "binding_mode_ByValue"
      method borrow_kind_Mut _x1 = default_document_for "borrow_kind_Mut"
      method borrow_kind_Shared = default_document_for "borrow_kind_Shared"
      method borrow_kind_Unique = default_document_for "borrow_kind_Unique"
      method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly"

      method cf_kind_BreakOrReturn =
        default_document_for "cf_kind_BreakOrReturn"

      method common_array _x1 = default_document_for "common_array"

      method dyn_trait_goal ~trait:_ ~non_self_args:_ =
        default_document_for "dyn_trait_goal"

      method error_expr _x1 = default_document_for "error_expr"
      method error_item _x1 = default_document_for "error_item"
      method error_pat _x1 = default_document_for "error_pat"
      method expr ~e:_ ~span:_ ~typ:_ = default_document_for "expr"

      method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness:_ =
        default_document_for "expr'_AddressOf"

      method expr'_App_application ~super:_ ~f:_ ~args:_ ~generics:_ =
        default_document_for "expr'_App_application"

      method expr'_App_constant ~super:_ ~constant:_ ~generics:_ =
        default_document_for "expr'_App_constant"

      method expr'_App_field_projection ~super:_ ~field:_ ~e:_ =
        default_document_for "expr'_App_field_projection"

      method expr'_App_tuple_projection ~super:_ ~size:_ ~nth:_ ~e:_ =
        default_document_for "expr'_App_tuple_projection"

      method expr'_Ascription ~super:_ ~e:_ ~typ:_ =
        default_document_for "expr'_Ascription"

      method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness:_ =
        default_document_for "expr'_Assign"

      method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness:_ =
        default_document_for "expr'_Block"

      method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness:_ =
        default_document_for "expr'_Borrow"

      method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness:_ =
        default_document_for "expr'_Break"

      method expr'_Closure ~super:_ ~params:_ ~body:_ ~captures:_ =
        default_document_for "expr'_Closure"

      method expr'_Construct_inductive ~super:_ ~constructor:_ ~is_record:_
          ~is_struct:_ ~fields:_ ~base:_ =
        default_document_for "expr'_Construct_inductive"

      method expr'_Construct_tuple ~super:_ ~components:_ =
        default_document_for "expr'_Construct_tuple"

      method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness:_ =
        default_document_for "expr'_Continue"

      method expr'_EffectAction ~super:_ ~action:_ ~argument:_ =
        default_document_for "expr'_EffectAction"

      method expr'_GlobalVar_concrete ~super:_ _x2 =
        default_document_for "expr'_GlobalVar_concrete"

      method expr'_GlobalVar_primitive ~super:_ _x2 =
        default_document_for "expr'_GlobalVar_primitive"

      method expr'_If ~super:_ ~cond:_ ~then_:_ ~else_:_ =
        default_document_for "expr'_If"

      method expr'_Let ~super:_ ~monadic:_ ~lhs:_ ~rhs:_ ~body:_ =
        default_document_for "expr'_Let"

      method expr'_Literal ~super:_ _x2 = default_document_for "expr'_Literal"
      method expr'_LocalVar ~super:_ _x2 = default_document_for "expr'_LocalVar"

      method expr'_Loop ~super:_ ~body:_ ~kind:_ ~state:_ ~control_flow:_
          ~label:_ ~witness:_ =
        default_document_for "expr'_Loop"

      method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ =
        default_document_for "expr'_MacroInvokation"

      method expr'_Match ~super:_ ~scrutinee:_ ~arms:_ =
        default_document_for "expr'_Match"

      method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness:_ =
        default_document_for "expr'_QuestionMark"

      method expr'_Quote ~super:_ _x2 = default_document_for "expr'_Quote"

      method expr'_Return ~super:_ ~e:_ ~witness:_ =
        default_document_for "expr'_Return"

      method field_pat ~field:_ ~pat:_ = default_document_for "field_pat"

      method generic_constraint_GCLifetime _x1 _x2 =
        default_document_for "generic_constraint_GCLifetime"

      method generic_constraint_GCProjection _x1 =
        default_document_for "generic_constraint_GCProjection"

      method generic_constraint_GCType _x1 =
        default_document_for "generic_constraint_GCType"

      method generic_param ~ident:_ ~span:_ ~attrs:_ ~kind:_ =
        default_document_for "generic_param"

      method generic_param_kind_GPConst ~typ:_ =
        default_document_for "generic_param_kind_GPConst"

      method generic_param_kind_GPLifetime ~witness:_ =
        default_document_for "generic_param_kind_GPLifetime"

      method generic_param_kind_GPType =
        default_document_for "generic_param_kind_GPType"

      method generic_value_GConst _x1 =
        default_document_for "generic_value_GConst"

      method generic_value_GLifetime ~lt:_ ~witness:_ =
        default_document_for "generic_value_GLifetime"

      method generic_value_GType _x1 =
        default_document_for "generic_value_GType"

      method generics ~params:_ ~constraints:_ = default_document_for "generics"
      method guard ~guard:_ ~span:_ = default_document_for "guard"

      method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness:_ =
        default_document_for "guard'_IfLet"

      method impl_expr ~kind:_ ~goal:_ = default_document_for "impl_expr"

      method impl_expr_kind_Builtin _x1 =
        default_document_for "impl_expr_kind_Builtin"

      method impl_expr_kind_Concrete _x1 =
        default_document_for "impl_expr_kind_Concrete"

      method impl_expr_kind_Dyn = default_document_for "impl_expr_kind_Dyn"

      method impl_expr_kind_ImplApp ~impl:_ ~args:_ =
        default_document_for "impl_expr_kind_ImplApp"

      method impl_expr_kind_LocalBound ~id:_ =
        default_document_for "impl_expr_kind_LocalBound"

      method impl_expr_kind_Parent ~impl:_ ~ident:_ =
        default_document_for "impl_expr_kind_Parent"

      method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ =
        default_document_for "impl_expr_kind_Projection"

      method impl_expr_kind_Self = default_document_for "impl_expr_kind_Self"
      method impl_ident ~goal:_ ~name:_ = default_document_for "impl_ident"

      method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v:_ ~ii_ident:_ ~ii_attrs:_
          =
        default_document_for "impl_item"

      method impl_item'_IIFn ~body:_ ~params:_ =
        default_document_for "impl_item'_IIFn"

      method impl_item'_IIType ~typ:_ ~parent_bounds:_ =
        default_document_for "impl_item'_IIType"

      method item ~v:_ ~span:_ ~ident:_ ~attrs:_ = default_document_for "item"

      method item'_Alias ~super:_ ~name:_ ~item:_ =
        default_document_for "item'_Alias"

      method item'_Enum_Variant ~name:_ ~arguments:_ ~is_record:_ ~attrs:_ =
        default_document_for "item'_Enum_Variant"

      method item'_Fn ~super:_ ~name:_ ~generics:_ ~body:_ ~params:_ ~safety:_ =
        default_document_for "item'_Fn"

      method item'_HaxError ~super:_ _x2 = default_document_for "item'_HaxError"

      method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_
          ~witness:_ =
        default_document_for "item'_IMacroInvokation"

      method item'_Impl ~super:_ ~generics:_ ~self_ty:_ ~of_trait:_ ~items:_
          ~parent_bounds:_ ~safety:_ =
        default_document_for "item'_Impl"

      method item'_NotImplementedYet =
        default_document_for "item'_NotImplementedYet"

      method item'_Quote ~super:_ ~quote:_ ~origin:_ =
        default_document_for "item'_Quote"

      method item'_Trait ~super:_ ~name:_ ~generics:_ ~items:_ ~safety:_ =
        default_document_for "item'_Trait"

      method item'_TyAlias ~super:_ ~name:_ ~generics:_ ~ty:_ =
        default_document_for "item'_TyAlias"

      method item'_Type_enum ~super:_ ~name:_ ~generics:_ ~variants:_ =
        default_document_for "item'_Type_enum"

      method item'_Type_struct ~super:_ ~type_name:_ ~constructor_name:_
          ~generics:_ ~tuple_struct:_ ~arguments:_ =
        default_document_for "item'_Type_struct"

      method item'_Use ~super:_ ~path:_ ~is_external:_ ~rename:_ =
        default_document_for "item'_Use"

      method item_quote_origin ~item_kind:_ ~item_ident:_ ~position:_ =
        default_document_for "item_quote_origin"

      method lhs_LhsArbitraryExpr ~e:_ ~witness:_ =
        default_document_for "lhs_LhsArbitraryExpr"

      method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness:_ =
        default_document_for "lhs_LhsArrayAccessor"

      method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness:_ =
        default_document_for "lhs_LhsFieldAccessor_field"

      method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness:_ =
        default_document_for "lhs_LhsFieldAccessor_tuple"

      method lhs_LhsLocalVar ~var:_ ~typ:_ =
        default_document_for "lhs_LhsLocalVar"

      method lhs_LhsVecRef ~e:_ ~typ:_ ~witness:_ =
        default_document_for "lhs_LhsVecRef"

      method literal_Bool _x1 = default_document_for "literal_Bool"
      method literal_Char _x1 = default_document_for "literal_Char"

      method literal_Float ~value:_ ~negative:_ ~kind:_ =
        default_document_for "literal_Float"

      method literal_Int ~value:_ ~negative:_ ~kind:_ =
        default_document_for "literal_Int"

      method literal_String _x1 = default_document_for "literal_String"

      method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_
          ~witness:_ =
        default_document_for "loop_kind_ForIndexLoop"

      method loop_kind_ForLoop ~pat:_ ~it:_ ~witness:_ =
        default_document_for "loop_kind_ForLoop"

      method loop_kind_UnconditionalLoop =
        default_document_for "loop_kind_UnconditionalLoop"

      method loop_kind_WhileLoop ~condition:_ ~witness:_ =
        default_document_for "loop_kind_WhileLoop"

      method loop_state ~init:_ ~bpat:_ ~witness:_ =
        default_document_for "loop_state"

      method modul _x1 = default_document_for "modul"

      method param ~pat:_ ~typ:_ ~typ_span:_ ~attrs:_ =
        default_document_for "param"

      method pat ~p:_ ~span:_ ~typ:_ = default_document_for "pat"

      method pat'_PAscription ~super:_ ~typ:_ ~typ_span:_ ~pat:_ =
        default_document_for "pat'_PAscription"

      method pat'_PBinding ~super:_ ~mut:_ ~mode:_ ~var:_ ~typ:_ ~subpat:_ =
        default_document_for "pat'_PBinding"

      method pat'_PConstant ~super:_ ~lit:_ =
        default_document_for "pat'_PConstant"

      method pat'_PConstruct_inductive ~super:_ ~constructor:_ ~is_record:_
          ~is_struct:_ ~fields:_ =
        default_document_for "pat'_PConstruct_inductive"

      method pat'_PConstruct_tuple ~super:_ ~components:_ =
        default_document_for "pat'_PConstruct_tuple"

      method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ =
        default_document_for "pat'_PDeref"

      method pat'_PWild = default_document_for "pat'_PWild"
      method printer_name = default_string_for "printer_name"

      method projection_predicate ~impl:_ ~assoc_item:_ ~typ:_ =
        default_document_for "projection_predicate"

      method safety_kind_Safe = default_document_for "safety_kind_Safe"
      method safety_kind_Unsafe _x1 = default_document_for "safety_kind_Unsafe"

      method supported_monads_MException _x1 =
        default_document_for "supported_monads_MException"

      method supported_monads_MOption =
        default_document_for "supported_monads_MOption"

      method supported_monads_MResult _x1 =
        default_document_for "supported_monads_MResult"

      method trait_goal ~trait:_ ~args:_ = default_document_for "trait_goal"

      method trait_item ~ti_span:_ ~ti_generics:_ ~ti_v:_ ~ti_ident:_
          ~ti_attrs:_ =
        default_document_for "trait_item"

      method trait_item'_TIDefault ~params:_ ~body:_ ~witness:_ =
        default_document_for "trait_item'_TIDefault"

      method trait_item'_TIFn _x1 = default_document_for "trait_item'_TIFn"
      method trait_item'_TIType _x1 = default_document_for "trait_item'_TIType"

      method ty_TApp_application ~typ:_ ~generics:_ =
        default_document_for "ty_TApp_application"

      method ty_TApp_tuple ~types:_ = default_document_for "ty_TApp_tuple"
      method ty_TArray ~typ:_ ~length:_ = default_document_for "ty_TArray"
      method ty_TArrow _x1 _x2 = default_document_for "ty_TArrow"

      method ty_TAssociatedType ~impl:_ ~item:_ =
        default_document_for "ty_TAssociatedType"

      method ty_TBool = default_document_for "ty_TBool"
      method ty_TChar = default_document_for "ty_TChar"
      method ty_TDyn ~witness:_ ~goals:_ = default_document_for "ty_TDyn"
      method ty_TFloat _x1 = default_document_for "ty_TFloat"
      method ty_TInt _x1 = default_document_for "ty_TInt"
      method ty_TOpaque _x1 = default_document_for "ty_TOpaque"
      method ty_TParam _x1 = default_document_for "ty_TParam"
      method ty_TRawPointer ~witness:_ = default_document_for "ty_TRawPointer"

      method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ =
        default_document_for "ty_TRef"

      method ty_TSlice ~witness:_ ~ty:_ = default_document_for "ty_TSlice"
      method ty_TStr = default_document_for "ty_TStr"
      (* END GENERATED *)
    end
end


================================================
FILE: engine/lib/hax_io.ml
================================================
(** This module helps communicating with `cargo-hax`. *)

open Prelude

(** Transport used to exchange JSON messages with `cargo-hax`. *)
module type S = sig
  val read_json : unit -> Yojson.Safe.t option
  (** Reads one JSON message; [None] when the channel is exhausted. *)

  val write_json : Yojson.Safe.t -> unit
  (** Writes one JSON message. *)
end

include (
  struct
    (** Holds the transport module once [init] has been called. *)
    let state = ref None

    (** Installs the transport; must run before any read/write. *)
    let init (module M : S) = state := Some (module M : S)

    let get () : (module S) =
      !state
      |> Option.value_exn
           (* Fixed message typo: "as" -> "was". *)
           ~message:"Hax engine: internal error: Hax_io was not initialized"

    let read_json () =
      let (module M) = get () in
      M.read_json ()

    let write_json json =
      let (module M) = get () in
      M.write_json json
  end :
    sig
      include S

      val init : (module S) -> unit
    end)

(* Reads one message from `cargo-hax`; fails if the channel is closed. *)
let read () : Types.to_engine =
  [%of_yojson: Types.to_engine] (Option.value_exn (read_json ()))

(* Serializes and sends one message to `cargo-hax`. *)
let write (msg : Types.from_engine) : unit =
  write_json ([%yojson_of: Types.from_engine] msg)

(* Signals `cargo-hax` that the engine is done, then performs one final read.
   Original note said "ensure no garbage collect" — presumably the trailing
   read keeps the channel alive until [Exit] is acknowledged; TODO confirm. *)
let close () : unit =
  write Exit;
  let _ = read_json () in
  ()

(* Sends [msg] and decodes the reply with [filter]; [expected] names the
   awaited message kind for the protocol-error diagnostic. *)
let request (type a) ~expected (msg : Types.from_engine)
    (filter : Types.to_engine -> a option) : a =
  write msg;
  let reply = read () in
  match filter reply with
  | Some decoded -> decoded
  | None ->
      failwith
        ("Internal error: communication protocol error between `hax-engine` \
          and `cargo-hax`. Expected `" ^ expected ^ "`, got `"
        ^ [%show: Types.to_engine] reply
        ^ "` instead.")


================================================
FILE: engine/lib/import_ast.ml
================================================
open! Prelude

(* Fails: resugared nodes must never reach the OCaml engine. [s] names the
   AST node kind at which one was encountered. *)
let refute_resugared s =
  let msg =
    "Got a resugared node at " ^ s
    ^ ". The AST is never supposed to be sent to the OCaml engine with \
       resugared nodes."
  in
  failwith msg

(* [broken_invariant s] reports the violated internal invariant [s]. *)
let broken_invariant s = failwith s

(* Placeholder for data not yet modeled on the OCaml side. *)
type missing_type = unit

(* [A] is the Rust engine's AST; [B] the OCaml engine's AST over the full
   feature set [F]. *)
module A = Rust_engine_types
module F = Features.Full

module B = struct
  include Ast
  include Ast.Make (F)
end

module U = Ast_utils.Make (F)
module Build = Ast_builder.Make (F)

exception Item_translation_failure of string

(* Extracts a human-readable message from an error node. The specific
   "OCamlEngineError" shape carries its message as a payload; any other error
   node is dumped as raw JSON. *)
let from_error_node (error_node : Types.error_node) : string =
  match (error_node.fragment, error_node.diagnostics) with
  | ( Unknown "OCamlEngineError",
      [
        {
          node = Unknown "OCamlEngineError";
          info = { kind = OcamlEngineErrorPayload payload; _ };
          _;
        };
      ] ) ->
      payload
  | _ -> [%yojson_of: Types.error_node] error_node |> Yojson.Safe.to_string

(* Translates a safety marker, materializing the [unsafe] feature witness. *)
let dsafety_kind : A.safety_kind -> B.safety_kind = function
  | Safe -> B.Safe
  | Unsafe -> B.Unsafe F.unsafe

(** Translates a type from the Rust-engine AST into the engine AST,
    materializing the feature witness each constructor requires. *)
let rec dty (Newtypety ty : A.ty) : B.ty =
  match ty with
  | Primitive Bool -> TBool
  | Primitive Char -> TChar
  | Primitive (Int k) -> TInt (dint_kind k)
  | Primitive (Float k) -> TFloat (dfloat_kind k)
  | Primitive Str -> TStr
  | App { head; args } ->
      TApp
        { ident = dglobal_ident head; args = List.map ~f:dgeneric_value args }
  | Array { ty; length } -> TArray { typ = dty ty; length = dexpr length }
  | Slice ty -> TSlice { ty = dty ty; witness = F.slice }
  | Ref { inner; mutable'; region = _ } ->
      (* Regions are not transported: every reference is given the
         placeholder region "unknown". *)
      TRef
        {
          witness = F.reference;
          typ = dty inner;
          mut = (if mutable' then Mutable F.mutable_reference else Immutable);
          region = "unknown";
        }
  | Param local_ident -> TParam (dlocal_ident local_ident)
  | Arrow { inputs; output } -> TArrow (List.map ~f:dty inputs, dty output)
  | AssociatedType { impl_; item } ->
      TAssociatedType { impl = dimpl_expr impl_; item = dconcrete_ident item }
  | Opaque ident -> TOpaque (dconcrete_ident ident)
  | RawPointer -> TRawPointer { witness = F.raw_pointer }
  | Dyn goals ->
      TDyn { witness = F.dyn; goals = List.map ~f:ddyn_trait_goal goals }
  | Resugared _ -> refute_resugared "ty"
  (* Error nodes become explicit hax-failure types carrying the message. *)
  | Error s -> U.HaxFailure.Build.ty (from_error_node s)

(** Translates an integer kind (bit width + signedness) between the two
    ASTs; both sides use identically-named constructors. *)
and dint_kind (ik : A.int_kind) : B.int_kind =
  {
    size =
      (match ik.size with
      | S8 -> S8
      | S16 -> S16
      | S32 -> S32
      | S64 -> S64
      | S128 -> S128
      | SSize -> SSize);
    signedness =
      (match ik.signedness with Signed -> Signed | Unsigned -> Unsigned);
  }

(** Translates float precisions; the mapping is one-to-one. *)
and dfloat_kind (fk : A.float_kind) : B.float_kind =
  match fk with
  | F16 -> F16
  | F32 -> F32
  | F64 -> F64
  | F128 -> F128

(** Translates a global identifier. Field identifiers are wrapped in
    [`Projector] unless [skip_projector] is set (callers set it when the
    field occurs as a constructor-field label rather than as an accessor).
    A handful of hax intrinsics are mapped to primitive identifiers. *)
and dglobal_ident ?(skip_projector : bool = false)
    (Newtypeglobal_id gi : A.global_id) : B.global_ident =
  match gi with
  | Types.Concrete c -> (
      let ci = Concrete_ident.from_rust_ast c in
      match c.def_id.def_id.kind with
      | Field ->
          let res = `Concrete ci in
          if skip_projector then res else `Projector res
      | _ ->
          (* Recognize hax's intrinsic operations and lower them to
             primitives instead of concrete identifiers. *)
          let is name = Concrete_ident.eq_name name ci in
          if is Rust_primitives__hax__deref_op then `Primitive Deref
          else if is Rust_primitives__hax__cast_op then `Primitive Cast
          else if is Rust_primitives__hax__logical_op_and then
            `Primitive (LogicalOp And)
          else if is Rust_primitives__hax__logical_op_or then
            `Primitive (LogicalOp Or)
          else `Concrete ci)
  | Types.Tuple t -> (
      (* Tuple types/constructors/fields carry their arity (and field
         index) as strings; decode them to ints. *)
      match t with
      | Types.Type { length } -> `TupleType (Int.of_string length)
      | Types.Constructor { length } -> `TupleCons (Int.of_string length)
      | Types.Field { length; field } ->
          `TupleField (Int.of_string field, Int.of_string length))
  | Types.FreshModule _ ->
      broken_invariant
        ("dglobal_ident: got a [`FreshModule _]: "
        ^ [%show: A.global_id_inner] gi)

(** Translates a local identifier. The numeric id is not transported by the
    Rust engine, so every local gets the default id [(Expr, 0)]. *)
and dlocal_ident (li : A.local_id) : B.local_ident =
  let (Newtypelocal_id (Newtypesymbol name)) = li in
  { id = (Expr, 0); name }

(** Like [dglobal_ident], but expects a concrete identifier (or a projector
    over one, as produced for variant fields); anything else is an
    invariant violation. *)
and dconcrete_ident (gi : A.global_id) : B.concrete_ident =
  match dglobal_ident gi with
  | `Concrete id -> id
  (* For variant fields *)
  | `Projector (`Concrete id) -> id
  | _ ->
      broken_invariant
        ("dconcrete_ident: got something else than a [`Concrete _]: "
        ^ [%show: A.global_id] gi)

(** Translates the goal of a [dyn Trait] type (trait + non-self args). *)
and ddyn_trait_goal (r : A.dyn_trait_goal) : B.dyn_trait_goal =
  let trait = dconcrete_ident r.trait_ in
  let non_self_args = List.map ~f:dgeneric_value r.non_self_args in
  { non_self_args; trait }

(** Translates a trait goal: a trait applied to its generic arguments. *)
and dtrait_goal (r : A.trait_goal) : B.trait_goal =
  let trait = dconcrete_ident r.trait_ in
  { args = List.map ~f:dgeneric_value r.args; trait }

(** Translates an impl identifier: the trait goal it solves plus a name. *)
and dimpl_ident (r : A.impl_ident) : B.impl_ident =
  let (Newtypesymbol name) = r.name in
  { goal = dtrait_goal r.goal; name }

(** Translates a projection predicate (an associated-type equality bound). *)
and dprojection_predicate (r : A.projection_predicate) : B.projection_predicate
    =
  let impl = dimpl_expr r.impl_ in
  let assoc_item = dconcrete_ident r.assoc_item in
  { assoc_item; impl; typ = dty r.ty }

(** Translates an impl expression: the goal it proves plus how it is
    proved. *)
and dimpl_expr (i : A.impl_expr) : B.impl_expr =
  let kind = dimpl_expr_kind i.kind in
  { goal = dtrait_goal i.goal; kind }

(** Translates the proof shape of an impl expression. Error nodes abort the
    whole item via [Item_translation_failure] (caught in [ditem]). *)
and dimpl_expr_kind (i : A.impl_expr_kind) : B.impl_expr_kind =
  match i with
  | A.Self_ -> B.Self
  | A.Concrete tr -> B.Concrete (dtrait_goal tr)
  | A.LocalBound { id = A.Newtypesymbol id } -> B.LocalBound { id }
  | A.Parent { impl_; ident } ->
      B.Parent { impl = dimpl_expr impl_; ident = dimpl_ident ident }
  | A.Projection { impl_; item; ident } ->
      B.Projection
        {
          impl = dimpl_expr impl_;
          item = dconcrete_ident item;
          ident = dimpl_ident ident;
        }
  | A.ImplApp { impl_; args } ->
      B.ImplApp { impl = dimpl_expr impl_; args = List.map ~f:dimpl_expr args }
  | A.Dyn -> B.Dyn
  | A.Builtin tr -> B.Builtin (dtrait_goal tr)
  | A.Error s -> raise (Item_translation_failure (from_error_node s))

(** Translates a generic argument; lifetimes are erased to the empty
    string. *)
and dgeneric_value (generic_value : A.generic_value) : B.generic_value =
  match generic_value with
  | Ty t -> B.GType (dty t)
  | Expr e -> B.GConst (dexpr e)
  | Lifetime -> B.GLifetime { lt = ""; witness = F.lifetime }

(** Translates borrow kinds, materializing the mutable-reference witness
    for mutable borrows. *)
and dborrow_kind (borrow_kind : A.borrow_kind) : B.borrow_kind =
  match borrow_kind with
  | Mut -> B.Mut F.mutable_reference
  | Shared -> B.Shared
  | Unique -> B.Unique

(* Translates a list of attributes. *)
and dattributes (m : A.attribute2 list) : B.attrs = List.map ~f:dattr m

(* Span conversion is delegated to the shared [Span] machinery. *)
and dspan = Span.from_rust_ast_span

(** Translates one attribute: tool attributes, doc comments, or structured
    hax payloads (the latter delegated to [Attr_payloads.to_attr]). *)
and dattr (a : A.attribute) : B.attr =
  let span = dspan a.span in
  match a.kind with
  | Tool { path; tokens } -> { kind = B.Tool { path; tokens }; span }
  | DocComment { kind; body } ->
      let kind = match kind with Line -> B.DCKLine | Block -> DCKBlock in
      { kind = B.DocComment { kind; body }; span }
  | Hax payload -> Attr_payloads.to_attr payload span

(** Translates a pattern, threading its type and span down to [dpat']. *)
and dpat (p : A.pat) : B.pat =
  let span = dspan p.meta.span in
  let typ = dty p.ty in
  { p = dpat' span typ p.kind; span; typ }

(** Translates a pattern kind. [span] and [parent_ty] are the span and type
    of the enclosing pattern; they are needed for bindings and for building
    failure nodes. *)
and dpat' span parent_ty (pat : A.pat_kind) : B.pat' =
  match pat with
  | Wild -> PWild
  | Ascription { pat; ty = { ty; span } } ->
      PAscription { pat = dpat pat; typ_span = dspan span; typ = dty ty }
  | Construct { constructor; is_record; is_struct; fields } ->
      PConstruct
        {
          constructor = dglobal_ident constructor;
          is_record;
          is_struct;
          fields =
            List.map
              ~f:(fun (field, pat) ->
                B.
                  {
                    (* Constructor-field labels stay plain identifiers, not
                       projectors. *)
                    field = dglobal_ident ~skip_projector:true field;
                    pat = dpat pat;
                  })
              fields;
        }
  | Or { sub_pats } -> POr { subpats = List.map ~f:dpat sub_pats }
  | Array { args } -> PArray { args = List.map ~f:dpat args }
  | Deref { sub_pat } -> PDeref { subpat = dpat sub_pat; witness = F.reference }
  | Constant { lit } -> PConstant { lit = dliteral lit }
  | Binding { mutable'; mode; var; sub_pat } ->
      let mut = if mutable' then B.Mutable F.mutable_variable else Immutable in
      PBinding
        {
          mut;
          mode = dbinding_mode mode;
          var = dlocal_ident var;
          subpat = Option.map ~f:(fun p -> (dpat p, F.as_pattern)) sub_pat;
          typ = parent_ty;
        }
  (* Fix: report the correct syntactic category. This previously said "ty"
     (copy-paste from [dty]), misreporting where the resugared node was
     found. *)
  | Resugared _ -> refute_resugared "pat"
  | Error diag ->
      let s = from_error_node diag in
      (U.HaxFailure.Build.pat span parent_ty s).p

(** Translates the binding mode of a pattern variable. *)
and dbinding_mode (binding_mode : A.binding_mode) : B.binding_mode =
  match binding_mode with
  | ByRef kind -> B.ByRef (dborrow_kind kind, F.reference)
  | ByValue -> B.ByValue

(** Translates an expression, threading its type and span to [dexpr']. *)
and dexpr (e : A.expr) : B.expr =
  let span = dspan e.meta.span in
  let typ = dty e.ty in
  { e = dexpr' span typ e.kind; typ; span }

(** Translates an expression kind. [span] and [typ] belong to the enclosing
    expression and are only used to build failure nodes. *)
and dexpr' span typ (expr : A.expr_kind) : B.expr' =
  match expr with
  | If { condition; then'; else_ } ->
      If
        {
          cond = dexpr condition;
          then_ = dexpr then';
          else_ = Option.map ~f:dexpr else_;
        }
  | App { head; args; generic_args; bounds_impls; trait_ } ->
      App
        {
          f = dexpr head;
          args = List.map ~f:dexpr args;
          generic_args = List.map ~f:dgeneric_value generic_args;
          bounds_impls = List.map ~f:dimpl_expr bounds_impls;
          trait =
            Option.map
              ~f:(fun (impl, args) ->
                (dimpl_expr impl, List.map ~f:dgeneric_value args))
              trait_;
        }
  | Literal lit -> Literal (dliteral lit)
  | Array exprs -> Array (List.map ~f:dexpr exprs)
  | Construct { constructor; is_record; is_struct; fields; base } ->
      Construct
        {
          constructor = dglobal_ident constructor;
          (* Field labels stay plain identifiers (no projector wrapping). *)
          fields =
            List.map
              ~f:(fun (id, e) ->
                (dglobal_ident ~skip_projector:true id, dexpr e))
              fields;
          base = Option.map ~f:(fun e -> (dexpr e, F.construct_base)) base;
          is_record;
          is_struct;
        }
  | Match { scrutinee; arms } ->
      Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }
  | Let { lhs; rhs; body } ->
      Let { lhs = dpat lhs; rhs = dexpr rhs; body = dexpr body; monadic = None }
  | Block { body; safety_mode } ->
      Block
        {
          e = dexpr body;
          safety_mode = dsafety_kind safety_mode;
          witness = F.block;
        }
  | LocalId id -> LocalVar (dlocal_ident id)
  | GlobalId id -> GlobalVar (dglobal_ident id)
  | Ascription { e; ty } -> Ascription { e = dexpr e; typ = dty ty }
  | Assign { lhs; value } ->
      Assign { lhs = dlhs lhs; e = dexpr value; witness = F.mutable_variable }
  | Loop { body; kind; state; control_flow; label } ->
      Loop
        {
          body = dexpr body;
          kind = dloop_kind kind;
          state = Option.map ~f:dloop_state state;
          control_flow =
            Option.map
              ~f:(fun k -> (dcontrol_flow_kind k, F.fold_like_loop))
              control_flow;
          label = Option.map ~f:(fun (A.Newtypesymbol s) -> s) label;
          witness = F.loop;
        }
  | Break { value; label; state } ->
      Break
        {
          e = dexpr value;
          label = Option.map ~f:(fun (A.Newtypesymbol s) -> s) label;
          (* [state] is the accumulator of a state-passing loop. *)
          acc = Option.map ~f:(fun e -> (dexpr e, F.state_passing_loop)) state;
          witness = (F.break, F.loop);
        }
  | Return { value } -> Return { e = dexpr value; witness = F.early_exit }
  | Continue { label; state } ->
      Continue
        {
          label = Option.map ~f:(fun (Newtypesymbol s) -> s) label;
          acc = Option.map ~f:(fun e -> (dexpr e, F.state_passing_loop)) state;
          witness = (F.continue, F.loop);
        }
  | Borrow { mutable'; inner } ->
      Borrow
        {
          e = dexpr inner;
          kind = (if mutable' then Mut F.mutable_reference else B.Shared);
          witness = F.reference;
        }
  | AddressOf { mutable'; inner } ->
      AddressOf
        {
          e = dexpr inner;
          mut = (if mutable' then Mutable F.mutable_pointer else Immutable);
          witness = F.raw_pointer;
        }
  | Closure { params; body; captures } ->
      Closure
        {
          params = List.map ~f:dpat params;
          body = dexpr body;
          captures = List.map ~f:dexpr captures;
        }
  | Quote { contents } -> Quote (dquote contents)
  | Resugared _ -> refute_resugared "expr"
  (* Error nodes become explicit hax-failure expressions. *)
  | Error diag -> (U.HaxFailure.Build.expr span typ (from_error_node diag) "").e

(** Translates the control-flow kind attached to fold-like loops. *)
and dcontrol_flow_kind (cfk : A.control_flow_kind) : B.cf_kind =
  match cfk with
  | BreakOnly -> B.BreakOnly
  | BreakOrReturn -> B.BreakOrReturn

(** Translates literals; numeric values are transported as strings and kept
    as such. *)
and dliteral (l : A.literal) : B.literal =
  match l with
  | Bool b -> B.Bool b
  | Char c -> B.Char c
  | String (Newtypesymbol s) -> B.String s
  | Int { value = Newtypesymbol value; negative; kind } ->
      B.Int { value; negative; kind = dint_kind kind }
  | Float { value = Newtypesymbol value; negative; kind } ->
      B.Float { value; negative; kind = dfloat_kind kind }

(** Translates a quote: verbatim backend code interleaved with spliced AST
    fragments (expressions, patterns, types). *)
and dquote (Newtypequote contents : A.quote) : B.quote =
  let piece = function
    | A.Expr e -> B.Expr (dexpr e)
    | A.Pattern p -> B.Pattern (dpat p)
    | A.Ty t -> B.Typ (dty t)
    | A.Verbatim code -> B.Verbatim code
  in
  { contents = List.map ~f:piece contents; witness = F.quote }

(** Translates the origin metadata of a quoted item: the item it is
    attached to, that item's kind, and where the quote is placed relative
    to it. *)
and ditem_quote_origin (iqo : A.item_quote_origin) : B.item_quote_origin =
  {
    item_ident = dconcrete_ident iqo.item_ident;
    item_kind =
      (match iqo.item_kind with
      | A.Fn -> `Fn
      | A.TyAlias -> `TyAlias
      | A.Type -> `Type
      | A.MacroInvocation -> `IMacroInvokation
      | A.Trait -> `Trait
      | A.Impl -> `Impl
      | A.Alias -> `Alias
      | A.Use -> `Use
      | A.Quote -> `Quote
      | A.HaxError -> `HaxError
      | A.NotImplementedYet -> `NotImplementedYet);
    position =
      (match iqo.position with
      | A.Before -> `Before
      | A.After -> `After
      | A.Replace -> `Replace);
  }

(** Translates the four loop flavors, materializing each flavor's feature
    witness. *)
and dloop_kind (k : A.loop_kind) : B.loop_kind =
  match k with
  | A.UnconditionalLoop -> B.UnconditionalLoop
  | A.WhileLoop { condition } ->
      B.WhileLoop { condition = dexpr condition; witness = F.while_loop }
  | A.ForLoop { iterator; pat } ->
      B.ForLoop { it = dexpr iterator; pat = dpat pat; witness = F.for_loop }
  | A.ForIndexLoop { start; end'; var; var_ty } ->
      B.ForIndexLoop
        {
          start = dexpr start;
          end_ = dexpr end';
          var = dlocal_ident var;
          var_typ = dty var_ty;
          witness = F.for_index_loop;
        }

(** Translates the accumulator state of a state-passing loop. *)
and dloop_state (s : A.loop_state) : B.loop_state =
  let bpat = dpat s.body_pat in
  let init = dexpr s.init in
  { bpat; init; witness = F.state_passing_loop }

(** Translates a match arm: pattern, optional guard, and body. *)
and darm (a : A.arm) : B.arm =
  {
    span = dspan a.meta.span;
    arm =
      {
        arm_pat = dpat a.pat;
        guard = Option.map ~f:dguard a.guard;
        body = dexpr a.body;
      };
  }

(** Translates a match-arm guard. *)
and dguard (a : A.guard) : B.guard =
  let span = dspan a.meta.span in
  { guard = dguard' a.kind; span }

(* Translates a guard kind; [IfLet] is the only kind transported here. *)
and dguard' (guard : A.guard_kind) : B.guard' =
  match guard with
  | IfLet { lhs; rhs } ->
      B.IfLet { lhs = dpat lhs; rhs = dexpr rhs; witness = F.match_guard }

(** Translates the left-hand side of an assignment, materializing the
    non-trivial/arbitrary-lhs witnesses where needed. *)
and dlhs (lhs : A.lhs) : B.lhs =
  match lhs with
  | A.LocalVar { var; ty } ->
      B.LhsLocalVar { var = dlocal_ident var; typ = dty ty }
  | A.VecRef { e; ty } ->
      B.LhsVecRef { e = dlhs e; typ = dty ty; witness = F.nontrivial_lhs }
  | A.ArbitraryExpr e ->
      B.LhsArbitraryExpr { e = dexpr e; witness = F.arbitrary_lhs }
  | A.FieldAccessor { e; field; ty } ->
      B.LhsFieldAccessor
        {
          e = dlhs e;
          field = dglobal_ident field;
          typ = dty ty;
          witness = F.nontrivial_lhs;
        }
  | A.ArrayAccessor { e; index; ty } ->
      B.LhsArrayAccessor
        {
          e = dlhs e;
          index = dexpr index;
          typ = dty ty;
          witness = F.nontrivial_lhs;
        }

(** Translates a generic parameter (lifetime, type or const parameter). *)
let dgeneric_param ({ ident; meta; kind } : A.generic_param) : B.generic_param =
  let kind : B.generic_param_kind =
    match kind with
    | Const { ty } -> GPConst { typ = dty ty }
    | Type -> GPType
    | Lifetime -> GPLifetime { witness = F.lifetime }
  in
  let span = dspan meta.span in
  let attrs = dattributes meta.attributes in
  { ident = dlocal_ident ident; span; attrs; kind }

(** Translates a generic constraint: a lifetime bound, a trait bound, or an
    associated-type equality. *)
let dgeneric_constraint (generic_constraint : A.generic_constraint) :
    B.generic_constraint =
  match generic_constraint with
  | Equality projection -> GCProjection (dprojection_predicate projection)
  | TypeClass impl_ident -> GCType (dimpl_ident impl_ident)
  | Lifetime lf -> GCLifetime (lf, F.lifetime)

(** Translates a generics clause: its parameters and constraints. *)
let dgenerics (g : A.generics) : B.generics =
  let params = List.map ~f:dgeneric_param g.params in
  let constraints = List.map ~f:dgeneric_constraint g.constraints in
  { constraints; params }

(** Translates a function parameter (pattern, type, optional type span,
    attributes). *)
let dparam (p : A.param) : B.param =
  {
    pat = dpat p.pat;
    typ = dty p.ty;
    typ_span = Option.map ~f:dspan p.ty_span;
    attrs = dattributes p.attributes;
  }

(** Translates a datatype variant and its field arguments. *)
let dvariant (v : A.variant) : B.variant =
  let darg (id, t, a) = (dconcrete_ident id, dty t, dattributes a) in
  {
    name = dconcrete_ident v.name;
    arguments = List.map ~f:darg v.arguments;
    is_record = v.is_record;
    attrs = dattributes v.attributes;
  }

(** Translates the payload of a trait item: an associated type (with its
    bounds), a function signature, or a default implementation. *)
let dtrait_item' (ti : A.trait_item_kind) : B.trait_item' =
  match ti with
  | Type idents -> TIType (List.map ~f:dimpl_ident idents)
  | Fn t -> TIFn (dty t)
  | Default { params; body } ->
      TIDefault
        {
          params = List.map ~f:dparam params;
          body = dexpr body;
          witness = F.trait_item_default;
        }
  | Resugared _ -> refute_resugared "trait_item"
  (* NOTE(review): the error payload is discarded here, unlike
     [dimpl_expr_kind] which forwards [from_error_node]'s message. *)
  | Error _ -> failwith "TraitItem error node"

(** Translates a trait item, wiring in its generics, span and attributes. *)
let dtrait_item (ti : A.trait_item) : B.trait_item =
  let ti_span = dspan ti.meta.span in
  let ti_attrs = dattributes ti.meta.attributes in
  {
    ti_ident = dconcrete_ident ti.ident;
    ti_generics = dgenerics ti.generics;
    ti_v = dtrait_item' ti.kind;
    ti_span;
    ti_attrs;
  }

(** Translates the payload of an impl item: an associated type (with the
    impls proving its parent bounds) or a function body. The [( *** )]
    combinator maps the pair of translators over each tuple. *)
let dimpl_item' (ii : A.impl_item_kind) : B.impl_item' =
  match ii with
  | Type { ty; parent_bounds } ->
      IIType
        {
          typ = dty ty;
          parent_bounds = List.map ~f:(dimpl_expr *** dimpl_ident) parent_bounds;
        }
  | Fn { body; params } ->
      IIFn { body = dexpr body; params = List.map ~f:dparam params }
  | Resugared _ -> refute_resugared "impl_item"
  (* NOTE(review): the error payload is discarded here. *)
  | Error _ -> failwith "Impl item error node"

(** Translates an impl item, wiring in its generics, span and attributes. *)
let dimpl_item (ii : A.impl_item) : B.impl_item =
  let ii_span = dspan ii.meta.span in
  let ii_attrs = dattributes ii.meta.attributes in
  {
    ii_ident = dconcrete_ident ii.ident;
    ii_generics = dgenerics ii.generics;
    ii_v = dimpl_item' ii.kind;
    ii_span;
    ii_attrs;
  }

(** Translates an item kind. Returns [None] for [RustModule] items, which
    have no counterpart in the engine AST. *)
let ditem' (item : A.item_kind) : B.item' option =
  match item with
  | A.Fn { name; generics; body; params; safety } ->
      B.Fn
        {
          name = dconcrete_ident name;
          generics = dgenerics generics;
          body = dexpr body;
          params = List.map ~f:dparam params;
          safety = dsafety_kind safety;
        }
      |> Option.some
  | A.Type { name; generics; variants; is_struct } ->
      B.Type
        {
          name = dconcrete_ident name;
          generics = dgenerics generics;
          variants = List.map ~f:dvariant variants;
          is_struct;
        }
      |> Option.some
  | A.TyAlias { name; generics; ty } ->
      B.TyAlias
        {
          name = dconcrete_ident name;
          generics = dgenerics generics;
          ty = dty ty;
        }
      |> Option.some
  | A.Trait { name; generics; items; safety } ->
      B.Trait
        {
          name = dconcrete_ident name;
          generics = dgenerics generics;
          items = List.map ~f:dtrait_item items;
          safety = dsafety_kind safety;
        }
      |> Option.some
  | A.Impl
      {
        generics;
        self_ty;
        of_trait = trait_id, trait_generics;
        items;
        parent_bounds;
      } ->
      B.Impl
        {
          generics = dgenerics generics;
          self_ty = dty self_ty;
          of_trait =
            (dconcrete_ident trait_id, List.map ~f:dgeneric_value trait_generics);
          items = List.map ~f:dimpl_item items;
          parent_bounds =
            List.map
              ~f:(fun (impl, ident) -> (dimpl_expr impl, dimpl_ident ident))
              parent_bounds;
          (* NOTE(review): impl safety is not transported by the Rust
             engine; it is defaulted to [Safe] here. *)
          safety = Safe;
        }
      |> Option.some
  | A.Alias { name; item } ->
      B.Alias { name = dconcrete_ident name; item = dconcrete_ident item }
      |> Option.some
  | A.Use { path; is_external; rename } ->
      B.Use { path; is_external; rename } |> Option.some
  | A.Quote { quote; origin } ->
      B.Quote { quote = dquote quote; origin = ditem_quote_origin origin }
      |> Option.some
  | A.Error diag -> B.HaxError (from_error_node diag) |> Option.some
  | A.NotImplementedYet -> B.NotImplementedYet |> Option.some
  (* [refute_resugared] raises, so the [Option.some] is never reached. *)
  | Resugared _ -> refute_resugared "item_kind" |> Option.some
  | A.RustModule -> None

(** Translates one item into zero ([RustModule]) or one engine items. If
    translation raises [Item_translation_failure], the item is replaced by
    a hax-error item carrying the message, so one failing item does not
    abort the rest of the translation. *)
let ditem (i : A.item) : B.item list =
  try
    match ditem' i.kind with
    | Some v ->
        [
          {
            ident = dconcrete_ident i.ident;
            v;
            span = dspan i.meta.span;
            attrs = dattributes i.meta.attributes;
          };
        ]
    | _ -> []
  with Item_translation_failure msg ->
    [ B.make_hax_error_item (dspan i.meta.span) (dconcrete_ident i.ident) msg ]


================================================
FILE: engine/lib/import_thir.ml
================================================
(* Aliases over the generated [Types] module: gives readable names to the
   auto-generated `..._for__thir_body` types used throughout this file. *)
module Thir = struct
  include Types

  type item = item_for__thir_body
  type item_kind = item_kind_for__thir_body
  type impl_item = impl_item_for__thir_body
  type impl_item_kind = impl_item_kind_for__thir_body
  type generics = generics_for__thir_body
  type trait_item_kind = trait_item_kind_for__thir_body
  type generic_param = generic_param_for__thir_body
  type generic_param_kind = generic_param_kind_for__thir_body
  type trait_item = trait_item_for__thir_body
  type ty = node_for__ty_kind
  type item_ref = node_for__item_ref_contents
  type trait_ref = item_ref
end

open! Prelude
open Diagnostics

(** Reports an unrecoverable assertion failure at [span] through the
    diagnostics machinery (raises via [Diagnostics.SpanFreeError.raise]). *)
let assertion_failure (span : Thir.span list) (details : string) =
  let kind = T.AssertionFailure { details } in
  Diagnostics.SpanFreeError.raise ~span
    (Span.dummy () |> Span.owner_hint)
    ThirImport kind

(** Reports a not-yet-implemented construct at [span]; [issue_id] is the
    number of the tracking issue. An empty [details] string is elided. *)
let unimplemented ~issue_id (span : Thir.span list) (details : string) =
  let kind =
    T.Unimplemented
      {
        issue_id = Some (MyInt64.of_int issue_id);
        details = String.(if details = "" then None else Some details);
      }
  in
  Diagnostics.SpanFreeError.raise ~span
    (Span.dummy () |> Span.owner_hint)
    ThirImport kind

(* The local [Ast] is the engine AST instantiated on the [Rust] feature
   set; [W] provides the "on" feature witnesses. *)
module Ast = struct
  include Ast
  include Rust
end

module U = Ast_utils.Make (Features.Rust)
module W = Features.On
module Ast_builder = Ast_builder.Make (Features.Rust)
open Ast

(** Converts a THIR [def_id] into a global identifier; [value] says whether
    the definition lives in the value namespace. *)
let def_id ~value (def_id : Thir.def_id) : global_ident =
  `Concrete (Concrete_ident.of_def_id ~value def_id)

(** Converts a THIR local identifier, deriving an id in namespace [kind]
    from the THIR-provided numeric local id. *)
let local_ident kind (ident : Thir.local_ident) : local_ident =
  let id = Local_ident.mk_id kind (Int.of_string ident.id.local_id) in
  { name = ident.name; id }

(** Maps signed Rust integer types onto engine sizes. *)
let int_ty_to_size (ty : Thir.int_ty) : size =
  match ty with
  | Isize -> SSize
  | I8 -> S8
  | I16 -> S16
  | I32 -> S32
  | I64 -> S64
  | I128 -> S128

(** Maps unsigned Rust integer types onto engine sizes. *)
let uint_ty_to_size (ty : Thir.uint_ty) : size =
  match ty with
  | Usize -> SSize
  | U8 -> S8
  | U16 -> S16
  | U32 -> S32
  | U64 -> S64
  | U128 -> S128

(* Builds the engine [int_kind] for a signed THIR integer type. *)
let c_int_ty (ty : Thir.int_ty) : int_kind =
  { size = int_ty_to_size ty; signedness = Signed }

(* Builds the engine [int_kind] for an unsigned THIR integer type. *)
let c_uint_ty (ty : Thir.uint_ty) : int_kind =
  { size = uint_ty_to_size ty; signedness = Unsigned }

(* Translates safety, materializing the [unsafe] feature witness. *)
let csafety (safety : Types.safety) : safety_kind =
  match safety with Safe -> Safe | Unsafe -> Unsafe W.unsafe

(* [SafeTargetFeatures] headers are treated as safe; otherwise defer to
   [csafety]. *)
let c_header_safety (safety : Types.header_safety) : safety_kind =
  match safety with
  | SafeTargetFeatures -> Safe
  | Normal safety -> csafety safety

(* Lifts a boolean mutability flag into [Ast.mutability], using [witness]
   as the feature witness for the mutable case. *)
let c_mutability (witness : 'a) : bool -> 'a Ast.mutability = function
  | true -> Mutable witness
  | false -> Immutable

(* Translates THIR borrow kinds. Fake (shallow) borrows are borrow-checker
   artifacts and must never reach this importer. *)
let c_borrow_kind span : Thir.borrow_kind -> borrow_kind = function
  | Shared -> Shared
  | Fake _ ->
      assertion_failure [ span ]
        "Got a shallow borrow node (`BorrowKind::Fake`). Those are generated \
         by the borrow checker and should be discarded after borrow checking: \
         we should never see such borrows."
  | Mut _ -> Mut W.mutable_reference

(* Translates THIR by-ref binding modes into engine binding modes. *)
let c_binding_mode : Thir.by_ref -> binding_mode = function
  | No -> ByValue
  | Yes (_, true) -> ByRef (Mut W.mutable_reference, W.reference)
  | Yes (_, false) -> ByRef (Shared, W.reference)

(* The unit type, encoded as the 0-ary tuple. *)
let unit_typ : ty = TApp { ident = `TupleType 0; args = [] }

(* The unit value `()` at [span]. *)
let unit_expr span : expr =
  { typ = unit_typ; span; e = Ast.GlobalVar (`TupleCons 0) }

(* A wildcard pattern `_` of the given type. *)
let wild_pat span : ty -> pat = fun typ -> { typ; span; p = PWild }

(* Logical operators map one-to-one. *)
let c_logical_op : Thir.logical_op -> logical_op = function
  | And -> And
  | Or -> Or

(** Translates one THIR attribute into an engine attribute; returns [None]
    for attribute shapes that are deliberately dropped. *)
let c_attr (attr : Thir.attribute) : attr option =
  match attr with
  | Parsed (DocComment { kind; comment; span; _ }) ->
      let kind =
        match kind with Thir.Line -> DCKLine | Thir.Block -> DCKBlock
      in
      let kind = DocComment { kind; body = comment } in
      Some { kind; span = Span.of_thir span }
  | Parsed (AutomaticallyDerived span) ->
      (* Restore behavior before PR #1534 *)
      let kind = Tool { path = "automatically_derived"; tokens = "" } in
      Some { kind; span = Span.of_thir span }
  | Unparsed { args = Eq { expr = { symbol; _ }; _ }; path = "doc"; span; _ } ->
      (* Looks for `#[doc = "something"]` *)
      let kind = DocComment { kind = DCKLine; body = symbol } in
      Some { kind; span = Span.of_thir span }
  | Unparsed { args; path; span; _ } ->
      (* Any other unparsed attribute becomes a tool attribute; only
         delimited token streams carry tokens. *)
      let args_tokens =
        match args with Delimited { tokens; _ } -> Some tokens | _ -> None
      in
      let tokens = Option.value ~default:"" args_tokens in
      let kind = Tool { path; tokens } in
      Some { kind; span = Span.of_thir span }
  | _ -> None

(* Translates a THIR attribute list, dropping untranslatable entries. *)
let c_attrs : Thir.attribute list -> attrs = List.filter_map ~f:c_attr

(** Merges an item's own attributes with (a filtered subset of) its
    parent's attributes. *)
let c_item_attrs (attrs : Thir.item_attributes) : attrs =
  (* TODO: This is a quite coarse approximation, we need to reflect
     that parent/self structure in our AST. See
     https://github.com/hacspec/hax/issues/123. *)
  let self = c_attrs attrs.attributes in
  let parent =
    c_attrs attrs.parent_attributes
    |> List.filter ~f:([%matches? ({ kind = DocComment _; _ } : attr)] >> not)
    |>
    (* Repeating associateditem or uid is harmful, same for comments *)
    List.filter ~f:(fun payload ->
        match Attr_payloads.payloads [ payload ] with
        | [ ((Uid _ | AssociatedItem _), _) ] -> false
        | _ -> true)
  in
  self @ parent

(* A plain literal, or a byte-string literal expanded into `u8` literals. *)
type extended_literal =
  | EL_Lit of literal
  | EL_U8Array of literal list (* EL_U8Array only encodes arrays of [u8]s *)

(** Translates a THIR literal of engine type [ty]. [negative] applies to
    numeric literals only; the int/float kind is read off [ty]. *)
let c_lit' span negative (lit : Thir.lit_kind) (ty : ty) : extended_literal =
  let mk l = EL_Lit l in
  (* Wraps an int in a `u8` literal (for byte and byte-string literals). *)
  let mku8 (n : int) =
    let kind = { size = S8; signedness = Unsigned } in
    Int { value = Int.to_string n; kind; negative = false }
  in
  let error kind =
    assertion_failure [ span ]
      ("[import_thir:literal] got a " ^ kind ^ " literal, expected " ^ kind
     ^ " type, got type ["
      ^ [%show: ty] ty
      ^ "] instead.")
  in
  match lit with
  | Err _ ->
      assertion_failure [ span ]
        "[import_thir:literal] got an error literal: this means the Rust \
         compiler or Hax's frontend probably reported errors above."
  | Str (str, _) -> mk @@ String str
  | CStr (l, _) | ByteStr (l, _) -> EL_U8Array (List.map ~f:mku8 l)
  | Byte n -> mk @@ mku8 n
  | Char s -> mk @@ Char s
  | Int (value, _kind) ->
      mk
      @@ Int
           {
             value;
             negative;
             kind = (match ty with TInt k -> k | _ -> error "integer");
           }
  | Float (value, _kind) ->
      mk
      @@ Float
           {
             value;
             negative;
             kind = (match ty with TFloat k -> k | _ -> error "float");
           }
  | Bool b -> mk @@ Bool b

(* Like [c_lit'], but takes a spanned literal node. *)
let c_lit span neg (lit : Thir.spanned_for__lit_kind) : ty -> extended_literal =
  c_lit' span neg lit.node

(** Detects applications of `IndexMut::index_mut` / `Index::index` and
    returns the indexed expression together with the index, so that e.g.
    `x[i] = v` can be resugared as an assignment on a projection. *)
let resugar_index_mut (e : expr) : (expr * expr) option =
  match (U.unbox_underef_expr e).e with
  | App
      {
        f = { e = GlobalVar (`Concrete meth); _ };
        args = [ { e = Borrow { e = x; _ }; _ }; index ];
        generic_args = _ (* TODO: see issue #328 *);
        trait = _ (* TODO: see issue #328 *);
        bounds_impls = _;
      }
    when Concrete_ident.eq_name Core__ops__index__IndexMut__index_mut meth ->
      Some (x, index)
  | App
      {
        f = { e = GlobalVar (`Concrete meth); _ };
        args = [ x; index ];
        generic_args = _ (* TODO: see issue #328 *);
        trait = _ (* TODO: see issue #328 *);
        bounds_impls = _;
      }
    when Concrete_ident.eq_name Core__ops__index__Index__index meth ->
      Some (x, index)
  | _ -> None

(** Name for the cast function from an ADT to its discriminant *)
let cast_name_for_type = Concrete_ident.with_suffix `Cast

(* Interface of the expression translator produced by the [Make] functor
   below, which is parameterized over whether the item being translated
   belongs to `core` (this changes how binary operators are lowered). *)
module type EXPR = sig
  val c_expr : Thir.decorated_for__expr_kind -> expr
  val c_expr_drop_body : Thir.decorated_for__expr_kind -> expr
  val c_ty : Thir.span -> Thir.ty -> ty
  val c_generic_value : Thir.span -> Thir.generic_arg -> generic_value
  val c_generics : ?offset:int -> Thir.generics -> generics
  val c_param : Thir.span -> Thir.param -> param
  val c_fn_params : Thir.span -> Thir.param list -> param list
  val c_trait_item' : Thir.trait_item -> Thir.trait_item_kind -> trait_item'
  val c_trait_ref : Thir.span -> Thir.trait_ref -> trait_goal
  val c_impl_expr : Thir.span -> Thir.impl_expr -> impl_expr
  val c_clause : Thir.span -> int -> Thir.clause -> generic_constraint option
end

(* BinOp to [core::ops::*] overloaded functions *)

module Make (CTX : sig
  val is_core_item : bool
end) : EXPR = struct
  let c_binop (op : Thir.bin_op) (lhs : expr) (rhs : expr) (span : span)
      (typ : ty) =
    let overloaded_names_of_binop : Thir.bin_op -> Concrete_ident.name =
      function
      | Add | AddUnchecked -> Core__ops__arith__Add__add
      | Sub | SubUnchecked -> Core__ops__arith__Sub__sub
      | Mul | MulUnchecked -> Core__ops__arith__Mul__mul
      | Div -> Core__ops__arith__Div__div
      | Rem -> Core__ops__arith__Rem__rem
      | BitXor -> Core__ops__bit__BitXor__bitxor
      | BitAnd -> Core__ops__bit__BitAnd__bitand
      | BitOr -> Core__ops__bit__BitOr__bitor
      | Shl | ShlUnchecked -> Core__ops__bit__Shl__shl
      | Shr | ShrUnchecked -> Core__ops__bit__Shr__shr
      | Lt -> Core__cmp__PartialOrd__lt
      | Le -> Core__cmp__PartialOrd__le
      | Ne -> Core__cmp__PartialEq__ne
      | Ge -> Core__cmp__PartialOrd__ge
      | Gt -> Core__cmp__PartialOrd__gt
      | Eq -> Core__cmp__PartialEq__eq
      | AddWithOverflow | SubWithOverflow | MulWithOverflow ->
          assertion_failure (Span.to_thir span)
            "Overflowing binary operators are not suppored"
      | Cmp ->
          assertion_failure (Span.to_thir span)
            "`Cmp` binary operator is not suppored"
      | Offset -> Core__ptr__const_ptr__Impl__offset
    in
    let primitive_names_of_binop : Thir.bin_op -> Concrete_ident.name = function
      | Add | AddUnchecked -> Rust_primitives__u128__add
      | Sub | SubUnchecked -> Rust_primitives__u128__sub
      | Mul | MulUnchecked -> Rust_primitives__u128__mul
      | Div -> Rust_primitives__u128__div
      | Rem -> Rust_primitives__u128__rem
      | BitXor -> Rust_primitives__u128__bit_xor
      | BitAnd -> Rust_primitives__u128__bit_and
      | BitOr -> Rust_primitives__u128__bit_or
      | Shl | ShlUnchecked -> Rust_primitives__u128__shl
      | Shr | ShrUnchecked -> Rust_primitives__u128__shr
      | Lt -> Rust_primitives__u128__lt
      | Le -> Rust_primitives__u128__le
      | Ne -> Rust_primitives__u128__ne
      | Ge -> Rust_primitives__u128__ge
      | Gt -> Rust_primitives__u128__gt
      | Eq -> Rust_primitives__u128__eq
      | AddWithOverflow | SubWithOverflow | MulWithOverflow ->
          assertion_failure (Span.to_thir span)
            "Overflowing binary operators are not suppored"
      | Cmp ->
          assertion_failure (Span.to_thir span)
            "`Cmp` binary operator is not suppored"
      | Offset -> Rust_primitives__offset
    in
    let name =
      if CTX.is_core_item then
        let assert_type_eq t1 t2 =
          if not (U.ty_equality t1 t2) then
            assertion_failure (Span.to_thir span)
              ("Binary operation: expected LHS and RHS to have the same type, \
                instead LHS has type ["
              ^ [%show: ty] t1
              ^ "] while RHS has type ["
              ^ [%show: ty] t2
              ^ "]")
        in
        let int =
          ("int", function TInt k -> Some (show_int_kind k) | _ -> None)
        in
        let float =
          ("float", function TFloat k -> Some (show_float_kind k) | _ -> None)
        in
        let bool = ("bool", function TBool -> Some "bool" | _ -> None) in
        let concat_tup sep (x, y) = x ^ sep ^ y in
        let ( <*> ) (x, f) (y, g) =
          ( x ^ "*" ^ y,
            f *** g >> uncurry Option.both >> Option.map ~f:(concat_tup "_") )
        in
        let both (e, f) =
          ( e ^ "*" ^ e,
            fun (t1, t2) ->
              assert_type_eq t1 t2;
              f t1 )
        in
        let ( <|> ) (x, f) (y, g) =
          (x ^ " or" ^ y, fun v -> match f v with None -> g v | v -> v)
        in
        let name = primitive_names_of_binop op in
        let expected, f =
          match op with
          | Add | Sub | Mul | AddWithOverflow | SubWithOverflow
          | MulWithOverflow | AddUnchecked | SubUnchecked | MulUnchecked | Div
            ->
              both int <|> both float
          | Rem | Cmp -> both int
          | BitXor | BitAnd | BitOr -> both int <|> both bool
          | Shl | Shr | ShlUnchecked | ShrUnchecked -> int <*> int
          | Lt | Le | Ne | Ge | Gt -> both int <|> both float
          | Eq -> both int <|> both float <|> both bool
          | Offset -> ("", fun _ -> Some "")
        in
        match f (lhs.typ, rhs.typ) with
        | Some with_ ->
            Concrete_ident.of_name ~value:true name
            |> (Concrete_ident.map_path_strings [@alert "-unsafe"]) ~f:(function
                 | "u128" -> with_
                 | s -> s)
        | None ->
            assertion_failure (Span.to_thir span)
              ("Binary operation: expected " ^ expected ^ " type, got "
              ^ [%show: ty] lhs.typ)
      else Concrete_ident.of_name ~value:true @@ overloaded_names_of_binop op
    in
    let needs_borrow =
      match op with Lt | Le | Ne | Ge | Gt | Eq -> true | _ -> false
    in
    let borrow_if_needed (e : expr) =
      if needs_borrow then
        match e.typ with
        | TRef _ -> e
        | _ ->
            {
              span = e.span;
              e = Borrow { e; kind = Shared; witness = W.reference };
              typ =
                TRef
                  {
                    witness = W.reference;
                    region = "unknown";
                    typ = e.typ;
                    mut = Immutable;
                  };
            }
      else e
    in
    let lhs = borrow_if_needed lhs in
    let rhs = borrow_if_needed rhs in
    U.call' (`Concrete name) [ lhs; rhs ] span typ

  (** Maps a THIR compound-assignment operator (e.g. the one in [x += e]) to
      the plain binary operator it applies (e.g. [+]). *)
  let binop_of_assignop (op : Thir.assign_op) : Thir.bin_op =
    match op with
    | AddAssign -> Add
    | SubAssign -> Sub
    | MulAssign -> Mul
    | DivAssign -> Div
    | RemAssign -> Rem
    | BitXorAssign -> BitXor
    | BitAndAssign -> BitAnd
    | BitOrAssign -> BitOr
    | ShlAssign -> Shl
    | ShrAssign -> Shr

  (** Translates a THIR expression into a hax expression. If the translation
      raises a [Diagnostics.SpanFreeError], the error is reified as a "hax
      failure" expression (with a best-effort type and the original span)
      instead of aborting the whole import. *)
  let rec c_expr (e : Thir.decorated_for__expr_kind) : expr =
    match c_expr_unwrapped e with
    | result -> result
    | exception Diagnostics.SpanFreeError.Exn (Data (ctx, kind)) ->
        (* Try to still give the failure node its real type; if translating
           the type fails as well, fall back to a dummy failure type. *)
        let typ : ty =
          match c_ty e.span e.ty with
          | t -> t
          | exception Diagnostics.SpanFreeError.Exn _ ->
              U.HaxFailure.Build.ty ""
        in
        U.hax_failure_expr' (Span.of_thir e.span) typ (ctx, kind) ""

  (** Replaces an expression by the global name `dropped_body`: this drops the
      computational part of the expression, but keeps a correct type and span.
  *)
  and c_expr_drop_body (e : Thir.decorated_for__expr_kind) : expr =
    let dropped =
      GlobalVar
        (Global_ident.of_name ~value:true Rust_primitives__hax__dropped_body)
    in
    { span = Span.of_thir e.span; typ = c_ty e.span e.ty; e = dropped }

  (** Translates a THIR block (statements plus an optional trailing
      expression) into a single nested hax expression: each statement becomes
      a [Let] binding wrapped, right-to-left, around the final expression. *)
  and c_block ~expr ~span ~stmts ~ty ~(safety_mode : Types.block_safety) : expr
      =
    let full_span = Span.of_thir span in
    let typ = c_ty span ty in
    (* Both unsafe flavours collapse into hax's single [Unsafe] marker. *)
    let safety_mode =
      match safety_mode with
      | Safe -> Safe
      | BuiltinUnsafe | ExplicitUnsafe -> Unsafe W.unsafe
    in
    (* if there is no expression & the last expression is ⊥, just use that *)
    let lift_last_statement_as_expr_if_possible expr stmts (ty : Thir.ty) =
      match (ty.value, expr, List.drop_last stmts, List.last stmts) with
      | ( Thir.Never,
          None,
          Some stmts,
          Some ({ kind = Thir.Expr { expr; _ }; _ } : Thir.stmt) ) ->
          (stmts, Some expr)
      | _ -> (stmts, expr)
    in
    let o_stmts, o_expr =
      lift_last_statement_as_expr_if_possible expr stmts ty
    in
    (* The innermost expression: the trailing expression wrapped in a [Block]
       node (carrying the safety mode), or unit when the block has none. *)
    let init =
      Option.map
        ~f:(fun e ->
          let e = c_expr e in
          { e with e = Block { e; safety_mode; witness = W.block } })
        o_expr
      |> Option.value ~default:(unit_expr full_span)
    in
    List.fold_right o_stmts ~init ~f:(fun { kind; _ } body ->
        match kind with
        (* An expression statement: bind its value to a wildcard pattern so
           the value is discarded but the effect is kept. *)
        | Expr { expr = rhs; _ } ->
            let rhs = c_expr rhs in
            let e =
              Let { monadic = None; lhs = wild_pat rhs.span rhs.typ; rhs; body }
            in
            { e; typ; span = Span.union rhs.span body.span }
        (* `let pat = rhs else { .. }` desugars into a two-armed match: the
           pattern arm runs the rest of the block, the wildcard arm runs the
           (diverging) else-block. *)
        | Let
            {
              else_block = Some { expr; span; stmts; safety_mode; _ };
              pattern = lhs;
              initializer' = Some rhs;
              _;
            } ->
            let lhs = c_pat lhs in
            let rhs = c_expr rhs in
            let else_block = c_block ~expr ~span ~stmts ~ty ~safety_mode in
            let lhs_body_span = Span.union lhs.span body.span in
            let e =
              Match
                {
                  arms =
                    [
                      U.M.arm lhs body ~span:lhs_body_span;
                      U.M.arm
                        { p = PWild; span = else_block.span; typ = lhs.typ }
                        (* The else-block diverges, so it can be retyped to
                           match the body's type. *)
                        { else_block with typ = body.typ }
                        ~span:else_block.span;
                    ];
                  scrutinee = rhs;
                }
            in
            { e; typ; span = full_span }
        | Let { initializer' = None; _ } ->
            unimplemented ~issue_id:156 [ span ]
              "Sorry, Hax does not support declare-first let bindings (see \
               https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) \
               for now."
        (* Ordinary `let pat = rhs;` statement. *)
        | Let { pattern = lhs; initializer' = Some rhs; _ } ->
            let lhs = c_pat lhs in
            let rhs = c_expr rhs in
            let e = Let { monadic = None; lhs; rhs; body } in
            { e; typ; span = Span.union rhs.span body.span })

  (** The main THIR-expression translation: dispatches on every THIR
      expression kind and builds the corresponding hax [expr']. Errors raised
      here are caught and reified by [c_expr]. *)
  and c_expr_unwrapped (e : Thir.decorated_for__expr_kind) : expr =
    (* TODO: eliminate that `call`, use the one from `ast_utils` *)
    let call f args =
      App
        {
          f;
          args = List.map ~f:c_expr args;
          generic_args = [];
          trait = None;
          bounds_impls = [];
        }
    in
    let typ = c_ty e.span e.ty in
    let span = Span.of_thir e.span in
    let mk_global typ v : expr = { span; typ; e = GlobalVar v } in
    let ( ->. ) a b = TArrow (a, b) in
    let (v : expr') =
      match e.contents with
      (* `if let pat = scrutinee { then' } else { else_opt }` desugars into a
         two-armed match (pattern arm, then wildcard arm). *)
      | If
          {
            cond = { contents = Let { expr = scrutinee; pat }; _ };
            else_opt;
            then';
            _;
          } ->
          let scrutinee = c_expr scrutinee in
          let arm_pat = c_pat pat in
          let then_ = c_expr then' in
          let else_ =
            Option.value ~default:(U.unit_expr span)
            @@ Option.map ~f:c_expr else_opt
          in
          let arm_then = U.M.arm arm_pat then_ ~span:then_.span in
          let arm_else =
            let arm_pat = { arm_pat with p = PWild } in
            U.M.arm arm_pat else_ ~span:else_.span
          in
          Match { scrutinee; arms = [ arm_then; arm_else ] }
      | If { cond; else_opt; then'; _ } ->
          let cond = c_expr cond in
          let then_ = c_expr then' in
          let else_ = Option.map ~f:c_expr else_opt in
          If { cond; else_; then_ }
      | Call { args; fn_span = _; from_hir_call = _; fun'; ty = _ } -> (
          (* Nullary calls are given an explicit unit argument. *)
          let args =
            if List.is_empty args then [ unit_expr span ]
            else List.map ~f:c_expr args
          in
          let f = c_expr fun' in
          match fun'.contents with
          (* Calls to global names carry generic arguments and trait/impl
             information; translate and attach them to the [App] node. *)
          | GlobalName
              {
                item =
                  {
                    value =
                      { def_id = id; generic_args; impl_exprs; in_trait; _ };
                    _;
                  };
                _;
              } ->
              let f = { f with e = GlobalVar (def_id ~value:true id) } in
              let bounds_impls = List.map ~f:(c_impl_expr e.span) impl_exprs in
              let generic_args =
                List.map ~f:(c_generic_value e.span) generic_args
              in
              let in_trait = Option.map ~f:(c_impl_expr e.span) in_trait in
              let trait =
                Option.map ~f:(fun ie -> (ie, ie.goal.args)) in_trait
              in
              App { f; args; generic_args; bounds_impls; trait }
          | _ ->
              App
                { f; args; generic_args = []; bounds_impls = []; trait = None })
      | Box { value } ->
          (U.call Rust_primitives__hax__box_new [ c_expr value ] span typ).e
      | Deref { arg } ->
          let inner_typ = c_ty arg.span arg.ty in
          call (mk_global ([ inner_typ ] ->. typ) @@ `Primitive Deref) [ arg ]
      | Binary { lhs; rhs; op } ->
          (c_binop op (c_expr lhs) (c_expr rhs) span typ).e
      | LogicalOp { lhs; rhs; op } ->
          let lhs_type = c_ty lhs.span lhs.ty in
          let rhs_type = c_ty rhs.span rhs.ty in
          call
            (mk_global ([ lhs_type; rhs_type ] ->. typ)
            @@ `Primitive (LogicalOp (c_logical_op op)))
            [ lhs; rhs ]
      | Unary { arg; op } ->
          (U.call
             (match op with
             | Not -> Core__ops__bit__Not__not
             | Neg -> Core__ops__arith__Neg__neg
             | PtrMetadata ->
                 assertion_failure (Span.to_thir span)
                   "Unsupported unary operator: `PtrMetadata`")
             [ c_expr arg ]
             span typ)
            .e
      | Cast { source } -> (
          let source_type = c_ty source.span source.ty in
          match source_type with
          (* Each inductive defines a cast function *)
          | TApp { ident = `Concrete ident; _ } ->
              (U.call'
                 (`Concrete (cast_name_for_type ident))
                 [ c_expr source ]
                 span typ)
                .e
          | _ ->
              call
                (mk_global ([ source_type ] ->. typ) @@ `Primitive Cast)
                [ source ])
      | Use { source } -> (c_expr source).e
      | NeverToAny { source } ->
          (U.call Rust_primitives__hax__never_to_any [ c_expr source ] span typ)
            .e
      (* TODO: this is incorrect (NeverToAny) *)
      | PointerCoercion { cast; source } -> c_pointer e typ span cast source
      | Loop { body } ->
          let body = c_expr body in
          Loop
            {
              body;
              kind = UnconditionalLoop;
              state = None;
              label = None;
              witness = W.loop;
              control_flow = None;
            }
      | Match { scrutinee; arms } ->
          let scrutinee = c_expr scrutinee in
          let arms = List.map ~f:c_arm arms in
          Match { scrutinee; arms }
      | Let _ ->
          unimplemented ~issue_id:2018 [ e.span ]
            "Let-chains (e.g. `if let .. && let ..`) are not supported."
      | Block { expr; span; stmts; safety_mode; _ } ->
          let { e; _ } = c_block ~expr ~span ~stmts ~ty:e.ty ~safety_mode in
          e
      | Assign { lhs; rhs } ->
          let lhs = c_expr lhs in
          let rhs = c_expr rhs in
          c_expr_assign lhs rhs
      (* `lhs op= rhs` is desugared into `lhs = lhs op rhs`. *)
      | AssignOp { lhs; op; rhs } ->
          let lhs = c_expr lhs in
          c_expr_assign lhs
          @@ c_binop (binop_of_assignop op) lhs (c_expr rhs) span lhs.typ
      | VarRef { id } -> LocalVar (local_ident Expr id)
      (* Named field access becomes an application of a projector. *)
      | Field { lhs; field } ->
          let lhs = c_expr lhs in
          let projector =
            GlobalVar
              (`Projector
                 (`Concrete (Concrete_ident.of_def_id ~value:true field)))
          in
          let span = Span.of_thir e.span in
          App
            {
              f = { e = projector; typ = TArrow ([ lhs.typ ], typ); span };
              args = [ lhs ];
              generic_args = [];
              trait = None;
              bounds_impls = [];
            }
      | TupleField { lhs; field } ->
          (* TODO: refactor *)
          let lhs = c_expr lhs in
          let tuple_len =
            match lhs.typ with
            | TApp { ident = `TupleType len; _ } -> len
            | _ ->
                assertion_failure [ e.span ]
                  "LHS of tuple field projection is not typed as a tuple."
          in
          let projector =
            GlobalVar
              (`Projector (`TupleField (Int.of_string field, tuple_len)))
          in
          let span = Span.of_thir e.span in
          App
            {
              f = { e = projector; typ = TArrow ([ lhs.typ ], typ); span };
              args = [ lhs ];
              generic_args = [];
              trait = None;
              bounds_impls = [];
            }
      | GlobalName { item = { value = { def_id = id; _ }; _ }; constructor = _ }
        ->
          GlobalVar (def_id ~value:true id)
      | UpvarRef { var_hir_id = id; _ } -> LocalVar (local_ident Expr id)
      | Borrow { arg; borrow_kind = kind } ->
          let e' = c_expr arg in
          let kind = c_borrow_kind e.span kind in
          Borrow { kind; e = e'; witness = W.reference }
      | RawBorrow { arg; mutability = mut } ->
          let e = c_expr arg in
          AddressOf
            {
              e;
              mut = c_mutability W.mutable_pointer mut;
              witness = W.raw_pointer;
            }
      | Break { value; _ } ->
          (* TODO: labels! *)
          let e = Option.map ~f:c_expr value in
          let e = Option.value ~default:(unit_expr span) e in
          Break { e; acc = None; label = None; witness = (W.break, W.loop) }
      | Continue _ ->
          Continue { acc = None; label = None; witness = (W.continue, W.loop) }
      | Return { value } ->
          let e = Option.map ~f:c_expr value in
          let e = Option.value ~default:(unit_expr span) e in
          Return { e; witness = W.early_exit }
      | ConstBlock _ -> unimplemented ~issue_id:923 [ e.span ] "ConstBlock"
      | ConstParam { param = id; _ } (* TODO: shadowing? *) | ConstRef { id } ->
          LocalVar
            {
              name = id.name;
              id =
                Local_ident.mk_id Cnst
                  (MyInt64.to_int id.index
                  |> Option.value_or_thunk ~default:(fun _ ->
                         assertion_failure [ e.span ]
                           "Expected const id to fit in an OCaml native int"));
            }
      (* `[v; n]` becomes a boxed call to the `repeat` primitive. *)
      | Repeat { value; count } ->
          let value = c_expr value in
          let count = c_constant_expr count in
          let inner =
            U.call Rust_primitives__hax__repeat [ value; count ] span typ
          in
          (U.call Alloc__boxed__Impl__new [ inner ] span typ).e
      | Tuple { fields } ->
          (U.make_tuple_expr' ~span @@ List.map ~f:c_expr fields).e
      | Array { fields } -> Array (List.map ~f:c_expr fields)
      | Adt { info; base; fields; _ } ->
          let is_struct, is_record =
            match info.kind with
            | Struct { named } -> (true, named)
            | Enum { named; _ } -> (false, named)
            | Union ->
                unimplemented ~issue_id:998 [ e.span ]
                  "Construct union types: not supported"
          in
          let constructor = def_id ~value:true info.variant in
          let base =
            match base with
            | None' -> None
            | Base base -> Some (c_expr base.base, W.construct_base)
            | DefaultFields _ ->
                unimplemented ~issue_id:1386 [ e.span ]
                  "Default field values: not supported"
          in
          let fields =
            List.map
              ~f:(fun f ->
                let field = def_id ~value:true f.field in
                let value = c_expr f.value in
                (field, value))
              fields
          in
          Construct { is_record; is_struct; constructor; fields; base }
      | Literal { lit; neg; _ } -> (
          match c_lit e.span neg lit typ with
          | EL_Lit lit -> Literal lit
          (* Byte-string literals are expanded into arrays of u8 literals. *)
          | EL_U8Array l ->
              Array
                (List.map
                   ~f:(fun lit ->
                     {
                       e = Literal lit;
                       span;
                       typ = TInt { size = S8; signedness = Unsigned };
                     })
                   l))
      (* A named constant: when it has const-generic arguments or lives in a
         trait, it is translated as an application; otherwise as a plain
         global variable. *)
      | NamedConst
          {
            item =
              { value = { def_id = id; generic_args; in_trait = impl; _ }; _ };
            _;
          } ->
          let f = GlobalVar (def_id ~value:true id) in
          let args = List.map ~f:(c_generic_value e.span) generic_args in
          let const_args =
            List.filter_map args ~f:(function GConst e -> Some e | _ -> None)
          in
          if List.is_empty const_args && Option.is_none impl then f
          else
            let f =
              {
                e = f;
                span;
                typ = TArrow (List.map const_args ~f:(fun e -> e.typ), typ);
              }
            in
            let trait =
              Option.map impl ~f:(c_impl_expr e.span &&& Fn.const args)
            in
            App
              {
                f;
                trait;
                args = const_args;
                generic_args = [];
                bounds_impls = [];
              }
      | Closure { body; params; upvars; _ } ->
          let params =
            List.filter_map ~f:(fun p -> Option.map ~f:c_pat p.pat) params
          in
          (* Parameterless closures are given a single wildcard unit param. *)
          let params =
            if List.is_empty params then
              [ U.M.pat_PWild ~typ:U.M.ty_unit ~span ]
            else params
          in
          let body = c_expr body in
          let upvars = List.map ~f:c_expr upvars in
          Closure { body; params; captures = upvars }
      | Index { index; lhs } ->
          let index_type = c_ty index.span index.ty in
          let lhs_type = c_ty lhs.span lhs.ty in
          call
            (mk_global ([ lhs_type; index_type ] ->. typ)
            @@ Global_ident.of_name ~value:true Core__ops__index__Index__index)
            [ lhs; index ]
      | StaticRef { def_id = id; _ } -> GlobalVar (def_id ~value:true id)
      | PlaceTypeAscription _ ->
          assertion_failure [ e.span ]
            "Got a unexpected node `PlaceTypeAscription`. Please report, we \
             were not able to figure out an expression yielding that node: a \
             bug report would be very valuable here!"
      | ValueTypeAscription { source; _ } -> (c_expr source).e
      | ZstLiteral _ ->
          assertion_failure [ e.span ]
            "`ZstLiteral` are expected to be handled before-hand"
      | Yield _ ->
          unimplemented ~issue_id:924 [ e.span ]
            "Got expression `Yield`: coroutines are not supported by hax"
      | Todo payload ->
          assertion_failure [ e.span ] ("expression Todo\n" ^ payload)
    in
    { e = v; span; typ }

  (** Classifies an already-translated expression as the left-hand side of an
      assignment: a local variable, an array accessor (resugared from
      `index_mut`), a field accessor (a projector application), or an
      arbitrary expression as a last resort. *)
  and c_lhs lhs =
    match lhs.e with
    | LocalVar var -> LhsLocalVar { var; typ = lhs.typ }
    | _ -> (
        (* `lhs[i] = ..` shows up as an `index_mut` call; resugar it. *)
        match resugar_index_mut lhs with
        | Some (e, index) ->
            LhsArrayAccessor
              { e = c_lhs e; typ = lhs.typ; index; witness = W.nontrivial_lhs }
        | None -> (
            (* A projector applied to one argument is a field access;
               boxing/deref wrappers are looked through first. *)
            match (U.unbox_underef_expr lhs).e with
            | App
                {
                  f =
                    {
                      e = GlobalVar (`Projector _ as field);
                      typ = TArrow ([ _ ], _);
                      span = _;
                    };
                  args = [ e ];
                  generic_args = _;
                  trait = _;
                  bounds_impls = _;
                (* TODO: see issue #328 *)
                } ->
                LhsFieldAccessor
                  {
                    e = c_lhs e;
                    typ = lhs.typ;
                    field;
                    witness = W.nontrivial_lhs;
                  }
            | _ -> LhsArbitraryExpr { e = lhs; witness = W.arbitrary_lhs }))

  (** Builds an [Assign] node, lowering [lhs] into a left-hand-side place via
      [c_lhs]. *)
  and c_expr_assign lhs rhs =
    let lhs = c_lhs lhs in
    Assign { lhs; e = rhs; witness = W.mutable_variable }

  (** Translates a THIR constant expression by first rewriting it into an
      ordinary THIR expression and then running [c_expr] on the result.
      Constant-expression kinds with no expression counterpart (casts, raw
      borrows, trait constants, function pointers, raw memory) are rejected
      with a fatal error. *)
  and c_constant_expr (ce : Thir.decorated_for__constant_expr_kind) : expr =
    let rec constant_expr_to_expr (ce : Thir.decorated_for__constant_expr_kind)
        : Thir.decorated_for__expr_kind =
      {
        attributes = ce.attributes;
        contents = constant_expr_kind_to_expr_kind ce.contents ce.span;
        hir_id = ce.hir_id;
        span = ce.span;
        ty = ce.ty;
      }
    and constant_expr_kind_to_expr_kind (ce : Thir.constant_expr_kind) span :
        Thir.expr_kind =
      match ce with
      | Literal lit ->
          let lit, neg = constant_lit_to_lit lit span in
          Literal { lit = { node = lit; span }; neg }
      | Adt { fields; info } ->
          let fields = List.map ~f:constant_field_expr fields in
          Adt { fields; info; base = None'; user_ty = None }
      | Array { fields } ->
          Array { fields = List.map ~f:constant_expr_to_expr fields }
      | Tuple { fields } ->
          Tuple { fields = List.map ~f:constant_expr_to_expr fields }
      | GlobalName item -> GlobalName { item; constructor = None }
      | Borrow arg ->
          Borrow { arg = constant_expr_to_expr arg; borrow_kind = Thir.Shared }
      | ConstRef { id } -> ConstRef { id }
      | Cast _ | RawBorrow _ | TraitConst _ | FnPtr _ | Memory _ ->
          (* The message used to blame [constant_lit_to_lit], but the
             rejection happens here, in the kind translation. *)
          assertion_failure [ span ]
            "constant_expr_kind_to_expr_kind: TraitConst | FnPtr | RawBorrow \
             | Cast | Memory"
      | Todo _ -> assertion_failure [ span ] "ConstantExpr::Todo"
    and constant_lit_to_lit (l : Thir.constant_literal) span :
        Thir.lit_kind * bool =
      (* Returns the literal together with a negation flag: negative numbers
         are encoded as a positive literal plus [neg = true]. *)
      match l with
      | Bool v -> (Bool v, false)
      | Char v -> (Char v, false)
      | Int (Int (v, ty)) -> (
          match String.chop_prefix v ~prefix:"-" with
          | Some v -> (Int (v, Signed ty), true)
          | None -> (Int (v, Signed ty), false))
      | Int (Uint (v, ty)) -> (Int (v, Unsigned ty), false)
      | Float (v, ty) -> (
          match String.chop_prefix v ~prefix:"-" with
          | Some v -> (Float (v, Suffixed ty), true)
          | None -> (Float (v, Suffixed ty), false))
      | Str v -> (Str (v, Cooked), false)
      | ByteStr v -> (ByteStr (v, Cooked), false)
      | PtrNoProvenance _ ->
          assertion_failure [ span ] "constant_lit_to_lit: PtrNoProvenance"
    and constant_field_expr ({ field; value } : Thir.constant_field_expr) :
        Thir.field_expr =
      { field; value = constant_expr_to_expr value }
    in
    c_expr (constant_expr_to_expr ce)

  (** Translates a THIR pattern into a hax pattern, preserving span and
      (translated) type. *)
  and c_pat (pat : Thir.decorated_for__pat_kind) : pat =
    let span = Span.of_thir pat.span in
    let typ = c_ty pat.span pat.ty in
    let v =
      match pat.contents with
      | Wild | Missing -> PWild
      | AscribeUserType { ascription = { annotation; _ }; subpattern } ->
          let typ, typ_span = c_canonical_user_type_annotation annotation in
          let pat = c_pat subpattern in
          PAscription { typ; typ_span; pat }
      | Binding { mode; subpattern; ty; var; _ } ->
          let mut = c_mutability W.mutable_variable mode.mutability in
          (* `pat @ subpattern` bindings carry an as-pattern witness. *)
          let subpat =
            Option.map ~f:(c_pat &&& Fn.const W.as_pattern) subpattern
          in
          let typ = c_ty pat.span ty in
          let mode = c_binding_mode mode.by_ref in
          let var = local_ident Expr var in
          PBinding { mut; mode; var; typ; subpat }
      | Variant { info; subpatterns; _ } ->
          let is_struct, is_record =
            match info.kind with
            | Struct { named } -> (true, named)
            | Enum { named; _ } -> (false, named)
            | Union ->
                unimplemented ~issue_id:998 [ pat.span ]
                  "Pattern match on union types: not supported"
          in
          let constructor = def_id ~value:true info.variant in
          let fields = List.map ~f:(c_field_pat info) subpatterns in
          PConstruct { constructor; fields; is_record; is_struct }
      | Tuple { subpatterns } ->
          (List.map ~f:c_pat subpatterns |> U.make_tuple_pat').p
      | Deref { subpattern } ->
          PDeref { subpat = c_pat subpattern; witness = W.reference }
      (* Constant patterns are translated as expressions first, then mapped
         back into literal/array/deref patterns. *)
      | Constant { value } ->
          let rec pat_of_expr (e : expr) =
            { p = pat'_of_expr' e.e e.span; span = e.span; typ = e.typ }
          and pat'_of_expr' (e : expr') span =
            match e with
            | Literal lit -> PConstant { lit }
            | Array l -> PArray { args = List.map ~f:pat_of_expr l }
            | Borrow { kind = _; e; witness } ->
                PDeref { subpat = pat_of_expr e; witness }
            | _ ->
                assertion_failure (Span.to_thir span)
                  ("expected a pattern, got " ^ [%show: expr'] e)
          in
          (c_constant_expr value |> pat_of_expr).p
      | ExpandedConstant { subpattern; _ } -> (c_pat subpattern).p
      | Array _ -> unimplemented ~issue_id:804 [ pat.span ] "Pat:Array"
      | Or { pats } -> POr { subpats = List.map ~f:c_pat pats }
      | Slice _ -> unimplemented ~issue_id:804 [ pat.span ] "pat Slice"
      | Range _ -> unimplemented ~issue_id:925 [ pat.span ] "pat Range"
      | DerefPattern _ ->
          unimplemented ~issue_id:926 [ pat.span ] "pat DerefPattern"
      | Never -> unimplemented ~issue_id:927 [ pat.span ] "pat Never"
      | Error _ ->
          assertion_failure [ pat.span ]
            "`Error` node: Rust compilation failed. If Rust compilation was \
             fine, please file an issue."
    in
    { p = v; span; typ }

  (** Translates one field of a struct/variant pattern. The constructor
      metadata argument ([_info]) is currently unused. *)
  and c_field_pat _info (fp : Thir.field_pat) : field_pat =
    { field = def_id ~value:true fp.field; pat = c_pat fp.pattern }

  (** Views an expression as an extended literal: either a plain literal, or
      an array whose every element is a [u8] literal. Any other shape is a
      fatal error. *)
  and extended_literal_of_expr (e : expr) : extended_literal =
    let fail () =
      assertion_failure (Span.to_thir e.span)
        ("expected a literal, got " ^ [%show: expr] e)
    in
    (* Accepts exactly an unsigned 8-bit integer literal element. *)
    let as_u8_lit (item : expr) =
      match item.e with
      | Literal (Int { kind = { size = S8; signedness = Unsigned }; _ } as lit)
        ->
          lit
      | _ -> fail ()
    in
    match e.e with
    | Literal lit -> EL_Lit lit
    | Array lits -> EL_U8Array (List.map ~f:as_u8_lit lits)
    | _ -> fail ()

  (** Returns the translated inferred type of a user type annotation together
      with the annotation's span. *)
  and c_canonical_user_type_annotation
      (annotation : Thir.canonical_user_type_annotation) : ty * span =
    let typ = c_ty annotation.span annotation.inferred_ty in
    let span = Span.of_thir annotation.span in
    (typ, span)

  (** Translates a THIR pointer coercion. Function-pointer reifications are
      transparent (hax's arrow types do not distinguish top-level functions
      from closures); unsizing coercions become a call to the
      [Rust_primitives__unsize] primitive; every other coercion is rejected.
      (A large block of commented-out experimental box-unsizing code that
      lived here has been removed.) *)
  and c_pointer e typ span cast source =
    match cast with
    | ClosureFnPointer Safe | ReifyFnPointer ->
        (* we have arrow types, we do not distinguish between top-level functions and closures *)
        (c_expr source).e
    | Unsize _ ->
        (* https://doc.rust-lang.org/std/marker/trait.Unsize.html *)
        (U.call Rust_primitives__unsize [ c_expr source ] span typ).e
    | _ ->
        assertion_failure [ e.span ]
          ("Pointer, with [cast] being " ^ [%show: Thir.pointer_coercion] cast)

  (** Translates a THIR type into a hax type. Types that cannot occur after
      typechecking ([Placeholder], [Bound], [Infer], [Error]) and unsupported
      ones ([Foreign], [Coroutine]) are fatal errors. *)
  and c_ty (span : Thir.span) (ty : Thir.ty) : ty =
    match ty.value with
    | Bool -> TBool
    | Char -> TChar
    | Int k -> TInt (c_int_ty k)
    | Uint k -> TInt (c_uint_ty k)
    | Float k ->
        TFloat
          (match k with F16 -> F16 | F32 -> F32 | F64 -> F64 | F128 -> F128)
    (* Closures, function definitions and plain arrows all become arrow
       types; nullary functions get an explicit unit input. *)
    | Arrow fn_sig | Closure { fn_sig; _ } | FnDef { fn_sig; _ } ->
        let ({ inputs; output; _ } : Thir.ty_fn_sig) = fn_sig.value in
        let inputs =
          if List.is_empty inputs then [ U.unit_typ ]
          else List.map ~f:(c_ty span) inputs
        in
        TArrow (inputs, c_ty span output)
    | Adt { value = { def_id = id; generic_args; _ }; _ } ->
        let ident = def_id ~value:false id in
        let args = List.map ~f:(c_generic_value span) generic_args in
        TApp { ident; args }
    | Foreign _ -> unimplemented ~issue_id:928 [ span ] "Foreign"
    | Str -> TStr
    | Array item_ref ->
        (* Arrays are encoded by the frontend as a synthetic item with a type
           and a length const as generic arguments. *)
        let ty, len =
          match item_ref.value.generic_args with
          | [ Type ty; Const len ] -> (ty, len)
          | _ ->
              assertion_failure [ span ]
                "Wrong generics for array: expected a type and a constant. See \
                 synthetic_items in hax frontend."
        in
        TArray { typ = c_ty span ty; length = c_constant_expr len }
    | Slice item_ref ->
        let ty =
          match item_ref.value.generic_args with
          | [ Type ty ] -> ty
          | _ ->
              assertion_failure [ span ]
                "Wrong generics for slice: expected a type. See \
                 synthetic_items in hax frontend."
        in
        let ty = c_ty span ty in
        TSlice { ty; witness = W.slice }
    | RawPtr _ -> TRawPointer { witness = W.raw_pointer }
    | Ref (_region, ty, mut) ->
        let typ = c_ty span ty in
        let mut = c_mutability W.mutable_reference mut in
        TRef { witness = W.reference; region = "todo"; typ; mut }
    | Never -> U.never_typ
    | Tuple item_ref ->
        let types =
          List.map
            ~f:(function Types.Type ty -> Some ty | _ -> None)
            item_ref.value.generic_args
          |> Option.all
          |> Option.value_or_thunk ~default:(fun _ ->
                 (* This message used to say "slice" (copy-paste from the
                    Slice case above); it concerns tuples. *)
                 assertion_failure [ span ]
                   "Wrong generics for tuple: expected only types. See \
                    synthetic_items in hax frontend.")
        in
        let types = List.map ~f:(fun ty -> GType (c_ty span ty)) types in
        TApp { ident = `TupleType (List.length types); args = types }
    | Alias { kind = Projection { assoc_item = _; impl_expr }; def_id; _ } ->
        let impl = c_impl_expr span impl_expr in
        let item = Concrete_ident.of_def_id ~value:false def_id in
        TAssociatedType { impl; item }
    | Alias { kind = Opaque _; def_id; _ } ->
        TOpaque (Concrete_ident.of_def_id ~value:false def_id)
    | Alias { kind = Inherent; _ } ->
        assertion_failure [ span ] "Ty::Alias with AliasTyKind::Inherent"
    | Alias { kind = Free; _ } ->
        assertion_failure [ span ] "Ty::Alias with AliasTyKind::Free"
    | Param { index; name } ->
        (* TODO: [id] might not unique *)
        TParam
          {
            name;
            id =
              Local_ident.mk_id Typ
                (MyInt64.to_int index
                |> Option.value_or_thunk ~default:(fun _ ->
                       assertion_failure [ span ]
                         "Expected param id to fit in an OCaml native int"));
          }
    | Error ->
        assertion_failure [ span ]
          "got type `Error`: Rust compilation probably failed."
    (* `dyn Trait` types: only trait clauses are supported as predicates. *)
    | Dynamic (_, predicates, _region) -> (
        let goals, non_traits =
          List.partition_map
            ~f:(fun ((clause, _span) : Types.clause * _) ->
              match clause.kind.value with
              | Trait { trait_ref; _ } ->
                  let goal : dyn_trait_goal =
                    {
                      trait =
                        Concrete_ident.of_def_id ~value:false
                          trait_ref.value.def_id;
                      non_self_args =
                        List.map ~f:(c_generic_value span)
                          (List.tl_exn trait_ref.value.generic_args);
                    }
                  in
                  First goal
              | _ -> Second ())
            predicates.predicates
        in
        match non_traits with
        | [] -> TDyn { witness = W.dyn; goals }
        | _ -> assertion_failure [ span ] "type Dyn with non trait predicate")
    | Coroutine _ ->
        unimplemented ~issue_id:924 [ span ]
          "Got type `Coroutine`: coroutines are not supported by hax"
    | Placeholder _ ->
        assertion_failure [ span ]
          "type Placeholder: should be gone after typechecking"
    | Bound _ ->
        assertion_failure [ span ]
          "type Bound: should be gone after typechecking"
    | Infer _ ->
        assertion_failure [ span ]
          "type Infer: should be gone after typechecking"
    | Todo _ -> assertion_failure [ span ] "type Todo"

  and c_impl_expr (span : Thir.span) (ie : Thir.impl_expr) : impl_expr =
    (* Translate the trait goal and the atomic impl expression first. *)
    let goal = c_trait_ref span ie.trait.value in
    let base = { kind = c_impl_expr_atom span ie.impl goal; goal } in
    (* A concrete impl that carries impl arguments becomes an application
       node; anything else is returned as-is. *)
    match ie.impl with
    | Concrete { value = { impl_exprs = (_ :: _ as impl_exprs); _ }; _ } ->
        let args = List.map ~f:(c_impl_expr span) impl_exprs in
        { kind = ImplApp { impl = base; args }; goal }
    | _ -> base

  and c_trait_ref span (tr : Thir.trait_ref) : trait_goal =
    (* A trait goal is the trait's identifier applied to its generic args. *)
    {
      trait = Concrete_ident.of_def_id ~value:false tr.value.def_id;
      args = List.map ~f:(c_generic_value span) tr.value.generic_args;
    }

  and c_impl_expr_atom (span : Thir.span) (ie : Thir.impl_expr_atom) goal :
      impl_expr_kind =
    (* Wraps [item_kind] according to one path chunk: either a projection to
       an associated item, or a step up to a parent trait. In both cases the
       trait reference is converted only once and shared between the bound
       identifier and the intermediate impl's goal (the original code
       converted it twice in the [AssocItem] case). *)
    let browse_path (item_kind : impl_expr_kind)
        (chunk : Thir.impl_expr_path_chunk) =
      match chunk with
      | AssocItem
          { item; predicate = { value = { trait_ref; _ }; _ }; index; _ } ->
          let goal = c_trait_ref span trait_ref in
          let ident = { goal; name = "i" ^ index } in
          let item = Concrete_ident.of_def_id ~value:false item.value.def_id in
          Projection { impl = { kind = item_kind; goal }; ident; item }
      | Parent { predicate = { value = { trait_ref; _ }; _ }; index; _ } ->
          let goal = c_trait_ref span trait_ref in
          let ident = { goal; name = "i" ^ index } in
          Parent { impl = { kind = item_kind; goal }; ident }
    in
    match ie with
    | Concrete { value = { def_id; generic_args; _ }; _ } ->
        let trait = Concrete_ident.of_def_id ~value:false def_id in
        let args = List.map ~f:(c_generic_value span) generic_args in
        Concrete { trait; args }
    | LocalBound { index; path; _ } ->
        (* A bound in scope, refined by a (possibly empty) path of chunks. *)
        let init = LocalBound { id = "i" ^ index } in
        List.fold ~init ~f:browse_path path
    | Dyn -> Dyn
    | SelfImpl { path; _ } -> List.fold ~init:Self ~f:browse_path path
    | Builtin _ -> Builtin goal
    | Error str ->
        unimplemented ~issue_id:707 [ span ]
          ("Could not resolve trait reference: " ^ str)

  and c_generic_value (span : Thir.span) (ty : Thir.generic_arg) : generic_value
      =
    (* Types and constants are translated; anything else (lifetimes) is
       erased to a placeholder lifetime. *)
    match ty with
    | Type t -> GType (c_ty span t)
    | Const c -> GConst (c_constant_expr c)
    | _ -> GLifetime { lt = "todo generics"; witness = W.lifetime }

  and c_arm (arm : Thir.arm) : arm =
    let arm_pat = c_pat arm.pattern in
    let body = c_expr arm.body in
    let span = Span.of_thir arm.span in
    (* Guards are normalized to `if let`: a plain boolean guard `if cond`
       becomes `if let true = cond`. *)
    let c_guard (e : Thir.decorated_for__expr_kind) =
      let lhs, rhs =
        match e.contents with
        | Let { expr; pat } -> (c_pat pat, c_expr expr)
        | _ ->
            ({ p = PConstant { lit = Bool true }; span; typ = TBool }, c_expr e)
      in
      {
        guard = IfLet { lhs; rhs; witness = W.match_guard };
        span = Span.of_thir e.span;
      }
    in
    let guard = Option.map ~f:c_guard arm.guard in
    { arm = { arm_pat; body; guard }; span }

  and c_param span (param : Thir.param) : param =
    (* A parameter without a pattern is a hard error here. *)
    let pat =
      match param.pat with
      | Some p -> c_pat p
      | None ->
          assertion_failure [ span ]
            "c_param: expected param.pat to be non-empty"
    in
    {
      typ_span = Option.map ~f:Span.of_thir param.ty_span;
      typ = c_ty (Option.value ~default:span param.ty_span) param.ty;
      pat;
      attrs = c_attrs param.attributes;
    }

  let c_fn_params span (params : Thir.param list) : param list =
    (* Nullary functions get a synthetic unit parameter. *)
    match params with
    | [] -> [ U.make_unit_param (Span.of_thir span) ]
    | _ -> List.map ~f:(c_param span) params

  let c_generic_param (param : Thir.generic_param) : generic_param =
    (* Pick the identifier namespace corresponding to the parameter's kind. *)
    let namespace =
      match (param.kind : Thir.generic_param_kind) with
      | Lifetime _ -> Local_ident.LILifetime
      | Type _ -> Local_ident.Typ
      | Const _ -> Local_ident.Cnst
    in
    let ident =
      match param.name with
      | Fresh ->
          (* fail with ("[Fresh] ident? " ^ Thir.show_generic_param param) *)
          (* TODO might be wrong to just have a wildcard here *)
          ({ name = "_"; id = Local_ident.mk_id namespace 123 } : local_ident)
      | Error -> assertion_failure [ param.span ] "[Error] ident"
      | Plain n -> local_ident namespace n
    in
    let kind =
      match (param.kind : Thir.generic_param_kind) with
      | Lifetime _ -> GPLifetime { witness = W.lifetime }
      | Type _ -> GPType
      (* Rustc always fills in const generics on use. Thus we can drop this information. *)
      | Const { default = _; ty } -> GPConst { typ = c_ty param.span ty }
    in
    {
      ident;
      span = Span.of_thir param.span;
      attrs = c_attrs param.attributes;
      kind;
    }

  let c_clause_kind span id (kind : Thir.clause_kind) :
      generic_constraint option =
    (* Only positive trait bounds and projection clauses are kept; every
       other clause kind is dropped. *)
    match kind with
    | Trait { is_positive = true; trait_ref } ->
        let goal =
          {
            trait = Concrete_ident.of_def_id ~value:false trait_ref.value.def_id;
            args = List.map ~f:(c_generic_value span) trait_ref.value.generic_args;
          }
        in
        (* Trait constraints are named "i<index>" after their position. *)
        Some (GCType { goal; name = "i" ^ Int.to_string id })
    | Projection { impl_expr; assoc_item; ty } ->
        let impl = c_impl_expr span impl_expr in
        let assoc_item =
          Concrete_ident.of_def_id ~value:false assoc_item.def_id
        in
        Some (GCProjection { impl; assoc_item; typ = c_ty span ty })
    | _ -> None

  let c_clause span (index : int) (p : Thir.clause) : generic_constraint option
      =
    (* Delegate to the kind-level translation. *)
    c_clause_kind span index p.kind.value

  (* Removes duplicates, keeping the first occurrence of each element and
     preserving the original order. *)
  let list_dedup (equal : 'a -> 'a -> bool) : 'a list -> 'a list =
    let rec go seen = function
      | [] -> []
      | x :: rest ->
          if List.mem ~equal seen x then go seen rest
          else x :: go (x :: seen) rest
    in
    go []

  let c_bounds ?(offset : int = 0) span bounds =
    (* Number trait (GCType) clauses consecutively starting at [offset];
       other clauses do not consume an index. *)
    let _, rev_clauses =
      List.fold_left bounds ~init:(offset, [])
        ~f:(fun (i, acc) clause ->
          match c_clause span i clause with
          | Some (GCType _ as c) -> (i + 1, c :: acc)
          | Some c -> (i, c :: acc)
          | None -> (i, acc))
    in
    List.rev rev_clauses

  let c_generics ?(offset : int = 0) (generics : Thir.generics) : generics =
    (* Translate the bounds, then deduplicate the resulting constraints. *)
    let constraints =
      c_bounds ~offset generics.span generics.bounds
      |> list_dedup equal_generic_constraint
    in
    { params = List.map ~f:c_generic_param generics.params; constraints }

  (* Translates the kind of a trait item: items with a provided body become
     [TIDefault]s, signature-only items become [TIFn]/[TIType]. *)
  let c_trait_item' (super : Thir.trait_item) (item : Thir.trait_item_kind) :
      trait_item' =
    let span = super.span in
    let mk_default params body =
      TIDefault { params; body = c_expr body; witness = W.trait_item_default }
    in
    match item with
    | Const (_, Some default) -> mk_default [] default.expr
    | Const (ty, None) -> TIFn (c_ty span ty)
    | RequiredFn (sg, _) ->
        (* A required function is represented by its arrow type. *)
        let (Thir.{ inputs; output; _ } : Thir.fn_decl) = sg.decl in
        let output =
          match output with
          | DefaultReturn _span -> unit_typ
          | Return ty -> c_ty span ty
        in
        let inputs =
          match inputs with
          | [] -> [ U.unit_typ ]
          | _ -> List.map ~f:(c_ty span) inputs
        in
        TIFn (TArrow (inputs, output))
    | ProvidedFn (_, { params; body; _ }) ->
        mk_default (c_fn_params span params) body.expr
    | Type (bounds, None) ->
        (* Only trait (GCType) bounds are kept on associated types. *)
        let bounds =
          c_bounds span bounds
          |> List.filter_map ~f:(function GCType impl -> Some impl | _ -> None)
        in
        TIType bounds
    | Type (_, Some _) ->
        unimplemented ~issue_id:929 [ span ]
          "Associated types defaults are not supported by hax yet (it is a \
           nightly feature)"
end

include struct
  (* Instantiate the translation functor with [is_core_item = false] and
     export a few type-level translation functions under stable names (see
     the `.mli`). *)
  open Make (struct
    let is_core_item = false
  end)

  (* Monomorphic aliases: the annotations pin the types exposed by the
     interface. *)
  let import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty = c_ty

  let import_trait_ref : Types.span -> Thir.trait_ref -> Ast.Rust.trait_goal =
    c_trait_ref

  let import_clause :
      Types.span -> int -> Types.clause -> Ast.Rust.generic_constraint option =
    c_clause
end

(** Instantiate the functor for translating expressions. The crate name can be
    configured (there is special handling related to `core`). *)
let make ~krate : (module EXPR) =
  let is_core_item =
    match krate with "core" | "core_hax_model" -> true | _ -> false
  in
  let module Flag = struct
    let is_core_item = is_core_item
  end in
  (module Make (Flag) : EXPR)

let c_trait_item (item : Thir.trait_item) : trait_item =
  let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in
  (* Constraint numbering starts at 1 here (the offset feeds [c_bounds]). *)
  let generics = c_generics ~offset:1 item.generics in
  (* TODO: see TODO in impl items *)
  {
    ti_span = Span.of_thir item.span;
    ti_generics = generics;
    ti_v = c_trait_item' item item.kind;
    ti_ident = Concrete_ident.of_def_id ~value:false item.owner_id;
    ti_attrs = c_item_attrs item.attributes;
  }

let is_automatically_derived (attrs : Thir.attribute list) =
  (* We need something better here, see issue #108 *)
  List.exists attrs ~f:(function
    (* This will break once these attributes get properly parsed. It will
       then be very easy to parse them correctly *)
    | Parsed (AutomaticallyDerived _) -> true
    | _ -> false)

(* An item is skipped when it (or a parent) is marked as automatically
   derived. *)
let should_skip (attrs : Thir.item_attributes) =
  is_automatically_derived (attrs.attributes @ attrs.parent_attributes)

(** Converts a generic parameter to a generic value. This assumes the parameter
    is bound. *)
let generic_param_to_value (param : generic_param) : generic_value =
  match param.kind with
  | GPLifetime { witness } ->
      GLifetime { lt = [%show: local_ident] param.ident; witness }
  | GPType -> GType (TParam param.ident)
  | GPConst { typ } ->
      GConst { e = LocalVar param.ident; typ; span = param.span }

(** Generate a cast function from an inductive to its representation type. *)
let cast_of_enum typ_name generics typ thir_span
    (variants : (variant * Types.variant_for__thir_body) list) : item =
  let span = Span.of_thir thir_span in
  let (module M) = Ast_builder.make span in
  (* The `Self` type: the enum applied to its own generic parameters. *)
  let self =
    let args = List.map ~f:generic_param_to_value generics.params in
    TApp { ident = `Concrete typ_name; args }
  in
  (* Builds an integer literal of the enum's representation kind. *)
  let expr_of_int (n : Int64.t) : expr =
    let kind =
      match typ with
      | TInt kind -> kind
      | typ ->
          (* Fixed error message: was the garbled "expected in type". *)
          assertion_failure [ thir_span ]
            ("cast_of_enum: expected an int type, got " ^ [%show: ty] typ)
    in
    let value = Int64.to_string n in
    M.expr_Literal ~typ (Int { value; negative = Int64.is_negative n; kind })
  in
  let arms =
    (* Each variant comes with a [rustc_middle::ty::VariantDiscr]. Some variants
       have an [Explicit] discr (i.e. an expression) while others have a
       [Relative] discr (the distance to the previous last explicit discr). *)
    List.folding_map variants ~init:None
      ~f:(fun previous_explicit_discriminator (variant, { discr; _ }) ->
        (* Pattern matching the variant, with wildcards for all its fields. *)
        let pat =
          let mk_wild_field (cid, typ, _) =
            { field = `Concrete cid; pat = M.pat_PWild ~typ }
          in
          M.pat_PConstruct ~constructor:(`Concrete variant.name)
            ~is_struct:false ~typ ~is_record:variant.is_record
            ~fields:(List.map ~f:mk_wild_field variant.arguments)
        in
        match (previous_explicit_discriminator, discr) with
        | None, Relative m -> (None, (pat, expr_of_int m))
        | _, Explicit { def_id = did; _ } ->
            let e = M.expr_GlobalVar ~typ (def_id ~value:true did) in
            (Some e, (pat, e))
        | Some e, Relative n ->
            (* Relative discriminant: last explicit value plus the offset. *)
            let n = expr_of_int n in
            let e = U.call Core__ops__arith__Add__add [ e; n ] span typ in
            (previous_explicit_discriminator, (pat, e)))
    |> List.map ~f:(fun (p, e) -> M.arm p e)
  in
  let scrutinee_var = Local_ident.{ name = "x"; id = mk_id Expr (-1) } in
  let scrutinee = M.expr_LocalVar ~typ:self scrutinee_var in
  let ident = cast_name_for_type typ_name in
  let params =
    let pat = U.make_var_pat scrutinee_var self span in
    [ { pat; typ = self; typ_span = None; attrs = [] } ]
  in
  let body = M.expr_Match ~typ ~scrutinee ~arms in
  M.item_Fn ~ident ~attrs:[] ~name:ident ~generics ~params ~safety:Safe ~body

let rec c_item ~ident ~type_only (item : Thir.item) : item list =
  (* Translate an item; a span-free engine error is turned into a synthetic
     "hax error" item instead of aborting the whole translation. *)
  try
    Span.with_owner_hint item.owner_id (fun _ ->
        c_item_unwrapped ~ident ~type_only item)
  with Diagnostics.SpanFreeError.Exn payload ->
    let context, kind = Diagnostics.SpanFreeError.payload payload in
    [
      make_hax_error_item (Span.of_thir item.span) ident
        (Diagnostics.pretty_print_context_kind context kind);
    ]

(* Translates one THIR item into a list of AST items. Most items map to a
   single AST item; enums may additionally produce discriminant constants and
   a cast function, and inherent impls are flattened into one item each. *)
and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list =
  let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in
  let span = Span.of_thir item.span in
  let attrs = c_item_attrs item.attributes in
  (* this is true if the user explicitly requested to erase using the `opaque` macro *)
  let erased_by_user attrs =
    Attr_payloads.payloads attrs
    |> List.exists ~f:(fst >> [%matches? (Erased : Types.ha_payload)])
  in
  let item_erased_by_user = erased_by_user attrs in
  (* A `NeverErased` payload opts the item out of type-only erasure. *)
  let type_only =
    type_only
    && Attr_payloads.payloads attrs
       |> List.exists ~f:(fst >> [%matches? (NeverErased : Types.ha_payload)])
       |> not
  in
  (* This is true if the item should be erased because we are in type-only mode
     (Only certain kinds of items are erased in this case). *)
  let erased_by_hax =
    should_skip item.attributes
    || type_only
       &&
       match item.kind with
       | Fn _ | Static _ -> true
       | Impl { of_trait = Some _; items; _ }
         when List.exists items ~f:(fun item ->
                  match item.kind with Type _ -> true | _ -> false)
              |> not ->
           true
       | _ -> false
  in
  (* If the item is erased by hax we need to add the Erased attribute.
     It is already present if the item is erased by user. *)
  let attrs_with_erased erased_by_hax erased_by_user attrs =
    if erased_by_hax && not erased_by_user then
      Attr_payloads.to_attr Erased span :: attrs
    else attrs
  in
  let attrs = attrs_with_erased erased_by_hax item_erased_by_user attrs in
  let erased = item_erased_by_user || erased_by_hax in

  let mk_one v = { span; v; ident; attrs } in
  let mk v = [ mk_one v ] in
  (* Erased items keep their signature but lose their body, unless marked
     `NeverErased`. *)
  let drop_body =
    erased
    && Attr_payloads.payloads attrs
       |> List.exists ~f:(fst >> [%matches? (NeverErased : Types.ha_payload)])
       |> not
  in
  let c_body = if drop_body then c_expr_drop_body else c_expr in
  let assert_item_def_id () =
    Option.value_or_thunk item.def_id ~default:(fun _ ->
        assertion_failure [ item.span ] "Expected this item to have a `def_id`")
  in
  (* TODO: things might be unnamed (e.g. constants) *)
  match (item.kind : Thir.item_kind) with
  (* Constants are modeled as nullary functions. *)
  | Const (_, generics, _, body) ->
      mk
      @@ Fn
           {
             name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ());
             generics = c_generics generics;
             body = c_body body.expr;
             params = [];
             safety = Safe;
           }
  | Static (true, _, _, _) ->
      unimplemented ~issue_id:1343 [ item.span ]
        "Mutable static items are not supported."
  (* Immutable statics are modeled as nullary functions as well. *)
  | Static (false, _, _ty, body) ->
      let name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ()) in
      let generics = { params = []; constraints = [] } in
      mk
        (Fn
           {
             name;
             generics;
             body = c_body body.expr;
             params = [];
             safety = Safe;
           })
  | TyAlias (_, generics, ty) ->
      mk
      @@ TyAlias
           {
             name =
               Concrete_ident.of_def_id ~value:false (assert_item_def_id ());
             generics = c_generics generics;
             ty = c_ty item.span ty;
           }
  | Fn { generics; def = { body; params; header = { safety; _ }; _ }; _ } ->
      mk
      @@ Fn
           {
             name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ());
             generics = c_generics generics;
             body = c_body body.expr;
             params = c_fn_params item.span params;
             safety = c_header_safety safety;
           }
  (* Erased ADTs keep their name and generics but lose all variants. *)
  | (Enum (_, generics, _, _) | Struct (_, generics, _)) when erased ->
      let generics = c_generics generics in
      let is_struct = match item.kind with Struct _ -> true | _ -> false in
      let def_id = assert_item_def_id () in
      let name = Concrete_ident.of_def_id ~value:false def_id in
      mk @@ Type { name; generics; variants = []; is_struct }
  | Enum (_, generics, variants, repr) ->
      let def_id = assert_item_def_id () in
      let generics = c_generics generics in
      let is_struct = false in
      let discs =
        (* Each variant might introduce an anonymous constant defining its discriminant integer  *)
        List.filter_map ~f:(fun v -> v.disr_expr) variants
        |> List.map ~f:(fun Types.{ def_id; body; _ } ->
               let name = Concrete_ident.of_def_id ~value:true def_id in
               let generics = { params = []; constraints = [] } in
               let body = c_expr body.expr in
               {
                 v = Fn { name; generics; body; params = []; safety = Safe };
                 span;
                 ident = name;
                 attrs = [];
               })
      in
      (* The enum is C-like when no variant carries data; only those get a
         generated cast function (see [cast_of_enum]). *)
      let is_primitive =
        List.for_all
          ~f:(fun { data; _ } ->
            match data with
            | Unit _ | Tuple ([], _, _) | Struct { fields = []; _ } -> true
            | _ -> false)
          variants
      in
      let variants =
        List.map
          ~f:(fun ({ data; def_id = variant_id; attributes; _ } as original) ->
            let is_record =
              [%matches? (Struct { fields = _ :: _; _ } : Types.variant_data)]
                data
            in
            let name = Concrete_ident.of_def_id ~value:true variant_id in
            let arguments =
              match data with
              | Tuple (fields, _, _) | Struct { fields; _ } ->
                  List.map
                    ~f:(fun { def_id = id; ty; span; attributes; _ } ->
                      ( Concrete_ident.of_def_id ~value:true id,
                        c_ty span ty,
                        c_attrs attributes ))
                    fields
              | Unit _ -> []
            in
            let attrs = c_attrs attributes in
            ({ name; arguments; is_record; attrs }, original))
          variants
      in
      let name = Concrete_ident.of_def_id ~value:true def_id in
      let cast_fun =
        cast_of_enum name generics (c_ty item.span repr.typ) item.span variants
      in
      let variants, _ = List.unzip variants in
      let result =
        mk_one (Type { name; generics; variants; is_struct }) :: discs
      in
      if is_primitive then cast_fun :: result else result
  | Struct (_, generics, v) ->
      let generics = c_generics generics in
      let def_id = assert_item_def_id () in
      let is_struct = true in
      (* repeating the attributes of the item in the variant: TODO is that ok? *)
      let v =
        let name = Concrete_ident.of_def_id ~value:true def_id in
        (* let name = Concrete_ident.Create.move_under name ~new_parent:name in *)
        let mk fields is_record =
          let arguments =
            List.map
              ~f:(fun Thir.{ def_id = id; ty; span; attributes; _ } ->
                ( Concrete_ident.of_def_id ~value:true id,
                  c_ty span ty,
                  c_attrs attributes ))
              fields
          in
          { name; arguments; is_record; attrs }
        in
        match v with
        | Tuple (fields, _, _) -> mk fields false
        | Struct { fields = _ :: _ as fields; _ } -> mk fields true
        | _ -> { name; arguments = []; is_record = false; attrs }
      in
      let variants = [ v ] in
      let name = Concrete_ident.of_def_id ~value:false def_id in
      mk @@ Type { name; generics; variants; is_struct }
  | Trait (NotConst, No, safety, _, generics, _bounds, items) ->
      let items =
        List.filter
          ~f:(fun { attributes; _ } -> not (should_skip attributes))
          items
      in
      let name =
        Concrete_ident.of_def_id ~value:false (assert_item_def_id ())
      in
      let { params; constraints } = c_generics generics in
      (* Prepend an explicit `Self` type parameter to the trait's generics. *)
      let self =
        let id =
          Local_ident.mk_id Typ 0
          (* todo *)
        in
        let ident = Local_ident.{ name = "Self"; id } in
        { ident; span; attrs = []; kind = GPType }
      in
      let params = self :: params in
      let generics = { params; constraints } in
      let items = List.map ~f:c_trait_item items in
      let safety = csafety safety in
      mk @@ Trait { name; generics; items; safety }
  | Trait (_, Yes, _, _, _, _, _) ->
      unimplemented ~issue_id:930 [ item.span ] "Auto trait"
  | Trait (Const, _, _, _, _, _, _) ->
      unimplemented ~issue_id:930 [ item.span ] "Const trait"
  (* Inherent impls: each sub-item becomes a standalone item. *)
  | Impl { of_trait = None; generics; items; _ } ->
      let items =
        List.filter
          ~f:(fun { attributes; _ } -> not (should_skip attributes))
          items
      in
      List.map
        ~f:(fun (item : Thir.impl_item) ->
          let item_def_id =
            Concrete_ident.of_def_id ~value:false item.owner_id
          in
          let attrs = c_item_attrs item.attributes in
          let sub_item_erased_by_user = erased_by_user attrs in
          let erased_by_type_only =
            type_only && match item.kind with Fn _ -> true | _ -> false
          in
          let sub_item_erased =
            sub_item_erased_by_user || erased_by_type_only
          in
          let attrs =
            attrs_with_erased erased_by_type_only sub_item_erased_by_user attrs
          in
          let c_body = if sub_item_erased then c_expr_drop_body else c_body in

          let generics = c_generics generics in
          (* Constraint indices of the sub-item start after the impl's own
             trait (GCType) constraints. *)
          let offset =
            List.count generics.constraints ~f:[%matches? GCType _]
          in

          let v =
            match (item.kind : Thir.impl_item_kind) with
            | Fn { body; params; header = { safety; _ }; _ } ->
                let params =
                  if List.is_empty params then [ U.make_unit_param span ]
                  else List.map ~f:(c_param item.span) params
                in
                Fn
                  {
                    name = item_def_id;
                    generics =
                      U.concat_generics generics
                        (c_generics ~offset item.generics);
                    body = c_body body.expr;
                    params;
                    safety = c_header_safety safety;
                  }
            | Const (_ty, e) ->
                Fn
                  {
                    name = item_def_id;
                    generics;
                    (* does that make sense? can we have `const`? *)
                    body = c_body e.expr;
                    params = [];
                    safety = Safe;
                  }
            | Type _ty ->
                assertion_failure [ item.span ]
                  "Inherent implementations are not supposed to have \
                   associated types \
                   (https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations)."
          in
          let ident = Concrete_ident.of_def_id ~value:false item.owner_id in
          { span = Span.of_thir item.span; v; ident; attrs })
        items
  | Impl
      {
        of_trait = Some of_trait;
        generics;
        self_ty;
        items;
        safety;
        parent_bounds;
        _;
      } ->
      let items =
        List.filter
          ~f:(fun { attributes; _ } -> not (should_skip attributes))
          items
      in
      let generics = c_generics generics in
      let offset = List.count generics.constraints ~f:[%matches? GCType _] in
      let items =
        if erased then []
        else
          List.map
            ~f:(fun (item : Thir.impl_item) ->
              (* TODO: introduce a Kind.TraitImplItem or
                 something. Otherwise we have to assume every
                 backend will see traits and impls as
                 records. See https://github.com/hacspec/hax/issues/271. *)
              let ii_ident =
                Concrete_ident.of_def_id ~value:false item.owner_id
              in
              {
                ii_span = Span.of_thir item.span;
                ii_generics = c_generics ~offset item.generics;
                ii_v =
                  (match (item.kind : Thir.impl_item_kind) with
                  | Fn { body; params; _ } ->
                      let params =
                        if List.is_empty params then [ U.make_unit_param span ]
                        else List.map ~f:(c_param item.span) params
                      in
                      IIFn { body = c_expr body.expr; params }
                  | Const (_ty, e) -> IIFn { body = c_expr e.expr; params = [] }
                  | Type { ty; parent_bounds } ->
                      IIType
                        {
                          typ = c_ty item.span ty;
                          (* Keep only trait (GCType) parent bounds, numbered
                             from 0, in source order. *)
                          parent_bounds =
                            List.fold_left ~init:(0, [])
                              ~f:(fun (i, clauses) (clause, impl_expr, span) ->
                                match c_clause span i clause with
                                | Some (GCType trait_goal) ->
                                    ( i + 1,
                                      (c_impl_expr span impl_expr, trait_goal)
                                      :: clauses )
                                | _ -> (i, clauses))
                              parent_bounds
                            |> snd |> List.rev;
                        });
                ii_ident;
                ii_attrs = c_item_attrs item.attributes;
              })
            items
      in
      mk
      @@ Impl
           {
             generics;
             self_ty = c_ty item.span self_ty;
             of_trait =
               ( Concrete_ident.of_def_id ~value:false of_trait.value.def_id,
                 List.map
                   ~f:(c_generic_value item.span)
                   of_trait.value.generic_args );
             items;
             parent_bounds =
               List.filter_mapi
                 ~f:(fun i (clause, impl_expr, span) ->
                   let* bound = c_clause span i clause in
                   match bound with
                   | GCType trait_goal ->
                       Some (c_impl_expr span impl_expr, trait_goal)
                   | _ -> None)
                 parent_bounds;
             safety = csafety safety;
           }
  | Use ({ span = _; res; segments; rename }, _) ->
      let v =
        Use
          {
            path = List.map ~f:(fun x -> fst x.ident) segments;
            is_external =
              List.exists
                ~f:(function None | Some Err -> true | _ -> false)
                res;
            (* TODO: this should represent local/external? *)
            rename;
          }
      in
      (* ident is supposed to always be an actual item, thus here we need to cheat a bit *)
      (* TODO: is this DUMMY thing really needed? there's a `Use` segment (see #272) *)
      let def_id = item.owner_id in
      (* let def_id : Types.def_id =
           let value =
             {
               def_id.contents.value with
               path =
                 def_id.contents.value.path
                 @ [
                     Types.
                       { data = ValueNs "DUMMY"; disambiguator = MyInt64.of_int 0 };
                   ];
             }
           in
           { contents = { def_id.contents with value } }
         in *)
      [
        { span; v; ident = Concrete_ident.of_def_id ~value:false def_id; attrs };
      ]
  | Union _ ->
      unimplemented ~issue_id:998 [ item.span ] "Union types: not supported"
  | GlobalAsm _ ->
      unimplemented ~issue_id:1344 [ item.span ]
        "Inline assembly blocks are not supported"
  | ExternCrate _ | Macro _ | Mod _ | ForeignMod _ | TraitAlias _ ->
      mk NotImplementedYet

let import_item ~type_only (item : Thir.item) :
    concrete_ident * (item list * Diagnostics.t list) =
  let ident = Concrete_ident.of_def_id ~value:false item.owner_id in
  (* Capture diagnostics raised during translation, then disambiguate the
     local identifiers of every produced item. *)
  let result =
    Diagnostics.Core.capture (fun _ ->
        c_item item ~ident ~type_only
        |> List.map ~f:U.Reducers.disambiguate_local_idents)
  in
  (ident, result)


================================================
FILE: engine/lib/import_thir.mli
================================================
(** Translates a THIR type into a hax AST type. *)
val import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty

(** Translates a THIR trait reference into a hax trait goal. *)
val import_trait_ref :
  Types.span -> Types.node_for__item_ref_contents -> Ast.Rust.trait_goal

(** Translates the clause at the given index into a generic constraint, when
    it is of a supported kind. *)
val import_clause :
  Types.span -> int -> Types.clause -> Ast.Rust.generic_constraint option

(** Translates a THIR item into hax AST items, capturing diagnostics. Returns
    the item's identifier together with the translated items and the captured
    reports. *)
val import_item :
  type_only:bool ->
  Types.item_for__thir_body ->
  Concrete_ident.t * (Ast.Rust.item list * Diagnostics.t list)


================================================
FILE: engine/lib/local_ident.ml
================================================
open! Prelude

module T = struct
  (** The namespaces (and special statuses) a local identifier may have. *)
  type kind = Typ | Cnst | Expr | LILifetime | Final | SideEffectHoistVar
  [@@deriving show, yojson, hash, compare, sexp, eq]

  type id = kind * int [@@deriving show, yojson, hash, compare, sexp, eq]

  let mk_id kind n = (kind, n)

  type t = { name : string; id : id }
  [@@deriving show, yojson, hash, compare, sexp, eq]

  (* A [Final] identifier is frozen: name policies leave it untouched. *)
  let make_final name = { name; id = mk_id Final 0 }
  let is_final { id; _ } = match fst id with Final -> true | _ -> false

  let is_side_effect_hoist_var { id; _ } =
    match fst id with SideEffectHoistVar -> true | _ -> false
end

include Base.Comparator.Make (T)
include T


================================================
FILE: engine/lib/local_ident.mli
================================================
module T : sig
  type kind =
    | Typ  (** type namespace *)
    | Cnst  (** Generic constant namespace *)
    | Expr  (** Expression namespace *)
    | LILifetime  (** Lifetime namespace *)
    | Final
        (** Frozen identifier: such an identifier will *not* be rewritten by the
            name policy *)
    | SideEffectHoistVar  (** A variable generated by `Side_effect_utils` *)
  [@@deriving show, yojson, hash, compare, sexp, eq]

  type id = kind * int [@@deriving show, yojson, hash, compare, sexp, eq]
  (** An identifier id pairs a namespace with a numeric disambiguator. *)

  val mk_id : kind -> int -> id
  (** [mk_id kind n] builds the id pairing [kind] with disambiguator [n]. *)

  type t = { name : string; id : id }
  [@@deriving show, yojson, hash, compare, sexp, eq]

  val make_final : string -> t
  (** Creates a frozen final local identifier: such an identifier won't be
      rewritten by a name policy *)

  val is_final : t -> bool
  (** Whether the identifier is frozen (its kind is [Final]). *)

  val is_side_effect_hoist_var : t -> bool
  (** Whether the identifier was generated by [Side_effect_utils]. *)
end

include module type of struct
  include Base.Comparator.Make (T)
  include T
end


================================================
FILE: engine/lib/phase_utils.ml
================================================
open! Prelude

(* Tracks, for a (possibly composed) phase, its own name and the chain of
   phases that ran before it. *)
module Metadata : sig
  type t = private {
    current_phase : Diagnostics.Phase.t;
    previous_phase : t option;
  }

  val make : Diagnostics.Phase.t -> t
  val bind : t -> t -> t
  val previous_phases : t -> Diagnostics.Phase.t list
end = struct
  type t = { current_phase : Diagnostics.Phase.t; previous_phase : t option }

  (* A freshly-made metadata has no predecessor. *)
  let make current_phase = { current_phase; previous_phase = None }

  (* [bind x y]: record [x] as the phase running right before [y]. *)
  let bind (x : t) (y : t) : t = { y with previous_phase = Some x }

  (* All phases up to and including [p], oldest first. *)
  let rec previous_phases' (p : t) : Diagnostics.Phase.t list =
    previous_phases p @ [ p.current_phase ]

  (* All phases strictly before [p], oldest first. *)
  and previous_phases (p : t) : Diagnostics.Phase.t list =
    match p.previous_phase with
    | None -> []
    | Some prev -> previous_phases' prev
end

(** Signature of a translation phase: rewrites items of AST [A] (built from
    features [FA]) into items of AST [B] (built from features [FB]). *)
module type PHASE = sig
  val metadata : Metadata.t

  module FA : Features.T
  module FB : Features.T
  module A : Ast.T
  module B : Ast.T

  val ditems : A.item list -> B.item list
end

(** The part of [PHASE] a phase must actually implement, once the source and
    target ASTs are fixed. *)
module MakePhaseImplemT (A : Ast.T) (B : Ast.T) = struct
  module type T = sig
    val metadata : Metadata.t
    val ditems : A.item list -> B.item list
  end
end

(** Functor that produces module types of monomorphic phases *)
module MAKE_MONOMORPHIC_PHASE (F : Features.T) = struct
  (* What the user supplies: a phase name and an item transformation. *)
  module type ARG = sig
    val phase_id : Diagnostics.Phase.t
    val ditems : Ast.Make(F).item list -> Ast.Make(F).item list
  end

  (* What the user gets back: a [PHASE]-shaped module where both feature
     sets are [F]. *)
  module type T = sig
    include module type of struct
      module FB = F
      module A = Ast.Make (F)
      module B = Ast.Make (FB)
      module ImplemT = MakePhaseImplemT (A) (B)
      module FA = F
    end

    include ImplemT.T
  end
end

(** Make a monomorphic phase: a phase that transform an AST with feature set [F]
    into an AST with the same feature set [F] *)
module MakeMonomorphicPhase
    (F : Features.T)
    (M : MAKE_MONOMORPHIC_PHASE(F).ARG) : MAKE_MONOMORPHIC_PHASE(F).T = struct
  module FA = F
  module FB = F
  module A = Ast.Make (F)
  module B = Ast.Make (FB)
  module ImplemT = MakePhaseImplemT (A) (B)

  module Implem = struct
    let metadata = Metadata.make M.phase_id

    include M

    (* [A.item] and [B.item] are the same underlying type here ([FA = FB =
       F]), but the type checker cannot see through the two functor
       applications; this cast is a runtime no-op. *)
    let subtype (l : A.item list) : B.item list = Stdlib.Obj.magic l
    let ditems (l : A.item list) : B.item list = ditems l |> subtype
  end

  include Implem
end

(** Type of an unconstrainted (forall feature sets) monomorphic phases *)
module type UNCONSTRAINTED_MONOMORPHIC_PHASE = functor (F : Features.T) -> sig
  include module type of struct
    module FB = F
    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = MakePhaseImplemT (A) (B)
    module FA = F
  end

  include ImplemT.T
end

(* Raised to report an error from within a phase; carries only the
   diagnostic kind. *)
exception ReportError of Diagnostics.kind

(** Error-reporting helpers available to a phase. All functions diverge. *)
module type ERROR = sig
  type t = { kind : Diagnostics.kind; span : Ast.span }

  val raise : t -> 'never
  val unimplemented : ?issue_id:int -> ?details:string -> Ast.span -> 'never
  val assertion_failure : Ast.span -> string -> 'never
end

(* Instantiates [ERROR] for a fixed diagnostics context [Ctx.ctx]. *)
module MakeError (Ctx : sig
  val ctx : Diagnostics.Context.t
end) : ERROR = struct
  type t = { kind : Diagnostics.kind; span : Ast.span } [@@deriving show, eq]

  (* Report and abort: converts the span, attaches the owner hint and the
     context, then raises through [Diagnostics.SpanFreeError]. *)
  let raise { kind; span } =
    Diagnostics.SpanFreeError.raise ~span:(Span.to_thir span)
      (Span.owner_hint span) Ctx.ctx kind

  let unimplemented ?issue_id ?details span =
    let issue_id = Option.map ~f:MyInt64.of_int issue_id in
    raise { kind = Unimplemented { issue_id; details }; span }

  let assertion_failure span details =
    raise { span; kind = AssertionFailure { details } }
end

(* Common scaffolding for a phase translating features [FA] into [FB]:
   instantiates both ASTs, their utility modules, the implementation
   signature, and an error reporter bound to this phase's context. *)
module MakeBase
    (FA : Features.T)
    (FB : Features.T)
    (M : sig
      val phase_id : Diagnostics.Phase.t
    end) =
struct
  module A = Ast.Make (FA)
  module B = Ast.Make (FB)
  module UA = Ast_utils.Make (FA)
  module UB = Ast_utils.Make (FB)
  module ImplemT = MakePhaseImplemT (A) (B)
  include M

  let metadata = Metadata.make phase_id

  (* Deliberately shadows [Stdlib.failwith] with unit so that phase code
     cannot call it by accident: failures must go through [Error]. *)
  let failwith = ()

  module Error : ERROR = MakeError (struct
    let ctx = Diagnostics.Context.Phase M.phase_id
  end)
end

(* The identity phase: leaves items untouched, both feature sets are [F]. *)
module Identity (F : Features.T) = struct
  module FA = F
  module FB = F
  module A = Ast.Make (F)
  module B = Ast.Make (F)

  let ditems (l : A.item list) : B.item list = l
  let metadata = Metadata.make Diagnostics.Phase.Identity
end

(* Compile-time check that [Identity] indeed has the [PHASE] shape. *)
module _ (F : Features.T) : PHASE = Identity (F)

(* Manual-debugging switches, kept off by default. *)
let _DEBUG_SHOW_ITEM = false
let _DEBUG_SHOW_BACKTRACE = false

(* Identifies a snapshot point in the phase pipeline: either the initial
   input, or the output of a given phase. *)
module DebugPhaseInfo = struct
  type t = Before | Phase of Diagnostics.Phase.t
  [@@deriving eq, sexp, hash, compare, yojson]

  let show : t -> string = function
    | Before -> "initial_input"
    | Phase p -> Diagnostics.Phase.display p

  let pp (fmt : Stdlib.Format.formatter) (info : t) : unit =
    show info |> Stdlib.Format.pp_print_string fmt
end

(* Collects per-phase snapshots of the full AST so they can be exported (as
   JSON strings) for the engine debugging webapp. Recording and exporting
   are no-ops unless [enable] has been called. *)
module DebugBindPhase : sig
  val add : DebugPhaseInfo.t -> int -> (unit -> Ast.Full.item list) -> unit
  val export : unit -> string list
  val enable : unit -> unit
end = struct
  let enabled = ref false
  let enable () = enabled := true

  (* Maps a snapshot point to its position in the pipeline ([nth]) and the
     items accumulated for it so far. *)
  let cache : (DebugPhaseInfo.t, int * Ast.Full.item list ref) Hashtbl.t =
    Hashtbl.create (module DebugPhaseInfo)

  (* [mk_item] is a thunk so the (possibly expensive) item coercion only
     runs when debugging is enabled. *)
  let add (phase_info : DebugPhaseInfo.t) (nth : int)
      (mk_item : unit -> Ast.Full.item list) =
    if !enabled (* `!` is not `not` *) then
      let _, l =
        Hashtbl.find_or_add cache phase_info ~default:(fun _ -> (nth, ref []))
      in
      l := !l @ mk_item ()
    else ()

  open struct
    module Visitors = Ast_visitors.Make (Features.Full)
  end

  let export' () =
    Logs.info (fun m -> m "Exporting debug informations");

    (* Order snapshots by their pipeline position, then serialize each one. *)
    Hashtbl.to_alist cache
    |> List.sort ~compare:(fun (_, (a, _)) (_, (b, _)) -> Int.compare a b)
    |> List.map ~f:(fun (k, (nth, l)) ->
           let regenerate_span_ids =
             (object
                inherit [_] Visitors.map
                method! visit_span = Fn.const Span.refresh_id
             end)
               #visit_item
               ()
           in
           (* we regenerate spans IDs, so that we have more precise regions *)
           let l = List.map ~f:regenerate_span_ids !l in
           let rustish = Print_rust.pitems l in
           let json =
             `Assoc
               [
                 ("name", `String ([%show: DebugPhaseInfo.t] k));
                 ("nth", `Int nth);
                 ("items", [%yojson_of: Ast.Full.item list] l);
                 ( "rustish",
                   [%yojson_of: Print_rust.AnnotatedString.Output.t] rustish );
               ]
           in
           json)
    |> List.map ~f:Yojson.Safe.to_string

  let export () =
    if !enabled (* recall: ! is deref, not `not`, great op. choice..... *) then
      export' ()
    else []
end

(** A phase-like module translating one item all the way to the full AST. *)
module type S = sig
  module A : Ast.T

  val ditem : A.item -> Ast.Full.item list
end

(* Wraps a phase so that entering and exiting it is logged. Composite
   phases (i.e. `BindPhase(_)(_)`) are left untouched: their components
   are already traced individually. *)
module TracePhase (P : PHASE) = struct
  include P

  let name = [%show: Diagnostics.Phase.t] P.metadata.current_phase
  let composite_phase = Option.is_some P.metadata.previous_phase

  let ditems items =
    if composite_phase then P.ditems items
    else (
      Logs.info (fun m -> m "Entering phase [%s]" name);
      let result = P.ditems items in
      Logs.info (fun m -> m "Exiting phase [%s]" name);
      result)
end

(* Wraps a phase so that its run is profiled. Composite phases (i.e.
   `BindPhase(_)(_)`) are left untouched: their components are profiled
   individually. *)
module ProfilePhase (P : PHASE) = struct
  include P

  let composite_phase = Option.is_some P.metadata.previous_phase

  let ditems items =
    match composite_phase with
    | true -> P.ditems items
    | false ->
        let ctx = Diagnostics.Context.Phase P.metadata.current_phase in
        Profiling.profile ctx (List.length items) (fun () -> P.ditems items)
end

(* Sequential composition of two phases: the output AST of [D1] must be the
   input AST of [D2]. Each component is wrapped with profiling and tracing,
   and AST snapshots are recorded for the debug webapp. *)
module BindPhase
    (D1 : PHASE)
    (D2 : PHASE with module FA = D1.FB and module A = D1.B) =
struct
  module D1' = ProfilePhase (TracePhase (D1))
  module D2' = ProfilePhase (TracePhase (D2))
  module FA = D1.FA
  module FB = D2.FB
  module A = D1.A
  module B = D2.B

  let metadata = Metadata.bind D1.metadata D2.metadata

  let ditems (items : A.item list) : B.item list =
    let nth = List.length @@ Metadata.previous_phases D1'.metadata in
    (* When [D1] is the very first phase, snapshot the untouched input. The
       [Obj.magic] coercions below cast to the full-featured AST and are
       used only for debug snapshots. *)
    (if Int.equal nth 0 then
       let coerce_to_full_ast : D1'.A.item -> Ast.Full.item =
         Stdlib.Obj.magic
       in
       DebugBindPhase.add Before 0 (fun _ ->
           List.map ~f:coerce_to_full_ast items));
    let items' = D1'.ditems items in
    let coerce_to_full_ast : D2'.A.item list -> Ast.Full.item list =
      Stdlib.Obj.magic
    in
    (* Snapshot the intermediate AST produced by [D1]. *)
    DebugBindPhase.add (Phase D1'.metadata.current_phase) (nth + 1) (fun _ ->
        coerce_to_full_ast items');
    D2'.ditems items'
end


================================================
FILE: engine/lib/phases/phase_and_mut_defsite.ml
================================================
open! Prelude

(* This phase rewrites functions taking `&mut` parameters: each such
   parameter becomes an owned mutable binding, and its final value is
   returned (tupled with the original output) instead of being written back
   through the reference. Requires the listed features to be enabled. *)
module%inlined_contents Make
    (FA :
      Features.T
        with type mutable_variable = Features.On.mutable_variable
         and type mutable_reference = Features.On.mutable_reference
         and type nontrivial_lhs = Features.On.nontrivial_lhs
         and type arbitrary_lhs = Features.On.arbitrary_lhs
         and type reference = Features.On.reference) =
struct
  open Ast
  module FB = FA

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module A = Ast.Make (FA)
  module B = Ast.Make (FB)
  module BVisitors = Ast_visitors.Make (FB)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    (* Source and target features are identical: identity subtyping. *)
    module S = struct
      include Features.SUBTYPE.Id
    end

    module UB = Ast_utils.Make (FB)

    module M = struct
      open B
      open UB

      (* given `ty`, produces type `&mut ty` *)
      let mut_ref (typ : ty) : ty =
        TRef
          {
            witness = Features.On.reference;
            region = "";
            typ;
            mut = Mutable Features.On.mutable_reference;
          }

      (* given `e`, produces well-typed expr `&mut e` *)
      let mut_borrow (e : expr) : expr =
        let borrow =
          Borrow
            {
              kind = Mut Features.On.mutable_reference;
              e;
              witness = Features.On.reference;
            }
        in
        { e with e = borrow; typ = mut_ref e.typ }

      (* When [param] (the [i]th parameter) has type `&mut _`, return its
         binder (inventing a fresh one for wildcard patterns), the pointee
         type and the pattern's span; reject any other non-trivial `&mut`
         pattern. *)
      let expect_mut_ref_param (all_vars : local_ident list) (i : int)
          (param : param) : (local_ident * ty * span) option =
        let* typ = Expect.mut_ref param.typ in
        let span = param.pat.span in
        match param.pat.p with
        | PBinding
            { mut = Immutable; mode = ByValue; var; typ = _; subpat = None } ->
            Some (var, typ, span)
        | PWild ->
            let name = "arg_" ^ Int.to_string i ^ "_wild" in
            Some (fresh_local_ident_in all_vars name, typ, span)
        | _ -> Error.raise { kind = NonTrivialAndMutFnInput; span }

      (* Rewrites a signature that has `&mut _` parameters: each becomes an
         owned mutable binding and its pointee type is appended to the
         output type (as a tuple). Returns [None] when no parameter is
         `&mut`. Note that [expect_mut_ref_param] runs twice per parameter
         (once in [filter_mapi], once in [mapi]); it is deterministic, so
         this is only a minor redundancy. *)
      let rewrite_fn_sig (all_vars : local_ident list) (params : param list)
          (output : ty) :
          (param list * ty * (local_ident * ty * span) list) option =
        let and_muts =
          List.filter_mapi ~f:(expect_mut_ref_param all_vars) params
        in
        match and_muts with
        | [] -> None
        | _ ->
            let params =
              List.mapi
                ~f:(fun i param ->
                  match expect_mut_ref_param all_vars i param with
                  | None -> param
                  | Some (var, typ, span) ->
                      let p : pat' =
                        let mut = Mutable Features.On.mutable_variable in
                        PBinding
                          { mut; mode = ByValue; var; typ; subpat = None }
                      in
                      { param with pat = { p; span; typ }; typ })
                params
            in
            (* New output: `(mut₁, …, mutₙ, output)`, dropping [output]
               when it is unit. *)
            let output_components =
              List.map ~f:snd3 and_muts
              @ if UB.is_unit_typ output then [] else [ output ]
            in
            let output = UB.make_tuple_typ output_components in
            Some (params, output, and_muts)

      (* visit an expression and replace all `Return e` nodes by `Return (f e)` *)
      let map_returns ~(f : expr -> expr) : expr -> expr =
        let visitor =
          object (self)
            inherit [_] Visitors.map as super

            method! visit_expr' () e =
              match e with
              | Return { e; witness } ->
                  (* Rewrite returns nested inside [e] first, then apply
                     [f] to the returned value itself. *)
                  let e = self#visit_expr () e in
                  Return { e = f e; witness }
              | _ -> super#visit_expr' () e
          end
        in
        visitor#visit_expr ()

      (* transforms
          `(let … = … in)* expr`
         into
          `(let … = … in)* let output = expr in output` *)
      let wrap_in_identity_let (e : expr) : expr =
        let var = Local_ident.{ id = mk_id Expr 0; name = "hax_temp_output" } in
        let f (e : expr) : expr =
          match e.e with
          | GlobalVar (`TupleCons 0) -> e
          | _ ->
              let rhs = e in
              let lhs, body =
                if [%eq: ty] e.typ UB.unit_typ then
                  (* This case has been added to fix https://github.com/hacspec/hax/issues/720.
                     It might need a better solution. *)
                  ( UB.M.pat_PWild ~span:e.span ~typ:e.typ,
                    UB.M.expr_unit ~span:e.span )
                else
                  (UB.make_var_pat var e.typ e.span, { e with e = LocalVar var })
              in
              { body with e = Let { monadic = None; lhs; rhs; body } }
        in
        UB.map_body_of_nested_lets f e

      (* Rewrites uses of the given `&mut`-typed variables so they behave
         as plain mutable variables: `*x` becomes `x`, and a bare `x` is
         re-typed from `&mut t` to `t`. *)
      let mutref_to_mut_expr (vars : local_ident list) : expr -> expr =
        (* Option "or else": keep [x] if [Some], otherwise try [f ()]. *)
        let ( <|?> ) (type a) (x : a option) (f : unit -> a option) : a option =
          x |> Option.map ~f:Option.some |> Option.value_or_thunk ~default:f
        in
        let in_vars = List.mem vars ~equal:[%equal: local_ident] in
        let expect_in_vars_local_var (x : expr) : local_ident option =
          match x.e with LocalVar v when in_vars v -> Some v | _ -> None
        in
        let retyped_local_var_in_vars e =
          let* var = expect_in_vars_local_var e in
          (* var is supposed to be typed `&mut _` *)
          let typ =
            Expect.mut_ref e.typ
            |> Option.value_or_thunk ~default:(fun () ->
                   Error.assertion_failure e.span
                   @@ "Expect.mut_ref: got `None`")
          in
          (* we reconstruct `e` to type it correctly *)
          Some { e = LocalVar var; typ; span = e.span }
        in
        let visitor =
          object
            inherit [_] Visitors.map as super

            method! visit_expr () e =
              (* First try `*x` (deref of a tracked var), then a bare `x`,
                 otherwise recurse into sub-expressions. *)
              (let* e = Expect.deref e in
               retyped_local_var_in_vars e)
              <|?> (fun _ -> retyped_local_var_in_vars e)
              |> Option.value_or_thunk ~default:(fun _ -> super#visit_expr () e)
          end
        in
        visitor#visit_expr ()

      (* Rebuilds the `lhs` of every assignment from its expression form so
         that field/index projections become structured [lhs] nodes,
         falling back to [LhsArbitraryExpr] for anything else. *)
      let convert_lhs =
        (* TODO: refactor (see #316) *)
        let rec place_to_lhs (p : Place.t) : lhs =
          let typ = p.typ in
          match p.place with
          | LocalVar var -> LhsLocalVar { var; typ }
          | FieldProjection { place; projector } ->
              let e = place_to_lhs place in
              LhsFieldAccessor
                {
                  witness = Features.On.nontrivial_lhs;
                  field = projector;
                  typ;
                  e;
                }
          | IndexProjection { place; index } ->
              let e = place_to_lhs place in
              LhsArrayAccessor
                { e; typ; index; witness = Features.On.nontrivial_lhs }
          | _ ->
              let e = Place.to_expr p in
              LhsArbitraryExpr { witness = Features.On.arbitrary_lhs; e }
        in

        let visitor =
          object
            inherit [_] Visitors.map as super

            method! visit_expr () e =
              (* Errors raised while rewriting are materialized as
                 `hax_failure` expressions instead of aborting the item. *)
              try super#visit_expr () e
              with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
                UB.hax_failure_expr e.span e.typ (context, kind)
                  (UB.LiftToFullAst.expr e)

            method! visit_expr' () e =
              match e with
              | Assign { lhs; e; witness } ->
                  let span = e.span in
                  let lhs = UB.expr_of_lhs span lhs in
                  let lhs =
                    lhs |> Place.of_expr
                    |> Option.value_or_thunk ~default:(fun () ->
                           Error.assertion_failure span
                           @@ "Place.of_expr: got `None` for: "
                           ^ Print_rust.pexpr_str (UB.LiftToFullAst.expr lhs))
                    |> place_to_lhs
                  in
                  Assign { lhs; e; witness }
              | _ -> super#visit_expr' () e
          end
        in
        visitor#visit_expr ()

      (* Rewrites a whole function: if its signature has `&mut` parameters,
         rewrite the parameters, re-type the uses of those variables in the
         body, convert assignment left-hand sides, and make every exit
         point (each `return` and the final expression) yield the tuple of
         final mutable values (plus the original result when non-unit).
         Returns [None] when there is no `&mut` parameter. *)
      let rewrite_function (params : param list) (body : expr) :
          (param list * expr) option =
        (* All identifiers already in use: needed to pick fresh names. *)
        let all_vars =
          UB.Reducers.collect_local_idents#visit_expr () body
          :: List.map ~f:(Reducers.collect_local_idents#visit_param ()) params
          |> Set.union_list (module Local_ident)
          |> Set.to_list
        in
        let* params, _, vars = rewrite_fn_sig all_vars params body.typ in
        let idents = List.map ~f:fst3 vars in
        let vars =
          List.map
            ~f:(fun (var, typ, span) -> B.{ span; typ; e = LocalVar var })
            vars
        in
        (* [f] turns an exit value [e] into `(mut₁, …, mutₙ, e)`. *)
        let f (e : B.expr) : B.expr =
          UB.make_tuple_expr ~span:e.span
            (vars @ if UB.is_unit_typ e.typ then [] else [ e ])
        in
        let body =
          body |> mutref_to_mut_expr idents |> convert_lhs |> map_returns ~f
          |> wrap_in_identity_let
          |> UB.map_body_of_nested_lets f
        in
        Some (params, body)
    end

    include M

    (* Applies the rewriting to every item: free functions, impl functions,
       trait method signatures (for which a dummy body is crafted so that
       [rewrite_function] can be reused), and trait default methods. *)
    let ditems (items : A.item list) : B.item list =
      (* [FA = FB], so this cast is a runtime no-op. *)
      let items : B.item list = Stdlib.Obj.magic items in
      let visitor =
        object
          inherit [_] BVisitors.map as super

          method! visit_impl_item' () item' =
            (match item' with
            | IIFn { params; body } ->
                let* params, body = rewrite_function params body in
                Some (B.IIFn { body; params })
            | _ -> None)
            |> Option.value_or_thunk
                 ~default:(Fn.flip super#visit_impl_item' item')

          method! visit_trait_item () item =
            let span = item.ti_span in
            let ti_v =
              (match item.ti_v with
              | TIFn (TArrow (inputs, output)) ->
                  (* Here, we craft a dummy function so that we can
                     call `rewrite_function` *)
                  let var = Local_ident.{ id = mk_id Expr 0; name = "dummy" } in
                  let params =
                    List.map
                      ~f:(fun typ ->
                        let pat = UB.make_var_pat var typ span in
                        (* let pat : B.pat = { typ; p; span } in *)
                        B.{ pat; typ; typ_span = None; attrs = [] })
                      inputs
                  in
                  let body =
                    B.
                      {
                        e =
                          (* this is wrongly typed, though it's fine,
                             we throw this away before returning *)
                          (UB.unit_expr span).e;
                        typ = output;
                        span;
                      }
                  in
                  let* params, body = rewrite_function params body in
                  (* Only the rewritten arrow type is kept. *)
                  let inputs = List.map ~f:(fun p -> p.typ) params in
                  let output = body.typ in
                  let ty = B.TArrow (inputs, output) in
                  Some (B.TIFn ty)
              | TIDefault { params; body; witness } ->
                  let* params, body = rewrite_function params body in
                  let witness = S.trait_item_default span witness in
                  Some (B.TIDefault { params; body; witness })
              | _ -> None)
              |> Option.value_or_thunk
                   ~default:(Fn.flip super#visit_trait_item' item.ti_v)
            in
            { item with ti_v }

          method! visit_item () i =
            (* Errors become `hax_error` items carrying the pretty-printed
               message together with the last available AST. *)
            try super#visit_item () i
            with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
              let error = Diagnostics.pretty_print_context_kind context kind in
              let cast_item : B.item -> Ast.Full.item = Stdlib.Obj.magic in
              let ast = cast_item i |> Print_rust.pitem_str in
              let msg =
                error ^ "\nLast available AST for this item:\n\n" ^ ast
              in
              B.make_hax_error_item i.span i.ident msg

          method! visit_item' () item' =
            (match item' with
            | Fn { name; generics; body; params; safety } ->
                let* params, body = rewrite_function params body in
                Some (B.Fn { name; generics; body; params; safety })
            | _ -> None)
            |> Option.value_or_thunk ~default:(Fn.flip super#visit_item' item')
        end
      in
      List.map ~f:(visitor#visit_item ()) items

    (* This phase works on whole items only; the expression-level entry
       point is intentionally unsupported. *)
    let dexpr (_e : A.expr) : B.expr =
      Stdlib.failwith "Should not be called directly"
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_and_mut_defsite.mli
================================================
(** Rewrites functions taking `&mut` parameters: each such parameter becomes
    an owned mutable binding whose final value is returned, tupled with the
    original output. Requires the features witnessing mutable
    variables/references and (non-)trivial left-hand sides. *)
module Make
    (F :
      Features.T
        with type mutable_variable = Features.On.mutable_variable
         and type mutable_reference = Features.On.mutable_reference
         and type nontrivial_lhs = Features.On.nontrivial_lhs
         and type arbitrary_lhs = Features.On.arbitrary_lhs
         and type reference = Features.On.reference) : sig
  include module type of struct
    module FB = F
    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
    module FA = F
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_bundle_cycles.ml
================================================
open! Prelude

(* See the `.mli`: breaks namespace-level cycles by regrouping mutually
   dependent items into "bundle" namespaces, via the dependency graph. *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* NOTE(review): [Attrs] appears unused in this phase — kept as-is;
         confirm before removing. *)
      module Attrs = Attr_payloads.MakeBase (Error)

      let ditems items =
        let module DepGraph = Dependencies.Make (F) in
        DepGraph.bundle_cyclic_modules items
    end)


================================================
FILE: engine/lib/phases/phase_bundle_cycles.mli
================================================
(** This phase makes sure the items don't yield any cycle, namespace-wise. It
    does so by creating namespaces we call bundles, in which we regroup
    definitions that would otherwise yield cycles. *)

(* Monomorphic: the input and output feature sets are identical. *)
module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_cf_into_monads.ml
================================================
open! Prelude

(* Translates control flow (`return`, `?`, …) into explicit monadic form:
   values of `Option`, `Result` or `ControlFlow` type combined with monadic
   `let`-bindings. Requires monadic actions/bindings to be absent in the
   input features. *)
module%inlined_contents Make
    (F :
      Features.T
        with type monadic_action = Features.Off.monadic_action
         and type monadic_binding = Features.Off.monadic_binding) =
struct
  open Ast
  module FA = F

  (* Target features: control-flow constructs are switched off, monadic
     bindings are switched on. *)
  module FB = struct
    include F
    include Features.Off.Continue
    include Features.Off.Early_exit
    include Features.Off.Question_mark
    include Features.Off.Break
    include Features.On.Monadic_binding
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)

    module S = struct
      module A = FA
      module B = FB
      include Features.SUBTYPE.Id

      let monadic_binding _ = Features.On.monadic_binding
    end

    [%%inline_defs dmutability + dsafety_kind]

    (* Tracks which monad (if any) an expression's type lives in, and
       provides lifting and least-upper-bound operations on monads. *)
    module KnownMonads = struct
      type t = { monad : B.supported_monads option; typ : B.ty }
      [@@deriving show, eq]
      (** types of computations *)
      (* | MId of { typ : B.ty } *)
      (* | MReturn of { return : B.ty; continue : B.ty } *)

      (** translate a computation type to a simple type *)
      let to_typ (x : t) : B.ty =
        match x.monad with
        | None -> x.typ
        | Some (MResult err) ->
            let args = List.map ~f:(fun t -> B.GType t) [ x.typ; err ] in
            let ident =
              Global_ident.of_name ~value:false Core__result__Result
            in
            TApp { ident; args }
        | Some MOption ->
            let args = List.map ~f:(fun t -> B.GType t) [ x.typ ] in
            let ident =
              Global_ident.of_name ~value:false Core__option__Option
            in
            TApp { ident; args }
        | Some (MException return) ->
            let args = List.map ~f:(fun t -> B.GType t) [ return; x.typ ] in
            let ident =
              Global_ident.of_name ~value:false
                Core__ops__control_flow__ControlFlow
            in
            TApp { ident; args }

      (* Inverse of [to_typ]: recognize `ControlFlow`/`Result`/`Option`
         type applications as monadic computation types. *)
      let from_typ' : B.ty -> t = function
        | TApp { ident; args = [ GType return; GType continue ] }
          when Global_ident.eq_name Core__ops__control_flow__ControlFlow ident
          ->
            { monad = Some (MException return); typ = continue }
        | TApp { ident; args = [ GType ok; GType err ] }
          when Global_ident.eq_name Core__result__Result ident ->
            { monad = Some (MResult err); typ = ok }
        | TApp { ident; args = [ GType ok ] }
          when Global_ident.eq_name Core__option__Option ident ->
            { monad = Some MOption; typ = ok }
        | typ -> { monad = None; typ }

      (** the type of pure expression we can return in the monad *)
      let pure_type (x : t) = x.typ

      (* Injects [e] (a computation in [monad_of_e]) into
         [monad_destination], wrapping with `Ok`/`Some`/`Continue` as
         needed; fails when no injection exists. [details] enriches the
         error message. *)
      let lift details (e : B.expr) monad_of_e monad_destination : B.expr =
        match (monad_of_e, monad_destination) with
        | m1, m2 when [%equal: B.supported_monads option] m1 m2 -> e
        | None, Some (B.MResult _) ->
            UB.call_Constructor Core__result__Result__Ok false [ e ] e.span
              (to_typ { monad = monad_destination; typ = e.typ })
        | None, Some B.MOption ->
            UB.call_Constructor Core__option__Option__Some false [ e ] e.span
              (to_typ { monad = monad_destination; typ = e.typ })
        | _, Some (B.MException _) ->
            UB.call_Constructor Core__ops__control_flow__ControlFlow__Continue
              false [ e ] e.span
              (to_typ { monad = monad_destination; typ = e.typ })
        | m1, m2 ->
            Error.assertion_failure e.span
            @@ "Cannot lift from monad ["
            ^ [%show: B.supported_monads option] m1
            ^ "] to monad ["
            ^ [%show: B.supported_monads option] m2
            ^ "]" ^ "\n Details: " ^ details

      (* Least upper bound of two monads: `MException` absorbs `MResult`
         and `MOption`; `MResult` and `MOption` are incompatible. *)
      let lub span m1 m2 =
        match (m1, m2) with
        | None, m | m, None -> m
        | Some m1, Some m2 ->
            let impossible () =
              Error.assertion_failure span
              @@ "Trying to compute the lub of two incompatible monads:"
              ^ "\n • "
              ^ [%show: B.supported_monads] m1
              ^ "\n • "
              ^ [%show: B.supported_monads] m2
            in
            Option.some
              (match (m1, m2) with
              | (B.MResult _ | B.MOption), (B.MException _ as m)
              | (B.MException _ as m), (B.MResult _ | B.MOption) ->
                  m
              | B.MResult _, B.MResult _
              | B.MOption, B.MOption
              | B.MException _, B.MException _ ->
                  m1
              | B.MResult _, B.MOption | B.MOption, B.MResult _ -> impossible ())

      (** after transformation, are we **getting** inside a monad? *)
      let from_typ dty (old : A.ty) (new_ : B.ty) : t =
        let old = dty Span.default (* irrelevant *) old in
        let monad = from_typ' new_ in
        if B.equal_ty (pure_type monad) old then monad
        else { monad = None; typ = new_ }
    end

    (** Translates an expression while turning control-flow effects ([return],
        [?]) into explicit monads. The monad introduced by translating a
        subexpression is recovered by comparing its source type with its
        translated type (see [KnownMonads.from_typ]). *)
    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      let span = expr.span in
      let typ = dty span expr.typ in
      match expr.e with
      (* Monadic lets cannot occur in the input language: refutation case. *)
      | Let { monadic = Some _; _ } -> .
      | Let { monadic = None; lhs; rhs; body } -> (
          let body' = dexpr body in
          let rhs' = dexpr rhs in
          (* Which monad (if any) did translating [rhs] introduce? *)
          let mrhs = KnownMonads.from_typ dty rhs.typ rhs'.typ in
          let lhs = { (dpat lhs) with typ = KnownMonads.pure_type mrhs } in
          match mrhs with
          | { monad = None; _ } ->
              (* Pure [rhs]: emit a plain, non-monadic let. *)
              let monadic = None in
              let rhs = rhs' in
              let body = body' in
              { e = Let { monadic; lhs; rhs; body }; span; typ = body.typ }
          | _ ->
              (* Monadic [rhs]: compute the least upper bound of the monads of
                 [rhs] and [body], lift both into it, and emit a monadic
                 binding. *)
              let mbody = KnownMonads.from_typ dty body.typ body'.typ in
              let m = KnownMonads.lub span mbody.monad mrhs.monad in
              let body = KnownMonads.lift "Let:body" body' mbody.monad m in
              let rhs = KnownMonads.lift "Let:rhs" rhs' mrhs.monad m in
              let monadic =
                match m with
                | None -> None
                | Some m -> Some (m, Features.On.monadic_binding)
              in
              { e = Let { monadic; lhs; rhs; body }; span; typ = body.typ })
      | Match { scrutinee; arms } ->
          (* Translate each arm, remembering which monad its body ended up
             in. *)
          let arms =
            List.map
              ~f:(fun { arm = { arm_pat; body = a; guard }; span } ->
                let b = dexpr a in
                let m = KnownMonads.from_typ dty a.typ b.typ in
                let g = Option.map ~f:dguard guard in
                (m, (dpat arm_pat, span, b, g)))
              arms
          in
          (* Lift every arm body into the lub of all the arms' monads. *)
          let arms =
            let m =
              List.map ~f:(fun ({ monad; _ }, _) -> monad) arms
              |> List.reduce ~f:(KnownMonads.lub span)
            in
            match m with
            | None -> [] (* [arms] is empty *)
            | Some m ->
                List.map
                  ~f:(fun (mself, (arm_pat, span, body, guard)) ->
                    let body = KnownMonads.lift "Match" body mself.monad m in
                    let arm_pat = { arm_pat with typ = body.typ } in
                    ({ arm = { arm_pat; body; guard }; span } : B.arm))
                  arms
          in
          let typ =
            match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ
          in
          { e = Match { scrutinee = dexpr scrutinee; arms }; span; typ }
      | If { cond; then_; else_ } ->
          let cond = dexpr cond in
          let then' = dexpr then_ in
          let else' = Option.map ~f:dexpr else_ in
          let mthen = KnownMonads.from_typ dty then_.typ then'.typ in
          (* A missing else-branch is treated as being in the same monad as the
             then-branch. *)
          let melse =
            match (else_, else') with
            | Some else_, Some else' ->
                KnownMonads.from_typ dty else_.typ else'.typ
            | _ -> mthen
          in
          let m = KnownMonads.lub span mthen.monad melse.monad in
          let else_ =
            Option.map
              ~f:(fun else' ->
                KnownMonads.lift "If:else-branch" else' melse.monad m)
              else'
          in
          let then_ = KnownMonads.lift "If:then-branch" then' mthen.monad m in
          { e = If { cond; then_; else_ }; span; typ = then_.typ }
      | Continue _ ->
          Error.unimplemented ~issue_id:15
            ~details:"TODO: Monad for loop-related control flow" span
      | Break _ ->
          Error.unimplemented ~issue_id:15
            ~details:"TODO: Monad for loop-related control flow" span
      (* [e?] is translated as [e] alone: the monad becomes visible in the
         translated type. *)
      | QuestionMark { e; _ } -> dexpr e
      (* [return e] becomes [ControlFlow::Break e], typed in the exception
         monad. *)
      | Return { e; _ } ->
          let open KnownMonads in
          let e = dexpr e in
          UB.call_Constructor Core__ops__control_flow__ControlFlow__Break false
            [ e ] span
            (to_typ @@ { monad = Some (MException e.typ); typ })
      | [%inline_arms
          "dexpr'.*" - Let - Match - If - Continue - Break - QuestionMark
          - Return] ->
          map (fun e -> B.{ e; typ = dty expr.span expr.typ; span = expr.span })

    (** Wraps [e] in an explicit [ControlFlowMonad::lift] call whenever its
        type differs from [target_type]; returns [e] unchanged otherwise. *)
    and lift_if_necessary (e : B.expr) (target_type : B.ty) =
      if B.equal_ty e.typ target_type then e
      else
        UB.call Rust_primitives__hax__control_flow_monad__ControlFlowMonad__lift
          [ e ] e.span target_type
    [@@inline_ands bindings_of dexpr - dexpr']

    module Item = struct
      module OverrideDExpr = struct
        (** Item-level expression translation: if the translated expression
            ended up in a monad, eliminate that monad by wrapping it with the
            corresponding [run] function (mexception/mresult/moption). *)
        let dexpr (e : A.expr) : B.expr =
          let e' = dexpr e in
          match KnownMonads.from_typ dty e.typ e'.typ with
          | { monad = Some m; typ } ->
              UB.call
                (match m with
                | MException _ ->
                    Rust_primitives__hax__control_flow_monad__mexception__run
                | MResult _ ->
                    Rust_primitives__hax__control_flow_monad__mresult__run
                | MOption ->
                    Rust_primitives__hax__control_flow_monad__moption__run)
                [ e' ] e.span typ
          | _ -> e'
      end

      (* Shadow [dexpr] with the monad-eliminating version for all the
         item-level definitions inlined below. *)
      open OverrideDExpr

      [%%inline_defs "Item.*"]
    end

    include Item
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_cf_into_monads.mli
================================================
open! Prelude

(** Functor signature for the control-flow-into-monads phase: given a feature
    set with monadic actions and bindings off, it produces a phase whose
    output language has [continue], early exit, [?] and [break] turned off and
    monadic bindings turned on. *)
module Make
    (F :
      Features.T
        with type monadic_action = Features.Off.monadic_action
         and type monadic_binding = Features.Off.monadic_binding) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.Off.Continue
      include Features.Off.Early_exit
      include Features.Off.Question_mark
      include Features.Off.Break
      include Features.On.Monadic_binding
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_direct_and_mut.ml
================================================
open! Prelude

module%inlined_contents Make
    (FA :
      Features.T
        with type raw_pointer = Features.Off.raw_pointer
         and type mutable_pointer = Features.Off.mutable_pointer) =
struct
  open Ast

  (* Output feature set: mutable references are turned off; mutable
     variables and richer left-hand sides are turned on to encode them. *)
  module FB = struct
    include FA
    include Features.On.Mutable_variable
    include Features.On.Arbitrary_lhs
    include Features.On.Nontrivial_lhs
    include Features.Off.Mutable_reference
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  (** Reference to a fresh local ident (item-wise): [ditem'] below resets it
      for every item to a name ("out") that is fresh with respect to the
      item's local identifiers, so the generated binder never clashes. *)
  let out_var = ref Local_ident.{ id = mk_id Expr 0; name = "out" }

  module Implem : ImplemT.T = struct
    let metadata = metadata

    (* Controlled by the HAX_CORE_EXTRACTION_MODE environment variable: when
       "on", [dty] renders `&mut T` as `Rust_primitives::hax::MutRef<T>`
       instead of rejecting it. *)
    let hax_core_extraction =
      Sys.getenv "HAX_CORE_EXTRACTION_MODE"
      |> [%equal: string option] (Some "on")

    (* Feature-subtyping witnesses: identity, plus the three features this
       phase turns on in the output language. *)
    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.Mutable_variable
      include Features.SUBTYPE.On.Nontrivial_lhs
      include Features.SUBTYPE.On.Arbitrary_lhs
    end

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    (* Option-monad binder used by the helpers below. *)
    let ( let* ) x f = Option.bind ~f x

    module Place = UA.Place

    (** Expects [e] to be a mutable borrow, after normalizing nested borrows
        and removing unsize coercions. On success the borrowed expression is
        classified: [First place] when it denotes a place, [Second expr]
        otherwise ("pure" expression). Returns [None] when [e] is not a
        mutable borrow at all. *)
    let expect_mut_borrow_of_place_or_pure_expr (e : A.expr) :
        (Place.t, A.expr) Either.t option =
      let normalized =
        UA.remove_unsize (UA.Mappers.normalize_borrow_mut#visit_expr () e)
      in
      match UA.Expect.mut_borrow normalized with
      | None -> None
      | Some borrowed -> (
          match Place.of_expr borrowed with
          | Some p -> Some (Either.First (Place.skip_allowed_deref_mut p))
          | None -> Some (Either.Second borrowed))

    [%%inline_defs dmutability + dsafety_kind]

    (** Type translation: `&mut T` is rejected with [UnallowedMutRef] unless
        we are extracting hax's core libraries, in which case it becomes
        `Rust_primitives::hax::MutRef<T>`. Shared references are kept. *)
    let rec dty (span : span) (ty : A.ty) : B.ty =
      match ty with
      | [%inline_arms "dty.*" - TRef] -> auto
      | TRef { mut = Mutable _; typ; _ } ->
          if hax_core_extraction then
            TApp
              {
                ident =
                  Global_ident.of_name ~value:false Rust_primitives__hax__MutRef;
                args = [ GType (dty span typ) ];
              }
          else Error.raise { kind = UnallowedMutRef; span }
      | TRef { witness; typ; mut = Immutable as mut; region } ->
          TRef { witness; typ = dty span typ; mut; region }

    (** Borrow kinds: mutable borrows are demoted to shared ones. *)
    and dborrow_kind (_span : span) (borrow_kind : A.borrow_kind) :
        B.borrow_kind =
      match borrow_kind with
      | [%inline_arms "dborrow_kind.*" - Mut] -> auto
      | Mut _ -> Shared

    (* TODO: refactor (see #316) *)
    (** Turns a place into a left-hand side suitable for an [Assign] node.
        Variables, vector refs, field and index projections map to dedicated
        LHS constructors; anything else falls back to [LhsArbitraryExpr]. *)
    and place_to_lhs (p : Place.t) : B.lhs =
      let typ = dty p.span p.typ in
      match p.place with
      | LocalVar var -> LhsLocalVar { var; typ }
      | VecRef inner ->
          LhsVecRef
            {
              e = place_to_lhs inner;
              typ;
              witness = Features.On.nontrivial_lhs;
            }
      | FieldProjection { place; projector } ->
          let e = place_to_lhs place in
          LhsFieldAccessor
            { witness = Features.On.nontrivial_lhs; field = projector; typ; e }
      | IndexProjection { place; index } ->
          let e = place_to_lhs place in
          let index = dexpr index in
          LhsArrayAccessor
            { e; typ; index; witness = Features.On.nontrivial_lhs }
      | _ ->
          let e = Place.to_expr p |> dexpr in
          LhsArbitraryExpr { witness = Features.On.arbitrary_lhs; e }

    (** Translates a function application. Arguments expected by [f] as
        `&mut _` are encoded functionally: the call is rewritten so that [f]
        returns the new values of those arguments, tupled with its original
        output, and the call site assigns them back into the corresponding
        places (see the generated-code sketch below). *)
    and translate_app (span : span) (otype : A.ty) (f : A.expr)
        (raw_args : A.expr list) (generic_args : B.generic_value list)
        (trait : (B.impl_expr * B.generic_value list) option) bounds_impls :
        B.expr =
      (* `otype` and `_otype` (below) are supposed to be the same
         type, but sometimes `_otype` is less precise (i.e. an associated
         type while a concrete type is available) *)
      let arg_types, _otype =
        UA.Expect.arrow f.typ
        |> Option.value_or_thunk ~default:(fun _ ->
               Error.assertion_failure span "expected an arrow type here")
      in
      (* each input of `f` is either:
         - of type `&mut _` and then the value fed to f should either be a place or a "pure" expression;
         - of another type, and then the value can be anything.
      *)
      let args : ((Place.t, A.expr) Either.t * bool) list =
        (match List.zip arg_types raw_args with
        | Ok inputs -> inputs
        | _ -> Error.assertion_failure span "application: bad arity")
        |> List.map ~f:(fun (typ, (arg : A.expr)) ->
               if UA.Expect.mut_ref typ |> Option.is_some then
                 (* the argument of the function is mutable *)
                 let v =
                   expect_mut_borrow_of_place_or_pure_expr arg
                   |> Option.value_or_thunk ~default:(fun _ ->
                          Error.raise { kind = ExpectedMutRef; span = arg.span })
                 in
                 (v, true)
               else (Either.second arg, false))
      in
      (* `mutargs`: all mutable borrows fed to `f` *)
      let mutargs : (Place.t, A.expr) Either.t list =
        args |> List.filter ~f:snd |> List.map ~f:fst
      in
      match mutargs with
      | [] ->
          (* there is no mutation, we can reconstruct the expression right away *)
          let f, typ = (dexpr f, dty span otype) in
          let args = List.map ~f:dexpr raw_args in
          B.
            {
              e = B.App { f; args; generic_args; trait; bounds_impls };
              typ;
              span;
            }
      | _ -> (
          (* TODO: when LHS are better (issue #222), compress `p1 = tmp1; ...; pN = tmpN` in `(p1...pN) = ...` *)
          (* we are generating:
             ```
             let (tmp1, …, tmpN, out) = ⟨f⟩(⟨…un-&mut args⟩);
             p1 = tmp1;
                 …
             pN = tmpN;
             out
             ```
          *)
          let ty_of_either : (Place.t, A.expr) Either.t -> A.ty = function
            | First p -> p.typ
            | Second e -> e.typ
          in
          let span_of_either : (Place.t, A.expr) Either.t -> span = function
            | First p -> p.span
            | Second e -> e.span
          in
          let b_ty_of_either : (Place.t, A.expr) Either.t -> B.ty = function
            | First p -> dty p.span p.typ
            | Second e -> dty e.span e.typ
          in

          (* For each mutable argument: a temporary binder paired with the LHS
             it will be written back to (places only; pure expressions get
             [None]), plus its translated type and span. *)
          let mutargs : ((local_ident * B.lhs) option * (B.ty * span)) list =
            let to_ident_lhs i = function
              | Either.First (place : Place.t) ->
                  let var =
                    Local_ident.
                      { id = mk_id Expr 0; name = "tmp" ^ Int.to_string i }
                  in
                  Some (var, place_to_lhs place)
              | _ -> None
            in
            let to_ty_span x =
              let span = span_of_either x in
              (dty span (ty_of_either x), span)
            in
            List.mapi ~f:(fun i -> to_ident_lhs i &&& to_ty_span) mutargs
          in

          let out_var = !out_var in
          let otype = dty f.span otype in
          (* Pattern `(tmp1, …, tmpN, out)`; `out` is omitted when the original
             result type is unit. *)
          let pat =
            let out =
              if UB.is_unit_typ otype then []
              else [ UB.make_var_pat out_var otype f.span ]
            in
            List.map
              ~f:(function
                | Some (var, _), (ty, span) -> UB.make_var_pat var ty span
                | None, (typ, span) -> UB.M.pat_PWild ~typ ~span)
              mutargs
            @ out
            |> UB.make_tuple_pat
          in
          (* The call `⟨f⟩(⟨…un-&mut args⟩)` — `f` is retyped to return the
             tuple of updated values and output. *)
          let f_call =
            let f : B.expr =
              let typ =
                B.TArrow (List.map ~f:(fst >> b_ty_of_either) args, pat.typ)
              in
              B.{ span = f.span; typ; e = dexpr' f.span f.e }
            in
            let unmut_args =
              args
              |> List.map
                   ~f:
                     ( fst >> function
                       | Either.First p -> Place.to_expr p
                       | Either.Second e -> e )
              |> List.map ~f:dexpr
            in
            B.
              {
                e =
                  App
                    { f; args = unmut_args; generic_args; trait; bounds_impls };
                typ = pat.typ;
                span = pat.span;
              }
          in
          (* when lhs type accepts tuple (issue #222), assigns will be an option instead of a list *)
          let assigns =
            let flatten (o, meta) = Option.map o ~f:Fn.(id &&& const meta) in
            List.filter_map ~f:flatten mutargs
            |> List.map ~f:(fun ((var, lhs), (typ, span)) ->
                   let e = B.{ e = LocalVar var; span; typ } in
                   let witness = Features.On.mutable_variable in
                   B.{ e = Assign { lhs; e; witness }; span; typ = UB.unit_typ })
          in
          (* TODO: this should be greatly simplified when `lhs` type will accept tuples (issue #222) *)
          match assigns with
          | [ { e = Assign { lhs; witness; _ }; span; typ } ]
            when UB.is_unit_typ otype ->
              (* Single mutated place and unit output: assign the call result
                 directly, no temporary needed. *)
              { e = Assign { lhs; e = f_call; witness }; span; typ }
              |> extract_vec_ref
          | _ ->
              let body =
                let init =
                  if UB.is_unit_typ otype then UB.unit_expr f.span
                  else B.{ typ = otype; span = f.span; e = LocalVar out_var }
                in
                assigns
                |> List.map ~f:extract_vec_ref
                |> List.fold_right ~init ~f:UB.make_seq
              in
              let r = UB.make_let pat f_call body in
              r)

    (** Rewrites an assignment whose LHS is a [LhsVecRef] into an assignment
        to the underlying LHS, converting the assigned value through
        [Alloc__slice__Impl__to_vec]. Any other expression passes through
        unchanged. *)
    and extract_vec_ref (expr : B.expr) : B.expr =
      match expr.e with
      | Assign { lhs = LhsVecRef { e = lhs; _ }; witness; e } ->
          let e = UB.call Alloc__slice__Impl__to_vec [ e ] e.span expr.typ in
          { expr with e = Assign { lhs; e; witness } }
      | _ -> expr

    (** Expression translation for every node except applications (those are
        intercepted by [dexpr_unwrapped] and routed to [translate_app]).
        Mutable borrows reaching this point are rejected. *)
    and dexpr' (span : span) (e : A.expr') : B.expr' =
      match e with
      | [%inline_arms "dexpr'.*" - Borrow - App] -> auto
      | Borrow { kind; e; witness } ->
          Borrow
            {
              kind =
                (match kind with
                | Mut _ -> Error.raise { kind = UnallowedMutRef; span }
                | Shared -> B.Shared
                | Unique -> B.Unique);
              e = dexpr e;
              witness;
            }
      | App _ ->
          Error.assertion_failure span
            "should have been handled by dexpr_unwrapped"

    (** Entry point for expressions: applications go through [translate_app]
        (which encodes `&mut` arguments), everything else through [dexpr']. *)
    and dexpr_unwrapped (expr : A.expr) : B.expr =
      let span = expr.span in
      match expr.e with
      | App { f; args; generic_args; trait; bounds_impls } ->
          let dgeneric_args = List.map ~f:(dgeneric_value span) in
          let generic_args = dgeneric_args generic_args in
          let trait = Option.map ~f:(dimpl_expr span *** dgeneric_args) trait in
          let bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls in
          translate_app span expr.typ f args generic_args trait bounds_impls
      | _ ->
          let e = dexpr' span expr.e in
          B.{ e; typ = dty expr.span expr.typ; span = expr.span }
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs
    dgeneric_param + dgeneric_constraint + dgenerics + dparam + dvariant
    + dtrait_item' + dimpl_item']

    (** Item translation: before delegating to the inlined [ditem'] body,
      refresh [out_var] with an identifier that is fresh with respect to every
      local ident occurring in the item. *)
    let rec ditem' span (item : A.item') : B.item' =
      let vars = UA.Reducers.collect_local_idents#visit_item' () item in
      out_var := UA.fresh_local_ident_in (Set.to_list vars) "out";
      [%inline_body ditem'] span item
    [@@inline_ands "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_direct_and_mut.mli
================================================
open! Prelude

(** Functor signature for the phase that removes `&mut` passing: the output
    feature set turns off mutable references and turns on mutable variables
    together with nontrivial/arbitrary left-hand sides. Raw pointers and
    mutable pointers must already be rejected by the input feature set. *)
module Make
    (F :
      Features.T
        with type raw_pointer = Features.Off.raw_pointer
         and type mutable_pointer = Features.Off.mutable_pointer) : sig
  include module type of struct
    module FB = struct
      include F
      include Features.On.Mutable_variable
      include Features.On.Arbitrary_lhs
      include Features.On.Nontrivial_lhs
      include Features.Off.Mutable_reference
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
    module FA = F
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_drop_blocks.ml
================================================
open! Prelude

(** Phase turning off the [Block] feature: every block expression is replaced
    by the expression it wraps. *)
module%inlined_contents Make (F : Features.T) = struct
  open Ast
  module FA = F

  (* Output features: same as the input, minus blocks. *)
  module FB = struct
    include F
    include Features.Off.Block
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module UA = Ast_utils.Make (F)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module S = struct
      include Features.SUBTYPE.Id
    end

    [%%inline_defs dmutability + dsafety_kind]

    (* All arms are inlined verbatim except [Block], which is unwrapped. The
       expression is first rebuilt with a dummy [never_typ] so it can be fed
       to [unbox_underef_expr]; only the [.e] field of the result is
       inspected. *)
    let rec dexpr' (span : span) (e : A.expr') : B.expr' =
      match (UA.unbox_underef_expr { e; span; typ = UA.never_typ }).e with
      | [%inline_arms "dexpr'.*" - Block] -> auto
      | Block { e; _ } -> (dexpr e).e
    [@@inline_ands bindings_of dexpr - dexpr']

    [%%inline_defs "Item.*"]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_drop_blocks.mli
================================================
open! Prelude

(** Only use this phase if you are also rejecting [unsafe] *)
(** Functor signature for the block-dropping phase: the output feature set is
    the input one with [Block] turned off. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.Off.Block
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_drop_match_guards.ml
================================================
(* This phase removes guards from pattern matchings. It rewrites
   them using only pattern matchings without guards.
   See #806 and the example in tests/guards. *)

(* Rewrite example: *)
(*
    match x {
        None => 0,
        Some(v) if let Ok(y) = v => y,
        Some(Err(y)) => y,
        _ => 1,
    }
*)
(* Becomes *)
(*
    match x {
        None => 0,
        _ => match match x {
            Some(v) => match v {
                Ok(y) => Some(y),
                _ => None,
            },
            _ => None,
        } {
            Some(y) => y,
            None => match x {
                Some(Err(y)) => y,
                _ => 1,
            },
        },
    }
*)

open! Prelude

module%inlined_contents Make (F : Features.T) = struct
  open Ast
  module FA = F

  (* Output features: match guards are no longer available. *)
  module FB = struct
    include F
    include Features.Off.Match_guard
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module UA = Ast_utils.Make (F)
  module UB = Ast_utils.Make (FB)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    (* This phase adds no features: identity subtyping suffices. *)
    module S = struct
      include Features.SUBTYPE.Id
    end

    [%%inline_defs dmutability + dsafety_kind]

    (** Builds a [Match] over [scrutinee] and [arms], collapsing it to the arm
        body whenever [arms] is a single wildcard arm *synthesized by this
        phase* — i.e. unless the original match already consisted of a lone
        wildcard arm, which must be kept as-is. *)
    let maybe_simplified_match scrutinee ?(original_arms : A.arm list = [])
        (arms : B.arm list) : B.expr' =
      let original_is_single_wildcard =
        match original_arms with
        | [ { arm = { arm_pat = { p = PWild; _ }; _ }; _ } ] -> true
        | _ -> false
      in
      match arms with
      | [ { arm = { body; arm_pat = { p = PWild; _ }; _ }; _ } ]
        when not original_is_single_wildcard ->
          body.e
      | _ -> Match { scrutinee; arms }

    (** Expression translation: only [Match] needs work. The scrutinee is
        translated once and shared between [transform_arms] and the final
        match (previously it was translated twice, duplicating the work and —
        if the translation mints fresh identifiers — producing two distinct
        copies). *)
    let rec dexpr' (span : span) (expr : A.expr') : B.expr' =
      match expr with
      | [%inline_arms "dexpr'.*" - Match] -> auto
      | Match { scrutinee; arms } ->
          let scrutinee = dexpr scrutinee in
          (* Arms are processed from last to first so each guarded arm can
             fall back to the arms below it. *)
          let new_arms = transform_arms scrutinee (List.rev arms) [] in
          maybe_simplified_match ~original_arms:arms scrutinee new_arms

    (** Processes the arms of a match in reverse source order, replacing each
        guarded arm by a wildcard arm that encodes the guard with nested
        matches over [Option]. [remaining] holds the arms still to process (in
        reverse order); [treated] accumulates the already-rewritten arms that
        come after the current one in the source. *)
    and transform_arms (scrutinee : B.expr) (remaining : A.arm list)
        (treated : B.arm list) : B.arm list =
      match remaining with
      | [] -> treated
      | { arm = { arm_pat; body; guard = None }; span } :: remaining ->
          let new_arm : B.arm = UB.M.arm (dpat arm_pat) (dexpr body) ~span in
          transform_arms scrutinee remaining (new_arm :: treated)
      (* Matches an arm `arm_pat if let lhs = rhs => body` *)
      (* and rewrites it to `_ => match option_match { Some(x) => x, None => <rest> }` *)
      (* where `option_match` is `match scrutinee { arm_pat => guard_match, _ => None }` *)
      (* and `guard_match` is `match rhs { lhs => Some(body), _ => None }` *)
      (* and `<rest>` is the match over `treated`, i.e. the arms coming after
         this one in the source (already rewritten, as the arm list is
         processed in reverse). *)
      | {
          arm =
            {
              arm_pat;
              body;
              guard = Some { guard = IfLet { lhs; rhs; _ }; span = guard_span };
            };
          span;
        }
        :: remaining ->
          let module MS = (val UB.M.make guard_span) in
          let result_typ = dty span body.typ in
          let opt_result_typ : B.ty =
            TApp
              {
                ident = Global_ident.of_name ~value:false Core__option__Option;
                args = [ GType result_typ ];
              }
          in
          (* Builds a `Some v` / `None` expression at type [opt_result_typ]. *)
          let mk_opt_expr (value : B.expr option) : B.expr =
            let (name : Concrete_ident.name), args =
              match value with
              | Some v -> (Core__option__Option__Some, [ v ])
              | None -> (Core__option__Option__None, [])
            in
            UB.call_Constructor name false args guard_span opt_result_typ
          in

          (* Builds a `Some b` / `None` pattern at type [opt_result_typ]. *)
          let mk_opt_pattern (binding : B.pat option) : B.pat =
            let (name : Concrete_ident.name), (fields : B.field_pat list) =
              match binding with
              | Some b ->
                  ( Core__option__Option__Some,
                    [ { field = `TupleField (0, 1); pat = b } ] )
              | None -> (Core__option__Option__None, [])
            in
            MS.pat_PConstruct
              ~constructor:(Global_ident.of_name ~value:true name)
              ~fields ~is_record:false ~is_struct:false ~typ:opt_result_typ
          in

          let expr_none = mk_opt_expr None in

          (* This is the nested pattern matching equivalent to the guard *)
          (* Example: .. if let pat = rhs => body *)
          (* Rewrites with match rhs { pat => Some(body), _ => None }*)
          let guard_match : B.expr =
            MS.expr_Match ~scrutinee:(dexpr rhs)
              ~arms:
                [
                  UB.M.arm (dpat lhs) (mk_opt_expr (Some (dexpr body))) ~span;
                  MS.arm (MS.pat_PWild ~typ:(dty guard_span lhs.typ)) expr_none;
                ]
              ~typ:opt_result_typ
          in

          (* `r` corresponds to `option_match` in the example above *)
          let r : B.expr =
            MS.expr_Match ~scrutinee
              ~arms:
                [
                  MS.arm (dpat arm_pat) guard_match;
                  MS.arm
                    (UB.M.pat_PWild
                       ~typ:(dty guard_span arm_pat.typ)
                       ~span:guard_span)
                    expr_none;
                ]
              ~typ:opt_result_typ
          in
          let id = UB.fresh_local_ident_in [] "x" in
          let new_body : B.expr =
            MS.expr_Match ~scrutinee:r
              ~arms:
                [
                  MS.arm
                    (mk_opt_pattern
                       (Some
                          (MS.pat_PBinding ~mut:Immutable ~mode:ByValue ~var:id
                             ~typ:result_typ ~subpat:None)))
                    { e = LocalVar id; span; typ = result_typ };
                  MS.arm (mk_opt_pattern None)
                    {
                      e = maybe_simplified_match scrutinee treated;
                      span = guard_span;
                      typ = result_typ;
                    };
                ]
              ~typ:result_typ
          in
          let new_arm : B.arm =
            UB.M.arm
              (UB.M.pat_PWild ~typ:(dty span arm_pat.typ) ~span)
              new_body ~span
          in
          (* The arms below this one are absorbed into [new_body]'s fallback,
             so the accumulator restarts from this single wildcard arm. *)
          transform_arms scrutinee remaining [ new_arm ]
    [@@inline_ands bindings_of dexpr - dexpr' - darm - darm' - dguard - dguard']

    [%%inline_defs "Item.*"]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_drop_match_guards.mli
================================================
open! Prelude

(** Functor signature for the guard-dropping phase: the output feature set is
    the input one with [Match_guard] turned off. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.Off.Match_guard
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_drop_references.ml
================================================
open! Prelude

module%inlined_contents Make
    (F :
      Features.T
        with type raw_pointer = Features.Off.raw_pointer
         and type mutable_reference = Features.Off.mutable_reference) =
struct
  open Ast
  module FA = F

  (* Output features: references, lifetimes and mutable pointers are gone. *)
  module FB = struct
    include F
    include Features.Off.Mutable_pointer
    include Features.Off.Lifetime
    include Features.Off.Reference
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module UA = Ast_utils.Make (F)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module S = struct
      include Features.SUBTYPE.Id
    end

    [%%inline_defs dsafety_kind]

    (** Type translation: `Box<T>` and `&T` are erased to `T`, and lifetime
        arguments are dropped from type applications. Mutable references
        cannot occur here (see the functor constraint): refutation case. *)
    let rec dty (span : span) (t : A.ty) : B.ty =
      match t with
      | [%inline_arms "dty.*" - TApp - TRef] -> auto
      | TApp { ident; args = [ GType boxed_ty; _ ] }
        when Global_ident.eq_name Alloc__boxed__Box ident ->
          dty span boxed_ty
      | TApp { ident; args } ->
          TApp { ident; args = List.filter_map ~f:(dgeneric_value span) args }
      | TRef { typ; mut = Immutable; _ } -> dty span typ
      | TRef _ -> .

    (** Generic arguments: lifetimes are dropped ([None]), everything else is
        kept. *)
    and dgeneric_value (span : span) (g : A.generic_value) :
        B.generic_value option =
      match g with
      | GLifetime _ -> None
      | [%inline_arms "dgeneric_value.*" - GLifetime] ->
          map (Option.some : B.generic_value -> _)

    (* Trait goals keep their trait identifier but lose lifetime arguments. *)
    and dtrait_goal (span : span) (r : A.trait_goal) : B.trait_goal =
      {
        trait = r.trait;
        args = List.filter_map ~f:(dgeneric_value span) r.args;
      }

    and ddyn_trait_goal (span : span) (r : A.dyn_trait_goal) : B.dyn_trait_goal
        =
      {
        trait = r.trait;
        non_self_args = List.filter_map ~f:(dgeneric_value span) r.non_self_args;
      }

    (** Patterns: dereference patterns are flattened away and every binding
        becomes a by-value binding. *)
    and dpat' (span : span) (p : A.pat') : B.pat' =
      match p with
      | [%inline_arms "dpat'.*" - PBinding - PDeref] -> auto
      | PBinding { mut; var : Local_ident.t; typ; subpat; _ } ->
          PBinding
            {
              mut;
              mode = ByValue;
              var;
              typ = dty span typ;
              subpat =
                Option.map ~f:(fun (p, as_pat) -> (dpat p, as_pat)) subpat;
            }
      | PDeref { subpat; _ } -> (dpat subpat).p

    (** Expressions: borrows are erased (`&e` becomes `e`), boxing/deref
        wrappers are first normalized away by [unbox_underef_expr] (the
        [never_typ] is a dummy — only the [.e] field of the result is
        inspected), and lifetime generic arguments are filtered out of
        applications. *)
    and dexpr' (span : span) (e : A.expr') : B.expr' =
      match (UA.unbox_underef_expr { e; span; typ = UA.never_typ }).e with
      | [%inline_arms
          If + Literal + Array + Block + QuestionMark + "dexpr'.Quote"] ->
          auto
      | Construct { constructor; is_record; is_struct; fields; base } ->
          Construct
            {
              constructor;
              is_record;
              is_struct;
              fields = List.map ~f:(fun (i, e) -> (i, dexpr e)) fields;
              base = Option.map ~f:(dexpr *** S.construct_base span) base;
            }
      | Match { scrutinee; arms } ->
          Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }
      | Let { monadic; lhs; rhs; body } ->
          Let
            {
              monadic = Option.map ~f:(dsupported_monads span *** Fn.id) monadic;
              lhs = dpat lhs;
              rhs = dexpr rhs;
              body = dexpr body;
            }
      | LocalVar local_ident -> LocalVar local_ident
      | GlobalVar global_ident -> GlobalVar global_ident
      | Ascription { e = e'; typ } ->
          Ascription { e = dexpr e'; typ = dty span typ }
      | MacroInvokation { macro; args; witness } ->
          MacroInvokation { macro; args; witness }
      | Assign { lhs; e; witness } ->
          Assign { lhs = dlhs span lhs; e = dexpr e; witness }
      | [%inline_arms Loop + Continue + Break] ->
          auto (* TODO: inline more arms! *)
      | Return { e; witness } -> Return { e = dexpr e; witness }
      | Borrow { e; _ } -> (dexpr e).e
      | EffectAction { action; argument } ->
          EffectAction { action; argument = dexpr argument }
      | Closure { params; body; captures } ->
          Closure
            {
              params = List.map ~f:dpat params;
              body = dexpr body;
              captures = List.map ~f:dexpr captures;
            }
      | App { f; args; generic_args; trait; bounds_impls } ->
          let f = dexpr f in
          let args = List.map ~f:dexpr args in
          let dgeneric_args = List.filter_map ~f:(dgeneric_value span) in
          let trait = Option.map ~f:(dimpl_expr span *** dgeneric_args) trait in
          let generic_args = dgeneric_args generic_args in
          let bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls in
          App { f; args; generic_args; trait; bounds_impls }
      | _ -> .
    [@@inline_ands bindings_of dexpr - dbinding_mode]

    (** Generic parameters: lifetime parameters are dropped entirely. *)
    let dgeneric_param (_span : span)
        ({ ident; kind; attrs; span } : A.generic_param) :
        B.generic_param option =
      let ( let* ) x f = Option.bind ~f x in
      let* kind =
        match kind with
        | GPLifetime _ -> None
        | GPType -> Some B.GPType
        | GPConst { typ } -> Some (B.GPConst { typ = dty span typ })
      in
      Some B.{ ident; kind; attrs; span }

    and dprojection_predicate (span : span) (r : A.projection_predicate) :
        B.projection_predicate =
      {
        impl = dimpl_expr span r.impl;
        assoc_item = r.assoc_item;
        typ = dty span r.typ;
      }

    (** Generic constraints: lifetime constraints are dropped. *)
    let dgeneric_constraint (span : span) (p : A.generic_constraint) :
        B.generic_constraint option =
      match p with
      | GCLifetime _ -> None
      | GCType idents -> Some (B.GCType (dimpl_ident span idents))
      | GCProjection projection ->
          Some (B.GCProjection (dprojection_predicate span projection))

    (** Translates a generics clause; lifetime parameters and constraints are
        silently dropped by [dgeneric_param] / [dgeneric_constraint]. *)
    let dgenerics (span : span) (g : A.generics) : B.generics =
      let params = List.filter_map g.params ~f:(dgeneric_param span) in
      let constraints =
        List.filter_map g.constraints ~f:(dgeneric_constraint span)
      in
      { params; constraints }

    [%%inline_defs dparam + dvariant + dtrait_item + dimpl_item]

    let rec ditem = [%inline_body ditem]
    and ditem_unwrapped = [%inline_body ditem_unwrapped]

    (** Items: every arm is inlined except [Impl], whose trait arguments and
        parent bounds need lifetime filtering. *)
    and ditem' (span : span) (item : A.item') : B.item' =
      match item with
      | [%inline_arms "ditem'.*" - Impl] -> auto
      | Impl
          {
            generics;
            self_ty;
            of_trait = of_trait_id, of_trait_generics;
            items;
            parent_bounds;
            safety;
          } ->
          B.Impl
            {
              generics = dgenerics span generics;
              self_ty = dty span self_ty;
              of_trait =
                ( of_trait_id,
                  List.filter_map ~f:(dgeneric_value span) of_trait_generics );
              items = List.map ~f:dimpl_item items;
              parent_bounds =
                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;
              safety = dsafety_kind span safety;
            }

    [%%inline_defs ditems]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_drop_references.mli
================================================
open! Prelude

(* Interface of the DropReferences phase: requires raw pointers and mutable
   references to already be switched off in the input feature set [F], and
   produces a feature set with mutable pointers, lifetimes and (shared)
   references switched off as well. *)
module Make
    (F :
      Features.T
        with type raw_pointer = Features.Off.raw_pointer
         and type mutable_reference = Features.Off.mutable_reference) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.Off.Mutable_pointer
      include Features.Off.Lifetime
      include Features.Off.Reference
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_drop_return_break_continue.ml
================================================
(** This phase removes `return`s in exit position. Inside loops, it replaces
    `return`, `break` and `continue` (in exit position) by their encoding in the
    `ControlFlow` enum. It replaces any other expression in exit position by an
    equivalent `continue`. This phase should come after `RewriteControlFlow` to
    ensure all control flow is in exit position. *)

open! Prelude

module%inlined_contents Make (F : Features.T) = struct
  open Ast
  module FA = F

  module FB = struct
    include F
    include Features.On.Fold_like_loop
    include Features.Off.Early_exit
    include Features.Off.Break
    include Features.Off.Continue
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
    end

    (* break_type is "by default" unit since there always is a (possibly implicit) break type *)
    type loop_info = { return_type : A.ty option; break_type : A.ty option }

    let has_return =
      (* Reducer that scans an expression for `return`/`break` and records
         their payload types; the result decides which `ControlFlow`
         encoding (if any) the enclosing loop needs (see [dexpr']). *)
      let module Visitors = Ast_visitors.Make (F) in
      object (self)
        inherit [_] Visitors.reduce as super
        method zero = { return_type = None; break_type = None }

        (* Merge two findings, keeping the first type seen on each side. *)
        method plus li1 li2 =
          {
            return_type = Option.first_some li1.return_type li2.return_type;
            break_type = Option.first_some li1.break_type li2.break_type;
          }

        method! visit_expr' () e =
          match e with
          | Return { e; _ } -> { return_type = Some e.typ; break_type = None }
          | Break { e; _ } -> { return_type = None; break_type = Some e.typ }
          (* We should avoid catching breaks of a nested
             loops as they could have different types. *)
          | Loop { body; _ } ->
              {
                return_type = (self#visit_expr () body).return_type;
                break_type = None;
              }
          | _ -> super#visit_expr' () e
      end

    let visitor =
      let module Visitors = Ast_visitors.Make (F) in
      object (self)
        inherit [_] Visitors.map as _super

        (* [in_loop] is [None] outside loops; inside a loop it carries the
           loop's control-flow info paired with the accumulator type. *)
        method! visit_expr (in_loop : (loop_info * A.ty) option) e =
          let span = e.span in
          match (e.e, in_loop) with
          | Return { e; _ }, None -> e
          (* we know [e] is on an exit position: the return is
             thus useless, we can skip it *)
          | Let { monadic = None; lhs; rhs; body }, _ ->
              let body = self#visit_expr in_loop body in
              {
                e with
                e = Let { monadic = None; lhs; rhs; body };
                typ = body.typ;
              }
              (* If a let expression is an exit node, then it's body
                 is as well *)
          | Match { scrutinee; arms }, _ ->
              let arms = List.map ~f:(self#visit_arm in_loop) arms in
              let typ =
                match arms with { arm; _ } :: _ -> arm.body.typ | [] -> e.typ
              in
              { e with e = Match { scrutinee; arms }; typ }
          | If { cond; then_; else_ }, _ ->
              let then_ = self#visit_expr in_loop then_ in
              let else_ = Option.map ~f:(self#visit_expr in_loop) else_ in
              { e with e = If { cond; then_; else_ }; typ = then_.typ }
          (* Inside a loop: encode `return`/`break`/`continue` using the
             `ControlFlow` constructors. *)
          | Return { e; _ }, Some ({ return_type; break_type }, acc_type) ->
              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e
                ~acc:{ e with typ = acc_type } `Return
          | ( Break { e; acc = Some (acc, _); _ },
              Some ({ return_type; break_type }, _) ) ->
              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e ~acc
                `Break
          | ( Continue { acc = Some (acc, _); _ },
              Some ({ return_type = None; break_type = None }, _) ) ->
              acc
          | ( Continue { acc = Some (acc, _); _ },
              Some ({ return_type; break_type }, _) ) ->
              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc
                `Continue
          (* Any other exit expression of a loop that has control flow is
             wrapped as an implicit `continue`. *)
          | _, Some ({ return_type; break_type }, _)
            when Option.is_some return_type || Option.is_some break_type ->
              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc:e
                `Continue
          | _ -> e
        (** The invariant here is that [visit_expr] is called only on
            expressions that are on exit positions. [visit_expr] is first called
            on root expressions, which are (by definition) exit nodes. Then,
            [visit_expr] itself makes recursive calls to sub expressions that
            are themselves in exit nodes. **)
      end

    let closure_visitor =
      (* Closure bodies are independent control-flow scopes: restart the
         exit-position rewriting with [in_loop = None] inside each closure. *)
      let module Visitors = Ast_visitors.Make (F) in
      object
        inherit [_] Visitors.map as super

        method! visit_expr' () e =
          match e with
          | Closure ({ body; _ } as closure) ->
              Closure { closure with body = visitor#visit_expr None body }
          | _ -> super#visit_expr' () e
      end

    [%%inline_defs dmutability + dsafety_kind]

    let rec dexpr' (span : span) (expr : A.expr') : B.expr' =
      match expr with
      | [%inline_arms "dexpr'.*" - Return - Break - Continue - Loop] -> auto
      | Return _ | Break _ | Continue _ ->
          Error.assertion_failure span
            "Return/Break/Continue are expected to be gone as this point"
      | Loop { body; kind; state; label; witness; _ } ->
          (* Decide which `ControlFlow` encoding the loop needs based on the
             returns/breaks found by [has_return] in its body. *)
          let control_flow_type = has_return#visit_expr () body in
          let control_flow =
            match control_flow_type with
            | { return_type = Some _; _ } ->
                Some (B.BreakOrReturn, Features.On.fold_like_loop)
            | { break_type = Some _; _ } ->
                Some (BreakOnly, Features.On.fold_like_loop)
            | _ -> None
          in
          (* If the body already has type `ControlFlow<_, C>`, the loop's
             accumulator type is the continue type `C`; otherwise it is the
             body type itself. *)
          let acc_type =
            match body.typ with
            | TApp { ident; args = [ GType _; GType continue_type ] }
              when Ast.Global_ident.equal ident
                     (Ast.Global_ident.of_name ~value:false
                        Core__ops__control_flow__ControlFlow) ->
                continue_type
            | _ -> body.typ
          in
          let body =
            visitor#visit_expr (Some (control_flow_type, acc_type)) body
            |> dexpr
          in
          let kind = dloop_kind span kind in
          let state = Option.map ~f:(dloop_state span) state in
          Loop { body; control_flow; kind; state; label; witness }
    [@@inline_ands bindings_of dexpr - dexpr']

    [%%inline_defs "Item.*" - ditems]

    let ditems (items : A.item list) : B.item list =
      (* Rewrite control flow (starting with no loop context), then fix
         closure bodies, then run the regular item translation. *)
      List.concat_map items
        ~f:(visitor#visit_item None >> closure_visitor#visit_item () >> ditem)
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_drop_return_break_continue.mli
================================================
(** This phase transforms `return e` expressions into `e` when `return e` is on
    an exit position. It should come after phase `RewriteControlFlow` and thus
    eliminate all `return`s. Inside loops it rewrites `return`, `break` and
    `continue` as their equivalent in terms of the `ControlFlow` wrapper that
    will be handled by the specific fold operators introduced by phase
    `FunctionalizeLoops`. *)

module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (* Output feature set: fold-like loops are enabled; `return`, `break`
       and `continue` are disabled. *)
    module FB = struct
      include F
      include Features.On.Fold_like_loop
      include Features.Off.Early_exit
      include Features.Off.Break
      include Features.Off.Continue
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_drop_sized_trait.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Is this concrete identifier `core::marker::Sized`? *)
      let ident_is_sized : Ast.concrete_ident -> bool =
        Concrete_ident.eq_name Core__marker__Sized

      (* Visitor that strips every `Sized` bound: from generics constraints,
         from impl parent bounds, and from trait/impl associated types. *)
      let visitor =
        let keep (ii : impl_ident) = ident_is_sized ii.goal.trait |> not in
        object
          inherit [_] Visitors.map as super

          method! visit_generics () generics =
            let generics = super#visit_generics () generics in
            {
              generics with
              constraints =
                List.filter
                  ~f:(function GCType ii -> keep ii | _ -> true)
                  generics.constraints;
            }

          method! visit_item' () item' =
            let item' = super#visit_item' () item' in
            match item' with
            | Impl payload ->
                Impl
                  {
                    payload with
                    parent_bounds =
                      List.filter ~f:(snd >> keep) payload.parent_bounds;
                  }
            | _ -> item'

          method! visit_trait_item' () ti' =
            let ti' = super#visit_trait_item' () ti' in
            match ti' with
            | TIType impl_idents -> TIType (List.filter ~f:keep impl_idents)
            | _ -> ti'

          method! visit_impl_item' () ii' =
            let ii' = super#visit_impl_item' () ii' in
            match ii' with
            | IIType payload ->
                IIType
                  {
                    payload with
                    parent_bounds =
                      List.filter ~f:(snd >> keep) payload.parent_bounds;
                  }
            | _ -> ii'
        end

      let ditems =
        List.filter ~f:(fun item ->
            match item.v with
            (* Drop any implementation of the `Sized` trait. *)
            | Impl { of_trait = tr, _; _ } when ident_is_sized tr -> false
            | _ -> true)
        >> List.map ~f:(visitor#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_drop_sized_trait.mli
================================================
(** This phase removes any occurrence of the `core::marker::Sized` trait. This
    trait appears a lot, but is generally not very useful in our backends. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_explicit_conversions.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)
      module UA = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.MakeBase (Error)

      (* Rewrites `Deref::deref(v) : &[T]` applied to a `Vec` into an
         explicit `Vec::as_slice(v)` call, so that the Vec-to-slice
         conversion survives the later `DropReferences` phase (which erases
         the reference that performed the implicit coercion). *)
      let explicit_conversions =
        object
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            (* Visit children first, then match the rewritten node. *)
            match super#visit_expr () e with
            | {
             e =
               App
                 {
                   f = { e = GlobalVar f; _ };
                   args = [ ({ typ = TApp { ident; _ }; _ } as inner) ];
                   _;
                 };
             typ = TSlice _ as t;
             span;
            }
            (* Only fire for `Deref::deref` calls on a `Vec` whose result
               type is a slice. *)
              when Ast.Global_ident.eq_name Core__ops__deref__Deref__deref f
                   && Ast.Global_ident.eq_name Alloc__vec__Vec ident ->
                UA.call Alloc__vec__Impl_1__as_slice [ inner ] span t
            | e -> e
        end

      let ditems = List.map ~f:(explicit_conversions#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_explicit_conversions.mli
================================================
(** This phase adds explicit conversions from Vec to slice, instead of
    conversions by taking references, which are erased by the phase
    DropReferences. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_functionalize_loops.ml
================================================
open! Prelude

module%inlined_contents Make
    (F :
      Features.T
        with type continue = Features.Off.continue
         and type early_exit = Features.Off.early_exit
         and type break = Features.Off.break) =
struct
  open Ast
  module FA = F

  module FB = struct
    include F
    include Features.Off.Loop
    include Features.Off.For_loop
    include Features.Off.While_loop
    include Features.Off.For_index_loop
    include Features.Off.State_passing_loop
    include Features.Off.Fold_like_loop
    include Features.Off.Continue
    include Features.Off.Early_exit
    include Features.Off.Break
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)
    module Visitors = Ast_visitors.Make (F)

    module S = struct
      include Features.SUBTYPE.Id
    end

    (* Kind of a hax loop annotation: an invariant (optionally binding the
       loop index pattern) or a decreases/variant measure. *)
    type loop_annotation_kind =
      | LoopInvariant of { index_pat : B.pat option; invariant : B.expr }
      | LoopVariant of B.expr

    (* A loop body together with the annotation (if any) extracted from it. *)
    type loop_annotation = {
      body : B.expr;
      annotation : loop_annotation_kind option;
    }

    let extract_loop_annotation (body : B.expr) : loop_annotation =
      (* Loop invariants/variants are encoded by hax-lib macros as a leading
         `let _ = hax_lib::...(...) in body`. Peel one such binding off and
         return the remaining body together with the decoded annotation. *)
      let rhs_body =
        let* (e_let : UB.D.expr_Let) = UB.D.expr_Let body in
        let*? _ = Option.is_none e_let.monadic in
        let* _ = UB.D.pat_PWild e_let.lhs in
        let* app = UB.D.expr_App e_let.rhs in
        let* f = UB.D.expr_GlobalVar app.f in
        Some (f, app.args, e_let.body)
      in
      match rhs_body with
      (* `_internal_loop_invariant` carries a closure binding the loop
         index. *)
      | Some
          ( f,
            [ { e = Closure { params = [ pat ]; body = invariant; _ }; _ } ],
            body )
        when Global_ident.eq_name Hax_lib___internal_loop_invariant f ->
          {
            body;
            annotation =
              Some (LoopInvariant { index_pat = Some pat; invariant });
          }
      | Some (f, [ invariant ], body)
        when Global_ident.eq_name Hax_lib___internal_while_loop_invariant f ->
          {
            body;
            annotation = Some (LoopInvariant { index_pat = None; invariant });
          }
      (* A decreases clause: the single argument is the variant measure. *)
      | Some (f, [ invariant ], body)
        when Global_ident.eq_name Hax_lib___internal_loop_decreases f ->
          { body; annotation = Some (LoopVariant invariant) }
      | _ -> { body; annotation = None }

    let expect_invariant_variant (annotation1 : loop_annotation_kind option)
        (annotation2 : loop_annotation_kind option) :
        loop_annotation_kind option * loop_annotation_kind option =
      (* Order a pair of extracted loop annotations so that the (possible)
         invariant comes first and the (possible) variant second: when the
         first annotation is a variant, the pair is swapped. *)
      let is_variant = function Some (LoopVariant _) -> true | _ -> false in
      if is_variant annotation1 then (annotation2, annotation1)
      else (annotation1, annotation2)

    type iterator =
      | Range of { start : B.expr; end_ : B.expr }
      | Slice of B.expr
      | ChunksExact of { size : B.expr; slice : B.expr }
      | Enumerate of iterator
      | StepBy of { n : B.expr; it : iterator }
    [@@deriving show]

    let rec as_iterator (e : B.expr) : iterator option =
      (* Recognize a literal `start..end` range constructor; anything else
         is delegated to the method-chain recognizer below. *)
      match e.e with
      | Construct
          {
            constructor = `Concrete range_ctor;
            is_record = true;
            is_struct = true;
            fields =
              [ (`Concrete start_field, start); (`Concrete end_field, end_) ];
            base = None;
          }
        when Concrete_ident.eq_name Core__ops__range__Range__start start_field
             && Concrete_ident.eq_name Core__ops__range__Range range_ctor
             && Concrete_ident.eq_name Core__ops__range__Range__end end_field ->
          Some (Range { start; end_ })
      | _ -> meth_as_iterator e

    (* Recognize iterator-producing method calls (`into_iter`, `iter`,
       `enumerate`, `step_by`, `chunks_exact`) and decode the chain into the
       [iterator] shape. *)
    and meth_as_iterator (e : B.expr) : iterator option =
      let* f, args =
        match e.e with
        | App { f = { e = GlobalVar f; _ }; args; _ } -> Some (f, args)
        | _ -> None
      in
      let f_eq n = Global_ident.eq_name n f in
      let one_arg () = match args with [ x ] -> Some x | _ -> None in
      let two_args () = match args with [ x; y ] -> Some (x, y) | _ -> None in
      if f_eq Core__iter__traits__iterator__Iterator__step_by then
        let* it, n = two_args () in
        let* it = as_iterator it in
        Some (StepBy { n; it })
      else if
        f_eq Core__iter__traits__collect__IntoIterator__into_iter
        || f_eq Core__slice__Impl__iter
      then
        (* `into_iter`/`iter` on a slice or array is the base case; on any
           other type, keep unwrapping the underlying expression. *)
        let* iterable = one_arg () in
        match iterable.typ with
        | TSlice _ | TArray _ -> Some (Slice iterable)
        | _ -> as_iterator iterable
      else if f_eq Core__iter__traits__iterator__Iterator__enumerate then
        let* iterable = one_arg () in
        let* iterator = as_iterator iterable in
        Some (Enumerate iterator)
      else if f_eq Core__slice__Impl__chunks_exact then
        let* slice, size = two_args () in
        Some (ChunksExact { size; slice })
      else None

    let fn_args_of_iterator (cf : A.cf_kind option) (it : iterator) :
        (Concrete_ident.name * B.expr list * B.ty) option =
      (* Map a recognized iterator shape (and the loop's control-flow kind)
         to a specialized fold operator from `Rust_primitives::hax::folds`,
         its leading arguments, and the type of the iteration variable.
         Returns [None] for shapes that have no specialized operator. *)
      let open Concrete_ident_generated in
      let usize = B.TInt { size = SSize; signedness = Unsigned } in
      match it with
      | Enumerate (ChunksExact { size; slice }) ->
          let fold_op =
            match cf with
            | Some BreakOrReturn ->
                Rust_primitives__hax__folds__fold_enumerated_chunked_slice_return
            | Some BreakOnly ->
                Rust_primitives__hax__folds__fold_enumerated_chunked_slice_cf
            | None -> Rust_primitives__hax__folds__fold_enumerated_chunked_slice
          in
          Some (fold_op, [ size; slice ], usize)
      | ChunksExact { size; slice } ->
          let fold_op =
            match cf with
            | Some BreakOrReturn ->
                Rust_primitives__hax__folds__fold_chunked_slice_return
            | Some BreakOnly ->
                Rust_primitives__hax__folds__fold_chunked_slice_cf
            | None -> Rust_primitives__hax__folds__fold_chunked_slice
          in
          Some (fold_op, [ size; slice ], usize)
      | Enumerate (Slice slice) ->
          let fold_op =
            match cf with
            | Some BreakOrReturn ->
                Rust_primitives__hax__folds__fold_enumerated_slice_return
            | Some BreakOnly ->
                Rust_primitives__hax__folds__fold_enumerated_slice_cf
            | None -> Rust_primitives__hax__folds__fold_enumerated_slice
          in
          Some (fold_op, [ slice ], usize)
      | StepBy { n; it = Range { start; end_ } } ->
          let fold_op =
            match cf with
            | Some BreakOrReturn ->
                Rust_primitives__hax__folds__fold_range_step_by_return
            | Some BreakOnly ->
                Rust_primitives__hax__folds__fold_range_step_by_cf
            | None -> Rust_primitives__hax__folds__fold_range_step_by
          in
          Some (fold_op, [ start; end_; n ], start.typ)
      | Range { start; end_ } ->
          let fold_op =
            match cf with
            | Some BreakOrReturn ->
                Rust_primitives__hax__folds__fold_range_return
            | Some BreakOnly -> Rust_primitives__hax__folds__fold_range_cf
            | None -> Rust_primitives__hax__folds__fold_range
          in
          Some (fold_op, [ start; end_ ], start.typ)
      | _ -> None

    [%%inline_defs dmutability + dsafety_kind]

    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      let span = expr.span in
      let module M = UB.M in
      let module MS = (val M.make span) in
      match expr.e with
      | Loop { body; kind = ForLoop { it; pat; _ }; state; control_flow; _ } ->
          (* `for` loops: the loop state (if any) becomes the fold
             accumulator; otherwise a unit accumulator is used. *)
          let bpat, init =
            match state with
            | Some { bpat; init; _ } -> (dpat bpat, dexpr init)
            | None ->
                let unit = UB.unit_expr span in
                (M.pat_PWild ~span ~typ:unit.typ, unit)
          in
          let body = dexpr body in
          let { body; annotation } = extract_loop_annotation body in
          let it = dexpr it in
          let pat = dpat pat in
          let fn : B.expr = UB.make_closure [ bpat; pat ] body body.span in
          let cf = Option.map ~f:fst control_flow in
          let f, args =
            match as_iterator it |> Option.bind ~f:(fn_args_of_iterator cf) with
            | Some (f, args, typ) ->
                (* TODO what happens if there is control flow? *)
                (* Specialized fold: pass the (possibly trivial `fun _ _ ->
                   true`) invariant closure alongside init and body. *)
                let invariant : B.expr =
                  let default =
                    let pat = MS.pat_PWild ~typ in
                    (pat, MS.expr_Literal ~typ:TBool (Bool true))
                  in
                  let pat, invariant =
                    match annotation with
                    | Some (LoopInvariant { index_pat = Some pat; invariant })
                      ->
                        (pat, invariant)
                    | _ -> default
                  in
                  UB.make_closure [ bpat; pat ] invariant invariant.span
                in
                (f, args @ [ invariant; init; fn ])
            | None ->
                (* Unrecognized iterator shape: fall back to a generic
                   fold operator. *)
                let fold : Concrete_ident.name =
                  match cf with
                  | Some BreakOrReturn ->
                      Rust_primitives__hax__folds__fold_return
                  | Some BreakOnly -> Rust_primitives__hax__folds__fold_cf
                  | None -> Core__iter__traits__iterator__Iterator__fold
                in
                (fold, [ it; init; fn ])
          in
          UB.call f args span (dty span expr.typ)
      | Loop { body; kind = WhileLoop { condition; _ }; state; control_flow; _ }
        ->
          let bpat, init =
            match state with
            | Some { bpat; init; _ } -> (dpat bpat, dexpr init)
            | None ->
                let unit = UB.unit_expr span in
                (M.pat_PWild ~span ~typ:unit.typ, unit)
          in
          let body = dexpr body in
          (* A while loop may carry up to two annotations (invariant and/or
             variant), each encoded as a leading let binding. *)
          let { body; annotation = annotation1 } =
            extract_loop_annotation body
          in
          let { body; annotation = annotation2 } =
            extract_loop_annotation body
          in
          let invariant, variant =
            expect_invariant_variant annotation1 annotation2
          in
          let invariant =
            match invariant with
            | Some (LoopInvariant { index_pat = None; invariant }) -> invariant
            | _ -> MS.expr_Literal ~typ:TBool (Bool true)
          in
          (* Default variant when no decreases clause is given: the
             constant `int` 0. *)
          let variant =
            match variant with
            | Some (LoopVariant variant) -> variant
            | _ ->
                let kind = { size = S32; signedness = Unsigned } in
                let e =
                  UB.M.expr_Literal ~typ:(TInt kind) ~span:body.span
                    (Int { value = "0"; negative = false; kind })
                in
                UB.call Rust_primitives__hax__int__from_machine [ e ] e.span
                  (TApp
                     {
                       ident =
                         `Concrete
                           (Concrete_ident.of_name ~value:false
                              Hax_lib__int__Int);
                       args = [];
                     })
          in
          let condition = dexpr condition in
          let condition : B.expr =
            M.expr_Closure ~params:[ bpat ] ~body:condition ~captures:[]
              ~span:condition.span
              ~typ:(TArrow ([ bpat.typ ], condition.typ))
          in
          let body : B.expr =
            M.expr_Closure ~params:[ bpat ] ~body ~captures:[]
              ~typ:(TArrow ([ bpat.typ ], body.typ))
              ~span:body.span
          in
          let fold_operator : Concrete_ident.name =
            match control_flow with
            | Some (BreakOrReturn, _) -> Rust_primitives__hax__while_loop_return
            | Some (BreakOnly, _) -> Rust_primitives__hax__while_loop_cf
            | None -> Rust_primitives__hax__while_loop
          in
          let invariant : B.expr =
            UB.make_closure [ bpat ] invariant invariant.span
          in
          let variant = UB.make_closure [ bpat ] variant variant.span in
          (* The invariant should come before the condition. This allows to use the invariant
             to prove panic freedom of the condition. *)
          UB.call fold_operator
            [ invariant; condition; variant; init; body ]
            span (dty span expr.typ)
      | Loop _ ->
          Error.unimplemented ~issue_id:933 ~details:"Unhandled loop kind" span
      | [%inline_arms "dexpr'.*" - Loop - Break - Continue - Return] ->
          map (fun e -> B.{ e; typ = dty expr.span expr.typ; span = expr.span })
      | _ -> .
    [@@inline_ands bindings_of dexpr - dexpr']

    [%%inline_defs "Item.*"]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_functionalize_loops.mli
================================================
open! Prelude

module Make
    (F :
      Features.T
        with type continue = Features.Off.continue
         and type early_exit = Features.Off.early_exit
         and type break = Features.Off.break) : sig
  include module type of struct
    module FA = F

    (* Output feature set: every loop-related feature is switched off,
       since loops are replaced by fold/while combinators. *)
    module FB = struct
      include F
      include Features.Off.Loop
      include Features.Off.While_loop
      include Features.Off.For_loop
      include Features.Off.For_index_loop
      include Features.Off.State_passing_loop
      include Features.Off.Fold_like_loop
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_hoist_disjunctive_patterns.ml
================================================
(* This phase transforms deep disjunctive patterns in equivalent
   shallow ones. For example `Some(1 | 2)` becomes `Some(1) | Some(2)` *)

open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      let hoist_disjunctions =
        object (self)
          inherit [_] Visitors.map

          method! visit_pat () p =
            (* Rebuild a pattern node reusing this pattern's span/type. *)
            let return_pat p' = { p = p'; span = p.span; typ = p.typ } in

            (* When there is a list of subpaterns, we use the distributivity of nested
               disjunctions: (a | b, c | d) gives (a, c) | (a, d) | (b, c) | (b,d) *)
            let rec treat_args cases = function
              | { p = POr { subpats }; _ } :: tail ->
                  treat_args
                    (List.concat_map
                       ~f:(fun subpat ->
                         List.map ~f:(fun args -> subpat :: args) cases)
                       subpats)
                    tail
              | pat :: tail ->
                  let pat = self#visit_pat () pat in
                  treat_args (List.map ~f:(fun args -> pat :: args) cases) tail
              | [] -> cases
            in
            (* Avoid producing a single-branch disjunction. *)
            let subpats_to_disj subpats =
              match subpats with
              | [ pat ] -> pat
              | _ -> POr { subpats } |> return_pat
            in

            (* When there is one subpattern, we check if it is a disjunction,
               and if it is, we hoist it. *)
            let treat_subpat pat to_pattern =
              let subpat = self#visit_pat () pat in
              match subpat with
              | { p = POr { subpats }; span; _ } ->
                  return_pat
                    (POr
                       {
                         subpats =
                           List.map
                             ~f:(fun pat ->
                               { p = to_pattern pat; span; typ = p.typ })
                             subpats;
                       })
              | _ -> p
            in

            match p.p with
            | PConstruct { constructor; fields; is_record; is_struct } ->
                let fields_as_pat =
                  List.rev_map fields ~f:(fun arg -> self#visit_pat () arg.pat)
                in
                let subpats =
                  List.map (treat_args [ [] ] fields_as_pat)
                    ~f:(fun fields_as_pat ->
                      let fields =
                        (* exn justification: `rev_map fields` and `fields` have the same length *)
                        List.map2_exn fields_as_pat fields
                          ~f:(fun pat { field; _ } -> { field; pat })
                      in
                      PConstruct { constructor; fields; is_record; is_struct }
                      |> return_pat)
                in

                subpats_to_disj subpats
            | PArray { args } ->
                let subpats =
                  List.map
                    ~f:(fun args -> PArray { args } |> return_pat)
                    (treat_args [ [] ]
                       (List.rev_map args ~f:(fun arg -> self#visit_pat () arg)))
                in
                subpats_to_disj subpats
            | POr { subpats } ->
                (* Flatten nested disjunctions after hoisting each branch. *)
                let subpats = List.map ~f:(self#visit_pat ()) subpats in
                POr
                  {
                    subpats =
                      List.concat_map
                        ~f:(function
                          | { p = POr { subpats }; _ } -> subpats | p -> [ p ])
                        subpats;
                  }
                |> return_pat
            | PAscription { typ; typ_span; pat } ->
                treat_subpat pat (fun pat -> PAscription { typ; typ_span; pat })
            | PBinding { subpat = Some (pat, as_pat); mut; mode; typ; var } ->
                treat_subpat pat (fun pat ->
                    PBinding
                      { subpat = Some (pat, as_pat); mut; mode; typ; var })
            | PDeref { subpat; witness } ->
                treat_subpat subpat (fun subpat -> PDeref { subpat; witness })
            | PWild | PConstant _ | PBinding { subpat = None; _ } -> p
        end

      let ditems = List.map ~f:(hoist_disjunctions#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_hoist_disjunctive_patterns.mli
================================================
(** This phase eliminates nested disjunctive patterns (leaving only shallow
    disjunctions). It moves the disjunctions up to the top-level pattern: e.g.
    a pattern of the shape [C (a | b)] is rewritten into [C a | C b], so that
    only the outermost pattern of an arm can be a disjunction. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_local_mutation.ml
================================================
(* TODO: handle Exn report *)
open! Prelude
open Side_effect_utils

(* Rewrites mutable local variables into explicit state passing. Assignments
   become rebindings, and loops return (and rebind) the tuple of variables
   they mutate. The input features must already exclude references, pointers,
   non-trivial assignment targets and monads. *)
module%inlined_contents Make
    (F :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type mutable_pointer = Features.Off.mutable_pointer
         and type raw_pointer = Features.Off.raw_pointer
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type monadic_action = Features.Off.monadic_action
         and type monadic_binding = Features.Off.monadic_binding
         and type for_index_loop = Features.Off.for_index_loop) =
struct
  open Ast
  module FA = F

  (* Output feature set: mutable local variables are removed, and loops gain
     the ability to thread an explicit state accumulator. *)
  module FB = struct
    include F
    include Features.Off.Mutable_variable
    include Features.On.State_passing_loop
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.State_passing_loop
    end

    module SI = MakeSI (FB)

    (* [Instructions.t] drives [dexpr_s]: it records which mutated variables
       each syntactic level must return.
       - [expr_level]: variables whose final values the current expression
         must return, packed in a tuple (see the [Assign] case below);
       - [loop_level]: variables handed back to the enclosing loop through
         the [acc] of [Break]/[Continue];
       - [drop_expr]: when true, the expression's own value is dropped and
         only the tuple of mutated variables is produced;
       - [fun_level]: carried along but not consulted in the code below
         (presumably reserved for function-level state — TODO confirm). *)
    module Instructions = struct
      type t = {
        expr_level : UB.TypedLocalIdent.t list;
        fun_level : UB.TypedLocalIdent.t list;
        loop_level : UB.TypedLocalIdent.t list;
        drop_expr : bool;
      }

      let zero =
        { expr_level = []; fun_level = []; loop_level = []; drop_expr = false }
    end

    (* Reducer computing the set of local variables an expression assigns to,
       together with their types. *)
    let free_assigned_variables =
      UA.Reducers.free_assigned_variables (function _ -> .)

    [%%inline_defs dmutability + dsafety_kind]

    (* Patterns: mutability markers are erased ([mut] becomes [Immutable])
       and dereference patterns are flattened away, since mutable locals no
       longer exist in the target AST. *)
    let rec dpat' (span : span) (p : A.pat') : B.pat' =
      match p with
      | [%inline_arms "dpat'.*" - PBinding - PDeref] -> auto
      | PBinding { var : Local_ident.t; typ; subpat; _ } ->
          PBinding
            {
              mut = Immutable;
              mode = ByValue;
              var;
              typ = dty span typ;
              subpat = Option.map ~f:(dpat *** Fn.id) subpat;
            }
      | PDeref { subpat; _ } -> (dpat subpat).p

    (* [s] is the list of variables the last expression should return, packed in a tuple *)
    and dexpr_s (s : Instructions.t) (expr : A.expr) : B.expr =
      (* [dexpr_same] keeps the current instructions (used for sub-expressions
         in tail position); [dexpr] resets them (non-tail sub-expressions). *)
      let dexpr_same e = dexpr_s s e in
      let rec dexpr e = dexpr_s { s with expr_level = []; drop_expr = false } e
      and dloop_state = [%inline_body dloop_state] in
      let span = expr.span in
      (* Tuple of the loop-level variables, used as the accumulator carried by
         [Break]/[Continue]. *)
      let local_vars_expr =
        let vars =
          List.map
            ~f:(fun (i, typ) : B.expr -> { e = LocalVar i; typ; span })
            s.loop_level
        in
        match vars with [ v ] -> v | _ -> UB.make_tuple_expr ~span vars
      in
      match expr.e with
      (* [let lhs = (var <- value); body]: the assignment is turned into a
         plain (re)binding of [var] next to [lhs]; when [lhs] is a wildcard,
         only [var] is rebound. *)
      | Let
          {
            monadic = None;
            lhs;
            rhs =
              {
                e =
                  Assign
                    { lhs = LhsLocalVar { var; typ }; e = value; witness = _ };
                _;
              };
            body;
          } ->
          (* [h] pairs [x] with [y] unless [lhs] is a wildcard, in which case
             only [y] is kept (used symmetrically on patterns and exprs). *)
          let h (type a) (f : a list -> a) (x : a) (y : a) =
            match lhs.p with PWild -> y | _ -> f [ x; y ]
          in
          let body = dexpr_same body in
          {
            e =
              Let
                {
                  monadic = None;
                  lhs =
                    h UB.make_tuple_pat (dpat lhs)
                      (UB.make_var_pat var (dty span typ) span);
                  rhs =
                    h (UB.make_tuple_expr ~span) (UB.unit_expr span)
                      (dexpr_s
                         { s with expr_level = []; drop_expr = false }
                         value);
                  body;
                };
            typ = body.typ;
            span = expr.span;
          }
      (* Monadic lets are ruled out by the input feature constraints. *)
      | Let { monadic = Some _; _ } -> .
      (* General [let]: the rhs is instructed to additionally return every
         variable it mutates; those variables are rebound alongside (or, for a
         wildcard [lhs], instead of) the let-bound pattern. *)
      | Let { monadic = None; lhs; rhs; body } ->
          let drop_expr = [%matches? A.PWild] lhs.p in
          let rhs_vars =
            free_assigned_variables#visit_expr () rhs
            |> Set.to_list
            |> List.map ~f:(fun (i, t) -> (i, dty span t))
          in
          let vars_pat =
            List.map ~f:(fun (i, t) -> UB.make_var_pat i t span) rhs_vars
            |> UB.make_tuple_pat
          in
          let lhs = dpat lhs in
          let lhs' =
            if List.is_empty rhs_vars then lhs
            else if drop_expr then vars_pat
            else UB.make_tuple_pat [ vars_pat; lhs ]
          in
          let body = dexpr_same body in
          {
            e =
              Let
                {
                  monadic = None;
                  lhs = lhs';
                  rhs = dexpr_s { s with expr_level = rhs_vars; drop_expr } rhs;
                  body;
                };
            typ = body.typ;
            span = expr.span;
          }
      (* A bare assignment becomes the tuple of [expr_level] variables with
         the assigned variable replaced by the translated rhs; when the
         expression's value is not dropped, a unit is appended, since an
         assignment evaluates to [()]. *)
      | Assign { e; lhs = LhsLocalVar { var; _ }; _ } ->
          let vars =
            List.map
              ~f:(fun (i, typ) : B.expr ->
                if Local_ident.equal i var then
                  dexpr_s { s with expr_level = []; drop_expr = false } e
                else { e = LocalVar i; typ; span })
              s.expr_level
          in
          let vars =
            match vars with [ v ] -> v | _ -> UB.make_tuple_expr ~span vars
          in
          if s.drop_expr then vars
          else UB.make_tuple_expr ~span [ vars; UB.unit_expr span ]
      | Assign _ -> .
      (* Closures may mutate their own locals, but a mutation observable from
         the enclosing scope cannot be state-passed through a closure: this is
         reported as an error. *)
      | Closure { params; body; captures } ->
          let observable_mutations =
            free_assigned_variables#visit_expr () expr
          in
          if observable_mutations |> Set.is_empty |> not then
            Error.raise
              {
                kind =
                  ClosureMutatesParentBindings
                    {
                      bindings =
                        Set.to_list observable_mutations
                        |> List.map ~f:(fun (Local_ident.{ name; _ }, _) ->
                               name);
                    };
                span;
              };
          (* Inside the closure body, start afresh: return whatever the body
             itself mutates (its own locals). *)
          let s =
            {
              s with
              expr_level =
                (UA.Reducers.free_assigned_variables (function _ -> .))
                  #visit_expr () body
                |> Set.to_list
                |> List.map ~f:(fun (i, t) -> (i, dty span t));
              drop_expr = false;
            }
          in
          {
            e =
              Closure
                {
                  params = List.map ~f:dpat params;
                  body = dexpr_s s body;
                  captures =
                    List.map
                      ~f:
                        (dexpr_s
                           Instructions.zero
                           (* TODO: what to do with captures? We discard them entirely for now. Maybe we should remove that from the AST. *))
                      captures;
                };
            typ = dty span expr.typ;
            span = expr.span;
          }
      (* Both branches are in tail position and must return the same
         variables; a missing [else] becomes an explicit unit branch. *)
      | If { cond; then_; else_ } ->
          let then_ = dexpr_same then_ in
          let else_ =
            Option.value ~default:(UA.unit_expr expr.span) else_
            |> dexpr_same |> Option.some
          in
          let cond =
            dexpr_s { s with expr_level = []; drop_expr = false } cond
          in
          { e = If { cond; then_; else_ }; typ = then_.typ; span = expr.span }
      (* Arm bodies are in tail position; the scrutinee is not. *)
      | Match { scrutinee; arms } ->
          let arms =
            let dexpr = dexpr_same in
            let rec darm = [%inline_body darm]
            and darm' = [%inline_body darm'] in
            List.map ~f:darm arms
          in
          let typ =
            match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ
          in
          let scrutinee =
            dexpr_s { s with expr_level = []; drop_expr = false } scrutinee
          in
          { e = Match { scrutinee; arms }; typ; span = expr.span }
      (* [break]/[continue] must carry the tuple of loop-level mutated
         variables in their accumulator. *)
      | Break { e; label; witness; _ } ->
          let w = Features.On.state_passing_loop in
          {
            e =
              Break
                {
                  e = dexpr_same e;
                  acc = Some (local_vars_expr, w);
                  label;
                  witness;
                };
            span = expr.span;
            typ = local_vars_expr.typ;
          }
      | Return { e; witness } ->
          {
            e = Return { e = dexpr e; witness };
            span = expr.span;
            typ = dty expr.span expr.typ;
          }
      | Continue { acc = None; label; witness; _ } ->
          let w = Features.On.state_passing_loop in
          let e = local_vars_expr in
          {
            e = Continue { acc = Some (e, w); label; witness };
            span = expr.span;
            typ = e.typ;
          }
      (* Loops: every variable the loop mutates becomes loop state, seeded
         from the variables' current values and rebound afterwards. *)
      | Loop { body; kind; state; label; witness; _ } ->
          let variables_to_output = s.expr_level in
          let drop_expr = s.drop_expr in
          (* [adapt]: should we reorder shadowings? *)
          let observable_mutations, adapt =
            let set =
              free_assigned_variables#visit_expr () expr
              |> Set.map
                   (module UB.TypedLocalIdent)
                   ~f:(fun (i, t) -> (i, dty span t))
            in
            let idents_of_set = Set.map (module Local_ident) ~f:fst set in
            let idents_of_variables_to_output =
              variables_to_output |> List.map ~f:fst
              |> Set.of_list (module Local_ident)
            in
            (* if we mutate exactly s.expr_level, return that in this order *)
            if Set.equal idents_of_set idents_of_variables_to_output then
              (variables_to_output, false)
            else (set |> Set.to_list, true)
          in
          let s =
            {
              s with
              expr_level = observable_mutations;
              loop_level = observable_mutations;
              drop_expr = true;
            }
          in
          let empty_s = { s with expr_level = []; drop_expr = false } in
          (* The loop state carries the mutated variables; if the loop already
             had user state, the two are paired in a tuple. *)
          let state : B.loop_state option =
            if List.is_empty observable_mutations then
              Option.map ~f:(dloop_state span) state
            else
              Some
                (let bpat' =
                   List.map
                     ~f:(fun (i, t) -> UB.make_var_pat i t span)
                     observable_mutations
                   |> UB.make_tuple_pat
                 in
                 let init' =
                   List.map
                     ~f:(fun (i, typ) : B.expr -> { e = LocalVar i; typ; span })
                     observable_mutations
                   |> UB.make_tuple_expr ~span
                 in
                 let witness = Features.On.state_passing_loop in
                 match state with
                 | None -> { init = init'; bpat = bpat'; witness }
                 | Some { init; bpat; _ } ->
                     {
                       init =
                         UB.make_tuple_expr ~span
                           [ init'; dexpr_s empty_s init ];
                       bpat = UB.make_tuple_pat [ bpat'; dpat bpat ];
                       witness;
                     })
          in
          let kind =
            let dexpr = dexpr_s empty_s in
            [%inline_body dloop_kind] span kind
          in
          let body = dexpr_s s body in
          (* we deal with a for loop: this is always a unit expression (i.e. no [break foo] with [foo] non-unit allowed) *)
          let typ = List.map ~f:snd observable_mutations |> UB.make_tuple_typ in
          let loop : B.expr =
            {
              e =
                Loop { body; kind; state; label; witness; control_flow = None };
              typ;
              span;
            }
          in
          (* When the mutated set differs from what the context expects
             ([adapt]), an extra [let] reorders the loop's output tuple into
             the expected order. *)
          let vars =
            if adapt && not (List.is_empty variables_to_output) then
              (* here, we need to introduce the shadowings as bindings *)
              let out =
                UB.make_tuple_expr ~span
                @@ List.map
                     ~f:(fun (ident, typ) ->
                       B.{ e = LocalVar ident; typ; span })
                     variables_to_output
              in
              let lhs =
                UB.make_tuple_pat
                @@ List.map
                     ~f:(fun (ident, typ) -> UB.make_var_pat ident typ span)
                     observable_mutations
              in
              B.
                {
                  e = Let { monadic = None; lhs; rhs = loop; body = out };
                  span;
                  typ = out.typ;
                }
            else loop
          in
          if drop_expr then vars
          else UB.make_tuple_expr ~span [ vars; UB.unit_expr span ]
      (* All remaining expression forms: translate structurally; if the
         context expects mutated variables back, pair (or sequence, when the
         value is dropped) them with the translated expression. *)
      | [%inline_arms
          "dexpr'.*" - Let - Assign - Closure - Loop - If - Match - Break
          - Return] ->
          map (fun e ->
              let e' =
                B.{ e; typ = dty expr.span expr.typ; span = expr.span }
              in
              match e with
              | If _ | Match _ | Loop _ | Assign _ -> e'
              | _ when List.is_empty s.expr_level -> e'
              | _ ->
                  let vars =
                    List.map
                      ~f:(fun (i, typ) : B.expr ->
                        { e = LocalVar i; typ; span })
                      s.expr_level
                    |> UB.make_tuple_expr ~span
                  in
                  if s.drop_expr then
                    (* A dropped expression that only reads mutable locals is
                       pure here and can be elided entirely; otherwise keep it
                       for its effects, bound to a wildcard. *)
                    let effect_e' =
                      snd (SI.Hoist.collect_and_hoist_effects e')
                    in
                    if SI.SideEffects.reads_local_mut_only effect_e' then vars
                    else
                      {
                        vars with
                        e =
                          Let
                            {
                              monadic = None;
                              lhs = UB.M.pat_PWild ~typ:e'.typ ~span:e'.span;
                              rhs = e';
                              body = vars;
                            };
                      }
                  else UB.make_tuple_expr ~span [ vars; e' ])

    (* Entry point: a toplevel expression starts with no expected state. *)
    and dexpr_unwrapped e = dexpr_s Instructions.zero e
    [@@inline_ands bindings_of dexpr - dexpr']

    [%%inline_defs "Item.*"]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_local_mutation.mli
================================================
open! Prelude

(** This phase rewrites mutable local variables into explicit state passing:
    the output feature set (see [FB] below) removes [mutable_variable] and
    enables [state_passing_loop], so loops thread the values of mutated
    bindings through an accumulator instead of mutating them in place. The
    input features must already exclude references, pointers, non-trivial
    assignment targets, monads and index-based for loops. *)
module Make
    (F :
      Features.T
        with type mutable_reference = Features.Off.mutable_reference
         and type mutable_pointer = Features.Off.mutable_pointer
         and type raw_pointer = Features.Off.raw_pointer
         and type arbitrary_lhs = Features.Off.arbitrary_lhs
         and type nontrivial_lhs = Features.Off.nontrivial_lhs
         and type monadic_action = Features.Off.monadic_action
         and type monadic_binding = Features.Off.monadic_binding
         and type for_index_loop = Features.Off.for_index_loop) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.Off.Mutable_variable
      include Features.On.State_passing_loop
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_newtype_as_refinement.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)
      module Visitors = Ast_visitors.Make (F)
      open A

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.Make (F) (Error)

      (* Erases the [hax_lib] refinement intrinsics into type ascriptions and
         rewrites type definitions marked [NewtypeAsRefinement] into type
         aliases. *)
      let rewrite =
        object
          inherit [_] Visitors.map as super

          (* A call to a refinement constructor/destructor is replaced by a
             plain ascription of its argument to the call's result type. *)
          method! visit_expr () e =
            let e = super#visit_expr () e in
            match e.e with
            | App { f = { e = GlobalVar f; _ }; args = [ inner ]; _ }
              when (let eq name = Ast.Global_ident.eq_name name f in
                    eq Hax_lib__Refinement__new
                    || eq Hax_lib__RefineAs__into_checked
                    || eq Hax_lib__Refinement__get_mut
                    || eq Hax_lib__Refinement__get) ->
                { e with e = Ascription { typ = e.typ; e = inner } }
            | _ -> e

          (* A single-variant, single-field type item carrying the
             [NewtypeAsRefinement] attribute becomes an alias of its field's
             type. *)
          method! visit_item () i =
            let newtype_as_refinement () =
              Attrs.find_unique_attr i.attrs ~f:(fun payload ->
                  Option.some_if
                    ([%eq: Types.ha_payload] NewtypeAsRefinement payload)
                    ())
              |> Option.is_some
            in
            match i.v with
            | Type
                {
                  name;
                  generics;
                  variants = [ { arguments = [ (_, ty, _) ]; _ } ];
                  _;
                }
              when newtype_as_refinement () ->
                { i with v = TyAlias { name; generics; ty } }
            | _ -> super#visit_item () i
        end

      let ditems = List.map ~f:(rewrite#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_newtype_as_refinement.mli
================================================
(** This phase transforms annotated struct definitions into (refined) type
    aliases: any single-variant, single-field type item carrying the
    [NewtypeAsRefinement] attribute is replaced by a type alias to its field's
    type. It also erases calls to the [hax_lib] refinement
    constructors/destructors ([Refinement::new], [Refinement::get],
    [Refinement::get_mut], [RefineAs::into_checked]) into plain type
    ascriptions. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_reconstruct_asserts.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Recognizes the conditional-panic shape that assertion macros desugar
         to ([if cond { ...panic... }]) and rewrites it into a direct call to
         [hax_lib::assert]. *)
      let reconstruct_assert =
        object (self)
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            (* Shape 1: a block whose body is [nta(panic_fn(..))] — the panic
               call is the single argument of the [never_to_any]-like
               function. Returns both global idents for the guard below. *)
            let extract_block e =
              let* { e; _ } = U.D.expr_Block e in
              let* { f; args; _ } = U.D.expr_App e in
              let* nta = U.D.expr_GlobalVar f in
              match args with
              | [ { e = App { f = { e = GlobalVar panic; _ }; _ }; _ } ] ->
                  Some (nta, panic)
              | _ -> None
            in
            (* Shape 2: [nta(let .. = ..; { panic_fn(..) })] — the panic call
               sits under an intermediate [let] inside the application's
               single argument (observed for the assert_eq!/assert_ne!
               desugaring — TODO confirm which macros produce this shape). *)
            let extract_app e =
              let* { f; args; _ } = U.D.expr_App e in
              let* nta = U.D.expr_GlobalVar f in
              let* arg = U.D.list_1 args in
              let* { body; _ } = U.D.expr_Let arg in
              let* { e; _ } = U.D.expr_Block body in
              let* { f; _ } = U.D.expr_App e in
              let* panic = U.D.expr_GlobalVar f in
              Some (nta, panic)
            in
            (* An [if] whose then-branch panics under either shape; yields the
               panic ident, the never-to-any ident and the condition. *)
            let extract e =
              let* { cond; then_; _ } = U.D.expr_If e in
              let* nta, panic =
                extract_app then_ <|> fun _ -> extract_block then_
              in
              Some (panic, nta, cond)
            in
            (* Only rewrite when the extracted idents really are
               [never_to_any] and one of core's panic entry points. *)
            match extract e with
            | Some (panic, nta, cond)
              when Ast.Global_ident.eq_name Rust_primitives__hax__never_to_any
                     nta
                   && (Ast.Global_ident.eq_name Core__panicking__panic panic
                      || Ast.Global_ident.eq_name Core__panicking__assert_failed
                           panic) ->
                let cond_expr = self#visit_expr () cond in

                (* The macro panics when the asserted proposition is false, so
                   the proposition is the negation of [cond]. *)
                let prop =
                  match cond_expr.e with
                  (* assert! and assert_eq! *)
                  | App { f = { e = GlobalVar fnot; _ }; args = [ prop ]; _ }
                    when Ast.Global_ident.eq_name Core__ops__bit__Not__not fnot
                    ->
                      prop
                  (* assert_ne! *)
                  | _ ->
                      (* no leading [!] to strip: wrap the condition in an
                         explicit negation *)
                      {
                        cond_expr with
                        e =
                          App
                            {
                              f =
                                {
                                  e =
                                    GlobalVar
                                      (Ast.Global_ident.of_name ~value:true
                                         Core__ops__bit__Not__not);
                                  span = cond_expr.span;
                                  typ = TArrow ([ TBool ], TBool);
                                };
                              args = [ cond_expr ];
                              generic_args = [];
                              bounds_impls = [];
                              trait = None;
                            };
                      }
                in

                (* Build [hax_lib::assert(prop)], of type [bool -> ()]. *)
                {
                  e with
                  e =
                    App
                      {
                        f =
                          {
                            e =
                              GlobalVar
                                (Ast.Global_ident.of_name ~value:true
                                   Hax_lib__assert);
                            span = e.span;
                            typ =
                              TArrow
                                ( [ TBool ],
                                  TApp { ident = `TupleType 0; args = [] } );
                          };
                        args = [ prop ];
                        generic_args = [];
                        bounds_impls = [];
                        trait = None;
                      };
                }
            | _ -> super#visit_expr () e
        end

      let ditems = List.map ~f:(reconstruct_assert#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_reconstruct_asserts.mli
================================================
(** This phase recognizes desugared `assert!(...)` (and the conditional-panic
    shapes produced by `assert_eq!`/`assert_ne!`) to rewrite them into
    `hax_lib::assert(..)`. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_reconstruct_for_index_loops.ml
================================================
open! Prelude

(* Reconstructs index-based [for] loops: a loop over a literal integer range
   becomes the dedicated [ForIndexLoop] construct of the output AST. *)
module%inlined_contents Make (FA : Features.T) = struct
  open Ast

  (* Output feature set: same as the input, plus [For_index_loop]. *)
  module FB = struct
    include FA
    include Features.On.For_index_loop
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.For_index_loop
    end

    [%%inline_defs dmutability + dsafety_kind]

    (* Matches a [ForLoop] whose iterator is
       [into_iter(Range { start, end })] (a struct-literal range) and whose
       pattern is a plain immutable by-value binding; such a loop is rewritten
       into [ForIndexLoop]. All other loop kinds pass through unchanged. *)
    let rec dloop_kind (span : span) (k : A.loop_kind) : B.loop_kind =
      match k with
      | ForLoop
          {
            it =
              {
                e =
                  App
                    {
                      f = { e = GlobalVar (`Concrete into_iter_meth); _ };
                      args =
                        [
                          {
                            e =
                              Construct
                                {
                                  constructor = `Concrete range_ctor;
                                  is_record = true;
                                  is_struct = true;
                                  fields =
                                    [
                                      (`Concrete start_field, start);
                                      (`Concrete end_field, end_);
                                    ];
                                  base = None;
                                };
                            _;
                          };
                        ];
                      _ (* TODO: see issue #328 *);
                    };
                typ;
                _;
              };
            pat =
              {
                p =
                  PBinding
                    { mut = Immutable; mode = ByValue; var; subpat = None; _ };
                _;
              };
            _;
          }
        (* Guard: the matched idents must really be [IntoIterator::into_iter]
           and [core::ops::range::Range] with its [start]/[end] fields. *)
        when Concrete_ident.eq_name
               Core__iter__traits__collect__IntoIterator__into_iter
               into_iter_meth
             && Concrete_ident.eq_name Core__ops__range__Range__start
                  start_field
             && Concrete_ident.eq_name Core__ops__range__Range range_ctor
             && Concrete_ident.eq_name Core__ops__range__Range__end end_field ->
          ForIndexLoop
            {
              start = dexpr start;
              end_ = dexpr end_;
              var;
              var_typ = dty span typ;
              witness = Features.On.for_index_loop;
            }
      | [%inline_arms "dloop_kind.*"] -> auto
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_reconstruct_for_index_loops.mli
================================================
open! Prelude

(** This phase reconstructs index-based [for] loops: a [for] loop iterating
    over [(start..end_).into_iter()] with a plain immutable by-value binding
    is rewritten into the dedicated [ForIndexLoop] construct, enabled in the
    output feature set [FB] below. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    module FB = struct
      include F
      include Features.On.For_index_loop
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_reconstruct_for_loops.ml
================================================
open! Prelude

module%inlined_contents Make
    (FA :
      Features.T
    (* with type raw_pointer = Features.off *)
    (*  and type mutable_pointer = Features.off *)) =
struct
  open Ast

  module FB = struct
    include FA
    include Features.On.For_loop
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.For_loop
    end

    module For = struct
      [@@@warning "-9"]

      open A

      type t = {
        it : expr;
        pat : pat;
        body : expr;
        state : loop_state option;
        label : string option;
        witness : FA.loop;
      }
      [@@deriving show]

      let extract (e : expr) : t option =
        let e = UA.Mappers.normalize_borrow_mut#visit_expr () e in
        match e.e with
        | Match
            {
              scrutinee = it;
              arms =
                [
                  {
                    arm =
                      {
                        arm_pat =
                          {
                            p =
                              PBinding
                                {
                                  mut = Mutable _;
                                  mode = ByValue;
                                  var = iter_variable;
                                  subpat = None;
                                };
                          };
                        body =
                          {
                            e =
                              Loop
                                {
                                  label;
                                  kind = UnconditionalLoop;
                                  state;
                                  witness;
                                  body =
                                    {
                                      e =
                                        Let
                                          {
                                            monadic = None;
                                            lhs = { p = PWild };
                                            rhs =
                                              {
                                                e =
                                                  Match
                                                    {
                                                      scrutinee =
                                                        {
                                                          e =
                                                            App
                                                              {
                                                                f =
                                                                  {
                                                                    e =
                                                                      GlobalVar
                                                                        (`Concrete
                                                                           next_meth);
                                                                  };
                                                                args =
                                                                  [
                                                                    {
                                                                      e =
                                                                        Borrow
                                                                          {
                                                                            kind =
                                                                              Mut
                                                                                _;
                                                                            e =
                                                                              {
                                                                                e =
                                                                                LocalVar
                                                                                next_iter_variable;
                                                                              };
                                                                          };
                                                                    };
                                                                  ];
                                                                _
                                                                (* TODO: see issue #328 *);
                                                              };
                                                        };
                                                      arms =
                                                        [
                                                          {
                                                            arm =
                                                              {
                                                                arm_pat =
                                                                  {
                                                                    p =
                                                                      PConstruct
                                                                        {
                                                                          constructor =
                                                                            `Concrete
                                                                              none_ctor;
                                                                          fields =
                                                                            [];
                                                                          _;
                                                                        };
                                                                  };
                                                                body =
                                                                  {
                                                                    e =
                                                                      App
                                                                        {
                                                                          f =
                                                                            {
                                                                              e =
                                                                                GlobalVar
                                                                                never_to_any;
                                                                            };
                                                                          args =
                                                                            [
                                                                              {
                                                                                e =
                                                                                Break
                                                                                {
                                                                                e =
                                                                                {
                                                                                e =
                                                                                GlobalVar
                                                                                (`TupleCons
                                                                                0);
                                                                                };
                                                                                };
                                                                              };
                                                                            ];
                                                                          _
                                                                          (* TODO: see issue #328 *);
                                                                        };
                                                                  };
                                                              };
                                                          };
                                                          {
                                                            arm =
                                                              {
                                                                arm_pat =
                                                                  {
                                                                    p =
                                                                      PConstruct
                                                                        {
                                                                          constructor =
                                                                            `Concrete
                                                                              some_ctor;
                                                                          fields =
                                                                            [
                                                                              {
                                                                                pat;
                                                                              };
                                                                            ];
                                                                          _;
                                                                        };
                                                                  };
                                                                body;
                                                              };
                                                          };
                                                        ];
                                                    };
                                              };
                                            body =
                                              { e = GlobalVar (`TupleCons 0) };
                                          };
                                    };
                                  _;
                                };
                          };
                      };
                  };
                ];
            }
          when [%eq: local_ident] iter_variable next_iter_variable
               && Concrete_ident.eq_name
                    Core__iter__traits__iterator__Iterator__next next_meth
               && Concrete_ident.eq_name Core__option__Option__None none_ctor
               && Concrete_ident.eq_name Core__option__Option__Some some_ctor
               && Global_ident.eq_name Rust_primitives__hax__never_to_any
                    never_to_any ->
            let body =
              match body.e with
              | Let
                  {
                    lhs = { p = PWild };
                    rhs;
                    body = { e = GlobalVar (`TupleCons 0) };
                  }
                when UA.is_unit_typ rhs.typ ->
                  rhs
              | _ -> body
            in

            Some { it; pat; body; state; label; witness }
        | _ -> None
               [@ocamlformat "disable"]
    end

    [%%inline_defs dmutability + dsafety_kind]

    (** Translates one expression, reconstructing `for` loops on the way: when
        [For.extract] recognizes the iterator-based desugaring in [expr], a
        [Loop] with a [ForLoop] kind is emitted; otherwise we fall back to the
        default translation. *)
    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      (* [h] is the default expression translation, spliced in by the
         [%inline_body] ppx. *)
      let h = [%inline_body dexpr_unwrapped] in
      match For.extract expr with
      | Some { it; pat; body; label; state; witness } ->
          {
            e =
              Loop
                {
                  body = dexpr body;
                  kind =
                    ForLoop
                      {
                        it = dexpr it;
                        pat = dpat pat;
                        witness = Features.On.for_loop;
                      };
                  state = Option.map ~f:(dloop_state expr.span) state;
                  label;
                  witness = S.loop expr.span witness;
                  control_flow = None;
                };
            span = expr.span;
            (* The reconstructed loop expression has the unit type. *)
            typ = UB.unit_typ;
          }
      | None -> h expr
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_reconstruct_for_loops.mli
================================================
open! Prelude

(** [Make] lifts an AST over features [F] into one where the [For_loop]
    feature is enabled, reconstructing `for` loops from their desugaring. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (** This phase outputs an AST with `for` loops. *)
    module FB = struct
      include F
      include Features.On.For_loop
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_reconstruct_question_marks.ml
================================================
open! Prelude

module%inlined_contents Make (FA : Features.T) = struct
  open Ast

  module FB = struct
    include FA
    include Features.On.Question_mark
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.Question_mark
    end

    module QuestionMarks = struct
      [@@@warning "-9"]

      open A

      (** The types supported for [e] in a [e?] expression *)
      type qm_kind = QMResult of { success : ty; error : ty } | QMOption of ty

      (** Interpret a type [t] as a [qm_kind]. Fails with an assertion error
          (reported at [span]) when [t] is neither an [Option<_>] nor a
          [Result<_, _>] application. *)
      let qm_kind_of_typ span (t : ty) : qm_kind =
        let is_result = Global_ident.eq_name Core__result__Result in
        let is_option = Global_ident.eq_name Core__option__Option in
        match t with
        | TApp { ident; args = [ GType s; GType e ] } when is_result ident ->
            QMResult { success = s; error = e }
        | TApp { ident; args = [ GType s ] } when is_option ident -> QMOption s
        | _ ->
            Error.assertion_failure span
              ("expected something of type Option<_> or Result<_, _>, instead, \
                got: "
              ^ [%show: ty] t)

      (** Expects [impl] to be an impl. expr. for the trait
          `std::ops::FromResidual` for the type [Result<_, _>], and extracts
          its parent [From] impl expr (the single argument of the [ImplApp]). *)
      let expect_residual_impl_result (impl : impl_expr) : impl_expr option =
        match impl with
        | {
         kind = ImplApp { args = [ from_impl ]; _ };
         goal =
           {
             trait;
             args =
               [
                 GType (TApp { ident = arg1; _ });
                 GType (TApp { ident = arg2; _ });
               ];
           };
        }
        (* Both trait arguments must be [Result] applications. *)
          when Concrete_ident.eq_name Core__ops__try_trait__FromResidual trait
               && Global_ident.eq_name Core__result__Result arg1
               && Global_ident.eq_name Core__result__Result arg2 ->
            Some from_impl
        | _ -> None

      (** Expects [t] to be [Result<S, E>], and returns [(S, E)]. *)
      let expect_result_type (t : ty) : (ty * ty) option =
        match t with
        | TApp { ident; args = [ GType s; GType e ] }
          when Global_ident.eq_name Core__result__Result ident ->
            Some (s, e)
        | _ -> None

      (** Builds the type [Result<success, error>]. *)
      let make_result_type (success : ty) (error : ty) : ty =
        TApp
          {
            ident = Global_ident.of_name ~value:false Core__result__Result;
            args = [ GType success; GType error ];
          }

      (** Retypes an [Err::<_, E>(x)] literal as [Err::(x)]: when [e] is an
          [Err] constructor application, its type is replaced by
          [Result<success, error>]; any other expression is returned as-is. *)
      let retype_err_literal (e : expr) (success : ty) (error : ty) =
        let is_err_literal =
          match e.e with
          | Construct { constructor; _ } ->
              Global_ident.eq_name Core__result__Result__Err constructor
          | _ -> false
        in
        if is_err_literal then { e with typ = make_result_type success error }
        else e

      (** [map_err e error_dest impl] creates the expression [e.map_err(from)]
          with the proper types and impl informations. Returns [e] unchanged
          when the source and destination error types already coincide, and
          [None] when [e] is not a [Result] or [impl] is not a [FromResidual]
          impl for [Result]. *)
      let map_err (e : expr) (error_dest : ty) impl : expr option =
        let* success, error_src = expect_result_type e.typ in
        let* impl = expect_residual_impl_result impl in
        if [%equal: ty] error_src error_dest then Some e
        else
          (* Build `from : error_src -> error_dest` backed by the [From]
             impl extracted above, then `e.map_err(from)`. *)
          let from_typ = TArrow ([ error_src ], error_dest) in
          let from =
            UA.call ~impl Core__convert__From__from [] e.span from_typ
          in
          let call =
            UA.call Core__result__Impl__map_err [ e; from ] e.span
              (make_result_type success error_dest)
          in
          Some call

      (** [extract e] returns [Some (x, ty)] if [e] was a `y?` desugared by
          rustc. `y` is `x` plus possibly a coercion. [ty] is the return type of
          the function. *)
      let extract (e : expr) : (expr * ty) option =
        (* Recognizes `return from_residual(residual)` (an application through
           a trait method) and yields the function ident, the residual
           variable, the function return type and the trait impl. *)
        let extract_return (e : expr) =
          match e.e with
          | Return
              {
                e =
                  {
                    e =
                      App
                        {
                          f = { e = GlobalVar f };
                          args = [ { e = LocalVar residual_var; _ } ];
                          trait = Some (impl, _);
                        };
                    typ = return_typ;
                    _;
                  };
                _;
              } ->
              Some (f, residual_var, return_typ, impl)
          | _ -> None
        in
        (* Recognizes a one-field constructor pattern `C(x)` with an immutable
           binding and no subpattern, yielding [(C, x)]. *)
        let extract_pat_app_bd (p : pat) : (global_ident * local_ident) option =
          match p.p with
          | PConstruct
              {
                constructor;
                fields =
                  [
                    {
                      pat =
                        {
                          p =
                            PBinding { mut = Immutable; var; subpat = None; _ };
                          _;
                        };
                      _;
                    };
                  ];
                _;
              } ->
              Some (constructor, var)
          | _ -> None
        in
        (* The desugaring is a two-arm match on `Try::branch(expr)`:
           Break(residual) => never_to_any(return from_residual(residual))
           Continue(val)   => val *)
        match e.e with
        | Match
            {
              scrutinee =
                { e = App { f = { e = GlobalVar n; _ }; args = [ expr ] }; _ };
              arms =
                [
                  { arm = { arm_pat = pat_break; body }; _ };
                  {
                    arm =
                      {
                        arm_pat = pat_continue;
                        body = { e = LocalVar continue_var; _ };
                      };
                    _;
                  };
                ];
            }
        (*[@ocamlformat "disable"]*)
          when Global_ident.eq_name Core__ops__try_trait__Try__branch n ->
            let* body =
              UA.Expect.concrete_app1 Rust_primitives__hax__never_to_any body
            in
            let* f, residual_var, fun_return_typ, residual_impl =
              extract_return body
            in
            let* f_break, residual_var' = extract_pat_app_bd pat_break in
            let* f_continue, continue_var' = extract_pat_app_bd pat_continue in
            (* The variable bound by each arm pattern must be the one used in
               the corresponding arm body. *)
            let*? _ = [%equal: local_ident] residual_var residual_var' in
            let*? _ = [%equal: local_ident] continue_var continue_var' in
            let*? _ =
              Global_ident.eq_name Core__ops__control_flow__ControlFlow__Break
                f_break
              && Global_ident.eq_name
                   Core__ops__control_flow__ControlFlow__Continue f_continue
              && Global_ident.eq_name
                   Core__ops__try_trait__FromResidual__from_residual f
            in
            (* Both the local expression and the function return type must be
               the same kind (both Result or both Option); for Results, inline
               the error coercion via [map_err]. *)
            let expr =
              let kind = qm_kind_of_typ e.span in
              match (kind expr.typ, kind fun_return_typ) with
              | ( QMResult { error = local_err; _ },
                  QMResult { error = return_err; _ } ) ->
                  let expr = retype_err_literal expr e.typ local_err in
                  map_err expr return_err residual_impl
                  |> Option.value ~default:expr
              | QMOption _, QMOption _ -> expr
              | _ ->
                  Error.assertion_failure e.span
                    "expected expr.typ and fun_return_typ to be both Options \
                     or both Results"
            in
            Some (expr, fun_return_typ)
        | _ -> None
    end

    [%%inline_defs dmutability + dsafety_kind]

    (** Translates one expression, reconstructing question marks on the way:
        when [QuestionMarks.extract] recognizes rustc's `?` desugaring, a
        [QuestionMark] node is emitted; otherwise the default translation is
        used. *)
    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      (* [h] is the default expression translation, spliced in by the
         [%inline_body] ppx. *)
      let h = [%inline_body dexpr_unwrapped] in
      match QuestionMarks.extract expr with
      | Some (e, return_typ) ->
          {
            e =
              QuestionMark
                {
                  e = dexpr e;
                  return_typ = dty e.span return_typ;
                  witness = Features.On.question_mark;
                };
            span = expr.span;
            typ = dty expr.span expr.typ;
          }
      | None -> h expr
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]

(* module _ (F: Features.T): Phase_utils.PHASE = Make(F) *)


================================================
FILE: engine/lib/phases/phase_reconstruct_question_marks.mli
================================================
(** In THIR, there are no construct for question marks. Instead, Rustc desugars
    `e?` into the following:

    {@rust[
      match core::ops::try_trait::branch(y) {
          core::ops::control_flow::Break(residual) => {
              never_to_any(
                  {return core::ops::try_trait::from_residual(residual)},
              )
          }
          core::ops::control_flow::Continue(val) => val,
      }
    ]}

    This phase does the opposite rewrite.

    While `e?` in Rust might imply an implicit coercion, in our AST a question
    mark is expected to already have the right type. This phase inlines such a
    coercion (of the shape `x.map_err(from)` in the case of a `Result`). *)

(** [Make] lifts an AST over features [F] into one where the [Question_mark]
    feature is enabled. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (** This phase outputs an AST with question marks. *)
    module FB = struct
      include F
      include Features.On.Question_mark
    end

    (* [A]: input AST; [B]: output AST (question marks enabled). *)
    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_reconstruct_while_loops.ml
================================================
open! Prelude

module%inlined_contents Make (FA : Features.T) = struct
  open Ast

  module FB = struct
    include FA
    include Features.On.While_loop
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.While_loop
    end

    module While = struct
      [@@@warning "-9"]

      open A

      (** The components of a reconstructed `while` loop: its condition, body,
          optional loop state, optional label, and the loop feature witness. *)
      type t = {
        condition : expr;
        body : expr;
        state : loop_state option;
        label : string option;
        witness : FA.loop;
      }
      [@@deriving show]

      (** Recognizes an application [never_to_any x] and returns [x]. *)
      let expect_never_to_any (e : expr) : expr option =
        match e.e with
        | App { f = { e = GlobalVar f }; args = [ x ]; _ }
          when Global_ident.eq_name Rust_primitives__hax__never_to_any f ->
            Some x
        | _ -> None

      (** Recognizes `break ()`: a [Break] whose payload is the unit value
          (the nullary tuple constructor). *)
      let expect_break_unit (e : expr) : unit option =
        let breaks_with_unit =
          match e.e with
          | Break { e = { e = GlobalVar (`TupleCons 0) } } -> true
          | _ -> false
        in
        if breaks_with_unit then Some () else None

      (** Peels one layer of *safe* [Block] wrapping off [e], if present;
          any other expression (including unsafe blocks) is returned as-is. *)
      let strip_block (e : expr) : expr =
        match e.e with
        | Block { e = inner; safety_mode = Safe; _ } -> inner
        | _ -> e

      (** Destructures an [If] into (condition, then-branch, else-branch). *)
      let expect_ite (e : expr) : (expr * expr * expr option) option =
        match e.e with
        | If { cond; then_; else_ } -> Some (cond, then_, else_)
        | _ -> None

      (** Recognizes the desugaring of a `while` loop: an unconditional [Loop]
          whose body is an [If] on the loop condition, and whose else branch
          boils down to `break ()` under block and [never_to_any] wrappers.
          Returns [None] when the shape does not match. *)
      let extract (e : expr) : t option =
        (* Normalize `&mut` borrows before matching (NOTE(review): presumably
           so the shape below matches regardless of borrow noise — confirm). *)
        let e = UA.Mappers.normalize_borrow_mut#visit_expr () e in
        match e.e with
        | Loop { label; kind = UnconditionalLoop; state; witness; body; _ } ->
            let body = strip_block body in
            let* condition, body, else_ = expect_ite body in
            let* else_ = else_ in
            (* The else branch is unwrapped twice: block then [never_to_any],
               two rounds (NOTE(review): this expects two nested
               [never_to_any] applications — confirm against rustc's current
               desugaring). *)
            let else_ = strip_block else_ in
            let* else_ = expect_never_to_any else_ in
            let else_ = strip_block else_ in
            let* else_ = expect_never_to_any else_ in
            let* _ = expect_break_unit else_ in
            Some { condition; body; state; label; witness }
        | _ -> None
    end

    [%%inline_defs dmutability + dsafety_kind]

    (** Translates one expression, reconstructing `while` loops on the way:
        when [While.extract] recognizes the desugared shape, a [Loop] with a
        [WhileLoop] kind is emitted; otherwise the default translation is
        used. *)
    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      (* [h] is the default expression translation, spliced in by the
         [%inline_body] ppx. *)
      let h = [%inline_body dexpr_unwrapped] in
      match While.extract expr with
      | Some { condition; body; state; label; witness } ->
          {
            e =
              Loop
                {
                  body = dexpr body;
                  kind =
                    WhileLoop
                      {
                        condition = dexpr condition;
                        witness = Features.On.while_loop;
                      };
                  state = Option.map ~f:(dloop_state expr.span) state;
                  label;
                  witness = S.loop expr.span witness;
                  control_flow = None;
                };
            span = expr.span;
            (* The reconstructed loop expression has the unit type. *)
            typ = UB.unit_typ;
          }
      | None -> h expr
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_reconstruct_while_loops.mli
================================================
open! Prelude

(** [Make] lifts an AST over features [F] into one where the [While_loop]
    feature is enabled, reconstructing `while` loops from their desugaring. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (** This phase outputs an AST with `while` loops. *)
    module FB = struct
      include F
      include Features.On.While_loop
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_reject.ml
================================================
(** Builds the phase metadata for a rejection phase, tagging diagnostics with
    the rejected feature. *)
let make_metadata rejection_phase =
  Diagnostics.Phase.Reject rejection_phase |> Phase_utils.Metadata.make

(** Rejection phase for the [Arbitrary_lhs] feature: turns the feature off and
    reports any occurrence as an error. *)
module Arbitrary_lhs (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Arbitrary_lhs
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        (* Mapping the feature witness to [reject] makes the gate error out. *)
        let arbitrary_lhs = reject
        let metadata = make_metadata ArbitraryLhs
      end)
end

module _ (FA : Features.T) : Phase_utils.PHASE = Arbitrary_lhs (FA)

(** Rejection phase for the [Continue] feature: `continue` expressions are
    reported as errors. *)
module Continue (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Continue
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let continue = reject
        let metadata = make_metadata Continue
      end)
end

module _ (FA : Features.T) : Phase_utils.PHASE = Continue (FA)

(** Rejection phase for the [Question_mark] feature: `?` expressions are
    reported as errors. *)
module Question_mark (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Question_mark
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let question_mark = reject
        let metadata = make_metadata QuestionMark
      end)
end

module _ (FA : Features.T) : Phase_utils.PHASE = Question_mark (FA)

(** Rejection phase for raw and mutable pointers: both features are turned off
    and any occurrence is reported as an error. *)
module RawOrMutPointer (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Raw_pointer
    include Features.Off.Mutable_pointer
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        (* Both pointer flavors are rejected under a single metadata tag. *)
        let mutable_pointer = reject
        let raw_pointer = reject
        let metadata = make_metadata RawOrMutPointer
      end)
end

module _ (FA : Features.T) : Phase_utils.PHASE = RawOrMutPointer (FA)

(** Rejection phase for the [Early_exit] feature. *)
module EarlyExit (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Early_exit
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let early_exit = reject
        let metadata = make_metadata EarlyExit
      end)
end

(** Rejection phase for the [As_pattern] feature (`p @ subpattern`). *)
module As_pattern (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.As_pattern
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let as_pattern = reject
        let metadata = make_metadata AsPattern
      end)
end

(** Rejection phase for the [Dyn] feature (`dyn Trait` types). *)
module Dyn (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Dyn
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let dyn = reject
        let metadata = make_metadata Dyn
      end)
end

(** Rejection phase for the [Trait_item_default] feature (default bodies of
    trait items). *)
module Trait_item_default (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Trait_item_default
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let trait_item_default = reject
        let metadata = make_metadata TraitItemDefault
      end)
end

(** Rejection phase for the [Unsafe] feature: unsafe code is reported as an
    error. *)
module Unsafe (FA : Features.T) = struct
  module FB = struct
    include FA
    include Features.Off.Unsafe
  end

  include
    Feature_gate.Make (FA) (FB)
      (struct
        module A = FA
        module B = FB
        include Feature_gate.DefaultSubtype

        let unsafe = reject
        let metadata = make_metadata Unsafe
      end)
end


================================================
FILE: engine/lib/phases/phase_reject_impl_type_method.ml
================================================
open! Prelude

(** Monomorphic phase rejecting anonymous associated types (`impl T` in type
    signatures of associated items), reporting them as unimplemented
    (issue #1965). Items for which the traversal raises a span-free diagnostic
    are replaced by an error item carrying the message and the item's last
    available AST. *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Visitor threading the current item's span as its environment so that
         errors raised on types can be located. *)
      let reject_anon_assoc_ty =
        object
          inherit [_] Visitors.map as super

          method! visit_ty span t =
            match t with
            | TAssociatedType { item; _ }
              when Concrete_ident.is_anon_assoc_ty item ->
                Error.unimplemented ~issue_id:1965
                  ~details:
                    "`impl` types are not supported in type signatures of \
                     associated items."
                  (Option.value_exn span)
            | _ -> super#visit_ty span t

          method! visit_item _ i =
            try super#visit_item (Some i.span) i
            with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
              let error = Diagnostics.pretty_print_context_kind context kind in
              (* NOTE(review): unchecked cast to the full-featured AST, used
                 only to pretty-print the item in the error message — relies
                 on the ASTs sharing a representation; confirm. *)
              let cast_item : item -> Ast.Full.item = Stdlib.Obj.magic in
              let ast = cast_item i |> Print_rust.pitem_str in
              let msg =
                error ^ "\nLast available AST for this item:\n\n" ^ ast
              in
              make_hax_error_item i.span i.ident msg
        end

      let ditems = List.map ~f:(reject_anon_assoc_ty#visit_item None)
    end)


================================================
FILE: engine/lib/phases/phase_reject_impl_type_method.mli
================================================
(** This phase rejects `impl T` types in trait items *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_reorder_fields.ml
================================================
(** This phase re-order fields in structs according to the attribute
    [AttrPayload::Order] (if any). *)

open! Prelude

(** Monomorphic phase that, for every [Type] item, stable-sorts each variant's
    arguments by the order given in the [AttrPayload::Order] attribute,
    defaulting to the argument's original position. *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module M = Ast_builder.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.MakeBase (Error)

      (* An argument is a triple whose third component carries the attributes;
         [Attrs.order] reads the optional order annotation from them. *)
      let order_of_argument = thd3 >> Attrs.order

      let ditems =
        List.map ~f:(fun item ->
            match item.v with
            | Type ({ variants; _ } as o) ->
                let variants =
                  let f (v : variant) =
                    let arguments =
                      (* Pair each argument with its sort key: the declared
                         order if present, the positional index [i] otherwise
                         ([&&&] builds the (key, argument) pair). *)
                      List.mapi
                        ~f:(fun i ->
                          order_of_argument >> Option.value ~default:i &&& Fn.id)
                        v.arguments
                      (* Stable sort preserves declaration order on ties. *)
                      |> List.stable_sort ~compare:(fun (i, _) (j, _) ->
                             Int.compare i j)
                      |> List.map ~f:snd
                    in
                    { v with arguments }
                  in
                  List.map ~f variants
                in
                { item with v = Type { o with variants } }
            | _ -> item)
    end)


================================================
FILE: engine/lib/phases/phase_reorder_fields.mli
================================================
(** This phase re-order fields in structs according to the attribute
    [AttrPayload::Order] (if any). *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_rewrite_control_flow.ml
================================================
(* This phase rewrites `if c {return a}; b` as `if c {return a; b} else {b}`
   and performs the equivalent transformation on pattern matchings.
   It rewrites loop bodies treating `break` and `continue` like `return`,
   so as to place them in return position. If a loop contains a `return`,
   the loop is rewritten inside a pattern matching over its result. *)

open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module M = Ast_builder.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* [has_cf#visit_expr break_continue e] is [true] iff [e] contains a
         [Return], or — when [break_continue] is set — a [Break] or a
         [Continue]. *)
      let has_cf =
        object (_self)
          inherit [_] Visitors.reduce as super
          method zero = false
          method plus = ( || )

          method! visit_expr' break_continue e =
            match e with
            | Return _ -> true
            | (Break _ | Continue _) when break_continue -> true
            | _ -> super#visit_expr' break_continue e
        end

      (* Computes the type carried by the [Return] nodes of an expression,
         together with the [Return] feature witness. Defaults to
         [(U.unit_typ, None)] when no [Return] is found; the first non-unit
         candidate wins. *)
      let loop_return_type =
        object (_self)
          inherit [_] Visitors.reduce as super
          method zero = (U.unit_typ, None)
          method plus l r = if [%eq: ty] (fst l) U.unit_typ then r else l

          method! visit_expr' () e =
            match e with
            | Return { e; witness; _ } -> (e.typ, Some witness)
            | _ -> super#visit_expr' () e
        end

      (* Main rewriting visitor. The boolean environment tracks whether the
         current expression lives inside a loop body, in which case [Break]
         and [Continue] count as control flow too. *)
      let rewrite_control_flow =
        object (self)
          inherit [_] Visitors.map as super

          method! visit_expr in_loop e =
            (* [loop_with_return loop stmts_after final pat] wraps [loop]
               (whose body may produce a control-flow value) in a match: the
               [`Break] arm forwards the function-level return (or the bare
               value when no [Return] witness was found), while the
               [`Continue] arm binds [pat] and resumes with [stmts_after]
               followed by [final]. *)
            let loop_with_return (loop : expr) stmts_after final pat =
              let return_type, witness = loop_return_type#visit_expr () loop in

              let typ =
                U.M.ty_cf ~continue_type:loop.typ ~break_type:return_type
              in
              let loop = { loop with typ } in
              let span = loop.span in
              let id = U.fresh_local_ident_in [] "ret" in
              let module MS = (val U.M.make span) in
              let mk_cf_pat = U.M.pat_Constructor_CF ~span ~typ in
              let return_expr =
                let inner_e = MS.expr_LocalVar ~typ:return_type id in
                match witness with
                | Some witness ->
                    MS.expr_Return ~typ:return_type ~witness ~inner_e
                | None -> inner_e
              in
              let arms =
                [
                  MS.arm
                    (mk_cf_pat `Break (U.make_var_pat id return_type span))
                    return_expr;
                  MS.arm (mk_cf_pat `Continue pat)
                    (U.make_lets stmts_after final |> self#visit_expr in_loop);
                ]
              in
              MS.expr_Match ~scrutinee:loop ~arms ~typ:return_type
            in
            match e.e with
            (* This is supposed to improve performance but it might actually make it worse in some cases *)
            | _ when not (has_cf#visit_expr true e) -> e
            | Loop loop ->
                (* Rewrite the body (where break/continue count as control
                   flow); if the body also contains a [Return], wrap the loop
                   with [loop_with_return] so the return can escape it. *)
                let return_inside = has_cf#visit_expr false loop.body in
                let new_body = self#visit_expr true loop.body in
                let loop_expr =
                  {
                    e with
                    e =
                      Loop
                        {
                          loop with
                          body = { new_body with typ = loop.body.typ };
                        };
                  }
                in
                if return_inside then
                  let id = U.fresh_local_ident_in [] "loop_res" in
                  let pat = U.make_var_pat id loop_expr.typ loop_expr.span in
                  let module MS = (val U.M.make loop_expr.span) in
                  let final = MS.expr_LocalVar ~typ:loop_expr.typ id in
                  loop_with_return loop_expr [] final pat
                else loop_expr
            | Let _ -> (
                (* Collect let bindings to get the sequence
                   of "statements", find the first "statement" that is a
                   control flow containing a return. Rewrite it.
                *)
                let stmts, final = U.collect_let_bindings e in
                (* [inline_in_branch branch p stmts_after final] appends the
                   continuation ([stmts_after] then [final]) to [branch],
                   binding the branch's result to [p]. Degenerate bindings
                   ([let _ = ()] and [let x = x]) are elided. *)
                let inline_in_branch branch p stmts_after final =
                  let branch_stmts, branch_final =
                    U.collect_let_bindings branch
                  in
                  let stmts_to_add =
                    match (p, branch_final) with
                    (* This avoids adding `let _ = ()` *)
                    | { p = PWild; _ }, { e = GlobalVar (`TupleCons 0); _ } ->
                        stmts_after
                    (* This avoids adding `let x = x` *)
                    | { p = PBinding { var; _ }; _ }, { e = LocalVar evar; _ }
                      when Local_ident.equal var evar ->
                        stmts_after
                    | stmt -> stmt :: stmts_after
                  in
                  U.make_lets (branch_stmts @ stmts_to_add) final
                in
                (* Split the statements at the first one that needs rewriting:
                   an [If]/[Match] containing control flow, a [Loop]
                   containing a [Return], or a direct
                   [Return]/[Break]/[Continue]. *)
                let stmts_before, stmt_and_stmts_after =
                  List.split_while stmts ~f:(fun (_, e) ->
                      match e.e with
                      | (If _ | Match _) when has_cf#visit_expr in_loop e ->
                          false
                      | Loop _ when has_cf#visit_expr false e -> false
                      | Return _ | Break _ | Continue _ -> false
                      | _ -> true)
                in
                match stmt_and_stmts_after with
                | (p, ({ e = Loop loop; _ } as rhs)) :: stmts_after ->
                    let new_body = self#visit_expr true loop.body in
                    let loop_expr =
                      {
                        rhs with
                        e =
                          Loop
                            {
                              loop with
                              body = { new_body with typ = loop.body.typ };
                            };
                      }
                    in
                    U.make_lets stmts_before
                      (loop_with_return loop_expr stmts_after final p)
                | (p, ({ e = If { cond; then_; else_ }; _ } as rhs))
                  :: stmts_after ->
                    (* We know there is no "return" in the condition
                       so we must rewrite the if *)
                    let then_ = inline_in_branch then_ p stmts_after final in
                    let else_ =
                      Some
                        (match else_ with
                        | Some else_ ->
                            inline_in_branch else_ p stmts_after final
                        | None -> U.make_lets stmts_after final)
                    in
                    U.make_lets stmts_before
                      { rhs with e = If { cond; then_; else_ } }
                    |> self#visit_expr in_loop
                | (p, ({ e = Match { scrutinee; arms }; _ } as rhs))
                  :: stmts_after ->
                    (* Same as for [If]: replicate the continuation inside
                       every arm of the match. *)
                    let arms =
                      List.map arms ~f:(fun arm ->
                          let body =
                            inline_in_branch arm.arm.body p stmts_after final
                          in
                          { arm with arm = { arm.arm with body } })
                    in
                    U.make_lets stmts_before
                      { rhs with e = Match { scrutinee; arms } }
                    |> self#visit_expr in_loop
                (* The statements coming after a "return" are useless. *)
                | (_, ({ e = Return _ | Break _ | Continue _; _ } as rhs)) :: _
                  ->
                    U.make_lets stmts_before rhs |> self#visit_expr in_loop
                | _ ->
                    (* No statement needs rewriting here: simply recurse. *)
                    let stmts =
                      List.map stmts ~f:(fun (p, e) ->
                          (p, self#visit_expr in_loop e))
                    in
                    U.make_lets stmts (self#visit_expr in_loop final))
            | _ -> super#visit_expr in_loop e
        end

      let ditems = List.map ~f:(rewrite_control_flow#visit_item false)
    end)


================================================
FILE: engine/lib/phases/phase_rewrite_control_flow.mli
================================================
(** This phase finds control flow expressions (`if` or `match`) with a `return`
    expression in one of the branches. We replace them by replicating what comes
    after in all the branches. This allows the `return` to be eliminated by
    `drop_needless_returns`. This phase should come after
    `phase_local_mutation`. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_rewrite_local_self.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Visitor turning every impl. expr. locally bound under the name
         ["i0"] into the distinguished [Self] impl. expr. Note: when the
         rewrite fires, the other fields are kept from the *original* node
         (only [kind] is swapped). *)
      let self_rewriter =
        object
          inherit [_] U.Visitors.map as super

          method! visit_impl_expr () orig =
            let visited = super#visit_impl_expr () orig in
            match visited.kind with
            | LocalBound { id } when [%eq: string] id "i0" ->
                { orig with kind = Self }
            | _ -> visited
        end

      (* Only trait items are rewritten; every other item is returned as-is. *)
      let ditem item =
        match item.v with
        | Trait ({ items; _ } as t) ->
            let items =
              List.map ~f:(self_rewriter#visit_trait_item ()) items
            in
            { item with v = Trait { t with items } }
        | _ -> item

      let ditems = List.map ~f:ditem
    end)


================================================
FILE: engine/lib/phases/phase_rewrite_local_self.mli
================================================
(** Rewrites, in traits and impls, local bounds referring to `Self` into the
    impl expr of kind `Self`. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_simplify_hoisting.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Inlines [let v = rhs in body] whenever [v] is a compiler-generated
         side-effect hoisting variable (immutable, bound by value, no
         subpattern) that occurs exactly once in [body]: the single occurrence
         of [v] is replaced by [rhs] and the binding disappears. When [v]
         occurs zero or several times the binding is left untouched. *)
      let inline_matches =
        object (self)
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            match e with
            | {
             e =
               Let
                 {
                   monadic = None;
                   lhs =
                     {
                       p =
                         PBinding
                           {
                             mut = Immutable;
                             mode = ByValue;
                             var;
                             subpat = None;
                             _;
                           };
                       _;
                     };
                   rhs;
                   body;
                 };
             _;
            }
              when Local_ident.is_side_effect_hoist_var var ->
                (* Substitute [rhs] for [var] in [body], counting how many
                   occurrences were replaced. *)
                let body, count =
                  (object
                     inherit [_] Visitors.mapreduce as super
                     method zero = 0
                     method plus = ( + )

                     method! visit_expr () e =
                       match e.e with
                       | LocalVar v when [%eq: Local_ident.t] v var -> (rhs, 1)
                       | _ -> super#visit_expr () e
                  end)
                    #visit_expr
                    () body
                in
                (* Only commit when [var] was used exactly once; otherwise
                   keep the binding and recurse into the original node. *)
                if [%eq: int] count 1 then self#visit_expr () body
                else super#visit_expr () e
            | _ -> super#visit_expr () e
        end

      let ditems = List.map ~f:(inline_matches#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_simplify_hoisting.mli
================================================
(** This phase inlines bindings of the shape `let v = e in body` where `v` is a
    compiler-generated side-effect hoisting variable occurring exactly once in
    `body`. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_simplify_match_return.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      open Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Rewrites [let lhs = (match scrutinee with arm | _ -> return ...) in
         body] into [match scrutinee with arm' | _ -> return ...], pushing the
         continuation [body] inside the non-diverging arm. Only two-arm
         matches whose second arm is a guard-free [Return] are rewritten. *)
      let inline_matches =
        object
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            match e with
            | {
             e =
               Let
                 {
                   monadic = None;
                   lhs;
                   rhs =
                     {
                       e =
                         Match
                           {
                             scrutinee;
                             arms =
                               [
                                 arm;
                                 ({
                                    arm =
                                      {
                                        body =
                                          {
                                            e = Return _ as return;
                                            span = return_span;
                                            _;
                                          };
                                        guard = None;
                                        _;
                                      };
                                    _;
                                  } as diverging_arm);
                               ];
                           };
                       _;
                     } as match_expr;
                   body;
                 };
             _;
            } ->
                let arm_body = arm.arm.body in
                let arm_pat = arm.arm.arm_pat in
                (* When the non-diverging arm evaluates to a variable bound in
                   its own pattern, substitute [lhs] for that binder directly
                   and continue with [body]; otherwise, keep an explicit
                   [let lhs = arm_body in body] inside the arm. *)
                let arm_pat, let_expr =
                  ((* if the match produces only a variable *)
                   let* var =
                     match arm_body.e with LocalVar v -> Some v | _ -> None
                   in
                   let found = ref false in
                   let arm_pat =
                     (object
                        inherit [_] Visitors.map as super

                        method! visit_pat () p =
                          match p.p with
                          | PBinding b when [%eq: Local_ident.t] b.var var ->
                              found := true;
                              lhs
                          | _ -> super#visit_pat () p
                     end)
                       #visit_pat
                       () arm_pat
                   in
                   let*? _ = !found in
                   Some (arm_pat, body))
                  |> Option.value
                       ~default:
                         ( arm_pat,
                           {
                             e with
                             e =
                               Let { monadic = None; lhs; rhs = arm_body; body };
                           } )
                in
                let arm =
                  { arm with arm = { arm_pat; body = let_expr; guard = None } }
                in
                (* The diverging arm keeps its [Return] body, retyped to the
                   type of the whole [let] expression. *)
                let diverging_arm =
                  {
                    diverging_arm with
                    arm =
                      {
                        diverging_arm.arm with
                        body = { e = return; span = return_span; typ = e.typ };
                      };
                  }
                in
                let result =
                  let e' = Match { scrutinee; arms = [ arm; diverging_arm ] } in
                  let span = match_expr.span in
                  { span; typ = e.typ; e = e' }
                in
                super#visit_expr () result
            | _ -> super#visit_expr () e
        end

      let ditems = List.map ~f:(inline_matches#visit_item ())
    end)


================================================
FILE: engine/lib/phases/phase_simplify_match_return.mli
================================================
(** This phase rewrites `let pat = match ... { ... => ..., ... => return ... }; e`
    into `match ... { ... => let pat = ...; e}`. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_simplify_question_marks.ml
================================================
open! Prelude

module%inlined_contents Make (FA : Features.T) = struct
  open Ast

  module FB = struct
    include FA
    include Features.On.Question_mark
  end

  include
    Phase_utils.MakeBase (FA) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (FA)
    module UB = Ast_utils.Make (FB)

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.Question_mark
    end

    (* Helpers recognizing the rustc desugaring of `e?` (see the `.mli`) and
       rebuilding an explicit match over [Result]/[Option]. *)
    module QuestionMarks = struct
      [@@@warning "-9"]

      open A

      (** The types supported for [e] in a [e?] expression *)
      type qm_kind = QMResult of { success : ty; error : ty } | QMOption of ty

      (** Interpret a type [t] as a [qm_kind] *)
      let qm_kind_of_typ span (t : ty) : qm_kind =
        let is_result = Global_ident.eq_name Core__result__Result in
        let is_option = Global_ident.eq_name Core__option__Option in
        match t with
        | TApp { ident; args = [ GType s; GType e ] } when is_result ident ->
            QMResult { success = s; error = e }
        | TApp { ident; args = [ GType s ] } when is_option ident -> QMOption s
        | _ ->
            Error.assertion_failure span
              ("expected something of type Option<_> or Result<_, _>, instead, \
                got: "
              ^ [%show: ty] t)

      (** Expects [impl] to be an impl. expr. for the trait
          `std::ops::FromResidual` for the type [Result<_, _>], and extract its
          parent [From] impl expr *)
      let expect_residual_impl_result (impl : impl_expr) : impl_expr option =
        match impl with
        | {
         kind = ImplApp { args = [ from_impl ]; _ };
         goal =
           {
             trait;
             args =
               [
                 GType (TApp { ident = arg1; _ });
                 GType (TApp { ident = arg2; _ });
               ];
           };
        }
          when Concrete_ident.eq_name Core__ops__try_trait__FromResidual trait
               && Global_ident.eq_name Core__result__Result arg1
               && Global_ident.eq_name Core__result__Result arg2 ->
            Some from_impl
        | _ -> None

      (** Expects [t] to be [Result], and returns [(S, E)] *)
      let expect_result_type (t : ty) : (ty * ty) option =
        match t with
        | TApp { ident; args = [ GType s; GType e ] }
          when Global_ident.eq_name Core__result__Result ident ->
            Some (s, e)
        | _ -> None

      (** Construct [Result] *)
      let make_result_type (success : ty) (error : ty) : ty =
        let ident = Global_ident.of_name ~value:false Core__result__Result in
        TApp { ident; args = [ GType success; GType error ] }

      (** Retype a [Err::<_, E>(x)] literal, as [Err::(x)] *)
      let retype_err_literal (e : expr) (success : ty) (error : ty) =
        match e.e with
        | Construct { constructor; _ }
          when Global_ident.eq_name Core__result__Result__Err constructor ->
            { e with typ = make_result_type success error }
        | _ -> e

      (** [convert_from e error_dest impl] wraps [e] in a call to [From::from]
          converting it from its own type to [error_dest]. Returns [None] when
          no conversion is needed (the two types are equal) or when [impl] is
          not a [FromResidual]-for-[Result] impl. *)
      let convert_from (e : expr) (error_dest : ty) impl : expr option =
        let error_src = e.typ in
        let* impl = expect_residual_impl_result impl in
        let*? _ = [%eq: ty] error_src error_dest |> not in
        let from_typ = TArrow ([ error_src ], error_dest) in
        let impl_generic_args = [ GType error_dest; GType error_src ] in
        Some
          (UA.call ~impl_generic_args ~impl Core__convert__From__from [ e ]
             e.span from_typ)

      (** [map_err e error_dest impl] creates the expression [e.map_err(from)]
          with the proper types and impl information. *)
      let map_err (e : expr) (error_dest : ty) impl : expr option =
        let* success, error_src = expect_result_type e.typ in
        let* impl = expect_residual_impl_result impl in
        let from_typ = TArrow ([ error_src ], error_dest) in
        let from = UA.call ~impl Core__convert__From__from [] e.span from_typ in
        let call =
          UA.call Core__result__Impl__map_err [ e; from ] e.span
            (make_result_type success error_dest)
        in
        Some call

      (** Build a constructor pattern [constructor { fields }] at the given
          span and type. *)
      let mk_pconstruct ~is_struct ~is_record ~span ~typ
          (constructor : Concrete_ident_generated.t)
          (fields : (Concrete_ident_generated.t * pat) list) =
        let constructor = Global_ident.of_name ~value:true constructor in
        let fields =
          List.map
            ~f:(fun (field, pat) ->
              let field = Global_ident.of_name ~value:true field in
              { field; pat })
            fields
        in
        let p = PConstruct { constructor; fields; is_record; is_struct } in
        { p; span; typ }

      (** [extract e] returns [Some (x, ty)] if [e] was a `y?` desugared by
          rustc. `y` is `x` plus possibly a coercion. [ty] is the return type of
          the function. *)
      let extract (e : expr) : expr option =
        (* Recognize [return FromResidual::from_residual(residual_var)] and
           extract its components. *)
        let extract_return (e : expr) =
          match e.e with
          | Return
              {
                e =
                  {
                    e =
                      App
                        {
                          f = { e = GlobalVar f };
                          args = [ { e = LocalVar residual_var; _ } ];
                          trait = Some (impl, _);
                        };
                    typ = return_typ;
                    _;
                  };
                witness;
              } ->
              Some (f, residual_var, return_typ, impl, witness)
          | _ -> None
        in
        (* Recognize a one-field constructor pattern binding an immutable
           variable, e.g. [ControlFlow::Break(x)]. *)
        let extract_pat_app_bd (p : pat) : (global_ident * local_ident) option =
          match p.p with
          | PConstruct
              {
                constructor = name;
                fields =
                  [
                    {
                      pat =
                        {
                          p =
                            PBinding { mut = Immutable; var; subpat = None; _ };
                          _;
                        };
                      _;
                    };
                  ];
                _;
              } ->
              Some (name, var)
          | _ -> None
        in
        match e.e with
        | Match
            {
              scrutinee =
                { e = App { f = { e = GlobalVar n; _ }; args = [ expr ] }; _ };
              arms =
                [
                  { arm = { arm_pat = pat_break; body }; _ };
                  { arm = { arm_pat = pat_continue; body = continue_expr }; _ };
                ];
            }
          when Global_ident.eq_name Core__ops__try_trait__Try__branch n ->
            (* Check that the match has exactly the shape rustc produces for
               `expr?`: a Break arm returning [from_residual(residual)] under
               [never_to_any], and a Continue arm yielding the bound value. *)
            let* body =
              UA.Expect.concrete_app1 Rust_primitives__hax__never_to_any body
            in
            let* f, residual_var, fun_return_typ, residual_impl, return_witness
                =
              extract_return body
            in
            let* f_break, residual_var' = extract_pat_app_bd pat_break in
            let* f_continue, continue_var' = extract_pat_app_bd pat_continue in
            let continue_expr =
              Option.value
                (UA.Expect.borrow continue_expr)
                ~default:continue_expr
            in
            let continue_expr = UA.unbox_underef_expr continue_expr in
            let* continue_var = UA.Expect.local_var continue_expr in
            let*? _ = [%equal: local_ident] residual_var residual_var' in
            let*? _ = [%equal: local_ident] continue_var continue_var' in
            let*? _ =
              Global_ident.eq_name Core__ops__control_flow__ControlFlow__Break
                f_break
              && Global_ident.eq_name
                   Core__ops__control_flow__ControlFlow__Continue f_continue
              && Global_ident.eq_name
                   Core__ops__try_trait__FromResidual__from_residual f
            in
            let kind = qm_kind_of_typ e.span in
            let span = expr.span in
            let mk_var name : local_ident =
              { name; id = Local_ident.mk_id Expr (-1) }
            in
            let mk_cons =
              mk_pconstruct ~is_struct:false ~is_record:false ~span
                ~typ:expr.typ
            in
            (* Rebuild an explicit match over [Result] or [Option]: the happy
               arm evaluates to the inner value, the other arm returns early
               (inserting a [From::from] conversion for [Result] errors when
               needed). *)
            let expr =
              match (kind expr.typ, kind fun_return_typ) with
              | ( QMResult { error = local_err; success = local_success },
                  QMResult { error = return_err; _ } ) ->
                  let var_ok, var_err = (mk_var "ok", mk_var "err") in
                  let arm_ok : A.arm =
                    let pat = UA.make_var_pat var_ok local_success span in
                    let arm_pat =
                      mk_cons Core__result__Result__Ok
                        [ (Core__result__Result__Ok__0, pat) ]
                    in
                    let body =
                      { typ = local_success; e = LocalVar var_ok; span }
                    in
                    { arm = { arm_pat; body; guard = None }; span }
                  in
                  let arm_err =
                    let pat = UA.make_var_pat var_err local_err span in
                    let arm_pat =
                      mk_cons Core__result__Result__Err
                        [ (Core__result__Result__Err__0, pat) ]
                    in
                    let err = { typ = local_err; e = LocalVar var_err; span } in
                    let err =
                      convert_from err return_err residual_impl
                      |> Option.value ~default:err
                    in
                    let err =
                      UA.call_Constructor Core__result__Result__Err false
                        [ err ] e.span fun_return_typ
                    in
                    let e = Return { e = err; witness = return_witness } in
                    let return = { typ = local_success; e; span } in
                    { arm = { arm_pat; body = return; guard = None }; span }
                  in
                  let arms, typ = ([ arm_ok; arm_err ], local_success) in
                  { e = Match { scrutinee = expr; arms }; typ; span }
              | QMOption local_success, QMOption _ ->
                  let var_some = mk_var "some" in
                  let arm_some : A.arm =
                    let pat = UA.make_var_pat var_some local_success span in
                    let arm_pat =
                      mk_cons Core__option__Option__Some
                        [ (Core__option__Option__Some__0, pat) ]
                    in
                    let body =
                      { typ = local_success; e = LocalVar var_some; span }
                    in
                    { arm = { arm_pat; body; guard = None }; span }
                  in
                  let arm_none =
                    let arm_pat = mk_cons Core__option__Option__None [] in
                    let none =
                      UA.call_Constructor Core__option__Option__None false []
                        e.span fun_return_typ
                    in
                    let e = Return { e = none; witness = return_witness } in
                    let return = { typ = local_success; e; span } in
                    { arm = { arm_pat; body = return; guard = None }; span }
                  in
                  let arms, typ = ([ arm_some; arm_none ], local_success) in
                  { e = Match { scrutinee = expr; arms }; typ; span }
              | _ ->
                  Error.assertion_failure e.span
                    "expected expr.typ and fun_return_typ to be both Options \
                     or both Results"
            in
            Some expr
        | _ -> None
    end

    [%%inline_defs dmutability + dsafety_kind]

    (* Try to recognize a desugared `?` on each expression before falling back
       to the generic (inlined) expression translation. *)
    let rec dexpr_unwrapped (expr : A.expr) : B.expr =
      QuestionMarks.extract expr |> Option.value ~default:expr
      |> [%inline_body dexpr_unwrapped]
    [@@inline_ands bindings_of dexpr]

    [%%inline_defs "Item.*"]
  end

  include Implem
  module FA = FA
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_simplify_question_marks.mli
================================================
(** In THIR, there are no construct for question marks. Instead, Rustc desugars
    `e?` into the following:

    {@rust[
      match core::ops::try_trait::branch(y) {
          core::ops::control_flow::Break(residual) => {
              never_to_any(
                  {return core::ops::try_trait::from_residual(residual)},
              )
          }
          core::ops::control_flow::Continue(val) => val,
      })
    ]}

    This phase does the opposite rewrite.

    While `e?` in Rust might imply an implicit coercion, in our AST, a
    question mark is expected to already be of the right type. This phase
    inlines a coercion (of the shape `x.map_err(from)`, in the case of a
    `Result`). *)

open! Prelude

(** This phase can be applied to any feature set. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (** This phase outputs an AST with question marks. *)
    module FB = struct
      include F
      include Features.On.Question_mark
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_sort_items.ml
================================================
open! Prelude

module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.MakeBase (Error)

      (* Emit each item after everything it depends on, delegating the
         ordering to the [Dependencies] module. *)
      let ditems item_list =
        let module Graph = Dependencies.Make (F) in
        Graph.sort item_list
    end)


================================================
FILE: engine/lib/phases/phase_sort_items.mli
================================================
(** This phase sorts items so that each item comes after the items it depends
    on. This is done by sorting namespaces with the same property, and then
    sorting items within each namespace, trying as much as possible to respect
    the original order. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_sort_items_namespace_wise.ml
================================================
open! Prelude

(** Functor producing the namespace-wise item-sorting phase for any feature
    set [F]; the ordering is delegated to
    [Dependencies.Make(F).sort_namespace_wise]. *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      (* Phase name derived automatically from the module path by ppx. *)
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.MakeBase (Error)

      (* Sort namespaces, then items within each namespace; see
         [Dependencies.Make.sort_namespace_wise]. *)
      let ditems items =
        let module Deps = Dependencies.Make (F) in
        Deps.sort_namespace_wise items
    end)


================================================
FILE: engine/lib/phases/phase_sort_items_namespace_wise.mli
================================================
(** This phase sorts items so that each item comes after the items it depends
    on. This is done by sorting namespaces with the same property, and then
    sorting items within each namespace, trying as much as possible to respect
    the original order. Implemented by [Dependencies.sort_namespace_wise]. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_specialize.ml
================================================
open! Prelude

(** Functor building the specialization phase: rewrites selected method
    applications — matched by name, argument types and return type — into
    calls to plain "primitive" hax functions. *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)
      module FB = F
      module B = Ast.Make (F)
      module U = Ast_utils.Make (F)
      module Visitors = Ast_visitors.Make (F)
      open A

      open struct
        open Concrete_ident_generated

        module FnReplace = struct
          (* A function-application rewriter: given every piece of an [App]
             node, produce the replacement expression. *)
          type t =
            span:Span.t ->
            typ:ty ->
            f:expr ->
            args:expr list ->
            generic_args:generic_value list ->
            bounds_impls:impl_expr list ->
            trait:(impl_expr * generic_value list) option ->
            expr

          (** Retype a function application: this concretize the types, using
              concrete types from arguments. *)
          let retype (fn : t) : t =
           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->
            let f =
              let typ =
                (* Nullary application: keep [f]'s type; otherwise rebuild an
                   arrow type from the actual argument types. *)
                if List.is_empty args then f.typ
                else TArrow (List.map ~f:(fun e -> e.typ) args, typ)
              in
              { f with typ }
            in
            fn ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait

          (** Gets rid of trait and impl informations. *)
          let remove_traits (fn : t) : t =
           fun ~span ~typ ~f ~args ~generic_args:_ ~bounds_impls:_ ~trait:_ ->
            fn ~span ~typ ~f ~args ~generic_args:[] ~bounds_impls:[] ~trait:None

          (** Monomorphize a function call: this removes any impl references,
              and concretize types. NOTE(review): "monorphic" looks like a
              typo for "monomorphic" (internal name only). *)
          let monorphic (fn : t) : t = remove_traits (retype fn)

          (** Rewriter that replaces the applied function by the value-level
              global identifier [name], keeping everything else intact. *)
          let name name : t =
           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->
            let name = Ast.Global_ident.of_name ~value:true name in
            let f = { f with e = GlobalVar name } in
            let e = App { args; f; generic_args; bounds_impls; trait } in
            { typ; span; e }

          (** Post-compose a rewriter with an expression transformation. *)
          let and_then (f1 : t) (f2 : expr -> expr) : t =
           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->
            f1 ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait |> f2

          (** Pre-transform each argument (given its index) before applying
              the wrapped rewriter. *)
          let map_args (fn : int -> expr -> expr) : t -> t =
           fun g ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->
            let args = List.mapi ~f:fn args in
            g ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait
        end

        (* [t] in field [fn] below is [Concrete_ident_generated.t] (in scope
           via the [open] above): the name of the function to match. *)
        type pattern = {
          fn : t;
          fn_replace : FnReplace.t;
          args : (expr -> bool) list;
          ret : ty -> bool;
        }
        (** A pattern that helps matching against function applications *)

        type ('a, 'b) predicate = 'a -> 'b option
        (** Instead of working directly with boolean predicate, we work with `_
            -> _ option` so that we can chain them *)

        (** Constructs a [pattern] out of predicates and names *)
        let mk' (args : ('a, 'b) predicate list) (ret : ('c, 'd) predicate)
            (fn : t) (fn_replace : FnReplace.t) : pattern =
          let args = List.map ~f:(fun p x -> p x |> Option.is_some) args in
          let ret t = ret t |> Option.is_some in
          { fn; fn_replace; args; ret }

        (* Like [mk'], but the replacement is simply a monomorphized call to
           the function named [fn_replace]. *)
        let mk (args : ('a, 'b) predicate list) (ret : ('c, 'd) predicate)
            (fn : t) (fn_replace : t) : pattern =
          mk' args ret fn (FnReplace.name fn_replace |> FnReplace.monorphic)

        open struct
          let etyp (e : expr) : ty = e.typ

          (* Match a reference type, exposing the referee type. *)
          let tref = function TRef { typ; _ } -> Some typ | _ -> None

          (* Match a type application with no arguments, exposing its head. *)
          let tapp0 = function
            | TApp { ident; args = [] } -> Some ident
            | _ -> None

          (* Kleisli composition of predicates. *)
          let ( >>& ) (f1 : ('a, 'b) predicate) (f2 : ('b, 'c) predicate) :
              ('a, 'c) predicate =
           fun x -> Option.bind (f1 x) ~f:f2

          let eq : 'a 'b. eq:('a -> 'b -> bool) -> 'a -> ('b, 'b) predicate =
           fun ~eq x x' -> if eq x x' then Some x' else None

          let eq_global_ident :
              t -> (Ast.Global_ident.t, Ast.Global_ident.t) predicate =
            eq ~eq:Ast.Global_ident.eq_name

          let erase : 'a. ('a, unit) predicate = fun _ -> Some ()

          (* Disjunction of predicates; note both sides are evaluated and the
             left result is preferred. *)
          let ( ||. ) (type a b) (f : (a, b) predicate) (g : (a, b) predicate) :
              (a, b) predicate =
           fun x ->
            match (f x, g x) with Some a, _ | _, Some a -> Some a | _ -> None

          (* hax's mathematical-integer type [Hax_lib::int::Int]. *)
          let is_int : (ty, unit) predicate =
            tapp0 >>& eq_global_ident Hax_lib__int__Int >>& erase

          (* Machine integers, possibly behind one or two references. *)
          let is_machine_int : (ty, unit) predicate =
           fun t ->
            match t with
            | TInt _
            | TRef { typ = TInt _; _ }
            | TRef { typ = TRef { typ = TInt _; _ }; _ } ->
                Some ()
            | _ -> None

          let is_prop : (ty, unit) predicate =
            tapp0 >>& eq_global_ident Hax_lib__prop__Prop >>& erase

          (* Booleans, possibly behind one or two references. *)
          let is_bool : (ty, unit) predicate = function
            | TBool
            | TRef { typ = TBool; _ }
            | TRef { typ = TRef { typ = TBool; _ }; _ } ->
                Some ()
            | _ -> None

          let any _ = Some ()

          (* Shorthand pattern constructors, named after their argument and
             return shapes: [int_*] = math [Int] args, [rint_*] = references
             to [Int], [mint_*] = machine ints, [any] = unconstrained.
             NOTE(review): [rint_any], [any_rint] and [prop_bool] are not
             used in the replacement lists visible below. *)
          let int_any = mk [ etyp >> is_int ] any
          let int_int_any = mk [ etyp >> is_int; etyp >> is_int ] any
          let any_int = mk [ any ] is_int
          let rint_any = mk [ etyp >> (tref >>& is_int) ] any

          let rint_rint_any =
            mk [ etyp >> (tref >>& is_int); etyp >> (tref >>& is_int) ] any

          let any_rint = mk [ any ] (tref >>& is_int)

          let mint_mint_any =
            mk [ etyp >> is_machine_int; etyp >> is_machine_int ] any

          let mint_any = mk [ etyp >> is_machine_int ] any
          let bool_prop = mk [ etyp >> is_bool ] is_prop
          let prop_bool = mk [ etyp >> is_prop ] is_bool

          (* Match an arrow type, exposing [inputs @ [output]]. *)
          let arrow : (ty, ty list) predicate = function
            | TArrow (ts, t) -> Some (ts @ [ t ])
            | _ -> None

          (* Match a unary arrow whose domain satisfies [a] and whose
             codomain satisfies [b]. *)
          let a_to_b a b : _ predicate =
            arrow >> fun x ->
            let* t, u =
              match x with Some [ a; b ] -> Some (a, b) | _ -> None
            in
            let* a = a t in
            let* b = b u in
            Some (a, b)
        end

        (* Arithmetic, bitwise and comparison trait methods on machine and
           math integers, rewritten to hax primitive functions. *)
        let int_replacements =
          [
            mint_mint_any Core__ops__arith__Add__add
              Rust_primitives__hax__machine_int__add;
            mint_mint_any Core__ops__arith__Sub__sub
              Rust_primitives__hax__machine_int__sub;
            mint_mint_any Core__ops__arith__Mul__mul
              Rust_primitives__hax__machine_int__mul;
            mint_mint_any Core__ops__arith__Div__div
              Rust_primitives__hax__machine_int__div;
            mint_mint_any Core__ops__arith__Rem__rem
              Rust_primitives__hax__machine_int__rem;
            mint_mint_any Core__ops__bit__Shl__shl
              Rust_primitives__hax__machine_int__shl;
            mint_mint_any Core__ops__bit__Shr__shr
              Rust_primitives__hax__machine_int__shr;
            mint_mint_any Core__ops__bit__BitXor__bitxor
              Rust_primitives__hax__machine_int__bitxor;
            mint_mint_any Core__ops__bit__BitAnd__bitand
              Rust_primitives__hax__machine_int__bitand;
            mint_mint_any Core__ops__bit__BitOr__bitor
              Rust_primitives__hax__machine_int__bitor;
            mint_any Core__ops__bit__Not__not
              Rust_primitives__hax__machine_int__not;
            mint_mint_any Core__cmp__PartialOrd__gt
              Rust_primitives__hax__machine_int__gt;
            mint_mint_any Core__cmp__PartialOrd__ge
              Rust_primitives__hax__machine_int__ge;
            mint_mint_any Core__cmp__PartialOrd__lt
              Rust_primitives__hax__machine_int__lt;
            mint_mint_any Core__cmp__PartialOrd__le
              Rust_primitives__hax__machine_int__le;
            mint_mint_any Core__cmp__PartialEq__ne
              Rust_primitives__hax__machine_int__ne;
            mint_mint_any Core__cmp__PartialEq__eq
              Rust_primitives__hax__machine_int__eq;
            mint_any Core__ops__arith__Neg__neg Rust_primitives__arithmetic__neg;
            int_int_any Core__ops__arith__Add__add
              Rust_primitives__hax__int__add;
            int_int_any Core__ops__arith__Sub__sub
              Rust_primitives__hax__int__sub;
            int_int_any Core__ops__arith__Mul__mul
              Rust_primitives__hax__int__mul;
            int_int_any Core__ops__arith__Div__div
              Rust_primitives__hax__int__div;
            int_int_any Core__ops__arith__Rem__rem
              Rust_primitives__hax__int__rem;
            int_any Core__ops__arith__Neg__neg Rust_primitives__hax__int__neg;
            rint_rint_any Core__cmp__PartialOrd__gt
              Rust_primitives__hax__int__gt;
            rint_rint_any Core__cmp__PartialOrd__ge
              Rust_primitives__hax__int__ge;
            rint_rint_any Core__cmp__PartialOrd__lt
              Rust_primitives__hax__int__lt;
            rint_rint_any Core__cmp__PartialOrd__le
              Rust_primitives__hax__int__le;
            rint_rint_any Core__cmp__PartialEq__ne Rust_primitives__hax__int__ne;
            rint_rint_any Core__cmp__PartialEq__eq Rust_primitives__hax__int__eq;
            any_int Hax_lib__abstraction__Abstraction__lift
              Rust_primitives__hax__int__from_machine;
            any_int Hax_lib__int__ToInt__to_int
              Rust_primitives__hax__int__from_machine;
            int_any Hax_lib__abstraction__Concretization__concretize
              Rust_primitives__hax__int__into_machine;
          ]

        (* [Prop]-related rewrites: lifting booleans into [Prop] and
           replacing inherent/trait methods by [Prop] constructors. *)
        let prop_replacements =
          let name_from_bool = Hax_lib__prop__constructors__from_bool in
          let prop_type =
            let ident =
              Ast.Global_ident.of_name ~value:false Hax_lib__prop__Prop
            in
            TApp { ident; args = [] }
          in
          let bool_prop__from_bool f = bool_prop f name_from_bool in
          (* [poly n f g]: rewrite the n-ary operation [f] — whose arguments
             are Prop/bool values or Prop/bool-valued closures — into the
             constructor [g], promoting bool (sub)expressions into [Prop] via
             [from_bool]. *)
          let poly n f g =
            let args =
              let prop_or_bool = is_bool ||. is_prop in
              List.init n ~f:(fun _ ->
                  etyp
                  >> (prop_or_bool
                     ||. (a_to_b prop_or_bool prop_or_bool >> erase)))
            in
            let promote_bool (e : A.expr) =
              match e.typ with
              | TBool -> U.call name_from_bool [ e ] e.span prop_type
              | _ -> e
            in
            mk' args is_prop f
              (FnReplace.map_args
                 (fun _ e ->
                   let e = promote_bool e in
                   match e.e with
                   | Closure { params; body; captures } ->
                       (* Promote inside closures too (e.g. forall/exists
                          bodies). *)
                       let body = promote_bool body in
                       { e with e = Closure { params; body; captures } }
                   | _ -> e)
                 (FnReplace.name g |> FnReplace.monorphic))
          in
          [
            bool_prop__from_bool Hax_lib__abstraction__Abstraction__lift;
            bool_prop__from_bool Hax_lib__prop__ToProp__to_prop;
            bool_prop__from_bool Core__convert__Into__into;
            bool_prop__from_bool Core__convert__From__from;
            (* Transform inherent methods on Prop *)
            poly 2 Hax_lib__prop__Impl__and Hax_lib__prop__constructors__and;
            poly 2 Hax_lib__prop__Impl__or Hax_lib__prop__constructors__or;
            poly 1 Hax_lib__prop__Impl__not Hax_lib__prop__constructors__not;
            poly 2 Hax_lib__prop__Impl__eq Hax_lib__prop__constructors__eq;
            poly 2 Hax_lib__prop__Impl__ne Hax_lib__prop__constructors__ne;
            poly 2 Hax_lib__prop__Impl__implies
              Hax_lib__prop__constructors__implies;
            (* Transform standalone functions in `prop` *)
            poly 2 Hax_lib__prop__implies Hax_lib__prop__constructors__implies;
            poly 1 Hax_lib__prop__forall Hax_lib__prop__constructors__forall;
            poly 1 Hax_lib__prop__exists Hax_lib__prop__constructors__exists;
            (* Transform core `&`, `|`, `!` on `Prop` *)
            poly 2 Core__ops__bit__BitAnd__bitand
              Hax_lib__prop__constructors__and;
            poly 2 Core__ops__bit__BitOr__bitor Hax_lib__prop__constructors__or;
            poly 1 Core__ops__bit__Not__not Hax_lib__prop__constructors__not;
          ]

        (* All rewrite patterns; an application must match exactly one. *)
        let replacements = List.concat [ int_replacements; prop_replacements ]
      end

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      module Attrs = Attr_payloads.Make (F) (Error)

      (** Drop `from` or `into` when they are of type `T -> T`, for any `T`. *)
      let remove_from_into_identity =
        object
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            let e =
              match e.e with
              | App { f = { e = GlobalVar f; _ }; args = [ x ]; _ }
                when [%equal: ty] e.typ x.typ
                     && (Ast.Global_ident.eq_name Core__convert__Into__into f
                        || Ast.Global_ident.eq_name Core__convert__From__from f
                        ) ->
                  x
              | _ -> e
            in
            super#visit_expr () e
        end

      (* Main rewriting visitor: find applications matching exactly one
         pattern in [replacements] and apply its [fn_replace]. *)
      let visitor =
        object (self)
          inherit [_] Visitors.map as super

          method! visit_expr () e =
            match e.e with
            | App
                {
                  f = { e = GlobalVar f; _ } as f';
                  args = l;
                  trait;
                  generic_args;
                  bounds_impls;
                } -> (
                (* Visit arguments first, then match patterns against the
                   rewritten arguments and the application's return type. *)
                let l = List.map ~f:(self#visit_expr ()) l in
                let matching =
                  List.filter
                    (List.mapi ~f:(fun i x -> (i, x)) replacements)
                    ~f:(fun (_, { fn; args; ret; fn_replace = _ }) ->
                      Ast.Global_ident.eq_name fn f
                      && ret e.typ
                      &&
                      match List.for_all2 args l ~f:apply with
                      | Ok r -> r
                      | _ -> false)
                in
                match matching with
                | [ (_, { fn_replace; _ }) ] ->
                    let e =
                      fn_replace ~args:l ~typ:e.typ ~span:e.span ~generic_args
                        ~bounds_impls ~trait ~f:f'
                    in
                    (* Re-visit: the replacement may itself be rewritable. *)
                    self#visit_expr () e
                | [] -> (
                    (* In this case we need to avoid recursing again through the arguments *)
                    let visited =
                      let args = [] in
                      let e' =
                        App { f = f'; args; trait; generic_args; bounds_impls }
                      in
                      super#visit_expr () { e with e = e' }
                    in
                    match visited.e with
                    | App { f; trait; generic_args; bounds_impls; _ } ->
                        {
                          visited with
                          e =
                            App
                              { f; args = l; trait; generic_args; bounds_impls };
                        }
                    | _ -> super#visit_expr () e)
                | r ->
                    (* Ambiguous: several patterns matched — report and fail. *)
                    let msg =
                      "Found multiple matching patterns: "
                      ^ [%show: int list] (List.map ~f:fst r)
                    in
                    Stdio.prerr_endline msg;
                    U.Debug.expr e;
                    Error.assertion_failure e.span msg)
            | _ -> super#visit_expr () e
        end

      (* Specialize every item, then clean up identity [from]/[into] calls
         left behind (or pre-existing). *)
      let ditems (l : A.item list) : B.item list =
        List.map
          ~f:(visitor#visit_item () >> remove_from_into_identity#visit_item ())
          l
    end)


================================================
FILE: engine/lib/phases/phase_specialize.mli
================================================
(** This phase specializes certain specific method applications (according to
    their name and the type it is being used on) into plain functions.

    This is especially useful for math integers: the methods of the traits
    `Add`, `Sub`, `Mul` etc. are mapped to "primitive" functions in backends
    (e.g. Prims.whatever in FStar). *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_traits_specs.ml
================================================
open! Prelude

(** Functor building the trait-specs phase: every trait method gets `_pre`
    and `_post` declarations, and every impl method gets the matching
    implementations (defaulting to the literal [true] when no explicit
    requires/ensures attribute is attached). *)
module Make (F : Features.T) =
  Phase_utils.MakeMonomorphicPhase
    (F)
    (struct
      let phase_id = [%auto_phase_name auto]

      module A = Ast.Make (F)
      module FB = F
      module B = Ast.Make (F)
      module U = Ast_utils.Make (F)
      module BVisitors = Ast_visitors.Make (F)
      open A

      module Error = Phase_utils.MakeError (struct
        let ctx = Diagnostics.Context.Phase phase_id
      end)

      (* Derive a spec item's name (e.g. the `Pre`/`Post` variants) from the
         method's identifier. *)
      let mk_name ident kind = Concrete_ident.with_suffix kind ident

      module Attrs = Attr_payloads.Make (F) (Error)

      let ditems (l : A.item list) : B.item list =
        let (module Attrs) = Attrs.with_items l in
        let f' (item : item) : item =
          let v =
            match item.v with
            | Trait { name; generics; items; safety } ->
                (* For each trait method, synthesize `_pre`/`_post`
                   declarations carrying the associated-item attributes of
                   the corresponding role. *)
                let f attrs (item : trait_item) =
                  let mk role kind =
                    let ti_ident = mk_name item.ti_ident kind in
                    {
                      item with
                      ti_ident;
                      ti_attrs =
                        [
                          Attr_payloads.to_attr TraitMethodNoPrePost
                            item.ti_span;
                        ]
                        @ (List.filter
                             ~f:
                               [%matches?
                                 Types.AssociatedItem { role = role'; _ }, _ when 
                                 [%eq: Types.ha_assoc_role] role role']
                             attrs
                          |> List.map ~f:(uncurry Attr_payloads.to_attr));
                    }
                  in
                  match item.ti_v with
                  | TIFn (TArrow (inputs, output)) ->
                      (* `_pre` takes the same inputs and returns bool;
                         `_post` additionally takes the output value. *)
                      [
                        {
                          (mk Types.Requires `Pre) with
                          ti_v = TIFn (TArrow (inputs, TBool));
                        };
                        {
                          (mk Types.Ensures `Post) with
                          ti_v = TIFn (TArrow (inputs @ [ output ], TBool));
                        };
                      ]
                  | TIFn _ -> [ (* REFINEMENTS FOR CONSTANTS? *) ]
                  | TIType _ -> [ (* TODO REFINEMENTS FOR TYPES *) ]
                  | TIDefault _ -> [ (* TODO REFINEMENTS FOR DEFAULT ITEMS *) ]
                in
                let items =
                  List.concat_map
                    ~f:(fun item ->
                      let attrs = Attr_payloads.payloads item.ti_attrs in
                      (* Strip requires/ensures payloads from the original
                         item: they now live on its `_pre`/`_post` items. *)
                      let ti_attrs =
                        attrs
                        |> List.filter
                             ~f:
                               (fst
                               >> [%matches?
                                    Types.AssociatedItem
                                      { role = Ensures | Requires; _ }]
                               >> not)
                        |> List.map ~f:(uncurry Attr_payloads.to_attr)
                      in
                      f attrs item @ [ { item with ti_attrs } ])
                    items
                in
                Trait { name; generics; items; safety }
            | Impl { generics; self_ty; of_trait; items; parent_bounds; safety }
              ->
                let f (item : impl_item) =
                  let mk kind =
                    let ii_ident = mk_name item.ii_ident kind in
                    { item with ii_ident }
                  in
                  (* Trivial spec body: the literal `true`. *)
                  let default =
                    {
                      e = Literal (Bool true);
                      span = item.ii_span;
                      typ = TBool;
                    }
                  in
                  match item.ii_v with
                  | IIFn { params = []; _ } -> []
                  | IIFn { body; params } ->
                      (* We always need to produce a pre and a post
                         condition implementation for each method in
                         the impl. *)
                      [
                        (let params, body =
                           match Attrs.associated_fn Requires item.ii_attrs with
                           | Some (_, params, body) -> (params, body)
                           | None -> (params, default)
                         in
                         { (mk `Pre) with ii_v = IIFn { body; params } });
                        (let params, body =
                           match Attrs.associated_fn Ensures item.ii_attrs with
                           | Some (_, params, body) -> (params, body)
                           | None ->
                               (* There is no explicit post-condition
                                  on this method. We need to define a
                                  trivial one. *)
                               (* Post-condition *always* an extra
                                  argument in final position for the
                                  output. *)
                               let out_ident =
                                 U.fresh_local_ident_in
                                   (U.Reducers.collect_local_idents
                                      #visit_impl_item () item
                                   |> Set.to_list)
                                   "out"
                               in
                               let pat =
                                 U.make_var_pat out_ident body.typ body.span
                               in
                               let typ = body.typ in
                               let out =
                                 { pat; typ; typ_span = None; attrs = [] }
                               in
                               (params @ [ out ], default)
                         in
                         { (mk `Post) with ii_v = IIFn { body; params } });
                      ]
                  | IIType _ -> []
                in
                let items =
                  List.concat_map ~f:(fun item -> f item @ [ item ]) items
                in
                Impl
                  { generics; self_ty; of_trait; items; parent_bounds; safety }
            | v -> v
          in
          { item with v }
        in
        (* Turn span-free errors into explicit error items instead of
           aborting the whole phase. *)
        let f item =
          try f' item
          with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
            let error = Diagnostics.pretty_print_context_kind context kind in
            let msg = error in
            B.make_hax_error_item item.span item.ident msg
        in
        List.map ~f l
    end)


================================================
FILE: engine/lib/phases/phase_traits_specs.mli
================================================
(** This phase adds specification to traits. For each method `f` in a trait, we
    add a `f_pre` and a `f_post`. Impl blocks get matching `Pre`/`Post`
    implementations, defaulting to `true` when no explicit requires/ensures
    attribute is present. *)

module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE


================================================
FILE: engine/lib/phases/phase_transform_hax_lib_inline.ml
================================================
open! Prelude
open! Ast

(** Functor for the `hax_lib::inline` transformation: recognizes calls to
    [hax_lib::inline] / [inline_unsafe] (and whole "quote items") and turns
    them into first-class [Quote] nodes in the output AST. *)
module%inlined_contents Make (F : Features.T) = struct
  module FA = F

  module FB = struct
    include F
    include Features.On.Quote
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)
    module Visitors = Ast_visitors.Make (FB)
    module Attrs = Attr_payloads.Make (F) (Error)

    module S = struct
      module A = FA
      module B = FB
      include Features.SUBTYPE.Id

      let quote _ _ = Features.On.quote
    end

    [%%inline_defs dmutability + dsafety_kind]

    (** Patterns are "stored" in a [match None { Some  => (), _ => () }]
        dummy expression. *)
    let extract_pattern (e : B.expr) : B.pat option =
      match e.e with
      | Block
          {
            e =
              {
                e =
                  Match
                    {
                      arms =
                        [
                          {
                            arm =
                              {
                                arm_pat =
                                  { p = PConstruct { fields = [ arg ]; _ }; _ };
                                _;
                              };
                            _;
                          };
                          _;
                        ];
                      _;
                    };
                _;
              };
            _;
          } ->
          Some arg.pat
      | _ -> None

    (** Extracts the first global_ident found in a pattern *)
    let first_global_ident (pat : B.pat) : global_ident option =
      UB.Reducers.collect_global_idents#visit_pat () pat |> Set.choose

    (* NOTE(review): [counter] is not referenced anywhere in this visible
       chunk; it may be used by ppx-inlined definitions — confirm before
       removing. *)
    let counter = ref 0

    (* Expression translation: first try to interpret the expression as an
       inline quote; otherwise fall back to the inlined default. *)
    let rec dexpr' span (expr : A.expr') : B.expr' =
      quote_of_expr' span expr
      |> Option.map ~f:(fun quote : B.expr' -> B.Quote quote)
      |> Option.value_or_thunk ~default:(fun _ ->
             [%inline_body dexpr'] span expr)

    and quote_of_expr (expr : A.expr) = quote_of_expr' expr.span expr.e

    (* Recognize `hax_lib::inline(payload)` (or `inline_unsafe`): collect the
       payload's let-bindings and its final string, split the string on
       "SPLIT_QUOTE" and interleave the verbatim fragments with the bound
       code fragments (`_constructor`, `_pat`, `_ty`, or plain exprs). *)
    and quote_of_expr' span (expr : A.expr') =
      match expr with
      | App { f = { e = GlobalVar f; _ }; args = [ payload ]; _ }
        when Global_ident.eq_name Hax_lib__inline f
             || Global_ident.eq_name Hax_lib__inline_unsafe f ->
          let bindings, str = dexpr payload |> UB.collect_let_bindings in
          let str =
            match
              UB.Expect.(block >> Option.bind ~f:borrow >> Option.bind ~f:deref)
                str
            with
            | Some { e = Literal (String str); _ } -> str
            | _ ->
                Error.assertion_failure span
                  "Malformed call to 'inline': cannot find string payload."
          in
          let code : B.quote_content list =
            List.map bindings ~f:(fun (pat, e) ->
                (* Dispatch on the binding's name: `_constructor`, `_pat` and
                   `_ty` are markers introduced by hax-lib's quote macros. *)
                match
                  UB.Expect.pbinding_simple pat
                  |> Option.map ~f:(fun ((i, _) : Local_ident.t * _) -> i.name)
                with
                | Some "_constructor" ->
                    let id =
                      extract_pattern e
                      |> Option.bind ~f:first_global_ident
                      |> Option.value_or_thunk ~default:(fun _ ->
                             Error.assertion_failure span
                               "Could not extract pattern (case constructor): \
                                this may be a bug in the quote macros in \
                                hax-lib.")
                    in
                    B.Expr { e with e = GlobalVar id }
                | Some "_pat" ->
                    let pat =
                      extract_pattern e
                      |> Option.value_or_thunk ~default:(fun _ ->
                             Error.assertion_failure span
                               "Could not extract pattern (case pat): this may \
                                be a bug in the quote macros in hax-lib.")
                    in
                    Pattern pat
                | Some "_ty" ->
                    let typ =
                      match pat.typ with
                      | TApp { args = [ GType typ ]; _ } -> typ
                      | _ ->
                          Stdio.prerr_endline @@ "-pat->" ^ [%show: B.pat] pat;
                          Stdio.prerr_endline @@ "-expr->"
                          ^ [%show: B.expr'] e.e;
                          Error.assertion_failure span
                            "Malformed call to 'inline': expected type \
                             `Option<_>`."
                    in
                    Typ typ
                | _ -> Expr e)
          in
          let verbatim = split_str ~on:"SPLIT_QUOTE" str in
          let contents =
            (* Interleave: [verbatim] must have exactly one more element than
               [code]; anything else is a malformed inline call. *)
            let rec f verbatim (code : B.quote_content list) =
              match (verbatim, code) with
              | s :: s', code :: code' -> B.Verbatim s :: code :: f s' code'
              | [ s ], [] -> [ Verbatim s ]
              | [], [] -> []
              | _ ->
                  Error.assertion_failure span
                  @@ "Malformed call to 'inline'." ^ "\nverbatim="
                  ^ [%show: string list] verbatim
                  ^ "\ncode="
                  ^ [%show: B.quote_content list] code
            in
            f verbatim code
          in
          Some { contents; witness = Features.On.quote }
      | _ -> None
    [@@inline_ands bindings_of dexpr - dexpr']

    [%%inline_defs "Item.*" - ditems]

    let ditems items =
      (* Map each associated-item uid to the role it plays and the parent
         item that references it. *)
      let find_parent_item :
          Attr_payloads.UId.t -> (Attr_payloads.AssocRole.t * A.item) option =
        List.concat_map
          ~f:(fun (item : A.item) ->
            Attrs.raw_associated_item item.attrs
            |> List.map ~f:(fun (role, child_uid) -> (child_uid, (role, item))))
          items
        |> Map.of_alist_exn (module Attr_payloads.UId)
        |> Map.find
      in
      (* If [item] can be interpreted as a quote, return a `Quote` item *)
      let item_as_quote (item : A.item) =
        let* body =
          match item.v with
          | Fn { body = { e = Block { e; _ }; _ }; _ } -> Some e
          | _ -> None
        in
        let* uid = Attrs.uid item.attrs in
        let* role, parent = find_parent_item uid in
        let*? () = [%equal: Attr_payloads.AssocRole.t] ItemQuote role in
        (* A late-skipped parent is replaced by the quote altogether. *)
        let replace = Attrs.late_skip parent.attrs in
        let* role =
          Attrs.find_unique_attr
            ~f:(function ItemQuote q -> Some q | _ -> None)
            item.attrs
        in
        let origin : item_quote_origin =
          {
            item_kind = UA.kind_of_item parent;
            item_ident = parent.ident;
            position =
              (if replace then `Replace
               else
                 match role.position with After -> `After | Before -> `Before);
          }
        in
        let quote =
          quote_of_expr body
          |> Option.value_or_thunk ~default:(fun _ ->
                 Error.assertion_failure item.span
                 @@ "Malformed `Quote` item: `quote_of_expr` failed. \
                     Expression was:\n"
                 ^ [%show: A.expr] body)
        in
        let attrs =
          (* Keep the item's attribute payloads except the `late_skip`
             marker. *)
          let is_late_skip =
            [%matches? Types.ItemStatus (Included { late_skip = true })]
          in
          item.attrs |> Attr_payloads.payloads
          |> List.filter ~f:(fst >> is_late_skip >> not)
          |> List.map ~f:(fun (v, span) -> Attr_payloads.to_attr v span)
        in
        let A.{ span; ident; _ } = item in
        Some B.{ v = Quote { quote; origin }; span; ident; attrs }
      in
      (* Wraps [item_as_quote] to handle exns and fallback to the original item if the item is not a quote. *)
      let f i =
        try
          item_as_quote i
          |> Option.map ~f:(fun i -> [ i ])
          |> Option.value ~default:(ditem i)
        with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
          let error = Diagnostics.pretty_print_context_kind context kind in
          let cast_item : A.item -> Ast.Full.item = Stdlib.Obj.magic in
          let ast = cast_item i |> Print_rust.pitem_str in
          let msg = error ^ "\nLast available AST for this item:\n\n" ^ ast in
          [ B.make_hax_error_item i.span i.ident msg ]
      in
      List.concat_map ~f items
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_transform_hax_lib_inline.mli
================================================
(** This phase transforms nodes like:
    {@rust[
      hax_lib::inline({
        let _KIND = ...;
        ...
        let _KIND = ...;
        "payload"
      })
    ]}

    into [hax_lib::inline("payload'")] where [payload'] is a string with all the
    binding names substituted.

    Note: above `_KIND` can be `_expr`, `_pat` or `_constructor`. *)

(* Public signature of the phase: a functor from the input feature set [F]
   to a phase implementation. *)
module Make (F : Features.T) : sig
  include module type of struct
    (* Output feature set: identical to the input, plus the [Quote] feature
       that this phase introduces. *)
    module FB = struct
      include F
      include Features.On.Quote
    end

    (* [A] is the input AST (over [F]); [B] is the output AST (over [FB]). *)
    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
    module FA = F
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases/phase_trivialize_assign_lhs.ml
================================================
open! Prelude

(* Phase that rewrites assignments whose left-hand side is non-trivial
   (nested field/index accessors, vec derefs) into assignments to a plain
   local variable, whose right-hand side functionally reconstructs the
   updated value. *)
module%inlined_contents Make (F : Features.T) = struct
  open Ast
  module FA = F

  (* Output feature set: non-trivial LHS is switched off; base-struct
     construction ([..base]) and slices are switched on, as both are used by
     the rewrite below. *)
  module FB = struct
    include F
    include Features.Off.Nontrivial_lhs
    include Features.On.Construct_base
    include Features.On.Slice
  end

  include
    Phase_utils.MakeBase (F) (FB)
      (struct
        let phase_id = [%auto_phase_name auto]
      end)

  module Implem : ImplemT.T = struct
    let metadata = metadata

    module S = struct
      include Features.SUBTYPE.Id
      include Features.SUBTYPE.On.Construct_base
      include Features.SUBTYPE.On.Slice
    end

    module UA = Ast_utils.Make (F)
    module UB = Ast_utils.Make (FB)

    [%%inline_defs dmutability + dsafety_kind]

    (* Given an LHS [lhs] being assigned the value [rhs], returns
       [((var, typ), rhs')]: the local variable ultimately assigned, together
       with the expression computing its whole new value.  Recurses through
       each accessor layer, wrapping [rhs] at every step. *)
    let rec updater_of_lhs (lhs : A.lhs) (rhs : B.expr) (span : span) :
        (Local_ident.t * B.ty) * B.expr =
      match lhs with
      | LhsLocalVar { var; typ } -> ((var, dty span typ), rhs)
      | LhsVecRef { e; _ } -> updater_of_lhs e rhs span
      | LhsFieldAccessor { e; field; _ } -> (
          let lhs = UA.expr_of_lhs span e |> dexpr in
          match lhs.typ with
          | TApp { ident; _ } ->
              (* Rebuild the whole struct with the single field replaced,
                 using functional-update ([..base]) construction. *)
              let rhs =
                UB.M.expr_Construct ~constructor:ident
                  ~is_record:true (* TODO: might not be, actually *)
                  ~is_struct:true
                  ~fields:[ (field, rhs) ]
                  ~base:(Some (lhs, Features.On.construct_base))
                  ~span ~typ:lhs.typ
              in
              updater_of_lhs e rhs span
          | _ -> Error.raise { kind = ArbitraryLHS; span })
      | LhsArrayAccessor { e; typ = _; index; _ } ->
          let lhs = UA.expr_of_lhs span e |> dexpr in
          (* Pick the most precise [update_at] helper, based on the container
             type (slice/array/Vec) and the index type (usize, or one of the
             range types). *)
          let update_at : Concrete_ident.name =
            let is_array_slice_or_vec =
              match lhs.typ with
              | TSlice _ | TArray _ -> true
              | TApp { ident; _ } -> Global_ident.eq_name Alloc__vec__Vec ident
              | _ -> false
            in
            if is_array_slice_or_vec then
              (* Indexing through a reference: look at the pointee type. *)
              let index_typ =
                match index.typ with TRef { typ; _ } -> typ | _ -> index.typ
              in
              match index_typ with
              | TInt { size = SSize; signedness = Unsigned } ->
                  Rust_primitives__hax__monomorphized_update_at__update_at_usize
              | TApp { ident; _ }
                when Global_ident.eq_name Core__ops__range__Range ident ->
                  Rust_primitives__hax__monomorphized_update_at__update_at_range
              | TApp { ident; _ }
                when Global_ident.eq_name Core__ops__range__RangeFrom ident ->
                  Rust_primitives__hax__monomorphized_update_at__update_at_range_from
              | TApp { ident; _ }
                when Global_ident.eq_name Core__ops__range__RangeTo ident ->
                  Rust_primitives__hax__monomorphized_update_at__update_at_range_to
              | TApp { ident; _ }
                when Global_ident.eq_name Core__ops__range__RangeFull ident ->
                  Rust_primitives__hax__monomorphized_update_at__update_at_range_full
              | _ -> Rust_primitives__hax__update_at
            else Rust_primitives__hax__update_at
          in
          (* For [Vec<T>], go through a slice view ([as_slice]/[to_vec]) so
             that the update helpers apply. *)
          let vec_elem_type =
            match lhs.typ with
            | TApp { ident; args = [ GType inner; _ ] }
              when Global_ident.eq_name Alloc__vec__Vec ident ->
                Some inner
            | _ -> None
          in
          let vec_typ = lhs.typ in
          let lhs =
            match vec_elem_type with
            | Some ty ->
                UB.call Alloc__vec__Impl_1__as_slice [ lhs ] span
                  (TSlice { witness = Features.On.slice; ty })
            | None -> lhs
          in
          let rhs = UB.call update_at [ lhs; dexpr index; rhs ] span lhs.typ in
          let rhs =
            if Option.is_some vec_elem_type then
              UB.call Alloc__slice__Impl__to_vec [ rhs ] span vec_typ
            else rhs
          in

          updater_of_lhs e rhs span
      | LhsArbitraryExpr _ -> Error.raise { kind = ArbitraryLHS; span }

    (* Rewrites [Assign] nodes using [updater_of_lhs]; every other expression
       form is handled by the inlined default traversal. *)
    and dexpr_unwrapped (expr : A.expr) : B.expr =
      let span = expr.span in
      match expr.e with
      | Assign { lhs; e; witness } ->
          let (var, typ), inner_e = updater_of_lhs lhs (dexpr e) span in
          let lhs : B.lhs = LhsLocalVar { var; typ } in
          UB.M.expr_Assign ~lhs ~inner_e ~witness ~span ~typ:UB.unit_typ
      | [%inline_arms "dexpr'.*" - Assign] ->
          map (fun e -> B.{ e; typ = dty span expr.typ; span })
    [@@inline_ands bindings_of dexpr - dlhs - dexpr']

    [%%inline_defs "Item.*"]
  end

  include Implem
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/phases/phase_trivialize_assign_lhs.mli
================================================
(* Signature of the LHS-trivialization phase: rewrites assignments with
   nested accessors on the left-hand side into assignments to plain local
   variables. *)
module Make (F : Features.T) : sig
  include module type of struct
    module FA = F

    (* Output feature set: non-trivial LHS off; base-struct construction and
       slices on. *)
    module FB = struct
      include F
      include Features.Off.Nontrivial_lhs
      include Features.On.Construct_base
      include Features.On.Slice
    end

    module A = Ast.Make (F)
    module B = Ast.Make (FB)
    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)
  end

  include ImplemT.T
end


================================================
FILE: engine/lib/phases.ml
================================================
(* Expanded by the [phases_index] ppx — presumably generates aliases for the
   phase modules; confirm against the engine's ppx rules. *)
[%%phases_index ()]

(* [Reject] phases are grouped in their own module. *)
module Reject = Phase_reject


================================================
FILE: engine/lib/prelude.ml
================================================
(* Common prelude opened throughout the engine: Base's stdlib replacement,
   local utilities, and yojson conversion primitives for [@@deriving yojson]. *)
include Base
include Utils
include Ppx_yojson_conv_lib.Yojson_conv.Primitives


================================================
FILE: engine/lib/print_rust.ml
================================================
open! Prelude
open Ast
open Ast.Full

(* Name-rendering view for the Rust printer: the default naming policy,
   except that anonymous (tuple-struct) fields get a "_" prefix so they are
   valid identifiers. *)
module View = struct
  include Concrete_ident.MakeRenderAPI (struct
    include Concrete_ident.DefaultNamePolicy

    let anonymous_field_transform field = "_" ^ field
  end)

  (* Renders only the last (definition) name of an identifier. *)
  let to_definition_name id = (render id).name
end

(* Strings annotated with spans: a piece of output text is a list of
   [(span, string)] chunks, so that each chunk remembers which AST node it
   was printed from. *)
module AnnotatedString = struct
  module T = struct
    type t = (span * string) list [@@deriving show]

    let empty : t = []
    let append : t -> t -> t = List.append

    (* Concatenates annotated strings, optionally interspersing [sep]. *)
    let concat ?(sep : t option) (l : t list) : t =
      List.concat
      @@ match sep with None -> l | Some (sep : t) -> List.intersperse ~sep l

    (* A single chunk: the string [s] annotated with span [meta]. *)
    let pure : span -> string -> t = fun meta s -> [ (meta, s) ]
    let ( & ) = append
    let to_string = List.map ~f:snd >> String.concat ~sep:""

    (* A token is a run of whitespace or a single non-alphanumeric
       character; everything else (identifier-like runs) stays whole. *)
    let split_re = Re.Pcre.regexp "[\t\n ]+|[^A-Za-z0-9_]"

    (* Splits a plain string into non-empty tokens according to [split_re]. *)
    let split =
      let open Re.Pcre in
      full_split ~rex:split_re
      >> List.concat_map ~f:(function
           | (Text s | Delim s | Group (_, s)) when not (String.is_empty s) ->
               [ s ]
           | _ -> [])

    (* Re-chunks an annotated string so that every chunk is a single token,
       each inheriting the span of the chunk it came from. *)
    let tokenize : t -> t =
      List.concat_map ~f:(fun (span, s) -> split s |> List.map ~f:(tup2 span))
  end

  include T

  (* Serializable output form: the full string plus a map of
     [(chunk length, span id, chunk text)] entries. *)
  module Output = struct
    type t = { string : string; map : (int * int * string) list }
    [@@deriving show, yojson]

    let convert (v : T.t) : t =
      (* let annotations, map = *)
      let map =
        List.map v ~f:(fun (span, s) -> (String.length s, Span.id_of span, s))
      in
      (*   List.fold v ~init:([], []) ~f:(fun (annotations, acc) (span, s) -> *)
      (*       let len = String.length s in *)
      (*       let i, annotations = *)
      (*         match List.findi ~f:(Fn.const @@ equal_span span) annotations with *)
      (*         | Some (i, _) -> (i, annotations) *)
      (*         | None -> (List.length annotations, annotations @ [ span ]) *)
      (*       in *)
      (*       (annotations, (len, i) :: acc)) *)
      (* in *)
      { map; string = T.to_string v }

    let raw_string : t -> string = fun { string; _ } -> string
  end
end

(* Whether [s] matches [rex]; any exception raised by the regex engine is
   treated as "no match". *)
let re_matches rex (s : string) : bool =
  match Re.Pcre.pmatch ~rex s with
  | matched -> matched
  | exception _ -> false

(* Raw Rust printer: renders AST nodes into span-annotated, Rust-like source
   text.  The output is approximate — constructs without direct Rust syntax
   are emitted as made-up macros (e.g. [arrow!], [dyn(...)]) — and may later
   be reflowed via rustfmt (see [rustfmt_annotated] below in this file). *)
module Raw = struct
  open AnnotatedString

  (* Prints a literal. *)
  let pliteral span (e : literal) : AnnotatedString.t =
    let pnegative = function true -> "-" | _ -> "" in
    pure span
    @@
    match e with
    | String s -> "\"" ^ String.escaped s ^ "\""
    | Char c -> "'" ^ Char.to_string c ^ "'"
    | Int { value; _ } -> value
    | Float { value; kind; negative } ->
        pnegative negative ^ value ^ show_float_kind kind
    | Bool b -> Bool.to_string b

  let pprimitive_ident span : _ -> AnnotatedString.t =
    pure span << function
    | Deref -> "deref"
    | Cast -> "cast"
    | LogicalOp op -> "BinOp::" ^ [%show: logical_op] op

  (* Prints a global identifier, prepending [prefix] to the rendered name
     (projectors recurse with a "proj_" prefix). *)
  let rec pglobal_ident' prefix span (e : global_ident) : AnnotatedString.t =
    let ( ! ) s = pure span (prefix ^ s) in
    match e with
    | `Concrete c ->
        !(let s = View.show c in
          if String.equal "_" s then "_anon" else s)
    | `Primitive p -> pprimitive_ident span p
    | `TupleType n -> ![%string "tuple%{Int.to_string n}"]
    | `TupleCons n -> ![%string "Tuple%{Int.to_string n}"]
    | `TupleField (n, _) -> ![%string "proj_tuple%{Int.to_string n}"]
    | `Projector o -> pglobal_ident' "proj_" span (o :> global_ident)

  let pglobal_ident = pglobal_ident' ""

  (* Prints a local identifier, sanitizing names that are not valid Rust
     identifiers: "impl ..." names become "impl_..." with non-letter
     characters replaced by '_', and a bare "_" becomes "_anon". *)
  let plocal_ident span (e : Local_ident.t) : AnnotatedString.t =
    let name =
      match String.chop_prefix ~prefix:"impl " e.name with
      | Some name ->
          "impl_"
          ^ String.map
              ~f:(function
                | 'a' .. 'z' as letter -> letter
                | 'A' .. 'Z' as letter -> letter
                | _ -> '_')
              name
      | _ -> e.name
    in
    let name = if String.equal name "_" then "_anon" else name in
    pure span name

  let dmutability span : _ -> AnnotatedString.t =
    pure span << function Mutable _ -> "mut " | _ -> ""

  let dbinding_mode span =
    pure span << function ByValue -> "" | ByRef _ -> "&"

  let pborrow_kind span = pure span << function Mut _ -> "mut " | _ -> ""

  (* Last path segment of a concrete (or projector) global identifier.
     Other identifier kinds report a diagnostic and yield a placeholder. *)
  let rec last_of_global_ident (g : global_ident) span =
    match g with
    | `Concrete c -> View.to_definition_name c
    | `Projector c -> last_of_global_ident (c :> global_ident) span
    | _ ->
        Diagnostics.report
          {
            context = DebugPrintRust;
            kind =
              AssertionFailure
                {
                  details =
                    "[last_of_global_ident] was given a non-concrete global \
                     ident";
                };
            span = Span.to_thir span;
            owner_id = Span.owner_hint span;
          };
        "print_rust_last_of_global_ident_error"

  (* Prints a type. *)
  let rec pty span (e : ty) =
    let ( ! ) = pure span in
    match e with
    | TBool -> !"bool"
    | TChar -> !"char"
    | TInt _k -> !"int"
    | TFloat _k -> !"float"
    | TStr -> !"String"
    | TApp { ident; args = [] } -> pglobal_ident span ident
    | TApp { ident; args } ->
        let args : AnnotatedString.t =
          List.map ~f:(pgeneric_value span) args |> concat ~sep:!", "
        in
        pglobal_ident span ident & !"<" & args & !">"
    | TArray { typ; length } -> !"[" & pty span typ & !";" & pexpr length & !"]"
    | TSlice { ty; _ } -> !"[" & pty span ty & !"]"
    | TRawPointer _ -> !"raw_pointer!()"
    | TRef { typ; mut; _ } -> !"&" & dmutability span mut & pty span typ
    | TParam i -> plocal_ident span i
    | TArrow (inputs, output) ->
        let arrow =
          List.map ~f:(pty span) (inputs @ [ output ]) |> concat ~sep:!" -> "
        in
        !"arrow!(" & arrow & !")"
    | TAssociatedType _ -> !"proj_asso_type!()"
    | TOpaque ident -> !(View.show ident)
    | TDyn { goals; _ } ->
        let goals =
          concat ~sep:!" + " (List.map ~f:(pdyn_trait_goal span) goals)
        in
        !"dyn(" & goals & !")"

  (* Prints one goal of a [dyn] type: trait name plus non-self arguments. *)
  and pdyn_trait_goal span { trait; non_self_args } =
    let ( ! ) = pure span in
    let args =
      List.map ~f:(pgeneric_value span) non_self_args |> concat ~sep:!", "
    in
    !(View.show trait)
    & if List.is_empty args then empty else !"<" & args & !">"

  and pgeneric_value span (e : generic_value) : AnnotatedString.t =
    match e with
    | GLifetime _ -> pure span "lifetime!(something)"
    | GType t -> pty span t
    | _ -> pure span "generic_value!(todo)"

  (* Prints a pattern. *)
  and ppat (e : pat) =
    let ( ! ) = pure e.span in
    match e.p with
    | PWild -> !"_"
    | PAscription { typ; pat; _ } ->
        !"pat_ascription!(" & ppat pat & !" as " & pty e.span typ & !")"
    | PConstruct { constructor; fields; is_record; _ } ->
        pglobal_ident e.span constructor
        &
        if List.is_empty fields then !""
        else if is_record then
          !"{"
          & concat ~sep:!", "
              (List.map
                 ~f:(fun { field; pat } ->
                   !(last_of_global_ident field e.span) & !":" & ppat pat)
                 fields)
          & !"}"
        else
          !"("
          & concat ~sep:!", " (List.map ~f:(fun { pat; _ } -> ppat pat) fields)
          & !")"
    | POr { subpats } -> concat ~sep:!" | " (List.map ~f:ppat subpats)
    | PArray { args } -> !"[" & concat ~sep:!"," (List.map ~f:ppat args) & !"]"
    | PDeref { subpat; _ } -> !"&" & ppat subpat
    | PConstant { lit } -> pliteral e.span lit
    | PBinding { mut; mode; var; typ = _; subpat } ->
        let subpat =
          match subpat with Some (p, _) -> !" @ " & ppat p | None -> !""
        in
        dbinding_mode e.span mode & dmutability e.span mut
        & plocal_ident e.span var & subpat

  and psupported_monads span m =
    let ( ! ) = pure span in
    match m with
    | MException t -> !"MException<" & pty span t & !">"
    | MResult t -> !"MResult<" & pty span t & !">"
    | MOption -> !"MOption"

  (* Prints a quote (embedded backend code interleaved with AST fragments)
     as a [quote!(...)] pseudo-macro. *)
  and pquote span quote =
    let ( ! ) = pure span in
    !"quote!("
    & List.map
        ~f:(function
          | Verbatim code -> !code
          | Expr e -> pexpr e
          | Pattern p -> ppat p
          | Typ t -> pty span t)
        quote.contents
      |> concat ~sep:!""
    & !")"

  (* Prints an expression, without the extra braces that [pexpr] may add. *)
  and pexpr' (e : expr) =
    let ( ! ) = pure e.span in
    match e.e with
    | If { cond; then_; else_ } ->
        let else_ =
          match else_ with Some e -> !" else {" & pexpr e & !"}" | None -> !""
        in
        !"(" & !"if " & pexpr cond & !"{" & pexpr then_ & !"}" & else_ & !")"
    | App { f; args; generic_args; _ } ->
        let args = concat ~sep:!"," @@ List.map ~f:pexpr args in
        let generic_args =
          let f = pgeneric_value e.span in
          if List.is_empty generic_args then !""
          else !"::<" & (concat ~sep:!"," @@ List.map ~f generic_args) & !">"
        in
        pexpr f & generic_args & !"(" & args & !")"
    | Literal l -> pliteral e.span l
    | Block { e; safety_mode; _ } -> (
        let e = !"{" & pexpr e & !"}" in
        match safety_mode with Safe -> e | Unsafe _ -> !"unsafe " & e)
    | Array l -> !"[" & concat ~sep:!"," (List.map ~f:pexpr l) & !"]"
    | Construct { is_record = false; constructor; fields; _ } ->
        let fields = List.map ~f:(snd >> pexpr) fields |> concat ~sep:!"," in
        pglobal_ident e.span constructor & !"(" & fields & !")"
    | Construct { is_record = true; constructor; fields; base; _ } ->
        let fields =
          List.map
            ~f:(fun (field, value) ->
              !(last_of_global_ident field e.span) & !":" & pexpr value)
            fields
          |> concat ~sep:!","
        in
        let base =
          match base with
          | Some (base, _) -> !"..(" & pexpr base & !")"
          | _ -> !""
        in
        pglobal_ident e.span constructor & !"{" & fields & !"," & base & !"}"
    | Match { scrutinee; arms } ->
        let arms =
          List.map
            ~f:(fun { arm = { arm_pat; body; guard }; _ } ->
              let guard : t =
                guard
                |> Option.map
                     ~f:
                       (fun { guard = IfLet { lhs; rhs; _ }; _ } ->
                          !" if let " & ppat lhs & !" = " & pexpr rhs
                         : guard -> t)
                |> Option.value ~default:!""
              in
              ppat arm_pat & guard & !" => {" & pexpr body & !"}")
            arms
          |> concat ~sep:!","
        in
        !"(match (" & pexpr scrutinee & !") {" & arms & !"})"
    (* | Let { monadic = Some _; _ } -> !"monadic_let!()" *)
    | Let { monadic; lhs; rhs; body } ->
        (* TODO: here, [rhs.typ]! *)
        (* When the lhs and rhs types disagree, annotate the binding with the
           rhs type as a [#[note(...)]]. *)
        let lhs_typ = pty lhs.span lhs.typ in
        let rhs_typ = pty rhs.span rhs.typ in
        let note =
          if String.equal (to_string lhs_typ) (to_string rhs_typ) then !""
          else !"#[note(\"rhs.typ=" & rhs_typ & !"\")]\n"
        in
        let monadic =
          match monadic with
          | Some (m, _) ->
              !"#[monadic_let(" & psupported_monads e.span m & !")]"
          | _ -> !""
        in
        note & monadic & !"let " & ppat lhs & !": " & lhs_typ & !" = {"
        & pexpr rhs & !"};" & pexpr body
    | LocalVar local_ident -> plocal_ident e.span local_ident
    | GlobalVar global_ident -> pglobal_ident e.span global_ident
    | Ascription { e = e'; typ } ->
        !"(" & pexpr e' & !" as " & pty e.span typ & !")"
    | MacroInvokation { macro; args; _ } ->
        pglobal_ident e.span macro & !"!(" & !args & !")"
    | Assign { lhs; e; _ } -> !"(" & plhs lhs e.span & !" = " & pexpr e & !")"
    | Loop { body; kind; state; _ } -> (
        let header =
          match kind with
          | UnconditionalLoop -> !"loop"
          | WhileLoop { condition; _ } -> !"while " & pexpr condition
          | ForLoop { it; pat; _ } ->
              !"for " & ppat pat & !" in (" & pexpr it & !")"
          | ForIndexLoop { start; end_; var; _ } ->
              !"for " & plocal_ident e.span var & !" in (" & pexpr start
              & !")..(" & pexpr end_ & !")"
        in
        (* Loops carrying state are printed as a closure over the state
           pattern, immediately applied to the initial state. *)
        let body_wrapper body =
          match state with
          | Some { bpat; _ } -> !"|" & ppat bpat & !"| {" & body & !"}"
          | None -> body
        in
        let main = header & !" { " & body_wrapper (pexpr body) & !" }" in
        match state with
        | Some { init; _ } -> !"(" & main & !")(" & pexpr init & !")"
        | None -> main)
    | Break { e; _ } -> !"(break (" & pexpr e & !"))"
    | Continue { acc = None; _ } -> !"continue"
    | Continue { acc = Some (e, _); _ } ->
        !"state_passing_continue!(" & pexpr e & !")"
    | Return { e; _ } -> !"(return " & pexpr e & !")"
    | QuestionMark { e; _ } -> !"(" & pexpr e & !")?"
    | Borrow { kind; e; _ } ->
        !"&" & pborrow_kind e.span kind & !"(" & pexpr e & !")"
    | AddressOf _ -> !"address_of"
    | EffectAction _ -> !"EffectAction"
    | Closure { params; body; _ } ->
        let params = List.map ~f:ppat params |> concat ~sep:!"," in
        !"(|" & params & !"| {" & pexpr body & !"})"
    | Quote quote -> pquote e.span quote
  (* | _ -> "todo!()" *)

  (* Prints the left-hand side of an assignment. *)
  and plhs (e : lhs) span =
    let ( ! ) = pure span in
    match e with
    | LhsFieldAccessor { e; field; _ } ->
        let field =
          match field with
          | `Projector field -> (field :> global_ident)
          | _ -> field
        in
        plhs e span & !"." & !(last_of_global_ident field span)
    | LhsArrayAccessor { e; index; _ } ->
        plhs e span & !"[" & pexpr index & !"]"
    | LhsLocalVar { var; _ } -> plocal_ident span var
    | LhsVecRef { e; _ } -> plhs e span
    | LhsArbitraryExpr { e; _ } -> pexpr e

  (* Prints an expression, wrapping [Let] and [Loop] nodes in braces. *)
  and pexpr (e : expr) =
    let ( ! ) = pure e.span in
    let need_braces = [%matches? Let _ | Loop _] e.e in
    let e = pexpr' e in
    if need_braces then !"{" & e & !"}" else e

  (* Prints an attribute: tool attributes as [#[path(tokens)]], doc comments
     as [/** body */]. *)
  let pattr (attr : attr) =
    let ( ! ) = pure attr.span in
    match attr.kind with
    | Tool { path; tokens } -> !"#[" & !path & !"(" & !tokens & !")" & !"]"
    | DocComment { kind = _; body } -> !"/**" & !body & !"*/"

  let pattrs attrs = List.map ~f:pattr attrs |> concat

  (* Returns the [(prefix, suffix)] pair surrounding a generic parameter's
     name, depending on its kind. *)
  let pgeneric_param_kind span (pk : generic_param_kind) =
    let ( ! ) = pure span in
    match pk with
    | GPLifetime _ -> (empty, !": 'unk")
    | GPType -> (empty, empty)
    | GPConst { typ } -> (!"const ", !":" & pty span typ)

  (* Prints one generic parameter, renaming "_" and "Self" to valid names. *)
  let pgeneric_param (p : generic_param) =
    let prefix, suffix = pgeneric_param_kind p.span p.kind in
    let name =
      match p.ident.name with
      | "_" -> "Anonymous"
      | "Self" -> "Self_"
      | name -> name
    in
    let id = plocal_ident p.span { p.ident with name } in
    pattrs p.attrs & prefix & id & suffix

  (* Prints a [<...>] generic-parameter list (empty output for no params). *)
  let pgeneric_params (pl : generic_param list) =
    match pl with
    | { span; _ } :: _ ->
        let ( ! ) = pure span in
        !"<" & concat ~sep:!", " (List.map ~f:pgeneric_param pl) & !">"
    | _ -> empty

  let ptrait_goal span { trait; args } =
    let ( ! ) = pure span in
    let args = List.map ~f:(pgeneric_value span) args |> concat ~sep:!", " in
    !(View.show trait)
    & if List.is_empty args then empty else !"<" & args & !">"

  (* Prints a projection predicate as [Self: Trait<Assoc = Ty>], where
     [Self] is the first type argument of the impl's goal. *)
  let pprojection_predicate span (pp : projection_predicate) =
    let ( ! ) = pure span in
    pp.impl.goal.args
    |> List.find_map ~f:(function GType ty -> Some ty | _ -> None)
    |> Option.map ~f:(pty span)
    |> Option.value ~default:!"unknown_self"
    & !" :"
    & !(View.show pp.impl.goal.trait)
    & !"<"
    & !(View.to_definition_name pp.assoc_item)
    & !" = " & pty span pp.typ & !">"

  let pgeneric_constraint span (p : generic_constraint) =
    let ( ! ) = pure span in
    match p with
    | GCLifetime _ -> !"'unk: 'unk"
    | GCType { goal; _ } -> !"_: " & ptrait_goal span goal
    | GCProjection pp -> pprojection_predicate span pp

  (* Prints a [where ...] clause (empty output for no constraints). *)
  let pgeneric_constraints span (constraints : generic_constraint list) =
    if List.is_empty constraints then empty
    else
      let ( ! ) = pure span in
      !" where "
      & concat ~sep:!"," (List.map ~f:(pgeneric_constraint span) constraints)

  (* Prints a variant body: braces for records, parentheses otherwise. *)
  let pvariant_body span { name = _; arguments; attrs = _; is_record } =
    let ( ! ) = pure span in
    if is_record then
      !"{"
      & concat ~sep:!","
          (List.map arguments ~f:(fun (id, ty, attrs) ->
               pattrs attrs & !(View.to_definition_name id) & !":" & pty span ty))
      & !"}"
    else
      !"("
      & concat ~sep:!","
          (List.map arguments ~f:(fun (_, ty, attrs) ->
               pattrs attrs & pty span ty))
      & !")"

  let pvariant span (variant : variant) =
    let ( ! ) = pure span in
    pattrs variant.attrs
    & !(View.to_definition_name variant.name)
    & pvariant_body span variant

  let pvariants span variants =
    let ( ! ) = pure span in
    concat ~sep:!", " (List.map ~f:(pvariant span) variants)

  (* Prints a function parameter as [pat: ty]. *)
  let pparam span ({ pat; typ; typ_span; attrs } : param) =
    let ( ! ) = pure span in
    pattrs attrs & ppat pat & !": "
    & pty (Option.value ~default:pat.span typ_span) typ

  let pparams span (l : param list) =
    let ( ! ) = pure span in
    !"(" & List.map ~f:(pparam span) l |> concat ~sep:!"," & !")"

  (* Prints a trait item: associated type, method signature, or method with
     a default body. *)
  let ptrait_item (ti : trait_item) =
    let ( ! ) = pure ti.ti_span in
    let generics = pgeneric_params ti.ti_generics.params in
    let bounds = pgeneric_constraints ti.ti_span ti.ti_generics.constraints in
    let ident = !(View.to_definition_name ti.ti_ident) in
    pattrs ti.ti_attrs
    &
    match ti.ti_v with
    | TIType _ -> !"type " & ident & !": TodoPrintRustBoundsTyp;"
    | TIFn ty ->
        let inputs, output =
          match ty with
          | TArrow (inputs, output) -> (inputs, output)
          | ty -> ([], ty)
        in
        let return_type = pty ti.ti_span output in
        let params =
          List.map ~f:(fun typ -> !"_: " & pty ti.ti_span typ) inputs
          |> concat ~sep:!","
        in
        !"fn " & ident & generics & !"(" & params & !") -> " & return_type
        & bounds & !";"
    | TIDefault { params; body; _ } ->
        let params = pparams ti.ti_span params in
        let generics_constraints =
          pgeneric_constraints ti.ti_span ti.ti_generics.constraints
        in
        let return_type = pty ti.ti_span body.typ in
        let body = pexpr body in
        !"fn " & ident & generics & !"(" & params & !") -> " & return_type
        & generics_constraints & !"{" & body & !"}"

  (* Prints an impl item: associated type or method. *)
  let pimpl_item (ii : impl_item) =
    let span = ii.ii_span in
    let ( ! ) = pure span in
    let generics = pgeneric_params ii.ii_generics.params in
    let bounds = pgeneric_constraints span ii.ii_generics.constraints in
    let ident = !(View.to_definition_name ii.ii_ident) in
    pattrs ii.ii_attrs
    &
    match ii.ii_v with
    | IIType _ -> !"type " & ident & !": TodoPrintRustBoundsTyp;"
    | IIFn { body; params } ->
        let return_type = pty span body.typ in
        !"fn " & ident & generics & pparams span params & !" -> " & return_type
        & bounds & !"{" & pexpr body & !"}"

  (* Prints a top-level item.  Unsupported item kinds fall back to a comment
     plus a [const _: () = ();] stub. *)
  let pitem (e : item) =
    let exception NotImplemented in
    let ( ! ) = pure e.span in
    try
      let pi =
        match e.v with
        | Fn { name; body; generics; params; safety } ->
            let return_type = pty e.span body.typ in
            (match safety with Safe -> !"fn " | Unsafe _ -> !"unsafe fn ")
            & !(View.to_definition_name name)
            & pgeneric_params generics.params
            & pparams e.span params & !" -> " & return_type
            & pgeneric_constraints e.span generics.constraints
            & !"{" & pexpr body & !"}"
        | TyAlias { name; generics; ty } ->
            !"type "
            & !(View.to_definition_name name)
            & pgeneric_params generics.params
            & pgeneric_constraints e.span generics.constraints
            & !"=" & pty e.span ty & !";"
        | Type { name; generics; variants = [ variant ]; is_struct = true } ->
            !"struct "
            & !(View.to_definition_name name)
            & pgeneric_params generics.params
            & pgeneric_constraints e.span generics.constraints
            & pvariant_body e.span variant
            & if variant.is_record then !"" else !";"
        | Type { name; generics; variants; _ } ->
            !"enum "
            & !(View.to_definition_name name)
            & pgeneric_params generics.params
            & pgeneric_constraints e.span generics.constraints
            &
            if List.is_empty variants then empty
            else !"{" & pvariants e.span variants & !"}"
        | Trait { name; generics; items; safety } ->
            let safety =
              match safety with Safe -> !"" | Unsafe _ -> !"unsafe "
            in
            safety & !"trait "
            & !(View.to_definition_name name)
            & pgeneric_params generics.params
            & pgeneric_constraints e.span generics.constraints
            & !"{"
            & List.map ~f:ptrait_item items |> concat ~sep:!"\n"
            & !"}"
        | Impl { generics; self_ty; of_trait; items; parent_bounds = _; safety }
          ->
            let trait =
              pglobal_ident e.span (`Concrete (fst of_trait))
              & !"<"
              & concat ~sep:!","
                  (List.map ~f:(pgeneric_value e.span) (snd of_trait))
              & !">"
            in
            let safety =
              match safety with Safe -> !"" | Unsafe _ -> !"unsafe "
            in
            safety & !"impl "
            & pgeneric_params generics.params
            & trait & !" for " & pty e.span self_ty
            & pgeneric_constraints e.span generics.constraints
            & !"{"
            & List.map ~f:pimpl_item items |> concat ~sep:!"\n"
            & !"}"
        | Quote { quote; _ } -> pquote e.span quote & !";"
        | _ -> raise NotImplemented
      in
      pattrs e.attrs & pi
    with NotImplemented ->
      !("\n/** print_rust: pitem: not implemented  (item: "
       ^ [%show: concrete_ident] e.ident
       ^ ") */\nconst _: () = ();\n")
end

(* Asks the hax driver (over the [Hax_io] channel) to format [s] with
   rustfmt.  On failure, prints a warning banner on stderr and returns the
   input unchanged. *)
let rustfmt (s : string) : string =
  match
    Hax_io.request (PrettyPrintRust s) ~expected:"PrettyPrintedRust" (function
      | Types.PrettyPrintedRust s -> Some s
      | _ -> None)
  with
  | Ok formatted -> formatted
  | Err error ->
      let err =
        [%string
          "\n\n\
           #######################################################\n\
           ########### WARNING: Failed formatting ###########\n\
           %{error}\n\
           STRING:\n\
           %{s}\n\
           #######################################################\n"]
      in
      Stdio.prerr_endline err;
      s

exception RetokenizationFailure

(* Reformats the annotated string [x] via rustfmt, then re-attaches the
   original span annotations to the formatted text.  Both the original and
   the formatted string are tokenized; each rustfmt token is matched against
   the original token stream to recover its span.  Symbol tokens (matches of
   [split_re]) may be dropped or introduced by rustfmt and are tolerated; a
   mismatch on any other token raises [RetokenizationFailure]. *)
let rustfmt_annotated' (x : AnnotatedString.t) : AnnotatedString.t =
  let original = AnnotatedString.tokenize x in
  let tokens = AnnotatedString.(to_string x |> rustfmt |> split) in
  let is_symbol = re_matches AnnotatedString.split_re in
  let all_symbol = List.for_all ~f:(snd >> is_symbol) in
  let f (original, result) s =
    (* Span of the most recently emitted token, used as a fallback when [s]
       cannot be found in the remaining original stream. *)
    let last =
      List.hd result |> Option.map ~f:fst
      |> Option.value_or_thunk ~default:Span.dummy
    in
    let original', tuple =
      match List.split_while ~f:(snd >> String.equal s >> not) original with
      | prev, (span, s') :: original' ->
          assert (String.equal s s');
          if all_symbol prev then
            (* it is fine to skip symbols *)
            (original', (span, s))
          else if is_symbol s then
            (* if [s] is a symbol as well, this is fine *)
            (original, (Span.dummy (), s))
          else (
            Stdio.prerr_endline @@ "\n##### RUSTFMT TOKEN ERROR #####";
            Stdio.prerr_endline @@ "s=" ^ s;
            raise RetokenizationFailure)
      | _ -> (original, (last, s))
    in
    (original', tuple :: result)
  in
  let r = snd @@ List.fold_left tokens ~init:(original, []) ~f in
  List.rev r

(* Formats [x] with rustfmt unless the [HAX_RUSTFMT] environment variable is
   set to ["no"]; if retokenization fails, returns the input unformatted. *)
let rustfmt_annotated (x : AnnotatedString.t) : AnnotatedString.t =
  match Sys.getenv "HAX_RUSTFMT" with
  | Some "no" -> x
  | _ -> ( try rustfmt_annotated' x with RetokenizationFailure -> x)

(* Common interface of the Rust printers defined below: items, expressions
   and types, rendered either as annotated output or as plain strings. *)
module type T = sig
  val pitem : item -> AnnotatedString.Output.t
  val pitems : item list -> AnnotatedString.Output.t
  val pitem_str : item -> string
  val pexpr_str : expr -> string
  val pty_str : ty -> string
end

(* The "traditional" printer: raw printing followed by rustfmt reflowing.
   [pexpr_str] and [pty_str] share their wrap/format/strip pipeline via
   [wrap_rustfmt], which is hidden by the [: T] ascription. *)
module Traditional : T = struct
  let pitem : item -> AnnotatedString.Output.t =
    Raw.pitem >> rustfmt_annotated >> AnnotatedString.Output.convert

  let pitems : item list -> AnnotatedString.Output.t =
    List.concat_map ~f:Raw.pitem
    >> rustfmt_annotated >> AnnotatedString.Output.convert

  let pitem_str : item -> string = pitem >> AnnotatedString.Output.raw_string

  (* Wraps [e] between [prefix] and [suffix] so rustfmt sees a syntactically
     complete item, formats it, then strips the wrapper and trims. *)
  let wrap_rustfmt ~prefix ~suffix (e : AnnotatedString.t) : string =
    let ( ! ) = AnnotatedString.pure @@ Span.dummy () in
    let ( & ) = AnnotatedString.( & ) in
    let item = !prefix & e & !suffix in
    rustfmt_annotated item |> AnnotatedString.Output.convert
    |> AnnotatedString.Output.raw_string |> Stdlib.String.trim
    |> String.chop_suffix_if_exists ~suffix
    |> String.chop_prefix_if_exists ~prefix
    |> Stdlib.String.trim

  (* Prints an expression by formatting it as the body of a dummy function. *)
  let pexpr_str (e : expr) : string =
    wrap_rustfmt ~prefix:"fn expr_wrapper() {" ~suffix:"}" (Raw.pexpr e)

  (* Prints a type by formatting it as the body of a dummy type alias. *)
  let pty_str (e : ty) : string =
    wrap_rustfmt ~prefix:"type TypeWrapper = " ~suffix:";"
      (Raw.pty (Span.dummy ()) e)
end

(* module Experimental : T = struct *)
(*   module GenericRustPrinter = Generic_rust_printer.Make (Features.Full) *)

(*   let pitem : item -> AnnotatedString.Output.t = *)
(*     GenericRustPrinter.item () *)
(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)
(*     >> AnnotatedString.Output.convert *)

(*   let pitems : item list -> AnnotatedString.Output.t = *)
(*     GenericRustPrinter.items () *)
(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)
(*     >> AnnotatedString.Output.convert *)

(*   let pexpr : expr -> AnnotatedString.Output.t = *)
(*     GenericRustPrinter.expr () *)
(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)
(*     >> AnnotatedString.Output.convert *)

(*   let pitem_str : item -> string = *)
(*     GenericRustPrinter.item () >> Generic_printer_api.AnnotatedString.to_string *)

(*   let pexpr_str : expr -> string = *)
(*     GenericRustPrinter.expr () >> Generic_printer_api.AnnotatedString.to_string *)

(*   let pty_str : ty -> string = *)
(*     GenericRustPrinter.ty () >> Generic_printer_api.AnnotatedString.to_string *)
(* end *)

(** Whether the experimental Rust printer was requested, via the
    [HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER] environment variable. *)
let experimental =
  Option.is_some (Sys.getenv "HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER")

(* Selects the printer implementation at module-initialization time. The
   experimental printer is not wired up yet (its module is commented out
   above), so requesting it fails with "todo"; otherwise the [Traditional]
   implementation is included. *)
include
  (val if experimental then failwith "todo" (*module Experimental : T*)
       else (module Traditional : T))


================================================
FILE: engine/lib/print_rust.mli
================================================
open Ast.Full

module AnnotatedString : sig
  module Output : sig
    type t [@@deriving show, yojson]
    (** Pretty-printed Rust output carrying span annotations. *)

    val raw_string : t -> string
    (** Drops the annotations and returns only the rendered text. *)
  end
end

val pitem : item -> AnnotatedString.Output.t
(** Pretty-prints one item as annotated Rust. *)

val pitems : item list -> AnnotatedString.Output.t
(** Pretty-prints a list of items as annotated Rust. *)

val pitem_str : item -> string
(** Pretty-prints one item as a plain string. *)

val pexpr_str : expr -> string
(** Pretty-prints one expression as a plain string. *)

val pty_str : ty -> string
(** Pretty-prints one type as a plain string. *)


================================================
FILE: engine/lib/profiling.ml
================================================
open Prelude

(** Is profiling enabled? Toggled externally; [profile] reads this flag both
    before running the profiled function and again before reporting. *)
let enabled = ref false

(** Profiles the function [f], that operates in a given [context] over a given
    [quantity] of things it is processing. When profiling is enabled, the
    wall-clock time and minor-heap allocation of the call are measured and
    reported through [Hax_io]; otherwise [f] is invoked directly. The report
    is emitted whether [f] returns or raises (the exception is re-raised). *)
let profile (type b) (context : Diagnostics.Context.t) (quantity : int)
    (f : unit -> b) : b =
  if not !enabled (* `!` derefs, it's not a negation *) then f ()
  else
    let time0 = Core.Time_ns.now () in
    let mem0 = Core.Gc.minor_words () in
    let report errored =
      (* Re-check the flag: it may have been flipped while [f] was running. *)
      if !enabled (* `!` derefs, it's not a negation *) then
        let time1 = Core.Time_ns.now () in
        let mem1 = Core.Gc.minor_words () in
        Hax_io.write
          (Types.ProfilingData
             {
               context = Diagnostics.Context.display context;
               time_ns =
                 Core.Time_ns.diff time1 time0
                 |> Core.Time_ns.Span.to_int63_ns |> Int63.to_string;
               memory = Int.to_string (mem1 - mem0);
               quantity = Int.to_int64 quantity;
               errored;
             })
    in
    match f () with
    | result ->
        report false;
        result
    | exception e ->
        report true;
        raise e


================================================
FILE: engine/lib/rust_engine_types.ml
================================================
(** This module re-exports and renames a subset of `Types`. `Types` contains
    both the modules from the frontend and from the Rust engine. Thus, some
    types are deduplicated, and get renamed. *)

module Renamed = struct
  (* Each alias maps a deduplicated, suffixed type generated in [Types]
     (`…2`, or `def_id_inner`) back to its canonical, unsuffixed name. *)
  type arm = Types.arm2
  type attribute = Types.attribute2
  type attribute_kind = Types.attribute_kind2
  type binding_mode = Types.binding_mode2
  type borrow_kind = Types.borrow_kind2
  type def_id = Types.def_id_inner
  type global_id = Types.global_id
  type expr_kind = Types.expr_kind2
  type impl_expr = Types.impl_expr2
  type param = Types.param2
  type pat_kind = Types.pat_kind2
  type projection_predicate = Types.projection_predicate2
  type region = Types.region2
  type span = Types.span2
end

(* Re-export everything from [Types], then shadow the suffixed names with
   their renamed versions. *)
include Types
include Renamed


================================================
FILE: engine/lib/side_effect_utils.ml
================================================
open! Prelude

module MakeSI
    (F :
      Features.T
        with type monadic_binding = Features.Off.monadic_binding
         and type for_index_loop = Features.Off.for_index_loop) =
struct
  module AST = Ast.Make (F)
  module U = Ast_utils.Make (F)
  include Ast
  include AST
  module Visitors = Ast_visitors.Make (F)

  module SideEffects = struct
    (* TODO: consider non-terminaison and closed-mutation *)
    (* Summary of the side effects an expression may perform: which free
       mutable locals it reads and writes, whether it deeply mutates
       something through an arbitrary LHS, and which control-flow effects
       (return / continue / break, with the type of value they carry) it
       may trigger. *)
    type t = {
      reads_local_mut : U.Sets.TypedLocalIdent.t;  (** only free variables *)
      writes_local_mut : U.Sets.TypedLocalIdent.t;  (** only free variables *)
      deep_mutation : bool;
      return : ty option;
      continue : ty option option; (* TODO: continue with labels *)
      break : ty option; (* TODO: break with labels *)
    }
    [@@deriving show]

    (* The empty summary: no reads, no writes, no deep mutation, no
       control-flow effects. Identity element for [plus]. *)
    let zero : t =
      {
        reads_local_mut = Set.empty (module U.TypedLocalIdent);
        writes_local_mut = Set.empty (module U.TypedLocalIdent);
        deep_mutation = false;
        return = None;
        continue = None;
        break = None;
      }

    (* Pointwise union of two summaries. When both sides carry a
       control-flow type (e.g. both may `return`), the types must be equal;
       otherwise this is an internal assertion failure. *)
    let plus : t -> t -> t =
      let merge_ty x y =
        if not @@ U.ty_equality x y then
          Diagnostics.failure ~context:(Other "side_effect_utils.ml")
            ~span:(Span.dummy ())
            (AssertionFailure
               {
                 details =
                   "Expected two exact same types, got x="
                   ^ (x |> U.LiftToFullAst.ty |> Print_rust.pty_str)
                   ^ " and y="
                   ^ (y |> U.LiftToFullAst.ty |> Print_rust.pty_str);
               })
        else x
      in
      let merge_opts (type x) (f : x -> x -> x) (a : x option) (b : x option) =
        match (a, b) with
        | Some a, Some b -> Some (f a b)
        | Some a, None | None, Some a -> Some a
        | None, None -> None
      in
      fun x y ->
        {
          reads_local_mut = Set.union x.reads_local_mut y.reads_local_mut;
          writes_local_mut = Set.union x.writes_local_mut y.writes_local_mut;
          deep_mutation = x.deep_mutation || y.deep_mutation;
          return = merge_opts merge_ty x.return y.return;
          continue =
            merge_opts
              (fun x y ->
                match (x, y) with
                | Some x, Some y -> Some (merge_ty x y)
                | _ -> None)
              x.continue y.continue;
          break = merge_opts merge_ty x.break y.break;
        }

    (* Summary for a single read of mutable local [var] at type [ty]. *)
    let reads (var : Local_ident.t) (ty : ty) =
      {
        zero with
        reads_local_mut = Set.singleton (module U.TypedLocalIdent) (var, ty);
      }

    (* Summary for a single write to mutable local [var] at type [ty]. *)
    let writes (var : Local_ident.t) (ty : ty) =
      {
        zero with
        writes_local_mut = Set.singleton (module U.TypedLocalIdent) (var, ty);
      }

    (* Holds when the summary has no deep mutation and no control-flow
       effect; reads and writes of locals are still allowed. *)
    let no_deep_mut_or_cf : t -> bool =
      [%matches?
        {
          deep_mutation = false;
          return = None;
          continue = None;
          break = None;
          _;
        }]

    (* Holds when the summary only reads mutable locals (no writes either). *)
    let reads_local_mut_only : t -> bool =
     fun x -> no_deep_mut_or_cf x && Set.is_empty x.writes_local_mut

    (* Whether two summaries commute (their expressions can be reordered):
       either neither has deep mutation / control flow and neither one's
       writes touch anything the other reads or writes, or at least one of
       them is read-only. *)
    let commute : t -> t -> bool =
      curry @@ function
      | ( ({ reads_local_mut = xr; writes_local_mut = xw; _ } as x),
          ({ reads_local_mut = yr; writes_local_mut = yw; _ } as y) )
        when no_deep_mut_or_cf x && no_deep_mut_or_cf y ->
          let open Set in
          let x = union xw xr in
          let y = union yw yr in
          is_empty @@ union (inter xw y) (inter yw x)
      | x, y when reads_local_mut_only x || reads_local_mut_only y -> true
      | _ -> false

    (* Monoid instance over ([zero], [plus]) for use in visitors. *)
    class ['s] monoid =
      object
        method private zero = zero
        method private plus = plus
      end

    (* Drops reads/writes that concern the variables in [vars] (e.g. when
       those variables are bound locally and thus not free anymore). *)
    let without_rw_vars (vars : U.Sets.Local_ident.t) (effects : t) =
      let without = Set.filter ~f:(fst >> Set.mem vars >> not) in
      {
        effects with
        writes_local_mut = without effects.writes_local_mut;
        reads_local_mut = without effects.reads_local_mut;
      }
  end

  module Hoist = struct
    (* A hoisted let-binding: the bound pattern and its right-hand side. *)
    type binding = pat * expr [@@deriving show]

    (* Visitor state: the bindings hoisted so far (in order) plus the
       accumulated effect summary. *)
    type t = { lbs : binding list; effects : SideEffects.t } [@@deriving show]

    (* Monoid addition: concatenate bindings, merge effects. *)
    let plus x y : t =
      let effects = SideEffects.plus x.effects y.effects in
      { lbs = x.lbs @ y.lbs; effects }

    let zero : t = { lbs = []; effects = SideEffects.zero }

    (* Projections on [t]. *)
    let flbs { lbs; _ } = lbs
    let feff { effects; _ } = effects

    (* Wraps an effect summary with an empty binding list. *)
    let no_lbs effects = { lbs = []; effects }

    class ['s] monoid =
      object
        method private zero = zero
        method private plus = plus
      end

    (* Boolean monoid under conjunction, used by the deep-mutation check. *)
    class ['s] bool_monoid =
      object
        method private zero = false
        method private plus = ( && )
      end

    (* Mutable counter used to mint fresh local identifiers for hoisted
       bindings. *)
    module CollectContext = struct
      type t = { mutable fresh_id : int }

      let empty = { fresh_id = 0 }

      (* Bumps the counter and returns a fresh `hoistN` identifier. *)
      let fresh_local_ident (self : t) : Local_ident.t =
        let next = self.fresh_id + 1 in
        self.fresh_id <- next;
        {
          name = "hoist" ^ Int.to_string next;
          id = Local_ident.mk_id SideEffectHoistVar (-1) (* todo *);
        }
    end

    module HoistSeq = struct
      let ( let* ) x f = Option.bind ~f x

      (* Given a list of already-visited subexpressions (each paired with its
         hoisted bindings and effect summary), rebuilds the sequence so that
         evaluation order is preserved: a subexpression stays in place only
         when it is read-only and commutes with everything evaluated after
         it; otherwise it is bound to a variable (hoisted) and replaced by a
         reference to that variable. [next] receives the replacement
         expressions and the combined bindings/effects. *)
      let many (ctx : CollectContext.t) (l : (expr * t) list)
          (next : expr list -> t -> expr * t) =
        let fresh () = CollectContext.fresh_local_ident ctx in
        let effects, l =
          (* fold_right so that [effects] accumulates the effects of
             everything to the *right* of the current subexpression. *)
          List.fold_right l ~init:(SideEffects.zero, [])
            ~f:(fun (expr, { lbs; effects = effects0 }) (effects, l) ->
              ( SideEffects.plus effects0 effects,
                (if
                   SideEffects.reads_local_mut_only effects0
                   && SideEffects.commute effects0 effects
                 then (lbs, expr)
                 else
                   let var =
                     (* if the body is a local variable, use that,
                        otherwise get a fresh one *)
                     match snd @@ U.collect_let_bindings expr with
                     (* TODO: this optimization is disabled because it fails in cases like f(x, {x = 3; x}) *)
                     | { e = LocalVar var; _ } when false -> var
                     | _ -> fresh ()
                   in
                   ( lbs @ [ (U.make_var_pat var expr.typ expr.span, expr) ],
                     { expr with e = LocalVar var } ))
                :: l ))
        in
        let lbs = List.concat_map ~f:fst l in
        next (List.map ~f:snd l) { lbs; effects }

      (* Raised when [many] calls [next] with a list of unexpected length. *)
      let err_hoist_invariant span (type r) (location : string) : r =
        Diagnostics.failure ~context:(Other "HoistSeq") ~span
          (AssertionFailure
             {
               details =
                 "[HoistSeq.many] broke its invariant (location:" ^ location
                 ^ ")";
             })

      (* Single-expression specialization of [many]. *)
      let one (ctx : CollectContext.t) (e : expr * t)
          (next : expr -> t -> expr * t) =
        many ctx [ e ] (function
          | [ e ] -> next e
          | _ -> err_hoist_invariant (fst e).span Stdlib.__LOC__)
    end

    (* Wraps [body] in a single `let <pat> = <rhs> in ...`. *)
    let let_of_binding ((pat, rhs) : pat * expr) (body : expr) : expr =
      U.make_let pat rhs body

    (* Wraps [body] in one `let` per binding; the first binding in the list
       becomes the outermost `let`. *)
    let lets_of_bindings (bindings : (pat * expr) list) (body : expr) : expr =
      List.fold_right bindings ~init:body ~f:(fun binding acc ->
          let_of_binding binding acc)

    (* Bottom-up visitor computing, for every expression, a rewritten
       expression paired with a [t]: the let-bindings hoisted out of it and
       its effect summary. Subexpressions whose effects do not commute with
       their siblings are lifted into `let` bindings via [HoistSeq]. *)
    let collect_and_hoist_effects_object =
      object (self)
        (* inherit [_] expr_mapreduce *)
        inherit [_] Visitors.mapreduce as super
        inherit [_] monoid as m

        (* method visit_t _ x = (x, m#zero) *)
        (* method visit_mutability _ _ x = (x, m#zero) *)

        (* Collecting effects bottom up *)
        method! visit_lhs (env : CollectContext.t) lhs =
          match lhs with
          | LhsLocalVar { var; typ } ->
              (LhsLocalVar { var; typ }, no_lbs @@ SideEffects.writes var typ)
          | LhsArbitraryExpr { e; witness } ->
              (* An arbitrary expression on a LHS is conservatively flagged
                 as a deep mutation. *)
              let deep_mutation =
                (object
                   inherit [_] Visitors.reduce as _super
                   inherit [_] bool_monoid as _m

                   (* method visit_t _ _ = m#zero *)
                   (* method visit_mutability _ _ _ = m#zero *)
                   (* method! visit_Deref _ _ _ = true *)
                   method! visit_item () _ = false
                end)
                  #visit_expr
                  () e
              in
              ( LhsArbitraryExpr { e; witness },
                no_lbs { SideEffects.zero with deep_mutation } )
          | _ -> super#visit_lhs env lhs

        method! visit_expr (env : CollectContext.t) e =
          match e.e with
          (* Reading a local variable is itself an effect to track. *)
          | LocalVar v -> (e, no_lbs (SideEffects.reads v e.typ))
          (* `?`, `return`, `break` and `continue` record the control-flow
             effect (with the type it carries) on top of the effects of
             their subexpression. *)
          | QuestionMark { e = e'; return_typ; witness } ->
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  let effects =
                    m#plus effects
                      (no_lbs
                         { SideEffects.zero with return = Some return_typ })
                  in
                  ( { e with e = QuestionMark { e = e'; return_typ; witness } },
                    effects ))
          | Return { e = e'; witness } ->
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  ( { e with e = Return { e = e'; witness } },
                    m#plus effects
                      (no_lbs { SideEffects.zero with return = Some e'.typ }) ))
          | Break { e = e'; label; acc; witness } ->
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  ( { e with e = Break { e = e'; acc; label; witness } },
                    m#plus effects
                      (no_lbs { SideEffects.zero with break = Some e'.typ }) ))
          | Continue { acc = e'; label; witness } -> (
              let ceffect =
                no_lbs
                  {
                    SideEffects.zero with
                    continue = Some (Option.map ~f:(fun (e, _) -> e.typ) e');
                  }
              in
              match e' with
              | Some (e', witness') ->
                  HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                      ( {
                          e with
                          e =
                            Continue
                              { acc = Some (e', witness'); label; witness };
                        },
                        m#plus ceffect effects ))
              | None -> (e, ceffect))
          (* Loops: the condition/iterator and initial state are hoisted
             like ordinary subexpressions; the body's bindings stay inside
             the loop, and its continue/break effects are handled (hence
             erased) by the loop construct itself. *)
          | Loop { body; kind; state; label; witness; control_flow } ->
              let kind' =
                match kind with
                | UnconditionalLoop -> []
                | ForLoop { it; _ } -> [ self#visit_expr env it ]
                | WhileLoop { condition; _ } ->
                    [ self#visit_expr env condition ]
                | _ -> .
              in
              let state' =
                Option.map
                  ~f:(fun { init; _ } -> self#visit_expr env init)
                  state
              in
              let kind_state = kind' @ Option.to_list state' in
              (* effects to realize before the loop *)
              (* let effects_before = List.fold ~init:zero ~f:plus kind_state in *)
              HoistSeq.many env kind_state (fun l effects ->
                  let kind =
                    match (l, kind) with
                    | condition :: ([ _ ] | []), WhileLoop { witness; _ } ->
                        WhileLoop { condition; witness }
                    | it :: ([ _ ] | []), ForLoop { pat; witness; _ } ->
                        ForLoop { pat; witness; it }
                    | ([ _ ] | []), UnconditionalLoop -> UnconditionalLoop
                    | _, ForIndexLoop _ -> .
                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__
                  in
                  let state =
                    match (l, state) with
                    | (_ :: [ state ] | [ state ]), Some { witness; bpat; _ } ->
                        Some { witness; bpat; init = state }
                    | ([ _ ] | []), None -> None
                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__
                  in
                  (* by now, the "inputs" of the loop are hoisted as let if needed *)
                  let body, { lbs; effects = body_effects } =
                    self#visit_expr env body
                  in
                  (* the loop construction **handles** the effect continue and break *)
                  let body_effects =
                    no_lbs { body_effects with continue = None; break = None }
                  in
                  let effects = m#plus effects body_effects in
                  let body = lets_of_bindings lbs body in
                  ( {
                      e with
                      e =
                        Loop { body; kind; state; label; witness; control_flow };
                    },
                    effects ))
          (* `if`: only the condition is hoisted; each branch keeps its own
             bindings (they must not be evaluated unconditionally). *)
          | If { cond; then_; else_ } ->
              HoistSeq.one env (self#visit_expr env cond) (fun cond effects ->
                  let then_, { lbs = lbs_then; effects = ethen } =
                    self#visit_expr env then_
                  in
                  let else_, { lbs = lbs_else; effects = eelse } =
                    match Option.map ~f:(self#visit_expr env) else_ with
                    | Some (else_, eelse) -> (Some else_, eelse)
                    | None -> (None, m#zero)
                  in
                  let then_ = lets_of_bindings lbs_then then_ in
                  let else_ = Option.map ~f:(lets_of_bindings lbs_else) else_ in
                  let effects =
                    m#plus (m#plus (no_lbs ethen) (no_lbs eelse)) effects
                  in
                  ({ e with e = If { cond; then_; else_ } }, effects))
          (* Application: the function and all arguments are hoisted as one
             left-to-right sequence. *)
          | App { f; args; generic_args; trait; bounds_impls } ->
              HoistSeq.many env
                (List.map ~f:(self#visit_expr env) (f :: args))
                (fun l effects ->
                  let f, args =
                    match l with
                    | f :: args -> (f, args)
                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__
                  in
                  ( {
                      e with
                      e = App { f; args; generic_args; trait; bounds_impls };
                    },
                    effects ))
          | Literal _ -> (e, m#zero)
          | Block { e; safety_mode; witness } ->
              HoistSeq.one env (self#visit_expr env e) (fun e effects ->
                  ({ e with e = Block { e; safety_mode; witness } }, effects))
          | Array l ->
              HoistSeq.many env
                (List.map ~f:(self#visit_expr env) l)
                (fun l effects -> ({ e with e = Array l }, effects))
          (* Constructor with no fields and no base: effect-free. *)
          | Construct
              { constructor; is_record; is_struct; fields = []; base = None } ->
              ( {
                  e with
                  e =
                    Construct
                      {
                        constructor;
                        is_record;
                        is_struct;
                        fields = [];
                        base = None;
                      };
                },
                m#zero )
          (* General constructor: the base (if any) then each field value are
             hoisted as one sequence, then re-zipped with the field names. *)
          | Construct { constructor; is_struct; is_record; fields; base } ->
              HoistSeq.many env
                (List.map ~f:(self#visit_expr env)
                   (Option.to_list (Option.map ~f:fst base)
                   @ List.map ~f:snd fields))
                (fun l effects ->
                  let base, fields_expr =
                    match (l, base) with
                    | hd :: tl, Some (_, witness) -> (Some (hd, witness), tl)
                    | _, None -> (None, l)
                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__
                  in
                  let fields =
                    match List.zip (List.map ~f:fst fields) fields_expr with
                    | Ok fields -> fields
                    | Unequal_lengths ->
                        HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__
                  in
                  ( {
                      e with
                      e =
                        Construct
                          { constructor; is_struct; is_record; fields; base };
                    },
                    effects ))
          (* `match`: arm bodies keep their bindings; effects on variables
             bound by an arm's pattern are cancelled, then the scrutinee is
             hoisted normally. *)
          | Match { scrutinee; arms } ->
              let arms, eff_arms =
                let arms =
                  List.map ~f:(self#visit_arm env) arms
                  (* materialize letbindings in each arms *)
                  |> List.map ~f:(fun ({ arm; span }, ({ lbs; effects } : t)) ->
                         let arm =
                           { arm with body = lets_of_bindings lbs arm.body }
                         in
                         (({ arm; span } : arm), { lbs = []; effects }))
                     (* cancel effects that concern variables introduced in pats  *)
                  |> List.map ~f:(fun (arm, { lbs; effects }) ->
                         let vars =
                           U.Reducers.variables_of_pat arm.arm.arm_pat
                         in
                         let effects =
                           SideEffects.without_rw_vars vars effects
                         in
                         (arm, { lbs; effects }))
                in
                ( List.map ~f:fst arms,
                  List.fold ~init:m#zero ~f:m#plus (List.map ~f:snd arms) )
              in
              HoistSeq.one env (self#visit_expr env scrutinee)
                (fun scrutinee effects ->
                  ( { e with e = Match { scrutinee; arms } },
                    m#plus eff_arms effects ))
          | Let { monadic = Some _; _ } -> .
          (* `let`: flattened into the binding list — rhs bindings first,
             then the binding itself, then the body's bindings. *)
          | Let { monadic = None; lhs; rhs; body } ->
              let rhs, { lbs = rhs_lbs; effects = rhs_effects } =
                self#visit_expr env rhs
              in
              let body, { lbs = body_lbs; effects = body_effects } =
                self#visit_expr env body
              in
              let lbs = rhs_lbs @ ((lhs, rhs) :: body_lbs) in
              let effects = SideEffects.plus rhs_effects body_effects in
              (body, { lbs; effects })
          | GlobalVar _ -> (e, m#zero)
          | Ascription { e = e'; typ } ->
              HoistSeq.one env (self#visit_expr env e') (fun e' eff ->
                  ({ e with e = Ascription { e = e'; typ } }, eff))
          | MacroInvokation _ -> (e, m#zero)
          | Assign { lhs; e = e'; witness } ->
              (* TODO: here, LHS should really have no effect... This is not fine *)
              let lhs, lhs_effects = self#visit_lhs env lhs in
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  let effects = m#plus effects lhs_effects in
                  ({ e with e = Assign { e = e'; lhs; witness } }, effects))
          | Borrow { kind; e = e'; witness } ->
              let kind, kind_effects = self#visit_borrow_kind env kind in
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  let effects = m#plus kind_effects effects in
                  ({ e with e = Borrow { kind; e = e'; witness } }, effects))
          | AddressOf { mut; e = e'; witness } ->
              let mut, mut_effects = (mut, m#zero) in
              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->
                  let effects = m#plus mut_effects effects in
                  ({ e with e = AddressOf { mut; e = e'; witness } }, effects))
          (* Closures: bindings stay inside the body; effects on parameters
             are cancelled, and `return` is handled by the closure itself. *)
          | Closure { params; body; captures } ->
              let body, body_effects =
                let body, { lbs; effects } = self#visit_expr env body in
                let vars =
                  Set.union_list (module Local_ident)
                  @@ List.map ~f:U.Reducers.variables_of_pat params
                in
                let body = lets_of_bindings lbs body in
                let effects =
                  {
                    (SideEffects.without_rw_vars vars effects) with
                    return = None;
                  }
                in
                (body, { lbs = []; effects })
              in
              ({ e with e = Closure { params; body; captures } }, body_effects)
              (* HoistSeq.many env *)
              (*   (List.map ~f:(super#visit_expr env) captures) *)
              (*   (fun captures effects -> *)
              (*     let effects = m#plus body_effects effects in *)
              (*     ({ e with e = Closure { params; body; captures } }, effects)) *)
          | EffectAction _ ->
              Diagnostics.failure
                ~context:(Other "collect_and_hoist_effects_object") ~span:e.span
                (Unimplemented
                   { issue_id = None; details = Some "EffectAction" })
          | Quote _ -> (e, m#zero)
      end

    (* Entry point: rewrites [e], materializing every binding hoisted out of
       it as enclosing `let`s, and returns the residual effect summary. *)
    let collect_and_hoist_effects (e : expr) : expr * SideEffects.t =
      let body, state =
        collect_and_hoist_effects_object#visit_expr CollectContext.empty e
      in
      (lets_of_bindings state.lbs body, state.effects)
  end
end

(* Phase functor: hoists side effects out of expressions. The feature set is
   unchanged (FB simply includes F); every translation is the inlined
   identity, except [dexpr], which first runs
   [Hoist.collect_and_hoist_effects] on the expression. *)
module%inlined_contents Hoist
    (F :
      Features.T
        with type monadic_binding = Features.Off.monadic_binding
         and type for_index_loop = Features.Off.for_index_loop) =
struct
  module FA = F

  module FB = struct
    include F
  end

  module UA = Ast_utils.Make (F)
  module UB = Ast_utils.Make (FB)
  module A = Ast.Make (F)
  module B = Ast.Make (FB)

  module S = struct
    include Features.SUBTYPE.Id
  end

  open MakeSI (F)

  [%%inline_defs dmutability + dsafety_kind]

  module ID = struct
    (* OCaml is not able to understand A.expr is the same as B.expr........... *)
    [%%inline_defs dexpr]
  end

  open ID

  (* Hoist side effects first, then apply the inlined identity [dexpr]. *)
  let dexpr (expr : A.expr) : B.expr =
    Hoist.collect_and_hoist_effects expr |> fst |> dexpr

  [%%inline_defs "Item.*"]

  let metadata = Phase_utils.Metadata.make HoistSideEffects
end
[@@add "subtype.ml"]


================================================
FILE: engine/lib/span.ml
================================================
open! Prelude

(* Generator of fresh span identifiers. The counter starts at 1 so that 0
   stays reserved for the [default] span. *)
module FreshId = struct
  let current = ref 1

  (* Returns the current counter value and bumps it. *)
  let make () =
    let id = !current in
    incr current;
    id
end

(* Mirror of the frontend (THIR) span types, with converters in both
   directions and human-readable rendering. *)
module Imported = struct
  type span = { filename : file_name; hi : loc; lo : loc }
  and loc = { col : int; line : int }

  and file_name =
    | Real of real_file_name
    | CfgSpec of string
    | Anon of string
    | MacroExpansion of string
    | ProcMacroSourceCode of string
    | CliCrateAttr of string
    | Custom of string
    | DocTest of string
    | InlineAsm of string

  and real_file_name =
    | LocalPath of string
    | Remapped of { local_path : string option; virtual_name : string }
  [@@deriving show, yojson, sexp, compare, eq, hash]

  (* Structural copy from [Types.file_name] (frontend) to the local type. *)
  let file_name_of_thir : Types.file_name -> file_name = function
    | Real x ->
        Real
          (match x with
          | LocalPath x -> LocalPath x
          | Remapped { local_path; virtual_name } ->
              Remapped { local_path; virtual_name })
    | CfgSpec x -> CfgSpec x
    | Anon x -> Anon x
    | MacroExpansion x -> MacroExpansion x
    | ProcMacroSourceCode x -> ProcMacroSourceCode x
    | CliCrateAttr x -> CliCrateAttr x
    | Custom x -> Custom x
    | DocTest x -> DocTest x
    | InlineAsm x -> InlineAsm x

  (* THIR locations carry columns/lines as strings; parse them to ints. *)
  let loc_of_thir ({ col; line } : Types.loc) : loc =
    { col = Int.of_string col; line = Int.of_string line }

  let span_of_thir (s : Types.span) : span =
    {
      filename = file_name_of_thir s.filename;
      hi = loc_of_thir s.hi;
      lo = loc_of_thir s.lo;
    }

  (* Structural copy back to [Types.file_name] (inverse of
     [file_name_of_thir]). *)
  let file_name_to_thir : file_name -> Types.file_name = function
    | Real x ->
        Real
          (match x with
          | LocalPath x -> LocalPath x
          | Remapped { local_path; virtual_name } ->
              Remapped { local_path; virtual_name })
    | CfgSpec x -> CfgSpec x
    | Anon x -> Anon x
    | MacroExpansion x -> MacroExpansion x
    | ProcMacroSourceCode x -> ProcMacroSourceCode x
    | CliCrateAttr x -> CliCrateAttr x
    | Custom x -> Custom x
    | DocTest x -> DocTest x
    | InlineAsm x -> InlineAsm x

  let loc_to_thir ({ col; line } : loc) : Types.loc =
    { col = Int.to_string col; line = Int.to_string line }

  let span_to_thir (s : span) : Types.span =
    {
      filename = file_name_to_thir s.filename;
      hi = loc_to_thir s.hi;
      lo = loc_to_thir s.lo;
    }

  let display_loc (l : loc) : string =
    (* NOTE(review): this renders `col:line`, whereas the conventional order
       is `line:col` — confirm whether this is intentional. *)
    Int.to_string l.col ^ ":" ^ Int.to_string l.line

  (* Renders as `<file lo→hi>`, using the raw path for local files and the
     derived show for everything else. *)
  let display_span (s : span) : string =
    let file =
      match s.filename with
      | Real (LocalPath path) -> path
      | s -> [%show: file_name] s
    in
    "<" ^ file ^ " " ^ display_loc s.lo ^ "→" ^ display_loc s.hi ^ ">"
end

(* Index of an owner ([Types.def_id]) registered in [owner_id_list]. *)
type owner_id = OwnerId of int
[@@deriving show, yojson, sexp, compare, eq, hash]

(* Registered owners, most recent first, with the count cached so that an
   [OwnerId i] can be resolved by position without re-measuring the list. *)
let owner_id_list = ref []
let owner_id_list_len = ref 0

(* Registers [owner] and returns its freshly-assigned identifier. *)
let fresh_owner_id (owner : Types.def_id) : owner_id =
  let id = OwnerId !owner_id_list_len in
  owner_id_list := owner :: !owner_id_list;
  incr owner_id_list_len;
  id

(** This state changes the behavior of `of_thir`: the hint placed into this
    state will be inserted automatically by `of_thir`. The field `owner_hint`
    shall be used solely for reporting to the user, not for any logic within the
    engine. *)
let state_owner_hint : owner_id option ref = ref None

(** Runs [f] with [owner] installed as the current owner hint, restoring the
    previous hint afterwards. The restoration also happens when [f] raises —
    the original version skipped it, so an escaping exception would leave a
    stale hint installed globally for all subsequent spans. *)
let with_owner_hint (type t) (owner : Types.def_id) (f : unit -> t) : t =
  let previous = !state_owner_hint in
  state_owner_hint := Some (fresh_owner_id owner);
  match f () with
  | result ->
      state_owner_hint := previous;
      result
  | exception e ->
      state_owner_hint := previous;
      raise e

(* A hax span: a unique identifier, the underlying THIR spans it covers, and
   an optional owner hint (for user reporting only). *)
type t = { id : int; data : Imported.span list; owner_hint : owner_id option }
[@@deriving show, yojson, sexp, compare, eq, hash]

(* Human-readable rendering: empty for no data, the single span's display,
   or all spans' displays joined with `∪`. *)
let display (span : t) =
  match span.data with
  | [] -> ""
  | [ single ] -> Imported.display_span single
  | many -> String.concat ~sep:"∪" (List.map ~f:Imported.display_span many)

(* Imports a single THIR span, assigning a fresh identifier and the
   currently-installed owner hint. *)
let of_thir span =
  {
    id = FreshId.make ();
    data = [ Imported.span_of_thir span ];
    owner_hint = !state_owner_hint;
  }

(* Exports every underlying THIR span. *)
let to_thir { data; _ } = List.map data ~f:Imported.span_to_thir

(* Merges a list of spans into a single fresh span covering all their data;
   the owner hint is inherited from the first span, if any. *)
let union_list spans =
  let data = List.concat_map spans ~f:(fun { data; _ } -> data) in
  let owner_hint =
    match spans with [] -> None | first :: _ -> first.owner_hint
  in
  { data; id = FreshId.make (); owner_hint }

let union x y = union_list [ x; y ]

(* Generates a dummy span: this should be avoided at all cost. *)
let dummy () =
  { id = FreshId.make (); data = []; owner_hint = !state_owner_hint }

(* The unique identifier of a span. *)
let id_of span = span.id

(* Same span, fresh identifier. *)
let refresh_id span = { span with id = FreshId.make () }

(* Placeholder span with the reserved id 0 and no data. *)
let default = { id = 0; data = []; owner_hint = None }

(* Resolves the owner hint back to the registered [Types.def_id]. Owners are
   stored most-recent-first, so [OwnerId id] lives at position
   [len - id - 1] from the head. *)
let owner_hint span =
  Option.map span.owner_hint ~f:(fun (OwnerId id) ->
      Option.value_exn
        (List.nth !owner_id_list (!owner_id_list_len - id - 1)))

(* Converts this span into its Rust-engine representation; the owner hint is
   resolved and copied field-wise into [Types.def_id2]. *)
let to_rust_ast_span span : Rust_engine_types.span =
  let convert_owner (did : Types.def_id) : Types.def_id2 =
    { contents = did.contents }
  in
  {
    data = List.map ~f:Imported.span_to_thir span.data;
    id = Int.to_int64 span.id;
    owner_hint = Option.map ~f:convert_owner (owner_hint span);
  }

(* Imports a Rust-engine span, keeping its identifier and re-registering its
   owner hint (if any) in the local owner table. *)
let from_rust_ast_span (span : Rust_engine_types.span) : t =
  let convert_owner (did : Types.def_id2) : Types.def_id =
    { contents = did.contents }
  in
  {
    data = List.map ~f:Imported.span_of_thir span.data;
    id = Int.of_int64_exn span.id;
    owner_hint =
      span.owner_hint |> Option.map ~f:convert_owner
      |> Option.map ~f:fresh_owner_id;
  }


================================================
FILE: engine/lib/span.mli
================================================
type t [@@deriving show, yojson, sexp, compare, eq, hash]
(** A hax span: an opaque source-location value, possibly covering several
    underlying THIR spans. *)

val display : t -> string
(** Renders a span as a human-readable string. *)

val of_thir : Types.span -> t
(** Imports a THIR span as a hax span *)

val to_thir : t -> Types.span list
(** Exports a hax span to THIR spans (a hax span might be a collection of spans)
*)

val union_list : t list -> t
(** Merges a list of spans into one span covering all of them. *)

val union : t -> t -> t
(** Merges two spans. *)

val dummy : unit -> t
(** Generates a dummy span: this should be avoided at all cost. *)

val id_of : t -> int
(** Lookup the internal unique identifier of a span. *)

val refresh_id : t -> t
(** Replaces the internal identifier by a fresh one. This can be useful for
    debugging. *)

val default : t
(** A default span can be useful when a span is required in some computation
    that never reports error and when we know the span will go away. Using this
    should be avoided. *)

val with_owner_hint : Types.def_id -> (unit -> 't) -> 't
(** Inserts a hint about the fact that, in function `f`, we're translating spans
    that are "owned" by an item `owner`. This should be used only in
    `import_thir`, also, the hint shall be used only to enhance user reporting,
    not for any logic within the engine. *)

val owner_hint : t -> Types.def_id option
(** Looks up the owner hint for a span. This should be used for user reports
    only. *)

val to_rust_ast_span : t -> Rust_engine_types.span
(** Converts this span to a Rust engine span. *)

val from_rust_ast_span : Rust_engine_types.span -> t
(** Imports a Rust engine span as a hax span. *)


================================================
FILE: engine/lib/subtype.ml
================================================
open! Prelude

(* Structural mapping between two instances of the hax AST.

   Given feature sets [FA] and [FB] and a subtyping witness [S] between them,
   [Make] lifts [S] to whole ASTs: each [d*] function below performs plain
   structural recursion, copying a node of [A = Ast.Make (FA)] to the
   corresponding node of [B = Ast.Make (FB)].  The only feature-sensitive
   spots are the [S.*] calls, which coerce the feature witnesses stored in
   the tree (and are the only places where translation can fail: a coercion
   may raise to reject a feature). *)
module Make
    (FA : Features.T)
    (FB : Features.T)
    (S : Features.SUBTYPE.T with module A = FA and module B = FB) =
struct
  open Ast
  module A = Ast.Make (FA)
  module B = Ast.Make (FB)
  module UA = Ast_utils.Make (FA)
  module UB = Ast_utils.Make (FB)
  module FA = FA

  (* Coerce a safety annotation; only the [Unsafe] witness needs [S]. *)
  let dsafety_kind (span : Span.t) (safety : A.safety_kind) : B.safety_kind =
    match safety with Safe -> Safe | Unsafe w -> Unsafe (S.unsafe span w)

  (* Coerce a mutability flag; [s] translates the [Mutable] witness (callers
     pass the relevant coercion, e.g. [S.mutable_variable]). *)
  let dmutability (span : Span.t) (type a b) (s : Span.t -> a -> b)
      (mutability : a mutability) : b mutability =
    match mutability with
    | Mutable w -> Mutable (s span w)
    | Immutable -> Immutable

  (* Types, impl expressions, patterns and expressions are mutually
     recursive, hence this single [let rec ... and ...] group. *)
  let rec dty (span : span) (ty : A.ty) : B.ty =
    match ty with
    | TBool -> TBool
    | TChar -> TChar
    | TInt k -> TInt k
    | TFloat k -> TFloat k
    | TStr -> TStr
    | TApp { ident; args } ->
        TApp { ident; args = List.map ~f:(dgeneric_value span) args }
    | TArray { typ; length } ->
        TArray { typ = dty span typ; length = dexpr length }
    | TSlice { witness; ty } ->
        TSlice { witness = S.slice span witness; ty = dty span ty }
    | TRef { witness; typ; mut; region } ->
        TRef
          {
            witness = S.reference span witness;
            typ = dty span typ;
            mut = dmutability span S.mutable_reference mut;
            region;
          }
    | TParam local_ident -> TParam local_ident
    | TArrow (inputs, output) ->
        TArrow (List.map ~f:(dty span) inputs, dty span output)
    | TAssociatedType { impl; item } ->
        TAssociatedType { impl = dimpl_expr span impl; item }
    | TOpaque ident -> TOpaque ident
    | TRawPointer { witness } ->
        TRawPointer { witness = S.raw_pointer span witness }
    | TDyn { witness; goals } ->
        TDyn
          {
            witness = S.dyn span witness;
            goals = List.map ~f:(ddyn_trait_goal span) goals;
          }

  and ddyn_trait_goal (span : span) (r : A.dyn_trait_goal) : B.dyn_trait_goal =
    {
      trait = r.trait;
      non_self_args = List.map ~f:(dgeneric_value span) r.non_self_args;
    }

  and dtrait_goal (span : span) (r : A.trait_goal) : B.trait_goal =
    { trait = r.trait; args = List.map ~f:(dgeneric_value span) r.args }

  and dimpl_ident (span : span) (r : A.impl_ident) : B.impl_ident =
    { goal = dtrait_goal span r.goal; name = r.name }

  and dprojection_predicate (span : span) (r : A.projection_predicate) :
      B.projection_predicate =
    {
      impl = dimpl_expr span r.impl;
      assoc_item = r.assoc_item;
      typ = dty span r.typ;
    }

  and dimpl_expr (span : span) (i : A.impl_expr) : B.impl_expr =
    { kind = dimpl_expr_kind span i.kind; goal = dtrait_goal span i.goal }

  and dimpl_expr_kind (span : span) (i : A.impl_expr_kind) : B.impl_expr_kind =
    match i with
    | Self -> Self
    | Concrete tr -> Concrete (dtrait_goal span tr)
    | LocalBound { id } -> LocalBound { id }
    | Parent { impl; ident } ->
        Parent { impl = dimpl_expr span impl; ident = dimpl_ident span ident }
    | Projection { impl; item; ident } ->
        Projection
          { impl = dimpl_expr span impl; item; ident = dimpl_ident span ident }
    | ImplApp { impl; args } ->
        ImplApp
          {
            impl = dimpl_expr span impl;
            args = List.map ~f:(dimpl_expr span) args;
          }
    | Dyn -> Dyn
    | Builtin tr -> Builtin (dtrait_goal span tr)

  and dgeneric_value (span : span) (generic_value : A.generic_value) :
      B.generic_value =
    match generic_value with
    | GLifetime { lt; witness } ->
        GLifetime { lt; witness = S.lifetime span witness }
    | GType t -> GType (dty span t)
    | GConst e -> GConst (dexpr e)

  and dborrow_kind (span : span) (borrow_kind : A.borrow_kind) : B.borrow_kind =
    match borrow_kind with
    | Shared -> Shared
    | Unique -> Unique
    | Mut witness -> Mut (S.mutable_reference span witness)

  (* Patterns carry their own span, which is used for the sub-translation. *)
  and dpat (p : A.pat) : B.pat =
    { p = dpat' p.span p.p; span = p.span; typ = dty p.span p.typ }

  and dpat' (span : span) (pat : A.pat') : B.pat' =
    match pat with
    | PWild -> PWild
    | PAscription { typ; typ_span; pat } ->
        PAscription { typ = dty span typ; pat = dpat pat; typ_span }
    | PConstruct { constructor; is_record; is_struct; fields } ->
        PConstruct
          {
            constructor;
            is_record;
            is_struct;
            fields = List.map ~f:(dfield_pat span) fields;
          }
    | POr { subpats } -> POr { subpats = List.map ~f:dpat subpats }
    | PArray { args } -> PArray { args = List.map ~f:dpat args }
    | PConstant { lit } -> PConstant { lit }
    | PBinding { mut; mode; var : Local_ident.t; typ; subpat } ->
        PBinding
          {
            mut = dmutability span S.mutable_variable mut;
            mode = dbinding_mode span mode;
            var;
            typ = dty span typ;
            (* [***] maps the two components of the pair independently. *)
            subpat = Option.map ~f:(dpat *** S.as_pattern span) subpat;
          }
    | PDeref { subpat; witness } ->
        PDeref { subpat = dpat subpat; witness = S.reference span witness }

  and dfield_pat (_span : span) (p : A.field_pat) : B.field_pat =
    { field = p.field; pat = dpat p.pat }

  and dbinding_mode (span : span) (binding_mode : A.binding_mode) :
      B.binding_mode =
    match binding_mode with
    | ByValue -> ByValue
    | ByRef (kind, witness) ->
        ByRef (dborrow_kind span kind, S.reference span witness)

  and dsupported_monads (span : span) (m : A.supported_monads) :
      B.supported_monads =
    match m with
    | MException t -> MException (dty span t)
    | MResult t -> MResult (dty span t)
    | MOption -> MOption

  (* Expression entry point: if translating [e] raises a span-free error,
     the expression is replaced by a [hax_failure] node carrying the
     diagnostic together with the original expression (lifted to the full
     AST) for reporting.  If even the type fails to translate, an empty
     failure type is used instead. *)
  and dexpr (e : A.expr) : B.expr =
    try dexpr_unwrapped e
    with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
      let typ : B.ty =
        try dty e.span e.typ
        with Diagnostics.SpanFreeError.Exn (Data (_context, _kind)) ->
          UB.HaxFailure.Build.ty ""
      in
      UB.hax_failure_expr e.span typ (context, kind) (UA.LiftToFullAst.expr e)

  and dexpr_unwrapped (e : A.expr) : B.expr =
    { e = dexpr' e.span e.e; span = e.span; typ = dty e.span e.typ }

  and dexpr' (span : span) (expr : A.expr') : B.expr' =
    match expr with
    | If { cond; then_; else_ } ->
        If
          {
            cond = dexpr cond;
            then_ = dexpr then_;
            else_ = Option.map ~f:dexpr else_;
          }
    | App { f; args; generic_args; bounds_impls; trait } ->
        let dgeneric_values = List.map ~f:(dgeneric_value span) in
        App
          {
            f = dexpr f;
            args = List.map ~f:dexpr args;
            generic_args = dgeneric_values generic_args;
            bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls;
            trait = Option.map ~f:(dimpl_expr span *** dgeneric_values) trait;
          }
    | Literal lit -> Literal lit
    | Array l -> Array (List.map ~f:dexpr l)
    | Construct { constructor; is_record; is_struct; fields; base } ->
        Construct
          {
            constructor;
            is_record;
            is_struct;
            fields = List.map ~f:(map_snd dexpr) fields;
            base = Option.map ~f:(dexpr *** S.construct_base span) base;
          }
    | Match { scrutinee; arms } ->
        Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }
    | Let { monadic; lhs; rhs; body } ->
        Let
          {
            monadic =
              Option.map
                ~f:(dsupported_monads span *** S.monadic_binding span)
                monadic;
            lhs = dpat lhs;
            rhs = dexpr rhs;
            body = dexpr body;
          }
    | Block { e; safety_mode; witness } ->
        Block
          {
            e = dexpr e;
            safety_mode = dsafety_kind span safety_mode;
            witness = S.block span witness;
          }
    | LocalVar local_ident -> LocalVar local_ident
    | GlobalVar global_ident -> GlobalVar global_ident
    | Ascription { e; typ } -> Ascription { e = dexpr e; typ = dty span typ }
    | MacroInvokation { macro; args; witness } ->
        MacroInvokation { macro; args; witness = S.macro span witness }
    | Assign { lhs; e; witness } ->
        Assign
          {
            lhs = dlhs span lhs;
            e = dexpr e;
            witness = S.mutable_variable span witness;
          }
    | Loop { body; kind; state; label; witness; control_flow } ->
        Loop
          {
            body = dexpr body;
            kind = dloop_kind span kind;
            state = Option.map ~f:(dloop_state span) state;
            label;
            control_flow =
              Option.map
                ~f:
                  (( function
                   | A.BreakOnly -> B.BreakOnly
                   | A.BreakOrReturn -> B.BreakOrReturn )
                  *** S.fold_like_loop span)
                control_flow;
            witness = S.loop span witness;
          }
    | Break { e; acc; label; witness } ->
        Break
          {
            e = dexpr e;
            acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc;
            label;
            witness = (S.break span *** S.loop span) witness;
          }
    | Return { e; witness } ->
        Return { e = dexpr e; witness = S.early_exit span witness }
    | QuestionMark { e; return_typ; witness } ->
        QuestionMark
          {
            e = dexpr e;
            return_typ = dty span return_typ;
            witness = S.question_mark span witness;
          }
    | Continue { acc; label; witness = w1, w2 } ->
        Continue
          {
            acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc;
            label;
            witness = (S.continue span w1, S.loop span w2);
          }
    | Borrow { kind; e; witness } ->
        Borrow
          {
            kind = dborrow_kind span kind;
            e = dexpr e;
            witness = S.reference span witness;
          }
    | EffectAction { action; argument } ->
        EffectAction
          { action = S.monadic_action span action; argument = dexpr argument }
    | AddressOf { mut; e; witness } ->
        AddressOf
          {
            mut = dmutability span S.mutable_pointer mut;
            e = dexpr e;
            witness = S.raw_pointer span witness;
          }
    | Closure { params; body; captures } ->
        Closure
          {
            params = List.map ~f:dpat params;
            body = dexpr body;
            captures = List.map ~f:dexpr captures;
          }
    | Quote quote -> Quote (dquote span quote)

  (* Quotes are verbatim backend code interleaved with AST fragments; only
     the fragments are translated. *)
  and dquote (span : span) ({ contents; witness } : A.quote) : B.quote =
    let f = function
      | A.Verbatim code -> B.Verbatim code
      | Expr e -> Expr (dexpr e)
      | Pattern p -> Pattern (dpat p)
      | Typ p -> Typ (dty span p)
    in
    { contents = List.map ~f contents; witness = S.quote span witness }

  and dloop_kind (span : span) (k : A.loop_kind) : B.loop_kind =
    match k with
    | UnconditionalLoop -> UnconditionalLoop
    | WhileLoop { condition; witness } ->
        WhileLoop
          { condition = dexpr condition; witness = S.while_loop span witness }
    | ForLoop { it; pat; witness } ->
        ForLoop
          { it = dexpr it; pat = dpat pat; witness = S.for_loop span witness }
    | ForIndexLoop { start; end_; var; var_typ; witness } ->
        ForIndexLoop
          {
            start = dexpr start;
            end_ = dexpr end_;
            var;
            var_typ = dty span var_typ;
            witness = S.for_index_loop span witness;
          }

  and dloop_state (span : span) (s : A.loop_state) : B.loop_state =
    {
      init = dexpr s.init;
      bpat = dpat s.bpat;
      witness = S.state_passing_loop span s.witness;
    }

  and darm (a : A.arm) : B.arm = { span = a.span; arm = darm' a.arm }

  and darm' (a : A.arm') : B.arm' =
    {
      arm_pat = dpat a.arm_pat;
      body = dexpr a.body;
      guard = Option.map ~f:dguard a.guard;
    }

  and dguard (a : A.guard) : B.guard =
    { span = a.span; guard = dguard' a.span a.guard }

  and dguard' (span : span) (guard : A.guard') : B.guard' =
    match guard with
    | IfLet { lhs; rhs; witness } ->
        IfLet
          {
            lhs = dpat lhs;
            rhs = dexpr rhs;
            witness = S.match_guard span witness;
          }

  (* Left-hand sides of assignments. *)
  and dlhs (span : span) (lhs : A.lhs) : B.lhs =
    match lhs with
    | LhsFieldAccessor { e; field; typ; witness } ->
        LhsFieldAccessor
          {
            e = dlhs span e;
            field;
            typ = dty span typ;
            witness = S.nontrivial_lhs span witness;
          }
    | LhsArrayAccessor { e; index; typ; witness } ->
        LhsArrayAccessor
          {
            e = dlhs span e;
            index = dexpr index;
            typ = dty span typ;
            witness = S.nontrivial_lhs span witness;
          }
    | LhsLocalVar { var; typ } -> LhsLocalVar { var; typ = dty span typ }
    | LhsVecRef { e; typ; witness } ->
        LhsVecRef
          {
            e = dlhs span e;
            typ = dty span typ;
            witness = S.nontrivial_lhs span witness;
          }
    | LhsArbitraryExpr { e; witness } ->
        LhsArbitraryExpr { e = dexpr e; witness = S.arbitrary_lhs span witness }

  (* Item-level translation (functions, types, traits, impls, ...). *)
  module Item = struct
    (* TODO: remove span argument *)
    let dgeneric_param _span ({ ident; span; attrs; kind } : A.generic_param) :
        B.generic_param =
      (* Note: the parameter's own [span] field is used, not the argument
         (hence the TODO above). *)
      let kind =
        match kind with
        | GPLifetime { witness } ->
            B.GPLifetime { witness = S.lifetime span witness }
        | GPType -> GPType
        | GPConst { typ } -> GPConst { typ = dty span typ }
      in
      { ident; span; kind; attrs }

    let dgeneric_constraint (span : span)
        (generic_constraint : A.generic_constraint) : B.generic_constraint =
      match generic_constraint with
      | GCLifetime (lf, witness) -> B.GCLifetime (lf, S.lifetime span witness)
      | GCType impl_ident -> B.GCType (dimpl_ident span impl_ident)
      | GCProjection projection ->
          B.GCProjection (dprojection_predicate span projection)

    let dgenerics (span : span) (g : A.generics) : B.generics =
      {
        params = List.map ~f:(dgeneric_param span) g.params;
        constraints = List.map ~f:(dgeneric_constraint span) g.constraints;
      }

    (* A parameter's type is translated under its own [typ_span] when
       available, falling back to the item span. *)
    let dparam (span : span) (p : A.param) : B.param =
      {
        pat = dpat p.pat;
        typ = dty (Option.value ~default:span p.typ_span) p.typ;
        typ_span = p.typ_span;
        attrs = p.attrs;
      }

    let dvariant (span : span) (v : A.variant) : B.variant =
      {
        name = v.name;
        arguments = List.map ~f:(map_snd3 @@ dty span) v.arguments;
        is_record = v.is_record;
        attrs = v.attrs;
      }

    let rec dtrait_item' (span : span) (ti : A.trait_item') : B.trait_item' =
      match ti with
      | TIType idents -> TIType (List.map ~f:(dimpl_ident span) idents)
      | TIFn t -> TIFn (dty span t)
      | TIDefault { params; body; witness } ->
          TIDefault
            {
              params = List.map ~f:(dparam span) params;
              body = dexpr body;
              witness = S.trait_item_default span witness;
            }

    and dtrait_item (ti : A.trait_item) : B.trait_item =
      {
        ti_span = ti.ti_span;
        ti_generics = dgenerics ti.ti_span ti.ti_generics;
        ti_v = dtrait_item' ti.ti_span ti.ti_v;
        ti_ident = ti.ti_ident;
        ti_attrs = ti.ti_attrs;
      }

    let rec dimpl_item' (span : span) (ii : A.impl_item') : B.impl_item' =
      match ii with
      | IIType { typ; parent_bounds } ->
          IIType
            {
              typ = dty span typ;
              parent_bounds =
                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;
            }
      | IIFn { body; params } ->
          IIFn { body = dexpr body; params = List.map ~f:(dparam span) params }

    and dimpl_item (ii : A.impl_item) : B.impl_item =
      {
        ii_span = ii.ii_span;
        ii_generics = dgenerics ii.ii_span ii.ii_generics;
        ii_v = dimpl_item' ii.ii_span ii.ii_v;
        ii_ident = ii.ii_ident;
        ii_attrs = ii.ii_attrs;
      }

    (* Item entry point: if translating [i] raises a span-free error, the
       whole item is replaced by an error item whose message embeds the
       pretty-printed diagnostic and the last available Rust-like rendering
       of the item.
       NOTE(review): [Stdlib.Obj.magic] casts [A.item] to the full AST
       purely for printing — this presumably relies on all AST instances
       sharing a runtime representation; confirm before touching. *)
    let rec ditem (i : A.item) : B.item list =
      try ditem_unwrapped i
      with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->
        let error = Diagnostics.pretty_print_context_kind context kind in
        let cast_item : A.item -> Ast.Full.item = Stdlib.Obj.magic in
        let ast = cast_item i |> Print_rust.pitem_str in
        let msg = error ^ "\nLast available AST for this item:\n\n" ^ ast in
        [ B.make_hax_error_item i.span i.ident msg ]

    and ditem_unwrapped (item : A.item) : B.item list =
      [
        {
          v = ditem' item.span item.v;
          span = item.span;
          ident = item.ident;
          attrs = item.attrs;
        };
      ]

    and ditem' (span : span) (item : A.item') : B.item' =
      match item with
      | Fn { name; generics; body; params; safety } ->
          B.Fn
            {
              name;
              generics = dgenerics span generics;
              body = dexpr body;
              params = List.map ~f:(dparam span) params;
              safety = dsafety_kind span safety;
            }
      | Type { name; generics; variants; is_struct } ->
          B.Type
            {
              name;
              generics = dgenerics span generics;
              variants = List.map ~f:(dvariant span) variants;
              is_struct;
            }
      | TyAlias { name; generics; ty } ->
          B.TyAlias
            { name; generics = dgenerics span generics; ty = dty span ty }
      | IMacroInvokation { macro; argument; span; witness } ->
          B.IMacroInvokation
            { macro; argument; span; witness = S.macro span witness }
      | Trait { name; generics; items; safety } ->
          B.Trait
            {
              name;
              generics = dgenerics span generics;
              items = List.map ~f:dtrait_item items;
              safety = dsafety_kind span safety;
            }
      | Impl
          {
            generics;
            self_ty;
            of_trait = trait_id, trait_generics;
            items;
            parent_bounds;
            safety;
          } ->
          B.Impl
            {
              generics = dgenerics span generics;
              self_ty = dty span self_ty;
              of_trait =
                (trait_id, List.map ~f:(dgeneric_value span) trait_generics);
              items = List.map ~f:dimpl_item items;
              parent_bounds =
                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;
              safety = dsafety_kind span safety;
            }
      | Alias { name; item } -> B.Alias { name; item }
      | Use { path; is_external; rename } -> B.Use { path; is_external; rename }
      | Quote { quote; origin } -> Quote { quote = dquote span quote; origin }
      | HaxError e -> B.HaxError e
      | NotImplementedYet -> B.NotImplementedYet

    let ditems = List.concat_map ~f:ditem
  end

  include Item
end


================================================
FILE: engine/lib/untyped_phases/gen.js
================================================
#!/usr/bin/env node
// Generator for `untyped_phases.ml` (written to stdout): for every phase in
// ../phases (plus every rejection phase and one hand-listed special case),
// it emits an OCaml wrapper module that runs the phase on the full-featured
// AST.  The emitted text must stay byte-identical to the committed
// `untyped_phases.ml`, so the template literals below are load-bearing,
// whitespace included.
const { readdirSync, readFileSync } = require('fs');

// Parse a phase's .mli: collect the `with/and type <feature> = Features.<Status>.<feature>`
// constraints appearing before any `include module type`, as [Status, feature] pairs.
let f = s => ((s.split("include module type")[0] || "").match(/(with|and) type[^)]*/g) || []).join("").split('and type').map(x => x.replace(/(with|and) type/g, '').trim()).filter(x => x).map(x => x.split('=')[1].trim().split('.').slice(1));

// One record per phase interface file: its filename and feature constraints.
let phases = readdirSync("../phases").filter(x => x.endsWith(".mli")).map(filename => ({
  filename,
  contents: f(readFileSync("../phases/" + filename).toString()),
}));

// Names of the rejection sub-modules declared in phase_reject.ml
// (`module Foo` at the start of a line).
let rejections = readFileSync("../phases/phase_reject.ml")
  .toString()
  .split('\n')
  .map(s => s.match(/^module ([a-z][a-z_]+)/i)?.[1])
  .filter(s => s);

// Preamble: the PHASE_FULL signature and bind/identity combinators.
console.log(`
open Prelude
    
module type PHASE_FULL =
  Phase_utils.PHASE
    with module FA = Features.Full
     and module FB = Features.Full
     and module A = Ast.Full
     and module B = Ast.Full

module BindPhaseFull (A : PHASE_FULL) (B : PHASE_FULL) : PHASE_FULL = struct
  include Phase_utils.BindPhase (A) (B)
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full
end

module IdentityFull : PHASE_FULL = struct
  include Phase_utils.Identity (Features.Full)
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full
end

let bind (module A : PHASE_FULL) (module B : PHASE_FULL) : (module PHASE_FULL) =
  (module BindPhaseFull (A) (B))

let bind_list : (module PHASE_FULL) list -> (module PHASE_FULL) =
  List.reduce ~f:bind
  >> Option.value ~default:(module IdentityFull : PHASE_FULL)

`);


// Derive OCaml-friendly names from each filename:
// `phase_foo_bar.mli` -> name_lc "foo_bar", module name "Foo_bar".
for (let phase of phases) {
  let name_lc = phase.filename.replace(/^phase_/, "").replace(/[.]mli$/, "");
  let name = name_lc.replace(/^(.)/, l => l.toUpperCase());
  phase.name_lc = name_lc;
  phase.name = name;
  phase.module_expression = `Phases.${name}`;
}


// Add one pseudo-phase per rejection module: `FooBar` -> `Reject_foo_bar`.
for (let rejection of rejections) {
  let name = 'Reject_' + rejection.replace(/^(.)/, l => l.toLowerCase()).replace(/[A-Z]/g, c => `_${c}`).toLowerCase();
  phases.push({
    name_lc: name.toLowerCase(),
    name,
    module_expression: 'Phase_reject.' + rejection,
    contents: [],
  });
}

// Special case: the Hoist phase lives in Side_effect_utils, not under
// Phases, and its feature constraints are listed by hand.
phases.push({
  name_lc: "hoist_side_effects",
  name: 'Hoist_side_effects',
  module_expression: 'Side_effect_utils.Hoist',
  contents: [
    ['Off', 'monadic_binding'],
    ['Off', 'for_index_loop'],
  ],
});


// Emit one wrapper module per phase: ExpectedFA is the feature set the
// phase expects, Coerce gates the full AST into it (`On` features are
// always accepted, `Off` features are rejected with a diagnostic), and the
// result is cast back to Ast.Full via Obj.magic.
for (let phase of phases) {
  let { name, module_expression } = phase;

  console.log(`
module ${name} : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
    ${phase.contents.map(([status, f]) => `include ${status}.${f.replace(/^(.)/, l => l.toUpperCase())}`).join('\n')}
  end

  module Phase = ${module_expression} (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        ${phase.contents.map(([status, f]) =>
    `let ${f} = ` + (status == 'On' ? 'fun _ _ -> Features.On.' + f : 'reject')
  ).join('\n')}

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end
`);
}


// Emit the value-level catalogue: one first-class module per phase, the
// full list, a name -> phase lookup, and the list of phase names.
for (let phase of phases) {
  console.log(`let ${phase.name_lc} : (module PHASE_FULL) = (module ${phase.name})`)
}
console.log(`let phases_list : (module PHASE_FULL) list = [${phases.map(p => p.name_lc).join(';')}]`)


console.log(`
let phase_of_name: string -> (module PHASE_FULL) option = 
    function
    ${phases.map(p => `| "${p.name_lc}" -> Some ${p.name_lc}`).join('')}
    | _ -> None

let phases: string list = [${phases.map(p => `"${p.name_lc}"`).join(';')}]

(*
${phases.map(p => `${p.name_lc}`).join(', ')}
*)
`);






================================================
FILE: engine/lib/untyped_phases/untyped_phases.ml
================================================
open Prelude

(* NOTE(review): this file appears to be generated by [gen.js] in this
   directory (which prints an identical preamble) — confirm before
   hand-editing. *)

(* A phase whose input and output are both the full-featured AST. *)
module type PHASE_FULL =
  Phase_utils.PHASE
    with module FA = Features.Full
     and module FB = Features.Full
     and module A = Ast.Full
     and module B = Ast.Full

(* Sequential composition of two full-AST phases.  The module fields are
   restated so the result satisfies [PHASE_FULL]. *)
module BindPhaseFull (A : PHASE_FULL) (B : PHASE_FULL) : PHASE_FULL = struct
  include Phase_utils.BindPhase (A) (B)
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full
end

(* The do-nothing full-AST phase; neutral element for [bind_list]. *)
module IdentityFull : PHASE_FULL = struct
  include Phase_utils.Identity (Features.Full)
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full
end

(* First-class-module wrapper around [BindPhaseFull]. *)
let bind (module A : PHASE_FULL) (module B : PHASE_FULL) : (module PHASE_FULL) =
  (module BindPhaseFull (A) (B))

(* Folds a list of phases into a single phase; the empty list yields the
   identity phase. *)
let bind_list : (module PHASE_FULL) list -> (module PHASE_FULL) =
  List.reduce ~f:bind
  >> Option.value ~default:(module IdentityFull : PHASE_FULL)

(* Runs [Phases.And_mut_defsite] on the full AST.  [ExpectedFA] is the
   feature set the phase expects; [Coerce] gates [Features.Full] into it
   (every listed feature is On, coerced by constant functions), and
   [to_full_ast] casts the result back via an unchecked [Obj.magic]. *)
module And_mut_defsite : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
    include On.Mutable_variable
    include On.Mutable_reference
    include On.Nontrivial_lhs
    include On.Arbitrary_lhs
    include On.Reference
  end

  module Phase = Phases.And_mut_defsite (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let mutable_variable = fun _ _ -> Features.On.mutable_variable
        let mutable_reference = fun _ _ -> Features.On.mutable_reference
        let nontrivial_lhs = fun _ _ -> Features.On.nontrivial_lhs
        let arbitrary_lhs = fun _ _ -> Features.On.arbitrary_lhs
        let reference = fun _ _ -> Features.On.reference

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Bundle_cycles] on the full AST.  The phase has no extra
   feature constraints, so [Coerce] only applies [DefaultSubtype]. *)
module Bundle_cycles : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
  end

  module Phase = Phases.Bundle_cycles (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Cf_into_monads] on the full AST.  The phase expects the
   monadic features Off, so [Coerce] rejects any occurrence of them with a
   diagnostic. *)
module Cf_into_monads : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
    include Off.Monadic_action
    include Off.Monadic_binding
  end

  module Phase = Phases.Cf_into_monads (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let monadic_action = reject
        let monadic_binding = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Direct_and_mut] on the full AST.  The phase expects raw and
   mutable pointers Off, so [Coerce] rejects those features. *)
module Direct_and_mut : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
    include Off.Raw_pointer
    include Off.Mutable_pointer
  end

  module Phase = Phases.Direct_and_mut (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let raw_pointer = reject
        let mutable_pointer = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Drop_blocks] on the full AST; no extra feature constraints,
   so the coercion is just [DefaultSubtype]. *)
module Drop_blocks : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
  end

  module Phase = Phases.Drop_blocks (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Drop_match_guards] on the full AST; no extra feature
   constraints, so the coercion is just [DefaultSubtype]. *)
module Drop_match_guards : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
  end

  module Phase = Phases.Drop_match_guards (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Drop_references] on the full AST.  The phase expects raw
   pointers and mutable references Off, so [Coerce] rejects those
   features. *)
module Drop_references : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
    include Off.Raw_pointer
    include Off.Mutable_reference
  end

  module Phase = Phases.Drop_references (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let raw_pointer = reject
        let mutable_reference = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Runs [Phases.Drop_return_break_continue] on the full AST; no extra
   feature constraints, so the coercion is just [DefaultSubtype]. *)
module Drop_return_break_continue : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  module ExpectedFA = struct
    open Features
    include On
  end

  module Phase = Phases.Drop_return_break_continue (ExpectedFA)

  module Coerce =
    Feature_gate.Make (Features.Full) (ExpectedFA)
      (struct
        module A = Features.Full
        module B = ExpectedFA
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  let metadata = Phase.metadata
  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic

  let ditems =
    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast
end

(* Lifts the [Drop_sized_trait] phase to the full AST: items are coerced
   into the phase's expected feature set, processed, then cast back. *)
module Drop_sized_trait : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Drop_sized_trait (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Explicit_conversions] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Explicit_conversions : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Explicit_conversions (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Functionalize_loops] phase to the full AST: items are coerced
   into the feature set the phase expects, processed, then cast back. *)
module Functionalize_loops : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: everything on, except
     the control-flow features the phase refuses to handle. *)
  module InputFeatures = struct
    open Features
    include On
    include Off.Continue
    include Off.Early_exit
    include Off.Break
  end

  module Phase = Phases.Functionalize_loops (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures];
     unsupported features are rejected with a diagnostic. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let continue = reject
        let early_exit = reject
        let break = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Hoist_disjunctive_patterns] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Hoist_disjunctive_patterns : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Hoist_disjunctive_patterns (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Local_mutation] phase to the full AST: items are coerced
   into the feature set the phase expects, processed, then cast back. *)
module Local_mutation : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: everything on, except
     the features the phase refuses to handle. *)
  module InputFeatures = struct
    open Features
    include On
    include Off.Mutable_reference
    include Off.Mutable_pointer
    include Off.Raw_pointer
    include Off.Arbitrary_lhs
    include Off.Nontrivial_lhs
    include Off.Monadic_action
    include Off.Monadic_binding
    include Off.For_index_loop
  end

  module Phase = Phases.Local_mutation (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures];
     unsupported features are rejected with a diagnostic. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let mutable_reference = reject
        let mutable_pointer = reject
        let raw_pointer = reject
        let arbitrary_lhs = reject
        let nontrivial_lhs = reject
        let monadic_action = reject
        let monadic_binding = reject
        let for_index_loop = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Newtype_as_refinement] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Newtype_as_refinement : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Newtype_as_refinement (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reconstruct_asserts] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reconstruct_asserts : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reconstruct_asserts (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reconstruct_for_index_loops] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reconstruct_for_index_loops : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reconstruct_for_index_loops (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reconstruct_for_loops] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reconstruct_for_loops : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reconstruct_for_loops (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reconstruct_question_marks] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reconstruct_question_marks : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reconstruct_question_marks (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reconstruct_while_loops] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reconstruct_while_loops : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reconstruct_while_loops (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reject_impl_type_method] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Reject_impl_type_method : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reject_impl_type_method (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Reorder_fields] phase to the full AST: items are coerced
   into the phase's expected feature set, processed, then cast back. *)
module Reorder_fields : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Reorder_fields (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Rewrite_control_flow] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Rewrite_control_flow : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Rewrite_control_flow (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Rewrite_local_self] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Rewrite_local_self : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Rewrite_local_self (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Simplify_hoisting] phase to the full AST: items are coerced
   into the phase's expected feature set, processed, then cast back. *)
module Simplify_hoisting : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Simplify_hoisting (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Simplify_match_return] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Simplify_match_return : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Simplify_match_return (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Simplify_question_marks] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Simplify_question_marks : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Simplify_question_marks (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Sort_items] phase to the full AST: items are coerced into
   the phase's expected feature set, processed, then cast back. *)
module Sort_items : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Sort_items (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Sort_items_namespace_wise] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Sort_items_namespace_wise : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Sort_items_namespace_wise (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Specialize] phase to the full AST: items are coerced into
   the phase's expected feature set, processed, then cast back. *)
module Specialize : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Specialize (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Traits_specs] phase to the full AST: items are coerced into
   the phase's expected feature set, processed, then cast back. *)
module Traits_specs : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Traits_specs (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Transform_hax_lib_inline] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Transform_hax_lib_inline : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Transform_hax_lib_inline (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Trivialize_assign_lhs] phase to the full AST: items are
   coerced into the phase's expected feature set, processed, then cast back. *)
module Trivialize_assign_lhs : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phases.Trivialize_assign_lhs (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Arbitrary_lhs] rejection phase to the full AST:
   items are coerced, the phase rejects the feature, results are cast back. *)
module Reject_arbitrary_lhs : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Arbitrary_lhs (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Continue] rejection phase to the full AST:
   items are coerced, the phase rejects the feature, results are cast back. *)
module Reject_continue : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Continue (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Question_mark] rejection phase to the full AST:
   items are coerced, the phase rejects the feature, results are cast back. *)
module Reject_question_mark : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Question_mark (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.RawOrMutPointer] rejection phase to the full AST:
   items are coerced, the phase rejects the features, results are cast back. *)
module Reject_raw_or_mut_pointer : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.RawOrMutPointer (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.EarlyExit] rejection phase to the full AST:
   items are coerced, the phase rejects the feature, results are cast back. *)
module Reject_early_exit : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.EarlyExit (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.As_pattern] rejection phase to the full AST:
   items are coerced, the phase rejects the feature, results are cast back. *)
module Reject_as_pattern : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.As_pattern (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Dyn] rejection phase to the full AST: items are
   coerced, the phase rejects the feature, results are cast back. *)
module Reject_dyn : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Dyn (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Trait_item_default] rejection phase to the full
   AST: items are coerced, the phase rejects the feature, results are cast
   back. *)
module Reject_trait_item_default : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Trait_item_default (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Phase_reject.Unsafe] rejection phase to the full AST: items
   are coerced, the phase rejects the feature, results are cast back. *)
module Reject_unsafe : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: every feature enabled. *)
  module InputFeatures = struct
    open Features
    include On
  end

  module Phase = Phase_reject.Unsafe (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures]. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Lifts the [Side_effect_utils.Hoist] phase to the full AST: items are
   coerced into the feature set the phase expects, processed, then cast back. *)
module Hoist_side_effects : PHASE_FULL = struct
  module FA = Features.Full
  module FB = Features.Full
  module A = Ast.Full
  module B = Ast.Full

  (* Feature set consumed by the underlying phase: everything on, except
     the features the phase refuses to handle. *)
  module InputFeatures = struct
    open Features
    include On
    include Off.Monadic_binding
    include Off.For_index_loop
  end

  module Phase = Side_effect_utils.Hoist (InputFeatures)

  (* Gate coercing [Features.Full] items down to [InputFeatures];
     unsupported features are rejected with a diagnostic. *)
  module Coerce =
    Feature_gate.Make (Features.Full) (InputFeatures)
      (struct
        module A = Features.Full
        module B = InputFeatures
        include Feature_gate.DefaultSubtype

        let monadic_binding = reject
        let for_index_loop = reject

        let metadata =
          Phase_reject.make_metadata
            (CoercionForUntypedPhase
               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))
      end)

  (* Sound cast: the phase's output features are a subset of [Features.Full]. *)
  let to_full : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic
  let metadata = Phase.metadata

  let ditems items =
    to_full (Phase.ditems (List.concat (List.map ~f:Coerce.ditem items)))
end

(* Expose each phase above as a first-class module value, so backends can
   assemble phase pipelines as plain lists. The annotation lives in the
   pack expression, giving each binding the type [(module PHASE_FULL)]. *)
let and_mut_defsite = (module And_mut_defsite : PHASE_FULL)
let bundle_cycles = (module Bundle_cycles : PHASE_FULL)
let cf_into_monads = (module Cf_into_monads : PHASE_FULL)
let direct_and_mut = (module Direct_and_mut : PHASE_FULL)
let drop_blocks = (module Drop_blocks : PHASE_FULL)
let drop_match_guards = (module Drop_match_guards : PHASE_FULL)
let drop_references = (module Drop_references : PHASE_FULL)
let drop_return_break_continue = (module Drop_return_break_continue : PHASE_FULL)
let drop_sized_trait = (module Drop_sized_trait : PHASE_FULL)
let explicit_conversions = (module Explicit_conversions : PHASE_FULL)
let functionalize_loops = (module Functionalize_loops : PHASE_FULL)
let hoist_disjunctive_patterns = (module Hoist_disjunctive_patterns : PHASE_FULL)
let local_mutation = (module Local_mutation : PHASE_FULL)
let newtype_as_refinement = (module Newtype_as_refinement : PHASE_FULL)
let reconstruct_asserts = (module Reconstruct_asserts : PHASE_FULL)
let reconstruct_for_index_loops = (module Reconstruct_for_index_loops : PHASE_FULL)
let reconstruct_for_loops = (module Reconstruct_for_loops : PHASE_FULL)
let reconstruct_question_marks = (module Reconstruct_question_marks : PHASE_FULL)
let reconstruct_while_loops = (module Reconstruct_while_loops : PHASE_FULL)
let reject_impl_type_method = (module Reject_impl_type_method : PHASE_FULL)
let reorder_fields = (module Reorder_fields : PHASE_FULL)
let rewrite_control_flow = (module Rewrite_control_flow : PHASE_FULL)
let rewrite_local_self = (module Rewrite_local_self : PHASE_FULL)
let simplify_hoisting = (module Simplify_hoisting : PHASE_FULL)
let simplify_match_return = (module Simplify_match_return : PHASE_FULL)
let simplify_question_marks = (module Simplify_question_marks : PHASE_FULL)
let sort_items = (module Sort_items : PHASE_FULL)
let sort_items_namespace_wise = (module Sort_items_namespace_wise : PHASE_FULL)
let specialize = (module Specialize : PHASE_FULL)
let traits_specs = (module Traits_specs : PHASE_FULL)
let transform_hax_lib_inline = (module Transform_hax_lib_inline : PHASE_FULL)
let trivialize_assign_lhs = (module Trivialize_assign_lhs : PHASE_FULL)
let reject_arbitrary_lhs = (module Reject_arbitrary_lhs : PHASE_FULL)
let reject_continue = (module Reject_continue : PHASE_FULL)
let reject_question_mark = (module Reject_question_mark : PHASE_FULL)
let reject_raw_or_mut_pointer = (module Reject_raw_or_mut_pointer : PHASE_FULL)
let reject_early_exit = (module Reject_early_exit : PHASE_FULL)
let reject_as_pattern = (module Reject_as_pattern : PHASE_FULL)
let reject_dyn = (module Reject_dyn : PHASE_FULL)
let reject_trait_item_default = (module Reject_trait_item_default : PHASE_FULL)
let reject_unsafe = (module Reject_unsafe : PHASE_FULL)
let hoist_side_effects = (module Hoist_side_effects : PHASE_FULL)

(* Every phase, in declaration order. Must agree with [phase_of_name] and
   [phases]. *)
let phases_list : (module PHASE_FULL) list =
  [
    and_mut_defsite;
    bundle_cycles;
    cf_into_monads;
    direct_and_mut;
    drop_blocks;
    drop_match_guards;
    drop_references;
    drop_return_break_continue;
    drop_sized_trait;
    explicit_conversions;
    functionalize_loops;
    hoist_disjunctive_patterns;
    local_mutation;
    newtype_as_refinement;
    reconstruct_asserts;
    reconstruct_for_index_loops;
    reconstruct_for_loops;
    reconstruct_question_marks;
    reconstruct_while_loops;
    reject_impl_type_method;
    reorder_fields;
    rewrite_control_flow;
    rewrite_local_self;
    simplify_hoisting;
    simplify_match_return;
    simplify_question_marks;
    sort_items;
    sort_items_namespace_wise;
    specialize;
    traits_specs;
    transform_hax_lib_inline;
    trivialize_assign_lhs;
    reject_arbitrary_lhs;
    reject_continue;
    reject_question_mark;
    reject_raw_or_mut_pointer;
    reject_early_exit;
    reject_as_pattern;
    reject_dyn;
    reject_trait_item_default;
    reject_unsafe;
    hoist_side_effects;
  ]

(* Looks up a phase by its snake_case name; returns [None] for unknown names.
   The set of cases must stay in sync with [phases_list] and [phases]. *)
let phase_of_name : string -> (module PHASE_FULL) option = function
  | "and_mut_defsite" -> Some and_mut_defsite
  | "bundle_cycles" -> Some bundle_cycles
  | "cf_into_monads" -> Some cf_into_monads
  | "direct_and_mut" -> Some direct_and_mut
  | "drop_blocks" -> Some drop_blocks
  | "drop_match_guards" -> Some drop_match_guards
  | "drop_references" -> Some drop_references
  | "drop_return_break_continue" -> Some drop_return_break_continue
  | "drop_sized_trait" -> Some drop_sized_trait
  | "explicit_conversions" -> Some explicit_conversions
  | "functionalize_loops" -> Some functionalize_loops
  | "hoist_disjunctive_patterns" -> Some hoist_disjunctive_patterns
  | "local_mutation" -> Some local_mutation
  | "newtype_as_refinement" -> Some newtype_as_refinement
  | "reconstruct_asserts" -> Some reconstruct_asserts
  | "reconstruct_for_index_loops" -> Some reconstruct_for_index_loops
  | "reconstruct_for_loops" -> Some reconstruct_for_loops
  | "reconstruct_question_marks" -> Some reconstruct_question_marks
  | "reconstruct_while_loops" -> Some reconstruct_while_loops
  | "reject_impl_type_method" -> Some reject_impl_type_method
  | "reorder_fields" -> Some reorder_fields
  | "rewrite_control_flow" -> Some rewrite_control_flow
  | "rewrite_local_self" -> Some rewrite_local_self
  | "simplify_hoisting" -> Some simplify_hoisting
  | "simplify_match_return" -> Some simplify_match_return
  | "simplify_question_marks" -> Some simplify_question_marks
  | "sort_items" -> Some sort_items
  | "sort_items_namespace_wise" -> Some sort_items_namespace_wise
  | "specialize" -> Some specialize
  | "traits_specs" -> Some traits_specs
  | "transform_hax_lib_inline" -> Some transform_hax_lib_inline
  | "trivialize_assign_lhs" -> Some trivialize_assign_lhs
  | "reject_arbitrary_lhs" -> Some reject_arbitrary_lhs
  | "reject_continue" -> Some reject_continue
  | "reject_question_mark" -> Some reject_question_mark
  | "reject_raw_or_mut_pointer" -> Some reject_raw_or_mut_pointer
  | "reject_early_exit" -> Some reject_early_exit
  | "reject_as_pattern" -> Some reject_as_pattern
  | "reject_dyn" -> Some reject_dyn
  | "reject_trait_item_default" -> Some reject_trait_item_default
  | "reject_unsafe" -> Some reject_unsafe
  | "hoist_side_effects" -> Some hoist_side_effects
  | _ -> None

(* The names of every phase, in the same order as [phases_list]; each of them
   is accepted by [phase_of_name]. *)
let phases : string list =
  [
    "and_mut_defsite";
    "bundle_cycles";
    "cf_into_monads";
    "direct_and_mut";
    "drop_blocks";
    "drop_match_guards";
    "drop_references";
    "drop_return_break_continue";
    "drop_sized_trait";
    "explicit_conversions";
    "functionalize_loops";
    "hoist_disjunctive_patterns";
    "local_mutation";
    "newtype_as_refinement";
    "reconstruct_asserts";
    "reconstruct_for_index_loops";
    "reconstruct_for_loops";
    "reconstruct_question_marks";
    "reconstruct_while_loops";
    "reject_impl_type_method";
    "reorder_fields";
    "rewrite_control_flow";
    "rewrite_local_self";
    "simplify_hoisting";
    "simplify_match_return";
    "simplify_question_marks";
    "sort_items";
    "sort_items_namespace_wise";
    "specialize";
    "traits_specs";
    "transform_hax_lib_inline";
    "trivialize_assign_lhs";
    "reject_arbitrary_lhs";
    "reject_continue";
    "reject_question_mark";
    "reject_raw_or_mut_pointer";
    "reject_early_exit";
    "reject_as_pattern";
    "reject_dyn";
    "reject_trait_item_default";
    "reject_unsafe";
    "hoist_side_effects";
  ]

(*
and_mut_defsite, bundle_cycles, cf_into_monads, direct_and_mut, drop_blocks, drop_match_guards, drop_references, drop_return_break_continue, drop_sized_trait, explicit_conversions, functionalize_loops, hoist_disjunctive_patterns, local_mutation, newtype_as_refinement, reconstruct_asserts, reconstruct_for_index_loops, reconstruct_for_loops, reconstruct_question_marks, reconstruct_while_loops, reject_impl_type_method, reorder_fields, rewrite_control_flow, rewrite_local_self, simplify_hoisting, simplify_match_return, simplify_question_marks, sort_items, sort_items_namespace_wise, specialize, traits_specs, transform_hax_lib_inline, trivialize_assign_lhs, reject_arbitrary_lhs, reject_continue, reject_question_mark, reject_raw_or_mut_pointer, reject_early_exit, reject_as_pattern, reject_dyn, reject_trait_item_default, reject_unsafe, hoist_side_effects
*)


================================================
FILE: engine/lib/utils.ml
================================================
open Base

(* Composition operators: [f << g] applies [g] then [f]; [f >> g] is the
   pipeline order, [f] then [g]. *)
let ( << ) f g x = f (g x)
let ( >> ) f g x = g (f x)
(* Fan-out: [(f &&& g) x = (f x, g x)]. *)
let ( &&& ) (f : 'a -> 'b) (g : 'a -> 'c) (x : 'a) : 'b * 'c = (f x, g x)

(* Componentwise map over a pair: [(f *** g) (l, r) = (f l, g r)]. *)
let ( *** ) (f : 'a -> 'b) (g : 'c -> 'd) ((l, r) : 'a * 'c) : 'b * 'd =
  (f l, g r)

(* Pair and triple helpers: projections, componentwise updates, (un)currying. *)
let map_fst f = f *** Fn.id
let map_snd g = Fn.id *** g
let map_fst3 f (x, y, z) = (f x, y, z)
let map_snd3 f (x, y, z) = (x, f y, z)
let map_thd3 f (x, y, z) = (x, y, f z)
let fst3 (x, _, _) = x
let snd3 (_, y, _) = y
let thd3 (_, _, z) = z
let curry f x y = f (x, y)
let uncurry f (x, y) = f x y
let curry3 f x y z = f (x, y, z)
let uncurry3 f (x, y, z) = f x y z
let tup2 a b = (a, b)
let swap (a, b) = (b, a)
let apply f x = f x
(* [let*] is monadic bind over options. *)
let ( let* ) x f = Option.bind ~f x
(* [Some ()] when the condition holds, [None] otherwise: turns a boolean into
   a guard usable with [let*]. *)
let some_if_true = function true -> Some () | _ -> None

(* [Some x] for a singleton list [[x]], [None] for any other length. *)
let expect_singleton : 'a. 'a list -> 'a option = function
  | [ x ] -> Some x
  | _ -> None

(** [let*? () = guard in body] acts as a guard: if [guard] holds, then [body] is
    executed, otherwise [None] is returned. *)
let ( let*? ) (type a) (x : bool) (f : unit -> a option) =
  let* () = some_if_true x in
  f ()

(* Applies [f] to the first character of [s] (passed as a one-character
   string, or [""] when [s] is empty) and leaves the rest of [s] unchanged. *)
let map_first_letter (f : string -> string) (s : string) =
  f (String.prefix s 1) ^ String.drop_prefix s 1

(* Splits [subject] at the first occurrence of [needle]: returns the part
   before the needle and the part after it. [acc] is the reversed prefix
   accumulated so far. When [needle] never occurs, returns [(subject, [])]. *)
let rec split_list_once ~equal ~needle ~acc subject =
  match subject with
  | [] -> (List.rev acc, [])
  | hd :: tl ->
      if List.is_prefix subject ~prefix:needle ~equal then
        (List.rev acc, List.drop subject (List.length needle))
      else split_list_once ~equal ~needle ~acc:(hd :: acc) tl

(* Splits [subject] on every (non-overlapping) occurrence of [needle] — the
   list analogue of splitting a string on a substring. Expects a non-empty
   [needle]: an empty needle matches immediately and would loop forever. *)
let split_list ~equal ~needle (subject : 'a list) : 'a list list =
  let rec h l =
    match split_list_once ~equal ~needle ~acc:[] l with
    | l, [] -> [ l ]
    | l, r -> l :: h r
  in
  h subject

(** [maybe_map ~f l] maps the option-returning function [f] over [l]. Yields
    [Some] of the mapped list iff [f] returned [Some] on every element, and
    [None] as soon as any call returns [None]. *)
let rec maybe_map ~(f : 'a -> 'b option) (l : 'a list) : 'b list option =
  match l with
  | [] -> Some []
  | x :: rest ->
      Option.bind (f x) ~f:(fun y ->
          Option.map (maybe_map ~f rest) ~f:(fun ys -> y :: ys))

(* First character of [s] as a (possibly empty) string. *)
let first_letter s = String.prefix s 1
(* Note: these hold vacuously for strings without cased letters (digits,
   symbols, ""). *)
let is_uppercase s = String.equal s (String.uppercase s)
let is_lowercase s = String.equal s (String.lowercase s)
let start_uppercase = first_letter >> is_uppercase
let start_lowercase = first_letter >> is_lowercase
(* [Some n] when [s] parses as an int, [None] otherwise. *)
let string_to_int s = try Some (Int.of_string s) with _ -> None

(* Splits [s] on every occurrence of the substring [on]. *)
let split_str (s : string) ~(on : string) : string list =
  split_list ~equal:Char.equal ~needle:(String.to_list on) (String.to_list s)
  |> List.map ~f:String.of_char_list

(* All-but-last elements paired with the last element; [None] on []. *)
let last_init (l : 'a list) : ('a list * 'a) option =
  Option.both (List.drop_last l) (List.last l)

(* Pairs every non-empty prefix of [l] with the last element of that prefix:
   [inits [a; b]] is [([a], a); ([a; b], b)]. *)
let inits (type a) (l : a list) : (a list * a) list =
  List.fold_map ~init:[]
    ~f:(fun trace x ->
      let trace = trace @ [ x ] in
      (trace, (trace, x)))
    l
  |> snd

(* Turns a list of options into an option of a list: [Some] iff every element
   was [Some] (traverses the whole list either way). *)
let sequence (l : 'a option list) : 'a list option =
  List.fold_right
    ~f:(fun x acc ->
      match (acc, x) with Some acc, Some x -> Some (x :: acc) | _ -> None)
    ~init:(Some []) l

(* Lazy alternative on options: keeps [x] when it is [Some], otherwise
   evaluates the fallback [f]. *)
let ( <|> ) x f = match x with Some x -> Some x | None -> f ()
let tabsize = 2
(* A newline followed by [depth] levels of indentation ([tabsize] spaces each). *)
let newline_indent depth : string = "\n" ^ String.make (tabsize * depth) ' '

(* [Base.Int64] extended with Yojson (de)serializers. Values are emitted as
   [`Intlit] (a string of digits) so that 64-bit integers survive JSON without
   precision loss; both [`Intlit] and plain [`Int] are accepted on input. *)
module MyInt64 = struct
  include Base.Int64

  let t_of_yojson (json : Yojson.Safe.t) : t =
    match json with
    | `Intlit s -> of_string s
    | `Int i -> of_int i
    | _ -> failwith "Couldn't parse MyInt64.t"

  let yojson_of_t (int64 : t) : Yojson.Safe.t = `Intlit (to_string int64)
end

(* The signature ascription hides the counter [id]: only [tempfile_path]
   escapes. Each call bumps the counter, so successive calls yield distinct
   paths of the shape [<tmp>/hax-debug-<n><suffix>]. NOTE(review): the counter
   is a plain [ref], not synchronized — presumably fine single-threaded. *)
include (
  struct
    let id = ref 0

    let tempfile_path ~suffix =
      id := !id + 1;
      Core.Filename.(
        concat temp_dir_name ("hax-debug-" ^ Int.to_string !id ^ suffix))
  end :
    sig
      val tempfile_path : suffix:string -> string
      (** Generates a temporary file path that ends with `suffix` *)
    end)

(* [Base.List] extended with helpers used across the engine. This module
   shadows [List] for the rest of the file and for users of [Utils]. *)
module List = struct
  include Base.List

  (* [zip] returning a plain option instead of [List.Or_unequal_lengths.t]. *)
  let zip_opt : 'a 'b. 'a list -> 'b list -> ('a * 'b) list option =
   fun x y ->
    match zip x y with Ok result -> Some result | Unequal_lengths -> None

  (* Longest common prefix (under [eq]) of all lists in [l]. Walks the first
     list element by element; for each element, [f] consumes one matching
     element from every other list (held in the mutable [tl]) and raises
     [Stop] — caught immediately — as soon as some list is exhausted or
     disagrees, which ends the [take_while]. *)
  let longest_prefix (type t) ~(eq : t -> t -> bool) (l : t list list) : t list
      =
    match l with
    | [] -> []
    | hd :: tl ->
        let tl = ref tl in
        let f x =
          let exception Stop in
          try
            tl :=
              List.map !tl ~f:(function
                | y :: tl when eq x y -> tl
                | _ -> raise Stop);
            true
          with Stop -> false
        in
        List.take_while ~f hd
end


================================================
FILE: engine/names/Cargo.toml
================================================
[package]
name = "hax-engine-names"
version.workspace = true
authors.workspace = true
license.workspace = true
homepage.workspace = true
edition.workspace = true
repository.workspace = true
readme.workspace = true
description = "Dummy crate containing all the Rust names the hax engine should be aware of"

[dependencies]
hax-lib-protocol = {path = "../../hax-lib-protocol"}
hax-lib = {path = "../../hax-lib"}

[package.metadata.release]
release = false


================================================
FILE: engine/names/README.md
================================================
# `hax-engine-names`

## Purpose of the crate
The crate `hax-engine-names` is a dummy crate that contains all the
Rust names the engine should be aware of.

For instance, the engine needs to know about `Some` and `None` to
reconstruct loops: Rust desugars `for .. in iterator {..}` loops into
loops with pattern matching on `iterator.next()`, which returns an
option.

## How to edit this crate
If you need a special treatment for a Rust name in the engine, you
should just add a piece of code that is using it.

For example, to make the name `Some` available to the engine, one
could add the following function at the end of the `src/lib.rs` file:

```rust
fn some(x: Option<()>) {
    match x {
        Some(_) => (),
        _ => (),
    }
}
```

Note this will also make `Option` available.

## How names are generated in OCaml
The subcrate `hax-engine-names-extract` runs `cargo hax into json` on
the crate `hax-engine-names`, and extracts all the names it finds,
along with other information.

Those names are compiled into the enumeration type
`Concrete_ident_generated.name`. You can look at those names by
running `hax-engine-names-extract | less`. As an example,
`core::option::Option::None` is made available as the
`Core__option__Option__None` variant.

## How to match a name in the engine
The functions `Concrete_ident.eq_name` and `Global_ident.eq_name`
allow for comparing `Concrete_ident.t` and `Global_ident.t` with
`Concrete_ident_generated.name`.

For example, the expression `Concrete_ident.eq_name
Core__option__Option__None my_concrete_ident` checks whether the
concrete ident `my_concrete_ident` is `core::option::Option::None`.

## How to build a concrete ident out of a name
See the function `Concrete_ident.of_name`.



================================================
FILE: engine/names/extract/Cargo.toml
================================================
[package]
name = "hax-engine-names-extract"
version.workspace = true
authors.workspace = true
license.workspace = true
homepage.workspace = true
edition.workspace = true
repository.workspace = true
readme.workspace = true
description = "Helper binary generating an OCaml module"


[build-dependencies]
serde.workspace = true
serde_json.workspace = true
hax-engine-names.workspace = true
hax-adt-into.workspace = true
tempfile.version = "3.9"

[features]
default = ["extract_names_mode"]
extract_names_mode = []

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(feature, values("rustc"))'] }

[package.metadata.release]
release = false


================================================
FILE: engine/names/extract/build.rs
================================================
use serde_json::Value;
use std::process::{Command, Stdio};

/// Instead of depending on `hax_frontend_exporter` (that links to
/// rustc and exposes a huge number of type definitions and their
/// impls), we just inline a small module here that contains the three
/// type definition we need. See the module for complementary
/// informations.
#[path = "../../../frontend/exporter/src/types/def_id.rs"]
mod hax_frontend_exporter_def_id;
use hax_frontend_exporter_def_id::*;

mod id_table {
    //! Shim to make `def_id.rs` build. Replaces the `id_table` interner with a plain `Arc`.
    use serde::{Deserialize, Serialize};
    use std::sync::Arc;

    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
    pub struct Node {
        pub value: Arc,
        pub id: u32,
    }

    impl std::ops::Deref for Node {
        type Target = T;
        fn deref(&self) -> &Self::Target {
            self.value.as_ref()
        }
    }
}

/// Name of the current crate
const HAX_ENGINE_NAMES_CRATE: &str = "hax_engine_names";
/// Path `a::b` needs to be compiled to a OCaml variant name, `::` is
/// replaced with `SEPARATOR`
const SEPARATOR: &str = "__";
/// "Key" for OCaml quoted strings
const ESCAPE_KEY: &str = "hax_escape_ocaml_json";

/// Returns `s` with its first character upper-cased; `""` stays `""`.
///
/// Unicode-aware: a single char may upper-case to several characters
/// (e.g. `'ß'` becomes `"SS"`), hence the `collect::<String>()`.
fn uppercase_first_letter(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        // Restored the stripped turbofish: `to_uppercase()` yields an
        // iterator of chars that must be collected into a `String`.
        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
    }
}

/// Renders a def-path disambiguator as a name suffix: empty for `0`
/// (the common case), `_<n>` otherwise.
fn disambiguator_to_str(disambiguator: u32) -> String {
    match disambiguator {
        0 => String::new(),
        n => format!("_{n}"),
    }
}

/// Renders one `DefPathItem` as the fragment used in OCaml variant names:
/// namespace entries (type/value/macro/lifetime) keep their identifier, the
/// crate root becomes the capitalized crate name, and every structural item
/// (impl blocks, closures, constructors, …) gets a fixed CamelCase tag.
fn def_path_item_to_str(path_item: DefPathItem) -> String {
    match path_item {
        DefPathItem::TypeNs(s)
        | DefPathItem::ValueNs(s)
        | DefPathItem::MacroNs(s)
        | DefPathItem::LifetimeNs(s) => s,
        DefPathItem::CrateRoot { name } => uppercase_first_letter(&name),
        DefPathItem::Impl => "Impl".into(),
        DefPathItem::ForeignMod => "ForeignMod".into(),
        DefPathItem::Use => "Use".into(),
        DefPathItem::GlobalAsm => "GlobalAsm".into(),
        DefPathItem::Closure => "Closure".into(),
        DefPathItem::Ctor => "Ctor".into(),
        DefPathItem::AnonConst => "AnonConst".into(),
        DefPathItem::PromotedConst => "PromotedConst".into(),
        DefPathItem::OpaqueTy => "OpaqueTy".into(),
        DefPathItem::OpaqueLifetime(..) => "OpaqueLifetime".into(),
        DefPathItem::AnonAssocTy(..) => "AnonAssocTy".into(),
        DefPathItem::SyntheticCoroutineBody => "SyntheticCoroutineBody".into(),
        DefPathItem::NestedStatic => "NestedStatic".into(),
        DefPathItem::LateAnonConst => "LateAnonConst".into(),
        DefPathItem::DesugaredAnonymousLifetime => "DesugaredAnonymousLifetime".into(),
    }
}

/// Renders a disambiguated def-path item as `<data><suffix>`,
/// e.g. `Impl` for the first impl and `Impl_2` for the third.
fn disambiguated_def_path_item_to_str(defpath: &DisambiguatedDefPathItem) -> String {
    format!(
        "{}{}",
        def_path_item_to_str(defpath.data.clone()),
        disambiguator_to_str(defpath.disambiguator)
    )
}

/// Replaces the crate name `HAX_ENGINE_NAMES_CRATE` by `"rust_primitives"`.
fn rename_krate(value: &mut Value) {
    match value {
        Value::Object(map) => {
            for (key, val) in map.iter_mut() {
                if let Value::String(s) = val
                    && key == "krate"
                    && s == "hax_engine_names"
                {
                    *s = "rust_primitives".to_string();
                }
                rename_krate(val);
            }
        }
        Value::Array(v) => v.iter_mut().for_each(rename_krate),
        _ => {}
    }
}

/// Turns a `DefId` into a pair of (its JSON form with the dummy crate
/// renamed, its OCaml variant name such as `Core__option__Option__None`).
///
/// The variant name is the capitalized crate name followed by each path
/// segment, joined with `SEPARATOR` (`__`).
fn def_id_to_str(def_id: &DefId) -> (Value, String) {
    let crate_name = if def_id.krate == HAX_ENGINE_NAMES_CRATE {
        "rust_primitives"
    } else {
        &def_id.krate
    };

    // Update the crate name in the json output as well.
    let mut json = serde_json::to_value(def_id).unwrap();
    rename_krate(&mut json);

    let crate_name = uppercase_first_letter(crate_name);
    let path = [crate_name]
        .into_iter()
        .chain(def_id.path.iter().map(disambiguated_def_path_item_to_str))
        // Restored the stripped turbofish target (`Vec<_>`).
        .collect::<Vec<_>>()
        .join(SEPARATOR);
    (json, path)
}

/// Checks whether a def id refers to a macro.
/// We don't want to extract macro names.
fn is_macro(did: &DefId) -> bool {
    did.contents
        .value
        .path
        .last()
        .is_some_and(|last| matches!(last.data, DefPathItem::MacroNs { .. }))
}

/// Checks whether a def id refers to a synthetic item (see
/// `syntactic_item.rs` in hax' exporter): such items do not correspond to a
/// real Rust name and must not be extracted.
fn is_synthetic(did: &DefId) -> bool {
    did.contents.value.krate == hax_frontend_exporter_def_id::SYNTHETIC_CRATE_NAME
}

fn reader_to_str(s: String) -> String {
    let json: Value = match serde_json::from_str(&s) {
        Ok(v) => v,
        Err(e) => panic!("error while parsing JSON: {e}\n\nString was: {}", &s),
    };
    let def_ids: Vec = serde_json::from_value(json["def_ids"].clone()).unwrap();
    let impl_infos = json["impl_infos"].clone();

    let def_ids = def_ids
        .into_iter()
        .filter(|did| !is_macro(did))
        .filter(|did| !is_synthetic(did))
        .map(|did| {
            let (json, krate_name) = def_id_to_str(&did);
            (serde_json::to_string(&json).unwrap(), krate_name)
        })
        .collect::>();

    const TAB: &str = "    ";
    let mut result = String::new();
    result += &format!(
        "type t = \n{TAB}  {}[@@deriving show, yojson, compare, sexp, eq, hash]\n",
        def_ids
            .iter()
            .map(|(_, def_name)| format!("{def_name}"))
            .collect::>()
            .join(&format!("\n{TAB}| "))
    );

    result += "\n";
    result += "include (val Base.Comparator.make ~compare ~sexp_of_t)";
    result += "\n";
    result += "module Values = struct\n";
    for (json, name) in &def_ids {
        result += &format!(
            "{TAB}let parsed_{name} = Types.def_id_of_yojson (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\n",
            "{", json, "}"
        );
    }
    result += "end\n\n";

    result += &format!(
        "let def_id_of: t -> Types.def_id = function\n{TAB}  {}\n\n",
        def_ids
            .iter()
            .map(|(_, n)| format!("{n} -> Values.parsed_{n}"))
            .collect::>()
            .join(&format!("\n{TAB}| "))
    );

    result += &format!(
        "let impl_infos_json_list = match Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{} with | `List l -> l | _ -> failwith \"Expected a list of `def_id * impl_infos`\"\n\n",
        "{",
        serde_json::to_string(&impl_infos).unwrap(),
        "}"
    );
    result += &format!(
        "let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.def_id_of_yojson did, Types.impl_infos_of_yojson ii) | _ -> failwith \"Expected tuple\") impl_infos_json_list"
    );

    result
}

/// Runs `cargo hax … json` on the `hax-engine-names` crate and returns the
/// JSON it prints on stdout; the subprocess' stderr is forwarded to ours.
///
/// The `cargo-hax` binary can be overridden via `HAX_CARGO_COMMAND_PATH`.
///
/// Panics when the subprocess cannot be spawned, produces non-UTF-8 output,
/// or exits unsuccessfully — failing the build with a clear message instead
/// of handing empty/garbage output to `reader_to_str`.
fn get_json() -> String {
    let mut cmd =
        Command::new(std::env::var("HAX_CARGO_COMMAND_PATH").unwrap_or("cargo-hax".to_string()));
    cmd.args([
        "hax",
        "-C",
        "-p",
        "hax-engine-names",
        "--lib",
        ";",
        "json",
        "--include-extra",
        "-o",
        "-",
    ])
    .stdout(Stdio::piped())
    .stderr(Stdio::piped());

    let out = cmd.output().unwrap();
    let stdout = String::from_utf8(out.stdout).unwrap();
    let stderr = String::from_utf8(out.stderr).unwrap();
    eprintln!("{}", stderr);
    // New: surface a frontend failure immediately rather than letting
    // `reader_to_str` panic on an empty string later.
    assert!(
        out.status.success(),
        "`cargo hax` exited with {}; stderr was printed above",
        out.status
    );
    stdout
}

/// Build-script entry point: writes the generated OCaml module to
/// `$OUT_DIR/module.ml`, which `src/main.rs` embeds via `include_str!`.
fn main() {
    std::fs::write(
        format!("{}/module.ml", std::env::var("OUT_DIR").unwrap()),
        reader_to_str(get_json()),
    )
    .unwrap()
}


================================================
FILE: engine/names/extract/src/main.rs
================================================
// OCaml module source generated by `build.rs` at compile time.
const OCAML_MODULE: &str = include_str!(concat!(env!("OUT_DIR"), "/module.ml"));

/// Prints the generated OCaml module on stdout so the engine's build can
/// redirect it into a `.ml` file.
fn main() {
    println!("{}", OCAML_MODULE);
}


================================================
FILE: engine/names/src/crypto_abstractions.rs
================================================
use hax_lib_protocol::crypto::*;

// Dummy function: its only purpose is to *use* every `hax-lib-protocol`
// crypto name (AEAD, Diffie-Hellman, HMAC, hash) plus the slice/vec
// operations they rely on, so that the corresponding def-ids are extracted
// and made available to the engine. The computed values are irrelevant.
fn crypto_abstractions() {
    let bytes = vec![0u8; 32];
    // AEAD: key/IV construction, encryption, then decryption with a tag.
    let iv = AEADIV::from_bytes(&bytes);
    let key = AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, &bytes);

    let (cipher_text, _tag) = aead_encrypt(key, iv, &bytes, &bytes);
    let iv = AEADIV::from_bytes(&bytes);
    let key = AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, &bytes);
    let _ = aead_decrypt(key, iv, &bytes, &cipher_text, AEADTag::from_bytes(&bytes));

    // Diffie-Hellman over X25519: scalar-point and scalar-base multiplication.
    let p = DHElement::from_bytes(&bytes);
    let s = DHScalar::from_bytes(&bytes);
    dh_scalar_multiply(DHGroup::X25519, s.clone(), p);
    dh_scalar_multiply_base(DHGroup::X25519, s);

    let _ = hmac(HMACAlgorithm::Sha256, &bytes, &bytes);

    // Slice/vec/integer helper names used by the protocol library.
    let _ = 1u64.to_le_bytes();
    let slice = &bytes[0..1];
    let _ = slice.len();
    let _ = slice.to_vec();
    let _ = [slice, slice].concat();
    let mut v = vec![0];
    v.extend_from_slice(slice);
    v.truncate(1);

    let _ = hash(HashAlgorithm::Sha256, &bytes);
    let _ = cipher_text.clone();
}


================================================
FILE: engine/names/src/lib.rs
================================================
#![allow(dead_code)]
#![feature(try_trait_v2)]
#![feature(allocator_api)]

extern crate alloc;
/* This is a dummy Rust file. Every path used in this file will be
 * exported and made available automatically in OCaml. */

mod crypto_abstractions;

fn dummy_hax_concrete_ident_wrapper>(x: I, mut y: I) {
    let _: core::result::Result = core::result::Result::Ok(0);
    let _: core::result::Result = core::result::Result::Err(0);
    let _ = x.fold(0, |a, b| a + b);
    let _ = y.next();
    let _: core::ops::ControlFlow = core::ops::ControlFlow::Break(0);
    let _: core::ops::ControlFlow = core::ops::ControlFlow::Continue(());
    let mut v = vec![()];
    v[0];
    v[0] = ();
    let _ = v.as_slice().to_vec();
    use std::ops::FromResidual;
    let _ = Result::::from_residual(Err(3u8));
    let _ = Box::new(());
    let _: Option = None;
    let _: Option<()> = Some(());
    let _: Option<()> = None;
    let _ = Option::<()>::None.is_some();
    let _: Result<(), u32> = Result::Err(3u8).map_err(u32::from);
    assert!(true);
    assert_eq!(1, 1);
    hax_lib::assert!(true);
    hax_lib::_internal_loop_invariant(|_: usize| true);
    hax_lib::_internal_while_loop_invariant(hax_lib::Prop::from(true));
    hax_lib::_internal_loop_decreases(hax_lib::Int::_unsafe_from_str("0"));

    fn props() {
        use hax_lib::prop::*;
        let x = Prop::from_bool(true);
        constructors::from_bool(true);
        constructors::and(x, x);
        constructors::or(x, x);
        constructors::not(x);
        constructors::eq(x, x);
        constructors::ne(x, x);
        constructors::implies(x, x);
        constructors::forall(|_: ()| x);
        constructors::exists(|_: ()| x);

        Prop::from_bool(true);
        Prop::and(x, x);
        Prop::or(x, x);
        Prop::not(x);
        Prop::eq(x, x);
        Prop::ne(x, x);
        Prop::implies(x, x);

        true.to_prop();

        forall(|_: ()| x);
        exists(|_: ()| x);
        implies(x, x);
    }

    let _ = [()].into_iter();
    let _: u16 = 6u8.into();
    let _ = 1..2;
    let _ = 1..;
    let _ = ..;
    let _ = ..1;

    let _ = [
        std::ops::ControlFlow::Break(()),
        std::ops::ControlFlow::Continue(()),
    ];

    fn iterator_functions(it: It) {
        let _ = it.clone().step_by(2);
        let _ = it.clone().enumerate();
        let _ = [()].chunks_exact(2);
        let _ = [()].iter();
        let _ = (&[()] as &[()]).iter();
    }

    {
        use hax_lib::*;
        let a: Int = 3u8.lift();
        let _: Int = 3u8.to_int();
        let _ = a.clone().pow2();
        let _ = Int::_unsafe_from_str("1");
        let _: u32 = a.concretize();
    }

    fn index_mut>(mut x: T, index: I) {
        x.index_mut(index);
    }

    fn question_mark_result>(x: A) -> Result<(), B> {
        Err(x)?;
        Ok(())
    }

    let _ = hax_lib::inline("");
    let _: () = hax_lib::inline_unsafe("");
    let _: () = hax_lib::any_to_unit(());
    use hax_lib::{RefineAs, Refinement};

    fn refinements>(x: T, y: U) -> T {
        let _ = x.clone().get_mut();
        T::new(x.get());
        y.into_checked()
    }

    const _: () = {
        use core::{cmp::*, ops::*};
        fn arith<
            X: Add
                + Sub
                + Mul
                + Div
                + Rem
                + BitXor
                + BitAnd
                + BitOr
                + Shl
                + Shr
                + Neg
                + Not
                + PartialOrd
                + Eq
                + Ord
                + Copy,
        >(
            x: X,
        ) -> X {
            let _ = x < x && x > x && x <= x && x >= x && x == x && x != x;
            (x ^ x & !x) + x / x * x - x | (-x) % x << x >> x
        }
    };

    fn dummy>(z: T) {
        let _ = T::from_output(());
        let _ = z.branch();
    }

    let s: &str = "123";
    let ptr: *const u8 = s.as_ptr();

    unsafe {
        let _ = *ptr.offset(1) as char;
    }

    const _: () = {
        use std::ops::DerefMut;
        fn f(x: T) {
            let _: &mut _ = { x }.deref_mut();
        }
        use std::ops::Deref;
        fn g(x: T) {
            let _: &_ = { x }.deref();
        }
    };

    #[derive(PartialEq)]
    struct Foo();
}

// `impl_arith!` generates, for each listed type, a module containing one
// empty function per arithmetic/comparison operation. Only the *names*
// matter: each function provides a def-id for that operation on that
// machine-integer type.
macro_rules! impl_arith {
    ($name:ident$(,)?) => {
        mod $name {
            fn add() {}
            fn sub() {}
            fn mul() {}
            fn div() {}
            fn rem() {}
            fn neg() {}
            fn bit_xor() {}
            fn bit_and() {}
            fn bit_or() {}
            fn shl() {}
            fn shr() {}
            fn eq() {}
            fn lt() {}
            fn le() {}
            fn ne() {}
            fn ge() {}
            fn gt() {}
        }
    };
    // Recursive case: peel one type off the comma-separated list.
    ($name:ident,$($rest:tt)*) => {
        impl_arith!($name);
        impl_arith!($($rest)*);
    }
}

impl_arith!(u8, u16, u32, u64, u128, usize);
impl_arith!(i8, i16, i32, i64, i128, isize);

// Marker names for pointer offsetting and unsize coercions.
fn offset() {}

fn unsize() {}

/// Hax additions
///
/// Marker items under the `hax` namespace: the engine attaches its own
/// semantics to these names (they carry no Rust behavior of their own —
/// every function body here is empty on purpose).
mod hax {
    fn failure() {}
    struct Failure;
    enum Never {}

    // Only useful when HAX_CORE_EXTRACTION_MODE is `on`
    enum MutRef {}

    struct Tuple2(u8, u8);
    fn deref_op() {}
    fn cast_op() {}
    fn logical_op_and() {}
    fn logical_op_or() {}

    // Loop and array-update primitives the engine rewrites loops into.
    fn while_loop() {}
    fn while_loop_cf() {}
    fn while_loop_return() {}
    fn repeat() {}
    fn update_at() {}
    mod monomorphized_update_at {
        fn update_at_usize() {}
        fn update_at_range() {}
        fn update_at_range_from() {}
        fn update_at_range_to() {}
        fn update_at_range_full() {}
    }
    // TODO: Should that live here? (this is F* specific)
    fn array_of_list() {}
    fn never_to_any() {}

    // Fold primitives used when functionalizing loops; the `_cf` / `_return`
    // variants handle control-flow effects (break/continue, early return).
    mod folds {
        fn fold_range() {}
        fn fold_range_cf() {}
        fn fold_range_return() {}
        fn fold_range_step_by() {}
        fn fold_range_step_by_cf() {}
        fn fold_range_step_by_return() {}
        fn fold_enumerated_slice() {}
        fn fold_enumerated_slice_cf() {}
        fn fold_enumerated_slice_return() {}
        fn fold_enumerated_chunked_slice() {}
        fn fold_enumerated_chunked_slice_cf() {}
        fn fold_enumerated_chunked_slice_return() {}
        fn fold_chunked_slice() {}
        fn fold_chunked_slice_cf() {}
        fn fold_chunked_slice_return() {}
        fn fold_cf() {}
        fn fold_return() {}
    }

    /// The engine uses this `dropped_body` symbol as a marker value
    /// to signal that an item was extracted without body.
    fn dropped_body() {}

    // Operations on hax's unbounded integer type (`hax_lib::Int`).
    mod int {
        fn add() {}
        fn sub() {}
        fn div() {}
        fn mul() {}
        fn rem() {}
        fn neg() {}

        fn le() {}
        fn lt() {}
        fn ge() {}
        fn gt() {}

        fn eq() {}
        fn ne() {}

        fn from_machine() {}
        fn into_machine() {}
    }

    // Operations on machine integers.
    mod machine_int {
        fn add() {}
        fn sub() {}
        fn div() {}
        fn mul() {}
        fn rem() {}

        fn not() {}
        fn bitxor() {}
        fn bitor() {}
        fn bitand() {}
        fn shl() {}
        fn shr() {}

        fn eq() {}
        fn ne() {}
        fn le() {}
        fn lt() {}
        fn ge() {}
        fn gt() {}

        fn add_with_overflow() {}
        fn sub_with_overflow() {}
        fn mul_with_overflow() {}
        fn cmp() {}
    }

    // Names backing the monadic encoding of control flow (exception,
    // result and option monads).
    mod control_flow_monad {
        trait ControlFlowMonad {
            fn lift() {}
        }
        mod mexception {
            fn run() {}
        }
        mod mresult {
            fn run() {}
        }
        mod moption {
            fn run() {}
        }
    }
    fn box_new() {}

    mod explicit_monadic {
        fn lift() {}
        fn pure() {}
    }
}

// Marker name for the negation primitive under the `arithmetic` namespace.
mod arithmetic {
    fn neg() {}
}


================================================
FILE: engine/utils/generate_from_ast/README.md
================================================
# `generate_from_ast`

## `generate_from_ast visitors`
This binary reads the AST module of hax and creates **standalone**
visitors. We need to define visitors and the types of the AST in two
separate modules. Otherwise, each time we instantiate the AST functor,
we end up re-defining every single visitor. Since the AST functor is
instantiated a lot, this used to lead to huge memory consumption while
building.

This binary takes an OCaml module that defines types as input and
outputs an OCaml module defining visitors for those types.

Note that this binary relies on the structure and naming of the AST of
hax; it is not intended for any other use.

## `generate_from_ast ast_builder`
Generates helpers to build nodes in the AST.


================================================
FILE: engine/utils/generate_from_ast/codegen_ast_builder.ml
================================================
open Base
open Utils
open Types

(* Renders a [Type.t] as OCaml concrete syntax: tuple constructors
   (`prim___tuple_*`) become `(a * b * ...)`; any other constructor becomes
   `((arg1, arg2) name)`, with the argument group omitted when empty. *)
let rec print_ty (t : Type.t) =
  if String.is_prefix t.typ ~prefix:"prim___tuple_" then
    let components = List.map t.args ~f:print_ty in
    "(" ^ String.concat ~sep:" * " components ^ ")"
  else
    let applied =
      match t.args with
      | [] -> ""
      | args ->
          "(" ^ String.concat ~sep:", " (List.map args ~f:print_ty) ^ ") "
    in
    "(" ^ applied ^ t.typ ^ ")"

(* Renders the names of record fields (`{a;b}`) or tuple components
   (`(a,b)`); the second element of each pair is ignored. *)
let print_record_or_tuple is_record x =
  let left, sep, right =
    if is_record then ("{", ";", "}") else ("(", ",", ")")
  in
  let names = List.map ~f:fst x in
  left ^ String.concat ~sep names ^ right

let print_record = print_record_or_tuple true
let print_tuple = print_record_or_tuple false

(** Generates one OCaml `let` builder per variant of [enum], each producing a
    value of the wrapper record type [record] (e.g. the [expr]/[expr'] pair).
    Fields listed in [provided_fields] (e.g. ["span"]) are not taken as
    builder arguments; the generated code expects them in scope at its call
    site. *)
let mk_builder (provided_fields : string list)
    ((record, enum) : Datatype.t * Datatype.t) =
  let ty = record.name in
  let record, variants =
    match (record.kind, enum.kind) with
    | Record record, Variant variants -> (record, variants)
    | _ -> failwith "mk_builder: bad kinds of datatypes"
  in
  let record_names = List.map ~f:fst record in
  (* Builder arguments: every wrapper-record field that is neither provided
     by the caller's scope nor the enum payload field itself. *)
  let args =
    record
    |> List.filter
         ~f:(fst >> List.mem ~equal:[%eq: string] provided_fields >> not)
    |> List.filter ~f:(fun (_, ty) -> not ([%eq: string] ty.Type.typ enum.name))
    |> List.map ~f:(fun (name, ty) -> (true, name, ty))
  in
  (* Name of the record field whose type is the enum (the payload field). *)
  let field_name_raw, _ =
    List.find ~f:(fun (_, ty) -> [%eq: string] ty.Type.typ enum.name) record
    |> Option.value_exn
  in
  List.map
    ~f:(fun Variant.{ name; payload } ->
      (* Accumulates extra `let`-bindings emitted when a variant field name
         collides with a wrapper-record field: the argument is renamed to
         `inner_<name>` and rebound to its original name. *)
      let extra_lb = ref "" in
      let args =
        args
        @
        match payload with
        | VariantPayload.Record fields ->
            fields
            |> List.map ~f:(fun (name, ty) ->
                   ( true,
                     (if List.mem ~equal:[%eq: string] record_names name then (
                        let name' = "inner_" ^ name in
                        (* if not ([%eq: string] field_name_raw name) then *)
                        extra_lb :=
                          !extra_lb ^ "let " ^ name ^ " = " ^ name' ^ " in\n";
                        name')
                      else name),
                     ty ))
        | Tuple types ->
            List.mapi ~f:(fun i ty -> (false, "x" ^ Int.to_string i, ty)) types
        | None -> []
      in
      (* Rendered argument list: named args as `~(name:ty)`, positional tuple
         components as `(xN:ty)`. *)
      let sargs =
        List.map
          ~f:(fun (named, name, ty) ->
            (if named then "~" else "") ^ "(" ^ name ^ ":" ^ print_ty ty ^ ")")
          args
        |> String.concat ~sep:" "
      in
      let head = "let " ^ ty ^ "_" ^ name ^ " " ^ sargs ^ ": " ^ ty ^ " = " in
      let spayload =
        match payload with
        | Record record -> print_record record
        | Tuple types ->
            List.mapi ~f:(fun i ty -> ("x" ^ Int.to_string i, ty)) types
            |> print_tuple
        | None -> ""
      in
      (* Generated body: bind the payload field, then build the wrapper
         record from the fields in scope. *)
      let body =
        "let " ^ field_name_raw ^ ": " ^ enum.name ^ " = " ^ !extra_lb ^ "\n"
        ^ name ^ " " ^ spayload ^ " in"
      in
      let body = body ^ print_record record in
      head ^ body)
    variants
  |> String.concat ~sep:"\n\n"

(** Entry point: generates the source of the AST-builder module. For each
    wrapper/payload datatype pair, two builder sets are emitted: [Explicit]
    (span passed explicitly as an argument) and [Make] (span supplied by the
    [SPAN] functor argument). *)
let mk datatypes =
  let find name =
    List.find ~f:(fun dt -> [%eq: string] dt.Datatype.name name) datatypes
    |> Option.value_exn
  in
  (* The wrapper-record / payload-enum pairs builders are generated for. *)
  let data =
    [
      (find "expr", find "expr'");
      (find "pat", find "pat'");
      (find "item", find "item'");
      (find "guard", find "guard'");
      (find "trait_item", find "trait_item'");
      (find "impl_expr", find "impl_expr_kind");
    ]
  in
  (* Builders taking every field (including span) as argument. *)
  let body = data |> List.map ~f:(mk_builder []) |> String.concat ~sep:"\n\n" in
  (* Builders relying on a `span` value being in scope (from the functor). *)
  let spanned =
    data |> List.map ~f:(mk_builder [ "span" ]) |> String.concat ~sep:"\n\n"
  in
  {|
open! Prelude
open! Ast


module Make (F : Features.T) = struct
  open Ast.Make(F)

module Explicit = struct
|}
  ^ body
  ^ {|
end

  module type SPAN = sig val span: span end
  module Make(Span: SPAN) = struct
    open Span
    |}
  ^ spanned ^ {|
  end

end
|}


================================================
FILE: engine/utils/generate_from_ast/codegen_ast_destruct.ml
================================================
open Base
open Utils
open Types

(* Renders a [Type.t] as OCaml source; tuple constructors print as
   `(a * b)`, anything else as `((args) name)` with `(args)` omitted when
   there are none. *)
let rec print_ty (t : Type.t) =
  match String.is_prefix t.typ ~prefix:"prim___tuple_" with
  | true -> "(" ^ String.concat ~sep:" * " (List.map t.args ~f:print_ty) ^ ")"
  | false ->
      let args_part =
        if List.is_empty t.args then ""
        else "(" ^ String.concat ~sep:", " (List.map t.args ~f:print_ty) ^ ") "
      in
      "(" ^ args_part ^ t.typ ^ ")"

(* Renders record-field or tuple-component names: `{a;b}` or `(a,b)`. *)
let print_record_or_tuple is_record x =
  let left, sep, right =
    match is_record with
    | true -> ("{", ";", "}")
    | false -> ("(", ",", ")")
  in
  left ^ (x |> List.map ~f:fst |> String.concat ~sep) ^ right

let print_record = print_record_or_tuple true
let print_tuple = print_record_or_tuple false

(* Renders a record type (`{f:(ty);...}`) or a tuple type (`((a)*(b))`). *)
let print_record_type_or_tuple is_record x =
  let left, sep, right =
    match is_record with
    | true -> ("{", ";", "}")
    | false -> ("(", "*", ")")
  in
  let render_field (name, ty) =
    let prefix = if is_record then name ^ ":" else "" in
    prefix ^ print_ty ty
  in
  left ^ String.concat ~sep (List.map ~f:render_field x) ^ right

let print_record_type = print_record_type_or_tuple true

let print_tuple_type tys =
  print_record_type_or_tuple false (List.map ~f:(fun ty -> ("", ty)) tys)

(** Generates, per variant of [enum], a destructor: a type alias for the
    variant's payload and a function `<ty>_<Variant> : <ty> -> payload option`
    matching on the wrapper record's payload field. *)
let mk_builder ((record, enum) : Datatype.t * Datatype.t) =
  let ty = record.name in
  let record, variants =
    match (record.kind, enum.kind) with
    | Record record, Variant variants -> (record, variants)
    | _ -> failwith "mk_builder: bad kinds of datatypes"
  in
  (* Name of the record field whose type is the enum (the payload field). *)
  let field_name_raw, _ =
    List.find ~f:(fun (_, ty) -> [%eq: string] ty.Type.typ enum.name) record
    |> Option.value_exn
  in
  List.map
    ~f:(fun Variant.{ name; payload } ->
      let id = ty ^ "_" ^ name in
      let inline_record = id in
      (* Generated payload type: a record type, a tuple type, or `unit`. *)
      let type_decl =
        "\ntype " ^ inline_record ^ " = "
        ^
        match payload with
        | Record record -> print_record_type record
        | Tuple types -> types |> print_tuple_type
        | None -> "unit"
      in
      let head =
        "\nlet " ^ id ^ " (value: " ^ ty ^ ")" ^ ": " ^ inline_record
        ^ " option ="
      in
      let spayload =
        match payload with
        | Record record -> print_record record
        | Tuple types ->
            List.mapi ~f:(fun i ty -> ("x" ^ Int.to_string i, ty)) types
            |> print_tuple
        | None -> ""
      in
      (* The fallback `| _ -> None` arm is omitted for single-variant enums,
         where it would be a refutation error. *)
      type_decl ^ head ^ "\n  match value." ^ field_name_raw ^ " with\n    | "
      ^ name ^ " " ^ spayload ^ " -> Some "
      ^ (if String.is_empty spayload then "()" else spayload)
      ^ if List.length variants |> [%eq: int] 1 then "" else "\n    | _ -> None")
    variants
  |> String.concat ~sep:"\n\n"

(** Entry point: generates the source of the AST-destructor module for each
    wrapper/payload datatype pair of the AST. *)
let mk datatypes =
  let find name =
    List.find ~f:(fun dt -> [%eq: string] dt.Datatype.name name) datatypes
    |> Option.value_exn
  in
  (* The wrapper-record / payload-enum pairs destructors are generated for. *)
  let data =
    [
      (find "expr", find "expr'");
      (find "pat", find "pat'");
      (find "item", find "item'");
      (find "guard", find "guard'");
      (find "trait_item", find "trait_item'");
      (find "impl_expr", find "impl_expr_kind");
    ]
  in
  let body = data |> List.map ~f:mk_builder |> String.concat ~sep:"\n\n" in
  {|
open! Prelude
open! Ast

module Make (F : Features.T) = struct
  open Ast.Make(F)

|}
  ^ body ^ {|

end
|}


================================================
FILE: engine/utils/generate_from_ast/codegen_printer.ml
================================================
open Base
open Utils
open Types

(* Generation state: names of the types for which a `lazy_of_*` printer
   method (and thus a `lazy_doc` wrapper) is generated. *)
type state = { names_with_doc : string list }

(* Monadic bind over `option`, used by the type-translation functions. *)
let ( let* ) x f = Option.bind ~f x

(* Types whose primed payload type (e.g. `expr'`) is printed with the
   enclosing value available as `~super`. *)
let super_types_list = [ "expr"; "pat"; "guard"; "arm"; "item" ]

(* `expr'` is the payload of super type `expr`: these two helpers map between
   a primed type name and its unprimed counterpart from [super_types_list]. *)
let get_super_type ty =
  List.find super_types_list ~f:(fun super -> String.equal ty (super ^ "'"))

let get_child_type ty =
  match List.mem ~equal:String.equal super_types_list ty with
  | true -> Some (ty ^ "'")
  | false -> None

(* Prefix for generated methods that printers are not supposed to override. *)
let do_not_override_prefix = "_do_not_override_"

(* Generated methods whose names get the [do_not_override_prefix].
   NOTE(review): presumably these have hand-written counterparts elsewhere —
   confirm against the printer base classes. *)
let is_hidden_method =
  let list =
    [
      "expr'_App";
      "expr'_Construct";
      "ty_TApp";
      "lhs_LhsFieldAccessor";
      "local_ident";
      "pat'_PConstruct";
      "expr'_GlobalVar";
      "variant";
      "item'_Type";
    ]
  in
  List.mem ~equal:[%eq: string] list

(* `lazy_of_*` methods that are defined manually, so codegen skips them. *)
let lazy_doc_manual_definitions = [ "_do_not_override_lazy_of_generics" ]

(** Translates a type expression into a code generator for printing values of
    that type. On success, returns a function taking a position thunk and the
    OCaml expression (as a string) holding the value, and producing the OCaml
    source that prints it. Options, lists and 2-/3-tuples are traversed
    structurally; types listed in [state.names_with_doc] dispatch to the
    corresponding `print#_do_not_override_lazy_of_*` method; anything else is
    passed through unchanged. *)
let rec of_ty (state : state) (call_method : string -> ty:string -> string)
    (t : Type.t) : ((unit -> string) -> string -> string) option =
  (* Recursively translate all type arguments; fail (None) if any fails. *)
  let* args =
    List.fold t.args ~init:(Some []) ~f:(fun acc x ->
        let* acc = acc in
        let* x = of_ty state call_method x in
        Some (x :: acc))
    |> Option.map ~f:List.rev
  in
  match (t.typ, args) with
  | "option", [ inner ] ->
      Some
        (fun pos value ->
          "(match " ^ value ^ " with | None -> None | Some value -> Some ("
          ^ inner pos "value" ^ "))")
  | "list", [ inner ] ->
      Some
        (fun pos value ->
          "(List.map ~f:(fun x -> " ^ inner pos "x" ^ ") " ^ value ^ ")")
  | "prim___tuple_2", [ fst; snd ] ->
      Some
        (fun pos value ->
          let base =
            "("
            ^ fst pos ("(fst " ^ value ^ ")")
            ^ ","
            ^ snd pos ("(snd " ^ value ^ ")")
            ^ ")"
          in
          (* When exactly one component is a lazy_doc, wrap the whole pair in
             a lazy_doc that renders via that component. *)
          let mk proj =
            "(let x = " ^ base ^ "in lazy_doc (fun tuple -> (" ^ proj
            ^ " tuple)#p) " ^ pos () ^ " x)"
          in
          match List.map ~f:(is_lazy_doc_typ state) t.args with
          | [ false; true ] -> mk "snd"
          | [ true; false ] -> mk "fst"
          | _ -> base)
      (* if String.is_prefix ~prefix:"F." (List.nth t.args 1 |> Option.value ~default:"") then "(let x = " ^ base ^ "in lazy_doc x)" else base) *)
  | "prim___tuple_3", [ fst; snd; thd ] ->
      Some
        (fun pos value ->
          "(let (value1, value2, value3) = " ^ value ^ " in ("
          ^ fst pos "value1" ^ "," ^ snd pos "value2" ^ "," ^ thd pos "value3"
          ^ "))")
  | _ when List.mem ~equal:[%eq: string] state.names_with_doc t.typ ->
      Some
        (fun pos value ->
          "(print#" ^ do_not_override_prefix ^ "lazy_of_" ^ t.typ
          ^ (if Option.is_some (get_super_type t.typ) then " ~super" else "")
          ^ " " ^ pos () ^ " " ^ value ^ ")")
  | _ -> Some (fun pos value -> "(" ^ value ^ ")")

(* Renders the OCaml type of the printed representation of [t]: types with a
   generated printer become `<ty> lazy_doc`; a tuple with exactly one
   lazy_doc component is itself wrapped in `lazy_doc`. *)
and string_ty_of_ty' (state : state) (t : Type.t) =
  if String.is_prefix t.typ ~prefix:"prim___tuple_" then
    let args = List.map t.args ~f:(string_ty_of_ty' state) in
    let n = List.count args ~f:(String.is_suffix ~suffix:"lazy_doc)") in
    let base =
      "("
      ^ String.concat ~sep:" * " (List.map t.args ~f:(string_ty_of_ty' state))
      ^ ")"
    in
    if [%eq: int] n 1 then "(" ^ base ^ " lazy_doc)" else base
  else
    "("
    ^ (if List.is_empty t.args then ""
       else
         "("
         ^ String.concat ~sep:", " (List.map t.args ~f:(string_ty_of_ty' state))
         ^ ") ")
    ^ t.typ
    ^ (if List.mem ~equal:[%eq: string] state.names_with_doc t.typ then
         " lazy_doc"
       else "")
    ^ ")"

(* A type is a "lazy_doc type" when its rendered type ends in `lazy_doc)`. *)
and is_lazy_doc_typ (state : state) = string_ty_of_ty' state >> is_lazy_doc_typ'
and is_lazy_doc_typ' = String.is_suffix ~suffix:"lazy_doc)"

(* Like [string_ty_of_ty'], with one special case: the rendered generics type
   is replaced by the richer tuple used by the hand-written
   [_do_not_override_lazy_of_generics]. *)
let string_ty_of_ty (state : state) (t : Type.t) =
  let rendered = string_ty_of_ty' state t in
  if String.equal rendered "(generics lazy_doc)" then
    "((generics lazy_doc * generic_param lazy_doc list * generic_constraint \
     lazy_doc list) lazy_doc)"
  else rendered

(* Printer method name for a type/variant pair: `<ty>_<Variant>`, or just
   `<ty>` when the variant name is empty; hidden methods additionally get
   the do-not-override prefix. *)
let meth_name' typ_name variant_name =
  match String.is_empty variant_name with
  | true -> typ_name
  | false -> typ_name ^ "_" ^ variant_name

let meth_name typ_name variant_name =
  let meth = meth_name' typ_name variant_name in
  if is_hidden_method meth then do_not_override_prefix ^ meth else meth

(** Generates one match arm for variant [v] of type [t_name]: the arm
    destructures the payload, converts each field with [of_ty], and calls the
    corresponding `print#<meth>` method. As side effects, every field
    registers an AST position via [register_position] and the virtual method
    signature is recorded via [register_signature]. *)
let print_variant state (call_method : string -> ty:string -> string)
    (register_position : string option -> string) (super_type : string option)
    (register_signature : string -> unit) (t_name : string) (v : Variant.t) :
    string =
  let meth_name = meth_name t_name v.name in
  let meth = "print#" ^ meth_name in
  (* [named] selects record-payload (`{...}`) vs tuple-payload (`(...)`). *)
  let mk named fields =
    let head =
      v.name
      ^ (if named then " { " else " ( ")
      ^ String.concat ~sep:(if named then ";" else ",") (List.map ~f:fst fields)
      ^ (if named then " } " else ")")
      ^ " -> "
    in
    (* One printed argument per field; named fields become labelled args. *)
    let args =
      List.map
        ~f:(fun (field_name, ty) ->
          let value =
            match of_ty state call_method ty with
            | Some f ->
                let pos = register_position (Some field_name) in
                f (fun _ -> pos) field_name
            | None -> field_name
          in
          let name = "~" ^ field_name ^ ":" in
          (if named then name else "") ^ "(" ^ value ^ ")")
        fields
    in
    let call =
      String.concat ~sep:" "
        (meth
        :: ((if Option.is_some super_type then [ "~super" ] else []) @ args))
    in
    (* Evaluated for its side effect: registers the virtual method's
       signature. The binding itself is unit and unused. *)
    let signature =
      let ty =
        List.map
          ~f:(fun (name, ty) ->
            let name = if named then name ^ ":" else "" in
            name ^ string_ty_of_ty state ty)
          fields
        |> String.concat ~sep:" -> "
      in
      let super =
        match super_type with
        | Some super_type -> " super:(" ^ super_type ^ ") -> "
        | None -> ""
      in
      register_signature
        ("method virtual " ^ meth_name ^ " : " ^ super ^ ty ^ " -> document")
    in
    head ^ call
  in
  "\n  | "
  ^
  match v.payload with
  | Record fields -> mk true fields
  | None -> v.name ^ " -> " ^ meth
  | Tuple types ->
      mk false (List.mapi ~f:(fun i ty -> ("x" ^ Int.to_string i, ty)) types)

let catch_errors_for = [ "expr"; "item"; "pat" ]

(** Generates the printer members for one datatype: the memoizing
    `_do_not_override_lazy_of_<name>` method, a `print_<name>` helper, an
    `entrypoint_<name>` method (both only for non-primed types), plus the
    collected virtual-method and `wrap_*` signatures. *)
let print_datatype state (dt : Datatype.t)
    (register_entrypoint : string -> unit)
    (register_position : string -> string -> string option -> string) =
  let super_type = get_super_type dt.name in
  (* Accumulates method signatures registered while printing variants. *)
  let sigs = ref [] in
  let method_name = do_not_override_prefix ^ "lazy_of_" ^ dt.name in
  let print_variants variants wrapper =
    (* `(**/**)` toggles odoc visibility off around generated members. *)
    let head =
      "(**/**) method " ^ method_name
      ^ (match super_type with Some t -> " ~(super: " ^ t ^ ")" | _ -> "")
      ^ " ast_position (value: " ^ dt.name ^ "): " ^ dt.name ^ " lazy_doc ="
    in
    let body =
      (* For a super type (e.g. `expr`), expose the value as `super` so the
         primed payload's printer can receive it. *)
      (if Option.is_some (get_child_type dt.name) then
         "\n    let super = value in"
       else "")
      ^ "\n    match value with"
      ^ String.concat ~sep:""
          (List.map
             ~f:(fun variant ->
               print_variant state
                 (fun name ~ty:_ -> name)
                 (register_position dt.name variant.Variant.name)
                 super_type
                 (fun s -> sigs := s :: !sigs)
                 dt.name variant)
             variants)
    in
    let body =
      "(print#wrap_" ^ dt.name ^ " ast_position value (" ^ body ^ "))"
    in
    let body = wrapper body in
    (* Overridable hook with a default identity implementation. *)
    sigs :=
      ("method wrap_" ^ dt.name ^ " (_pos: ast_position) (_value: " ^ dt.name
     ^ ") (doc: document): document = doc")
      :: !sigs;
    let def =
      head ^ "lazy_doc (fun (value: " ^ dt.name ^ ") -> " ^ body
      ^ ") ast_position value"
    in
    if List.mem ~equal:[%eq: string] lazy_doc_manual_definitions method_name
    then "(* skipping " ^ method_name ^ " *) (**/**)"
    else def ^ "(**/**)"
  in
  let main =
    match dt.kind with
    | Variant variants -> print_variants variants Fn.id
    | Record record ->
        (* Records with a span field are printed under `with_span`; the
           error-prone types additionally go through `catch_exn`. *)
        let wrapper =
          if List.exists ~f:(fst >> [%eq: string] "span") record then
            fun body ->
            "print#with_span ~span:value.span (fun _ -> " ^ body ^ ")"
          else Fn.id
        in
        let wrapper =
          if List.mem ~equal:[%eq: string] catch_errors_for dt.name then
            fun body ->
            "print#catch_exn print#error_" ^ dt.name ^ " (fun () -> "
            ^ wrapper body ^ ")"
          else wrapper
        in
        print_variants [ { name = ""; payload = Record record } ] wrapper
    | TypeSynonym ty ->
        print_variants [ { name = ""; payload = Tuple [ ty ] } ] (fun x -> x)
    | _ -> "(* Not translating " ^ dt.name ^ " *)"
  in
  let print =
    let name = "print_" ^ dt.name in
    let ty = "ast_position -> " ^ dt.name ^ " -> " in
    let body =
      "fun ast_position x -> (print#" ^ method_name ^ " ast_position x)#p"
    in
    if Option.is_none super_type then
      "method " ^ name ^ ": " ^ ty ^ " document = " ^ body
    else ""
  in
  let entrypoint =
    let name = "entrypoint_" ^ dt.name in
    let ty = dt.name ^ " -> " in
    let body = "print#print_" ^ dt.name ^ " AstPos_Entrypoint" in
    if Option.is_none super_type then (
      register_entrypoint (name ^ " : " ^ ty ^ " 'a");
      "method " ^ name ^ ": " ^ ty ^ " document = " ^ body)
    else ""
  in
  String.concat ~sep:"\n\n" (main :: print :: entrypoint :: !sigs)

(* Verbatim OCaml spliced into the generated printer: defines the [lazy_doc]
   object wrapper, which memoizes the document rendering of a value while
   retaining the value itself and its AST position. Do not edit the string
   content lightly — it is emitted as-is into generated code. *)
let hardcoded =
  {|
module LazyDoc = struct
    type 'a lazy_doc =
      < compact : output -> unit
      ; pretty : output -> state -> int -> bool -> unit
      ; requirement : int
      ; p : document
      ; v : 'a
      ; ast_position : ast_position >
    let lazy_doc : 'a. ('a -> document) -> ast_position -> 'a -> 'a lazy_doc =
     fun to_document pos value ->
      let lazy_doc = ref None in
      let doc () =
        match !lazy_doc with
        | None ->
            let doc = to_document value in
            lazy_doc := Some doc;
            doc
        | Some doc -> doc
      in
      object (self)
        method requirement : requirement = requirement (doc ())
        method pretty : output -> state -> int -> bool -> unit =
          fun o s i b -> pretty o s i b (doc ())
        method compact : output -> unit = fun o -> compact o (doc ())
        method p = custom (self :> custom)
        method v = value
        method ast_position = pos
      end
end
open LazyDoc
|}

(* Virtual members every generated printer class starts with: span handling,
   exception catching, and the two hand-written identifier printers. *)
let class_prelude =
  {|
   method virtual with_span: span:span -> (unit -> document) -> document
   method virtual catch_exn : (string -> document) -> (unit -> document) -> document

   method virtual _do_not_override_lazy_of_local_ident: _
   method virtual _do_not_override_lazy_of_concrete_ident: _
|}

(** Entry point: generates the whole printer module — the [ast_position]
    enumeration, the hardcoded [LazyDoc] prelude, the `base` virtual class,
    and the [object_type] record-of-entrypoints with its [map] /
    [map_span_data] helpers. *)
let mk datatypes =
  (* `mutability` is excluded from printer generation. *)
  let datatypes =
    List.filter
      ~f:(fun dt -> not ([%eq: string] dt.Datatype.name "mutability"))
      datatypes
  in
  (* Every datatype gets a lazy_doc printer, plus the two identifier types. *)
  let state =
    let names_with_doc = List.map ~f:(fun dt -> dt.name) datatypes in
    let names_with_doc = "concrete_ident" :: "local_ident" :: names_with_doc in
    { names_with_doc }
  in
  (* Accumulators filled as a side effect of printing each datatype. *)
  let positions = ref [ "AstPos_Entrypoint"; "AstPos_NotApplicable" ] in
  let entrypoint_types = ref [] in
  let class_body =
    List.map
      ~f:(fun dt ->
        print_datatype state dt
          (fun x -> entrypoint_types := x :: !entrypoint_types)
          (fun ty variant field ->
            let pos =
              "AstPos_" ^ ty ^ "_" ^ variant
              ^ match field with Some field -> "_" ^ field | _ -> ""
            in
            positions := pos :: !positions;
            pos))
      datatypes
    |> String.concat ~sep:"\n\n"
  in
  (* Entrypoint signatures for the object type, and forwarding methods for
     the [map] / [map_span_data] wrappers. *)
  let object_poly = String.concat ~sep:";\n " !entrypoint_types in
  let object_span_data_map =
    String.concat ~sep:"\n"
      (List.map
         ~f:(fun s ->
           let n = fst (String.lsplit2_exn ~on:':' s) in
           "method " ^ n ^ " = obj#" ^ n)
         !entrypoint_types)
  in
  let object_map =
    String.concat ~sep:"\n"
      (List.map
         ~f:(fun s ->
           let n = fst (String.lsplit2_exn ~on:':' s) in
           "method " ^ n ^ " x = f (fun obj -> obj#" ^ n ^ " x)")
         !entrypoint_types)
  in
  Printf.sprintf
    {|
open! Prelude
open! Ast
open PPrint
type ast_position = %s | AstPosition_Quote

%s

module Make (F : Features.T) = struct
   module AST = Ast.Make (F)
   open Ast.Make (F)

   class virtual base = object (print)
     %s
   end

   type ('span_data, 'a) object_type = <
        span_data : 'span_data;
        %s
     >

   let map (type span_data) (type a) (type b)
           (f: ((span_data, a) object_type -> a) -> b)
           : (unit, b) object_type = object
        method span_data: unit = ()
        %s
     end

   let map_span_data (type a) (type b) (type t)
          (obj: (a, t) object_type)
          (span_data: b)          
          : (b, t) object_type = object
        method span_data: b = span_data
        %s
     end
end
|}
    (String.concat ~sep:" | "
       (List.dedup_and_sort ~compare:String.compare !positions))
    hardcoded
    (class_prelude ^ class_body)
    object_poly object_map object_span_data_map


================================================
FILE: engine/utils/generate_from_ast/codegen_visitor.ml
================================================
(** Give a list of {!Types.Datatype.t}, this file generates an ocaml module of
    visitors. *)

open Base
open Utils
open Types

(** What kind of visitor are we generating? *)
type kind = Map | MapReduce | Reduce

(** Helpers around kinds: does the visitor produce a mapped value and/or a
    reduced accumulator? *)
include struct
  let is_reduce kind =
    match kind with MapReduce | Reduce -> true | Map -> false

  let is_map kind = match kind with Map | MapReduce -> true | Reduce -> false
end

(** Various helpers and constants *)
include struct
  let method_prefix = "visit_"
  let acc_var_prefix = "acc___"
  let acc_var_param = acc_var_prefix ^ "param___var"
  let payload_var = "v___payload"
  let env_var = "env___var"

  (* Joins non-empty fragments with single spaces (application syntax). *)
  let app fragments =
    String.concat ~sep:" "
      (List.filter fragments ~f:(fun s -> not (String.is_empty s)))

  (* Parenthesizes a fragment only when it contains a space. *)
  let parens s =
    match String.contains s ' ' with true -> "(" ^ s ^ ")" | false -> s
end

(** Produces a method name given a dot-separated path *)
let method_name path =
  let segments = String.split ~on:'.' path in
  method_prefix ^ String.concat ~sep:"__" segments

(** Produces a visitor call for a type expression, without applying it. *)
let rec of_type' need_parens (t : Type.t) =
  (* Type variables (`'a`) are visited by a function argument `visit_'a`;
     named types dispatch to the corresponding method on self. *)
  let f =
    if String.is_prefix ~prefix:"'" t.typ then "visit_" ^ t.typ
    else "self#" ^ method_name t.typ
  in
  if List.is_empty t.args then f
  else
    app (f :: List.map ~f:(of_type' true) t.args)
    |> if need_parens then parens else Fn.id

(** Produces a complete visitor call for a type expression. *)
let of_type typ payload = app [ of_type' false typ; env_var; payload ]

(* Accumulator variable name for a record field. *)
let acc_var_for_field ((field, _) : Record.field) = acc_var_prefix ^ field

(** Given a list [x1; ...; xN], produces `self#plus x1 (self#plus ... (self#plus
    xN))` *)
(* The empty list yields "self#zero". *)
let self_plus =
  List.fold_left
    ~f:(fun acc var ->
      match acc with
      | None -> Some var
      | Some acc -> Some (app [ "self#plus"; parens acc; var ]))
    ~init:None
  >> Option.value ~default:"self#zero"

(** Creates a let expression *)
let mk_let ~lhs ~rhs = "let " ^ lhs ^ " = " ^ rhs ^ " in "

(* Generates one `let` binding visiting [value] of type [typ]; the left-hand
   side binds the mapped value and/or the accumulator, per [kind]. *)
let of_typed_binding ~kind (value, typ, value_binding, acc_binding) =
  let lhs =
    [
      (if is_map kind then [ value_binding ] else []);
      (if is_reduce kind then [ acc_binding ] else []);
    ]
    |> List.concat |> String.concat ~sep:", "
  in
  let rhs = of_type typ value in
  mk_let ~lhs ~rhs

(* Generates all bindings plus the combined accumulator expression. *)
let of_typed_bindings ~kind l =
  let lbs = List.map ~f:(of_typed_binding ~kind) l |> String.concat ~sep:"\n" in
  let acc = List.map ~f:(fun (_, _, _, acc) -> acc) l |> self_plus in
  (lbs, acc)

(* Joins the map component and/or the reduce component depending on [kind];
   for [MapReduce] both appear, separated by [sep]. *)
let tuple_if ~kind ?(sep = ", ") if_map if_reduce =
  let parts =
    (if is_map kind then [ if_map ] else [])
    @ (if is_reduce kind then [ if_reduce ] else [])
  in
  String.concat ~sep parts

(* Generates the visitor body for a record payload: visits each field, then
   rebuilds the record (prefixed by [constructor] for inline records) and
   combines the accumulators. *)
let of_record ~kind ~constructor (r : Record.t) =
  let lbs, acc =
    List.map
      ~f:(fun (field, typ) ->
        (payload_var ^ "." ^ field, typ, field, acc_var_for_field (field, typ)))
      r
    |> of_typed_bindings ~kind
  in
  let record =
    constructor ^ "{" ^ String.concat ~sep:"; " (List.map ~f:fst r) ^ "}"
  in
  let result = tuple_if ~kind record acc in
  (* let result = record ^ if is_reduce kind then ", " ^ acc else "" in *)
  lbs ^ "\n" ^ result

(* Generates the match arm for a tuple-payload variant: destructures into
   `x0..xN`, visits each component, rebuilds the variant. *)
let of_tuple_variant ~kind name (types : Type.t list) =
  let vars = List.mapi ~f:(fun i _ -> "x" ^ Int.to_string i) types in
  let accs = List.mapi ~f:(fun i _ -> "a" ^ Int.to_string i) types in
  let tuple = vars |> String.concat ~sep:", " |> parens in
  let lbs, acc =
    List.zip_exn types (List.zip_exn vars accs)
    |> List.map ~f:(fun (typ, (name, acc)) -> (name, typ, name, acc))
    |> of_typed_bindings ~kind
  in
  name ^ " " ^ tuple ^ " -> " ^ lbs ^ tuple_if ~kind (name ^ " " ^ tuple) acc

(* Generates the match arm for one variant, dispatching on its payload kind
   (tuple, none, or inline record). *)
let of_variant ~kind (v : Variant.t) =
  match v.payload with
  | Tuple l -> of_tuple_variant ~kind v.name l
  | None -> v.name ^ " -> " ^ tuple_if ~kind v.name "self#zero"
  | Record record ->
      v.name ^ " " ^ payload_var ^ " -> "
      ^ of_record ~kind ~constructor:v.name record

(* Generates the full visitor method for one datatype: its body (per kind of
   declaration) and its polymorphic method type, with one `visit_'a`
   function parameter per type variable. Opaque types are passed through
   unvisited. *)
let of_datatype ~kind (dt : Datatype.t) =
  let body =
    match dt.kind with
    | Record record -> of_record ~kind ~constructor:"" record
    | TypeSynonym typ -> of_type typ payload_var
    | Variant variants ->
        let arms =
          List.map ~f:(of_variant ~kind) variants |> String.concat ~sep:"\n  | "
        in
        "match " ^ payload_var ^ " with\n  " ^ arms
    | Opaque -> tuple_if ~kind payload_var "self#zero"
  in
  let meth = method_name dt.name in
  (* The type being visited, applied to its type variables. *)
  let self_typ =
    if Type.is_tuple_name dt.name then
      String.concat ~sep:" * " dt.type_vars |> parens
    else app [ String.concat ~sep:", " dt.type_vars |> parens; dt.name ]
  in
  let forall_clause = String.concat ~sep:" " dt.type_vars in
  (* One `'env -> 'a -> result` arrow per type variable's visitor. *)
  let arrs =
    List.map
      ~f:(fun tvar ->
        "'env -> " ^ tvar ^ " -> "
        ^ (tuple_if ~kind ~sep:" * " tvar "'acc" |> parens))
      dt.type_vars
  in
  let arrs =
    arrs @ [ "'env"; self_typ; tuple_if ~kind ~sep:" * " self_typ "'acc" ]
  in
  let arrs = List.map ~f:parens arrs |> String.concat ~sep:" -> " in
  let meth_typ =
    List.filter ~f:(String.is_empty >> not) [ forall_clause; arrs ]
    |> String.concat ~sep:"."
  in
  let visitors =
    List.map ~f:(fun tvar -> "visit_" ^ tvar) dt.type_vars |> app
  in
  "method " ^ meth ^ " : " ^ meth_typ ^ " = fun " ^ visitors ^ " " ^ env_var
  ^ " " ^ payload_var ^ " -> " ^ body

(** Hard coded visitors *)
(* Hand-written `visit_list` for each visitor kind; the generic machinery
   above does not produce list visitors itself. *)
let extra_visitors_for = function
  | Map ->
      "        method visit_list : 'a. ('env -> 'a -> 'a) -> 'env -> 'a list \
       -> 'a list\n\
      \            =\n\
      \          fun v env -> Base.List.map ~f:(v env)\n\n"
  | MapReduce ->
      "           method visit_list\n\
      \            : 'a. ('env -> 'a -> 'a * 'acc) -> 'env -> 'a list -> 'a \
       list * 'acc\n\
      \            =\n\
      \          fun v env ->\n\
      \            Base.List.fold_map ~init:self#zero ~f:(fun acc x ->\n\
      \                let x, acc' = v env x in\n\
      \                (self#plus acc acc', x))\n\
      \            >> swap\n\n"
  | Reduce ->
      "\n\
      \          method visit_list : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a \
       list -> 'acc =\n\
      \            fun v env this ->\n\
      \              Base.List.fold ~init:self#zero\n\
      \                ~f:(fun acc -> v env >> self#plus acc)\n\
      \                this"

(** Make one kind of visitor *)
(* Emits one virtual class (`map`, `mapreduce` or `reduce`) containing a
   visit method per datatype plus the hand-written extra visitors. *)
let mk_one ~kind (l : Datatype.t list) : string =
  let contents =
    List.map ~f:(of_datatype ~kind) l |> String.concat ~sep:"\n\n"
  in
  let name =
    [
      (if is_map kind then [ "map" ] else []);
      (if is_reduce kind then [ "reduce" ] else []);
    ]
    |> List.concat |> String.concat ~sep:""
  in
  let extra_visitors =
    (* visitor_for_tuples ~kind ^ "\n\n" ^ *)
    extra_visitors_for kind
  in
  "class virtual ['self] " ^ name ^ " = object (self : 'self)" ^ contents ^ "\n"
  ^ extra_visitors ^ "\nend"

(** AST.ml-specific headers *)
(* Note: the generated `[@@@warning "-27"]` silences unused-variable
   warnings inside the generated visitor bodies. *)
let header =
  "open Ast\n\
   open! Utils\n\
   open Base\n\n\
   module Make =\n\
   functor\n\
  \  (F : Features.T)\n\
  \  ->\n\
  \  struct\n\
  \    [@@@warning \"-27\"]\n\n\
  \    open Make (F)\n"

(** Only certain types should be opaque in AST.ml: an explicit allowlist,
    plus anything from the features module (prefixed `F.`). *)
let is_allowed_opaque name =
  String.is_prefix ~prefix:"F." name
  || List.mem ~equal:String.equal
       [
         "Local_ident.t";
         "bool";
         "char";
         "concrete_ident";
         "global_ident";
         "attr";
         "local_ident";
         "signedness";
         "size";
         "span";
         "string";
         "todo";
         "float_kind";
         "int_kind";
         "item_quote_origin_position";
         "item_kind";
       ]
       name

(** Make all three kinds of visitors for a list of datatypes *)
let mk (l : Datatype.t list) : string =
  (* Prepend visitable primitive types (tuples, option). *)
  let l = Primitive_types.(tuples @ [ option ]) @ l in
  (* Types referenced but not defined become opaque (identity) visitors. *)
  let opaques =
    Visitors.collect_undefined_types l
    |> List.map ~f:(fun name ->
           Datatype.{ name; type_vars = []; kind = Opaque })
  in
  (* Fail loudly if an undefined type is not on the opaque allowlist. *)
  (match
     Visitors.collect_undefined_types l
     |> List.filter ~f:(is_allowed_opaque >> not)
   with
  | [] -> ()
  | disallowed ->
      let msg =
        "visitor generation: forbidden opaque type: "
        ^ [%show: string list] disallowed
      in
      Stdio.prerr_endline msg;
      failwith msg);
  let l = opaques @ l in
  let visitors =
    List.map ~f:(fun kind -> mk_one ~kind l) [ Map; MapReduce; Reduce ]
  in
  let visitors = visitors |> String.concat ~sep:"\n\n" in
  [ header; visitors; "end" ] |> String.concat ~sep:"\n\n"


================================================
FILE: engine/utils/generate_from_ast/dune
================================================
; Code-generator executable: reads hax's AST module on stdin and emits OCaml
; (visitors, printers, builders, destructors) on stdout.
(executable
 (public_name generate_from_ast)
 (name generate_from_ast)
 (package hax-engine)
 (libraries ppxlib base stdio ppx_deriving_yojson.runtime)
 (preprocess
  (pps
   ppxlib.metaquot
   ppx_deriving.eq
   ppx_yojson_conv
   ppx_compare
   ppx_deriving.show)))

; Demote all warnings from errors, except warning 8 (non-exhaustive match),
; which stays fatal.
(env
 (_
  (flags
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/utils/generate_from_ast/errors.ml
================================================
open Ppxlib
open! Ppx_yojson_conv_lib.Yojson_conv.Primitives

(** Define `pp_*` functions for some types of the OCaml ASTs so that we can
    show them *)
include struct
  let pp_core_type = Pprintast.core_type

  (* Labels and constructors are shown by their name only. *)
  let pp_label_declaration fmt label_decl =
    Stdlib.Format.pp_print_string fmt label_decl.pld_name.txt

  let pp_constructor_declaration fmt cons_decl =
    Stdlib.Format.pp_print_string fmt cons_decl.pcd_name.txt

  (* A type declaration is printed by wrapping it into a structure item,
     which Pprintast knows how to render. *)
  let pp_type_declaration fmt type_decl =
    Pprintast.structure_item fmt
      {
        pstr_loc = Astlib.Location.none;
        pstr_desc = Pstr_type (Nonrecursive, [ type_decl ]);
      }
end

(** The type of the various errors that can occur; each variant carries the
    unsupported OCaml AST fragment. *)
type t =
  | UnsupportedCoreType of core_type
  | UnsupportedLabelDeclaration of label_declaration
  | UnsupportedConstructorDeclaration of constructor_declaration
  | UnsupportedTypeDeclaration of type_declaration
[@@deriving show]

(** We can't derive yojson for OCaml types. Thus this indirection, that prints
    payload of `t` as string, and *then* produces JSON. *)
open struct
  (* String-payload mirror of [t], for which yojson can be derived. *)
  type t_string =
    | UnsupportedCoreType of string
    | UnsupportedLabelDeclaration of string
    | UnsupportedConstructorDeclaration of string
    | UnsupportedTypeDeclaration of string
  [@@deriving show, yojson]

  (* Renders each payload with its derived `show` and re-tags it. *)
  let into_string : t -> t_string = function
    | UnsupportedCoreType core_type ->
        UnsupportedCoreType ([%show: core_type] core_type)
    | UnsupportedLabelDeclaration label_declaration ->
        UnsupportedLabelDeclaration
          ([%show: label_declaration] label_declaration)
    | UnsupportedConstructorDeclaration constructor_declaration ->
        UnsupportedConstructorDeclaration
          ([%show: constructor_declaration] constructor_declaration)
    | UnsupportedTypeDeclaration type_declaration ->
        UnsupportedTypeDeclaration ([%show: type_declaration] type_declaration)
end

(* Serialize an error via its string-payload mirror type. *)
let yojson_of_t (e : t) = into_string e |> [%yojson_of: t_string]
let _ = pp_t_string (* just to silence OCaml warning *)

(* Carries a [t]. NOTE(review): presumably raised by the datatype
   translation when it meets an unsupported construct — confirm at the
   raise sites. *)
exception Error of t


================================================
FILE: engine/utils/generate_from_ast/generate_from_ast.ml
================================================
open Base
open Utils
open Types

(* Entry point: read an OCaml implementation on stdin, keep the relevant type
   declarations, and print the code generated by the backend selected by the
   single CLI argument. *)
let _main =
  (* Parse the OCaml source arriving on stdin. *)
  let ocaml_file =
    Stdio.In_channel.stdin |> Lexing.from_channel
    |> Ppxlib_ast.Parse.implementation
  in
  let datatypes =
    type_declaration_of_structure ocaml_file
    |> List.filter ~f:(fun (path, _) ->
           (* We only look at certain types in the AST.ml module *)
           String.is_prefix ~prefix:"Make." path
           || List.mem ~equal:String.equal
                [
                  "mutability"; "literal"; "attrs"; "quote"; "item_quote_origin";
                ]
                path)
    |> List.filter_map ~f:(fun (path, td) ->
           let path =
             String.chop_prefix ~prefix:"Make." path
             |> Option.value ~default:path
           in
           match Datatype.of_ocaml_result td with
           (* Use path as name, can be useful if used on something else than AST.ml *)
           | Result.Ok v -> Some Datatype.{ v with name = path }
           | _ -> None)
  in
  (* Pick the code generator according to the CLI argument. *)
  let mk =
    match Sys.get_argv () with
    | [| _; "visitors" |] -> Codegen_visitor.mk
    | [| _; "printer" |] -> Codegen_printer.mk
    | [| _; "ast_builder" |] -> Codegen_ast_builder.mk
    | [| _; "ast_destruct" |] -> Codegen_ast_destruct.mk
    | [| _; "json" |] ->
        [%yojson_of: Datatype.t list] >> Yojson.Safe.pretty_to_string
    | [| _; verb |] ->
        failwith ("`generate_from_ast`: unknown action `" ^ verb ^ "`")
    | _ -> failwith "`generate_from_ast`: expected one argument"
  in
  let data = mk datatypes in
  (* Stdio.Out_channel.write_all "/tmp/debug-generated-code.ml" ~data; *)
  Stdio.print_endline data


================================================
FILE: engine/utils/generate_from_ast/primitive_types.ml
================================================
(** This module encodes several primitive OCaml types as Datatype.t so that
    visitors can be generated automatically for them as well. *)

open Base
open! Utils
open Types

(** Builds a [Type.t] that is just the variable [typ], with no arguments. *)
let ty_var typ = { Type.typ; args = [] }

(** Produces a datatype description for tuples of a given length: a single
    anonymous variant whose payload is the [len] type variables ['t0..'tn]. *)
let mk_tuple len =
  let type_vars = List.init len ~f:(fun i -> "'t" ^ Int.to_string i) in
  let payload = VariantPayload.Tuple (List.map ~f:ty_var type_vars) in
  let kind = Datatype.Variant [ Variant.{ name = ""; payload } ] in
  Datatype.{ name = Type.tuple_name len; type_vars; kind }

(** Common sizes of tuples (pairs, triples and quadruples). *)
let tuples = List.map [ 2; 3; 4 ] ~f:mk_tuple

(** Datatype description for the option type: `Some of 'a | None`. *)
let option =
  let some =
    Variant.{ name = "Some"; payload = VariantPayload.Tuple [ ty_var "'a" ] }
  in
  let none = Variant.{ name = "None"; payload = VariantPayload.None } in
  Datatype.
    { name = "option"; type_vars = [ "'a" ]; kind = Variant [ some; none ] }


================================================
FILE: engine/utils/generate_from_ast/types.ml
================================================
(** This module defines a subset of OCaml inductives as a nice and simple AST *)

open Base
open! Utils
open Errors

(** Describe what is a type expression, reflects OCaml's `core_type`. *)
module Type = struct
  let tuple_prefix = "prim___tuple_"
  let is_tuple_name = String.is_prefix ~prefix:tuple_prefix
  (* Name of the primitive tuple type of arity [len], e.g. "prim___tuple_2". *)
  let tuple_name (len : int) : string = tuple_prefix ^ Int.to_string len
  let unit_name : string = "unit___"

  (* Render a longident such as `A.B.c` as the string "A.B.c". *)
  let lident_to_string lident =
    Astlib.Longident.flatten lident |> String.concat ~sep:"."

  type t = { typ : string; args : t list } [@@deriving show, yojson]

  (* Smart constructor: an empty tuple is unit, a singleton is the type
     itself, anything longer uses the primitive tuple names above. *)
  let tuple args =
    match args with
    | [] -> { typ = unit_name; args }
    | [ typ ] -> typ
    | _ -> { typ = tuple_name (List.length args); args }

  let unsupported v = raise (Error (UnsupportedCoreType v))

  open Ppxlib

  (* Translate a ppxlib `core_type`. Only type variables, tuples and type
     constructors are supported; anything else raises [Error]. *)
  let rec of_ocaml (t : core_type) : t =
    match t.ptyp_desc with
    | Ptyp_var typ -> { typ = "'" ^ typ; args = [] }
    | Ptyp_tuple types -> List.map ~f:of_ocaml types |> tuple
    | Ptyp_constr (lident, types) ->
        { typ = lident_to_string lident.txt; args = List.map ~f:of_ocaml types }
    | _ -> unsupported t
end

(** Describe what is a record, reflects OCaml's `label_declaration`. *)
module Record = struct
  type field = string * Type.t [@@deriving show, yojson]
  type t = field list [@@deriving show, yojson]

  let unsupported v = raise (Error (UnsupportedLabelDeclaration v))

  open Ppxlib

  (* Translate one record field; mutable fields are rejected. *)
  let field_of_ocaml (label_decl : label_declaration) : field =
    (match label_decl.pld_mutable with
    | Mutable -> unsupported label_decl
    | _ -> ());
    (label_decl.pld_name.txt, Type.of_ocaml label_decl.pld_type)

  let of_ocaml : label_declaration list -> t = List.map ~f:field_of_ocaml
end

(** Describe what is a variant payload, reflects OCaml's
    `constructor_arguments`. *)
module VariantPayload = struct
  type t = Record of Record.t | Tuple of Type.t list | None
  [@@deriving show, yojson]

  open Ppxlib

  (* A single constructor argument that is syntactically a tuple is flattened
     into a [Tuple] of its components. *)
  let of_ocaml (cons_decl : constructor_arguments) : t =
    match cons_decl with
    | Pcstr_tuple [] -> None
    | Pcstr_tuple [ typ ] -> (
        match typ.ptyp_desc with
        | Ptyp_tuple types -> Tuple (List.map ~f:Type.of_ocaml types)
        | _ -> Tuple [ Type.of_ocaml typ ])
    | Pcstr_tuple types -> Tuple (List.map ~f:Type.of_ocaml types)
    | Pcstr_record label_decls -> Record (Record.of_ocaml label_decls)
end

(** Describe what is a variant, reflects OCaml's `constructor_declaration`. *)
module Variant = struct
  type t = { name : string; payload : VariantPayload.t }
  [@@deriving show, yojson]

  let unsupported v = raise (Error (UnsupportedConstructorDeclaration v))

  open Ppxlib

  (* Constructors binding existential type variables (GADT-style) are
     rejected. *)
  let of_ocaml (cons_decl : constructor_declaration) : t =
    if List.is_empty cons_decl.pcd_vars |> not then unsupported cons_decl;
    let payload = VariantPayload.of_ocaml cons_decl.pcd_args in
    { name = cons_decl.pcd_name.txt; payload }
end

(** A result type. Declared locally (instead of reusing Base's) so that
    `show` and `yojson` can be derived for it. *)
module Result = struct
  type ('r, 'e) t = Ok of 'r | Error of 'e [@@deriving show, yojson]
end

(** Describe what is a datatype, reflects ppx' `type_declaration`. *)
module Datatype = struct
  type kind =
    | Record of Record.t
    | Variant of Variant.t list
    | TypeSynonym of Type.t
    | Opaque
        (** `Opaque` is not produced by `of_ocaml` below; it is used by
            `codegen_visitor` to generate identity visitors *)
  [@@deriving show, yojson]

  type t = { name : string; type_vars : string list; kind : kind }
  [@@deriving show, yojson]

  let unsupported v = raise (Error (UnsupportedTypeDeclaration v))

  (* Translate a ppxlib type declaration. Only plain type-variable parameters
     are accepted, and the declaration must carry no constraints. *)
  let of_ocaml (type_decl : Ppxlib.type_declaration) : t =
    let open Ppxlib in
    let name = type_decl.ptype_name.txt in
    let type_vars =
      List.map
        ~f:(fun (t, _) ->
          match t.ptyp_desc with
          | Ptyp_var n -> "'" ^ n
          | _ -> unsupported type_decl)
        type_decl.ptype_params
    in
    if List.is_empty type_decl.ptype_cstrs |> not then unsupported type_decl;
    let kind =
      match (type_decl.ptype_kind, type_decl.ptype_manifest) with
      | Ptype_abstract, Some typ -> TypeSynonym (Type.of_ocaml typ)
      | Ptype_variant cons_decls, None ->
          Variant (List.map ~f:Variant.of_ocaml cons_decls)
      | Ptype_record label_decls, None -> Record (Record.of_ocaml label_decls)
      | _ -> unsupported type_decl
    in
    { name; kind; type_vars }

  (* Like [of_ocaml], but turns the [Errors.Error] exception into a
     [Result.Error]. *)
  let of_ocaml_result (type_decl : Ppxlib.type_declaration) :
      (t, Errors.t) Result.t =
    try Result.Ok (of_ocaml type_decl) with Errors.Error e -> Result.Error e
end


================================================
FILE: engine/utils/generate_from_ast/utils.ml
================================================
open Base
include Ppx_yojson_conv_lib.Yojson_conv.Primitives

let ( >> ) f g x = g (f x)

(* Collect every type declaration of a structure, paired with its
   dot-separated module path (e.g. "Make.expr"), in source order. *)
let type_declaration_of_structure (str : Ppxlib.structure) :
    (string * Ppxlib.type_declaration) list =
  let open Ppxlib in
  let visitor =
    object (self)
      inherit Ast_traverse.iter as super
      (* Accumulated (path, declaration) pairs, most recent first. *)
      val mutable result = []
      (* Stack of enclosing module names, innermost first; anonymous modules
         contribute [None] (rendered as ""). *)
      val mutable path_state = []

      method get_path () =
        List.rev path_state |> List.map ~f:(Option.value ~default:"")

      method get_result () = List.rev result

      (* Push the module name while traversing its body, restore after. *)
      method! module_binding mb =
        let prev_path = path_state in
        path_state <- mb.pmb_name.txt :: path_state;
        super#module_binding mb;
        path_state <- prev_path;
        ()

      method! type_declaration decl =
        let path =
          self#get_path () @ [ decl.ptype_name.txt ] |> String.concat ~sep:"."
        in
        result <- (path, decl) :: result
    end
  in
  visitor#structure str;
  visitor#get_result ()


================================================
FILE: engine/utils/generate_from_ast/visitors.ml
================================================
(** This module is mostly generated, but hand-edited, it defines visitors for
    the types defined in module `Types`. *)

open Base
open Types
open Utils

(* Generic bottom-up fold over the `Types` datatypes: each `visit_*` method
   folds the results of its children with [plus], starting from [zero].
   Subclasses provide [plus]/[zero] (and override selected visit methods) to
   compute an accumulated value over a `Datatype.t list`. *)
class virtual ['self] reduce =
  object (self : 'self)
    method virtual plus : 'acc -> 'acc -> 'acc
    method virtual zero : 'acc
    method visit_string (_env : 'env) (_s : string) = self#zero

    (* Tuple visitors combine component results left-to-right with [plus]. *)
    method visit_prim___tuple_2 :
        't0 't1.
        ('env -> 't0 -> 'acc) ->
        ('env -> 't1 -> 'acc) ->
        'env ->
        't0 * 't1 ->
        'acc =
      fun visit_'t0 visit_'t1 env___var v___payload ->
        match v___payload with
        | x0, x1 ->
            let a0 = visit_'t0 env___var x0 in
            let a1 = visit_'t1 env___var x1 in
            self#plus a0 a1

    method visit_prim___tuple_3 :
        't0 't1 't2.
        ('env -> 't0 -> 'acc) ->
        ('env -> 't1 -> 'acc) ->
        ('env -> 't2 -> 'acc) ->
        'env ->
        't0 * 't1 * 't2 ->
        'acc =
      fun visit_'t0 visit_'t1 visit_'t2 env___var v___payload ->
        match v___payload with
        | x0, x1, x2 ->
            let a0 = visit_'t0 env___var x0 in
            let a1 = visit_'t1 env___var x1 in
            let a2 = visit_'t2 env___var x2 in
            self#plus (self#plus a0 a1) a2

    method visit_prim___tuple_4 :
        't0 't1 't2 't3.
        ('env -> 't0 -> 'acc) ->
        ('env -> 't1 -> 'acc) ->
        ('env -> 't2 -> 'acc) ->
        ('env -> 't3 -> 'acc) ->
        'env ->
        't0 * 't1 * 't2 * 't3 ->
        'acc =
      fun visit_'t0 visit_'t1 visit_'t2 visit_'t3 env___var v___payload ->
        match v___payload with
        | x0, x1, x2, x3 ->
            let a0 = visit_'t0 env___var x0 in
            let a1 = visit_'t1 env___var x1 in
            let a2 = visit_'t2 env___var x2 in
            let a3 = visit_'t3 env___var x3 in
            self#plus (self#plus (self#plus a0 a1) a2) a3

    method visit_option : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a option -> 'acc
        =
      fun visit_'a env___var v___payload ->
        match v___payload with
        | Some x0 ->
            let a0 = visit_'a env___var x0 in
            a0
        | None -> self#zero

    method visit_Type__t : 'env -> Type.t -> 'acc =
      fun env___var v___payload ->
        let acc___typ = self#visit_string env___var v___payload.typ in
        let acc___args =
          self#visit_list self#visit_Type__t env___var v___payload.args
        in
        self#plus acc___typ acc___args

    method visit_Record__field : 'env -> Record.field -> 'acc =
      fun env___var v___payload ->
        self#visit_prim___tuple_2 self#visit_string self#visit_Type__t env___var
          v___payload

    method visit_Record__t : 'env -> Record.t -> 'acc =
      fun env___var v___payload ->
        self#visit_list self#visit_Record__field env___var v___payload

    method visit_VariantPayload__t : 'env -> VariantPayload.t -> 'acc =
      fun env___var v___payload ->
        match v___payload with
        | Record x0 ->
            let a0 = self#visit_Record__t env___var x0 in
            a0
        | Tuple x0 ->
            let a0 = self#visit_list self#visit_Type__t env___var x0 in
            a0
        | None -> self#zero

    method visit_Variant__t : 'env -> Variant.t -> 'acc =
      fun env___var v___payload ->
        let acc___name = self#visit_string env___var v___payload.name in
        let acc___payload =
          self#visit_VariantPayload__t env___var v___payload.payload
        in
        self#plus acc___name acc___payload

    method visit_Result__t :
        'r 'e.
        ('env -> 'r -> 'acc) ->
        ('env -> 'e -> 'acc) ->
        'env ->
        ('r, 'e) Result.t ->
        'acc =
      fun visit_'r visit_'e env___var v___payload ->
        match v___payload with
        | Ok x0 ->
            let a0 = visit_'r env___var x0 in
            a0
        | Error x0 ->
            let a0 = visit_'e env___var x0 in
            a0

    method visit_Datatype__kind : 'env -> Datatype.kind -> 'acc =
      fun env___var v___payload ->
        match v___payload with
        | Record x0 ->
            let a0 = self#visit_Record__t env___var x0 in
            a0
        | Variant x0 ->
            let a0 = self#visit_list self#visit_Variant__t env___var x0 in
            a0
        | TypeSynonym x0 ->
            let a0 = self#visit_Type__t env___var x0 in
            a0
        | Opaque -> self#zero

    method visit_Datatype__t : 'env -> Datatype.t -> 'acc =
      fun env___var v___payload ->
        let acc___name = self#visit_string env___var v___payload.name in
        let acc___type_vars =
          self#visit_list self#visit_string env___var v___payload.type_vars
        in
        let acc___kind = self#visit_Datatype__kind env___var v___payload.kind in
        self#plus (self#plus acc___name acc___type_vars) acc___kind

    (* Entry point: fold over a whole list of datatypes. *)
    method visit_datatypes : 'env -> Datatype.t list -> 'acc =
      self#visit_list self#visit_Datatype__t

    method visit_list : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a list -> 'acc =
      fun v env this ->
        Base.List.fold ~init:self#zero
          ~f:(fun acc -> v env >> self#plus acc)
          this
  end

(* Set of type names defined by a list of datatypes. *)
let collect_defined_types =
  let visitor =
    object
      inherit [_] reduce
      method plus = Set.union
      method zero = Set.empty (module String)
      method! visit_Datatype__t () dt = Set.singleton (module String) dt.name
    end
  in
  visitor#visit_datatypes ()

(* Set of type names referenced by a list of datatypes; type variables
   (names starting with a quote) and `list` are not counted. *)
let collect_used_types =
  let visitor =
    object (self)
      inherit [_] reduce as super
      method plus = Set.union
      method zero = Set.empty (module String)

      method! visit_Type__t () t =
        let here =
          if String.is_prefix ~prefix:"'" t.typ || String.equal t.typ "list"
          then self#zero
          else Set.singleton (module String) t.typ
        in
        self#plus here (super#visit_Type__t () t)
    end
  in
  visitor#visit_datatypes ()

(* Names that are used by [dts] but never defined in [dts]. *)
let collect_undefined_types dts : string list =
  Set.to_list (Set.diff (collect_used_types dts) (collect_defined_types dts))


================================================
FILE: engine/utils/hacspeclib-macro-parser/dune
================================================
(library
 (name hacspeclib_macro_parser)
 (package hax-engine)
 (libraries yojson angstrom)
 (preprocess
  (pps
   ppx_yojson_conv
   ppx_sexp_conv
   ppx_compare
   ppx_hash
   ppx_deriving.show
   ppx_deriving.eq
   ppx_matches)))

(env
 (_
  (flags
   (:standard -warn-error "-A+8" -w "-17-7-30-56-32"))))


================================================
FILE: engine/utils/hacspeclib-macro-parser/hacspeclib_macro_parser.ml
================================================
open! Base
open Angstrom
open Ppx_yojson_conv_lib.Yojson_conv.Primitives

(* Small reusable Angstrom parsers shared by the macro parsers below. *)
module BasicParsers = struct
  let is_space = function ' ' | '\t' | '\n' -> true | _ -> false

  let is_identifier = function
    | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' | '_' -> true
    | _ -> false

  let is_digit = function '0' .. '9' -> true | _ -> false
  let spaces = Fn.const () <$> take_while is_space
  let ignore_spaces p = spaces *> p <* spaces
  let identifier = ignore_spaces @@ take_while1 is_identifier

  (* One or more [p], allowing and discarding `_` separators in between. *)
  let many1_ignore_underscores p =
    List.filter_map ~f:Fn.id
    <$> many1 (Option.some <$> p <|> (Fn.const None <$> char '_'))

  let take_while1_ignore_underscores f =
    String.of_char_list <$> many1_ignore_underscores (satisfy f)

  (* Decimal integer, allowing `_` digit separators. *)
  let number =
    ignore_spaces (Int.of_string <$> take_while1_ignore_underscores is_digit)

  let is_hex = function
    | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> true
    | _ -> false

  (* `0x`-prefixed hex digits; yields the digits without the prefix. *)
  let hex_literal =
    ignore_spaces (string "0x" *> take_while1_ignore_underscores is_hex)

  let comma = Fn.const () <$> ignore_spaces @@ char ','
  let colon = Fn.const () <$> ignore_spaces @@ char ':'
  let maybe p = Option.some <$> p <|> return None
  let parens p = ignore_spaces (char '(') *> p <* ignore_spaces (char ')')
  let quoted p = ignore_spaces (char '"') *> p <* ignore_spaces (char '"')
  (* Named field: `name : <p>`. *)
  let field name p = string name *> colon *> p
  (* A `// ...` comment, consumed to end of line. *)
  let comment = ignore_spaces (string "//" *> take_while Char.(( <> ) '\n'))
  let ignore_comment = Fn.const () <$> maybe comment
end

open BasicParsers

(* Interface a macro-payload parser must provide to the [Make] functor:
   a result type, a display name (for error messages) and the parser. *)
module type Parser = sig
  type t [@@deriving show, yojson, eq]

  val name : string
  val parser : t Angstrom.t
end

(* Wrap a payload parser into a [parse] function that consumes the whole
   parenthesized input; failures are logged to stderr (with the parser's
   name and the raw input) before being returned. *)
module Make (M : Parser) : sig
  val parse : string -> (M.t, string) Result.t
end = struct
  open M

  let parse input =
    match parse_string ~consume:All (parens parser <* end_of_input) input with
    | Ok e -> Ok e
    | Error e ->
        Stdlib.prerr_endline @@ "########## Error while parsing: (" ^ name ^ ")";
        Stdlib.prerr_endline input;
        Error e
end

(* Parser for the `array!(name, size, typ [, type_for_indexes: t])` macro. *)
module Array = struct
  module M = struct
    type t = {
      array_name : string;
      size : string;
      typ : string;
      index_typ : string option;
    }
    [@@deriving show, yojson, eq]

    let parser =
      let* array_name = identifier <* comma in
      let* size = identifier <* comma in
      let* typ = identifier in
      (* The index type annotation is optional. *)
      let+ index_typ =
        maybe @@ (comma *> string "type_for_indexes" *> colon *> identifier)
      in
      { array_name; size; typ; index_typ }

    let name = "array"
  end

  include M
  include Make (M)
end

(* Parser for the `bytes!(name, size)` macro. *)
module Bytes = struct
  module M = struct
    type t = { bytes_name : string; size : string }
    [@@deriving show, yojson, eq]

    let parser =
      let* bytes_name = identifier <* comma in
      let+ size =
        identifier
        (* this covers number and constants, but this leads to namespacing issues... *)
      in
      { bytes_name; size }

    let name = "bytes"
  end

  include M
  include Make (M)
end

(* Parser for the `unsigned_public_integer!(name, bits)` macro. *)
module UnsignedPublicInteger = struct
  module M = struct
    type t = { integer_name : string; bits : int } [@@deriving show, yojson, eq]

    let parser =
      let* integer_name = identifier <* comma in
      let+ bits = number in
      { integer_name; bits }

    let name = "unsigned_public_integer"
  end

  include M
  include Make (M)
end

(* Parser for the `public_nat_mod!(...)` macro, whose four named fields may
   appear in any order. *)
module PublicNatMod = struct
  module M = struct
    type t = {
      type_name : string;
      type_of_canvas : string;
      bit_size_of_field : int;
      modulo_value : string;
    }
    [@@deriving show, yojson, eq]

    (* Intermediate record: every field optional while parsing. *)
    type t' = {
      type_name : string option;
      type_of_canvas : string option;
      bit_size_of_field : int option;
      modulo_value : string option;
    }

    (* Each alternative parses one named field and yields an updater
       function; exactly four comma-separated fields are read and their
       updaters applied to an all-[None] record, so order doesn't matter. *)
    let parser' : t' Angstrom.t =
      let type_name =
        (fun x acc -> { acc with type_name = Some x })
        <$> field "type_name" identifier
      in
      let type_of_canvas =
        (fun x acc -> { acc with type_of_canvas = Some x })
        <$> field "type_of_canvas" identifier
      in
      let bit_size_of_field =
        (fun x acc -> { acc with bit_size_of_field = Some x })
        <$> field "bit_size_of_field" number
      in
      let modulo_value =
        (fun x acc -> { acc with modulo_value = Some x })
        <$> field "modulo_value" (quoted @@ take_while1 is_hex)
      in
      let f =
        type_name <|> type_of_canvas <|> bit_size_of_field <|> modulo_value
      in
      let* f1 = ignore_comment *> f <* comma <* ignore_comment in
      let* f2 = f <* comma <* ignore_comment in
      let* f3 = f <* comma <* ignore_comment in
      let+ f4 = f <* ignore_comment in
      {
        type_name = None;
        type_of_canvas = None;
        bit_size_of_field = None;
        modulo_value = None;
      }
      |> f1 |> f2 |> f3 |> f4

    (* Fail unless all four distinct fields were provided. *)
    let parser =
      let* x = parser' in
      match x with
      | {
       type_name = Some type_name;
       type_of_canvas = Some type_of_canvas;
       bit_size_of_field = Some bit_size_of_field;
       modulo_value = Some modulo_value;
      } ->
          return
            ({ type_name; type_of_canvas; bit_size_of_field; modulo_value } : t)
      | _ -> fail "Some fields are missing"

    let name = "public_nat_mod"
  end

  include M
  include Make (M)
end


================================================
FILE: engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js
================================================
// Property names of `p` as a Set, minus the doc/layout-only keys.
// `additionalProperties` itself is dropped only when its value is
// explicitly `false`.
const keys = p => {
    const ignored = ['description', 'maxItems', 'minItems'];
    const names = Object.keys(p)
        .filter(k => !ignored.includes(k))
        .filter(k => p?.additionalProperties !== false || k != 'additionalProperties');
    return new Set(names);
};
// Set equality: same cardinality and every element of `xs` occurs in `ys`.
const eq = (xs, ys) => {
    if (xs.size !== ys.size) return false;
    for (const x of xs)
        if (!ys.has(x)) return false;
    return true;
};

let todo = (todo = "todo") => null;

let assert = (fact, msg = "assert") => {
    if (!fact)
        throw msg;
};

// Does `o` have exactly the keys in `key_list` (after `keys`' filtering)?
let exact_keys = (o, ...key_list) => eq(keys(o), new Set(key_list));


// Normalize a JSON-schema node, recursing into children:
//  - inline single-element `allOf` wrappers (mutates `o` in place),
//  - rewrite `type: [T, 'null']` into `anyOf: [T, {type:'null'}]`,
//  - drop the layout-only `maxItems`/`minItems` keys.
// Returns a fresh array/object for containers, the value itself otherwise.
const clean = o => {
    if (o instanceof Object
        && exact_keys(o, 'allOf')
        && o.allOf.length == 1
    ) {
        let first = o.allOf[0];
        delete o['allOf'];
        for (let k in first)
            o[k] = first[k];
    }
    if (o instanceof Object
        && 'type' in o
        && o.type instanceof Array
        && o.type.length === 2
        && o.type.includes('null')
    ) {
        // `type: [T, 'null']` encodes an optional value; normalize it to the
        // `anyOf` shape the rest of the pipeline recognizes as `option`.
        let type = o.type.filter(x => x != 'null')[0];
        let other = JSON.parse(JSON.stringify(o));
        other.type = type;
        for (let k in o)
            delete o[k];
        o.anyOf = [other, { type: 'null' }];
    }
    if (o instanceof Array) {
        return o
            .filter(o => true)
            .map(clean);
    }
    if (o instanceof Object) {
        delete o['maxItems'];
        delete o['minItems'];
        return Object.fromEntries(Object.entries(o).map(([k, v]) => [k, clean(v)]));
    } else {
        return o;
    }
};
// `s` is unchanged by upper-casing (note: non-letters count as upper-case).
let isUpperCase = s => s.toUpperCase() === s;
// The first character of `s` is upper-case.
let startsWithUpper = s => isUpperCase(s[0]);

// Capitalize the first character (throws on the empty string, as before).
let makeFirstCharUpper = s => `${s[0].toUpperCase()}${s.slice(1)}`;
// Lower-case the first character (throws on the empty string, as before).
let makeFirstCharLower = s => `${s[0].toLowerCase()}${s.slice(1)}`;


// OCaml variant name for `s`: capitalize, and add a prime to names that
// would collide with the stdlib constructors `Some`/`None`.
let variantNameOf = s => {
    const name = makeFirstCharUpper(s);
    return ['Some', 'None'].includes(name) ? name + "'" : name;
};
// Append a prime to identifiers that clash with an OCaml keyword so the
// generated OCaml stays parseable. The keyword set is built once (in an
// IIFE) instead of re-allocating the array and linearly scanning it on
// every call — this function runs for every field/type name emitted.
let escapeOCamlKeywords = (() => {
    const ocaml_keywords = new Set(["and", "as", "assert", "asr", "begin", "class", "constraint",
        "do", "done", "downto", "else", "end", "exception", "external",
        "false", "for", "fun", "function", "functor", "if", "in",
        "include", "inherit", "initializer", "land", "lazy", "let",
        "lor", "lsl", "lsr", "lxor", "match", "method", "mod", "module",
        "mutable", "new", "nonrec", "object", "of", "open", "or",
        "private", "rec", "sig", "struct", "then", "to", "true", "try",
        "type", "val", "virtual", "when", "while", "with"
    ]);
    return s => ocaml_keywords.has(s) ? s + "'" : s;
})();
// CamelCase -> snake_case, then escape OCaml keywords.
let typeNameOf = s => escapeOCamlKeywords(s.replace(/[A-Z]/g, (letter, index) => (index ? '_' : '') + letter.toLowerCase()));
// Field names only need keyword escaping.
let fieldNameOf = s => escapeOCamlKeywords(s);

// Intended to guarantee each (kind, v) pair is only emitted once; the
// actual duplicate check is disabled (see TODO), so currently this just
// records the pair and returns `v` unchanged.
let ensureUnique = (() => {
    let cache = {};
    return (kind, v, disambiguer) => {
        let key = JSON.stringify({ kind, v });
        // TODO: enable check below, find a good solution
        // if(cache[key])
        //     throw `dup ${kind}, ${v}`;
        cache[key] = true;
        return v;
    };
})();

const util = require('util');
// Pretty-print `o` to stderr with unbounded depth and colors, for debugging.
let log_full = o => console.error(util.inspect(o, { showHidden: false, depth: null, colors: true }));

// Debug helper: wrap a unary function so every call logs `name`, the input
// and the output to stderr, then behaves exactly like `f`.
let trace1 = (name, f) => (input) => {
    const output = f(input);
    log_full({ name, input, output });
    return output;
};

// Render a parsed type expression (as produced by `is_type.expr`) as OCaml
// type syntax. `path` is only threaded through for error reporting.
let ocaml_of_type_expr = (o, path) => {
    if (!path)
        throw "Path missing!";
    let { kind, payload } = o;
    // Dispatch on `kind` via an object literal; unknown kinds fall through
    // to the throwing default.
    return (({
        option: type => `(${ocaml_of_type_expr(type, [...path, 'option'])} option)`,
        unit: _ => `unit`,
        tuple: types => `(${types.map((t, i) => ocaml_of_type_expr(t, [...path, 'tuple', i])).join(' * ')})`,
        array: type => `(${ocaml_of_type_expr(type, [...path, 'array'])} list)`,
        boolean: _ => `bool`,
        string: _ => `string`,
        char: _ => `char`,
        integer: _ => ({
            int64: 'Base.Int64.t',
            string: 'string',
            int: 'int'
        })[o.repr],
        name: payload => typeNameOf(payload),
    })[kind] || (_ => {
        log_full(o);
        throw "ocaml_of_type_expr: bad kind " + kind;
    }))(payload);
};


// Build an OCaml `match` expression over `scrut` from `[pattern, expr]`
// pairs, appending a catch-all arm that fails with a parse-error message
// mentioning `path` and pretty-printing the unmatched JSON.
let mk_match = (scrut, arms, path) => {
    if (!path) {
        console.trace();
        throw "Path missing!";
    }
    // console.log({scrut, arms});
    return `
begin match ${scrut} with
${[...arms, ['_', `failwith ("parsing error: ${path} LINE=" ^ string_of_int __LINE__ ^ " JSON=" ^ Yojson.Safe.pretty_to_string ${scrut})`]].map(([pat, expr]) => `${pat} -> ${expr}`).join('\n|')}
end
`;
};

let wrap_paren = s => `(${s})`;

// Emit OCaml code that serializes `subject` (an OCaml expression of the
// parsed type `o`) into a `Yojson.Safe.t` value. `path` is only used for
// error reporting. The whole expression is parenthesized.
let ocaml_yojson_of_type_expr = (o, subject, path) => {
    if (!path)
        throw "Path missing!";
    let { kind, payload } = o;
    return `(${(({
        option: type => `match ${subject} with | Option.Some x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'Some'])} | _ -> \`Null`,
        unit: _ => `\`Null`,
        tuple: types =>
            `let (${types.map((t, i) => 'x' + i)}) = ${subject} in \`List [${types.map((t, i) => ocaml_yojson_of_type_expr(t, 'x' + i, [...path, 'tuple', i])).join(';')}]`,
        array: type =>
            `\`List (List.map (fun x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'array'])}) ${subject})`,
        boolean: _ => `\`Bool ${subject}`,
        string: _ => `\`String ${subject}`,
        integer: _ => ({
            string: `\`Intlit ${subject}`,
            int64: `\`Intlit (Int64.to_string ${subject})`,
            int: `\`Int ${subject}`
        })[o.repr],
        char: _ => `\`String (Base.Char.to_string ${subject})`,
        name: payload => `yojson_of_${typeNameOf(payload)} ${subject}`,
    })[kind] || (_ => {
        log_full(o);
        throw "ocaml_arms_of_type_expr: bad kind " + kind;
    }))(payload)})`;
};


// Produce the OCaml match arms (`[pattern, expr]` pairs) that parse a JSON
// value into the parsed type `o`; consumed by `mk_match`, which adds the
// failing catch-all arm. `path` is only used for error reporting.
let ocaml_arms_of_type_expr = (o, path) => {
    if (!path)
        throw "Path missing!";
    let { kind, payload } = o;
    return (({
        option: type => [
            [`\`Null`, `Option.None`],
            ...ocaml_arms_of_type_expr(type, [...path, 'option']).map(([pat, expr]) => [pat, `Option.Some (${expr})`])
        ],
        unit: _ => [[`\`Null`, '()']],
        tuple: types => {
            // One nested match per component, joined into an OCaml tuple.
            let sub_matches = types.map((type, i) =>
                mk_match(`v${i}`, ocaml_arms_of_type_expr(type, [...path, 'tuple', i]), [...path, 'tuple']));
            return [
                [`\`List [${types.map((_, i) => `v${i}`).join(';')}]`,
                `(${sub_matches.join(',')})`
                ],
            ];
        },
        array: type => [
            [`\`List l`,
                `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l`
            ]
        ],
        boolean: _ => [[`\`Bool b`, 'b']],
        string: _ => [[`\`String s`, 's']],
        char: _ => [[`\`String s`, 'String.get s 0']],
        integer: _ => ({
            // Large ints may arrive either as `Int or as `Intlit literals.
            int64: [
                [`\`Int i`, 'Base.Int64.of_int i'],
                [`\`Intlit lit`, `(try Base.Int64.of_string lit with | _ -> failwith ("Base.Int64.of_string failed for " ^ lit))`]
            ],
            string: [
                [`\`Int i`, 'string_of_int i'],
                [`\`Intlit s`, 's']
            ],
            int: [
                [`\`Int i`, 'i'],
                [`\`Intlit s`, 'Base.Int.of_string s']
            ]
        })[o.repr],
        name: payload => [['remains', `${typeNameOf(payload)}_of_yojson remains`]],
    })[kind] || (_ => {
        log_full(o);
        throw "ocaml_arms_of_type_expr: bad kind " + kind;
    }))(payload);
};

// Extract the type name from a JSON-schema `$ref` such as
// `#/definitions/Foo`; any other shape is a fatal error (throws the input).
let parse_type_name = s => {
    if (!s.startsWith('#/definitions/'))
        throw s;
    const segments = s.split('/');
    return segments[segments.length - 1];
};

// Choose an OCaml representation wide enough for a JSON-schema integer
// format. 128-bit ints, `uint64` and `uint` (`uint`s are `usize`s actually,
// so it's safer to assume uint64, see
// https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21)
// don't fit an OCaml int64, so keep them as strings; `int64`/`uint32` need
// Int64; everything else fits a plain int.
let int_repr_of_format = format => {
    if (format.endsWith('int128') || format == 'uint64' || format == 'uint')
        return 'string';
    return (format == 'int64' || format == 'uint32') ? 'int64' : 'int';
};

// Family of recognizers over (cleaned) JSON-schema nodes. Each returns a
// parsed description on success and `false` otherwise, so they compose
// with `||` (see `expr`). Guard order in `expr` matters: more specific
// shapes are tried first.
let is_type = {
    // `anyOf: [T, {type:'null'}]` (as normalized by `clean`) is an option.
    option: def => {
        if (exact_keys(def, 'anyOf')
            && def.anyOf.length === 2
            && is_type.expr(def.anyOf[0])
            && exact_keys(def.anyOf[1], 'type')
            && def.anyOf[1].type === 'null'
        )
            return {
                kind: 'option',
                payload: is_type.expr(def.anyOf[0])
            };
        return false;
    },

    unit: def => {
        if (exact_keys(def, 'type')
            && def.type === 'null')
            return {
                kind: 'unit',
            };
        return false;
    },

    // `items` being an *array* of schemas means a fixed-arity tuple...
    tuple: def => {
        if (exact_keys(def, 'type', 'items')
            && def.type === 'array'
            && def.items instanceof Array
            && def.items.every(is_type.expr))
            return {
                kind: 'tuple',
                payload: def.items.map(is_type.expr)
            };
        return false;
    },

    // ...while a single `items` schema means a homogeneous list.
    array: def => {
        if (exact_keys(def, 'type', 'items')
            && def.type === 'array'
            && is_type.expr(def.items))
            return {
                kind: 'array',
                payload: is_type.expr(def.items),
            };
        return false;
    },

    // Any type expression: named `$ref`, option, array, unit, tuple,
    // integer (with a width-dependent repr), a length-1 string (OCaml
    // char), or a plain boolean/string.
    expr: def =>
        (exact_keys(def, '$ref') ? {
            kind: 'name', payload: parse_type_name(def['$ref'])
        } : false)
        || is_type.option(def)
        || is_type.array(def)
        || is_type.unit(def)
        || is_type.tuple(def)
        || (def.type === 'integer'
            ? { kind: 'integer', repr: int_repr_of_format(def.format) }
            : false)
        || (def.type === 'string' && def.maxLength === def.minLength && def.minLength === 1
            ? { kind: 'char' }
            : false)
        || ((exact_keys(def, 'type')
            && ['boolean', 'string'].includes(def.type)
        ) ? { kind: def.type } : false
        ) || false,

    // An object whose properties are all type expressions: returns the
    // field-name -> parsed-type mapping.
    record: def => {
        if ((eq(keys(def), new Set(["type", "required", "properties"]))
            || eq(keys(def), new Set(["type", "properties"]))
        )
            && def.type === "object"
            && (def.required || []).every(k => typeof k == 'string')
            && Object.values(def.properties).every(is_type.expr))
            return Object.fromEntries(Object.entries(def.properties).map(([n, v]) => [n, is_type.expr(v)]));
        return false;
    },

    // One or more variants: either a bare enum (empty payloads) or an
    // object with a single property whose value is the payload (an
    // expression or an inline record).
    // NOTE(review): relies on `exporters.enum.guard`, defined later in this
    // file — only safe because `variant` is called after `exporters` is set up.
    variant: def => {
        let doc = def.description;
        if (exporters.enum.guard(def))
            return def.enum.map(e => ({
                kind: 'variant',
                name: e,
                payloadKind: 'empty',
                payload: null,
                doc,
            }));
        if (exact_keys(def, 'type', 'required', 'properties')
            && def.type === 'object'
            && Object.values(def.properties).length == 1
        ) {
            let [name, value] = Object.entries(def.properties)[0];
            if (is_type.expr(value))
                return [{
                    kind: 'variant',
                    payloadKind: 'expr',
                    name,
                    payload: is_type.expr(value),
                    doc,
                }];
            if (is_type.record(value))
                return [{
                    kind: 'variant',
                    name,
                    payloadKind: 'record',
                    payload: is_type.record(value),
                    doc,
                }];
        }
        return false;
    },
};

// for (let k in is_type) {
//     is_type[k] = trace1(k, is_type[k]);
// }

// let trace = (name, f) => (...inputs) => {
//     let output = f(...inputs);
//     log_full({f: name, inputs, output});
//     return output;
// };

// Builds an OCaml `(pattern, expression)` pair that destructures a JSON
// `\`Assoc` association list into an OCaml record literal, one binding per
// field.  `fields` is a list of `[name, type-expr, doc]` triples; `path`
// threads a diagnostic trail through the generated matchers.
let export_record = (fields, path, name) => {
    const bindings = fields.map(([field, type, _doc]) => {
        // Hack: Rust DefId indexes are never round-tripped faithfully;
        // always parse them as zeros.
        if (field == 'index' && name == 'def_id_contents')
            return 'index = Base.Int64.(zero, zero, None)';
        const field_path = [...path, 'field_' + field];
        const inner = mk_match('x', ocaml_arms_of_type_expr(type, field_path), field_path);
        const lookup = `match List.assoc_opt "${field}" l with Option.Some x -> begin ${inner} end | Option.None -> raise (MissingField {field = "${field}"; fields = l})`;
        return `${fieldNameOf(field)} = begin ${lookup} end`;
    });
    return [`\`Assoc l`, `{ ${bindings.join(';\n')} }`];
};

let mkdoc = doc => doc ? ` (** ${doc} *)` : '';

// Table of exporters.  Each entry pairs a `guard` (recognizes one
// JSON-schema shape) with an `f` that compiles a recognized definition
// into an OCaml fragment `{type, parse, to_json}`: the type declaration
// body, the `_of_yojson` parser body, and the `yojson_of_` printer body.
// `export_definition` requires that exactly one guard matches a given
// definition.
// NOTE: `is_type.variant` (defined earlier) reads `exporters.enum.guard`;
// that forward reference is fine because nothing runs before `run()`.
let exporters = {
    // Tagged union: `{oneOf: [...]}` where every alternative is a variant
    // (empty, expression payload, or inline-record payload).
    oneOf: {
        guard: def => eq(keys(def), new Set(["oneOf"])) &&
            def.oneOf.every(is_type.variant),
        f: (name, { oneOf }) => {
            let variants = oneOf.map(is_type.variant).flat();
            // OCaml constructor declarations, one per alternative.
            let type = variants.map(({ kind, name: variant_name, payloadKind, payload, doc }) => {
                doc = mkdoc(doc);
                let variant = ensureUnique('variant', variantNameOf(variant_name));
                return ({
                    record: () => {
                        let fields = Object.entries(payload).map(([field, value]) =>
                            fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:' + variant + ':' + field]));
                        return `${variant} of {${fields.join(';\n')}}${doc}`;
                    },
                    expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:' + variant + ':' + name])})${doc}`,
                    empty: () => `${variant}${doc}`,
                }[payloadKind] || (() => {
                    throw "bad payloadKind: " + payloadKind;
                }))();
            }).join('\n     | ');
            // of_yojson: one match arm per alternative.
            // NOTE(review): unlike the type declaration above, this uses
            // variantNameOf without ensureUnique — presumably relies on
            // deterministic naming; confirm if variants can collide.
            let parse_arms = variants.map(({ kind, name: variant_name, payloadKind, payload }) => {
                let variant = variantNameOf(variant_name);
                let wrap = (arms, prefix = '') => [
                    [`\`Assoc ["${variant_name}", rec_value]`,
                    prefix + mk_match('rec_value', arms, ['rec-variant_' + variant + '_' + variant_name])
                    ]
                ];
                return ({
                    record: () => {
                        let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_' + variant + '_' + variant_name], name);
                        return wrap([[pat, variant + ' ' + expr]]);
                    },
                    expr: () => wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):' + name + ':' + variant + ':' + variant_name]), variant + ' '),
                    empty: () => [[`\`String "${variant_name}"`, variant]],
                }[payloadKind] || (() => {
                    throw "bad payloadKind: " + payloadKind;
                }))();
            }).flat();
            let parse = mk_match('o', parse_arms, [name + '_of_yojson']);
            // yojson_of: invert the encoding above.
            let to_json = `match o with ${variants.map(({ kind, name: variant_name, payloadKind, payload }) => {
                let variant = variantNameOf(variant_name);
                let wrap = (x, e) => `${variant} ${x} -> \`Assoc ["${variant_name}", ${e}]`;
                return ({
                    record: () => {
                        let fields = Object.entries(payload);
                        return wrap(
                            `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`,
                            `\`Assoc [${fields.map(([field, type], i) => `("${field}", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name + ':' + variant, 'variant', field])})`).join('; ')
                            }]`
                        );
                    },
                    expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name + ':' + variant, 'payload'])),
                    empty: () => `${variant} -> \`String "${variant_name}"`,
                }[payloadKind] || (() => {
                    throw "bad payloadKind: " + payloadKind;
                }))();
            }).join(' | ')}`;
            return { type, parse, to_json };
        },
    },
    // `{type: "object"}` or `{type: "null"}` with no other keys: a
    // payload-less placeholder, represented by a nullary constructor.
    empty_struct: {
        guard: def => (eq(keys(def), new Set(["type"])) && (def.type == 'object' || def.type == 'null')),
        f: (name, _) => {
            return {
                type: `EmptyStruct${name}`,
                parse: `EmptyStruct${name}`,
                to_json: '`Null',
            };
        },
    },
    // Any definition that is itself a plain type expression: wrap it in a
    // single-constructor `Newtype` so it gets a nominal OCaml type.
    newtype: {
        guard: def => !exporters['empty_struct'].guard(def) && is_type.expr(def, ["try-parse"]),
        f: (name, o) => {
            let path = [name + '-newtype-ref'];
            let te = is_type.expr(o, path);
            let ocaml_type = ocaml_of_type_expr(te, path);
            let arms = ocaml_arms_of_type_expr(te, path);
            let to_json = ocaml_yojson_of_type_expr(te, `(let Newtype${name} inner = o in inner)`, path);
            return {
                type: `Newtype${name} of ${ocaml_type}`,
                parse: `Newtype${name}(${mk_match('o', arms, path)})`,
                to_json,
            };
        },
    },
    // object is a *flat* record: compiled to an OCaml record type.
    object: {
        guard: def => (eq(keys(def), new Set(["type", "required", "properties"]))
            || eq(keys(def), new Set(["type", "properties"]))
        )
            && def.type === "object"
            && (def.required || []).every(k => typeof k == 'string')
            && Object.values(def.properties).every(is_type.expr),
        f: (name, { required, properties }) => {
            let fields = Object.entries(properties).map(
                ([name, prop]) => [name, is_type.expr(prop), prop.description]
            );

            let [pat, expr] = export_record(fields, ['struct_' + name], name);

            return {
                type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_' + fname + '_' + name])}${mkdoc(doc)}`).join(';\n')} }`,
                parse: mk_match('o', [[pat, expr]], ['struct_' + name]),
                to_json: //`let {${fields.map(([fname, type, doc]) => fieldNameOf(fname)).join(';')}} = o in`
                    `\`Assoc [${fields.map(([fname, type, doc]) => `("${fname}", ${ocaml_yojson_of_type_expr(type, 'o.' + fieldNameOf(fname), ['todo'])})`).join('; ')}]`
            };
        },
    },
    // String enum: compiled to an OCaml variant with nullary constructors.
    enum: {
        guard: def => eq(keys(def), new Set(["type", "enum"]))
            && def.type == "string",
        f: (name, o) => {
            assert(o.enum.every(x => typeof x == "string"), 'not every enum is a string');

            // Empty enum: emit OCaml's uninhabited variant type `|` with a
            // refutation case (`_ -> .`) on the printer side.
            if (o.enum.length == 0) {
                return {
                    type: '|',
                    parse: 'failwith "cannot parse an empty type"',
                    to_json: 'match o with _ -> .',
                };
            }

            // Δ is the original JSON string; `variant` the OCaml constructor.
            let variants = o.enum.map(n => ({
                Δ: n,
                variant: ensureUnique('variant', variantNameOf(n)),
                variantOriginName: n
            }));

            let parse_string
                = `match s with ` + variants.map(
                    ({ Δ, variant }) => `"${Δ}" -> ${variant}`
                ).join(' | ') + ` | s -> failwith ("unexpected variant [" ^ s ^ "] while parsing enum [${name}]")`;

            return {
                type: variants.map(({ variant }) => variant).join(' | '),
                parse: `  match o with
                        | \`String s -> (${parse_string})
                        | _ -> failwith "expected a string while parsing a ${name}"
                       `,
                to_json: `match o with ${variants.map(({ variant, variantOriginName }) => `${variant} -> \`String "${variantOriginName}"`).join(' | ')}`,
            };
        },
    },
};

// Compiles one schema definition through the unique exporter whose guard
// accepts it.  Returns `{name, type, parse, to_json}` (or a placeholder
// OCaml comment string if the exporter yields null).  Throws when zero or
// several exporters match — each definition must have exactly one shape.
let export_definition = (name, def) => {
    const matching = Object.entries(exporters)
        .filter(([_, { guard }]) => guard(def));

    if (matching.length != 1) {
        console.error(`ERROR: each definition should have exactly one suited exporter, but type "${name}" has the following exporter(s): ${JSON.stringify(matching.map(([n, _]) => n))}.`);
        console.error('name', name);
        log_full(def);
        console.error('xname', name);

        throw "kind error";
    }
    const [_, { f }] = matching[0];
    name = ensureUnique('type', typeNameOf(name));
    const result = f(name, def);
    if (result === null)
        return `(* type ${name} *)`;
    const { type, parse, to_json } = result;
    return { name, type, parse, to_json };
};

// Entry point of the generator: takes the JSON-schema text (produced by
// hax's Rust side) and returns the full OCaml module source implementing
// the corresponding types plus Yojson (de)serializers.
function run(str) {
    let contents = JSON.parse(str);
    const definitions = clean(contents.definitions);

    let sig = ``;

    // OCaml prelude: re-export the Base serializers/comparators the
    // generated `[@@deriving ...]` code expects, and silence all warnings
    // in the generated (machine-written) code.
    let impl = `include struct
open struct
  include Base.Hash.Builtin
  open Base
  let bool_of_sexp = bool_of_sexp
  let string_of_sexp = string_of_sexp
  let option_of_sexp = option_of_sexp
  let list_of_sexp = list_of_sexp
  let int_of_sexp = int_of_sexp
  let char_of_sexp = char_of_sexp
  let unit_of_sexp = unit_of_sexp
  let bool_of_sexp = bool_of_sexp

  let sexp_of_bool = sexp_of_bool
  let sexp_of_string = sexp_of_string
  let sexp_of_option = sexp_of_option
  let sexp_of_list = sexp_of_list
  let sexp_of_int = sexp_of_int
  let sexp_of_char = sexp_of_char
  let sexp_of_unit = sexp_of_unit
  let sexp_of_bool = sexp_of_bool

  let compare_bool = compare_bool
  let compare_string = compare_string
  let compare_option = compare_option
  let compare_list = compare_list
  let compare_int = compare_int
  let compare_char = compare_char
  let compare_unit = compare_unit
  let compare_bool = compare_bool
end
[@@@warning "-A"]
`;

    // Embed the schema's `$id` as the hax version string, via an `{escape|...|escape}`
    // quoted-string literal (escaping any occurrence of the terminator).
    impl += `let hax_version = {escape|${contents['$id'].replace(/\|escape\}/g, '|_escape}')}|escape}`;

    // Rename the rustc interning wrapper types, then compile every
    // definition.  `export_definition` may return a placeholder string
    // (not an Object), which the final filter drops.
    let items = Object.entries(definitions)
        .map(([name, def]) => ['Node_for_TyKind' == name ? 'node_for_ty_kind_generated' : name, def])
        .map(([name, def]) => ['Node_for_DefIdContents' == name ? 'node_for_def_id_contents_generated' : name, def])
        .map(([name, def]) => ['Node_for_ItemRefContents' == name ? 'node_for_item_ref_contents_generated' : name, def])
        .map(
            ([name, def]) => export_definition(name, def)
        ).filter(x => x instanceof Object);

    // Derivers applied to the whole recursive type block.
    let derive_items = ['show', 'eq', 'hash', 'sexp', 'compare'];

    // Exception raised by generated record parsers on a missing key.
    impl += `
module ParseError = struct
  exception MissingField of {
    fields: (string * Yojson.Safe.t) list;
    field: string
  }

  let pp = function
    | MissingField {fields; field} ->
       "Missing field [" ^ field ^ "], while looking at the following JSON: " ^ Yojson.Safe.pretty_to_string (\`Assoc fields)
    | e -> raise e
end

open ParseError

`;

    let derive_clause = derive_items.length ? `[@@deriving ${derive_items.join(', ')}]` : '';

    // One mutually-recursive `type ... and ...` block for all definitions.
    impl += (
        'type '
        + items.map(({ name, type }) =>
            `${name} = ${type}\n`
        ).join('\nand ')
        + derive_clause
    );
    // Aliases tying the interning-node names back to the generated ones,
    // plus the id-indexed cache used to share interned values.
    impl += `
and node_for__ty_kind = node_for_ty_kind_generated
and node_for__def_id_contents = node_for_def_id_contents_generated
and node_for__item_ref_contents = node_for_item_ref_contents_generated


type map_types = ${"[`TyKind of ty_kind | `DefIdContents of def_id_contents | `ItemRefContents of item_ref_contents]"}
let cache_map: (int64, ${"[ `Value of map_types | `JSON of Yojson.Safe.t ]"}) Base.Hashtbl.t = Base.Hashtbl.create (module Base.Int64)

module Exn = struct
let table_id_node_of_yojson (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) =
    let label = "table_id_node_of_yojson:" ^ name ^ ": " in
    match o with
    | \`Assoc alist -> begin
          let id = match List.assoc_opt "id" alist with
            | Some (\`Int id) -> Base.Int.to_int64 id
            | Some (\`Intlit lit) -> (try Base.Int64.of_string lit with | _ -> failwith (label ^ "Base.Int64.of_string failed for " ^ lit))
            | Some bad_json -> failwith (label ^ "id was expected to be an int, got: " ^ Yojson.Safe.pretty_to_string bad_json ^ "\n\n\nfull json: " ^ Yojson.Safe.pretty_to_string o)
            | None -> failwith (label ^ " could not find the key 'id' in the following json: " ^ Yojson.Safe.pretty_to_string o)
          in
          let decode v = decode v |> Base.Option.value_exn ~message:(label ^ "could not decode value (wrong type)") in
          match List.assoc_opt "value" alist with
          | Some json when (match json with \`Null -> false | _ -> true) ->
            (parse json, id)
          | _ ->
            let value = match Base.Hashtbl.find cache_map id with
            | None -> failwith (label ^ "failed to lookup id " ^ Base.Int64.to_string id)
            | Some (\`Value v) -> decode v
            | Some (\`JSON json) ->
                let value = parse json in
                Base.Hashtbl.set cache_map ~key:id ~data:(\`Value (encode value));
                value
            in (value, id)
       end
    | _ -> failwith (label ^ "expected Assoc")

`;
    impl += ('');
    // One mutually-recursive block of `<name>_of_yojson` parsers.
    impl += ('let rec ' + items.map(({ name, type, parse }) =>
        `${name}_of_yojson (o: Yojson.Safe.t): ${name} = ${parse}`
    ).join('\nand '));
    // Parsers for the interning nodes; note the table id is deliberately
    // dropped (`id = Base.Int64.zero`).
    impl += `
and node_for__ty_kind_of_yojson (o: Yojson.Safe.t): node_for__ty_kind =
   let (value, _id) =
       table_id_node_of_yojson "TyKind"
           (fun value -> \`TyKind value)
           (function | \`TyKind value -> Some value | _ -> None)
           ty_kind_of_yojson
           o
   in
   {value; id = Base.Int64.zero}
and node_for__def_id_contents_of_yojson (o: Yojson.Safe.t): node_for__def_id_contents =
   let (value, _id) =
       table_id_node_of_yojson "DefIdContents"
           (fun value -> \`DefIdContents value)
           (function | \`DefIdContents value -> Some value | _ -> None)
           def_id_contents_of_yojson
           o
   in
   {value; id = Base.Int64.zero}
and node_for__item_ref_contents_of_yojson (o: Yojson.Safe.t): node_for__item_ref_contents =
   let (value, _id) =
       table_id_node_of_yojson "ItemRefContents"
           (fun value -> \`ItemRefContents value)
           (function | \`ItemRefContents value -> Some value | _ -> None)
           item_ref_contents_of_yojson
           o
   in
   {value; id = Base.Int64.zero}
`;
    impl += ('');
    // One mutually-recursive block of `yojson_of_<name>` printers, then
    // the error-reporting wrappers that turn exceptions into a
    // user-facing version-mismatch message (stack trace dumped to a file).
    impl += ('let rec ' + items.map(({ name, type, parse, to_json }) =>
        `yojson_of_${name} (o: ${name}): Yojson.Safe.t = ${to_json}`
    ).join('\nand '));
    impl += `
and yojson_of_node_for__ty_kind {value; id} = yojson_of_node_for_ty_kind_generated {value; id}
and yojson_of_node_for__def_id_contents {value; id} = yojson_of_node_for_def_id_contents_generated {value; id}
and yojson_of_node_for__item_ref_contents {value; id} = yojson_of_node_for_item_ref_contents_generated {value; id}
end

open struct
  let catch_parsing_errors (type a b) (label: string) (f: a -> b) (x: a): (b, Base.Error.t) Base.Result.t = 
      try Base.Result.Ok (f x) with
      | e -> Base.Result.Error (Base.Error.of_exn ~backtrace:\`Get e)
  let unwrap = function 
    | Base.Result.Ok value -> value
    | Base.Result.Error err -> 
        let err =
            let path = Utils.tempfile_path ~suffix:".log" in
            Core.Out_channel.write_all path
                ~data:(Base.Error.to_string_hum err);
            path
        in
        prerr_endline [%string {|
Error: could not serialize or deserialize a hax value.
This error arises from an incompatibility betwen hax components: hax-engine, cargo-hax and hax-lib.
Potential fixes:
  - Make sure the version of \`hax-lib\` for the crate your are trying to extract matches the version of hax currently installed (%{hax_version}).
  - Run \`cargo clean\`
  - Reinstall hax

The full stack trace was dumped to %{err}.
|}];
        exit 1
end
`;


    // Public API: safe_* return a Result; the unprefixed names unwrap,
    // shadowing the raw Exn.* functions defined above.
    impl += (items.map(({ name, type, parse, to_json }) =>
        `
let safe_yojson_of_${name} = catch_parsing_errors "yojson_of_${name}" Exn.yojson_of_${name}
let safe_${name}_of_yojson = catch_parsing_errors "${name}_of_yojson" Exn.${name}_of_yojson
let yojson_of_${name} x = unwrap (safe_yojson_of_${name} x)
let ${name}_of_yojson x = unwrap (safe_${name}_of_yojson x)`
    ).join('\n'));

    return impl + ' \n end';
}

// Parses `node <script> INPUT_PATH OUTPUT_PATH` from `process.argv`;
// `-` denotes stdin/stdout.  Returns `{input_path, output_path}`.
// Fix: on misuse the usage message previously went to stdout and the
// process exited with status 0, so callers (shell pipelines, CI) could
// not detect the failure; print to stderr and exit non-zero instead.
function parse_args() {
    let [script_name, input_path, output_path, ...rest] = process.argv.slice(1);
    if (!input_path || !output_path || rest.length) {
        console.error(`
Usage: node ${script_name} INPUT_PATH OUTPUT_PATH

   INPUT_PATH and OUTPUT_PATH can be - to denote stdin or stdout
`);
        process.exit(1);
    }
    return { input_path, output_path };
}

// Drains an async iterable of Buffer chunks (e.g. process.stdin) and
// decodes the concatenation as a single UTF-8 string.
async function read(stream) {
    const pieces = [];
    for await (const piece of stream) {
        pieces.push(piece);
    }
    return Buffer.concat(pieces).toString('utf8');
}

// Reads the schema (file or stdin), runs the generator, and writes the
// resulting OCaml source (file or stdout).
async function main() {
    const fs = require('fs');
    const { input_path, output_path } = parse_args();
    const source = input_path == '-'
        ? await read(process.stdin)
        : fs.readFileSync(input_path, 'utf-8');
    const generated = run(source);
    if (output_path == '-') {
        process.stdout.write(generated);
    } else {
        fs.writeFileSync(output_path, generated);
    }
}

main();



================================================
FILE: engine/utils/ppx_functor_application/README.md
================================================
# `ppx_functor_application`

## Motivation
The engine consists of numerous phases, implemented as OCaml functors
parametrized over "AST features" (see the book). Two phases can be
bound (sequenced) via the `Phase_utils.BindPhase` functor.

Since OCaml does not define (or let users define) infix notations for
functor application, combining many phases (functors) results in the
following Christmas-tree-looking kind of code:

```ocaml
struct
    module ARG0 = (Phases.Reject.RawOrMutPointer)(Features.Rust)
    module ARG1 = (Phases.Transform_hax_lib_inline)(ARG0.FB)
    module ARG2 = (Phases.Specialize)(ARG1.FB)
    module ARG3 = (Phases.Drop_sized_trait)(ARG2.FB)
    module ARG4 = (Phases.Simplify_question_marks)(ARG3.FB)
    module ARG5 = (Phases.And_mut_defsite)(ARG4.FB)
    module ARG6 = (Phases.Reconstruct_for_loops)(ARG5.FB)
    module ARG7 = (Phases.Reconstruct_while_loops)(ARG6.FB)
    module ARG8 = (Phases.Direct_and_mut)(ARG7.FB)
    module ARG9 = (Phases.Reject.Arbitrary_lhs)(ARG8.FB)
    module ARG10 = (Phases.Drop_blocks)(ARG9.FB)
    module ARG11 = (Phases.Drop_references)(ARG10.FB)
    module ARG12 = (Phases.Trivialize_assign_lhs)(ARG11.FB)
    module ARG13 = (Side_effect_utils.Hoist)(ARG12.FB)
    module ARG14 = (Phases.Simplify_match_return)(ARG13.FB)
    module ARG15 = (Phases.Drop_needless_returns)(ARG14.FB)
    module ARG16 = (Phases.Local_mutation)(ARG15.FB)
    module ARG17 = (Phases.Reject.Continue)(ARG16.FB)
    module ARG18 = (Phases.Cf_into_monads)(ARG17.FB)
    module ARG19 = (Phases.Reject.EarlyExit)(ARG18.FB)
    module ARG20 = (Phases.Functionalize_loops)(ARG19.FB)
    module ARG21 = (Phases.Reject.As_pattern)(ARG20.FB)
    module ARG22 = (Phases.Traits_specs)(ARG21.FB)
    module ARG23 = (Phases.Simplify_hoisting)(ARG22.FB)
    module ARG24 = (Phases.Newtype_as_refinement)(ARG23.FB)
    module ARG25 = (SubtypeToInputLanguage)(ARG24.FB)
    module ARG26 = (Identity)(ARG25.FB)
    include
        ((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(ARG0))(ARG1)))(ARG2)))(ARG3)))(ARG4)))(ARG5)))(ARG6)))(ARG7)))(ARG8)))(ARG9)))(ARG10)))(ARG11)))(ARG12)))(ARG13)))(ARG14)))(ARG15)))(ARG16)))(ARG17)))(ARG18)))(ARG19)))(ARG20)))(ARG21)))(ARG22)))(ARG23)))(ARG24)))(ARG25)))(ARG26)
end
```

The system of phases is supposed to let backends opt-in or out easily
for phases. This syntactic limitation was a major issue for that.

## Solution
This PPX defines a small DSL that embeds in the OCaml syntax of
expressions to provide a nice way of binding phases functors via a
`|>` infix operator.

Example:
```ocaml
module TransformToInputLanguage =
  [%functor_application
  Phases.Reject.RawOrMutPointer(Features.Rust)
  |> Phases.Transform_hax_lib_inline
  |> Phases.Specialize
  |> Phases.Drop_sized_trait
  |> Phases.Simplify_question_marks
  |> Phases.And_mut_defsite
  |> Phases.Reconstruct_for_loops
  |> Phases.Reconstruct_while_loops
  |> SubtypeToInputLanguage
  |> Identity
  ]
  [@ocamlformat "disable"]
```

Note: the `[@ocamlformat "disable"]` annotation is important,
otherwise `ocamlformat` tries to format those PPX invocations with its
rules for expressions, yielding rather ugly-looking code...

### Syntax
 - `Name`: a module `Name`
 - `Name(X, Y, Z)`: the application of the functor `Name` with three arguments `X`, `Y` and `Z`
 - `(module <M>)`: the arbitrary OCaml module expression `<M>`
 - `<F> <X>`: the application of the module described by `<F>` to the module described by `<X>`
 - `(fun X -> <M>)`: a "functor" from `X` to `<M>`
 - `<A> |> <B>`: `<A>` bound with `<B>`


================================================
FILE: engine/utils/ppx_functor_application/dune
================================================
; PPX rewriter library providing the [%functor_application ...] extension
; (shipped with the hax-engine package).
(library
 (name ppx_functor_application)
 (package hax-engine)
 (kind ppx_rewriter)
 (libraries ppxlib base)
 (preprocess
  (pps ppxlib.metaquot ppx_deriving.eq ppx_deriving.show)))

; Turn off warnings-as-errors globally, but keep warning 8
; (non-exhaustive pattern matches) fatal.
(env
 (_
  (flags
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/utils/ppx_functor_application/ppx_functor_application.ml
================================================
open Base
open Ppxlib
module Format = Stdlib.Format

(* Name of the extension point: [%functor_application ...]. *)
let name = "functor_application"

(* Alias so [@@deriving show] below can refer to [longident]. *)
type longident = Longident.t

let show_longident = Longident.name

let pp_longident (fmt : Format.formatter) (s : longident) : unit =
  Format.pp_print_string fmt @@ show_longident s

(* Pretty-prints a pattern via Pprintast, buffering the formatter output. *)
let string_of_pattern p =
  let s = Buffer.create 0 in
  let fmt = Format.formatter_of_buffer s in
  Pprintast.pattern fmt p;
  Format.pp_print_flush fmt ();
  Buffer.contents s

(* Same, for module expressions. *)
let string_of_module_expr p =
  let s = Buffer.create 0 in
  let fmt = Format.formatter_of_buffer s in
  Pprintast.module_expr fmt p;
  Format.pp_print_flush fmt ();
  Buffer.contents s

let show_module_expr = string_of_module_expr

let pp_module_expr (fmt : Format.formatter) (s : module_expr) : unit =
  Format.pp_print_string fmt @@ string_of_module_expr s

(** Defines a DSL for functor application: module variables, functor
    application, raw OCaml module expressions, abstraction, [|>] pipelines,
    and [Meta] nodes carrying source locations. *)
type module_dsl =
  | Var of longident
  | App of module_dsl * module_dsl
  | ModExpr of module_expr
  | Abs of string * module_dsl
  | Pipe of module_dsl list
  | Meta of module_dsl * (location[@opaque])
[@@deriving show]

(* Lifts a bare name into a [Var] node. *)
let var_of_string s = Var (Longident.Lident s)

(** Elaborate a OCaml module expression from a `module_dsl` *)
let rec elab ~loc (t : module_dsl) : module_expr =
  let (module E) = Ast_builder.make loc in
  let h = elab ~loc in
  match t with
  (* Meta nodes only refine the location used for elaboration. *)
  | Meta (x, loc) -> elab ~loc x
  | Var x -> E.pmod_ident { txt = x; loc }
  | ModExpr m -> m
  (* Applying an abstraction: bind the argument as a named submodule, then
     include the body (a beta-reduction spelled with [include]). *)
  | App ((Abs (arg, m) | Meta (Abs (arg, m), _)), x) ->
      E.pmod_structure
        [
          E.pstr_module
          @@ E.module_binding ~name:{ loc; txt = Some arg } ~expr:(h x);
          E.pstr_include @@ E.include_infos @@ h m;
        ]
  | App (f, x) -> E.pmod_apply (h f) (h x)
  (* A pipeline [x |> f1 |> ... |> fn]: emit ARG0 = x, then
     ARG(i+1) = fi(ARG i .FB), and finally include the left fold
     BindPhase(...(BindPhase ARG0 ARG1)...) ARGn — exactly the
     "christmas tree" shown in the README, but generated. *)
  | Pipe (x :: funs) ->
      let x = h x in
      let nth_arg nth = "ARG" ^ Int.to_string nth in
      let arg0 = E.pmod_ident { loc; txt = Lident (nth_arg 0) } in
      let binds =
        List.mapi
          ~f:(fun i _ ->
            E.pmod_ident { txt = Lident (nth_arg @@ (i + 1)); loc })
          funs
        |> List.fold_left ~init:arg0 ~f:(fun x y ->
               let bind = E.pmod_ident { loc; txt = Lident "BindPhase" } in
               let ( <| ) = E.pmod_apply in
               bind <| x <| y)
      in
      E.pmod_structure
      @@ [%stri module ARG0 = [%m x]]
         :: List.concat_mapi
              ~f:(fun nth fn ->
                let nth_var = Var (Ldot (Lident (nth_arg nth), "FB")) in
                let new_arg = App (fn, nth_var) in
                [
                  E.pstr_module
                  @@ E.module_binding
                       ~name:{ loc; txt = Some (nth_arg @@ (nth + 1)) }
                       ~expr:(h new_arg);
                ])
              funs
      @ [%str include [%m binds]]
  (* [normalize] collapses such pipes before elaboration. *)
  | Pipe _ -> failwith "Illegal pipe: singleton or empty"
  | Abs _ -> failwith "Top-level abstraction"

(** Flattens nested [Pipe] nodes (possibly wrapped in [Meta]) into a flat
    list of pipeline stages; any other node is a singleton stage. *)
let rec collect_pipes (t : module_dsl) : module_dsl list =
  match t with
  | Pipe stages | Meta (Pipe stages, _) ->
      List.concat_map stages ~f:collect_pipes
  | other -> [ other ]

(** Get rid of extra [Pipe] nodes: nested pipes are flattened and a
    singleton pipe is replaced by its sole stage. *)
let rec normalize (t : module_dsl) : module_dsl =
  match t with
  | Var _ | ModExpr _ -> t
  | App (f, x) -> App (normalize f, normalize x)
  | Abs (x, body) -> Abs (x, normalize body)
  | Meta (inner, loc) -> Meta (normalize inner, loc)
  | Pipe _ -> (
      match collect_pipes t with
      | [] -> failwith "Empty pipe"
      | [ only ] -> only
      | stages -> Pipe stages)

(** Recognize a small language embedded in OCaml syntax for applying functors in
    chain.  Every parsed node is wrapped in [Meta] to remember its source
    location for later elaboration. *)
let rec parse expr =
  let r =
    match expr with
    | { pexp_desc = Pexp_construct ({ txt; _ }, None); _ } ->
        (* Parses variables (module names are uppercase, since we are looking at OCaml expressions, so we match on constructors)  *)
        Var txt
    | { pexp_desc = Pexp_construct ({ txt; _ }, Some arg); _ } ->
        (* Parses module applications (same as above: in expressions, module applications are parsed as constructor applications) *)
        App (Var txt, parse arg)
    | [%expr [%e? m1] |> [%e? m2]] ->
        (* Parses `... |> ...` infix module application *)
        Pipe [ parse m1; parse m2 ]
    | [%expr (module [%m? m])] ->
        (* Parses module expressions (in this case, that corresponds to OCaml module expression) *)
        ModExpr m
    | [%expr [%e? f] [%e? x]] ->
        (* Parses module applications (e.g. `(fun x -> ...) (module YXZ)`) *)
        App (parse f, parse x)
    | [%expr fun [%p? x] -> [%e? body]] -> (
        (* Parses module abstractions (e.g. `fun X -> Z(X)`); the bound
           name must be a bare uppercase identifier. *)
        match x with
        | { ppat_desc = Ppat_construct ({ txt = Lident x; _ }, None); _ } ->
            Abs (x, parse body)
        | _ -> failwith @@ "Out of language: " ^ string_of_pattern x)
    | _ -> failwith @@ "Out of language: " ^ Pprintast.string_of_expression expr
  in
  Meta (r, expr.pexp_loc)

(** PPX entry point: parse the payload expression into the DSL, normalize
    away redundant pipes, then elaborate the module expression at the
    extension point's location. *)
let expand ~(ctxt : Expansion_context.Extension.t) (e : expression) :
    module_expr =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  parse e |> normalize |> elab ~loc

(* Declare the [%functor_application ...] extension on module expressions;
   the payload must be a single expression, handed to [expand]. *)
let ext =
  Extension.V3.declare name Extension.Context.module_expr
    Ast_pattern.(pstr (pstr_eval __ drop ^:: nil))
    expand

(* Register the extension as a context-free rewrite rule with the driver. *)
let rule = Ppxlib.Context_free.Rule.extension ext
let () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name


================================================
FILE: engine/utils/ppx_generate_features/README.md
================================================
# `ppx_generate_features`

Specific to `hax-engine`: 
    - generates a `FEATURES` module type;
    - modules `Off` and `On` of type `FEATURES`, one with every feature type set to `on`, the other with every feature type set to `off`;
    - a `SUBSET.T` module type that describe a subtyping relation between two modules of type `FEATURES`;
    - a `SUBSET.Id` module that maps every feature to themselves.
    
This PPX aims to alleviate the pain of adding new features.



================================================
FILE: engine/utils/ppx_generate_features/dune
================================================
; PPX rewriter library providing the [%%declare_features ...] extension
; (shipped with the hax-engine package).
(library
 (name ppx_generate_features)
 (package hax-engine)
 (kind ppx_rewriter)
 (libraries ppxlib base)
 (preprocess
  (pps ppxlib.metaquot ppx_deriving.eq ppx_deriving.show)))

; Turn off warnings-as-errors globally, but keep warning 8
; (non-exhaustive pattern matches) fatal.
(env
 (_
  (flags
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/utils/ppx_generate_features/ppx_generate_features.ml
================================================
open Base
open Ppxlib

let name = "declare_features"

(** Uppercases the first character of [s] (e.g. ["loop"] becomes ["Loop"]);
    the empty string is returned unchanged.  Uses Base's [String.prefix] /
    [String.drop_prefix], which are total on any length. *)
let uppercase_first_char (s : string) : string =
  let head = String.prefix s 1 in
  let tail = String.drop_prefix s 1 in
  String.uppercase head ^ tail

(** [rename l] builds an AST-mapping object that substitutes identifiers
    according to the association list [l] of [(original, replacement)]
    pairs; names not listed are left untouched.  Used to instantiate the
    [placeholder] templates below once per feature name. *)
let rename (l : (string * string) list) =
  (* Looks up [s] in [l], defaulting to [s] itself. *)
  let h (s : string) =
    List.find_map
      ~f:(fun (s', replace) -> if String.equal s s' then Some replace else None)
      l
    |> Option.value ~default:s
  in
  object
    inherit Ast_traverse.map
    method! string = h
    method! label = h

    (* Rewrites longidents component-wise; note that for [Ldot] only the
       final component goes through [h] directly. *)
    method! longident =
      let rec r = function
        | Lapply (x, y) -> Lapply (r x, r y)
        | Ldot (x, y) -> Ldot (r x, h y)
        | Lident x -> Lident (h x)
      in
      r
  end

let expand ~(ctxt : Expansion_context.Extension.t) (features : string list) :
    structure_item =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let (module B) = Ast_builder.make loc in
  [%stri
    include struct
      module type FEATURES = sig
        include
          [%m
        List.map
          ~f:(fun txt ->
            (rename [ ("placeholder", txt) ])#signature_item
              [%sigi:
                type placeholder
                [@@deriving show, yojson, hash, compare, sexp, hash, eq]])
          features
        |> B.pmty_signature]
      end

      module type T = FEATURES

      module Enumeration = struct
        [%%i
        let decl =
          B.type_declaration ~name:{ loc; txt = "t" } ~params:[] ~cstrs:[]
            ~kind:
              (Ptype_variant
                 (List.map
                    ~f:(fun txt ->
                      B.constructor_declaration
                        ~name:{ loc; txt = uppercase_first_char txt }
                        ~args:(Pcstr_tuple []) ~res:None)
                    features))
            ~private_:Public ~manifest:None
        in
        B.pstr_type Recursive
          [
            {
              decl with
              ptype_attributes =
                [
                  B.attribute ~name:{ loc; txt = "deriving" }
                    ~payload:
                      (PStr
                         [%str
                           show { with_path = false },
                           yojson,
                           hash,
                           compare,
                           sexp,
                           hash,
                           eq]);
                ];
            };
          ]]
      end

      (*
      module MapFeatureTypes (T : sig
        type t [@@deriving show, yojson, hash, eq]
      end) =
      struct
        include T

        include
          [%m
          List.concat_map
            ~f:(fun txt ->
              (rename
                 [
                   ("placeholder", txt);
                   ("Placeholder", uppercase_first_char txt);
                 ])
                #structure
                [%str
                  module Placeholder = struct
                    type placeholder = Placeholder of T.t [@@deriving show, yojson, hash, eq]
                  end
                      
                  include Placeholder])
            features
          |> B.pmod_structure]
      end

      module On = MapFeatureTypes (struct
        type t = on [@@deriving show, yojson, hash, eq]
      end)

      module Off = MapFeatureTypes (struct
        type t = off [@@deriving show, yojson, hash, eq]
            end)
            *)

      module On =
        [%m
        List.concat_map
          ~f:(fun txt ->
            (rename
               [
                 ("placeholder", txt); ("Placeholder", uppercase_first_char txt);
               ])
              #structure
              [%str
                module Placeholder : sig
                  type placeholder
                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]

                  val placeholder : placeholder
                end = struct
                  type placeholder = Placeholder
                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]

                  let placeholder = Placeholder
                end

                include Placeholder])
          features
        |> B.pmod_structure]

      module ToFull =
        [%m
        List.concat_map
          ~f:(fun txt ->
            (rename
               [
                 ("placeholder", txt); ("Placeholder", uppercase_first_char txt);
               ])
              #structure
              [%str let placeholder _ = On.placeholder])
          features
        |> B.pmod_structure]

      module Off =
        [%m
        List.concat_map
          ~f:(fun txt ->
            (rename
               [
                 ("placeholder", txt); ("Placeholder", uppercase_first_char txt);
               ])
              #structure
              [%str
                module Placeholder = struct
                  type placeholder = |
                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]
                end

                include Placeholder])
          features
        |> B.pmod_structure]

      module SUBTYPE = struct
        module type T = sig
          module A : FEATURES
          module B : FEATURES

          include
            [%m
          List.map
            ~f:(fun txt ->
              (rename [ ("placeholder", txt) ])#signature_item
                [%sigi:
                  val placeholder : Span.t -> A.placeholder -> B.placeholder])
            features
          |> B.pmty_signature]
        end

        module type MAPPER = sig
          val map :
            'a 'b. (Span.t -> 'a -> 'b) -> Enumeration.t -> Span.t -> 'a -> 'b
        end

        module Map (S : T) (Mapper : MAPPER) =
          [%m
          let f txt =
            [%stri
              let [%p B.ppat_var { loc; txt }] =
                let kind =
                  [%e
                    B.pexp_construct
                      {
                        loc;
                        txt =
                          Ldot (Lident "Enumeration", uppercase_first_char txt);
                      }
                      None]
                in
                let f =
                  [%e B.pexp_ident { loc; txt = Ldot (Lident "S", txt) }]
                in
                Mapper.map f kind]
          in
          B.pmod_structure @@ ([%stri include S] :: List.map ~f features)]

        module On =
          [%m
          List.concat_map
            ~f:(fun txt ->
              (rename
                 [
                   ("placeholder", txt);
                   ("Placeholder", uppercase_first_char txt);
                 ])
                #structure
                [%str
                  module Placeholder = struct
                    let placeholder _span _witness = On.placeholder
                  end

                  include Placeholder])
            features
          |> B.pmod_structure]

        module Reject (R : sig
          val reject : 'a. unit -> 'a
        end) =
          [%m
          List.concat_map
            ~f:(fun txt ->
              (rename
                 [
                   ("placeholder", txt);
                   ("Placeholder", uppercase_first_char txt);
                 ])
                #structure
                [%str
                  module Placeholder = struct
                    let placeholder _span _witness = R.reject ()
                  end

                  include Placeholder])
            features
          |> B.pmod_structure]

        module Id =
          [%m
          List.map
            ~f:(fun txt ->
              [%stri let [%p B.ppat_var { loc; txt }] = fun _span -> Base.Fn.id])
            features
          |> B.pmod_structure]
      end
    end]
(* let attrs = *)
(*   attributes_of_structure_item str *)
(*   |> List.filter_map ~f:(fun attr -> *)
(*          match string_of_payload ~loc attr.attr_payload with *)
(*          | Result.Ok payload -> Some (attr.attr_name.txt, payload) *)
(*          | _ -> None) *)
(* in *)
(* let opens = *)
(*   List.filter_map *)
(*     ~f:(fun (name, path) -> *)
(*       if String.equal name "add" then Some path else None) *)
(*     attrs *)
(* in *)
(* (map_inline_nodes opens loc)#structure_item str *)

(* Declares the structure-item extension point for this ppx (registered under
   [name]).  The payload must be a tuple of bare identifiers — the feature
   names — which [expand] receives as a string list.  Any other payload shape
   is rejected by the [Ast_pattern]. *)
let ext =
  Extension.V3.declare name Extension.Context.structure_item
    (* Ast_pattern.(pstr ((pstr_eval (pexp_tuple (many __) drop) ^:: nil))) *)
    Ast_pattern.(
      pstr (pstr_eval (pexp_tuple (many (pexp_ident @@ lident __))) drop ^:: nil))
    expand

(* Wrap the extension in a context-free rule and register it with the ppxlib
   driver under [name]. *)
let rule = Ppxlib.Context_free.Rule.extension ext
let () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name


================================================
FILE: engine/utils/ppx_inline/README.md
================================================
# `ppx_inline`

Inlines chunks of OCaml AST in place.

Rewrite `[%%inline_defs L]`, `let rec ... [@@inline_ands L]`, `[%%inline_arms L]`, `[%%inline_body PATH]` inside nodes `[%%inlined_contents NODE]`, where:
 - `L` is a (`+`/`-`-separated) list of `QUALIFIED-PATH`s specifying which chunk of AST we should inline;
 - `QUALIFIED-PATH` is either a plain `PATH` or `bindings_of PATH` (the latter means all let/and bindings in a `let rec ... and ...` bundle);
 - `PATH` is a `.`-separated list of strings, possibly containing the `*` glob.

## Example:
File `some_module.ml`:
```ocaml
let f x = x + 1
let g x = x + 2
let f' x = x + 3

module M = struct
    let w = 0
    let x = 1
    let y = 2
    let z = 3
end

let h x = ""
type foo = | A | B
let i (x: foo) =
    match x with
    | A -> 0
    | B -> 1

let rec bundle_1 x = bundle_2 x + 1
and bundle_2 y = bundle_3 y + 1
and bundle_3 z = z + 1
```

The module:
```ocaml
module%inlined_contents [@@add "some_module.ml"] Test = struct
    [%%inline_defs f + g + foo]
    [%%inline_defs "M.*" - z - y]

    let h: int -> string = [%%inline_body h]
    let i: foo -> int =
        match i with
      | [%%inline_arms "i.*" - B] -> dummy
      | B -> 123

    let rec bundle_1 x = bundle_2 x + 123
        [@@inline_ands bindings_of bundle_1]
end
```

Will be rewritten into:
```ocaml
module%inlined_contents [@@add "some_module.ml"] Test = struct

    (* [%%inline_defs f + g + foo] *)
    let f x = x + 1
    let g x = x + 2
    type foo = | A | B

    (* [%%inline_defs "M.*" - z - y] *)
    let w = 0
    let x = 1

    let h: int -> string = (fun x -> "")
    let i: foo -> int = 
        match i with
      | A -> 0
      | B -> 123

    let rec bundle_1 x = bundle_2 x + 123
    and bundle_2 y = bundle_3 y + 1
    and bundle_3 z = z + 1
end
```



================================================
FILE: engine/utils/ppx_inline/dune
================================================
; PPX rewriter that inlines chunks of OCaml AST in place (see README.md).
(library
 (name ppx_inline)
 (package hax-engine)
 (kind ppx_rewriter)
 (libraries ppxlib base)
 (preprocess
  (pps ppxlib.metaquot ppx_deriving.eq ppx_compare ppx_deriving.show)))

; Demote all warnings from errors, except warning 8 (non-exhaustive match)
; which stays fatal.
(env
 (_
  (flags
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/utils/ppx_inline/ppx_inline.ml
================================================
open Base
open Ppxlib

let name = "inlined_contents"

(* Returns the constructor's longident when [p] is a constructor pattern,
   [None] otherwise. *)
let cons_lid_of_pattern (p : pattern) =
  match p.ppat_desc with
  | Ppat_construct (lid, _) -> Some lid.txt
  | _ -> None

(* The variable name bound by a simple [Ppat_var] pattern, if any. *)
let name_of_pattern (p : pattern) =
  match p.ppat_desc with
  | Ppat_var v -> Some v.txt
  | _ -> None

(* The name bound by a value binding, when its pattern is a plain variable. *)
let name_of_binding b = name_of_pattern b.pvb_pat

(* The three kinds of AST chunks that can be inlined.  Payloads are [@opaque]
   for [show]: printing whole AST nodes would be unreadable. *)
type inlinable_item_kind =
  | MatchCase of (case[@opaque])
  | Binding of (value_binding[@opaque])
  | StrItem of (structure_item[@opaque])
[@@deriving show]

(* Payload-free mirror of [inlinable_item_kind], used in error reports. *)
type inlinable_item_kind_head = MatchCase | Binding | StrItem
[@@deriving show]

let head_of : inlinable_item_kind -> inlinable_item_kind_head = function
  | MatchCase _ -> MatchCase
  | Binding _ -> Binding
  | StrItem _ -> StrItem

(* An AST chunk addressable by its dot-path (module path + name). *)
type inlinable_item = { path : string list; kind : inlinable_item_kind }
[@@deriving show]

(* An AST traversal that appends every "inlinable item" it encounters to
   [result]: whole structure items, individual value bindings, and match
   cases headed by a constructor pattern.  The traversal context is the
   module path (a string list) leading to the current node. *)
let collect_ast_nodes (result : inlinable_item list ref) =
  let add (l : inlinable_item list) = result := !result @ l in
  object
    inherit [string list] Ast_traverse.map_with_context as super

    (* Entering [module Foo = ...] pushes "Foo" onto the context path. *)
    method! module_binding path x =
      let path =
        match x.pmb_name.txt with Some name -> path @ [ name ] | None -> path
      in
      super#module_binding path x

    (* Each named [let] binding is recorded as a [Binding] under
       [path @ [name]]; the extended path is also the context for children. *)
    method! value_binding path x =
      let path =
        match name_of_pattern x.pvb_pat with
        | Some name ->
            let path = path @ [ name ] in
            add @@ [ { path; kind = Binding x } ];
            path
        | None -> path
      in
      super#value_binding path x

    (* Whole structure items are recorded once per name they define: a
       [let ... and ...] item under each of its bindings' names, a type
       declaration under each declared type's name. *)
    method! structure_item path s =
      (match s.pstr_desc with
      | Pstr_value (_, bindings) ->
          List.iter bindings ~f:(fun { pvb_pat; _ } ->
              match name_of_pattern pvb_pat with
              | Some n -> add [ { path = path @ [ n ]; kind = StrItem s } ]
              | _ -> ())
      | Pstr_type (_, bindings) ->
          List.iter bindings ~f:(fun { ptype_name = { txt = n; _ }; _ } ->
              add [ { path = path @ [ n ]; kind = StrItem s } ])
      | _ -> ());
      super#structure_item path s

    (* Match cases whose left-hand side is a constructor pattern are recorded
       as [MatchCase] under [path @ [constructor-name]]. *)
    method! expression path e =
      let e' = super#expression path e in
      match e.pexp_desc with
      | Pexp_match (_, cases) ->
          add
          @@ List.filter_map
               ~f:(fun case ->
                 match cons_lid_of_pattern case.pc_lhs with
                 | Some chunk ->
                     Some
                       {
                         path = path @ [ Longident.last_exn chunk ];
                         kind = MatchCase case;
                       }
                 | None -> None)
               cases;
          e'
      | _ -> e'
  end

(* An AST mapper that overwrites every location with [location]. *)
let replace_every_location (location : location) =
  object
    inherit Ast_traverse.map
    method! location _ = location
  end

(* Searches the current working directory tree for a file named [name] and
   returns its path; aborts with an explicit message when none exists.
   Directories are always recursed into first, so a directory that happens to
   be called [name] is never itself returned. *)
let locate_module (name : string) : string =
  let rec search path =
    if Stdlib.Sys.is_directory path then
      Stdlib.Sys.readdir path
      |> Array.find_map ~f:(fun entry ->
             search (Stdlib.Filename.concat path entry))
    else if String.(Stdlib.Filename.basename path = name) then Some path
    else None
  in
  search (Stdlib.Sys.getcwd ())
  |> Option.value_exn ~message:("ppx_inline: could not locate module " ^ name)

(* Parses the module whose file is named [path] (located anywhere under the
   current working directory) and collects its inlinable items, with every
   location replaced by [loc].  Results are memoized per file name.
   NOTE(review): the memo key is the file name only, so the [loc] of the
   first call is baked into the cached items — later calls with a different
   [loc] reuse the old locations.  Confirm this is acceptable. *)
let inlinable_items_of_module : loc:location -> string -> inlinable_item list =
  let memo = Hashtbl.create (module String) in
  fun ~loc path ->
    Hashtbl.find_or_add memo
      ~default:(fun () ->
        let results = ref [] in
        let _ =
          locate_module path |> Stdlib.open_in |> Lexing.from_channel
          |> Parse.implementation |> (replace_every_location loc)#structure
          |> (collect_ast_nodes results)#structure [ path ]
        in
        !results)
      path

(* Collects the inlinable items of every module in the list. *)
let inlinable_items_of_modules ~loc (modules : string list) :
    inlinable_item list =
  List.concat_map modules ~f:(fun m -> inlinable_items_of_module ~loc m)

(* One candidate listed in a [NotFound] report: its path, its kind, whether
   the glob matched it (preselected) and whether the caller's filter kept it
   (postselected). *)
type not_found_available_item = {
  path : string list;
  head : inlinable_item_kind_head;
  preselected : bool;
  postselected : bool;
}
[@@deriving show]

(* Errors raised while resolving inline directives. *)
type inline_error =
  | NotFound of {
      search : string list;
      available : not_found_available_item list;
      context : string;
    }
  | NotPlusMinusList
[@@deriving show]

(* Renders an [inline_error] as a human-readable message.  For [NotFound],
   every available item is listed, marked with [A] when the glob matched it
   (preselected) and with [B] when the caller's filter kept it
   (postselected). *)
let display_inline_error = function
  | NotFound o ->
      let pre_ = "A" in
      let post_ = "B" in
      let h = String.concat ~sep:"." in
      "Ppx_inline.NotFound:\nCannot find any item given glob [" ^ h o.search
      ^ "] (context: " ^ o.context ^ ").\nAvailable items: ([" ^ pre_
      ^ "] means preselected, [" ^ post_ ^ "] means postselected)"
      ^ String.concat ~sep:""
      @@ List.map
           ~f:(fun { path = i; head; preselected; postselected } ->
             let kind =
               (match head with
               | MatchCase -> "case"
               | Binding -> "let "
               | StrItem -> "str ")
               ^ " "
             in
             "\n• "
             ^ (if preselected then pre_ else " ")
             (* Bug fix: the postselected marker was inverted — [B] was
                printed when the item was NOT postselected, contradicting the
                legend above. *)
             ^ (if postselected then post_ else " ")
             ^ " " ^ kind ^ "\t" ^ h i)
           o.available
  | NotPlusMinusList -> "Ppx_inline.NotPlusMinusList"

exception InlineError of inline_error

let raise_inline_err x = raise @@ InlineError x

(* Polarity of an atom in a [+]/[-] directive list. *)
type flag = Include | Exclude [@@deriving show]

(* [bindings_of p] selects every binding of the [let rec ... and ...] bundle
   at path [p]. *)
type qualifier = AllBindings [@@deriving show]

(* One atom of a directive: a dot-path plus an optional qualifier. *)
type pm_atom = { apath : string list; aqualifier : qualifier option }
[@@deriving show]

(* Parses a [+]/[-]-separated expression (e.g. [a + "M.*" - b]) into a list
   of flagged atoms.  A [-] flips every atom of its right operand to
   [Exclude].  An atom is a bare identifier/constructor, a string literal
   (split on '.'), or [bindings_of <atom>].  Raises
   [InlineError NotPlusMinusList] on any other shape. *)
let rec plus_minus_list_of_expr' (e : expression) : (flag * pm_atom) list =
  match e with
  | [%expr [%e? x] + [%e? y]] ->
      plus_minus_list_of_expr' x @ plus_minus_list_of_expr' y
  | [%expr [%e? x] - [%e? y]] ->
      plus_minus_list_of_expr' x
      @ List.map ~f:(fun (_, v) -> (Exclude, v))
      @@ plus_minus_list_of_expr' y
  | _ ->
      let default () = raise_inline_err NotPlusMinusList in
      (* The dotted path of an atom: string literals are split on '.',
         identifiers and constructors are flattened. *)
      let plus_minus_atom_name (e : expression) : string list option =
        match e with
        | { pexp_desc = Pexp_constant (Pconst_string (s, _, _)); _ } ->
            Some (String.split ~on:'.' s)
        | { pexp_desc = Pexp_ident { txt; _ }; _ }
        | { pexp_desc = Pexp_construct ({ txt; _ }, _); _ } ->
            Some (Longident.flatten_exn txt)
        | _ -> None
      in
      let plus_minus_atom (e : expression) : pm_atom =
        let h e = Option.value_or_thunk (plus_minus_atom_name e) ~default in
        match e with
        | [%expr bindings_of [%e? arg]] ->
            { apath = h arg; aqualifier = Some AllBindings }
        (* | [%expr bundle [%e? arg]] -> *)
        (*     { apath = h arg; aqualifier = Some Binding } *)
        | e -> { apath = h e; aqualifier = None }
      in
      [ (Include, plus_minus_atom e) ]

(* Exception-free wrapper around [plus_minus_list_of_expr'].
   NOTE(review): despite the [option] return type this never yields [None] —
   a malformed list aborts via [failwith] instead.  Callers pattern-match on
   [None] as a graceful fallback, which is currently unreachable; confirm
   whether returning [None] here was the original intent. *)
let plus_minus_list_of_expr (e : expression) : (flag * pm_atom) list option =
  try Some (plus_minus_list_of_expr' e)
  with InlineError NotPlusMinusList -> failwith "InlineError NotPlusMinusList"

(* Splits a list into (all-but-last, last); [None] on the empty list. *)
let elast l =
  match List.drop_last l with
  | None -> None
  | Some init -> List.last l |> Option.map ~f:(fun last -> (init, last))

(* List difference: keeps the elements of [x] that do not occur in [y],
   occurrence being tested with [equal]. *)
let diff_list (type a) (x : a list) (y : a list) ~(equal : a -> a -> bool) :
    a list =
  List.filter x ~f:(fun elt -> not (List.mem y elt ~equal))

(* The attributes attached to a module binding or a toplevel evaluation;
   every other structure item yields the empty list. *)
let attributes_of_structure_item (item : structure_item) =
  match item.pstr_desc with
  | Pstr_module mb -> mb.pmb_attributes
  | Pstr_eval (_, attrs) -> attrs
  | _ -> []

(* Parses an attribute payload of the shape [[@attr "some string"]] into its
   string constant; yields a [Result.Error] on any other payload shape. *)
let string_of_payload ~loc e =
  Ast_pattern.(
    parse_res
    @@ pstr
         (pstr_eval (pexp_constant @@ pconst_string __ drop drop) drop ^:: nil))
    loc e Fn.id

(* The [(attribute-name, string-payload)] pairs of a structure item,
   silently skipping attributes whose payload is not a string constant. *)
let string_attributes_of_structure_item ~loc (str : structure_item) :
    (string * string) list =
  let keep attr =
    match string_of_payload ~loc attr.attr_payload with
    | Result.Ok payload -> Some (attr.attr_name.txt, payload)
    | Result.Error _ -> None
  in
  List.filter_map (attributes_of_structure_item str) ~f:keep

(* TODO: ppx_inline reports badly locations (I actually don't use `_loc`...) *)
(* The main rewriter.  [opens] is the list of module file names (gathered
   from [@@add "file.ml"] attributes) whose definitions may be inlined. *)
let map_inline_nodes opens _loc =
  (* [match_glob glob path] holds when [path] ends with [glob]; a trailing
     ["*"] in [glob] matches one arbitrary trailing segment of [path]. *)
  let rec match_glob (glob : string list) (against : string list) =
    match (elast glob, elast against) with
    | Some (glob, "*"), Some (against, _) -> match_glob glob against
    | _ -> List.is_suffix ~equal:String.equal ~suffix:glob against
  in
  let inlinable_items = inlinable_items_of_modules opens in
  (* All inlinable items whose path matches [glob]. *)
  let matches ~loc (glob : string list) : inlinable_item list =
    List.filter ~f:(fun ({ path; _ } : inlinable_item) -> match_glob glob path)
    @@ inlinable_items ~loc
  in
  (* Applies [f] to every item matching [glob]; when nothing survives,
     raises a [NotFound] report listing every available item together with
     its pre-/post-selection status. *)
  let find_one (type a) ~context ~loc (glob : string list)
      (f : inlinable_item -> (string list * a) list) : (string list * a) list =
    let selection = matches glob ~loc in
    match List.concat_map ~f selection with
    | [] ->
        let selected_paths = List.map ~f:(fun { path; _ } -> path) selection in
        raise_inline_err
        @@ NotFound
             {
               search = glob;
               context;
               available =
                 List.map ~f:(fun ({ path; kind } as i) ->
                     {
                       path;
                       head = head_of kind;
                       preselected =
                         List.mem ~equal:[%eq: string list] selected_paths path;
                       postselected = f i |> List.is_empty |> not;
                     })
                 @@ inlinable_items ~loc;
             }
    | l -> l
  in
  (* Resolves a full [+]/[-] list: [Include] atoms accumulate matches,
     [Exclude] atoms remove previously accumulated matches by path.  The
     [bindings_of] qualifier expands a [let rec ... and ...] item into its
     individual bindings. *)
  let find (type a) ~loc ~context (flags : (flag * pm_atom) list)
      (f : inlinable_item_kind -> a option) =
    List.fold_left ~init:[]
      ~f:(fun acc (flag, path) ->
        let matches =
          find_one ~loc ~context path.apath (fun { path = path'; kind = i } ->
              match (path.aqualifier, i) with
              | ( Some AllBindings,
                  StrItem { pstr_desc = Pstr_value (_, bindings); _ } ) ->
                  let prefix = List.drop_last_exn path' in
                  List.filter_map
                    ~f:(fun b ->
                      Option.both
                        (name_of_binding b
                        |> Option.map ~f:(fun n -> prefix @ [ n ]))
                        (f (Binding b)))
                    bindings
              | _ ->
                  Option.to_list @@ Option.map ~f:(fun i -> (path', i)) @@ f i)
        in
        let acc =
          match flag with
          | Include -> acc @ matches
          | Exclude ->
              diff_list
                ~equal:(fun (x, _) (y, _) -> [%eq: string list] x y)
                acc matches
        in
        acc)
      flags
    |> List.map ~f:snd
  in

  object
    inherit Ast_traverse.map as super

    (* Structure items: [%%inline_defs ...] splices whole items in place;
       [@@inline_ands ...] on a binding splices sibling bindings into the
       same [let rec] bundle. *)
    method! structure e =
      let e = super#structure e in
      let each_item e =
        let loc = e.pstr_loc in
        match e.pstr_desc with
        | Pstr_extension
            ( ( { txt = "inline_defs"; _ },
                PStr [ { pstr_desc = Pstr_eval (payload, _); _ } ] ),
              _ ) -> (
            match plus_minus_list_of_expr payload with
            | Some opts -> (
                try
                  find ~context:"inline_defs" ~loc opts (function
                    | StrItem x -> Some x
                    | _ -> None)
                with InlineError err ->
                  (* Surface resolution failures as compile errors in place. *)
                  let err =
                    display_inline_error err |> Ast_builder.Default.estring ~loc
                  in
                  [%str [%ocaml.error [%e err]]])
            | _ -> [ e ])
        | Pstr_value (rf, bindings) ->
            let binding_names = List.filter_map ~f:name_of_binding bindings in
            let bindings =
              let f b =
                let mk_err s =
                  { b with pvb_expr = [%expr [%ocaml.error [%e s]]] }
                in
                let attr =
                  b.pvb_attributes
                  |> List.find ~f:(fun attr ->
                         String.equal attr.attr_name.txt "inline_ands")
                in
                match attr with
                | Some { attr_payload; _ } -> (
                    match attr_payload with
                    | PStr [ { pstr_desc = Pstr_eval (payload, _); _ } ] -> (
                        match plus_minus_list_of_expr payload with
                        | Some opts -> (
                            try
                              b
                              ::
                              (* Inlined bindings shadowed by an explicit
                                 binding of the same name are dropped. *)
                              (let bindings =
                                 find ~context:"inline_ands" ~loc opts (function
                                   | Binding b' -> Some b'
                                   | _ -> None)
                               in
                               List.filter
                                 ~f:(fun b' ->
                                   match name_of_binding b' with
                                   | Some name ->
                                       List.mem ~equal:String.equal
                                         binding_names name
                                       |> not
                                   | _ -> true)
                                 bindings)
                              |> List.dedup_and_sort ~compare:(fun a b ->
                                     [%compare: string option]
                                       (name_of_binding a) (name_of_binding b))
                            with InlineError err ->
                              let err =
                                display_inline_error err
                                |> Ast_builder.Default.estring ~loc
                              in
                              [ mk_err err ])
                        | _ -> [ b ])
                    | _ -> [ mk_err [%expr "expected PStr"] ])
                | None -> [ b ]
              in

              List.concat_map ~f bindings
            in
            [ { e with pstr_desc = Pstr_value (rf, bindings) } ]
        | _ -> [ e ]
      in
      List.concat_map ~f:each_item e

    (* Expressions: [%inline_arms ...] inside a [match] splices match cases,
       optionally post-processing each right-hand side when the case body is
       [map f]; [%inline_body p] splices the body of the unique binding at
       path [p]. *)
    method! expression e =
      let e = super#expression e in
      let loc = e.pexp_loc in
      match e with
      | { pexp_desc = Pexp_match (scrut, cases); _ } ->
          let cases =
            List.concat_map
              ~f:(fun case ->
                match case.pc_lhs with
                | [%pat? [%inline_arms [%e? e]]] -> (
                    let pc_rhs_map =
                      match case.pc_rhs with
                      | [%expr map [%e? f]] -> fun e -> [%expr [%e f] [%e e]]
                      | _ -> Fn.id
                    in
                    match plus_minus_list_of_expr e with
                    | Some opts -> (
                        try
                          find ~context:"case" ~loc opts (function
                            | MatchCase case -> Some case
                            | _ -> None)
                          |> List.map ~f:(fun case ->
                                 { case with pc_rhs = pc_rhs_map case.pc_rhs })
                        with InlineError err ->
                          let err =
                            display_inline_error err
                            |> Ast_builder.Default.estring ~loc
                          in
                          [
                            {
                              case with
                              pc_lhs = [%pat? [%ocaml.error [%e err]]];
                            };
                          ])
                    | None -> [ case ])
                | _ -> [ case ])
              cases
          in
          { e with pexp_desc = Pexp_match (scrut, cases) }
      | [%expr [%inline_body [%e? e]]] -> (
          match plus_minus_list_of_expr e with
          | Some opts -> (
              try
                match
                  find ~context:"inline_body" ~loc opts (function
                    | Binding { pvb_expr; _ } -> Some pvb_expr
                    | _ -> None)
                with
                | [ x ] -> x
                | _ -> failwith "inline_body: matched multiple"
              with InlineError err ->
                let err =
                  display_inline_error err |> Ast_builder.Default.estring ~loc
                in
                [%expr [%ocaml.error [%e err]]])
          | None -> e)
      | _ -> e
  end

(* Entry point of [%%inlined_contents]: reads the [@@add "file.ml"]
   attributes to know which module files to search, then rewrites the inline
   nodes found inside the payload item. *)
let expand ~(ctxt : Expansion_context.Extension.t) (str : structure_item) :
    structure_item =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let opens =
    string_attributes_of_structure_item ~loc str
    |> List.filter_map ~f:(fun (name, path) ->
           Option.some_if (String.equal name "add") path)
  in
  (map_inline_nodes opens loc)#structure_item str

(* [%%inlined_contents ...] takes a single structure item as payload
   (typically [module%inlined_contents M = struct ... end]). *)
let ext =
  Extension.V3.declare name Extension.Context.structure_item
    Ast_pattern.(pstr (__ ^:: nil))
    expand

(* Register the rewriting as a context-free rule with the ppxlib driver. *)
let rule = Ppxlib.Context_free.Rule.extension ext
let () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name


================================================
FILE: engine/utils/ppx_phases_index/README.md
================================================
# `ppx_phases_index`

This PPX looks for a `phases` folder in the sources, and generate a
module binding for each, inlining the documentation, so that we can
have a nice index of all the phases with their documentation.


================================================
FILE: engine/utils/ppx_phases_index/dune
================================================
; PPX generating an index of all compiler phases with their documentation
; (see README.md).
(library
 (name ppx_phases_index)
 (package hax-engine)
 (kind ppx_rewriter)
 (libraries ppxlib base)
 (preprocess
  (pps ppxlib.metaquot ppx_deriving.eq ppx_compare ppx_deriving.show)))

; Demote all warnings from errors, except warning 8 (non-exhaustive match)
; which stays fatal.
(env
 (_
  (flags
   (:standard -warn-error -A -warn-error +8))))


================================================
FILE: engine/utils/ppx_phases_index/ppx_phases_index.ml
================================================
open Base
open Ppxlib

(* [let*] as [Option.bind], enabling option-monad syntax below. *)
let ( let* ) o f = Option.bind o ~f

(* Applies [f] to the first character of [s]; a no-op on the empty string. *)
let map_first_letter (f : string -> string) (s : string) =
  f (String.prefix s 1) ^ String.drop_prefix s 1

(* "foo_bar" -> "Foo_bar". *)
let uppercase_first_char = map_first_letter String.uppercase

(* Walks the file tree under the current working directory looking for a
   directory literally named [phases]; dot-prefixed entries are skipped.
   Note the guard order: the name check precedes the directory check, so the
   [phases] directory itself is returned rather than recursed into. *)
let locate_phases_directory () : string =
  let rec find path =
    match path with
    | path when String.(Stdlib.Filename.basename path = "phases") -> Some path
    | path when Stdlib.Sys.is_directory path ->
        Stdlib.Sys.readdir path
        |> Array.filter ~f:(fun name -> not (String.is_prefix ~prefix:"." name))
        |> Array.find_map ~f:(fun name ->
               find @@ Stdlib.Filename.concat path name)
    | _ -> None
  in
  find (Stdlib.Sys.getcwd ())
  |> Option.value_exn
       ~message:"ppx_phases_index: could not locate folder [phases]"

(* Lists the phases found on disk as
   [(filename, Module_name, Phase_name, doc-attribute option)].
   Only [phase_*.mli] interfaces are considered; preprocessed [*.pp.mli]
   files are skipped.  Each interface must contain, [open]s excluded,
   exactly one signature item, optionally preceded by a documentation
   attribute — anything else fails loudly.
   NOTE(review): the [loc] parameter is currently unused. *)
let list_phases loc : (string * string * string * _ option) list =
  let dir = locate_phases_directory () in
  Stdlib.Sys.readdir dir |> Array.to_list
  |> List.filter_map ~f:(fun filename ->
         let* module_name = String.chop_suffix ~suffix:".mli" filename in
         (* Reject preprocessed [*.pp.mli] files. *)
         let* _ =
           match String.chop_suffix ~suffix:".pp" module_name with
           | Some _ -> None
           | None -> Some ()
         in
         let* phase_name = String.chop_prefix ~prefix:"phase_" module_name in
         let module_name = uppercase_first_char module_name in
         let phase_name = uppercase_first_char phase_name in
         Some (filename, module_name, phase_name))
  |> List.map ~f:(fun (filename, module_name, phase_name) ->
         let path = Stdlib.Filename.concat dir filename in
         let str =
           Stdlib.open_in path |> Lexing.from_channel |> Parse.interface
         in
         (* Drop [open]s before counting the remaining signature items. *)
         let str =
           List.filter
             ~f:(function { psig_desc = Psig_open _; _ } -> false | _ -> true)
             str
         in
         match str with
         | [ _ ] -> (filename, module_name, phase_name, None)
         | [ { psig_desc = Psig_attribute attr; _ }; _ ] ->
             (filename, module_name, phase_name, Some attr)
         | [] -> failwith ("Empty phase" ^ filename)
         | _ ->
             failwith
               ("Invalid phase" ^ filename ^ ": got "
               ^ Int.to_string (List.length str)))

(* An AST mapper performing identifier substitution: every string, label and
   longident component equal to a key of [l] is replaced by the associated
   value; everything else is left untouched. *)
let rename (l : (string * string) list) =
  let h (s : string) =
    List.find_map
      ~f:(fun (s', replace) -> if String.equal s s' then Some replace else None)
      l
    |> Option.value ~default:s
  in
  object
    inherit Ast_traverse.map
    method! string = h
    method! label = h

    (* Rewrite each component of dotted paths as well. *)
    method! longident =
      let rec r = function
        | Lapply (x, y) -> Lapply (r x, r y)
        | Ldot (x, y) -> Ldot (r x, h y)
        | Lident x -> Lident (h x)
      in
      r
  end

(* Expands [%%phases_index]: for every phase found on disk, builds a module
   binding [Phase_name = Module_name.Make] carrying over the phase's
   documentation comment, and wraps them all in a single include.
   The payload structure item is ignored (hence [_str]); a previously unused
   local helper [h] has been removed. *)
let expand_phases_index ~(ctxt : Expansion_context.Extension.t)
    (_str : structure_item) : structure_item =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let (module S) = Ppxlib.Ast_builder.make loc in
  let modules =
    list_phases loc
    |> List.map ~f:(fun (_, module_name, phase_name, attrs) ->
           (* Alias the phase's [Make] functor under the short phase name. *)
           let original =
             S.pmod_ident { txt = Ldot (Lident module_name, "Make"); loc }
           in
           let b =
             S.module_binding
               ~name:{ txt = Some phase_name; loc }
               ~expr:original
           in
           let attrs = Option.to_list attrs in
           (* Floating [@@@ocaml.text] comments become [@@ocaml.doc] so they
              attach to the generated module binding. *)
           let attrs =
             List.map
               ~f:(fun attr ->
                 let n = attr.attr_name in
                 if String.equal n.txt "ocaml.text" then
                   { attr with attr_name = { n with txt = "ocaml.doc" } }
                 else attr)
               attrs
           in
           let b = { b with pmb_attributes = attrs } in
           S.pstr_module b)
  in
  S.pstr_include (S.include_infos (S.pmod_structure modules))

(* Strips a trailing [.ml] or, failing that, [.mli]; [None] when neither
   suffix is present. *)
let chop_ml_or_mli str =
  Option.first_some
    (String.chop_suffix ~suffix:".ml" str)
    (String.chop_suffix ~suffix:".mli" str)

(* Derives a phase constructor name from a phase file name:
   ".../phase_foo_bar.ml" -> "FooBar".  Fails loudly when the file is not
   named [phase_*.ml] / [phase_*.mli]. *)
let filename_to_phase_constructor file_name =
  let phase_name =
    (* Keep only the basename, then strip the mandatory prefix and suffix. *)
    file_name |> String.rsplit2 ~on:'/' |> Option.map ~f:snd
    |> Option.value ~default:file_name
    |> String.chop_prefix ~prefix:"phase_"
    |> Option.value_exn
         ~message:
           ("`[%auto_phase_name]` can only be used in a phase, whose filename \
             starts with `phase_`. Current file is: [" ^ file_name ^ "]")
    |> chop_ml_or_mli
    |> Option.value_exn
         ~message:
           ("File name [" ^ file_name
           ^ "] was expected to end with a `.ml` or `.mli`")
  in
  (* "foo_bar" -> ["foo"; "bar"] -> "FooBar". *)
  phase_name |> String.split ~on:'_'
  |> List.map ~f:uppercase_first_char
  |> String.concat

(* Expands [%%add_phase_names type t = A | B | ...]: re-emits the variant
   declaration with one extra nullary constructor appended per phase file
   found on disk.  Fails when the payload type is not a variant. *)
let expand_add_phase_names ~(ctxt : Expansion_context.Extension.t)
    (typ : type_declaration) : structure_item =
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let (module S) = Ppxlib.Ast_builder.make loc in
  let ptype_kind =
    match typ.ptype_kind with
    | Ptype_variant ctors ->
        let phases = list_phases loc in
        let extra =
          List.map
            ~f:(fun (filename, _, _, _) ->
              let name = filename_to_phase_constructor filename in
              let name = { txt = name; loc = S.loc } in
              let args = Pcstr_tuple [] in
              S.constructor_declaration ~name ~args ~res:None)
            phases
        in
        (* Original constructors first, phase constructors appended after. *)
        Ptype_variant (ctors @ extra)
    | _ -> failwith "expected variants"
  in
  let typ = { typ with ptype_kind } in
  S.pstr_type Recursive [ typ ]

(* Expands [%auto_phase_name] into the [Diagnostics.Phase.<Constructor>]
   matching the current file's name (e.g. [phase_foo_bar.ml] -> [FooBar]).
   The payload structure item is ignored — the parameter is renamed [_str]
   to make that explicit and silence the unused-variable warning. *)
let expand_auto_phase_name ~(ctxt : Expansion_context.Extension.t)
    (_str : structure_item) : expression =
  let file_name = Expansion_context.Extension.input_name ctxt in
  let constructor = filename_to_phase_constructor file_name in
  let loc = Expansion_context.Extension.extension_point_loc ctxt in
  let (module S) = Ppxlib.Ast_builder.make loc in
  let txt = Astlib.Longident.parse ("Diagnostics.Phase." ^ constructor) in
  S.pexp_construct { txt; loc = S.loc } None

(* Register the three extensions ([%%phases_index], [%auto_phase_name] and
   [%%add_phase_names]) as context-free rules of a single transformation. *)
let () =
  let rule_phases_index =
    let name = "phases_index" in
    Ppxlib.Context_free.Rule.extension
      (Extension.V3.declare name Extension.Context.structure_item
         Ast_pattern.(pstr (__ ^:: nil))
         expand_phases_index)
  in
  let rule_auto_phase_name =
    let name = "auto_phase_name" in
    Ppxlib.Context_free.Rule.extension
      (Extension.V3.declare name Extension.Context.expression
         Ast_pattern.(pstr (__ ^:: nil))
         expand_auto_phase_name)
  in
  let rule_expand_add_phase_names =
    let name = "add_phase_names" in
    Ppxlib.Context_free.Rule.extension
      (Extension.V3.declare name Extension.Context.structure_item
         Ast_pattern.(pstr (pstr_type drop (__ ^:: nil) ^:: nil))
         expand_add_phase_names)
  in
  Ppxlib.Driver.register_transformation
    ~rules:
      [ rule_phases_index; rule_auto_phase_name; rule_expand_add_phase_names ]
    "ppx_phases_index"


================================================
FILE: engine/utils/sourcemaps/base64.ml
================================================
open Prelude

(* The 64 digits of the standard (non-URL) base64 alphabet; a
   character's index is its 6-bit value. *)
let alphabet =
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

(* [encode n] is the base64 digit for the sextet [n]; [n] must lie in
   [0, 64). *)
let encode (n : int) : char =
  assert (n >= 0 && n < 64);
  alphabet.[n]

(* [decode c] is the 6-bit value of the base64 digit [c]; raises when
   [c] is not in the alphabet. *)
let decode (c : char) : int =
  String.index alphabet c |> Option.value_exn


================================================
FILE: engine/utils/sourcemaps/dune
================================================
(library
 (name sourcemaps)
 (package hax-engine)
 (inline_tests)
 (preprocess
  (pps ppx_inline_test ppx_yojson_conv ppx_deriving.show ppx_deriving.eq))
 (libraries base))

(include_subdirs unqualified)


================================================
FILE: engine/utils/sourcemaps/location.ml
================================================
open Prelude

(* A position in a text file, counted in lines and columns. *)
type t = { line : int; col : int } [@@deriving eq, yojson]

(* Human-readable rendering, e.g. "(3:14)". *)
let show (l : t) : string = Stdlib.Printf.sprintf "(%d:%d)" l.line l.col

let pp (fmt : Stdlib.Format.formatter) (s : t) : unit =
  Stdlib.Format.pp_print_string fmt (show s)

(* The origin: first line, first column. *)
let default = { line = 0; col = 0 }

(* Move [x] right by [cols] columns, staying on the same line. *)
let plus_cols x cols = { x with col = x.col + cols }

(* Lift a binary integer operation to positions, component-wise. *)
let op f x y = { line = f x.line y.line; col = f x.col y.col }
let ( + ) = op ( + )
let ( - ) = op ( - )

(* Lexicographic order on (line, col). Returns exactly 1, 0 or -1, like
   the hand-rolled comparison it replaces. *)
let compare (x : t) (y : t) : int =
  let open Int in
  if x.line > y.line then 1
  else if x.line < y.line then -1
  else if x.col > y.col then 1
  else if x.col < y.col then -1
  else 0


================================================
FILE: engine/utils/sourcemaps/mappings/dual.ml
================================================
(* A pair of values: one for the generated file, one for the source
   file. *)
type 'a t = { gen : 'a; src : 'a } [@@deriving show, eq, yojson]

(* Turn a pair of options into an optional pair, filling any missing
   side from [default]. [None] only when both sides are absent. *)
let transpose ~(default : 'a t) ({ gen; src } : 'a option t) : 'a t option =
  match (gen, src) with
  | None, None -> None
  | _ ->
      Some
        {
          gen = Option.value gen ~default:(default.gen);
          src = Option.value src ~default:(default.src);
        }

(* A dual with the same value on both sides. *)
let default (type a) (d : a) : a t = { gen = d; src = d }


================================================
FILE: engine/utils/sourcemaps/mappings/instruction.ml
================================================
open Prelude
open Types

(* One decoded instruction of a source map "mappings" string:
   - [ShiftGenLinesResetGenCols]: encoded as [lines] ';' characters —
     advance the generated-file line and reset its column to 0;
   - [ShiftGenCols]: a 1-field VLQ segment — advance the generated
     column only;
   - [Full]: a 4- or 5-field VLQ segment — advance the generated
     column, shift the source location, and attach [meta] (source file
     offset and optional name index). *)
type t =
  | ShiftGenLinesResetGenCols of { lines : int }
  | ShiftGenCols of int
  | Full of { shift_gen_col : int; shift_src : Location.t; meta : meta }
[@@deriving show { with_path = false }, eq]

(* Encode a single instruction. Also reports whether the produced chunk
   must be separated from a following segment by a ',' ([`NeedsSep]) or
   is self-separating ([`Sep], the ';' line markers). *)
let encode_one : t -> string * [ `Sep | `NeedsSep ] = function
  | ShiftGenLinesResetGenCols { lines } -> (String.make lines ';', `Sep)
  | ShiftGenCols n -> (Vql.encode_base64 [ n ], `NeedsSep)
  | Full { shift_gen_col; shift_src; meta = { file_offset; name } } ->
      let fields =
        [ shift_gen_col; file_offset; shift_src.line; shift_src.col ]
        @ Option.to_list name
      in
      (Vql.encode_base64 fields, `NeedsSep)

(* Concatenate encoded instructions, inserting a ',' only between two
   consecutive chunks that both require a separator. *)
let encode (instructions : t list) : string =
  List.fold instructions ~init:("", `Sep) ~f:(fun (acc, prev) instr ->
      let chunk, sep = encode_one instr in
      let glue =
        match (prev, sep) with `NeedsSep, `NeedsSep -> "," | _ -> ""
      in
      (acc ^ glue ^ chunk, sep))
  |> fst

(* Decode a single (non-';', non-',') segment. A 1-field segment only
   shifts the generated column; a 4- or 5-field segment is a full
   mapping (the optional 5th field is a name index). *)
let decode_one (s : string) : t =
  match Vql.decode_base64 s with
  | [ cols ] -> ShiftGenCols cols
  | shift_gen_col :: file_offset :: line :: col :: rest ->
      let name = match rest with [ name ] -> Some name | _ -> None in
      let meta = { file_offset; name } in
      let shift_src : Location.t = { line; col } in
      Full { shift_gen_col; shift_src; meta }
  | _ ->
      (* 0-, 2- or 3-field segments are not valid source map segments;
         fail with a message naming the offending input (was "??"). *)
      failwith ("Instruction.decode_one: invalid mapping segment: " ^ s)

(* Tokenize the raw mappings string. Each step consumes either the
   longest ';'/','-free prefix (a VLQ segment handed to [decode_one]) or
   a single separator character: ';' becomes a one-line shift
   instruction, ',' becomes [None] (filtered out by [decode]). *)
let rec decode' (s : string) : t option list =
  if String.is_empty s then []
  else
    (* Length of the prefix before the first separator (whole string if
       there is none). *)
    let n =
      String.lfindi ~f:(fun _ -> function ';' | ',' -> true | _ -> false) s
      |> Option.value ~default:(String.length s)
    in
    (if n > 0 then Some (decode_one (String.prefix s n))
     else
       match String.get s 0 with
       | ';' -> Some (ShiftGenLinesResetGenCols { lines = 1 })
       | ',' -> None
       | _ -> failwith "should not be possible")
    (* Consume at least one character so recursion always terminates. *)
    :: decode' (String.drop_prefix s (Int.max 1 n))

(* Decode a full mappings string, dropping the ',' placeholders. *)
let decode (s : string) : t list = decode' s |> List.filter_map ~f:Fn.id

(* Apply one instruction to a cursor (the current generated and source
   locations); also yields the metadata when the instruction is a full
   mapping. *)
let eval_one (cursor : Location.t Dual.t) (instr : t) :
    Location.t Dual.t * meta option =
  match instr with
  | ShiftGenLinesResetGenCols { lines } ->
      let gen : Location.t = { line = cursor.gen.line + lines; col = 0 } in
      ({ cursor with gen }, None)
  | ShiftGenCols n ->
      ({ cursor with gen = Location.plus_cols cursor.gen n }, None)
  | Full { shift_gen_col; shift_src; meta } ->
      let gen = Location.plus_cols cursor.gen shift_gen_col in
      let src = Location.(cursor.src + shift_src) in
      ({ gen; src }, Some meta)

(* Run a list of instructions starting from [init] (the origin by
   default), recording the cursor reached after each instruction. *)
let to_points ?(init = Dual.default Location.default) (instrs : t list) :
    point list =
  let _final, rev_points =
    List.fold instrs ~init:(init, []) ~f:(fun (cursor, acc) instr ->
        let cursor, meta = eval_one cursor instr in
        (cursor, (cursor, meta) :: acc))
  in
  List.rev rev_points

(* Inverse of [to_points]: re-encode absolute points as relative
   instructions. Positions are delta-encoded against the previous point;
   [m0] carries the previously seen metadata for file-offset deltas. *)
let from_points : point list -> t list =
  List.folding_map
    ~init:(Dual.default Location.default, None)
    ~f:(fun ({ src; gen }, m0) (x, m) ->
      let d =
        Location.(Dual.{ Dual.src = x.src - src; Dual.gen = x.gen - gen })
      in
      (* After a line shift the generated column restarts at 0, so the
         column is absolute on a new line and relative otherwise. *)
      let shift_gen_col = (if Int.(d.gen.line = 0) then d else x).gen.col in
      let relative_m =
        Option.map m ~f:(fun m ->
            match m0 with
            | Some m0 ->
                { file_offset = m.file_offset - m0.file_offset; name = None }
            | None -> m)
      in
      let output =
        (if Int.(d.gen.line = 0) then []
         else [ ShiftGenLinesResetGenCols { lines = d.gen.line } ])
        @
        match relative_m with
        | Some meta -> [ Full { shift_gen_col; shift_src = d.src; meta } ]
        (* A meta-less point with no column shift emits nothing. (A
           duplicate of this guard on a wildcard pattern was unreachable
           — only [None] can reach past the [Some] case — and has been
           removed.) *)
        | None when Int.(shift_gen_col = 0) -> []
        | _ -> [ ShiftGenCols shift_gen_col ]
      in
      (* Points without metadata do not move the source cursor. *)
      let x = match m with Some _ -> x | None -> { x with src } in
      ((x, Option.first_some m m0), output))
  >> List.concat

(* Full pipeline round-trip on a real-world, multi-source mappings
   string: decode to instructions, evaluate to points, re-derive
   instructions, re-encode — the output must be byte-identical. *)
let%test _ =
  let f = decode >> to_points >> from_points >> encode in
  [
    ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC;AAAA,EAGtE,YAAYG,GAAqB;AAC/B,UAAMA,CAAK;AAIb,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAEC,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAClE,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAED,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAJhE,SAAK,MAAMA,IAASF,EAAMI;AAAA,EAC5B;AAAA,EAKA,SAAS;AACP,WAAOR,EAAC;AAAA,MAAI,OAAM;AAAA,OAChBA,EAAC,YAAI,KAAK,MAAM,KAAM,GACtBA,EAAC,WACCA,EAAC;AAAA,MAAO,SAAS,KAAKO;AAAA,OAAY,GAAC,GAClC,KACA,KAAK,MAAMD,GACX,KACDN,EAAC;AAAA,MAAO,SAAS,KAAKK;AAAA,OAAY,GAAC,CACrC,CACF;AAAA,EACF;AACF,GAEWI,IAAkB,CAACL,MAAwB;AACpD,MAAI,CAACM,GAAOC,CAAQ,IAAIT,EAASE,EAAMI,CAAa;AACpD,SAAOR,EAAC;AAAA,IAAI,OAAM;AAAA,KAChBA,EAAC,YAAII,EAAMQ,CAAO,GAClBZ,EAAC,WACCA,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,GAC5C,KACAA,GACA,KACDV,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,CAC/C,CACF;AACF;;;AD9CAG;AAAA,EACEC,EAAAC,GAAA,MACED,EAACE,GAAA;AAAA,IAAaC,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,GACrDJ,EAACK,GAAA;AAAA,IAAgBF,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,CAC1D;AAAA,EACA,SAAS,eAAe,MAAM;AAChC;";
  ]
  |> List.for_all ~f:(fun s -> String.equal s (f s))

(* Encode spanned mappings by flattening them to points first. *)
let from_spanned (spans : Spanned.t list) : t list =
  Spanned.to_points spans |> from_points


================================================
FILE: engine/utils/sourcemaps/mappings/mappings.ml
================================================
open Prelude
include Types

(* A start location together with an optional end location. *)
type range = { start : Location.t; end_ : Location.t option }
[@@deriving show, eq, yojson]

module Chunk = struct
  (* One mapping: a generated-file range, a source-file range, and the
     associated metadata. *)
  type t = { gen : range; src : range; meta : meta }
  [@@deriving show, eq, yojson]

  (* Order chunks by the start of their generated range. *)
  let compare (x : t) (y : t) = Location.compare x.gen.start y.gen.start

  (* Convert a span (start/end point pair) into a chunk (range form). *)
  let from_spanned ((start, end_, meta) : Spanned.t) : t =
    let gen = { start = start.gen; end_ = end_.gen } in
    let src = { start = start.src; end_ = end_.src } in
    { gen; src; meta }

  (* Inverse of [from_spanned]. *)
  let to_spanned ({ gen; src; meta } : t) : Spanned.t =
    ( { gen = gen.start; src = src.start },
      { gen = gen.end_; src = src.end_ },
      meta )

  (* [from_spanned] and [to_spanned] are mutually inverse on real data. *)
  let%test _ =
    let x = ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU" in
    let s = Instruction.(decode x |> to_points) |> Spanned.from_points in
    [%eq: Spanned.t list] (List.map ~f:(from_spanned >> to_spanned) s) s

  (* Parse a "mappings" string into chunks. *)
  let decode : string -> t list =
    Instruction.(decode >> to_points >> Spanned.from_points)
    >> List.map ~f:from_spanned

  (* Serialize chunks back into a "mappings" string. *)
  let encode : t list -> string =
    List.map ~f:to_spanned >> Instruction.from_spanned >> Instruction.encode

  (* decode/encode round-trip on a real-world prefix. *)
  let%test _ =
    let x =
      ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC"
    in
    decode x |> encode |> [%eq: string] x
end

include Chunk


================================================
FILE: engine/utils/sourcemaps/mappings/mappings.mli
================================================
(** Segment metadata: the index of the source file in the source map's
    [sources] array and, optionally, the index of an identifier in its
    [names] array. *)
type meta = { file_offset : int; name : int option }
[@@deriving show, eq, yojson]

(** A start location together with an optional end location. *)
type range = { start : Location.t; end_ : Location.t option }
[@@deriving show, eq, yojson]

module Chunk : sig
  (** One mapping: a generated-file range, a source-file range, and the
      associated metadata. *)
  type t = { gen : range; src : range; meta : meta }
  [@@deriving show, eq, yojson]

  (** Order chunks by the start of their generated range. *)
  val compare : t -> t -> int
end

open Chunk

(** Parse a source map "mappings" string into chunks. *)
val decode : string -> t list

(** Serialize chunks back into a "mappings" string. *)
val encode : t list -> string


================================================
FILE: engine/utils/sourcemaps/mappings/spanned.ml
================================================
open Prelude
open Types

(* A span: its start locations (generated and source), its optional end
   locations, and the segment metadata. *)
type t = Location.t Dual.t * Location.t option Dual.t * meta
[@@deriving show, eq]

(* Flatten spans into points. Each span contributes its start point
   (carrying its metadata). The previous span's end point is emitted as
   a meta-less point only when the next span does not start at the same
   generated location (or when there is no next span). *)
let to_points (pts : t list) : point list =
  List.map pts ~f:Option.some
  (* The trailing [None] forces the last span's end point out. *)
  |> Fn.flip List.append [ None ]
  |> List.folding_map ~init:None ~f:(fun acc x ->
         let prev_end =
           match (acc, x) with
           | Some end_, Some (start, _, _)
             when [%eq: Location.t] start.Dual.gen end_.Dual.gen |> not ->
               Some end_
           | Some end_, None -> Some end_
           | _ -> None
         in
         let out, end_ =
           match x with
           | Some (start, end_, meta) ->
               ([ (start, Some meta) ], Dual.transpose ~default:start end_)
           | None -> ([], None)
         in
         ( end_,
           (prev_end |> Option.map ~f:(fun e -> (e, None)) |> Option.to_list)
           @ out ))
  |> List.concat

(* Rebuild spans from points, processed in reverse order. For each point
   carrying metadata, the span's end locations are the most recently
   seen generated location and the last source location recorded for the
   same source file; meta-less points only update those trackers. *)
let from_points : point list -> t list =
  List.rev
  >> List.folding_map
       ~init:(None, Map.empty (module Int))
       ~f:(fun (gen_loc_0, src_locs) ((loc_start : _ Dual.t), meta) ->
         match meta with
         | Some meta ->
             let src_loc_0 = Map.find src_locs meta.file_offset in
             let src_locs =
               Map.set src_locs ~key:meta.file_offset ~data:loc_start.src
             in
             let loc_end = Dual.{ gen = gen_loc_0; src = src_loc_0 } in
             ((Some loc_start.gen, src_locs), Some (loc_start, loc_end, meta))
         | None -> ((Some loc_start.gen, src_locs), None))
  >> List.filter_map ~f:Fn.id >> List.rev


================================================
FILE: engine/utils/sourcemaps/mappings/types.ml
================================================
open Prelude

(* Metadata carried by a "full" mapping segment: the index of the source
   file in the source map's [sources] array and, optionally, the index
   of an identifier in its [names] array. *)
type meta = { file_offset : int; name : int option }
[@@deriving show, eq, yojson]

(* A resolved mapping point: a (generated, source) location pair plus
   the segment metadata ([None] for points carrying no attribution). *)
type point = Location.t Dual.t * meta option [@@deriving show, eq, yojson]


================================================
FILE: engine/utils/sourcemaps/prelude.ml
================================================
include Base
include Ppx_yojson_conv_lib.Yojson_conv.Primitives

(* Right-to-left function composition: [(f << g) x] is [f (g x)]. *)
let ( << ) f g x = f (g x)

(* Left-to-right function composition: [(f >> g) x] is [g (f x)]. *)
let ( >> ) f g x = g (f x)


================================================
FILE: engine/utils/sourcemaps/source_maps.ml
================================================
open Prelude
module Location = Location
include Mappings

(* A single source-to-generated mapping, still in symbolic form (source
   file and name as strings, before index resolution). *)
type mapping = {
  gen : range;
  src : range;
  source : string;
  name : string option;
}

(* The JSON structure of a source map (version 3). Field names follow
   the source map specification and are serialized as-is. *)
type t = {
  mappings : string;
  sourceRoot : string;
  sources : string list;
  sourcesContent : string option list;
  names : string list;
  version : int;
  file : string;
}
[@@deriving yojson]

(* Deduplicate [l], keeping the most frequent strings first. *)
let dedup_freq (l : string list) : string list =
  let counts : (string, int) Hashtbl.t = Hashtbl.create (module String) in
  List.iter l ~f:(Hashtbl.incr counts);
  Hashtbl.to_alist counts
  |> List.sort ~compare:(fun (_, a) (_, b) -> Int.compare b a)
  |> List.map ~f:fst

(* Build a source map (version 3) from a list of mappings.
   [sourcesContent] is queried once per (deduplicated) source file. *)
let mk ?(file = "") ?(sourceRoot = "") ?(sourcesContent = fun _ -> None)
    (mappings : mapping list) : t =
  let sources = List.map ~f:(fun x -> x.source) mappings |> dedup_freq in
  let names = List.filter_map ~f:(fun x -> x.name) mappings |> dedup_freq in
  (* Precomputed index tables ([dedup_freq] guarantees unique keys):
     avoids the previous linear [List.findi_exn] scan per mapping, which
     was accidentally O(mappings * sources). *)
  let index_table l =
    let tbl : (string, int) Hashtbl.t = Hashtbl.create (module String) in
    List.iteri l ~f:(fun i s -> Hashtbl.set tbl ~key:s ~data:i);
    tbl
  in
  let source_indexes = index_table sources in
  let name_indexes = index_table names in
  let f { gen; src; source; name } =
    (* [source]/[name] always occur in the tables: they were built from
       these very mappings. *)
    let file_offset = Hashtbl.find_exn source_indexes source in
    let name = Option.map ~f:(Hashtbl.find_exn name_indexes) name in
    let meta = { file_offset; name } in
    Chunk.{ gen; src; meta }
  in
  let mappings = List.map mappings ~f |> List.sort ~compare:Chunk.compare in
  let mappings = Mappings.encode mappings in
  let sourcesContent = List.map ~f:sourcesContent sources in
  { mappings; sourceRoot; sourcesContent; sources; names; version = 3; file }

(* Serialize a source map as pretty-printed JSON. *)
let to_json (map : t) : string =
  Yojson.Safe.pretty_to_string ([%yojson_of: t] map)


================================================
FILE: engine/utils/sourcemaps/source_maps.mli
================================================
(** A start location together with an optional end location. *)
type range = { start : Location.t; end_ : Location.t option }

module Location : sig
  (** A line/column position in a text file. *)
  type t = { line : int; col : int } [@@deriving eq]
end

type mapping = {
  gen : range;
  src : range;
  source : string;
  name : string option;
}
(** A source file to generated file mapping *)

type t = {
  mappings : string;
  sourceRoot : string;
  sources : string list;
  sourcesContent : string option list;
  names : string list;
  version : int;
  file : string;
}
[@@deriving yojson]
(** The source map (version 3) JSON structure; field names follow the
    source map specification and are serialized as-is. *)

(** [mk ?file ?sourceRoot ?sourcesContent mappings] builds a source map.
    [sourcesContent] is queried for each distinct source file. *)
val mk :
  ?file:string ->
  ?sourceRoot:string ->
  ?sourcesContent:(string -> string option) ->
  mapping list ->
  t

(** Pretty-printed JSON rendering of a source map. *)
val to_json : t -> string


================================================
FILE: engine/utils/sourcemaps/vql.ml
================================================
open Prelude

(* Encode one integer as base64-VLQ digit values (integers in [0, 64)).
   The first digit of a value stores the sign in its lowest payload bit;
   every digit carries 5 payload bits and sets bit 5 (0b100000) when
   more digits follow. *)
let rec encode_one ?(first = true) (n : int) : int list =
  (* On the first digit, fold the sign into the lowest bit. *)
  let n = if first then (Int.abs n lsl 1) + if n < 0 then 1 else 0 else n in
  let lhs, rhs = (n lsr 5, n land 0b11111) in
  let last = Int.equal lhs 0 in
  let output = (if last then 0b000000 else 0b100000) lor rhs in
  output :: (if last then [] else encode_one ~first:false lhs)

(* Encode a list of integers as a flat list of VLQ digit values. *)
let encode (values : int list) : int list =
  List.concat_map values ~f:encode_one

(* Encode integers as a base64 VLQ string (the "mappings" format). *)
let encode_base64 (values : int list) : string =
  encode values |> List.map ~f:Base64.encode |> String.of_char_list

(* Decode one VLQ value from a list of digit values, returning the value
   and the remaining digits. Bit 5 of each digit is the continuation
   flag; the first digit keeps 4 payload bits (its lowest bit is the
   sign), subsequent digits keep 5. A truncated input yields 0. *)
let rec decode_one' (first : bool) (l : int list) : int * int list =
  match l with
  | [] -> (0, [])
  | hd :: tl ->
      assert (hd < 64);
      (* Continuation flag: bit 5 set means more digits follow. *)
      let c = Int.shift_right hd 5 |> Int.bit_and 0b1 in
      let last = Int.equal c 0 in
      if first then
        let sign = match Int.bit_and hd 0b1 with 1 -> -1 | _ -> 1 in
        (* Drop the sign bit; 4 payload bits remain in the first digit. *)
        let hd = Int.shift_right hd 1 |> Int.bit_and 0b1111 in
        if last then (sign * hd, tl)
        else
          let next, tl = decode_one' false tl in
          let value = hd + Int.shift_left next 4 in
          (sign * value, tl)
      else
        let hd = Int.bit_and hd 0b11111 in
        if last then (hd, tl)
        else
          let next, tl = decode_one' false tl in
          (hd + Int.shift_left next 5, tl)

(* Decode a flat list of VLQ digit values into integers. The empty list
   decodes to the empty list — previously it decoded to [0], because
   the truncated-input case of [decode_one'] yields 0, breaking the
   [decode (encode []) = []] round-trip. Non-empty inputs behave exactly
   as before. *)
let rec decode (l : int list) : int list =
  match l with
  | [] -> []
  | _ -> (
      match decode_one' true l with
      | n, [] -> [ n ]
      | n, tl -> n :: decode tl)

(* Inverse of [encode_base64]. *)
let decode_base64 (s : string) : int list =
  String.to_list s |> List.map ~f:Base64.decode |> decode

(* Round-trip: [decode (encode x) = x] for assorted values, including
   boundary digits (31, 32, 63, 64) and their negations. *)
let%test _ =
  let tests =
    [ [ 132; 6; 2323; 64; 32; 63; 31; 65; 33 ]; [ 133123232 ]; [ 0; 0; 0 ] ]
  in
  let tests = tests @ List.map ~f:(List.map ~f:(fun x -> -x)) tests in
  List.for_all ~f:(fun x -> [%eq: int list] x (encode x |> decode)) tests


================================================
FILE: engine/utils/universe-hash.sh
================================================
#!/usr/bin/env bash

# this script computes the hash of [hax-export-json-schemas], so that
# whenver this binary change, dune retriggers a generation of
# `types.ml` (see `../lib/dune`).

# Last-resort "hash": a random, time-dependent value, so that the output
# differs on every run when no real hashing tool is available.
function fallback() {
    echo "${RANDOM}_$(date +%s)"
}

# Print a digest of the file given as $1, preferring sha256sum, then
# md5sum, then openssl; fall back to a random value when none exists.
function hash() {
    local input="$1"
    if command -v sha256sum > /dev/null 2>&1; then
        sha256sum < "$input"
    elif command -v md5sum > /dev/null 2>&1; then
        md5sum < "$input"
    elif command -v openssl > /dev/null 2>&1; then
        openssl sha256 < "$input"
    else
        fallback
    fi
}

# Print a diagnostic for a missing binary and exit with status 1.
# Fixes three bugs of the previous version:
#  - brace expansion ({,/}) is not performed inside [[ ]], so the PATH
#    check could never match; both spellings are now tested explicitly;
#  - unescaped backticks inside double quotes were executed as a
#    command substitution instead of being printed;
#  - 'Hax''s' concatenated two single-quoted strings and printed "Haxs".
function error() {
    DIAG="looks like it's **NOT** the case!"
    if [[ ":$PATH:" == *":$HOME/.cargo/bin:"* || ":$PATH:" == *":$HOME/.cargo/bin/:"* ]]; then
        DIAG="this seems to be the case"
    fi
    echo "Error: could not find [$1] in PATH." >&2
    echo "Please make sure that:" >&2
    echo "  - you ran Hax's \`setup.sh\` script;" >&2
    echo "  - you have \`~/.cargo/bin\` in your PATH ($DIAG)." >&2
    exit 1
}

# Binaries to hash; overridable through the environment for CI/testing.
HAX_JSON_SCHEMA_EXPORTER_BINARY=${HAX_JSON_SCHEMA_EXPORTER_BINARY:-hax-export-json-schemas}
HAX_ENGINE_NAMES_EXTRACT_BINARY=${HAX_ENGINE_NAMES_EXTRACT_BINARY:-hax-engine-names-extract}

# Print a digest for each binary so dune can detect changes; abort with
# a diagnostic when a binary is not installed.
for binary in "$HAX_JSON_SCHEMA_EXPORTER_BINARY" "$HAX_ENGINE_NAMES_EXTRACT_BINARY"; do
    if BIN=$(command -v "$binary"); then
        hash "$BIN"
    else
        error "$binary"
    fi
done



================================================
FILE: examples/.envrc
================================================
use flake .#examples


================================================
FILE: examples/.gitignore
================================================
/*/target
/*/proofs/lean/extraction
/*/proofs/proverif/extraction
/*/proofs/lean/.lake


================================================
FILE: examples/Cargo.toml
================================================
[workspace]
members = [
    "chacha20",
    "lean_chacha20",
    "lean_barrett",
    "lean_adc",
    "lean_tutorial",
    "limited-order-book",
    "sha256",
    "barrett",
    "kyber_compress",
    "proverif-psk",
    "coq-example",
    "coverage",
]
resolver = "2"

[workspace.dependencies]
hax-lib = { path = "../hax-lib" }
hax-bounded-integers = { path = "../hax-bounded-integers" }


================================================
FILE: examples/Makefile
================================================
# Both targets are phony: previously only `default` was declared, so a
# stray file named `clean` would have silently disabled `make clean`.
.PHONY: default clean

# Extract and typecheck every example (each sub-make runs hax and then
# the relevant proof tool).
default:
	make -C limited-order-book
	make -C chacha20
	make -C sha256
	make -C barrett
	make -C kyber_compress
	make -C proverif-psk
	make -C lean_chacha20
	make -C lean_barrett

# Remove generated proof artifacts in every example.
clean:
	make -C limited-order-book clean
	make -C chacha20           clean
	make -C sha256             clean
	make -C barrett            clean
	make -C kyber_compress     clean
	make -C proverif-psk       clean
	make -C lean_chacha20      clean
	make -C lean_barrett       clean


================================================
FILE: examples/README.md
================================================
# Examples

| Name               | Status of the F\* extraction |
| ------------------ | ---------------------------- |
| chacha20           | Typechecks                   |
| limited-order-book | Typechecks                   |
| sha256             | Lax-typechecks               |
| barrett            | Typechecks                   |
| kyber_compress     | Typechecks                   |

## How to generate the F\* code and typecheck it for the examples

Requirements First, make sure to have hax installed in PATH. Then: * With Nix, `nix develop .#examples` setups a shell automatically for you. * Without Nix: 1. install F* `v2025.10.06` manually (see https://github.com/FStarLang/FStar/blob/master/INSTALL.md); 1. make sure to have `fstar.exe` in PATH; 2. or set the `FSTAR_HOME` environment variable. 2. clone [Hacl*](https://github.com/hacl-star/hacl-star) somewhere; 3. `export HACL_HOME=THE_DIRECTORY_WHERE_YOU_HAVE_HACL_STAR`.
To generate F\* code for all the example and then typecheck everything, just run `make` in this directory. Running `make` will run `make` in each example directory, which in turn will generate F\* modules using hax and then typecheck those modules using F\*. Note the generated modules live in the `/proofs/fstar/extraction` folders. ## Coq For those examples, we generated Coq modules without typechecking them. The `/proofs/coq/extraction` folders contain the generated Coq modules. ## Lean Three examples are fine-tuned to showcase the Lean backend: `lean_barrett`, `lean_chacha20`, and `lean_adc`. For all of them, the lean extraction can be obtained by running `cargo hax into lean`. ### Barrett The *Barrett reduction* allows to compute remainders without using divisions. It showcases arithmetic operations, conversions between integer types (namely `i32` and `i64`). The Lean backend provides *panicking* arithmetic operations `+?`, `-?`, etc, that panic on overflows. For the Lean extracted code, we prove panic freedom with regards to those arithmetic operations, and then we prove that the result is indeed the modulus (as long as the absolute value of the input is lower than the bound `BARRETT_R`). The proof is made via bit-blasting (using Lean's `bv_decide`). To limit the computation time, the bound `BARRETT_R` was lowered compared to the normal example in the `barrett` folder. The proofs are backported in the rust code (in `lean_barrett/src/lib.rs`): doing `cargo hax into lean` extracts a valid lean file that contains the proof. The proof can be run by doing (requires `lake`): ```sh cd lean_barrett/ make ``` ### ADC (Addition with Carry) The *ADC* (addition with carry) example verifies a 32-bit limb addition with carry, a fundamental building block in multi-precision (bignum) arithmetic. It uses `#[hax_lib::lean::after(...)]` to embed a Lean 4 correctness theorem directly after the extracted function definition. 
The precondition and postcondition are expressed as pure Lean propositions in a Hoare triple, and the proof is fully automated via `hax_mvcgen` and Lean's `bv_decide` bit-blasting procedure. The verified property states that the 64-bit sum `a + b + carry_in` is correctly split into a 32-bit sum and a 1-bit carry output. The proof can be run by doing (requires `lake`): ```sh cd lean_adc/ make ``` ### Chacha20 The Chacha20 example extracts to Lean, but requires a manual edit to be wellformed. It showcases array, vector and slices accesses, as well as loops (with loop invariants). For the Lean extracted code, we prove panic freedom, which involves arithmetic on size of arrays. This edit and the proofs of panic freedom can be found in `lean_chacha20/proofs/lean/extraction/lean_chacha20_manual_edit.lean`. The extraction (in `lean_chacha20.lean`) and rerun of the proofs (in `lean_chacha20_manual_edit.lean`) can be done by doing (requires `lake`): ```sh cd lean_chacha20/ make ``` ================================================ FILE: examples/barrett/Cargo.toml ================================================ [package] name = "barrett" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hax-lib.workspace = true ================================================ FILE: examples/barrett/Makefile ================================================ .PHONY: default fstar rust clean default: fstar rust make -C proofs/fstar/extraction fstar: cargo hax into fstar rust: cargo hax into rust cd proofs/rust/extraction cargo build clean: rm -f proofs/fstar/extraction/.depend rm -f proofs/fstar/extraction/*.fst rm -f proofs/rust/extraction/*.rs ================================================ FILE: examples/barrett/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this 
out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HACL_HOME ?= $(HOME)/.hax/hacl_home FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= .cache HINT_DIR ?= .hints SHELL ?= /usr/bin/env bash EXECUTABLES = cargo cargo-hax jq K := $(foreach bin,$(EXECUTABLES),\ $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH"))) .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify HAX_CLI = "cargo hax into fstar --z3rlimit 100" # If $HACL_HOME doesn't exist, clone it ${HACL_HOME}: mkdir -p "${HACL_HOME}" git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}" # If no any F* file is detected, we run hax ifeq "$(wildcard *.fst *fsti)" "" $(shell $(SHELL) -c $(HAX_CLI)) endif # By default, we process all the files in the current directory ROOTS = $(wildcard *.fst *fsti) # Regenerate F* files via hax when Rust sources change $(ROOTS): $(shell find ../../../src -type f -name '*.rs') $(shell $(SHELL) -c $(HAX_CLI)) # The following is a bash script that discovers F* libraries define FINDLIBS # Prints a path if and only if it exists. Takes one argument: the # path. function print_if_exists() { if [ -d "$$1" ]; then echo "$$1" fi } # Asks Cargo all the dependencies for the current crate or workspace, # and extract all "root" directories for each. Takes zero argument. function dependencies() { cargo metadata --format-version 1 | jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")' } # Find hax libraries *around* a given path. Takes one argument: the # path. 
function find_hax_libraries_at_path() { path="$$1" # if there is a `proofs/fstar/extraction` subfolder, then that's a # F* library print_if_exists "$$path/proofs/fstar/extraction" # Maybe the `proof-libs` folder of hax is around? MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") if [ $$? -eq 0 ]; then print_if_exists "$$MAYBE_PROOF_LIBS/core" print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" fi } { while IFS= read path; do find_hax_libraries_at_path "$$path" done < <(dependencies) } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS") FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: examples/barrett/proofs/lean/lakefile.toml ================================================ name = "barrett" version = "0.1.0" defaultTargets = ["barrett"] [[lean_lib]] name = "barrett" roots = ["extraction.Barrett"] [[require]] name = "Hax" path = "../../../../proof-libs/lean" ================================================ FILE: examples/barrett/proofs/lean/lean-toolchain 
================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: examples/barrett/proofs/rust/extraction/Cargo.toml ================================================ [package] name = "extraction" version = "0.1.0" edition = "2024" [lib] path = "barrett.rs" [dependencies] [workspace] ================================================ FILE: examples/barrett/src/lib.rs ================================================ use hax_lib as hax; /// Values having this type hold a representative 'x' of the Kyber field. /// We use 'fe' as a shorthand for this type. pub(crate) type FieldElement = i32; const BARRETT_SHIFT: i64 = 26; const BARRETT_R: i64 = 0x4000000; // 2^26 /// This is calculated as ⌊(BARRETT_R / FIELD_MODULUS) + 1/2⌋ const BARRETT_MULTIPLIER: i64 = 20159; pub(crate) const FIELD_MODULUS: i32 = 3329; /// Signed Barrett Reduction /// /// Given an input `value`, `barrett_reduce` outputs a representative `result` /// such that: /// /// - result ≡ value (mod FIELD_MODULUS) /// - the absolute value of `result` is bound as follows: /// /// `|result| ≤ FIELD_MODULUS / 2 · (|value|/BARRETT_R + 1) /// /// In particular, if `|value| < BARRETT_R`, then `|result| < FIELD_MODULUS`. 
#[hax_lib::fstar::options("--z3rlimit 100")] #[hax::requires((i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R))] #[hax::ensures(|result| result > -FIELD_MODULUS && result < FIELD_MODULUS && result % FIELD_MODULUS == value % FIELD_MODULUS)] pub fn barrett_reduce(value: FieldElement) -> FieldElement { let t = i64::from(value) * BARRETT_MULTIPLIER; // assert!(9223372036854775807 - (BARRETT_R >> 1) > t); let t = t + (BARRETT_R >> 1); let quotient = t >> BARRETT_SHIFT; // assert!(quotient <= 2147483647_i64 || quotient >= -2147483648_i64); let quotient = quotient as i32; // assert!(((quotient as i64) * (FIELD_MODULUS as i64)) < 9223372036854775807); let sub = quotient * FIELD_MODULUS; hax::fstar!(r"Math.Lemmas.cancel_mul_mod (v $quotient) 3329"); value - sub } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { fn test(val: FieldElement, expected: FieldElement) { let reduced = barrett_reduce(val); assert_eq!(reduced, expected); } test(FIELD_MODULUS + 1, 1); test(FIELD_MODULUS, 0); test(FIELD_MODULUS - 1, -1); test(FIELD_MODULUS + (FIELD_MODULUS - 1), -1); test(FIELD_MODULUS + (FIELD_MODULUS + 1), 1); test(1234, 1234); test(9876, -111); test(4327, 4327 % FIELD_MODULUS) } } ================================================ FILE: examples/chacha20/Cargo.toml ================================================ [package] name = "chacha20" version = "0.1.0" authors = ["Franziskus Kiefer "] edition = "2021" [dependencies] hax-lib.workspace = true hax-bounded-integers.workspace = true ================================================ FILE: examples/chacha20/Makefile ================================================ .PHONY: default clean default: make -C proofs/fstar/extraction clean: rm -f proofs/fstar/extraction/.depend rm -f proofs/fstar/extraction/*.fst ================================================ FILE: examples/chacha20/proofs/coq/extraction/Chacha20.Hacspec_helper.v ================================================ (* File automatically generated by 
Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Super. (* as State *) (*Not implemented yet? todo(item)*) Definition to_le_u32s_3_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length := let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 3)) : nseq int32 TODO: Int.to_string length in let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 3))) out (fun out i => update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in out. Definition to_le_u32s_8_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length := let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 8)) : nseq int32 TODO: Int.to_string length in let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 8))) out (fun out i => update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in out. Definition to_le_u32s_16_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length := let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 16)) : nseq int32 TODO: Int.to_string length in let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) out (fun out i => update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in out. 
Definition u32s_to_le_bytes (state : nseq int32 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length := let out := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len (unsize state)))) out (fun out i => let tmp := (impl__u32__to_le_bytes (state.[i])) : nseq int8 TODO: Int.to_string length in f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 4))) out (fun out j => update_at out ((i.*(@repr WORDSIZE32 4)).+j) (tmp.[j])))) : nseq int8 TODO: Int.to_string length in out. Definition xor_state (state : nseq int32 TODO: Int.to_string length) (other : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) state (fun state i => update_at state i ((state.[i]).^(other.[i])))) : nseq int32 TODO: Int.to_string length in state. Definition add_state (state : nseq int32 TODO: Int.to_string length) (other : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) state (fun state i => update_at state i (impl__u32__wrapping_add (state.[i]) (other.[i])))) : nseq int32 TODO: Int.to_string length in state. Definition update_array (array : nseq int8 TODO: Int.to_string length) (val : seq int8) : nseq int8 TODO: Int.to_string length := let _ := (if not ((@repr WORDSIZE32 64)>=.?(impl__len val)) then never_to_any (panic assertion failed: 64 >= val.len()) else tt) : unit in let array := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len val))) array (fun array i => update_at array i (val.[i]))) : nseq int8 TODO: Int.to_string length in array. 
================================================ FILE: examples/chacha20/proofs/coq/extraction/Chacha20.v ================================================ (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. (*Not implemented yet? todo(item)*) (*Not implemented yet? todo(item)*) Require Import Hacspec_helper. Require Import Hax_lib_macros. (* as hax *) Notation t_State_t := (nseq int32 TODO: Int.to_string length). Notation t_Block_t := (nseq int8 TODO: Int.to_string length). Notation t_ChaChaIV_t := (nseq int8 TODO: Int.to_string length). Notation t_ChaChaKey_t := (nseq int8 TODO: Int.to_string length). Definition chacha20_line (a : uint_size) (b : uint_size) (d : uint_size) (s : int32) (m : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (m) : nseq int32 TODO: Int.to_string length in let state := (update_at state a (impl__u32__wrapping_add (state.[a]) (state.[b]))) : nseq int32 TODO: Int.to_string length in let state := (update_at state d ((state.[d]).^(state.[a]))) : nseq int32 TODO: Int.to_string length in let state := (update_at state d (impl__u32__rotate_left (state.[d]) s)) : nseq int32 TODO: Int.to_string length in state. Definition chacha20_quarter_round (a : uint_size) (b : uint_size) (c : uint_size) (d : uint_size) (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (chacha20_line a b d (@repr WORDSIZE32 16) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_line c d b (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_line a b d (@repr WORDSIZE32 8) state) : nseq int32 TODO: Int.to_string length in chacha20_line c d b (@repr WORDSIZE32 7) state. 
Definition chacha20_double_round (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (chacha20_quarter_round (@repr WORDSIZE32 0) (@repr WORDSIZE32 4) (@repr WORDSIZE32 8) (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 1) (@repr WORDSIZE32 5) (@repr WORDSIZE32 9) (@repr WORDSIZE32 13) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 2) (@repr WORDSIZE32 6) (@repr WORDSIZE32 10) (@repr WORDSIZE32 14) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 3) (@repr WORDSIZE32 7) (@repr WORDSIZE32 11) (@repr WORDSIZE32 15) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 0) (@repr WORDSIZE32 5) (@repr WORDSIZE32 10) (@repr WORDSIZE32 15) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 1) (@repr WORDSIZE32 6) (@repr WORDSIZE32 11) (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in let state := (chacha20_quarter_round (@repr WORDSIZE32 2) (@repr WORDSIZE32 7) (@repr WORDSIZE32 8) (@repr WORDSIZE32 13) state) : nseq int32 TODO: Int.to_string length in chacha20_quarter_round (@repr WORDSIZE32 3) (@repr WORDSIZE32 4) (@repr WORDSIZE32 9) (@repr WORDSIZE32 14) state. Definition chacha20_rounds (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let st := (state) : nseq int32 TODO: Int.to_string length in let st := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 10))) st (fun st _i => chacha20_double_round st)) : nseq int32 TODO: Int.to_string length in st. 
Definition chacha20_core (ctr : int32) (st0 : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let state := (st0) : nseq int32 TODO: Int.to_string length in let state := (update_at state (@repr WORDSIZE32 12) (impl__u32__wrapping_add (state.[(@repr WORDSIZE32 12)]) ctr)) : nseq int32 TODO: Int.to_string length in let k := (chacha20_rounds state) : nseq int32 TODO: Int.to_string length in add_state state k. Definition chacha20_init (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 TODO: Int.to_string length) (ctr : int32) : nseq int32 TODO: Int.to_string length := let key_u32 := (to_le_u32s_8_ (unsize key)) : nseq int32 TODO: Int.to_string length in let iv_u32 := (to_le_u32s_3_ (unsize iv)) : nseq int32 TODO: Int.to_string length in array_from_list [(@repr WORDSIZE32 1634760805); (@repr WORDSIZE32 857760878); (@repr WORDSIZE32 2036477234); (@repr WORDSIZE32 1797285236); key_u32.[(@repr WORDSIZE32 0)]; key_u32.[(@repr WORDSIZE32 1)]; key_u32.[(@repr WORDSIZE32 2)]; key_u32.[(@repr WORDSIZE32 3)]; key_u32.[(@repr WORDSIZE32 4)]; key_u32.[(@repr WORDSIZE32 5)]; key_u32.[(@repr WORDSIZE32 6)]; key_u32.[(@repr WORDSIZE32 7)]; ctr; iv_u32.[(@repr WORDSIZE32 0)]; iv_u32.[(@repr WORDSIZE32 1)]; iv_u32.[(@repr WORDSIZE32 2)]]. Definition chacha20_key_block (state : nseq int32 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length := let state := (chacha20_core (@repr WORDSIZE32 0) state) : nseq int32 TODO: Int.to_string length in u32s_to_le_bytes state. Definition chacha20_key_block0 (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length := let state := (chacha20_init key iv (@repr WORDSIZE32 0)) : nseq int32 TODO: Int.to_string length in chacha20_key_block state. 
Definition chacha20_encrypt_block (st0 : nseq int32 TODO: Int.to_string length) (ctr : int32) (plain : nseq int8 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length := let st := (chacha20_core ctr st0) : nseq int32 TODO: Int.to_string length in let pl := (to_le_u32s_16_ (unsize plain)) : nseq int32 TODO: Int.to_string length in let encrypted := (xor_state st pl) : nseq int32 TODO: Int.to_string length in u32s_to_le_bytes encrypted. Definition chacha20_encrypt_last (st0 : nseq int32 TODO: Int.to_string length) (ctr : int32) (plain : seq int8) : t_Vec_t (int8) (t_Global_t) := let b := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in let b := (update_array b plain) : nseq int8 TODO: Int.to_string length in let b := (chacha20_encrypt_block st0 ctr b) : nseq int8 TODO: Int.to_string length in impl__to_vec (b.[(Build_Range (@repr WORDSIZE32 0)(impl__len plain))]). Definition chacha20_update (st0 : nseq int32 TODO: Int.to_string length) (m : seq int8) : t_Vec_t (int8) (t_Global_t) := let blocks_out := (impl__new) : t_Vec_t (int8) (t_Global_t) in let num_blocks := ((impl__len m)./(@repr WORDSIZE32 64)) : uint_size in let remainder_len := ((impl__len m).%(@repr WORDSIZE32 64)) : uint_size in let blocks_out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)num_blocks)) blocks_out (fun blocks_out i => let i := (i) : uint_size in let b := (chacha20_encrypt_block st0 (cast i) (impl__unwrap (f_try_into (m.[(Build_Range ((@repr WORDSIZE32 64).*i)(((@repr WORDSIZE32 64).*i).+(@repr WORDSIZE32 64)))])))) : nseq int8 TODO: Int.to_string length in let blocks_out := (impl_2__extend_from_slice blocks_out (unsize b)) : t_Vec_t (int8) (t_Global_t) in blocks_out)) : t_Vec_t (int8) (t_Global_t) in let blocks_out := (if remainder_len<>(@repr WORDSIZE32 0) then let b := (chacha20_encrypt_last st0 (cast num_blocks) (m.[(Build_Range ((@repr WORDSIZE32 64).*num_blocks)(impl__len m))])) : t_Vec_t (int8) (t_Global_t) in let 
blocks_out := (impl_2__extend_from_slice blocks_out (f_deref b)) : t_Vec_t (int8) (t_Global_t) in blocks_out else blocks_out) : t_Vec_t (int8) (t_Global_t) in blocks_out. Definition chacha20 (m : seq int8) (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 TODO: Int.to_string length) (ctr : int32) : t_Vec_t (int8) (t_Global_t) := let state := (chacha20_init key iv ctr) : nseq int32 TODO: Int.to_string length in chacha20_update state m. ================================================ FILE: examples/chacha20/proofs/fstar/extraction/Chacha20.Hacspec_helper.fst ================================================ module Chacha20.Hacspec_helper #set-options "--fuel 0 --ifuel 1 --z3rlimit 40" open FStar.Mul open Core_models let to_le_u32s_3_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 3) = let out:t_Array u32 (mk_usize 3) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 3) in let out:t_Array u32 (mk_usize 3) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 3) (fun out temp_1_ -> let out:t_Array u32 (mk_usize 3) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u32 (mk_usize 3) = out in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out i (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 4)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 4)) #FStar.Tactics.Typeclasses.solve (bytes.[ { Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize; Core_models.Ops.Range.f_end = (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 4)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 4)) <: u32) <: t_Array u32 (mk_usize 3)) in out let to_le_u32s_8_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 8) = let out:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 8) in let out:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 8) (fun out temp_1_ -> let out:t_Array u32 (mk_usize 8) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u32 (mk_usize 8) = out in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out i (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 4)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 4)) #FStar.Tactics.Typeclasses.solve (bytes.[ { Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize; Core_models.Ops.Range.f_end = (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 4)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 4)) <: u32) <: t_Array u32 (mk_usize 8)) in out let to_le_u32s_16_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 16) = let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 16) in let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 16) (fun out temp_1_ -> let out:t_Array u32 (mk_usize 16) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u32 (mk_usize 16) = out in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out i (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 4)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 4)) #FStar.Tactics.Typeclasses.solve (bytes.[ { Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize; Core_models.Ops.Range.f_end = (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 4)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 4)) <: u32) <: t_Array u32 (mk_usize 16)) in out let u32s_to_le_bytes (state: t_Array u32 (mk_usize 16)) : t_Array u8 (mk_usize 64) = let out:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64) in let out:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (Core_models.Slice.impl__len #u32 (state <: t_Slice u32) <: usize) (fun out temp_1_ -> let out:t_Array u8 (mk_usize 64) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u8 (mk_usize 64) = out in let i:usize = i in let tmp:t_Array u8 (mk_usize 4) = Core_models.Num.impl_u32__to_le_bytes (state.[ i ] <: u32) in Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 4) (fun out temp_1_ -> let out:t_Array u8 (mk_usize 64) = out in let _:usize = temp_1_ in true) out (fun out j -> let out:t_Array u8 (mk_usize 64) = out in let j:usize = j in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out ((i *! mk_usize 4 <: usize) +! j <: usize) (tmp.[ j ] <: u8) <: t_Array u8 (mk_usize 64))) in out let xor_state (state other: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 16) (fun state temp_1_ -> let state:t_Array u32 (mk_usize 16) = state in let _:usize = temp_1_ in true) state (fun state i -> let state:t_Array u32 (mk_usize 16) = state in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state i ((state.[ i ] <: u32) ^. 
(other.[ i ] <: u32) <: u32) <: t_Array u32 (mk_usize 16)) in state let add_state (state other: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 16) (fun state temp_1_ -> let state:t_Array u32 (mk_usize 16) = state in let _:usize = temp_1_ in true) state (fun state i -> let state:t_Array u32 (mk_usize 16) = state in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state i (Core_models.Num.impl_u32__wrapping_add (state.[ i ] <: u32) (other.[ i ] <: u32) <: u32 ) <: t_Array u32 (mk_usize 16)) in state let update_array (array: t_Array u8 (mk_usize 64)) (v_val: t_Slice u8) : t_Array u8 (mk_usize 64) = let _:Prims.unit = Hax_lib.v_assert (mk_usize 64 >=. (Core_models.Slice.impl__len #u8 v_val <: usize) <: bool) in let array:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (Core_models.Slice.impl__len #u8 v_val <: usize) (fun array temp_1_ -> let array:t_Array u8 (mk_usize 64) = array in let _:usize = temp_1_ in true) array (fun array i -> let array:t_Array u8 (mk_usize 64) = array in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array i (v_val.[ i ] <: u8) <: t_Array u8 (mk_usize 64)) in array ================================================ FILE: examples/chacha20/proofs/fstar/extraction/Chacha20.fst ================================================ module Chacha20 #set-options "--fuel 0 --ifuel 1 --z3rlimit 40" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Hax_bounded_integers in () let chacha20_line (a b d: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (s: u32) (m: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = m in let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.update_at state a (Core_models.Num.impl_u32__wrapping_add (state.[ a ] <: u32) (state.[ b ] <: u32) <: u32) in let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.update_at state d ((state.[ d ] <: u32) ^. (state.[ a ] <: u32) <: u32) in let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.update_at state d (Core_models.Num.impl_u32__rotate_left (state.[ d ] <: u32) s <: u32) in state let chacha20_quarter_round (a b c d: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (state: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = chacha20_line a b d (mk_u32 16) state in let state:t_Array u32 (mk_usize 16) = chacha20_line c d b (mk_u32 12) state in let state:t_Array u32 (mk_usize 16) = chacha20_line a b d (mk_u32 8) state in chacha20_line c d b (mk_u32 7) state let chacha20_double_round (state: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 0 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 4 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 8 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 12 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 1 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 5 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 9 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 13 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state 
in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 2 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 6 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 10 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 14 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 3 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 7 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 11 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 15 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 0 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 5 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 10 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 15 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 1 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 6 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 11 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 12 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in let state:t_Array u32 (mk_usize 16) = chacha20_quarter_round (mk_usize 2 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 7 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 8 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 13 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state in chacha20_quarter_round (mk_usize 3 <: 
Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 4 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 9 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) (mk_usize 14 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15)) state let chacha20_rounds (state: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let st:t_Array u32 (mk_usize 16) = state in let st:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun st temp_1_ -> let st:t_Array u32 (mk_usize 16) = st in let _:i32 = temp_1_ in true) st (fun st e_i -> let st:t_Array u32 (mk_usize 16) = st in let e_i:i32 = e_i in chacha20_double_round st <: t_Array u32 (mk_usize 16)) in st let chacha20_core (ctr: u32) (st0: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) = let state:t_Array u32 (mk_usize 16) = st0 in let state:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state (mk_usize 12) (Core_models.Num.impl_u32__wrapping_add (state.[ mk_usize 12 ] <: u32) ctr <: u32) in let k:t_Array u32 (mk_usize 16) = chacha20_rounds state in Chacha20.Hacspec_helper.add_state state k let chacha20_init (key: t_Array u8 (mk_usize 32)) (iv: t_Array u8 (mk_usize 12)) (ctr: u32) : t_Array u32 (mk_usize 16) = let (key_u32: t_Array u32 (mk_usize 8)):t_Array u32 (mk_usize 8) = Chacha20.Hacspec_helper.to_le_u32s_8_ (key <: t_Slice u8) in let (iv_u32: t_Array u32 (mk_usize 3)):t_Array u32 (mk_usize 3) = Chacha20.Hacspec_helper.to_le_u32s_3_ (iv <: t_Slice u8) in let list = [ mk_u32 1634760805; mk_u32 857760878; mk_u32 2036477234; mk_u32 1797285236; key_u32.[ mk_usize 0 ]; key_u32.[ mk_usize 1 ]; key_u32.[ mk_usize 2 ]; key_u32.[ mk_usize 3 ]; key_u32.[ mk_usize 4 ]; key_u32.[ mk_usize 5 ]; key_u32.[ mk_usize 6 ]; key_u32.[ mk_usize 7 ]; ctr; iv_u32.[ mk_usize 0 ]; iv_u32.[ mk_usize 1 ]; iv_u32.[ mk_usize 2 ] ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length 
list) 16); Rust_primitives.Hax.array_of_list 16 list let chacha20_key_block (state: t_Array u32 (mk_usize 16)) : t_Array u8 (mk_usize 64) = let state:t_Array u32 (mk_usize 16) = chacha20_core (mk_u32 0) state in Chacha20.Hacspec_helper.u32s_to_le_bytes state let chacha20_key_block0 (key: t_Array u8 (mk_usize 32)) (iv: t_Array u8 (mk_usize 12)) : t_Array u8 (mk_usize 64) = let state:t_Array u32 (mk_usize 16) = chacha20_init key iv (mk_u32 0) in chacha20_key_block state let chacha20_encrypt_block (st0: t_Array u32 (mk_usize 16)) (ctr: u32) (plain: t_Array u8 (mk_usize 64)) : t_Array u8 (mk_usize 64) = let st:t_Array u32 (mk_usize 16) = chacha20_core ctr st0 in let (pl: t_Array u32 (mk_usize 16)):t_Array u32 (mk_usize 16) = Chacha20.Hacspec_helper.to_le_u32s_16_ (plain <: t_Slice u8) in let encrypted:t_Array u32 (mk_usize 16) = Chacha20.Hacspec_helper.xor_state st pl in Chacha20.Hacspec_helper.u32s_to_le_bytes encrypted let chacha20_encrypt_last (st0: t_Array u32 (mk_usize 16)) (ctr: u32) (plain: t_Slice u8) : Prims.Pure (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) (requires (Core_models.Slice.impl__len #u8 plain <: usize) <=. mk_usize 64) (fun _ -> Prims.l_True) = let (b: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64) in let b:t_Array u8 (mk_usize 64) = Chacha20.Hacspec_helper.update_array b plain in let b:t_Array u8 (mk_usize 64) = chacha20_encrypt_block st0 ctr b in Alloc.Slice.impl__to_vec #u8 (b.[ { Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = Core_models.Slice.impl__len #u8 plain <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) let chacha20_update (st0: t_Array u32 (mk_usize 16)) (m: t_Slice u8) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in let num_blocks:usize = (Core_models.Slice.impl__len #u8 m <: usize) /! 
mk_usize 64 in let remainder_len:usize = (Core_models.Slice.impl__len #u8 m <: usize) %! mk_usize 64 in let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) num_blocks (fun blocks_out temp_1_ -> let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = blocks_out in let _:usize = temp_1_ in true) blocks_out (fun blocks_out i -> let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = blocks_out in let i:usize = i in let b:t_Array u8 (mk_usize 64) = chacha20_encrypt_block st0 (cast (i <: usize) <: u32) (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 64)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 64)) #FStar.Tactics.Typeclasses.solve (m.[ { Core_models.Ops.Range.f_start = mk_usize 64 *! i <: usize; Core_models.Ops.Range.f_end = (mk_usize 64 *! i <: usize) +! mk_usize 64 <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 64)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 64)) in let _:Prims.unit = Hax_lib.v_assume (b2t ((Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global blocks_out <: usize) =. (i *! mk_usize 64 <: usize) <: bool)) in let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_2__extend_from_slice #u8 #Alloc.Alloc.t_Global blocks_out (b <: t_Slice u8) in blocks_out) in let _:Prims.unit = Hax_lib.v_assume (b2t ((Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global blocks_out <: usize) =. (num_blocks *! mk_usize 64 <: usize) <: bool)) in let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = if remainder_len <>. mk_usize 0 then let b:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = chacha20_encrypt_last st0 (cast (num_blocks <: usize) <: u32) (m.[ { Core_models.Ops.Range.f_start = mk_usize 64 *! 
// Tail of `chacha20_update` (generated by hax — do not hand-edit): when
// `remainder_len` is non-zero, encrypts the final partial chunk of `m` with
// counter `num_blocks` via `chacha20_encrypt_last` and appends it. Then
// `chacha20`, the top-level entry point: builds the initial state from
// key/iv/ctr with `chacha20_init` and streams `m` through `chacha20_update`.
// NOTE(review): the rest of this line is extract-dump residue — the `FILE:`
// separator and the header comment of the next file (the F* Makefile).
num_blocks <: usize; Core_models.Ops.Range.f_end = Core_models.Slice.impl__len #u8 m <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) in let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_2__extend_from_slice #u8 #Alloc.Alloc.t_Global blocks_out (Core_models.Ops.Deref.f_deref #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) #FStar.Tactics.Typeclasses.solve b <: t_Slice u8) in blocks_out else blocks_out in blocks_out let chacha20 (m: t_Slice u8) (key: t_Array u8 (mk_usize 32)) (iv: t_Array u8 (mk_usize 12)) (ctr: u32) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = let state:t_Array u32 (mk_usize 16) = chacha20_init key iv ctr in chacha20_update state m ================================================ FILE: examples/chacha20/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make."
# (with-demoted-errors "Error when constructing arg string: %S"
#   (let* ((fname (file-name-nondirectory buffer-file-name))
#          (target (concat fname "-in"))
#          (argstr (car (process-lines "make" "--quiet" target))))
#     (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

# Where the HACL* support library lives (cloned below if absent).
HACL_HOME ?= $(HOME)/.hax/hacl_home
# Prefer `fstar.exe` from PATH; fall back to $FSTAR_HOME/bin/fstar.exe.
FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe")
CACHE_DIR ?= .cache
HINT_DIR ?= .hints
SHELL ?= /usr/bin/env bash

# Fail fast at parse time if a required executable is missing from PATH.
EXECUTABLES = cargo cargo-hax jq
K := $(foreach bin,$(EXECUTABLES),\
        $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH")))

.PHONY: all verify clean

all:
	rm -f .depend && $(MAKE) .depend
	$(MAKE) verify

# Default hax invocation
HAX_CLI = "cargo hax into fstar --z3rlimit 40"

# If $HACL_HOME doesn't exist, clone it
${HACL_HOME}:
	mkdir -p "${HACL_HOME}"
	git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}"

# If no F* file is detected, run hax to generate them.
# (pattern fixed: was `*fsti`, which only matched `.fsti` files by
# suffix accident; `*.fsti` is the intended glob)
ifeq "$(wildcard *.fst *.fsti)" ""
$(shell $(SHELL) -c $(HAX_CLI))
endif

# By default, we process all the files in the current directory
ROOTS = $(wildcard *.fst *.fsti)

# Regenerate F* files via hax when Rust sources change
$(ROOTS): $(shell find ../../../src -type f -name '*.rs')
	$(shell $(SHELL) -c $(HAX_CLI))

# The following is a bash script that discovers F* libraries
define FINDLIBS
    # Prints a path if and only if it exists. Takes one argument: the
    # path.
    function print_if_exists() {
        if [ -d "$$1" ]; then
            echo "$$1"
        fi
    }
    # Asks Cargo all the dependencies for the current crate or workspace,
    # and extract all "root" directories for each. Takes zero argument.
    function dependencies() {
        cargo metadata --format-version 1 |
            jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")'
    }
    # Find hax libraries *around* a given path. Takes one argument: the
    # path.
    function find_hax_libraries_at_path() {
        path="$$1"
        # if there is a `proofs/fstar/extraction` subfolder, then that's a
        # F* library
        print_if_exists "$$path/proofs/fstar/extraction"
        # Maybe the `proof-libs` folder of hax is around?
        MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar")
        if [ $$? -eq 0 ]; then
            print_if_exists "$$MAYBE_PROOF_LIBS/core"
            print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives"
        fi
    }
    { while IFS= read path; do
          find_hax_libraries_at_path "$$path"
      done < <(dependencies)
    } | sort -u
endef
export FINDLIBS

FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS")

FSTAR_FLAGS = --cmi \
  --warn_error -331 \
  --cache_checked_modules --cache_dir $(CACHE_DIR) \
  --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))

FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)

.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)
	$(info $(ROOTS))
	$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@

include .depend

$(HINT_DIR):
	mkdir -p $@

$(CACHE_DIR):
	mkdir -p $@

$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)
	$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints

verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))

# Targets for interactive mode
%.fst-in:
	$(info $(FSTAR_FLAGS) \
	  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)

%.fsti-in:
	$(info $(FSTAR_FLAGS) \
	  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)

# Clean targets
clean:
	rm -rf $(CACHE_DIR)/*
	rm -f *.fst

# Special rule for `Chacha20.Hacspec_helper`: it is only lax-checked.
$(CACHE_DIR)/Chacha20.Hacspec_helper.fst.checked: | .depend $(HINT_DIR) $(CACHE_DIR)
	$(FSTAR) --lax $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints
================================================ FILE: examples/chacha20/src/hacspec_helper.rs ================================================
use super::State;

// pub(super) fn to_le_u32s(bytes: &[u8]) -> [u32; L] {
//     assert_eq!(L,
bytes.len() / 4); // let mut out = [0; L]; // for (i, block) in bytes.chunks(4).enumerate() { // out[i] = u32::from_le_bytes(block.try_into().unwrap()); // } // out // } macro_rules! to_le_u32s_impl { ($name:ident,$l:literal) => { pub(super) fn $name(bytes: &[u8]) -> [u32; $l] { // assert_eq!($l, bytes.len() / 4); let mut out = [0; $l]; // for (i, block) in bytes.chunks(4).enumerate() { for i in 0..$l { out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap()); } out } }; } to_le_u32s_impl!(to_le_u32s_3, 3); to_le_u32s_impl!(to_le_u32s_8, 8); to_le_u32s_impl!(to_le_u32s_16, 16); pub(super) fn u32s_to_le_bytes(state: &[u32; 16]) -> [u8; 64] { // let mut out = [0; 64]; for i in 0..state.len() { let tmp = state[i].to_le_bytes(); for j in 0..4 { out[i * 4 + j] = tmp[j]; } } out } pub(super) fn xor_state(mut state: State, other: State) -> State { for i in 0..16 { state[i] = state[i] ^ other[i]; } state } pub(super) fn add_state(mut state: State, other: State) -> State { for i in 0..16 { state[i] = state[i].wrapping_add(other[i]); } state } pub(super) fn update_array(mut array: [u8; 64], val: &[u8]) -> [u8; 64] { // assert!(64 >= val.len()); for i in 0..val.len() { array[i] = val[i]; } array } ================================================ FILE: examples/chacha20/src/lib.rs ================================================ mod hacspec_helper; use hacspec_helper::*; use hax_lib as hax; type State = [u32; 16]; type Block = [u8; 64]; type ChaChaIV = [u8; 12]; type ChaChaKey = [u8; 32]; type StateIdx = hax_bounded_integers::BoundedUsize<0, 15>; fn chacha20_line(a: StateIdx, b: StateIdx, d: StateIdx, s: u32, m: State) -> State { let mut state = m; state[a] = state[a].wrapping_add(state[b]); state[d] = state[d] ^ state[a]; state[d] = state[d].rotate_left(s); state } pub fn chacha20_quarter_round( a: StateIdx, b: StateIdx, c: StateIdx, d: StateIdx, state: State, ) -> State { let state = chacha20_line(a, b, d, 16, state); let state = chacha20_line(c, d, b, 12, 
state); let state = chacha20_line(a, b, d, 8, state); chacha20_line(c, d, b, 7, state) }
use hax_lib::*;
// One ChaCha20 double round: four "column" quarter-rounds on index
// quadruples (0,4,8,12), (1,5,9,13), (2,6,10,14), (3,7,11,15), followed
// by four "diagonal" quarter-rounds on (0,5,10,15), (1,6,11,12),
// (2,7,8,13), (3,4,9,14). Each literal index is wrapped with
// `into_checked()` to build the `StateIdx` bounded integer (0..=15)
// that `chacha20_quarter_round` expects.
fn chacha20_double_round(state: State) -> State { let state = chacha20_quarter_round( 0.into_checked(), 4.into_checked(), 8.into_checked(), 12.into_checked(), state, ); let state = chacha20_quarter_round( 1.into_checked(), 5.into_checked(), 9.into_checked(), 13.into_checked(), state, ); let state = chacha20_quarter_round( 2.into_checked(), 6.into_checked(), 10.into_checked(), 14.into_checked(), state, ); let state = chacha20_quarter_round( 3.into_checked(), 7.into_checked(), 11.into_checked(), 15.into_checked(), state, ); let state = chacha20_quarter_round( 0.into_checked(), 5.into_checked(), 10.into_checked(), 15.into_checked(), state, ); let state = chacha20_quarter_round( 1.into_checked(), 6.into_checked(), 11.into_checked(), 12.into_checked(), state, ); let state = chacha20_quarter_round( 2.into_checked(), 7.into_checked(), 8.into_checked(), 13.into_checked(), state, ); chacha20_quarter_round( 3.into_checked(), 4.into_checked(), 9.into_checked(), 14.into_checked(), state, ) }
// Runs 10 double rounds, i.e. the 20 rounds of ChaCha20.
pub fn chacha20_rounds(state: State) -> State { let mut st = state; for _i in 0..10 { st = chacha20_double_round(st); } st }
// Block function: folds the block counter `ctr` into word 12 with a
// wrapping add, runs the 20 rounds, then adds the pre-round state back
// into the round output word-wise (wrapping) via `add_state`.
pub fn chacha20_core(ctr: u32, st0: State) -> State { let mut state = st0; state[12] = state[12].wrapping_add(ctr); let k = chacha20_rounds(state); add_state(state, k) }
// Builds the initial 16-word state: the four ChaCha constants
// ("expa" "nd 3" "2-by" "te k" read as little-endian u32s), the 8-word
// key, the 32-bit block counter, and the 3-word IV/nonce.
pub fn chacha20_init(key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> State { let key_u32: [u32; 8] = to_le_u32s_8(key); let iv_u32: [u32; 3] = to_le_u32s_3(iv); [ 0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574, key_u32[0], key_u32[1], key_u32[2], key_u32[3], key_u32[4], key_u32[5], key_u32[6], key_u32[7], ctr, iv_u32[0], iv_u32[1], iv_u32[2], ] }
// Keystream block for counter 0, serialized to 64 little-endian bytes.
pub fn chacha20_key_block(state: State) -> Block { let state = chacha20_core(0u32, state); u32s_to_le_bytes(&state) }
// Convenience wrapper: first keystream block for a fresh key/IV (counter 0).
pub fn chacha20_key_block0(key: &ChaChaKey, iv: &ChaChaIV) -> Block { let state = chacha20_init(key, iv, 0u32);
chacha20_key_block(state) }
// XOR-encrypts one full 64-byte block: derive the keystream state for
// counter `ctr`, XOR it word-wise into the plaintext words, re-serialize
// to little-endian bytes.
pub fn chacha20_encrypt_block(st0: State, ctr: u32, plain: &Block) -> Block { let st = chacha20_core(ctr, st0); let pl: State = to_le_u32s_16(plain); let encrypted = xor_state(st, pl); u32s_to_le_bytes(&encrypted) }
// Encrypts the final partial block (`plain.len() <= 64`, stated as a hax
// precondition): zero-pads to 64 bytes via `update_array`, encrypts it as
// a full block, then truncates back to the plaintext length.
// NOTE(review): the return type reads `Vec` here with no type argument;
// the generic parameter (presumably `Vec<u8>`) appears lost in this
// extracted view — confirm against the repository source.
#[hax::requires(plain.len() <= 64)] pub fn chacha20_encrypt_last(st0: State, ctr: u32, plain: &[u8]) -> Vec { let mut b: Block = [0; 64]; b = update_array(b, plain); b = chacha20_encrypt_block(st0, ctr, &b); b[0..plain.len()].to_vec() }
// Counter-mode loop: encrypts every full 64-byte block with counters
// 0..num_blocks, then the remainder (if any) with counter `num_blocks`.
// The `hax_lib::assume!` calls hand the output-length invariant to the
// proof backend; they have no runtime effect on the computed bytes.
pub fn chacha20_update(st0: State, m: &[u8]) -> Vec { let mut blocks_out = Vec::new(); let num_blocks = m.len() / 64; let remainder_len = m.len() % 64; for i in 0..num_blocks {
// Full block
let b = chacha20_encrypt_block(st0, i as u32, &m[64 * i..(64 * i + 64)].try_into().unwrap()); hax_lib::assume!(blocks_out.len() == i * 64); blocks_out.extend_from_slice(&b); } hax_lib::assume!(blocks_out.len() == num_blocks * 64); if remainder_len != 0 {
// Last block
let b = chacha20_encrypt_last(st0, num_blocks as u32, &m[64 * num_blocks..m.len()]); blocks_out.extend_from_slice(&b); } blocks_out }
// Top-level API: initialize the state from key/IV/counter, then process
// the whole message (XOR keystream, so the same call decrypts).
pub fn chacha20(m: &[u8], key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> Vec { let state = chacha20_init(key, iv, ctr); chacha20_update(state, m) }
================================================ FILE: examples/chacha20/tests/kat.rs ================================================
use chacha20::chacha20; pub type ChaChaIV = [u8; 12]; pub type ChaChaKey = [u8; 32];
// Round-trips one known-answer test: encrypting `m` with counter 1 must
// yield `exp_cipher`, and running chacha20 again over the ciphertext with
// the same key/IV/counter must recover `m`.
fn kat_test(m: Vec, key: ChaChaKey, iv: ChaChaIV, exp_cipher: Vec) { let out = chacha20(&m, &key, &iv, 1u32); assert_eq!(exp_cipher, out); let decrypted = chacha20(&out, &key, &iv, 1u32); assert_eq!(m, decrypted); } #[test] fn test_kat() { let key = [ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, ]; let iv = [ 0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, ]; let m = vec![ 0x4c, 0x61,
0x64, 0x69, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f, 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75, 0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73, 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, 0x74, 0x2e, ]; let exp_cipher = vec![ 0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb, 0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2, 0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe, 0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6, 0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12, 0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b, 0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29, 0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36, 0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c, 0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58, 0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94, 0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc, 0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d, 0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b, 0x61, 0x16, ]; kat_test(m, key, iv, exp_cipher); let key = [ 0x8c, 0x4e, 0xfa, 0x63, 0x37, 0x96, 0x89, 0xd5, 0x90, 0xa8, 0xcb, 0xcf, 0xe1, 0x59, 0x42, 0xf8, 0xc1, 0xce, 0xe5, 0xaf, 0xa5, 0xf7, 0x52, 0xf7, 0xc3, 0xf0, 0x92, 0xa8, 0x41, 0x93, 0xa6, 0x89, ]; let iv = [ 0xbc, 0xf, 0x85, 0xee, 0x55, 0xa, 0x45, 0x6f, 0x16, 0xa7, 0x35, 0xb6, ]; let m = vec![ 204, 17, 211, 86, 205, 3, 143, 149, 232, 65, 249, 176, 134, 19, 51, 245, 33, 247, 187, 39, 120, 111, 226, 96, 68, 224, 250, 140, 18, 23, 174, 109, 149, 193, 10, 5, 167, 22, 19, 129, 17, 172, 51, 202, 186, 21, 6, 141, 39, 108, 186, 72, 39, 100, 193, 30, 104, 79, 48, 185, 169, 
209, 200, 3, 13, 163, 231, 189, 171, 136, 188, 95, 55, 49, 109, 64, 186, 116, 233, 184, 56, 190, 71, 41, 250, 237, 235, 86, 23, 123, 226, 228, 35, 127, 176, 10, 49, 230, 129, 226, 237, 144, 29, 197, 161, 96, 129, 200, 66, 205, 187, 155, 34, 133, 250, 84, 14, 51, 242, 189, 46, 228, 61, 170, 192, 93, 214, 35, 206, 224, 157, 14, 249, 97, 40, 134, 103, 194, 168, 191, 159, 249, 127, 85, 83, 223, 166, 145, 98, 60, 85, 129, 209, 67, 119, 189, 67, 56, 55, 106, 48, 255, 198, 76, 192, 233, 56, 236, 98, 228, 219, 213, 206, 185, 25, 125, 189, 112, 160, 113, 183, 90, 71, 15, 80, 46, 143, 110, 112, 234, 214, 218, 24, 232, 196, 229, 62, 176, 17, 61, 92, 172, 224, 29, 225, 151, 141, 143, 126, 235, 195, 179, 186, 244, 250, 165, 106, 17, 255, 145, 27, 166, 250, 29, 149, 212, 55, 214, 158, 104, 82, 74, 246, 167, 216, 132, 218, 121, 4, 167, 73, 67, 145, 173, 245, 40, 158, 72, 71, 167, 53, 176, 27, 136, 165, 222, 115, 63, 241, 144, 198, 3, 81, 22, 58, 128, 38, 198, 100, 40, 36, 136, 194, 216, 150, 182, 94, 194, 235, 97, 212, 195, 112, 255, 158, 243, 154, 102, 56, 105, 25, 72, 80, 106, 123, 84, 162, 102, 161, 8, 58, 194, 160, 111, 247, 22, 129, 212, 140, 111, 80, 168, 203, 126, 222, 231, 82, 98, 63, 194, 253, 127, 127, 25, 208, 14, 252, 199, 107, 88, 38, 82, 57, 67, 13, 173, 208, 75, 182, 222, 89, 70, 27, 28, 21, 17, 97, 122, 184, 122, 27, 230, 219, 56, 135, 146, 204, 36, 211, 92, 113, 196, 121, 14, 212, 64, 211, 3, 122, 47, 217, 186, 209, 254, 221, 126, 172, 235, 198, 198, 52, 118, 19, 192, 150, 148, 224, 250, 173, 139, 121, 101, 231, 13, 101, 22, 168, 223, 118, 254, 161, 216, 110, 246, 67, 64, 249, 252, 55, 54, 95, 52, 72, 206, 68, 35, 36, 120, 52, 126, 233, 38, 3, 27, 11, 89, 242, 17, 168, 32, 197, 21, 121, 187, 77, 193, 107, 204, 151, 76, 187, 196, 162, 149, 93, 43, 39, 165, 171, 45, 154, 186, 89, 170, 11, 60, 119, 30, 183, 0, 29, 154, 114, 227, 77, 207, 140, 232, 18, 117, 3, 49, 229, 150, 125, 201, 100, 191, 44, 20, 35, 142, 216, 219, 38, 133, 166, 247, 26, 129, 69, 90, 140, 20, 
70, 97, 49, 143, 7, 214, 61, 2, 65, 133, 36, 116, 140, 78, 68, 29, 138, 89, 83, 162, 117, 48, 52, 247, 108, 118, 183, 48, 125, 45, 53, 192, 235, 198, 30, 159, 113, 131, 182, 22, 185, 47, 174, 155, 179, 39, 235, 248, 188, 117, 181, 233, 8, 153, 224, 107, 115, 226, 77, 22, 38, 190, 143, 50, 151, 171, 80, 137, 229, 209, 131, 130, 232, 147, 142, 227, 225, 86, 56, 230, 12, 236, 180, 121, 119, 89, 55, 231, 158, 222, 131, 173, 255, 24, 41, 49, 196, 145, 137, 240, 71, 244, 165, 16, 84, 19, 218, 103, 26, 212, 221, 140, 154, 59, 87, 86, 254, 200, 81, 20, 250, 20, 173, 95, 33, 185, 106, 170, 39, 55, 249, 33, 192, 79, 5, 27, 92, 126, 245, 10, 215, 11, 43, 240, 120, 16, 167, 251, 80, 79, 16, 215, 154, 28, 131, 8, 121, 124, 189, 178, 190, 194, 246, 196, 35, 155, 36, 74, 175, 231, 78, 230, 212, 130, 13, 240, 137, 255, 103, 224, 163, 209, 164, 252, 7, 16, 205, 198, 155, 107, 255, 9, 26, 176, 69, 47, 58, 17, 198, 134, 241, 242, 2, 98, 48, 131, 58, 52, 122, 10, 96, 45, 39, 231, 146, 89, 207, 187, 96, 84, 207, 157, 89, 166, 169, 236, 140, 165, 205, 87, 111, 142, 142, 49, 12, 18, 218, 196, 168, 239, 111, 86, 192, 199, 237, 65, 91, 177, 113, 206, 133, 165, 51, 177, 49, 55, 127, 47, 14, 121, 250, 30, 107, 243, 99, 109, 195, 110, 62, 20, 112, 100, 205, 220, 51, 69, 151, 206, 114, 186, 6, 1, 243, ]; let exp_cipher = vec![ 223, 158, 69, 247, 207, 28, 32, 247, 233, 67, 87, 239, 80, 204, 82, 219, 90, 49, 36, 247, 188, 12, 201, 188, 19, 16, 249, 172, 149, 48, 185, 193, 205, 81, 162, 184, 194, 29, 198, 129, 72, 30, 148, 5, 127, 254, 175, 179, 229, 228, 26, 157, 127, 67, 88, 85, 240, 197, 250, 135, 43, 230, 0, 140, 178, 229, 204, 62, 247, 160, 98, 24, 192, 253, 194, 86, 162, 196, 216, 177, 7, 32, 220, 97, 252, 127, 236, 194, 131, 230, 229, 37, 222, 145, 142, 96, 87, 99, 206, 218, 149, 223, 164, 92, 65, 178, 73, 240, 146, 227, 168, 244, 163, 11, 237, 205, 132, 236, 150, 253, 140, 20, 232, 68, 177, 232, 224, 19, 254, 63, 58, 105, 53, 146, 164, 5, 151, 188, 55, 7, 39, 137, 12, 169, 49, 209, 20, 
80, 199, 134, 31, 170, 254, 177, 67, 119, 216, 57, 170, 76, 37, 226, 93, 9, 65, 61, 62, 169, 67, 230, 241, 209, 164, 240, 81, 100, 13, 228, 24, 212, 86, 69, 48, 182, 160, 106, 151, 144, 173, 173, 173, 91, 155, 9, 156, 138, 182, 6, 211, 221, 221, 250, 148, 175, 189, 222, 79, 142, 31, 198, 146, 194, 4, 250, 19, 8, 5, 28, 15, 9, 95, 66, 122, 234, 138, 205, 107, 0, 202, 236, 143, 197, 126, 164, 77, 159, 172, 180, 144, 68, 211, 76, 146, 83, 92, 56, 68, 164, 0, 145, 243, 106, 71, 233, 182, 118, 138, 193, 179, 0, 249, 162, 210, 56, 157, 210, 161, 158, 129, 112, 82, 253, 98, 148, 70, 247, 93, 234, 218, 200, 137, 245, 191, 196, 157, 204, 239, 148, 253, 103, 98, 99, 28, 131, 78, 194, 122, 201, 171, 106, 20, 16, 150, 80, 138, 202, 29, 171, 173, 57, 98, 55, 49, 211, 99, 74, 160, 255, 83, 213, 55, 141, 22, 58, 121, 220, 86, 159, 148, 178, 220, 245, 243, 36, 53, 126, 31, 224, 188, 220, 133, 63, 99, 108, 103, 93, 134, 210, 57, 114, 228, 127, 226, 182, 106, 98, 113, 107, 131, 15, 222, 1, 129, 21, 169, 179, 168, 102, 42, 156, 92, 6, 237, 16, 13, 213, 75, 44, 155, 15, 86, 12, 148, 236, 168, 124, 131, 127, 59, 212, 145, 224, 46, 226, 170, 254, 210, 8, 237, 7, 247, 57, 146, 170, 220, 97, 24, 172, 34, 128, 233, 62, 238, 90, 249, 38, 244, 211, 95, 80, 57, 199, 75, 123, 130, 240, 89, 251, 58, 223, 205, 116, 105, 153, 116, 120, 165, 121, 140, 9, 191, 13, 247, 10, 236, 51, 65, 210, 243, 255, 234, 90, 254, 111, 15, 50, 91, 143, 221, 78, 248, 40, 232, 43, 255, 5, 160, 65, 77, 57, 36, 91, 77, 124, 50, 93, 70, 105, 195, 8, 248, 184, 204, 56, 173, 5, 131, 177, 162, 222, 103, 218, 194, 42, 227, 64, 3, 105, 74, 86, 47, 26, 164, 31, 71, 26, 234, 83, 51, 230, 27, 214, 78, 117, 248, 75, 226, 140, 181, 144, 74, 161, 201, 178, 243, 210, 157, 121, 176, 23, 156, 192, 126, 100, 121, 79, 0, 24, 100, 36, 116, 127, 226, 233, 240, 84, 197, 88, 107, 151, 105, 106, 64, 195, 79, 52, 168, 185, 30, 19, 98, 90, 213, 9, 202, 106, 46, 114, 58, 229, 84, 220, 105, 225, 65, 139, 175, 0, 250, 31, 76, 73, 33, 17, 245, 
23, 213, 74, 255, 52, 148, 242, 217, 98, 67, 243, 59, 117, 103, 133, 119, 164, 4, 231, 1, 26, 6, 22, 157, 134, 104, 137, 49, 172, 89, 95, 137, 37, 141, 91, 14, 132, 109, 215, 214, 136, 224, 175, 40, 11, 31, 128, 67, 255, 29, 78, 70, 204, 36, 194, 16, 218, 200, 77, 150, 72, 167, 22, 47, 72, 199, 88, 109, 151, 177, 11, 114, 11, 176, 174, 20, 65, 182, 80, 28, 131, 39, 234, 181, 226, 169, 9, 154, 97, 248, 57, 91, 133, 52, 0, 75, 211, 171, 106, 84, 179, 14, 134, 13, 238, 157, 123, 36, 190, 69, 232, 85, 4, 125, 86, 69, 64, 230, 162, 139, 187, 189, 210, 58, 212, 74, 74, 122, 52, 61, 237, 152, 75, 38, 92, 222, 119, 138, 202, 98, 36, 20, 35, 50, 22, 234, 40, 206, 147, 146, 243, 204, 19, 44, 227, 82, 169, 91, 238, 179, 118, 18, 70, 166, 163, 30, 156, 191, 179, 183, 83, 252, 252, 238, 207, 70, 81, 219, 48, 86, 226, 243, 94, 179, 251, 22, 107, 146, 133, 165, 37, 138, ]; kat_test(m, key, iv, exp_cipher); } ================================================ FILE: examples/commonArgs.nix ================================================ { craneLib, lib, }: let matches = re: path: !builtins.isNull (builtins.match re path); in { version = "0.0.1"; src = lib.cleanSourceWith { src = craneLib.path ./..; filter = path: type: # We include only certain files. FStar files under the example # directory are listed out. Same for proverif (*.pvl) files. 
( matches ".*(Makefile|.*[.](rs|toml|lock|diff|fsti?|pv))$" path && !matches ".*examples/.*[.]fsti?$" path ) || ("directory" == type); }; doCheck = false; cargoVendorDir = craneLib.vendorMultipleCargoDeps { cargoLockList = [ ./Cargo.lock ../Cargo.lock ]; }; } ================================================ FILE: examples/coq-example/Cargo.toml ================================================ [package] name = "coq-example" version = "0.1.0" edition = "2021" [dependencies] hax-lib = {path = "../../hax-lib" } ================================================ FILE: examples/coq-example/README.md ================================================ # Stack example This example is a simple interpreter for a stack. ## How to build ```sh cargo hax into coq ``` ## Coq Now we have the file `proofs/coq/extraction/Coq_example.v`. To run the files we first need to install some dependencies. ### Dependencies for Coq The coq backend depends on `coq-record-update` to implement Rust record updates. This can be installed by ```sh opam install coq-record-update ``` or alternatively the import lines ```coq From RecordUpdate Require Import RecordSet. Import RecordSetNotations. ``` can be commented out. ## Library required to run As Rust implicitly imports the `Core` library for a lot of the basic functionality, we will also require a core library for Coq. For this small example, we build a dummy library with the required definitions, to run the example. As a hack to get this to run we add ``` mod dummy_core_lib; use dummy_core_lib::*; ``` to the Rust example file `src/lib.rs`. The definitions of the library are put into `proofs/coq/extraction/dummy_core_lib.v` to match this import. ## Running the code and doing proofs We can set up a Coq project by making a `_CoqProject` file in `proofs/coq/extraction/`. 
We put any tests and proofs into a separate file `proofs/coq/extraction/Coq_proofs.v`,
(* Interprets one stack-machine instruction against `stack` and returns
   the updated stack. Auto-generated by hax from the Rust
   `Instruction::interpret`; edits here will be overwritten on
   re-extraction.
   - Push v        push v.
   - Pop           pop once and discard the result.
   - Add/Sub/Mul   pop twice, yielding results a then b; if both are
                   Some, push `b op a`; otherwise keep the popped stack.
   - Not           pop once; if Some a, push 1 when a = 0 else 0.
   - Dup           pop once; if Some a, push a back twice.
   The `tmp0`/`out`/`hoistN` lets mirror hax's hoisting of the Rust
   `pop` calls, which return the new stack paired with an Option. *)
Definition impl_Instruction__interpret (self : t_Instruction) (stack : t_Vec ((t_isize)) ((t_Global))) : t_Vec ((t_isize)) ((t_Global)) := let stack := match self with
| Instruction_Push (v) => impl_1__push (stack) (v)
| Instruction_Pop => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let _ := out in stack
| Instruction_Add => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist2 := out in let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist1 := out in let hoist3 := (hoist2,hoist1) in match hoist3 with | (Option_Some (a),Option_Some (b)) => impl_1__push (stack) (f_add (b) (a)) | _ => stack end
| Instruction_Sub => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist5 := out in let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist4 := out in let hoist6 := (hoist5,hoist4) in match hoist6 with | (Option_Some (a),Option_Some (b)) => impl_1__push (stack) (f_sub (b) (a)) | _ => stack end
| Instruction_Mul => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist8 := out in let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist7 := out in let hoist9 := (hoist8,hoist7) in match hoist9 with | (Option_Some (a),Option_Some (b)) => impl_1__push (stack) (f_mul (b) (a)) | _ => stack end
| Instruction_Not => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist10 := out in match hoist10 with | Option_Some (a) => impl_1__push (stack) (if f_eq (a) ((0 : t_isize)) then (1 : t_isize) else (0 : t_isize)) | _ => stack end
| Instruction_Dup => let (tmp0,out) := impl_1__pop (stack) in let stack := tmp0 in let hoist11 := out in match hoist11 with | Option_Some (a) => let stack := impl_1__push (stack) (a) in let stack := impl_1__push (stack) (a) in stack | _ => stack end
end in stack.
(* Builds the example program and folds the interpreter over it:
   Push 1; Push 1; Add; Push 1; Push 1; Push 1; Add; Add; Dup; Mul; Sub.
   The handwritten proof `is_example_correct` below pins the final
   stack to [-7]. *)
Definition example '(_ : unit) : t_Vec ((t_isize)) ((t_Global)) := let stk := impl__new (tt) in let stk := f_fold (f_into_iter ([Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Add; Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Add; Instruction_Add; Instruction_Dup; Instruction_Mul; Instruction_Sub])) (stk) (fun stk cmd => impl_Instruction__interpret (cmd) (stk)) in stk.
================================================ FILE: examples/coq-example/proofs/coq/extraction/Coq_example_Dummy_core_lib.v ================================================
(* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *)
================================================ FILE: examples/coq-example/proofs/coq/extraction/Coq_proofs.v ================================================
(* Handwritten Proofs *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. From Coq_example Require Import Coq_example.
(* NOTE(review): the two theorems below refer to
   `impl__Instruction__interpret` (double underscore after `impl`),
   while the generated Coq_example.v in this tree defines
   `impl_Instruction__interpret` (single underscore). Presumably one of
   the two is stale after a renaming of extracted names — confirm these
   proofs still compile against a fresh extraction. *)
(* Check example *) Example is_example_correct : example tt = [-7]. Proof. reflexivity. Qed.
(* Proof composite operations *)
(* Dup then Mul squares the single element of a one-element stack. *)
Theorem dup_mul_is_square : forall x, impl__Instruction__interpret Instruction_Mul ( impl__Instruction__interpret Instruction_Dup [x]) = [Z.pow x 2]. Proof. intros. cbn. rewrite Z.mul_1_r. reflexivity. Qed.
(* A Push immediately followed by a Pop leaves any stack unchanged. *)
Theorem push_pop_cancel : forall l x, impl__Instruction__interpret Instruction_Pop ( impl__Instruction__interpret (Instruction_Push x) l) = l. Proof. intros. cbn. reflexivity. Qed.
================================================ FILE: examples/coq-example/proofs/coq/extraction/Makefile ================================================ ########################################################################## ## # The Coq Proof Assistant / The Coq Development Team ## ## v # Copyright INRIA, CNRS and contributors ## ## /dev/null 2>/dev/null; echo $$?)) STDTIME?=command time -f $(TIMEFMT) else ifeq (0,$(shell gtime -f "" true >/dev/null 2>/dev/null; echo $$?)) STDTIME?=gtime -f $(TIMEFMT) else STDTIME?=command time endif endif else STDTIME?=command time -f $(TIMEFMT) endif COQBIN?= ifneq (,$(COQBIN)) # add an ending / COQBIN:=$(COQBIN)/ endif # Coq binaries COQC ?= "$(COQBIN)coqc" COQTOP ?= "$(COQBIN)coqtop" COQCHK ?= "$(COQBIN)coqchk" COQNATIVE ?= "$(COQBIN)coqnative" COQDEP ?= "$(COQBIN)coqdep" COQDOC ?= "$(COQBIN)coqdoc" COQPP ?= "$(COQBIN)coqpp" COQMKFILE ?= "$(COQBIN)coq_makefile" OCAMLLIBDEP ?= "$(COQBIN)ocamllibdep" # Timing scripts COQMAKE_ONE_TIME_FILE ?= "$(COQCORELIB)/tools/make-one-time-file.py" COQMAKE_BOTH_TIME_FILES ?= "$(COQCORELIB)/tools/make-both-time-files.py" COQMAKE_BOTH_SINGLE_TIMING_FILES ?= "$(COQCORELIB)/tools/make-both-single-timing-files.py" BEFORE ?= AFTER ?= # OCaml binaries CAMLC ?= "$(OCAMLFIND)" ocamlc -c CAMLOPTC ?= "$(OCAMLFIND)" opt -c CAMLLINK ?= "$(OCAMLFIND)" ocamlc -linkall CAMLOPTLINK ?= "$(OCAMLFIND)" opt -linkall CAMLDOC ?= "$(OCAMLFIND)" ocamldoc CAMLDEP ?= "$(OCAMLFIND)" ocamldep -slash -ml-synonym .mlpack # DESTDIR is prepended to all installation paths DESTDIR ?= # Debug builds, typically -g to OCaml, -debug to Coq. 
CAMLDEBUG ?= COQDEBUG ?= # Extra packages to be linked in (as in findlib -package) CAMLPKGS ?= FINDLIBPKGS = -package coq-core.plugins.ltac $(CAMLPKGS) # Option for making timing files TIMING?= # Option for changing sorting of timing output file TIMING_SORT_BY ?= auto # Option for changing the fuzz parameter on the output file TIMING_FUZZ ?= 0 # Option for changing whether to use real or user time for timing tables TIMING_REAL?= # Option for including the memory column(s) TIMING_INCLUDE_MEM?= # Option for sorting by the memory column TIMING_SORT_BY_MEM?= # Output file names for timed builds TIME_OF_BUILD_FILE ?= time-of-build.log TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log TIME_OF_BUILD_AFTER_FILE ?= time-of-build-after.log TIME_OF_PRETTY_BUILD_FILE ?= time-of-build-pretty.log TIME_OF_PRETTY_BOTH_BUILD_FILE ?= time-of-build-both.log TIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line TGTS ?= # Retro compatibility (DESTDIR is standard on Unix, DSTROOT is not) ifdef DSTROOT DESTDIR := $(DSTROOT) endif # Substitution of the path by appending $(DESTDIR) if needed. # The variable $(COQMF_WINDRIVE) can be needed for Cygwin environments. windrive_path = $(if $(COQMF_WINDRIVE),$(subst $(COQMF_WINDRIVE),/,$(1)),$(1)) destination_path = $(if $(DESTDIR),$(DESTDIR)/$(call windrive_path,$(1)),$(1)) # Installation paths of libraries and documentation. COQLIBINSTALL ?= $(call destination_path,$(COQLIB)/user-contrib) COQDOCINSTALL ?= $(call destination_path,$(DOCDIR)/coq/user-contrib) COQPLUGININSTALL ?= $(call destination_path,$(COQCORELIB)/..) COQTOPINSTALL ?= $(call destination_path,$(COQLIB)/toploop) # FIXME: Unused variable? 
# findlib files installation FINDLIBPREINST= mkdir -p "$(COQPLUGININSTALL)/" FINDLIBDESTDIR= -destdir "$(COQPLUGININSTALL)/" # we need to move out of sight $(METAFILE) otherwise findlib thinks the # package is already installed findlib_install = \ $(HIDE)if [ "$(METAFILE)" ]; then \ $(FINDLIBPREINST) && \ mv "$(METAFILE)" "$(METAFILE).skip" ; \ "$(OCAMLFIND)" install $(2) $(FINDLIBDESTDIR) $(FINDLIBPACKAGE) $(1); \ rc=$$?; \ mv "$(METAFILE).skip" "$(METAFILE)"; \ exit $$rc; \ fi findlib_remove = \ $(HIDE)if [ ! -z "$(METAFILE)" ]; then\ "$(OCAMLFIND)" remove $(FINDLIBDESTDIR) $(FINDLIBPACKAGE); \ fi ########## End of parameters ################################################## # What follows may be relevant to you only if you need to # extend this Makefile. If so, look for 'Extension point' here and # put in Makefile.local double colon rules accordingly. # E.g. to perform some work after the all target completes you can write # # post-all:: # echo "All done!" # # in Makefile.local # ############################################################################### # Flags ####################################################################### # # We define a bunch of variables combining the parameters. # To add additional flags to coq, coqchk or coqdoc, set the # {COQ,COQCHK,COQDOC}EXTRAFLAGS variable to whatever you want to add. # To overwrite the default choice and set your own flags entirely, set the # {COQ,COQCHK,COQDOC}FLAGS variable. 
SHOW := $(if $(VERBOSE),@true "",@echo "") HIDE := $(if $(VERBOSE),,@) TIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD)) OPT?= # The DYNOBJ and DYNLIB variables are used by "coqdep -dyndep var" in .v.d ifeq '$(OPT)' '-byte' USEBYTE:=true DYNOBJ:=.cma DYNLIB:=.cma else USEBYTE:= DYNOBJ:=.cmxs DYNLIB:=.cmxs endif # these variables are meant to be overridden if you want to add *extra* flags COQEXTRAFLAGS?= COQCHKEXTRAFLAGS?= COQDOCEXTRAFLAGS?= # Find the last argument of the form "-native-compiler FLAG" COQUSERNATIVEFLAG:=$(strip \ $(subst -native-compiler-,,\ $(lastword \ $(filter -native-compiler-%,\ $(subst -native-compiler ,-native-compiler-,\ $(strip $(COQEXTRAFLAGS))))))) COQFILTEREDEXTRAFLAGS:=$(strip \ $(filter-out -native-compiler-%,\ $(subst -native-compiler ,-native-compiler-,\ $(strip $(COQEXTRAFLAGS))))) COQACTUALNATIVEFLAG:=$(lastword $(COQMF_COQ_NATIVE_COMPILER_DEFAULT) $(COQMF_COQPROJECTNATIVEFLAG) $(COQUSERNATIVEFLAG)) ifeq '$(COQACTUALNATIVEFLAG)' 'yes' COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" COQDONATIVE="yes" else ifeq '$(COQACTUALNATIVEFLAG)' 'ondemand' COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" COQDONATIVE="no" else COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "no" COQDONATIVE="no" endif endif # these flags do NOT contain the libraries, to make them easier to overwrite COQFLAGS?=-q $(OTHERFLAGS) $(COQFILTEREDEXTRAFLAGS) $(COQNATIVEFLAG) COQCHKFLAGS?=-silent -o $(COQCHKEXTRAFLAGS) COQDOCFLAGS?=-interpolate -utf8 $(COQDOCEXTRAFLAGS) COQDOCLIBS?=$(COQLIBS_NOML) # The version of Coq being run and the version of coq_makefile that # generated this makefile COQ_VERSION:=$(shell $(COQC) --print-version | cut -d " " -f 1) COQMAKEFILE_VERSION:=8.18.0 # COQ_SRC_SUBDIRS is for user-overriding, usually to add # `user-contrib/Foo` to the includes, we keep COQCORE_SRC_SUBDIRS for # Coq's own core libraries, which should be replaced by ocamlfind # 
options at some point. COQ_SRC_SUBDIRS?= COQSRCLIBS?= $(foreach d,$(COQ_SRC_SUBDIRS), -I "$(COQLIB)/$(d)") CAMLFLAGS+=$(OCAMLLIBS) $(COQSRCLIBS) # ocamldoc fails with unknown argument otherwise CAMLDOCFLAGS:=$(filter-out -annot, $(filter-out -bin-annot, $(CAMLFLAGS))) CAMLFLAGS+=$(OCAMLWARN) ifneq (,$(TIMING)) ifeq (after,$(TIMING)) TIMING_EXT=after-timing else ifeq (before,$(TIMING)) TIMING_EXT=before-timing else TIMING_EXT=timing endif endif TIMING_ARG=-time-file $<.$(TIMING_EXT) else TIMING_ARG= endif # Files ####################################################################### # # We here define a bunch of variables about the files being part of the # Coq project in order to ease the writing of build target and build rules VDFILE := .Makefile.d ALLSRCFILES := \ $(MLGFILES) \ $(MLFILES) \ $(MLPACKFILES) \ $(MLLIBFILES) \ $(MLIFILES) # helpers vo_to_obj = $(addsuffix .o,\ $(filter-out Warning: Error:,\ $(shell $(COQTOP) -q -noinit -batch -quiet -print-mod-uid $(1)))) strip_dotslash = $(patsubst ./%,%,$(1)) # without this we get undefined variables in the expansion for the # targets of the [deprecated,use-mllib-or-mlpack] rule with_undef = $(if $(filter-out undefined, $(origin $(1))),$($(1))) VO = vo VOS = vos VOFILES = $(VFILES:.v=.$(VO)) GLOBFILES = $(VFILES:.v=.glob) HTMLFILES = $(VFILES:.v=.html) GHTMLFILES = $(VFILES:.v=.g.html) BEAUTYFILES = $(addsuffix .beautified,$(VFILES)) TEXFILES = $(VFILES:.v=.tex) GTEXFILES = $(VFILES:.v=.g.tex) CMOFILES = \ $(MLGFILES:.mlg=.cmo) \ $(MLFILES:.ml=.cmo) \ $(MLPACKFILES:.mlpack=.cmo) CMXFILES = $(CMOFILES:.cmo=.cmx) OFILES = $(CMXFILES:.cmx=.o) CMAFILES = $(MLLIBFILES:.mllib=.cma) $(MLPACKFILES:.mlpack=.cma) CMXAFILES = $(CMAFILES:.cma=.cmxa) CMIFILES = \ $(CMOFILES:.cmo=.cmi) \ $(MLIFILES:.mli=.cmi) # the /if/ is because old _CoqProject did not list a .ml(pack|lib) but just # a .mlg file CMXSFILES = \ $(MLPACKFILES:.mlpack=.cmxs) \ $(CMXAFILES:.cmxa=.cmxs) \ $(if $(MLPACKFILES)$(CMXAFILES),,\ $(MLGFILES:.mlg=.cmxs) 
$(MLFILES:.ml=.cmxs)) # files that are packed into a plugin (no extension) PACKEDFILES = \ $(call strip_dotslash, \ $(foreach lib, \ $(call strip_dotslash, \ $(MLPACKFILES:.mlpack=_MLPACK_DEPENDENCIES)),$(call with_undef,$(lib)))) # files that are archived into a .cma (mllib) LIBEDFILES = \ $(call strip_dotslash, \ $(foreach lib, \ $(call strip_dotslash, \ $(MLLIBFILES:.mllib=_MLLIB_DEPENDENCIES)),$(call with_undef,$(lib)))) CMIFILESTOINSTALL = $(filter-out $(addsuffix .cmi,$(PACKEDFILES)),$(CMIFILES)) CMOFILESTOINSTALL = $(filter-out $(addsuffix .cmo,$(PACKEDFILES)),$(CMOFILES)) OBJFILES = $(call vo_to_obj,$(VOFILES)) ALLNATIVEFILES = \ $(OBJFILES:.o=.cmi) \ $(OBJFILES:.o=.cmx) \ $(OBJFILES:.o=.cmxs) FINDLIBPACKAGE=$(patsubst .%,%,$(suffix $(METAFILE))) # trick: wildcard filters out non-existing files, so that `install` doesn't show # warnings and `clean` doesn't pass to rm a list of files that is too long for # the shell. NATIVEFILES = $(wildcard $(ALLNATIVEFILES)) FILESTOINSTALL = \ $(VOFILES) \ $(VFILES) \ $(GLOBFILES) \ $(NATIVEFILES) \ $(CMXSFILES) # to be removed when we remove legacy loading FINDLIBFILESTOINSTALL = \ $(CMIFILESTOINSTALL) ifeq '$(HASNATDYNLINK)' 'true' DO_NATDYNLINK = yes FINDLIBFILESTOINSTALL += $(CMXSFILES) $(CMXAFILES) $(CMOFILESTOINSTALL:.cmo=.cmx) else DO_NATDYNLINK = endif ALLDFILES = $(addsuffix .d,$(ALLSRCFILES)) $(VDFILE) # Compilation targets ######################################################### all: $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all .PHONY: all all.timing.diff: $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all.timing.diff TIME_OF_PRETTY_BUILD_EXTRA_FILES="" $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all .PHONY: all.timing.diff ifeq (0,$(TIMING_REAL)) TIMING_REAL_ARG := TIMING_USER_ARG := --user else ifeq 
(1,$(TIMING_REAL)) TIMING_REAL_ARG := --real TIMING_USER_ARG := else TIMING_REAL_ARG := TIMING_USER_ARG := endif endif ifeq (0,$(TIMING_INCLUDE_MEM)) TIMING_INCLUDE_MEM_ARG := --no-include-mem else TIMING_INCLUDE_MEM_ARG := endif ifeq (1,$(TIMING_SORT_BY_MEM)) TIMING_SORT_BY_MEM_ARG := --sort-by-mem else TIMING_SORT_BY_MEM_ARG := endif make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: $(HIDE)rm -f pretty-timed-success.ok $(HIDE)($(MAKE) --no-print-directory -f "$(PARENT)" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE) $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed print-pretty-timed:: $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) print-pretty-timed-diff:: $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) ifeq (,$(BEFORE)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else ifeq (,$(AFTER)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else print-pretty-single-time-diff:: $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) endif endif pretty-timed: $(HIDE)$(MAKE) --no-print-directory -f 
"$(PARENT)" make-pretty-timed $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-timed .PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff # Extension points for actions to be performed before/after the all target pre-all:: @# Extension point $(HIDE)if [ "$(COQMAKEFILE_VERSION)" != "$(COQ_VERSION)" ]; then\ echo "W: This Makefile was generated by Coq $(COQMAKEFILE_VERSION)";\ echo "W: while the current Coq version is $(COQ_VERSION)";\ fi .PHONY: pre-all post-all:: @# Extension point .PHONY: post-all real-all: $(VOFILES) $(if $(USEBYTE),bytefiles,optfiles) .PHONY: real-all real-all.timing.diff: $(VOFILES:.vo=.v.timing.diff) .PHONY: real-all.timing.diff bytefiles: $(CMOFILES) $(CMAFILES) .PHONY: bytefiles optfiles: $(if $(DO_NATDYNLINK),$(CMXSFILES)) .PHONY: optfiles # FIXME, see Ralf's bugreport # quick is deprecated, now renamed vio vio: $(VOFILES:.vo=.vio) .PHONY: vio quick: vio $(warning "'make quick' is deprecated, use 'make vio' or consider using 'vos' files") .PHONY: quick vio2vo: $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ -schedule-vio2vo $(J) $(VOFILES:%.vo=%.vio) .PHONY: vio2vo # quick2vo is undocumented quick2vo: $(HIDE)make -j $(J) vio $(HIDE)VIOFILES=$$(for vofile in $(VOFILES); do \ viofile="$$(echo "$$vofile" | sed "s/\.vo$$/.vio/")"; \ if [ "$$vofile" -ot "$$viofile" -o ! 
-e "$$vofile" ]; then printf "$$viofile "; fi; \ done); \ echo "VIO2VO: $$VIOFILES"; \ if [ -n "$$VIOFILES" ]; then \ $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -schedule-vio2vo $(J) $$VIOFILES; \ fi .PHONY: quick2vo checkproofs: $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ -schedule-vio-checking $(J) $(VOFILES:%.vo=%.vio) .PHONY: checkproofs vos: $(VOFILES:%.vo=%.vos) .PHONY: vos vok: $(VOFILES:%.vo=%.vok) .PHONY: vok validate: $(VOFILES) $(TIMER) $(COQCHK) $(COQCHKFLAGS) $(COQLIBS_NOML) $^ .PHONY: validate only: $(TGTS) .PHONY: only # Documentation targets ####################################################### html: $(GLOBFILES) $(VFILES) $(SHOW)'COQDOC -d html $(GAL)' $(HIDE)mkdir -p html $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -html $(GAL) $(COQDOCLIBS) -d html $(VFILES) mlihtml: $(MLIFILES:.mli=.cmi) $(SHOW)'CAMLDOC -d $@' $(HIDE)mkdir $@ || rm -rf $@/* $(HIDE)$(CAMLDOC) -html \ -d $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS) all-mli.tex: $(MLIFILES:.mli=.cmi) $(SHOW)'CAMLDOC -latex $@' $(HIDE)$(CAMLDOC) -latex \ -o $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS) all.ps: $(VFILES) $(SHOW)'COQDOC -ps $(GAL)' $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -ps $(GAL) $(COQDOCLIBS) \ -o $@ `$(COQDEP) -sort $(VFILES)` all.pdf: $(VFILES) $(SHOW)'COQDOC -pdf $(GAL)' $(HIDE)$(COQDOC) \ -toc $(COQDOCFLAGS) -pdf $(GAL) $(COQDOCLIBS) \ -o $@ `$(COQDEP) -sort $(VFILES)` # FIXME: not quite right, since the output name is different gallinahtml: GAL=-g gallinahtml: html all-gal.ps: GAL=-g all-gal.ps: all.ps all-gal.pdf: GAL=-g all-gal.pdf: all.pdf # ? beautify: $(BEAUTYFILES) for file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done @echo 'Do not do "make clean" until you are sure that everything went well!' @echo 'If there were a problem, execute "for file in $$(find . 
-name \*.v.old -print); do mv $${file} $${file%.old}; done" in your shell/' .PHONY: beautify # Installation targets ######################################################## # # These rules can be extended in Makefile.local # Extensions can't assume when they run. # We use $(file) to avoid generating a very long command string to pass to the shell # (cf https://coq.zulipchat.com/#narrow/stream/250632-Coq-Platform-devs-.26-users/topic/Strange.20command.20length.20limit.20on.20Linux) # However Apple ships old make which doesn't have $(file) so we need a fallback $(file >.hasfile,1) HASFILE:=$(shell if [ -e .hasfile ]; then echo 1; rm .hasfile; fi) MKFILESTOINSTALL= $(if $(HASFILE),$(file >.filestoinstall,$(FILESTOINSTALL)),\ $(shell rm -f .filestoinstall) \ $(foreach x,$(FILESTOINSTALL),$(shell printf '%s\n' "$x" >> .filestoinstall))) # findlib needs the package to not be installed, so we remove it before # installing it (see the call to findlib_remove) install: META @$(MKFILESTOINSTALL) $(HIDE)code=0; for f in $$(cat .filestoinstall); do\ if ! [ -f "$$f" ]; then >&2 echo $$f does not exist; code=1; fi \ done; exit $$code $(HIDE)for f in $$(cat .filestoinstall); do\ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\ if [ "$$?"
!= "0" -o -z "$$df" ]; then\ echo SKIP "$$f" since it has no logical path;\ else\ install -d "$(COQLIBINSTALL)/$$df" &&\ install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\ echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\ fi;\ done $(call findlib_remove) $(call findlib_install, META $(FINDLIBFILESTOINSTALL)) $(HIDE)$(MAKE) install-extra -f "$(SELF)" @rm -f .filestoinstall install-extra:: @# Extension point .PHONY: install install-extra META: $(METAFILE) $(HIDE)if [ "$(METAFILE)" ]; then \ cat "$(METAFILE)" | grep -v 'directory.*=.*' > META; \ fi install-byte: $(call findlib_install, $(CMAFILES) $(CMOFILESTOINSTALL), -add) install-doc:: html mlihtml @# Extension point $(HIDE)install -d "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" $(HIDE)for i in html/*; do \ dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ install -m 0644 "$$i" "$$dest";\ echo INSTALL "$$i" "$$dest";\ done $(HIDE)install -d \ "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" $(HIDE)for i in mlihtml/*; do \ dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ install -m 0644 "$$i" "$$dest";\ echo INSTALL "$$i" "$$dest";\ done .PHONY: install-doc uninstall:: @# Extension point @$(MKFILESTOINSTALL) $(call findlib_remove) $(HIDE)for f in $$(cat .filestoinstall); do \ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\ instf="$(COQLIBINSTALL)/$$df/`basename $$f`" &&\ rm -f "$$instf" &&\ echo RM "$$instf" ;\ done $(HIDE)for f in $$(cat .filestoinstall); do \ df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\ echo RMDIR "$(COQLIBINSTALL)/$$df/" &&\ (rmdir "$(COQLIBINSTALL)/$$df/" 2>/dev/null || true); \ done @rm -f .filestoinstall .PHONY: uninstall uninstall-doc:: @# Extension point $(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html' $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" $(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml' $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" $(HIDE) rmdir "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/" || true .PHONY: uninstall-doc # 
Cleaning #################################################################### # # These rules can be extended in Makefile.local # Extensions can't assume when they run. clean:: @# Extension point $(SHOW)'CLEAN' $(HIDE)rm -f $(CMOFILES) $(HIDE)rm -f $(CMIFILES) $(HIDE)rm -f $(CMAFILES) $(HIDE)rm -f $(CMXFILES) $(HIDE)rm -f $(CMXAFILES) $(HIDE)rm -f $(CMXSFILES) $(HIDE)rm -f $(OFILES) $(HIDE)rm -f $(CMXAFILES:.cmxa=.a) $(HIDE)rm -f $(MLGFILES:.mlg=.ml) $(HIDE)rm -f $(CMXFILES:.cmx=.cmt) $(HIDE)rm -f $(MLIFILES:.mli=.cmti) $(HIDE)rm -f $(ALLDFILES) $(HIDE)rm -f $(NATIVEFILES) $(HIDE)find . -name .coq-native -type d -empty -delete $(HIDE)rm -f $(VOFILES) $(HIDE)rm -f $(VOFILES:.vo=.vio) $(HIDE)rm -f $(VOFILES:.vo=.vos) $(HIDE)rm -f $(VOFILES:.vo=.vok) $(HIDE)rm -f $(BEAUTYFILES) $(VFILES:=.old) $(HIDE)rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob all-mli.tex $(HIDE)rm -f $(VFILES:.v=.glob) $(HIDE)rm -f $(VFILES:.v=.tex) $(HIDE)rm -f $(VFILES:.v=.g.tex) $(HIDE)rm -f pretty-timed-success.ok $(HIDE)rm -f META $(HIDE)rm -rf html mlihtml .PHONY: clean cleanall:: clean @# Extension point $(SHOW)'CLEAN *.aux *.timing' $(HIDE)rm -f $(foreach f,$(VFILES:.v=),$(dir $(f)).$(notdir $(f)).aux) $(HIDE)rm -f $(TIME_OF_BUILD_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(HIDE)rm -f $(VOFILES:.vo=.v.timing) $(HIDE)rm -f $(VOFILES:.vo=.v.before-timing) $(HIDE)rm -f $(VOFILES:.vo=.v.after-timing) $(HIDE)rm -f $(VOFILES:.vo=.v.timing.diff) $(HIDE)rm -f .lia.cache .nia.cache .PHONY: cleanall archclean:: @# Extension point $(SHOW)'CLEAN *.cmx *.o' $(HIDE)rm -f $(NATIVEFILES) $(HIDE)rm -f $(CMOFILES:%.cmo=%.cmx) .PHONY: archclean # Compilation rules ########################################################### $(MLIFILES:.mli=.cmi): %.cmi: %.mli $(SHOW)'CAMLC -c $<' $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $< $(MLGFILES:.mlg=.ml): %.ml: %.mlg $(SHOW)'COQPP $<' $(HIDE)$(COQPP) $< #
Stupid hack around a deficient syntax: we cannot concatenate two expansions $(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml $(SHOW)'CAMLC -c $<' $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $< # Same hack $(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml $(SHOW)'CAMLOPT -c $(FOR_PACK) $<' $(HIDE)$(TIMER) $(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $(FOR_PACK) $< $(MLLIBFILES:.mllib=.cmxs): %.cmxs: %.cmxa $(SHOW)'CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ -shared -o $@ $< $(MLLIBFILES:.mllib=.cma): %.cma: | %.mllib $(SHOW)'CAMLC -a -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ $(MLLIBFILES:.mllib=.cmxa): %.cmxa: | %.mllib $(SHOW)'CAMLOPT -a -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ $(MLPACKFILES:.mlpack=.cmxs): %.cmxs: %.cmxa $(SHOW)'CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ -shared -o $@ $< $(MLPACKFILES:.mlpack=.cmxa): %.cmxa: %.cmx | %.mlpack $(SHOW)'CAMLOPT -a -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $< $(MLPACKFILES:.mlpack=.cma): %.cma: %.cmo | %.mlpack $(SHOW)'CAMLC -a -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ $(MLPACKFILES:.mlpack=.cmo): %.cmo: | %.mlpack $(SHOW)'CAMLC -pack -o $@' $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^ $(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack $(SHOW)'CAMLOPT -pack -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^ # This rule is for _CoqProject with no .mllib nor .mlpack $(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx 
$(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@' $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ -shared -o $@ $< # can't make # https://www.gnu.org/software/make/manual/make.html#Static-Pattern # work with multiple target rules # so use eval in a loop instead # with grouped targets https://www.gnu.org/software/make/manual/make.html#Multiple-Targets # if available (GNU Make >= 4.3) ifneq (,$(filter grouped-target,$(.FEATURES))) define globvorule= # take care to $$ variables using $< etc $(1).vo $(1).glob &: $(1).v | $(VDFILE) $(SHOW)COQC $(1).v $(HIDE)$$(TIMER) $(COQC) $(COQDEBUG) $$(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $(1).v ifeq ($(COQDONATIVE), "yes") $(SHOW)COQNATIVE $(1).vo $(HIDE)$(call TIMER,$(1).vo.native) $(COQNATIVE) $(COQLIBS) $(1).vo endif endef else $(VOFILES): %.vo: %.v | $(VDFILE) $(SHOW)COQC $< $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $< ifeq ($(COQDONATIVE), "yes") $(SHOW)COQNATIVE $@ $(HIDE)$(call TIMER,$@.native) $(COQNATIVE) $(COQLIBS) $@ endif # this is broken :( todo fix if we ever find a solution that doesn't need grouped targets $(GLOBFILES): %.glob: %.v $(SHOW)'COQC $< (for .glob)' $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< endif $(foreach vfile,$(VFILES:.v=),$(eval $(call globvorule,$(vfile)))) $(VFILES:.v=.vio): %.vio: %.v $(SHOW)COQC -vio $< $(HIDE)$(TIMER) $(COQC) -vio $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(VFILES:.v=.vos): %.vos: %.v $(SHOW)COQC -vos $< $(HIDE)$(TIMER) $(COQC) -vos $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(VFILES:.v=.vok): %.vok: %.v $(SHOW)COQC -vok $< $(HIDE)$(TIMER) $(COQC) -vok $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing $(SHOW)PYTHON TIMING-DIFF $*.{before,after}-timing $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" $(BEAUTYFILES): 
%.v.beautified: %.v $(SHOW)'BEAUTIFY $<' $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -beautify $< $(TEXFILES): %.tex: %.v $(SHOW)'COQDOC -latex $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex $< -o $@ $(GTEXFILES): %.g.tex: %.v $(SHOW)'COQDOC -latex -g $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@ $(HTMLFILES): %.html: %.v %.glob $(SHOW)'COQDOC -html $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html $< -o $@ $(GHTMLFILES): %.g.html: %.v %.glob $(SHOW)'COQDOC -html -g $<' $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html -g $< -o $@ # Dependency files ############################################################ ifndef MAKECMDGOALS -include $(ALLDFILES) else ifneq ($(filter-out archclean clean cleanall printenv make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff,$(MAKECMDGOALS)),) -include $(ALLDFILES) endif endif .SECONDARY: $(ALLDFILES) redir_if_ok = > "$@" || ( RV=$$?; rm -f "$@"; exit $$RV ) GENMLFILES:=$(MLGFILES:.mlg=.ml) $(addsuffix .d,$(ALLSRCFILES)): $(GENMLFILES) $(addsuffix .d,$(MLIFILES)): %.mli.d: %.mli $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLFILES)): %.ml.d: %.ml $(SHOW)'CAMLDEP $<' $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLLIBFILES)): %.mllib.d: %.mllib $(SHOW)'OCAMLLIBDEP $<' $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) $(addsuffix .d,$(MLPACKFILES)): %.mlpack.d: %.mlpack $(SHOW)'OCAMLLIBDEP $<' $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) # If this makefile is created using a _CoqProject we have coqdep get # options from it. This avoids argument length limits for pathological # projects. Note that extra options might be on the command line. 
VDFILE_FLAGS:=$(if _CoqProject,-f _CoqProject,) $(CMDLINE_COQLIBS) $(CMDLINE_VFILES) $(VDFILE): _CoqProject $(VFILES) $(SHOW)'COQDEP VFILES' $(HIDE)$(COQDEP) $(if $(strip $(METAFILE)),-m "$(METAFILE)") -vos -dyndep var $(VDFILE_FLAGS) $(redir_if_ok) # Misc ######################################################################## byte: $(HIDE)$(MAKE) all "OPT:=-byte" -f "$(SELF)" .PHONY: byte opt: $(HIDE)$(MAKE) all "OPT:=-opt" -f "$(SELF)" .PHONY: opt # This is deprecated. To extend this makefile use # extension points and Makefile.local printenv:: $(warning printenv is deprecated) $(warning write extensions in Makefile.local or include Makefile.conf) @echo 'COQLIB = $(COQLIB)' @echo 'COQCORELIB = $(COQCORELIB)' @echo 'DOCDIR = $(DOCDIR)' @echo 'OCAMLFIND = $(OCAMLFIND)' @echo 'HASNATDYNLINK = $(HASNATDYNLINK)' @echo 'SRC_SUBDIRS = $(SRC_SUBDIRS)' @echo 'COQ_SRC_SUBDIRS = $(COQ_SRC_SUBDIRS)' @echo 'COQCORE_SRC_SUBDIRS = $(COQCORE_SRC_SUBDIRS)' @echo 'OCAMLFIND = $(OCAMLFIND)' @echo 'PP = $(PP)' @echo 'COQFLAGS = $(COQFLAGS)' @echo 'COQLIB = $(COQLIBS)' @echo 'COQLIBINSTALL = $(COQLIBINSTALL)' @echo 'COQDOCINSTALL = $(COQDOCINSTALL)' .PHONY: printenv # Generate a .merlin file. 
If you need to append directives to this # file you can extend the merlin-hook target in Makefile.local .merlin: $(SHOW)'FILL .merlin' $(HIDE)echo 'FLG $(COQMF_CAMLFLAGS)' > .merlin $(HIDE)echo 'B $(COQCORELIB)' >> .merlin $(HIDE)echo 'S $(COQCORELIB)' >> .merlin $(HIDE)$(foreach d,$(COQCORE_SRC_SUBDIRS), \ echo 'B $(COQCORELIB)$(d)' >> .merlin;) $(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \ echo 'S $(COQLIB)$(d)' >> .merlin;) $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'B $(d)' >> .merlin;) $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'S $(d)' >> .merlin;) $(HIDE)$(MAKE) merlin-hook -f "$(SELF)" .PHONY: merlin merlin-hook:: @# Extension point .PHONY: merlin-hook # prints all variables debug: $(foreach v,\ $(sort $(filter-out $(INITIAL_VARS) INITIAL_VARS,\ $(.VARIABLES))),\ $(info $(v) = $($(v)))) .PHONY: debug .DEFAULT_GOAL := all # Users can create Makefile.local-late to hook into double-colon rules # or add other needed Makefile code, using defined # variables if necessary. -include Makefile.local-late # Local Variables: # mode: makefile-gmake # End: ================================================ FILE: examples/coq-example/proofs/coq/extraction/dummy_core_lib.v ================================================ From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. (* LIBRARY CODE *) Definition t_isize := Z. Notation "'t_Vec' T '((t_Global))'" := (list T). Definition impl_1__push {A} (l : list A) (a : A) : list A := cons a l. Definition impl_1__pop {A} (l : list A) : list A * option A := match l with | [] => ([], None) | (x :: xs) => (xs, Some x) end. Definition impl__unwrap {A} (x : option A) `{H : x <> None} : A := match x as k return k <> None -> _ with | None => fun H => False_rect _ (H eq_refl) | Some a => fun _ => a end H. Definition t_Add_f_add := (fun x y => x + y). Definition t_Mul_f_mul := (fun x y => x * y). Definition t_PartialEq_f_eq := (fun x y => x =? y). Definition impl__isize__rem_euclid := fun x y => x mod y. 
Definition cast := fun (x : Z) => x. Definition ne := fun x y => negb (x =? y). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition t_PartialOrd_f_lt := fun x y => x x - y. Definition impl__new {A} (tt : unit) : list A := []. Definition f_fold {A B} (l : list A) (i : B) (f : B -> A -> B) : B := List.fold_left f l i. Definition f_into_iter {A} := @id A. (* /LIBRARY CODE *) ================================================ FILE: examples/coq-example/src/dummy_core_lib.rs ================================================ ================================================ FILE: examples/coq-example/src/lib.rs ================================================ mod dummy_core_lib; use dummy_core_lib::*; enum Instruction { Push(isize), Pop, Add, Sub, Mul, Not, Dup, } impl Instruction { pub fn interpret(self, stack: &mut Vec) { match self { Instruction::Push(v) => stack.push(v), Instruction::Pop => { stack.pop(); } Instruction::Add => match (stack.pop(), stack.pop()) { (Some(a), Some(b)) => stack.push(b + a), _ => (), }, Instruction::Sub => match (stack.pop(), stack.pop()) { (Some(a), Some(b)) => stack.push(b - a), _ => (), }, Instruction::Mul => match (stack.pop(), stack.pop()) { (Some(a), Some(b)) => stack.push(b * a), _ => (), }, Instruction::Not => match stack.pop() { Some(a) => stack.push(if a == 0 { 1 } else { 0 }), _ => (), }, Instruction::Dup => match stack.pop() { Some(a) => { stack.push(a); stack.push(a); } _ => (), }, } } } fn example() -> Vec { let mut stk = Vec::new(); for cmd in [ Instruction::Push(1), Instruction::Push(1), Instruction::Add, Instruction::Push(1), Instruction::Push(1), Instruction::Push(1), Instruction::Add, Instruction::Add, Instruction::Dup, Instruction::Mul, Instruction::Sub, ] { cmd.interpret(&mut stk) } stk } // Push 1: 1 // Push 1: 1, 1 // Add: 2 // Push 1: 2, 1 // Push 1: 2, 1, 1 // Push 1: 2, 1, 1, 1 // Add: 2, 1, 2 // Add: 2, 3 // Dup: 2, 3, 3 // Mul: 2, 9 // Sub: -7 
================================================ FILE: examples/coverage/Cargo.toml ================================================ [package] name = "coverage" version = "0.1.0" edition = "2021" [dependencies] ================================================ FILE: examples/coverage/default.nix ================================================ { stdenv, lib, hax, coqPackages, craneLib, bat, coqGeneratedCore ? import ../../proof-libs/coq/coq {inherit stdenv coqPackages;}, }: let commonArgs = import ../commonArgs.nix {inherit craneLib lib;}; cargoArtifacts = craneLib.buildDepsOnly commonArgs; in craneLib.mkCargoDerivation (commonArgs // { inherit cargoArtifacts; pname = "coverage"; doCheck = false; buildPhaseCargoCommand = '' ( cd examples/coverage/ cargo hax into coq cd proofs/coq/extraction/ echo -e "-R ${coqGeneratedCore}/lib/coq/user-contrib/Core Core\n$(cat _CoqProject)" > _CoqProject coq_makefile -f _CoqProject -o Makefile make ) ''; cargoToml = ./Cargo.toml; buildInputs = [ hax coqPackages.coq-record-update coqPackages.coq ]; }) ================================================ FILE: examples/coverage/src/lib.rs ================================================ // https://doc.rust-lang.org/reference/types.html mod test_primitives; // mod test_sequence; mod test_enum; // mod test_struct; // mod test_closures; // mod test_functions; // mod test_instance; // mod test_trait; // mod test_arrays; ================================================ FILE: examples/coverage/src/test_arrays.rs ================================================ // // This function borrows a slice. // fn analyze_slice(slice: &[i32]) { // let _ = slice[0]; // let _ = slice.len(); // } // fn test(){ // // Fixed-size array (type signature is superfluous). // let xs: [i32; 5] = [1, 2, 3, 4, 5]; // // All elements can be initialized to the same value. // let ys: [i32; 500] = [0; 500]; // // Indexing starts at 0. 
// let _ = xs[0]; // let _ = xs[1]; // // `len` returns the count of elements in the array. // let _ = xs.len(); // // Arrays can be automatically borrowed as slices. // analyze_slice(&xs); // // Slices can point to a section of an array. // // They are of the form [starting_index..ending_index]. // // `starting_index` is the first position in the slice. // // `ending_index` is one more than the last position in the slice. // analyze_slice(&ys[1 .. 4]); // // Example of empty slice `&[]`: // let empty_array: [u32; 0] = []; // assert_eq!(&empty_array, &[]); // assert_eq!(&empty_array, &[][..]); // Same but more verbose // } ================================================ FILE: examples/coverage/src/test_closures.rs ================================================ // TODO: // fn test() { // let add : fn(i32, i32) -> i32 = |x, y| x + y; // let _ = (|x : &u8| { x + x })(&2); // fn f u8> (g: F) -> u8 { // g() + 2 // } // f(|| { // 23 // }); // // Prints "foobar". // } ================================================ FILE: examples/coverage/src/test_enum.rs ================================================ fn test() { { enum Foo<'a, T, const N: usize> { Bar(u8), Baz, Qux { x: &'a T, y: [T; N], z: u8 }, } let x: Foo = Foo::Baz; } { enum AnimalA { Dog, Cat, } let mut a: AnimalA = AnimalA::Dog; a = AnimalA::Cat; } { enum AnimalB { Dog(String, f64), Cat { name: String, weight: f64 }, } let mut a: AnimalB = AnimalB::Dog("Cocoa".to_string(), 37.2); a = AnimalB::Cat { name: "Spotty".to_string(), weight: 2.7, }; } { enum Examples { UnitLike, TupleLike(i32), StructLike { value: i32 }, } // use Examples::*; // Creates aliases to all variants. let x = Examples::UnitLike; // Path expression of the const item. let x = Examples::UnitLike {}; // Struct expression. let y = Examples::TupleLike(123); // Call expression. let y = Examples::TupleLike { 0: 123 }; // Struct expression using integer field names. let z = Examples::StructLike { value: 123 }; // Struct expression. 
} { #[repr(u8)] enum Enum { Unit = 3, Tuple(u16), Struct { a: u8, b: u16 } = 1, } } } ================================================ FILE: examples/coverage/src/test_functions.rs ================================================ fn first((value, _): (A, i32), y: B) -> A where B: Clone, { value } // foo is generic over A and B fn foo1(x: A, y: B) {} fn foo2(x: &[T], y: &[T; 1]) where T: Clone, { // details elided } fn test() { let x = [1u8]; foo2(&x, &x); foo2(&[1, 2], &x); } extern "Rust" fn foo3() {} // async fn regular_example() { } // TODO: Not yet supported // Requires std::fmt; // fn documented() { // #![doc = "Example"] // } ================================================ FILE: examples/coverage/src/test_instance.rs ================================================ // enum SomeEnum { // None, // Some(T), // } // trait SomeTrait { // fn some_fun(&self) -> Self; // } // impl SomeTrait for SomeEnum // where // T: SomeTrait, // { // #[inline] // fn some_fun(&self) -> Self { // match self { // SomeEnum::Some(x) => SomeEnum::Some(x.some_fun()), // SomeEnum::None => SomeEnum::None, // } // } // } ================================================ FILE: examples/coverage/src/test_primitives.rs ================================================ fn test_primtives() { // bool let _: bool = false; let _: bool = true; // Numerics let _: u8 = 12u8; let _: u16 = 123u16; let _: u32 = 1234u32; let _: u64 = 12345u64; let _: u128 = 123456u128; let _: usize = 32usize; let _: i8 = -12i8; let _: i16 = 123i16; let _: i32 = -1234i32; let _: i64 = 12345i64; let _: i128 = 123456i128; let _: isize = -32isize; let _: f32 = 1.2f32; let _: f64 = -1.23f64; // Textual let _: char = 'c'; let _: &str = "hello world"; // Never // cannot be built } ================================================ FILE: examples/coverage/src/test_sequence.rs ================================================ fn test() { // Tuple let _: () = (); let _: (u8, u16, i8) = (1, 2, 3); let _: u8 = (1, 2).0; let _: u8 = 
(1,).0; let _: u8 = (1, 2, 3, 4, 5).3; // Array let _: [u8; 0] = []; let _: [&str; 3] = ["23", "a", "hllo"]; let _: [u8; 14] = [2; 14]; // Slice let _: &[u8] = &[1, 2, 3, 4]; let _: &[&str] = &[]; } ================================================ FILE: examples/coverage/src/test_struct.rs ================================================ struct foo<'a, T, const N: usize> { bar: &'a T, baz: [T; N], qux: u8, } // Point {x: 10.0, y: 20.0}; // NothingInMe {}; // TuplePoint(10.0, 20.0); // TuplePoint { 0: 10.0, 1: 20.0 }; // Results in the same value as the above line // let u = game::User {name: "Joe", age: 35, score: 100_000}; // some_fn::(Cookie); fn test() { { struct Gamma; let a = Gamma; // Gamma unit value. let b = Gamma {}; // Exact same value as `a`. } { struct Position(i32, i32, i32); Position(0, 0, 0); // Typical way of creating a tuple struct. let c = Position; // `c` is a function that takes 3 arguments. let pos = c(8, 6, 7); // Creates a `Position` value. } // { // struct Color(u8, u8, u8); // let c1 = Color(0, 0, 0); // Typical way of creating a tuple struct. // let c2 = Color { // 0: 255, // 1: 127, // 2: 0, // }; // Specifying fields by index. // let c3 = Color { 1: 0, ..c2 }; // Fill out all other fields using a base struct. // } { struct PointA { x: i32, y: i32, } let p = PointA { x: 10, y: 11 }; let px: i32 = p.x; let mut p2 = PointA { x: 10, y: 11 }; p2.x = 10; p2.y = 14; } { struct PointB(i32, i32); let p = PointB(10, 11); let px: i32 = match p { PointB(x, _) => x, }; } { struct CookieA; let c = [CookieA, CookieA {}, CookieA, CookieA {}]; } { struct Cookie {} const Cookie: Cookie = Cookie {}; let c = [Cookie, Cookie {}, Cookie, Cookie {}]; } } ================================================ FILE: examples/coverage/src/test_trait.rs ================================================ // Broken.. 
// // Co-inductive trait // trait TraitA { // type B : TraitB; // } // trait TraitB { // fn test(other : U) -> U // where U: TraitA; // } ================================================ FILE: examples/default.nix ================================================ { craneLib, stdenv, lib, hax, fstar, hacl-star, hax-env, jq, proverif, lean4, }: let commonArgs = import ./commonArgs.nix {inherit craneLib lib;}; cargoArtifacts = craneLib.buildDepsOnly commonArgs; in craneLib.mkCargoDerivation (commonArgs // { inherit cargoArtifacts; pname = "hax-examples"; doCheck = false; buildPhaseCargoCommand = '' cd examples eval $(hax-env) export CACHE_DIR=$(mktemp -d) export HINT_DIR=$(mktemp -d) export SHELL=${stdenv.shell} make clean # Should be a no-op (see `filter` above) # Need to inject `HAX_VANILLA_RUSTC=never` because of #472 sed -i "s/make -C limited-order-book/HAX_VANILLA_RUSTC=never make -C limited-order-book/g" Makefile make ''; buildInputs = [ hax hax-env fstar jq lean4 (proverif.overrideDerivation (_: { patches = [ ./proverif-psk/pv_div_by_zero_fix.diff ]; })) ]; }) ================================================ FILE: examples/hax.fst.config.json ================================================ { "fstar_exe": "fstar.exe", "options": [ "--cmi", "--warn_error", "-331", "--cache_checked_modules", "--cache_dir", "${HAX_HOME}/proof-libs/fstar/.cache", "--already_cached", "+Prims+FStar+LowStar+C+Spec.Loops+TestLib", "--query_stats", "--split_queries", "always" ], "include_dirs": [ ".", "${HACL_HOME}/lib", "${HAX_HOME}/proof-libs/fstar/rust_primitives", "${HAX_HOME}/proof-libs/fstar/core", "${HAX_HOME}/proof-libs/fstar/hax_lib" ] } ================================================ FILE: examples/kyber_compress/Cargo.toml ================================================ [package] name = "kyber_compress" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hax-lib.workspace = true 
================================================ FILE: examples/kyber_compress/Makefile ================================================ .PHONY: default clean default: make -C proofs/fstar/extraction clean: rm -f proofs/fstar/extraction/.depend rm -f proofs/fstar/extraction/*.fst ================================================ FILE: examples/kyber_compress/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S"
#   (let* ((fname (file-name-nondirectory buffer-file-name))
#          (target (concat fname "-in"))
#          (argstr (car (process-lines "make" "--quiet" target))))
#     (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

HACL_HOME ?= $(HOME)/.hax/hacl_home
FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe")
CACHE_DIR ?= .cache
HINT_DIR ?= .hints
SHELL ?= /usr/bin/env bash

# Fail fast with an explicit error if any required tool is missing from PATH.
EXECUTABLES = cargo cargo-hax jq
K := $(foreach bin,$(EXECUTABLES),\
        $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH")))

.PHONY: all verify clean

all:
	rm -f .depend && $(MAKE) .depend
	$(MAKE) verify

HAX_CLI = "cargo hax into fstar --z3rlimit 500"

# If $HACL_HOME doesn't exist, clone it
${HACL_HOME}:
	mkdir -p "${HACL_HOME}"
	git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}"

# If no F* file is detected, we run hax
# (note: the pattern is `*.fsti`, not `*fsti`, so only real F* interface
# files are matched)
ifeq "$(wildcard *.fst *.fsti)" ""
$(shell $(SHELL) -c $(HAX_CLI))
endif

# By default, we process all the files in the current directory
ROOTS = $(wildcard *.fst *.fsti)

# Regenerate F* files via hax when Rust sources change
$(ROOTS): $(shell find ../../../src -type f -name '*.rs')
	$(shell $(SHELL) -c $(HAX_CLI))

# The following is a bash script that discovers F* libraries
define FINDLIBS
    # Prints a path if and only if it exists. Takes one argument: the
    # path.
    function print_if_exists() {
        if [ -d "$$1" ]; then
            echo "$$1"
        fi
    }
    # Asks Cargo all the dependencies for the current crate or workspace,
    # and extract all "root" directories for each. Takes zero argument.
    function dependencies() {
        cargo metadata --format-version 1 |
            jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")'
    }
    # Find hax libraries *around* a given path. Takes one argument: the
    # path.
function find_hax_libraries_at_path() { path="$$1" # if there is a `proofs/fstar/extraction` subfolder, then that's a # F* library print_if_exists "$$path/proofs/fstar/extraction" # Maybe the `proof-libs` folder of hax is around? MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") if [ $$? -eq 0 ]; then print_if_exists "$$MAYBE_PROOF_LIBS/core" print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" fi } { while IFS= read path; do find_hax_libraries_at_path "$$path" done < <(dependencies) } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS") FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ --ext context_pruning \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: examples/kyber_compress/src/lib.rs ================================================ use hax_lib::{ensures, fstar, requires}; const FIELD_MODULUS: i32 = 3329; const UNSIGNED_FIELD_MODULUS: u32 = FIELD_MODULUS as u32; #[requires(n == 4 || n == 5 || n == 10 || n == 11 || n == 16)] #[ensures(|result| result < 2u32.pow(n as u32))] fn get_n_least_significant_bits(n: u8, value: 
u32) -> u32 { let nth_bit = 1 << n; let mask = nth_bit - 1; fstar!("Rust_primitives.Integers.logand_mask_lemma $value (v $n)"); value & mask } #[ requires( (coefficient_bits == 4 || coefficient_bits == 5 || coefficient_bits == 10 || coefficient_bits == 11) && fe < (FIELD_MODULUS as u16))] #[ ensures(|result| result < 1 << coefficient_bits)] pub fn compress_unsafe(coefficient_bits: u8, fe: u16) -> i32 { let mut compressed = (fe as u32) << (coefficient_bits + 1); compressed += UNSIGNED_FIELD_MODULUS; compressed /= UNSIGNED_FIELD_MODULUS << 1; compressed &= (1 << coefficient_bits) - 1; fstar!("Rust_primitives.Integers.logand_mask_lemma $compressed (v $coefficient_bits)"); get_n_least_significant_bits(coefficient_bits, compressed) as i32 } #[ requires( (coefficient_bits == 4 || coefficient_bits == 5 || coefficient_bits == 10 || coefficient_bits == 11) && fe < (FIELD_MODULUS as u16))] #[ ensures(|result| result < 1 << coefficient_bits)] pub fn compress(coefficient_bits: u8, fe: u16) -> i32 { let mut compressed = (fe as u64) << coefficient_bits; compressed += 1664 as u64; compressed *= 10_321_340; compressed >>= 35; compressed &= (1 << coefficient_bits) - 1; fstar!("Rust_primitives.Integers.logand_mask_lemma $compressed (v $coefficient_bits)"); let compressed = compressed as u32; get_n_least_significant_bits(coefficient_bits, compressed) as i32 } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { fn test(coefficient_bits: u8, fe: u16) { let c1 = compress_unsafe(coefficient_bits, fe); let c2 = compress(coefficient_bits, fe); assert_eq!(c1, c2); } for bits in [4u8, 5, 10, 11] { for fe in 0..3329 { test(bits, fe); } } } } ================================================ FILE: examples/lean_adc/Cargo.toml ================================================ [package] name = "lean_adc" version = "0.1.0" edition = "2021" [dependencies] hax-lib.workspace = true ================================================ FILE: examples/lean_adc/Makefile 
================================================ .PHONY: default clean default: cargo hax into lean (cd proofs/lean && \ elan default v4.29.0-rc1 && \ lake build) clean: -rm -f proofs/lean/extraction/lean_adc.lean -cd proofs/lean && lake clean ================================================ FILE: examples/lean_adc/proofs/lean/lake-manifest.json ================================================ {"version": "1.1.0", "packagesDir": ".lake/packages", "packages": [{"type": "path", "scope": "", "name": "Hax", "manifestFile": "lake-manifest.json", "inherited": false, "dir": "../../../../hax-lib/proof-libs/lean", "configFile": "lakefile.toml"}, {"url": "https://github.com/leanprover-community/quote4", "type": "git", "subDir": null, "scope": "", "rev": "23324752757bf28124a518ec284044c8db79fee5", "name": "Qq", "manifestFile": "lake-manifest.json", "inputRev": "v4.29.0-rc1", "inherited": true, "configFile": "lakefile.toml"}], "name": "lean_adc", "lakeDir": ".lake"} ================================================ FILE: examples/lean_adc/proofs/lean/lakefile.toml ================================================ name = "lean_adc" version = "0.1.0" defaultTargets = ["lean_adc"] [[lean_lib]] name = "lean_adc" roots = ["extraction.lean_adc"] [[require]] name = "Hax" path = "../../../../hax-lib/proof-libs/lean" ================================================ FILE: examples/lean_adc/proofs/lean/lean-toolchain ================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: examples/lean_adc/src/lib.rs ================================================ //! # 32-bit Addition with Carry (ADC) //! //! This example demonstrates formal verification of a 32-bit //! addition-with-carry (ADC) operation using the hax toolchain and //! Lean 4's `bv_decide` bit-vector decision procedure. //! //! ## What is ADC? //! //! Addition with carry (ADC) is a fundamental building block in //! multi-precision (bignum) arithmetic. 
When adding large numbers //! represented as arrays of 32-bit "limbs", each limb addition may //! overflow. The ADC operation captures this overflow as a carry-out //! bit, which feeds into the next limb addition. //! //! For example, to add two 128-bit numbers stored as four 32-bit limbs: //! //! ```text //! (sum0, c0) = adc(a[0], b[0], 0) //! (sum1, c1) = adc(a[1], b[1], c0) //! (sum2, c2) = adc(a[2], b[2], c1) //! (sum3, c3) = adc(a[3], b[3], c2) //! ``` //! //! ## Verification approach //! //! The precondition and postcondition are expressed as plain Rust //! functions (`adc_precondition`, `adc_postcondition`) for documentation. //! A correctness theorem is embedded via `#[hax_lib::lean::after(...)]` //! using a Hoare triple with pure Lean propositions (not the monadic //! Rust functions), since `bv_decide` requires pure BitVec goals. //! //! The proof is fully automated using the tactics from Hax: //! //! 1. `hax_mvcgen` — generates pure verification conditions from //! the monadic function body using the `bv` specset lemmas. //! 2. `bv_decide` — Lean's bit-blasting decision procedure //! automatically verifies the remaining BitVec goals. //! //! The key property verified: //! //! ```text //! a + b + carry_in == sum + carry_out * 2^32 //! ``` //! //! where the left-hand side is computed in `u64` to avoid overflow. /// Precondition: the input carry must be 0 or 1 (a single bit). /// /// This function documents the precondition and is extracted to Lean, /// but the proof theorem states the precondition as a pure Lean /// proposition (`carry_in ≤ 1`) rather than using this monadic function. fn adc_precondition(carry_in: u32) -> bool { carry_in <= 1 } /// Postcondition: the 64-bit sum `a + b + carry_in` is correctly /// represented as `sum + carry_out * 2^32`. /// /// We verify two properties: /// 1. `carry_out` is 0 or 1 (it is a single bit). /// 2. The full equation holds: the wide sum equals the split result. 
/// /// Like `adc_precondition`, this documents the postcondition but the /// proof uses pure Lean propositions instead of this monadic function. fn adc_postcondition(a: u32, b: u32, carry_in: u32, sum: u32, carry_out: u32) -> bool { carry_out <= 1 && (a as u64 + b as u64 + carry_in as u64) == (sum as u64 + ((carry_out as u64) << 32u64)) } /// 32-bit addition with carry. /// /// Computes `a + b + carry_in` where `carry_in` is 0 or 1. /// Returns `(sum, carry_out)` where: /// - `sum` is the lower 32 bits of the result /// - `carry_out` is 1 if the addition overflowed, 0 otherwise /// /// The computation widens operands to `u64` to avoid overflow, then /// splits the 64-bit result back into 32-bit values. /// /// # Verification /// /// The `#[hax_lib::lean::after(...)]` attribute embeds a Lean 4 /// theorem directly after the extracted function definition. This /// theorem states: given the precondition (carry_in is 0 or 1), /// the function satisfies the postcondition (the full sum equation /// holds and carry_out is 0 or 1). /// /// The proof uses: /// 1. `hax_mvcgen` — to generate pure verification conditions /// from the monadic function body. /// 2. `bv_decide` — Lean's bit-blasting procedure to /// automatically verify the remaining BitVec goals. #[hax_lib::lean::after( // The specification is stated with pure Lean propositions (not through the // monadic adc_precondition/adc_postcondition Rust functions), so that // bv_decide can reason about the BitVec properties directly. 
" set_option maxHeartbeats 1000000 in set_option hax_mvcgen.specset \"bv\" in theorem adc_u32_spec (a b carry_in : u32) : ⦃ ⌜ carry_in ≤ 1 ⌝ ⦄ lean_adc.adc_u32 a b carry_in ⦃ ⇓ ⟨sum, carry_out⟩ => ⌜ carry_out ≤ 1 ∧ UInt32.toUInt64 a + UInt32.toUInt64 b + UInt32.toUInt64 carry_in = UInt32.toUInt64 sum + (UInt32.toUInt64 carry_out <<< (32 : UInt64)) ⌝ ⦄ := by hax_mvcgen [lean_adc.adc_u32] <;> bv_decide (timeout := 90) " )] pub fn adc_u32(a: u32, b: u32, carry_in: u32) -> (u32, u32) { // Widen to u64 so the addition cannot overflow. let wide: u64 = a as u64 + b as u64 + carry_in as u64; // Extract the lower 32 bits as the sum. let sum: u32 = wide as u32; // Extract bit 32 as the carry-out (0 or 1). let carry_out: u32 = (wide >> 32u64) as u32; (sum, carry_out) } ================================================ FILE: examples/lean_barrett/Cargo.toml ================================================ [package] name = "lean_barrett" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hax-lib.workspace = true ================================================ FILE: examples/lean_barrett/Makefile ================================================ .PHONY: default clean default: cargo hax into lean (cd proofs/lean && \ elan default v4.29.0-rc1 && \ lake build) clean: -rm -f proofs/lean/extraction/Lean_barrett.lean -cd proofs/lean && lake clean ================================================ FILE: examples/lean_barrett/proofs/lean/lake-manifest.json ================================================ {"version": "1.1.0", "packagesDir": ".lake/packages", "packages": [{"type": "path", "scope": "", "name": "Hax", "manifestFile": "lake-manifest.json", "inherited": false, "dir": "../../../../hax-lib/proof-libs/lean", "configFile": "lakefile.toml"}, {"url": "https://github.com/leanprover-community/quote4", "type": "git", "subDir": null, "scope": "", "rev": 
"bd58c9efe2086d56ca361807014141a860ddbf8c", "name": "Qq", "manifestFile": "lake-manifest.json", "inputRev": "v4.27.0", "inherited": true, "configFile": "lakefile.toml"}], "name": "Lean_barrett", "lakeDir": ".lake"} ================================================ FILE: examples/lean_barrett/proofs/lean/lakefile.toml ================================================ name = "lean_barrett" version = "0.1.0" defaultTargets = ["lean_barrett"] [[lean_lib]] name = "lean_barrett" roots = ["extraction.lean_barrett"] [[require]] name = "Hax" path = "../../../../hax-lib/proof-libs/lean" ================================================ FILE: examples/lean_barrett/proofs/lean/lean-toolchain ================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: examples/lean_barrett/src/lib.rs ================================================ /// Values having this type hold a representative 'x' of the Kyber field. /// We use 'fe' as a shorthand for this type. pub(crate) type FieldElement = i32; const BARRETT_R: i64 = 0x400000; // is 0x4000000 in the normal barrett example const BARRETT_SHIFT: i64 = 26; const BARRETT_MULTIPLIER: i64 = 20159; pub(crate) const FIELD_MODULUS: i32 = 3329; // Signed Barrett Reduction // // Given an input `value`, `barrett_reduce` outputs a representative `result` // such that: // // - result ≡ value (mod FIELD_MODULUS) // - the absolute value of `result` is bound as follows: // // `|result| ≤ FIELD_MODULUS / 2 · (|value|/BARRETT_R + 1) // // In particular, if `|value| < BARRETT_R`, then `|result| < FIELD_MODULUS`. 
fn barrett_reduce_precondition(value: FieldElement) -> bool { i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R } fn barrett_reduce_postcondition(value: FieldElement, result: FieldElement) -> bool { let valid_result = value % FIELD_MODULUS; result > -FIELD_MODULUS && result < FIELD_MODULUS && (result == valid_result || result == valid_result + FIELD_MODULUS || result == valid_result - FIELD_MODULUS) } pub fn barrett_reduce(value: FieldElement) -> FieldElement { let t = i64::from(value) * BARRETT_MULTIPLIER; let t = t + (BARRETT_R >> 1); let quotient = t >> BARRETT_SHIFT; let quotient = quotient as i32; let sub = quotient * FIELD_MODULUS; value - sub } // A theorem stating that Barrett meets its post-condition, given its pre-condition. // In the next iteration, this theorem would be auto-generated, with a sorry proof. #[hax_lib::lean::replace( " set_option maxHeartbeats 1000000 in -- quite computation intensive theorem barrett_spec (value: i32) : ⦃ ⌜ barrett_reduce_precondition (value) = pure true ⌝ ⦄ barrett_reduce value ⦃ ⇓ r => ⌜ barrett_reduce_postcondition value r = pure true⌝ ⦄ := by -- Unfold all auxiliary functions: unfold barrett_reduce barrett_reduce_precondition barrett_reduce_postcondition FIELD_MODULUS BARRETT_R BARRETT_MULTIPLIER BARRETT_SHIFT at * -- Invoke bit blasting: hax_bv_decide (timeout := 90) " )] pub fn theorem() {} ================================================ FILE: examples/lean_chacha20/Cargo.toml ================================================ [package] name = "lean_chacha20" version = "0.1.0" authors = ["Clement Blaudeau "] edition = "2021" [dependencies] hax-lib.workspace = true hax-bounded-integers.workspace = true ================================================ FILE: examples/lean_chacha20/Makefile ================================================ .PHONY: default clean default: cargo hax into lean (cd proofs/lean && \ elan default v4.29.0-rc1 && \ lake build) clean: -rm -f proofs/lean/extraction/ -cd proofs/lean && 
lake clean ================================================ FILE: examples/lean_chacha20/proofs/lean/lake-manifest.json ================================================ {"version": "1.1.0", "packagesDir": ".lake/packages", "packages": [{"type": "path", "scope": "", "name": "Hax", "manifestFile": "lake-manifest.json", "inherited": false, "dir": "../../../../hax-lib/proof-libs/lean", "configFile": "lakefile.toml"}, {"url": "https://github.com/leanprover-community/quote4", "type": "git", "subDir": null, "scope": "", "rev": "bd58c9efe2086d56ca361807014141a860ddbf8c", "name": "Qq", "manifestFile": "lake-manifest.json", "inputRev": "v4.27.0", "inherited": true, "configFile": "lakefile.toml"}], "name": "Lean_chacha20", "lakeDir": ".lake"} ================================================ FILE: examples/lean_chacha20/proofs/lean/lakefile.toml ================================================ name = "lean_chacha20" version = "0.1.0" defaultTargets = ["lean_chacha20"] [[lean_lib]] name = "lean_chacha20" roots = ["extraction.lean_chacha20"] [[require]] name = "Hax" path = "../../../../hax-lib/proof-libs/lean" ================================================ FILE: examples/lean_chacha20/proofs/lean/lean-toolchain ================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: examples/lean_chacha20/src/hacspec_helper.rs ================================================ use super::State; #[hax_lib::requires(bytes.len() == 12)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn to_le_u32s_3(bytes: &[u8]) -> [u32; 3] { // assert_eq!($l, bytes.len() / 4); let mut out = [0; 3]; // for (i, block) in bytes.chunks(4).enumerate() { for i in 0..3 { out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap()); } out } #[hax_lib::requires(bytes.len() == 32)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn to_le_u32s_8(bytes: &[u8]) -> [u32; 8] { // 
assert_eq!(8, bytes.len() / 4); let mut out = [0; 8]; // for (i, block) in bytes.chunks(4).enumerate() { for i in 0..8 { out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap()); } out } #[hax_lib::requires(bytes.len() == 64)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn to_le_u32s_16(bytes: &[u8]) -> [u32; 16] { // assert_eq!(16, bytes.len() / 4); let mut out = [0; 16]; // for (i, block) in bytes.chunks(4).enumerate() { for i in 0..16 { out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap()); } out } #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn u32s_to_le_bytes(state: &[u32; 16]) -> [u8; 64] { // let mut out = [0; 64]; for i in 0..state.len() { let tmp = state[i].to_le_bytes(); for j in 0..4 { out[i * 4 + j] = tmp[j]; } } out } #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn xor_state(mut state: State, other: State) -> State { for i in 0..16 { state[i] = state[i] ^ other[i]; } state } #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn add_state(mut state: State, other: State) -> State { for i in 0..16 { state[i] = state[i].wrapping_add(other[i]); } state } #[hax_lib::requires(val.len() <= 64)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub(super) fn update_array(mut array: [u8; 64], val: &[u8]) -> [u8; 64] { // assert!(64 >= val.len()); for i in 0..val.len() { array[i] = val[i]; } array } ================================================ FILE: examples/lean_chacha20/src/lib.rs ================================================ mod hacspec_helper; use hacspec_helper::*; use hax_lib as hax; use hax_lib::int::ToInt; type State = [u32; 16]; type Block = [u8; 64]; type ChaChaIV = [u8; 12]; type ChaChaKey = [u8; 32]; type StateIdx = usize; #[hax_lib::requires(a <= 15 && b <= 15 && d <= 15)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] fn chacha20_line(a: 
StateIdx, b: StateIdx, d: StateIdx, s: u32, m: State) -> State { let mut state = m; state[a] = state[a].wrapping_add(state[b]); state[d] = state[d] ^ state[a]; state[d] = state[d].rotate_left(s); state } #[hax_lib::requires(a <= 15 && b <= 15 && c <= 15 && d <= 15)] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub fn chacha20_quarter_round( a: StateIdx, b: StateIdx, c: StateIdx, d: StateIdx, state: State, ) -> State { let state = chacha20_line(a, b, d, 16, state); let state = chacha20_line(c, d, b, 12, state); let state = chacha20_line(a, b, d, 8, state); chacha20_line(c, d, b, 7, state) } use hax_lib::*; fn chacha20_double_round(state: State) -> State { let state = chacha20_quarter_round(0, 4, 8, 12, state); let state = chacha20_quarter_round(1, 5, 9, 13, state); let state = chacha20_quarter_round(2, 6, 10, 14, state); let state = chacha20_quarter_round(3, 7, 11, 15, state); let state = chacha20_quarter_round(0, 5, 10, 15, state); let state = chacha20_quarter_round(1, 6, 11, 12, state); let state = chacha20_quarter_round(2, 7, 8, 13, state); chacha20_quarter_round(3, 4, 9, 14, state) } pub fn chacha20_rounds(state: State) -> State { let mut st = state; let e: usize = 10; for _i in 0..e { st = chacha20_double_round(st); } st } pub fn chacha20_core(ctr: u32, st0: State) -> State { let mut state = st0; state[12] = state[12].wrapping_add(ctr); let k = chacha20_rounds(state); add_state(state, k) } pub fn chacha20_init(key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> State { let key_u32: [u32; 8] = to_le_u32s_8(key); let iv_u32: [u32; 3] = to_le_u32s_3(iv); [ 0x6170_7865, 0x3320_646e, 0x7962_2d32, 0x6b20_6574, key_u32[0], key_u32[1], key_u32[2], key_u32[3], key_u32[4], key_u32[5], key_u32[6], key_u32[7], ctr, iv_u32[0], iv_u32[1], iv_u32[2], ] } pub fn chacha20_key_block(state: State) -> Block { let state = chacha20_core(0u32, state); u32s_to_le_bytes(&state) } pub fn chacha20_key_block0(key: &ChaChaKey, iv: &ChaChaIV) -> Block { let state = 
chacha20_init(key, iv, 0u32); chacha20_key_block(state) } #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub fn chacha20_encrypt_block(st0: State, ctr: u32, plain: &Block) -> Block { let st = chacha20_core(ctr, st0); let pl: State = to_le_u32s_16(plain); let encrypted = xor_state(st, pl); u32s_to_le_bytes(&encrypted) } #[hax_lib::requires(plain.len() <= 64)] #[hax_lib::ensures(|res| res.len() == plain.len())] #[hax_lib::lean::proof_method::grind] pub fn chacha20_encrypt_last(st0: State, ctr: u32, plain: &[u8]) -> Vec { let mut b: Block = [0; 64]; b = update_array(b, plain); b = chacha20_encrypt_block(st0, ctr, &b); b[0..plain.len()].to_vec() } #[hax_lib::lean::proof( "by hax_mvcgen [chacha20_update] <;> try grind [USize64.toNat_add, Array.append_eq_append] · expose_names have : 64 * i.toNat + 64 ≤ m.val.size := by grind grind" )] #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub fn chacha20_update(st0: State, m: &[u8]) -> Vec { let mut blocks_out = Vec::new(); let num_blocks = m.len() / 64; let remainder_len = m.len() % 64; for i in 0..num_blocks { hax_lib::loop_invariant!( |i: usize| blocks_out.len().to_int() == i.to_int() * 64usize.to_int() ); // Full block let b = chacha20_encrypt_block(st0, i as u32, &m[64 * i..(64 * i + 64)].try_into().unwrap()); hax_lib::assume!(blocks_out.len() == i * 64); blocks_out.extend_from_slice(&b); } hax_lib::assume!(blocks_out.len() == num_blocks * 64); if remainder_len != 0 { // Last block let b = chacha20_encrypt_last(st0, num_blocks as u32, &m[64 * num_blocks..m.len()]); blocks_out.extend_from_slice(&b); } blocks_out } #[hax_lib::ensures(|_| true)] #[hax_lib::lean::proof_method::grind] pub fn chacha20(m: &[u8], key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> Vec { let state = chacha20_init(key, iv, ctr); chacha20_update(state, m) } ================================================ FILE: examples/lean_tutorial/Cargo.toml ================================================ [package] name = 
"lean_tutorial" version = "0.1.0" [dependencies] hax-lib.workspace = true ================================================ FILE: examples/lean_tutorial/Makefile ================================================ .PHONY: default clean default: cargo hax into lean (cd proofs/lean && \ elan default v4.29.0-rc1 && \ lake build) clean: -rm -f proofs/lean/extraction/lean_tutorial.lean -cd proofs/lean && lake clean ================================================ FILE: examples/lean_tutorial/proofs/lean/lake-manifest.json ================================================ {"version": "1.1.0", "packagesDir": ".lake/packages", "packages": [{"type": "path", "scope": "", "name": "Hax", "manifestFile": "lake-manifest.json", "inherited": false, "dir": "../../../../hax-lib/proof-libs/lean", "configFile": "lakefile.toml"}], "name": "Lean_tutorial", "lakeDir": ".lake"} ================================================ FILE: examples/lean_tutorial/proofs/lean/lakefile.toml ================================================ name = "Lean_tutorial" version = "0.1.0" defaultTargets = ["Lean_tutorial"] [[lean_lib]] name = "Lean_tutorial" roots = ["extraction.Lean_tutorial"] [[require]] name = "Hax" path = "../../../../hax-lib/proof-libs/lean" ================================================ FILE: examples/lean_tutorial/proofs/lean/lean-toolchain ================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: examples/lean_tutorial/src/lib.rs ================================================ #[hax_lib::requires(x < 16)] #[hax_lib::ensures(|res| res >= x)] #[hax_lib::lean::proof("by unfold square; hax_bv_decide")] fn square(x: u8) -> u8 { x * x } ================================================ FILE: examples/limited-order-book/Cargo.toml ================================================ [package] name = "lob_backend" version = "0.1.0" edition = "2021" # See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html [lib] crate-type = ["cdylib", "lib"] [dependencies] candid = "0.9.6" ic-cdk = "0.10.0" ic-cdk-macros = "0.8.1" hax-lib.workspace = true serde = { version = "1.0" } ================================================ FILE: examples/limited-order-book/Makefile ================================================ .PHONY: default default: make -C proofs/fstar/extraction clean: rm -f proofs/fstar/extraction/.depend rm -f proofs/fstar/extraction/*.fst ================================================ FILE: examples/limited-order-book/README.md ================================================ This crate comes from https://github.com/oggy-dfin/lob ================================================ FILE: examples/limited-order-book/lob_backend.did ================================================ type GetBookResult = record { asks : vec Order; bids : vec Order }; type Match = record { ask_id : nat64; quantity : nat64; price : nat64; bid_id : nat64; }; type Order = record { id : nat64; side : Side; quantity : nat64; price : nat64; }; type Side = variant { Buy; Sell }; service : (opt principal) -> { add_order : (Order) -> (vec Match); get_book : () -> (GetBookResult) query; } ================================================ FILE: examples/limited-order-book/proofs/coq/extraction/Lob_backend.v ================================================ (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Inductive t_Side : Type := | Side_Buyt_Side | Side_Sellt_Side. Record t_Order : Type :={ f_quantity : int64; f_price : int64; f_side : t_Side_t; f_id : int64; }. Record t_Match : Type :={ f_quantity : int64; f_price : int64; f_ask_id : int64; f_bid_id : int64; }. 
Definition is_match (order : t_Order_t) (other : t_Order_t) : bool := andb (andb (andb ((f_quantity order)>.?(@repr WORDSIZE64 0)) ((f_quantity other)>.?(@repr WORDSIZE64 0))) ((f_side order)<>(f_side other))) (orb (andb ((f_side order)=.?Side_Buyt_Side_t) ((f_price order)>=.?(f_price other))) (andb ((f_side order)=.?Side_Sellt_Side_t) ((f_price order)<=.?(f_price other)))). Definition impl__Order__try_match (self : t_Order_t) (other : t_Order_t) : t_Option_t t_Match_t := if is_match self other then let quantity := (min (f_quantity self) (f_quantity other)) : int64 in let '(bid_id,ask_id) := (if (f_side self)=.?Side_Buyt_Side_t then (f_id self,f_id other) else (f_id other,f_id self)) : (int64 × int64) in Option_Some (Build_Match bid_idask_id(f_price self)quantity) else Option_Nonet_Option_t t_Match_t. Definition process_order (order : t_Order_t) (other_side : t_BinaryHeap_t T) : (t_BinaryHeap_t T × (t_Vec_t (t_Match_t) (t_Global_t) × t_Option_t t_Order_t)) := let matches := (impl__new) : t_Vec_t (t_Match_t) (t_Global_t) in let done := (false) : bool in let '(done,matches,order,other_side) := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 1)(impl_10__len other_side))) (done,matches,order,other_side) (fun '(done,matches,order,other_side) _i => if not done then match impl__and_then (impl_10__peek other_side) (fun other => impl__Order__try_match (f_into (f_clone other)) order) with | Option_Some m => let order := (Build_t_Order ((f_quantity order).-(f_quantity m))) : t_Order_t in let '(tmp0,out) := (impl_9__pop other_side) : (t_BinaryHeap_t T × t_Option_t T) in let other_side := (tmp0) : t_BinaryHeap_t T in let hoist1 := (out) : t_Option_t T in let hoist2 := (impl__unwrap hoist1) : T in let other := (f_into hoist2) : t_Order_t in let other := (Build_t_Order ((f_quantity other).-(f_quantity m))) : t_Order_t in let other_side := (if (f_quantity other)>.?(@repr WORDSIZE64 0) then let other_side := (impl_9__push other_side (f_from (f_clone other))) : t_BinaryHeap_t T 
in other_side else other_side) : t_BinaryHeap_t T in let matches := (impl_1__push matches m) : t_Vec_t (t_Match_t) (t_Global_t) in (done,matches,order,other_side) | _ => let done := (true) : bool in (done,matches,order,other_side) end else (done,matches,order,other_side))) : (bool × t_Vec_t (t_Match_t) (t_Global_t) × t_Order_t × t_BinaryHeap_t T) in let output := ((matches,if (f_quantity order)>.?(@repr WORDSIZE64 0) then Option_Some order else Option_Nonet_Option_t t_Order_t)) : (t_Vec_t (t_Match_t) (t_Global_t) × t_Option_t t_Order_t) in (other_side,output). ================================================ FILE: examples/limited-order-book/proofs/fstar/extraction/Lob_backend.fst ================================================ module Lob_backend #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Side = | Side_Buy : t_Side | Side_Sell : t_Side [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_7': Core_models.Cmp.t_PartialEq t_Side t_Side unfold let impl_7 = impl_7' type t_Order = { f_id:u64; f_side:t_Side; f_price:u64; f_quantity:u64 } let impl_14: Core_models.Clone.t_Clone t_Order = { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) } type t_Match = { f_bid_id:u64; f_ask_id:u64; f_price:u64; f_quantity:u64 } let is_match (order other: t_Order) : bool = order.f_quantity >. mk_u64 0 && other.f_quantity >. mk_u64 0 && order.f_side <>. other.f_side && (order.f_side =. (Side_Buy <: t_Side) && order.f_price >=. other.f_price || order.f_side =. (Side_Sell <: t_Side) && order.f_price <=. other.f_price) let impl_Order__try_match (self other: t_Order) : Core_models.Option.t_Option t_Match = if is_match self other then let quantity:u64 = Core_models.Cmp.min #u64 self.f_quantity other.f_quantity in let bid_id, ask_id:(u64 & u64) = if self.f_side =. 
(Side_Buy <: t_Side) then self.f_id, other.f_id <: (u64 & u64) else other.f_id, self.f_id <: (u64 & u64) in Core_models.Option.Option_Some ({ f_bid_id = bid_id; f_ask_id = ask_id; f_price = self.f_price; f_quantity = quantity } <: t_Match) <: Core_models.Option.t_Option t_Match else Core_models.Option.Option_None <: Core_models.Option.t_Option t_Match let process_order (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Order) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Convert.t_From v_T t_Order) (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Cmp.t_Ord v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i3: Core_models.Clone.t_Clone v_T) (order: t_Order) (other_side: Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) : (Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global & (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order)) = let matches:Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global = Alloc.Vec.impl__new #t_Match () in let done:bool = false in let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) = Rust_primitives.Hax.Folds.fold_range (mk_usize 1) (Alloc.Collections.Binary_heap.impl_11__len #v_T #Alloc.Alloc.t_Global other_side <: usize) (fun temp_0_ temp_1_ -> let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) = temp_0_ in let _:usize = temp_1_ in true) (done, matches, order, other_side <: (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global)) (fun temp_0_ e_i -> let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T 
Alloc.Alloc.t_Global) = temp_0_ in let e_i:usize = e_i in if ~.done <: bool then match Core_models.Option.impl__and_then #v_T #t_Match (Alloc.Collections.Binary_heap.impl_11__peek #v_T #Alloc.Alloc.t_Global other_side <: Core_models.Option.t_Option v_T) (fun other -> let other:v_T = other in impl_Order__try_match (Core_models.Convert.f_into #v_T #t_Order #FStar.Tactics.Typeclasses.solve (Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve other <: v_T) <: t_Order) order <: Core_models.Option.t_Option t_Match) <: Core_models.Option.t_Option t_Match with | Core_models.Option.Option_Some m -> let order:t_Order = { order with f_quantity = order.f_quantity -! m.f_quantity } <: t_Order in let tmp0, out:(Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global & Core_models.Option.t_Option v_T) = Alloc.Collections.Binary_heap.impl_10__pop #v_T #Alloc.Alloc.t_Global other_side in let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global = tmp0 in let (other: t_Order):t_Order = Core_models.Convert.f_into #v_T #t_Order #FStar.Tactics.Typeclasses.solve (Core_models.Option.impl__unwrap #v_T out <: v_T) in let other:t_Order = { other with f_quantity = other.f_quantity -! m.f_quantity } <: t_Order in let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global = if other.f_quantity >. mk_u64 0 then let _:Prims.unit = Hax_lib.v_assume (b2t ((Alloc.Collections.Binary_heap.impl_11__len #v_T #Alloc.Alloc.t_Global other_side <: usize) <. 
Core_models.Num.impl_usize__MAX <: bool)) in let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global = Alloc.Collections.Binary_heap.impl_10__push #v_T #Alloc.Alloc.t_Global other_side (Core_models.Convert.f_from #v_T #t_Order #FStar.Tactics.Typeclasses.solve (Core_models.Clone.f_clone #t_Order #FStar.Tactics.Typeclasses.solve other <: t_Order) <: v_T) in other_side else other_side in let _:Prims.unit = Hax_lib.v_assume (b2t ((Alloc.Vec.impl_1__len #t_Match #Alloc.Alloc.t_Global matches <: usize) <. Core_models.Num.impl_usize__MAX <: bool)) in let matches:Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global = Alloc.Vec.impl_1__push #t_Match #Alloc.Alloc.t_Global matches m in done, matches, order, other_side <: (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) | _ -> let done:bool = true in done, matches, order, other_side <: (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) else done, matches, order, other_side <: (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order & Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global)) in let hax_temp_output:(Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order) = matches, (if order.f_quantity >. 
mk_u64 0 then Core_models.Option.Option_Some order <: Core_models.Option.t_Option t_Order else Core_models.Option.Option_None <: Core_models.Option.t_Option t_Order) <: (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order) in other_side, hax_temp_output <: (Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global & (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order)) ================================================ FILE: examples/limited-order-book/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HACL_HOME ?= $(HOME)/.hax/hacl_home FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= .cache HINT_DIR ?= .hints SHELL ?= /usr/bin/env bash EXECUTABLES = cargo cargo-hax jq K := $(foreach bin,$(EXECUTABLES),\ $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH"))) .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify HAX_CLI = "cargo hax into -i '-** +**::process_order' fstar" # If $HACL_HOME doesn't exist, clone it ${HACL_HOME}: mkdir -p "${HACL_HOME}" git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}" # If no F* file is detected, we run hax ifeq "$(wildcard *.fst *fsti)" "" $(shell $(SHELL) -c $(HAX_CLI)) endif # By default, we process all the files in the current directory ROOTS = $(wildcard *.fst *fsti) # Regenerate F* files via hax when Rust sources change $(ROOTS): $(shell find ../../../src -type f -name '*.rs') $(shell $(SHELL) -c $(HAX_CLI)) # The following is a bash script that discovers F* libraries define FINDLIBS # Prints a path if and only if it exists. Takes one argument: the # path. function print_if_exists() { if [ -d "$$1" ]; then echo "$$1" fi } # Asks Cargo all the dependencies for the current crate or workspace, # and extracts all "root" directories for each. Takes zero arguments. function dependencies() { cargo metadata --format-version 1 | jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")' } # Find hax libraries *around* a given path. Takes one argument: the # path. 
function find_hax_libraries_at_path() { path="$$1" # if there is a `proofs/fstar/extraction` subfolder, then that's a # F* library print_if_exists "$$path/proofs/fstar/extraction" # Maybe the `proof-libs` folder of hax is around? MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") if [ $$? -eq 0 ]; then print_if_exists "$$MAYBE_PROOF_LIBS/core" print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" fi } { while IFS= read path; do find_hax_libraries_at_path "$$path" done < <(dependencies) } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS") FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: examples/limited-order-book/src/canister.rs ================================================ use candid::CandidType; use candid::Principal; use ic_cdk::caller; use ic_cdk_macros::{export_candid, init, query, update}; use std::cell::RefCell; use crate::{Match, Order, OrderBook}; thread_local! 
{ static ORDER_ADMIN: RefCell> = RefCell::default(); static ORDER_BOOK: RefCell> = RefCell::default(); } #[init] fn init(order_admin: Option) { ORDER_ADMIN.with(|oa| { *oa.borrow_mut() = order_admin; }); ORDER_BOOK.with(|ob| { ob.borrow_mut().replace(OrderBook::new()); }); } #[update] pub fn add_order(order: Order) -> Vec { assert!(order.quantity > 0, "Order quantity must be positive"); ORDER_ADMIN.with(|oa| { let oa = oa.borrow(); oa.as_ref() .map(|admin| assert!(admin == &caller(), "Only order admin can add orders")); }); ORDER_BOOK.with(|ob| { ob.borrow_mut() .as_mut() .expect("Order book not initialized") .add_order(order) }) } #[derive(CandidType)] pub struct GetBookResult { pub bids: Vec, pub asks: Vec, } #[query] pub fn get_book() -> GetBookResult { ORDER_BOOK.with(|ob| { let ob = ob.borrow(); GetBookResult { bids: ob.as_ref().expect("Order book not initialized").list_bids(), asks: ob.as_ref().expect("Order book not initialized").list_asks(), } }) } export_candid!(); ================================================ FILE: examples/limited-order-book/src/lib.rs ================================================ use candid::{CandidType, Deserialize}; use std::{cmp::Reverse, collections::BinaryHeap}; pub type OrderId = u64; #[derive(PartialEq, Eq, Clone, CandidType, Deserialize)] pub enum Side { Buy, Sell, } pub type Price = u64; pub type Quantity = u64; #[derive(PartialEq, Eq, Clone, CandidType, Deserialize)] pub struct Order { pub id: OrderId, pub side: Side, pub price: Price, pub quantity: Quantity, } #[derive(CandidType, Deserialize)] pub struct Match { pub bid_id: OrderId, pub ask_id: OrderId, pub price: Price, pub quantity: Quantity, } fn is_match(order: &Order, other: &Order) -> bool { order.quantity > 0 && other.quantity > 0 && order.side != other.side && ((order.side == Side::Buy && order.price >= other.price) || (order.side == Side::Sell && order.price <= other.price)) } impl Order { pub fn try_match(&self, other: &Self) -> Option { if is_match(self, 
other) { let quantity = std::cmp::min(self.quantity, other.quantity); let (bid_id, ask_id) = if self.side == Side::Buy { (self.id, other.id) } else { (other.id, self.id) }; Some(Match { bid_id, ask_id, // If there's a match, we could use any price between the two orders. // Here we use self.price. price: self.price, quantity, }) } else { None } } } impl PartialOrd for Order { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } impl Ord for Order { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.price.cmp(&other.price).then(self.id.cmp(&other.id)) } } impl From> for Order { fn from(other: Reverse) -> Self { other.0 } } impl From for Reverse { fn from(value: Order) -> Self { Self(value) } } pub struct OrderBook { bids: BinaryHeap, asks: BinaryHeap>, } impl OrderBook { pub fn new() -> Self { Self { bids: BinaryHeap::new(), asks: BinaryHeap::new(), } } /// Add an order to the order book; if it crosses with existing orders, return the match(es). /// Fill as much of the order as possible, and just keep the remainder on the order book. 
pub fn add_order(&mut self, order: Order) -> Vec { assert!(order.quantity > 0); assert!(order.price > 0); match order.side { Side::Buy => { let (matches, opt_remaining_bid) = process_order(order, &mut self.asks); if let Some(remaining_bid) = opt_remaining_bid { self.bids.push(remaining_bid); } matches } Side::Sell => { let (matches, opt_remaining_ask) = process_order(order, &mut self.bids); if let Some(remaining_ask) = opt_remaining_ask { self.asks.push(Reverse(remaining_ask)); } matches } } } pub fn list_bids(&self) -> Vec { self.bids.iter().cloned().collect() } pub fn list_asks(&self) -> Vec { self.asks .iter() .cloned() .map(|Reverse(order)| order) .collect() } } fn process_order(mut order: Order, other_side: &mut BinaryHeap) -> (Vec, Option) where T: Into + From + Ord + Clone, { let mut matches = Vec::new(); let mut done = false; for _i in 1..other_side.len() { if !done { if let Some(m) = other_side .peek() .and_then(|other| Into::into(other.clone()).try_match(&order)) { // Goal 1: prove `order.quantity` does not underflow order.quantity -= m.quantity; // Goal 2: prove this `unwrap()` does not panic let mut other: Order = Into::into(other_side.pop().unwrap()); // Goal 3: prove `other.quantity` does not underflow other.quantity -= m.quantity; if other.quantity > 0 { hax_lib::assume!(other_side.len() < usize::MAX); other_side.push(From::from(other.clone())); } hax_lib::assume!(matches.len() < usize::MAX); matches.push(m); } else { done = true; } } } ( matches, if order.quantity > 0 { Some(order) } else { None }, ) } pub mod canister; ================================================ FILE: examples/proverif-psk/Cargo.toml ================================================ [package] name = "proverif-psk" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hax-lib.workspace = true libcrux = "=0.0.2-pre.2" [dev-dependencies] rand = { version = "0.8" } 
================================================
FILE: examples/proverif-psk/Makefile
================================================
check: ./proofs/proverif/extraction/lib.pvl
	timeout 30 proverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/analysis.pv

proofs/proverif/extraction/lib.pvl:
	cargo hax into pro-verif

clean:
	rm -f proofs/proverif/extraction/lib.pvl



================================================
FILE: examples/proverif-psk/Readme.md
================================================
# A hax ProVerif example

This crate demonstrates an example of ProVerif extraction using hax.

The crate provides functions for implementing a simplistic pre-shared-key (PSK) based protocol between an initiator and receiver, which is defined as follows:

```
Initiator(psk: AEADKey):
  let response_key = AEAD.KeyGen()
  let message = AEAD.Encrypt(psk, response_key)

Initiator -> Responder: message

Responder(psk: AEADKey, payload: &[u8]):
  let response_key = AEAD.Decrypt(psk, message)
  let response = AEAD.Encrypt(response_key, payload)

Responder -> Initiator: response

Initiator(response_key, response):
  let output = AEAD.Decrypt(response_key, response)
  return output
```

The crate does not implement message transport, only the initiator and responder protocol logic.

A handwritten ProVerif model of this protocol is included in `psk.pv` for comparison.

### On the use of `proverif::replace()`

Since ProVerif operates in a symbolic world, certain operations have to be represented abstractly, in symbolic terms. In this case, we give symbolic replacements for serialization and deserialization, as well as cryptographic operations such as encryption and decryption. They are thus treated as ideal implementations of their respective functionality in ProVerif's analysis of the protocol. To obtain assurance that these operations are correct and implemented securely, one of hax' other backends can be used.
## Extracting into ProVerif To obtain a ProVerif model of the protocol logic functions, run ``` cargo hax into pro-verif ``` This will generate a file `./proofs/proverif/extraction/lib.pvl`. ## Running a Basic Analysis on the Model We have provided a handwritten file `./proofs/proverif/extraction/analysis.pv`, which models the protocol using the extracted functions in `lib.pvl` and uses ProVerif to verify - that initiator and receiver can both complete the protocol, as well as - confidentiality of the pre-shared key and the protocol payload To let ProVerif perform the analysis, from the crate root, run: ``` proverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/extraction/analysis.pv ``` The expected final output is ``` -------------------------------------------------------------- Verification summary: Query not event(InitiatorFinished(initiator_result)) is false. Query not event(ResponderFinished(responder_result)) is false. Query not attacker(PSK[]) is true. Query not attacker(SECRET_PAYLOAD[]) is true. -------------------------------------------------------------- ``` ================================================ FILE: examples/proverif-psk/proofs/proverif/analysis.pv ================================================ (*****************************************) (* Top-level processes *) (*****************************************) event InitiatorFinished(bitstring). event ResponderFinished(bitstring). free PSK: proverif_psk__t_KeyIv [private]. free SECRET_PAYLOAD: bitstring [private]. query initiator_result: bitstring; event(InitiatorFinished(initiator_result)). query responder_result: bitstring; event(ResponderFinished(responder_result)). query attacker(PSK). query attacker(SECRET_PAYLOAD). 
let Initiator(psk: proverif_psk__t_KeyIv) = new ikm: bitstring; let (initiator_message: proverif_psk__t_Message, response_key: proverif_psk__t_KeyIv) = proverif_psk__initiate(ikm, psk) in out(c, initiator_message); in(c, response_message: proverif_psk__t_Message); let response = proverif_psk__finish(response_message, response_key) in event InitiatorFinished(response). let Responder(psk: proverif_psk__t_KeyIv, payload: bitstring) = in(c, initiator_message: proverif_psk__t_Message); let response_message = proverif_psk__respond( psk, payload, initiator_message ) in event ResponderFinished(payload); out(c, response_message). process Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD) ================================================ FILE: examples/proverif-psk/psk.pv ================================================ free c: channel. type key. fun senc(bitstring, key): bitstring. reduc forall m: bitstring, k: key; sdec(senc(m,k), k) = m. fun key_to_bitstring(key): bitstring. reduc forall k: key; bitstring_to_key(key_to_bitstring(k)) = k. event InitiatorFinished(bitstring). event ResponderFinished(bitstring). free PSK: key [private]. free SECRET_PAYLOAD: bitstring [private]. query initiator_result: bitstring; event(InitiatorFinished(initiator_result)). query responder_result: bitstring; event(ResponderFinished(responder_result)). query attacker(PSK). query attacker(SECRET_PAYLOAD). let Initiator(psk: key) = new response_key: key; let initiator_message = senc(key_to_bitstring(response_key), psk) in out(c, initiator_message); in(c, response_message: bitstring); let response = sdec(response_message, response_key) in event InitiatorFinished(response). let Responder(psk: key, payload: bitstring) = in(c, initiator_message: bitstring); let response_key = sdec(initiator_message, psk) in let response_message = senc(payload, bitstring_to_key(response_key)) in event ResponderFinished(payload); out(c, response_message). 
process Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD) ================================================ FILE: examples/proverif-psk/pv_div_by_zero_fix.diff ================================================ diff proverif2.05/src/display.ml proverif2.05/src/display.ml index c43785ec..2763d907 100644 --- proverif/src/display.ml +++ proverif/src/display.ml @@ -49,7 +49,7 @@ let dynamic_display str = then display_whitespace (!record_cursor_line - size); (* If we cannot determine the number of columns, we just assume that the statistics will fit on one line (the statistics will not be active by default) *) - let lines = if columns = -1 then 0 else ((max (!record_cursor_line) size) - 1) / columns in + let lines = if columns <= 0 then 0 else ((max (!record_cursor_line) size) - 1) / columns in (* Go to the beginning of the line *) print_string "\r"; if lines > 0 then ================================================ FILE: examples/proverif-psk/src/lib.rs ================================================ use hax_lib as hax; use libcrux::aead::{self, Algorithm}; const AEAD_KEY_NONCE: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305) + Algorithm::nonce_size(Algorithm::Chacha20Poly1305); const AEAD_KEY_LENGTH: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305); const EMPTY_AAD: &[u8; 0] = b""; const RESPONSE_KEY_CONTEXT: &[u8; 12] = b"response-key"; /* Type definitions */ #[derive(Debug)] pub enum Error { CryptoError, OtherError, } impl From for Error { fn from(_value: libcrux::aead::Error) -> Error { Error::CryptoError } } impl From for Error { fn from(_value: libcrux::hkdf::Error) -> Error { Error::CryptoError } } impl From for Error { fn from(_value: std::array::TryFromSliceError) -> Error { Error::OtherError } } #[hax::opaque] pub struct Message(aead::Tag, Vec); #[hax::opaque] pub struct KeyIv(libcrux::aead::Key, libcrux::aead::Iv); /* Wire formats */ #[hax::pv_constructor] fn serialize_key_iv(key_iv: &KeyIv) -> Vec { let mut result = Vec::new(); 
result.extend_from_slice(key_iv.1 .0.as_ref()); match &key_iv.0 { aead::Key::Chacha20Poly1305(k) => result.extend_from_slice(k.0.as_ref()), _ => unimplemented!(), } result } #[hax::proverif::replace( "reduc forall k: $:{KeyIv}; ${deserialize_key_iv}(${serialize_key_iv}(k)) = k." )] fn deserialize_key_iv(bytes: &[u8]) -> Result { let iv = aead::Iv::new(&bytes[..12])?; let key = aead::Key::from_slice(Algorithm::Chacha20Poly1305, &bytes[12..])?; Ok(KeyIv(key, iv)) } /* Cryptographic functions */ #[hax::pv_constructor] fn derive_key_iv(ikm: &[u8], info: &[u8]) -> Result { let key_iv_bytes = libcrux::hkdf::expand(libcrux::hkdf::Algorithm::Sha256, ikm, info, AEAD_KEY_NONCE)?; let (key_bytes, iv_bytes) = key_iv_bytes.split_at(AEAD_KEY_LENGTH); let key = libcrux::aead::Key::from_slice(libcrux::aead::Algorithm::Chacha20Poly1305, key_bytes)?; let iv = libcrux::aead::Iv(iv_bytes.try_into()?); Ok(KeyIv(key, iv)) } #[hax::proverif::replace("fun ${encrypt} ($:{KeyIv}, bitstring): $:{Message}.")] pub fn encrypt(key_iv: &KeyIv, message: &[u8]) -> Result { let (tag, ctxt) = libcrux::aead::encrypt_detached(&key_iv.0, message, aead::Iv(key_iv.1 .0), EMPTY_AAD)?; Ok(Message(tag, ctxt)) } #[hax::proverif::replace( "reduc forall m: bitstring, k: $:{KeyIv}; ${decrypt}(k, ${encrypt}(k, m)) = m." 
)] fn decrypt(key_iv: &KeyIv, message: Message) -> Result, Error> { libcrux::aead::decrypt_detached( &key_iv.0, message.1, aead::Iv(key_iv.1 .0), EMPTY_AAD, &message.0, ) .map_err(|_| Error::CryptoError) } /* Protocol */ pub fn initiate(ikm: &[u8], psk: &KeyIv) -> Result<(Message, KeyIv), Error> { let response_key_iv = derive_key_iv(ikm, RESPONSE_KEY_CONTEXT)?; let serialized_responder_key = serialize_key_iv(&response_key_iv); let initiator_message = encrypt(psk, &serialized_responder_key)?; Ok((initiator_message, response_key_iv)) } pub fn respond(psk: &KeyIv, payload: &[u8], message: Message) -> Result { let response_key_bytes = decrypt(psk, message)?; let response_key_iv = deserialize_key_iv(&response_key_bytes)?; let responder_message = encrypt(&response_key_iv, payload)?; Ok(responder_message) } pub fn finish(message: Message, response_key_iv: &KeyIv) -> Result, Error> { let response_bytes = decrypt(response_key_iv, message)?; Ok(response_bytes) } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { use rand::{rngs::OsRng, RngCore}; fn random_array() -> [u8; L] { let mut rng = OsRng; let mut seed = [0; L]; rng.try_fill_bytes(&mut seed).unwrap(); seed } let payload = b"SECRET"; let ikm_psk = random_array::<32>(); let ikm_responder_key = random_array::<32>(); let psk = derive_key_iv(&ikm_psk, b"pre-shared-key") .map_err(|_| Error::CryptoError) .unwrap(); let (initiator_message, response_key) = initiate(&ikm_responder_key, &psk).unwrap(); let responder_message = respond(&psk, payload, initiator_message).unwrap(); let initiator_finish = finish(responder_message, &response_key).unwrap(); assert_eq!(payload.to_vec(), initiator_finish); } } ================================================ FILE: examples/sha256/.gitignore ================================================ target/ Cargo.lock ================================================ FILE: examples/sha256/Cargo.toml ================================================ [package] name = "sha256" version = 
"0.1.0" authors = ["Franziskus Kiefer "] edition = "2021" [lib] path = "src/sha256.rs" [dependencies] hax-lib.workspace = true ================================================ FILE: examples/sha256/Makefile ================================================ .PHONY: default default: make -C proofs/fstar/extraction clean: rm -f proofs/fstar/extraction/.depend rm -f proofs/fstar/extraction/*.fst ================================================ FILE: examples/sha256/proofs/coq/extraction/Sha256.v ================================================ (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. (*Not implemented yet? todo(item)*) Require Import Hax_lib_macros. (* as hax *) Require Import Std. (* as TryInto *) Definition v_BLOCK_SIZE : uint_size := (@repr WORDSIZE32 64). Definition v_LEN_SIZE : uint_size := (@repr WORDSIZE32 8). Definition v_K_SIZE : uint_size := (@repr WORDSIZE32 64). Definition v_HASH_SIZE : uint_size := (@repr WORDSIZE32 256)./(@repr WORDSIZE32 8). Notation t_Block_t := (nseq int8 TODO: Int.to_string length). Notation t_OpTableType_t := (nseq int8 TODO: Int.to_string length). Notation t_Sha256Digest_t := (nseq int8 TODO: Int.to_string length). Notation t_RoundConstantsTable_t := (nseq int32 TODO: Int.to_string length). Notation t_Hash_t := (nseq int32 TODO: Int.to_string length). Definition ch (x : int32) (y : int32) (z : int32) : int32 := (x.&y).^((not x).&z). Definition maj (x : int32) (y : int32) (z : int32) : int32 := (x.&y).^((x.&z).^(y.&z)). Definition v_OP_TABLE : nseq int8 TODO: Int.to_string length := array_from_list [(@repr WORDSIZE8 2); (@repr WORDSIZE8 13); (@repr WORDSIZE8 22); (@repr WORDSIZE8 6); (@repr WORDSIZE8 11); (@repr WORDSIZE8 25); (@repr WORDSIZE8 7); (@repr WORDSIZE8 18); (@repr WORDSIZE8 3); (@repr WORDSIZE8 17); (@repr WORDSIZE8 19); (@repr WORDSIZE8 10)]. 
Definition v_K_TABLE : nseq int32 TODO: Int.to_string length := array_from_list [(@repr WORDSIZE32 1116352408); (@repr WORDSIZE32 1899447441); (@repr WORDSIZE32 3049323471); (@repr WORDSIZE32 3921009573); (@repr WORDSIZE32 961987163); (@repr WORDSIZE32 1508970993); (@repr WORDSIZE32 2453635748); (@repr WORDSIZE32 2870763221); (@repr WORDSIZE32 3624381080); (@repr WORDSIZE32 310598401); (@repr WORDSIZE32 607225278); (@repr WORDSIZE32 1426881987); (@repr WORDSIZE32 1925078388); (@repr WORDSIZE32 2162078206); (@repr WORDSIZE32 2614888103); (@repr WORDSIZE32 3248222580); (@repr WORDSIZE32 3835390401); (@repr WORDSIZE32 4022224774); (@repr WORDSIZE32 264347078); (@repr WORDSIZE32 604807628); (@repr WORDSIZE32 770255983); (@repr WORDSIZE32 1249150122); (@repr WORDSIZE32 1555081692); (@repr WORDSIZE32 1996064986); (@repr WORDSIZE32 2554220882); (@repr WORDSIZE32 2821834349); (@repr WORDSIZE32 2952996808); (@repr WORDSIZE32 3210313671); (@repr WORDSIZE32 3336571891); (@repr WORDSIZE32 3584528711); (@repr WORDSIZE32 113926993); (@repr WORDSIZE32 338241895); (@repr WORDSIZE32 666307205); (@repr WORDSIZE32 773529912); (@repr WORDSIZE32 1294757372); (@repr WORDSIZE32 1396182291); (@repr WORDSIZE32 1695183700); (@repr WORDSIZE32 1986661051); (@repr WORDSIZE32 2177026350); (@repr WORDSIZE32 2456956037); (@repr WORDSIZE32 2730485921); (@repr WORDSIZE32 2820302411); (@repr WORDSIZE32 3259730800); (@repr WORDSIZE32 3345764771); (@repr WORDSIZE32 3516065817); (@repr WORDSIZE32 3600352804); (@repr WORDSIZE32 4094571909); (@repr WORDSIZE32 275423344); (@repr WORDSIZE32 430227734); (@repr WORDSIZE32 506948616); (@repr WORDSIZE32 659060556); (@repr WORDSIZE32 883997877); (@repr WORDSIZE32 958139571); (@repr WORDSIZE32 1322822218); (@repr WORDSIZE32 1537002063); (@repr WORDSIZE32 1747873779); (@repr WORDSIZE32 1955562222); (@repr WORDSIZE32 2024104815); (@repr WORDSIZE32 2227730452); (@repr WORDSIZE32 2361852424); (@repr WORDSIZE32 2428436474); (@repr WORDSIZE32 2756734187); (@repr 
WORDSIZE32 3204031479); (@repr WORDSIZE32 3329325298)]. Definition v_HASH_INIT : nseq int32 TODO: Int.to_string length := array_from_list [(@repr WORDSIZE32 1779033703); (@repr WORDSIZE32 3144134277); (@repr WORDSIZE32 1013904242); (@repr WORDSIZE32 2773480762); (@repr WORDSIZE32 1359893119); (@repr WORDSIZE32 2600822924); (@repr WORDSIZE32 528734635); (@repr WORDSIZE32 1541459225)]. Definition sigma (x : int32) (i : uint_size) (op : uint_size) : int32 := let tmp := (impl__u32__rotate_right x (f_into (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 2))]))) : int32 in let tmp := (if op=.?(@repr WORDSIZE32 0) then x shift_right (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 2))]) else tmp) : int32 in let rot_val_1 := (f_into (v_OP_TABLE.[((@repr WORDSIZE32 3).*i)])) : int32 in let rot_val_2 := (f_into (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 1))])) : int32 in ((impl__u32__rotate_right x rot_val_1).^(impl__u32__rotate_right x rot_val_2)).^tmp. Definition to_be_u32s (block : nseq int8 TODO: Int.to_string length) : t_Vec_t (int32) (t_Global_t) := let out := (impl__with_capacity (v_BLOCK_SIZE./(@repr WORDSIZE32 4))) : t_Vec_t (int32) (t_Global_t) in let out := (f_fold (f_into_iter (impl__chunks_exact (unsize block) (@repr WORDSIZE32 4))) out (fun out block_chunk => let block_chunk_array := (impl__u32__from_be_bytes (impl__unwrap (f_try_into block_chunk))) : int32 in let out := (impl_1__push out block_chunk_array) : t_Vec_t (int32) (t_Global_t) in out)) : t_Vec_t (int32) (t_Global_t) in out. 
Definition schedule (block : nseq int8 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let b := (to_be_u32s block) : t_Vec_t (int32) (t_Global_t) in let s := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 64)) : nseq int32 TODO: Int.to_string length in let s := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_K_SIZE)) s (fun s i => if i<.?(@repr WORDSIZE32 16) then let s := (update_at s i (b.[i])) : nseq int32 TODO: Int.to_string length in s else let t16 := (s.[(i.-(@repr WORDSIZE32 16))]) : int32 in let t15 := (s.[(i.-(@repr WORDSIZE32 15))]) : int32 in let t7 := (s.[(i.-(@repr WORDSIZE32 7))]) : int32 in let t2 := (s.[(i.-(@repr WORDSIZE32 2))]) : int32 in let s1 := (sigma t2 (@repr WORDSIZE32 3) (@repr WORDSIZE32 0)) : int32 in let s0 := (sigma t15 (@repr WORDSIZE32 2) (@repr WORDSIZE32 0)) : int32 in let s := (update_at s i (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add s1 t7) s0) t16)) : nseq int32 TODO: Int.to_string length in s)) : nseq int32 TODO: Int.to_string length in s. 
Definition shuffle (ws : nseq int32 TODO: Int.to_string length) (hashi : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let h := (hashi) : nseq int32 TODO: Int.to_string length in let h := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_K_SIZE)) h (fun h i => let a0 := (h.[(@repr WORDSIZE32 0)]) : int32 in let b0 := (h.[(@repr WORDSIZE32 1)]) : int32 in let c0 := (h.[(@repr WORDSIZE32 2)]) : int32 in let d0 := (h.[(@repr WORDSIZE32 3)]) : int32 in let e0 := (h.[(@repr WORDSIZE32 4)]) : int32 in let f0 := (h.[(@repr WORDSIZE32 5)]) : int32 in let g0 := (h.[(@repr WORDSIZE32 6)]) : int32 in let h0 := (h.[(@repr WORDSIZE32 7)]) : int32 in let t1 := (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add h0 (sigma e0 (@repr WORDSIZE32 1) (@repr WORDSIZE32 1))) (ch e0 f0 g0)) (v_K_TABLE.[i])) (ws.[i])) : int32 in let t2 := (impl__u32__wrapping_add (sigma a0 (@repr WORDSIZE32 0) (@repr WORDSIZE32 1)) (maj a0 b0 c0)) : int32 in let h := (update_at h (@repr WORDSIZE32 0) (impl__u32__wrapping_add t1 t2)) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 1) a0) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 2) b0) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 3) c0) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 4) (impl__u32__wrapping_add d0 t1)) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 5) e0) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 6) f0) : nseq int32 TODO: Int.to_string length in let h := (update_at h (@repr WORDSIZE32 7) g0) : nseq int32 TODO: Int.to_string length in h)) : nseq int32 TODO: Int.to_string length in h. 
Definition compress (block : nseq int8 TODO: Int.to_string length) (h_in : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length := let s := (schedule block) : nseq int32 TODO: Int.to_string length in let h := (shuffle s h_in) : nseq int32 TODO: Int.to_string length in let h := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 8))) h (fun h i => update_at h i (impl__u32__wrapping_add (h.[i]) (h_in.[i])))) : nseq int32 TODO: Int.to_string length in h. Definition u32s_to_be_bytes (state : nseq int32 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length := let out := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 32)) : nseq int8 TODO: Int.to_string length in let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) out (fun out i => let tmp := (state.[i]) : int32 in let tmp := (impl__u32__to_be_bytes tmp) : nseq int8 TODO: Int.to_string length in f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 4))) out (fun out j => update_at out ((i.*(@repr WORDSIZE32 4)).+j) (tmp.[j])))) : nseq int8 TODO: Int.to_string length in out. 
Definition hash (msg : seq int8) : nseq int8 TODO: Int.to_string length := let h := (v_HASH_INIT) : nseq int32 TODO: Int.to_string length in let last_block := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in let last_block_len := ((@repr WORDSIZE32 0)) : uint_size in let '(h,last_block,last_block_len) := (f_fold (f_into_iter (impl__chunks msg v_BLOCK_SIZE)) (h,last_block,last_block_len) (fun '(h,last_block,last_block_len) block => if (impl__len block)<.?v_BLOCK_SIZE then let last_block := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len block))) last_block (fun last_block i => update_at last_block i (block.[i]))) : nseq int8 TODO: Int.to_string length in let last_block_len := (impl__len block) : uint_size in (h,last_block,last_block_len) else let h := (compress (impl__unwrap (f_try_into block)) h) : nseq int32 TODO: Int.to_string length in (h,last_block,last_block_len))) : (nseq int32 TODO: Int.to_string length × nseq int8 TODO: Int.to_string length × uint_size) in let last_block := (update_at last_block last_block_len (@repr WORDSIZE8 128)) : nseq int8 TODO: Int.to_string length in let len_bist := (cast ((impl__len msg).*(@repr WORDSIZE32 8))) : int64 in let len_bist_bytes := (impl__u64__to_be_bytes len_bist) : nseq int8 TODO: Int.to_string length in let '(h,last_block) := (if last_block_len<.?(v_BLOCK_SIZE.-v_LEN_SIZE) then let last_block := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) last_block (fun last_block i => update_at last_block ((v_BLOCK_SIZE.-v_LEN_SIZE).+i) (len_bist_bytes.[i]))) : nseq int8 TODO: Int.to_string length in let h := (compress last_block h) : nseq int32 TODO: Int.to_string length in (h,last_block) else let pad_block := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in let pad_block := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) pad_block (fun pad_block i => update_at pad_block 
((v_BLOCK_SIZE.-v_LEN_SIZE).+i) (len_bist_bytes.[i]))) : nseq int8 TODO: Int.to_string length in let h := (compress last_block h) : nseq int32 TODO: Int.to_string length in let h := (compress pad_block h) : nseq int32 TODO: Int.to_string length in (h,last_block)) : (nseq int32 TODO: Int.to_string length × nseq int8 TODO: Int.to_string length) in u32s_to_be_bytes h. Definition sha256 (msg : seq int8) : nseq int8 TODO: Int.to_string length := hash msg. ================================================ FILE: examples/sha256/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HACL_HOME ?= $(HOME)/.hax/hacl_home FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= .cache HINT_DIR ?= .hints SHELL ?= /usr/bin/env bash EXECUTABLES = cargo cargo-hax jq K := $(foreach bin,$(EXECUTABLES),\ $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH"))) .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # Default hax invocation HAX_CLI = "cargo hax into fstar" # If $HACL_HOME doesn't exist, clone it ${HACL_HOME}: mkdir -p "${HACL_HOME}" git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}" # If no any F* file is detected, we run hax ifeq "$(wildcard *.fst *fsti)" "" $(shell $(SHELL) -c $(HAX_CLI)) endif # By default, we process all the files in the current directory ROOTS = $(wildcard *.fst *fsti) # Regenerate F* files via hax when Rust sources change $(ROOTS): $(shell find ../../../src -type f -name '*.rs') $(shell $(SHELL) -c $(HAX_CLI)) # The following is a bash script that discovers F* libraries define FINDLIBS # Prints a path if and only if it exists. Takes one argument: the # path. function print_if_exists() { if [ -d "$$1" ]; then echo "$$1" fi } # Asks Cargo all the dependencies for the current crate or workspace, # and extract all "root" directories for each. Takes zero argument. function dependencies() { cargo metadata --format-version 1 | jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")' } # Find hax libraries *around* a given path. Takes one argument: the # path. 
function find_hax_libraries_at_path() { path="$$1" # if there is a `proofs/fstar/extraction` subfolder, then that's a # F* library print_if_exists "$$path/proofs/fstar/extraction" # Maybe the `proof-libs` folder of hax is around? MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") if [ $$? -eq 0 ]; then print_if_exists "$$MAYBE_PROOF_LIBS/core" print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" fi } { while IFS= read path; do find_hax_libraries_at_path "$$path" done < <(dependencies) } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS") FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: examples/sha256/proofs/fstar/extraction/Sha256.fst ================================================ module Sha256 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_BLOCK_SIZE: usize = mk_usize 64 let v_LEN_SIZE: usize = mk_usize 8 let v_K_SIZE: usize = mk_usize 64 let v_HASH_SIZE: usize = mk_usize 256 /! mk_usize 8 let ch (x y z: u32) : u32 = (x &. y <: u32) ^. 
((~.x <: u32) &. z <: u32) let maj (x y z: u32) : u32 = (x &. y <: u32) ^. ((x &. z <: u32) ^. (y &. z <: u32) <: u32) let v_OP_TABLE: t_Array u8 (mk_usize 12) = let list = [ mk_u8 2; mk_u8 13; mk_u8 22; mk_u8 6; mk_u8 11; mk_u8 25; mk_u8 7; mk_u8 18; mk_u8 3; mk_u8 17; mk_u8 19; mk_u8 10 ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 12); Rust_primitives.Hax.array_of_list 12 list let v_K_TABLE: t_Array u32 (mk_usize 64) = let list = [ mk_u32 1116352408; mk_u32 1899447441; mk_u32 3049323471; mk_u32 3921009573; mk_u32 961987163; mk_u32 1508970993; mk_u32 2453635748; mk_u32 2870763221; mk_u32 3624381080; mk_u32 310598401; mk_u32 607225278; mk_u32 1426881987; mk_u32 1925078388; mk_u32 2162078206; mk_u32 2614888103; mk_u32 3248222580; mk_u32 3835390401; mk_u32 4022224774; mk_u32 264347078; mk_u32 604807628; mk_u32 770255983; mk_u32 1249150122; mk_u32 1555081692; mk_u32 1996064986; mk_u32 2554220882; mk_u32 2821834349; mk_u32 2952996808; mk_u32 3210313671; mk_u32 3336571891; mk_u32 3584528711; mk_u32 113926993; mk_u32 338241895; mk_u32 666307205; mk_u32 773529912; mk_u32 1294757372; mk_u32 1396182291; mk_u32 1695183700; mk_u32 1986661051; mk_u32 2177026350; mk_u32 2456956037; mk_u32 2730485921; mk_u32 2820302411; mk_u32 3259730800; mk_u32 3345764771; mk_u32 3516065817; mk_u32 3600352804; mk_u32 4094571909; mk_u32 275423344; mk_u32 430227734; mk_u32 506948616; mk_u32 659060556; mk_u32 883997877; mk_u32 958139571; mk_u32 1322822218; mk_u32 1537002063; mk_u32 1747873779; mk_u32 1955562222; mk_u32 2024104815; mk_u32 2227730452; mk_u32 2361852424; mk_u32 2428436474; mk_u32 2756734187; mk_u32 3204031479; mk_u32 3329325298 ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 64); Rust_primitives.Hax.array_of_list 64 list let v_HASH_INIT: t_Array u32 (mk_usize 8) = let list = [ mk_u32 1779033703; mk_u32 3144134277; mk_u32 1013904242; mk_u32 2773480762; mk_u32 1359893119; mk_u32 2600822924; mk_u32 528734635; mk_u32 1541459225 ] in 
FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 8); Rust_primitives.Hax.array_of_list 8 list let sigma (x: u32) (i op: usize) : Prims.Pure u32 (requires i <. mk_usize 4) (fun _ -> Prims.l_True) = let (tmp: u32):u32 = Core_models.Num.impl_u32__rotate_right x (Core_models.Convert.f_into #u8 #u32 #FStar.Tactics.Typeclasses.solve (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! mk_usize 2 <: usize ] <: u8) <: u32) in let tmp:u32 = if op =. mk_usize 0 then x >>! (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! mk_usize 2 <: usize ] <: u8) else tmp in let rot_val_1_:u32 = Core_models.Convert.f_into #u8 #u32 #FStar.Tactics.Typeclasses.solve (v_OP_TABLE.[ mk_usize 3 *! i <: usize ] <: u8) in let rot_val_2_:u32 = Core_models.Convert.f_into #u8 #u32 #FStar.Tactics.Typeclasses.solve (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! mk_usize 1 <: usize ] <: u8) in ((Core_models.Num.impl_u32__rotate_right x rot_val_1_ <: u32) ^. (Core_models.Num.impl_u32__rotate_right x rot_val_2_ <: u32) <: u32) ^. tmp let to_be_u32s (block: t_Array u8 (mk_usize 64)) : Prims.Pure (t_Array u32 (mk_usize 16)) Prims.l_True (ensures fun result -> let result:t_Array u32 (mk_usize 16) = result in (Core_models.Slice.impl__len #u32 (result <: t_Slice u32) <: usize) =. mk_usize 16) = let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 16) in let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 16) (fun out temp_1_ -> let out:t_Array u32 (mk_usize 16) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u32 (mk_usize 16) = out in let i:usize = i in let block_chunk_array:u32 = Core_models.Num.impl_u32__from_be_bytes (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 4)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 4)) #FStar.Tactics.Typeclasses.solve (block.[ { Core_models.Ops.Range.f_start = i *! 
mk_usize 4 <: usize; Core_models.Ops.Range.f_end = (i +! mk_usize 1 <: usize) *! mk_usize 4 <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 4)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 4)) in let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out i block_chunk_array in out) in out let schedule (block: t_Array u8 (mk_usize 64)) : t_Array u32 (mk_usize 64) = let b:t_Array u32 (mk_usize 16) = to_be_u32s block in let s:t_Array u32 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 64) in let s:t_Array u32 (mk_usize 64) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_K_SIZE (fun s i -> let s:t_Array u32 (mk_usize 64) = s in let i:usize = i in (Core_models.Slice.impl__len #u32 (b <: t_Slice u32) <: usize) =. mk_usize 16 <: bool) s (fun s i -> let s:t_Array u32 (mk_usize 64) = s in let i:usize = i in if i <. mk_usize 16 <: bool then let s:t_Array u32 (mk_usize 64) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize s i (b.[ i ] <: u32) in s else let t16:u32 = s.[ i -! mk_usize 16 <: usize ] in let t15:u32 = s.[ i -! mk_usize 15 <: usize ] in let t7:u32 = s.[ i -! mk_usize 7 <: usize ] in let t2:u32 = s.[ i -! 
mk_usize 2 <: usize ] in let s1:u32 = sigma t2 (mk_usize 3) (mk_usize 0) in let s0:u32 = sigma t15 (mk_usize 2) (mk_usize 0) in let s:t_Array u32 (mk_usize 64) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize s i (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add s1 t7 <: u32) s0 <: u32) t16 <: u32) in s) in s let shuffle (ws: t_Array u32 (mk_usize 64)) (hash: t_Array u32 (mk_usize 8)) : t_Array u32 (mk_usize 8) = let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_K_SIZE (fun hash temp_1_ -> let hash:t_Array u32 (mk_usize 8) = hash in let _:usize = temp_1_ in true) hash (fun hash i -> let hash:t_Array u32 (mk_usize 8) = hash in let i:usize = i in let a0:u32 = hash.[ mk_usize 0 ] in let b0:u32 = hash.[ mk_usize 1 ] in let c0:u32 = hash.[ mk_usize 2 ] in let d0:u32 = hash.[ mk_usize 3 ] in let e0:u32 = hash.[ mk_usize 4 ] in let f0:u32 = hash.[ mk_usize 5 ] in let g0:u32 = hash.[ mk_usize 6 ] in let (h0: u32):u32 = hash.[ mk_usize 7 ] in let t1:u32 = Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add h0 (sigma e0 (mk_usize 1) (mk_usize 1) <: u32) <: u32) (ch e0 f0 g0 <: u32) <: u32) (v_K_TABLE.[ i ] <: u32) <: u32) (ws.[ i ] <: u32) in let t2:u32 = Core_models.Num.impl_u32__wrapping_add (sigma a0 (mk_usize 0) (mk_usize 1) <: u32) (maj a0 b0 c0 <: u32) in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 0) (Core_models.Num.impl_u32__wrapping_add t1 t2 <: u32) in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 1) a0 in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 2) b0 in let hash:t_Array u32 (mk_usize 8) = 
Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 3) c0 in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 4) (Core_models.Num.impl_u32__wrapping_add d0 t1 <: u32) in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 5) e0 in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 6) f0 in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 7) g0 in hash) in hash let compress (block: t_Array u8 (mk_usize 64)) (hash: t_Array u32 (mk_usize 8)) : t_Array u32 (mk_usize 8) = let s:t_Array u32 (mk_usize 64) = schedule block in let h_in:t_Array u32 (mk_usize 8) = Core_models.Clone.f_clone #(t_Array u32 (mk_usize 8)) #FStar.Tactics.Typeclasses.solve hash in let hash:t_Array u32 (mk_usize 8) = shuffle s hash in let hash:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 8) (fun hash temp_1_ -> let hash:t_Array u32 (mk_usize 8) = hash in let _:usize = temp_1_ in true) hash (fun hash i -> let hash:t_Array u32 (mk_usize 8) = hash in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash i (Core_models.Num.impl_u32__wrapping_add (hash.[ i ] <: u32) (h_in.[ i ] <: u32) <: u32) <: t_Array u32 (mk_usize 8)) in hash let u32s_to_be_bytes (state: t_Array u32 (mk_usize 8)) : t_Array u8 (mk_usize 32) = let (out: t_Array u8 (mk_usize 32)):t_Array u8 (mk_usize 32) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32) in let out:t_Array u8 (mk_usize 32) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_LEN_SIZE (fun out temp_1_ -> let out:t_Array u8 (mk_usize 32) = out in let _:usize = temp_1_ in true) out (fun out i -> let out:t_Array u8 (mk_usize 32) = out in let i:usize = i in let tmp:u32 = state.[ i ] in let tmp:t_Array u8 (mk_usize 4) = 
Core_models.Num.impl_u32__to_be_bytes tmp in Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 4) (fun out temp_1_ -> let out:t_Array u8 (mk_usize 32) = out in let _:usize = temp_1_ in true) out (fun out j -> let out:t_Array u8 (mk_usize 32) = out in let j:usize = j in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out ((i *! mk_usize 4 <: usize) +! j <: usize) (tmp.[ j ] <: u8) <: t_Array u8 (mk_usize 32))) in out let hash (msg: t_Slice u8) : Prims.Pure (t_Array u8 (mk_usize 32)) (requires (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) <. mk_u64 2305843009213693951) (fun _ -> Prims.l_True) = let h:t_Array u32 (mk_usize 8) = v_HASH_INIT in let blocks:usize = (Core_models.Slice.impl__len #u8 msg <: usize) /! v_BLOCK_SIZE in let h:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) blocks (fun h temp_1_ -> let h:t_Array u32 (mk_usize 8) = h in let _:usize = temp_1_ in true) h (fun h i -> let h:t_Array u32 (mk_usize 8) = h in let i:usize = i in compress (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 64)) #Core_models.Array.t_TryFromSliceError (Core_models.Convert.f_try_into #(t_Slice u8) #(t_Array u8 (mk_usize 64)) #FStar.Tactics.Typeclasses.solve (msg.[ { Core_models.Ops.Range.f_start = i *! v_BLOCK_SIZE <: usize; Core_models.Ops.Range.f_end = (i +! mk_usize 1 <: usize) *! v_BLOCK_SIZE <: usize } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 64)) Core_models.Array.t_TryFromSliceError) <: t_Array u8 (mk_usize 64)) h <: t_Array u32 (mk_usize 8)) in let last_block_len:usize = (Core_models.Slice.impl__len #u8 msg <: usize) %! 
v_BLOCK_SIZE in let (last_block: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64) in let last_block:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Monomorphized_update_at.update_at_range last_block ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = last_block_len } <: Core_models.Ops.Range.t_Range usize) (Core_models.Slice.impl__copy_from_slice #u8 (last_block.[ { Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = last_block_len } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) (msg.[ { Core_models.Ops.Range.f_start = blocks *! v_BLOCK_SIZE <: usize } <: Core_models.Ops.Range.t_RangeFrom usize ] <: t_Slice u8) <: t_Slice u8) in let last_block:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize last_block last_block_len (mk_u8 128) in let _:Prims.unit = assert (Seq.length msg * 8 < pow2 64) in let len_bist:u64 = (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) *! mk_u64 8 in let len_bist_bytes:t_Array u8 (mk_usize 8) = Core_models.Num.impl_u64__to_be_bytes len_bist in let h, last_block:(t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64)) = if last_block_len <. (v_BLOCK_SIZE -! v_LEN_SIZE <: usize) then let last_block:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_LEN_SIZE (fun last_block temp_1_ -> let last_block:t_Array u8 (mk_usize 64) = last_block in let _:usize = temp_1_ in true) last_block (fun last_block i -> let last_block:t_Array u8 (mk_usize 64) = last_block in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize last_block ((v_BLOCK_SIZE -! v_LEN_SIZE <: usize) +! 
i <: usize) (len_bist_bytes.[ i ] <: u8) <: t_Array u8 (mk_usize 64)) in let h:t_Array u32 (mk_usize 8) = compress last_block h in h, last_block <: (t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64)) else let (pad_block: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64) in let pad_block:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_LEN_SIZE (fun pad_block temp_1_ -> let pad_block:t_Array u8 (mk_usize 64) = pad_block in let _:usize = temp_1_ in true) pad_block (fun pad_block i -> let pad_block:t_Array u8 (mk_usize 64) = pad_block in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize pad_block ((v_BLOCK_SIZE -! v_LEN_SIZE <: usize) +! i <: usize) (len_bist_bytes.[ i ] <: u8) <: t_Array u8 (mk_usize 64)) in let h:t_Array u32 (mk_usize 8) = compress last_block h in let h:t_Array u32 (mk_usize 8) = compress pad_block h in h, last_block <: (t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64)) in u32s_to_be_bytes h let sha256 (msg: t_Slice u8) : Prims.Pure (t_Array u8 (mk_usize 32)) (requires (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) <. 
mk_u64 2305843009213693951) (fun _ -> Prims.l_True) = hash msg ================================================ FILE: examples/sha256/src/sha256.rs ================================================ use std::convert::TryInto; const BLOCK_SIZE: usize = 64; const LEN_SIZE: usize = 8; pub const K_SIZE: usize = 64; pub const HASH_SIZE: usize = 256 / 8; pub type Block = [u8; BLOCK_SIZE]; pub type OpTableType = [u8; 12]; pub type Sha256Digest = [u8; HASH_SIZE]; pub type RoundConstantsTable = [u32; K_SIZE]; pub type Hash = [u32; LEN_SIZE]; pub fn ch(x: u32, y: u32, z: u32) -> u32 { (x & y) ^ ((!x) & z) } pub fn maj(x: u32, y: u32, z: u32) -> u32 { (x & y) ^ ((x & z) ^ (y & z)) } const OP_TABLE: OpTableType = [2, 13, 22, 6, 11, 25, 7, 18, 3, 17, 19, 10]; #[rustfmt::skip] const K_TABLE: RoundConstantsTable = [ 0x428a_2f98u32, 0x7137_4491u32, 0xb5c0_fbcfu32, 0xe9b5_dba5u32, 0x3956_c25bu32, 0x59f1_11f1u32, 0x923f_82a4u32, 0xab1c_5ed5u32, 0xd807_aa98u32, 0x1283_5b01u32, 0x2431_85beu32, 0x550c_7dc3u32, 0x72be_5d74u32, 0x80de_b1feu32, 0x9bdc_06a7u32, 0xc19b_f174u32, 0xe49b_69c1u32, 0xefbe_4786u32, 0x0fc1_9dc6u32, 0x240c_a1ccu32, 0x2de9_2c6fu32, 0x4a74_84aau32, 0x5cb0_a9dcu32, 0x76f9_88dau32, 0x983e_5152u32, 0xa831_c66du32, 0xb003_27c8u32, 0xbf59_7fc7u32, 0xc6e0_0bf3u32, 0xd5a7_9147u32, 0x06ca_6351u32, 0x1429_2967u32, 0x27b7_0a85u32, 0x2e1b_2138u32, 0x4d2c_6dfcu32, 0x5338_0d13u32, 0x650a_7354u32, 0x766a_0abbu32, 0x81c2_c92eu32, 0x9272_2c85u32, 0xa2bf_e8a1u32, 0xa81a_664bu32, 0xc24b_8b70u32, 0xc76c_51a3u32, 0xd192_e819u32, 0xd699_0624u32, 0xf40e_3585u32, 0x106a_a070u32, 0x19a4_c116u32, 0x1e37_6c08u32, 0x2748_774cu32, 0x34b0_bcb5u32, 0x391c_0cb3u32, 0x4ed8_aa4au32, 0x5b9c_ca4fu32, 0x682e_6ff3u32, 0x748f_82eeu32, 0x78a5_636fu32, 0x84c8_7814u32, 0x8cc7_0208u32, 0x90be_fffau32, 0xa450_6cebu32, 0xbef9_a3f7u32, 0xc671_78f2u32 ]; const HASH_INIT: Hash = [ 0x6a09e667u32, 0xbb67ae85u32, 0x3c6ef372u32, 0xa54ff53au32, 0x510e527fu32, 0x9b05688cu32, 0x1f83d9abu32, 0x5be0cd19u32, ]; 
#[hax_lib::requires(i < 4)] pub fn sigma(x: u32, i: usize, op: usize) -> u32 { let mut tmp: u32 = x.rotate_right(OP_TABLE[3 * i + 2].into()); if op == 0 { tmp = x >> OP_TABLE[3 * i + 2] } let rot_val_1 = OP_TABLE[3 * i].into(); let rot_val_2 = OP_TABLE[3 * i + 1].into(); x.rotate_right(rot_val_1) ^ x.rotate_right(rot_val_2) ^ tmp } #[hax_lib::ensures(|result| result.len() == 16)] fn to_be_u32s(block: Block) -> [u32; 16] { let mut out = [0u32; 16]; for i in 0..16 { let block_chunk_array = u32::from_be_bytes(block[i * 4..(i + 1) * 4].try_into().unwrap()); out[i] = block_chunk_array; } out } pub fn schedule(block: Block) -> RoundConstantsTable { let b = to_be_u32s(block); let mut s = [0; K_SIZE]; for i in 0..K_SIZE { hax_lib::loop_invariant!(|i: usize| b.len() == 16); if i < 16 { s[i] = b[i]; } else { let t16 = s[i - 16]; let t15 = s[i - 15]; let t7 = s[i - 7]; let t2 = s[i - 2]; let s1 = sigma(t2, 3, 0); let s0 = sigma(t15, 2, 0); s[i] = s1.wrapping_add(t7).wrapping_add(s0).wrapping_add(t16); } } s } pub fn shuffle(ws: RoundConstantsTable, hash: &mut Hash) { for i in 0..K_SIZE { let a0 = hash[0]; let b0 = hash[1]; let c0 = hash[2]; let d0 = hash[3]; let e0 = hash[4]; let f0 = hash[5]; let g0 = hash[6]; let h0: u32 = hash[7]; let t1 = h0 .wrapping_add(sigma(e0, 1, 1)) .wrapping_add(ch(e0, f0, g0)) .wrapping_add(K_TABLE[i]) .wrapping_add(ws[i]); let t2 = sigma(a0, 0, 1).wrapping_add(maj(a0, b0, c0)); hash[0] = t1.wrapping_add(t2); hash[1] = a0; hash[2] = b0; hash[3] = c0; hash[4] = d0.wrapping_add(t1); hash[5] = e0; hash[6] = f0; hash[7] = g0; } } pub fn compress(block: Block, hash: &mut Hash) { let s = schedule(block); let h_in = hash.clone(); shuffle(s, hash); for i in 0..8 { hash[i] = hash[i].wrapping_add(h_in[i]); } } fn u32s_to_be_bytes(state: Hash) -> Sha256Digest { let mut out: Sha256Digest = [0u8; HASH_SIZE]; for i in 0..LEN_SIZE { let tmp = state[i]; let tmp = tmp.to_be_bytes(); for j in 0..4 { out[i * 4 + j] = tmp[j]; } } out } #[hax_lib::requires((msg.len() 
as u64) < 0x1fffffffffffffff)] pub fn hash(msg: &[u8]) -> Sha256Digest { let mut h = HASH_INIT; let blocks = msg.len() / BLOCK_SIZE; for i in 0..blocks { compress( msg[i * BLOCK_SIZE..(i + 1) * BLOCK_SIZE] .try_into() .unwrap(), &mut h, ); } let last_block_len = msg.len() % BLOCK_SIZE; let mut last_block: Block = [0; BLOCK_SIZE]; last_block[0..last_block_len].copy_from_slice(&msg[blocks * BLOCK_SIZE..]); last_block[last_block_len] = 0x80; hax_lib::fstar!("assert(Seq.length msg * 8 < pow2 64)"); let len_bist = msg.len() as u64 * 8; let len_bist_bytes = len_bist.to_be_bytes(); if last_block_len < BLOCK_SIZE - LEN_SIZE { for i in 0..LEN_SIZE { last_block[BLOCK_SIZE - LEN_SIZE + i] = len_bist_bytes[i]; } compress(last_block, &mut h); } else { let mut pad_block: Block = [0; BLOCK_SIZE]; for i in 0..LEN_SIZE { pad_block[BLOCK_SIZE - LEN_SIZE + i] = len_bist_bytes[i]; } compress(last_block, &mut h); compress(pad_block, &mut h); } u32s_to_be_bytes(h) } #[hax_lib::requires((msg.len() as u64) < 0x1fffffffffffffff)] pub fn sha256(msg: &[u8]) -> Sha256Digest { hash(msg) } ================================================ FILE: examples/sha256/tests/test_sha256.rs ================================================ use std::num::ParseIntError; use sha256::*; fn hex_string_to_vec(s: &str) -> Vec { debug_assert!(s.len() % core::mem::size_of::() == 0); let b: Result, ParseIntError> = (0..s.len()) .step_by(2) .map(|i| u8::from_str_radix(&s[i..i + 2], 16).map(::from)) .collect(); b.expect("Error parsing hex string") } #[test] fn test_sha256_kat() { let msg = hex_string_to_vec("686163737065632072756c6573"); let expected_256 = hex_string_to_vec("b37db5ed72c97da3b2579537afbc3261ed3d5a56f57b3d8e5c1019ae35929964"); let digest = hash(&msg); println!("{:?}", expected_256); println!("{:x?}", digest); assert_eq!(expected_256, digest); let msg = 
hex_string_to_vec("6861637370656320697320612070726f706f73616c20666f722061206e65772073706563696669636174696f6e206c616e677561676520666f722063727970746f207072696d69746976657320746861742069732073756363696e63742c2074686174206973206561737920746f207265616420616e6420696d706c656d656e742c20616e642074686174206c656e647320697473656c6620746f20666f726d616c20766572696669636174696f6e2e"); let expected_256 = hex_string_to_vec("348ef044446d56e05210361af5a258588ad31765f446bf4cb3b67125a187a64a"); let digest = hash(&msg); println!("{:?}", expected_256); println!("{:x?}", digest); assert_eq!(expected_256, digest); } #[test] fn empty_input() { const SHA256_EMPTY: Sha256Digest = [ 0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, 0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, 0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, 0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55, ]; assert_eq!(hash(&vec![]), SHA256_EMPTY); } ================================================ FILE: flake.nix ================================================ { inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-25.05"; flake-utils.url = "github:numtide/flake-utils"; crane = { url = "github:ipetkov/crane"; }; rust-overlay = { url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; fstar.url = "github:FStarLang/FStar/v2025.10.06"; hacl-star = { url = "github:hacl-star/hacl-star"; flake = false; }; rust-by-examples = { url = "github:rust-lang/rust-by-example"; flake = false; }; }; outputs = { flake-utils, nixpkgs, rust-overlay, crane, hacl-star, ... 
}@inputs: flake-utils.lib.eachDefaultSystem (system: let pkgs = import nixpkgs { inherit system; overlays = [ rust-overlay.overlays.default ]; }; toolchain = (fromTOML (pkgs.lib.readFile ./rust-toolchain.toml)).toolchain; rustc = pkgs.rust-bin.fromRustupToolchain toolchain; rustc-docs = (let # Only x86 linux has the component rustc-docs, see https://github.com/nix-community/fenix/issues/51 # system = "x86_64-linux"; n = toolchain // { components = toolchain.components ++ [ "rustc-docs" ]; }; rustc = builtins.trace n.components ((pkgs.rust-bin.fromRustupToolchain n).override { targets = [ "x86_64-unknown-linux-gnu" ]; }); in rustc); craneLib = (crane.mkLib pkgs).overrideToolchain rustc; rustfmt = pkgs.rustfmt; fstar = inputs.fstar.packages.${system}.default; hax-env-file = pkgs.writeText "hax-env-file" '' HAX_PROOF_LIBS_HOME="${./proof-libs/fstar}" HAX_LIBS_HOME="${./hax-lib}"/proofs/fstar/extraction HACL_HOME="${hacl-star}" ''; hax-env = pkgs.writeScriptBin "hax-env" '' if [[ "$1" == "no-export" ]]; then cat "${hax-env-file}" else cat "${hax-env-file}" | xargs -I{} echo "export {}" fi ''; ocamlPackages = pkgs.ocamlPackages; ocamlformat = ocamlPackages.ocamlformat_0_27_0; proverif = pkgs.proverif.overrideDerivation (_: { patches = [ examples/proverif-psk/pv_div_by_zero_fix.diff ]; }); in rec { packages = { inherit rustc ocamlformat rustfmt fstar hax-env rustc-docs proverif; docs = pkgs.python312Packages.callPackage ./docs { hax-frontend-docs = packages.hax-rust-frontend.docs; }; hax-engine = pkgs.callPackage ./engine { hax-rust-frontend = packages.hax-rust-frontend.unwrapped; # `hax-engine-names-extract` extracts Rust names but also # some informations about `impl`s when names are `impl` # blocks. That includes some span information, which # includes full paths to Rust sources. Sometimes those # Rust sources happens to be in the Nix store. That # creates useless dependencies, this wrapper below takes # care of removing those extra depenedencies. 
hax-engine-names-extract = pkgs.writeScriptBin "hax-engine-names-extract" '' #!${pkgs.stdenv.shell} ${packages.hax-rust-frontend.hax-engine-names-extract}/bin/hax-engine-names-extract | sed 's|/nix/store/\(.\{6\}\)|/nix_store/\1-|g' ''; inherit rustc ocamlPackages; }; hax-rust-frontend = pkgs.callPackage ./cli { inherit rustc craneLib rustc-docs; inherit (packages) hax-engine; }; hax = packages.hax-rust-frontend; default = packages.hax; check-toolchain = checks.toolchain; check-examples = checks.examples; check-coq-coverage = checks.coverage; check-readme-coherency = checks.readme-coherency; rust-by-example-hax-extraction = pkgs.stdenv.mkDerivation { name = "rust-by-example-hax-extraction"; phases = [ "installPhase" ]; buildInputs = [ packages.hax pkgs.cargo ]; installPhase = '' cp --no-preserve=mode -rf ${inputs.rust-by-examples} workdir cd workdir ${pkgs.nodejs}/bin/node ${./.utils/rust-by-example.js} mv rust-by-examples-crate/proofs $out ''; }; # The commit that corresponds to our nightly pin, helpful when updating rusrc. toolchain_commit = pkgs.runCommand "hax-toolchain-commit" { } '' # This is sad but I don't know a better way. cat ${rustc}/share/doc/rust/html/version_info.html \ | grep 'github.com' \ | sed 's#.*"https://github.com/rust-lang/rust/commit/\([^"]*\)".*#\1#' \ > $out ''; }; checks = { toolchain = packages.hax.tests; examples = pkgs.callPackage ./examples { inherit (packages) hax; inherit craneLib fstar hacl-star hax-env; }; coverage = pkgs.callPackage ./examples/coverage { inherit (packages) hax; inherit craneLib; coqPackages = pkgs.coqPackages_8_19; }; readme-coherency = let src = pkgs.lib.sourceFilesBySuffices ./. [ ".md" ]; in pkgs.stdenv.mkDerivation { name = "readme-coherency"; inherit src; buildPhase = '' ${apps.replace-fstar-versions-md.program} diff -r . 
${src} ''; installPhase = "touch $out"; }; }; apps = { replace-fstar-versions-md = { type = "app"; program = "${pkgs.writeScript "replace-fstar-versions-md" '' #!${pkgs.bash}/bin/bash FSTAR_VERSION=$(cat ${ ./flake.lock } | ${pkgs.jq}/bin/jq '.nodes.fstar.original.ref' -r) ${pkgs.fd}/bin/fd \ -X ${pkgs.sd}/bin/sd '`.*?`()' '`'"$FSTAR_VERSION"'`$1' **/*.md \ ";" --glob '*.md' ''}"; }; serve-rustc-docs = { type = "app"; program = "${pkgs.writeScript "serve-rustc-docs" '' #!${pkgs.bash}/bin/bash cd ${rustc-docs}/share/doc/rust/html/rustc ${pkgs.python3}/bin/python -m http.server "$@" ''}"; }; serve-docs = { type = "app"; program = "${pkgs.writeScript "serve-docs" '' #!${pkgs.bash}/bin/bash cd ${packages.docs} ${pkgs.python3}/bin/python -m http.server "$@" ''}"; }; }; devShells = let inputsFrom = [ packages.hax-rust-frontend.unwrapped # `hax-engine`'s build requires `hax-rust-frontend` and # `hax-engine-names-extract`, but in a dev environment, # those two packages are supposed to be built locally, # thus we kill them here (packages.hax-engine.override { hax-rust-frontend = pkgs.hello; hax-engine-names-extract = pkgs.hello; }) packages.docs ]; utils = pkgs.stdenv.mkDerivation { name = "hax-dev-scripts"; phases = [ "installPhase" ]; installPhase = '' mkdir -p $out/bin cp ${./.utils/rebuild.sh} $out/bin/rebuild ''; }; defaultPackages = [ ocamlformat ocamlPackages.ocaml-lsp ocamlPackages.ocamlformat-rpc-lib ocamlPackages.ocaml-print-intf ocamlPackages.odoc ocamlPackages.utop pkgs.just pkgs.cargo-expand pkgs.cargo-release pkgs.cargo-insta pkgs.openssl.dev pkgs.libz.dev pkgs.pkg-config pkgs.rust-analyzer pkgs.toml2json rustfmt utils pkgs.go-grip ]; LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; DYLD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ pkgs.libz rustc ]; in { examples = pkgs.mkShell { inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH; HACL_HOME = "${hacl-star}"; shellHook = '' HAX_ROOT=$(git rev-parse --show-toplevel) export 
HAX_PROOF_LIBS_HOME="$HAX_ROOT/proof-libs/fstar" export HAX_LIBS_HOME="$HAX_ROOT/hax-lib" ''; packages = defaultPackages ++ [ fstar pkgs.proverif ]; }; ci-examples = pkgs.mkShell { shellHook = '' eval $(hax-env) export CACHE_DIR=$(mktemp -d) export HINT_DIR=$(mktemp -d) export SHELL=${pkgs.bash}/bin/bash ''; packages = [ packages.hax packages.hax-env packages.fstar packages.proverif pkgs.jq pkgs.elan ]; }; default = pkgs.mkShell { inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH; packages = defaultPackages; }; fstar = pkgs.mkShell { inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH; shellHook = '' export HAX_HOME=$(git rev-parse --show-toplevel) export FSTAR_HOME="${fstar}" ''; packages = defaultPackages ++ [ fstar ]; }; }; }); } ================================================ FILE: frontend/exporter/Cargo.toml ================================================ [package] name = "hax-frontend-exporter" edition = "2024" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true repository.workspace = true readme.workspace = true description = "Provides mirrors of the algebraic data types used in the Rust compilers, removing indirections and inlining various pieces of information." [package.metadata.rust-analyzer] rustc_private=true [dependencies] hax-adt-into.workspace = true serde.workspace = true serde_json.workspace = true schemars.workspace = true itertools.workspace = true hax-frontend-exporter-options.workspace = true tracing.workspace = true paste = "1.0.11" extension-traits = "1.0.1" lazy_static = "1.4.0" [features] default = ["rustc"] extract_names_mode = [] # Enables the conversion bridges from rustc types (and AST) to the # ones defined in this crate. Enabling `rustc` adds a dependency to # `librustc_driver`. 
rustc = [] ================================================ FILE: frontend/exporter/README.md ================================================ # Special core extraction mode For now, the frontend is sensible to the `HAX_CORE_EXTRACTION_MODE` variable environment that enables a special mode. ================================================ FILE: frontend/exporter/adt-into/.gitignore ================================================ /target /Cargo.lock ================================================ FILE: frontend/exporter/adt-into/Cargo.toml ================================================ [package] name = "hax-adt-into" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "Provides the `adt_into` procedural macro, allowing for mirroring data types with small variations." [lib] proc-macro = true [dependencies] itertools.workspace = true syn.workspace = true proc-macro2 = "1.0" quote = "1.0" [dev-dependencies] tracing.workspace = true ================================================ FILE: frontend/exporter/adt-into/README.md ================================================ # hax adt into This crate provides the `adt_into` procedural macro, allowing for mirroring data types with small variations. This crate is used by the frontend of hax, where we need to mirror a big part of the data types defined by the Rust compiler. While the abstract syntax trees (ASTs) from the Rust compiler expose a lot of indirections (identifiers one should lookup, additional informations reachable only via interactive queries), hax exposes the same ASTs, removing indirections and inlining additional informations. The `adt_into` derive macro can be used on `struct`s and `enum`s. `adt_into` then looks for another `#[args(, from: FROM_TYPE, state: STATE_TYPE as SOME_NAME)]` attribute. 
Such an attribute means that the `struct` or `enum` mirrors the type `FROM_TYPE`, and that the transformation is carried along with a state of type `STATE_TYPE` that will be accessible via the name `SOME_NAME`. An example is available in the `tests` folder. ================================================ FILE: frontend/exporter/adt-into/src/lib.rs ================================================ use quote::quote; use quote::quote_spanned; use syn::Token; use syn::parse::ParseStream; use syn::{Data, DeriveInput, Generics, parse_macro_input}; use syn::{PathArguments, PathSegment, spanned::Spanned}; fn strip_parenthesis(tokens: proc_macro::TokenStream) -> Option { match tokens.into_iter().collect::>().as_slice() { [proc_macro::TokenTree::Group(token)] => Some(token.stream()), _ => None, } } #[derive(Debug)] struct Options { generics: Generics, from: syn::TypePath, state: syn::Ident, state_type: syn::Type, where_clause: Option, } mod option_parse { use super::*; mod kw { syn::custom_keyword!(from); syn::custom_keyword!(state); } impl syn::parse::Parse for Options { fn parse(input: ParseStream) -> syn::Result { let generics = input.parse()?; input.parse::()?; input.parse::()?; input.parse::()?; let from = input.parse()?; input.parse::()?; input.parse::()?; input.parse::()?; let state_type = input.parse()?; input.parse::()?; let state = input.parse()?; let mut where_clause = None; if input.peek(Token![,]) && input.peek2(Token![where]) { input.parse::()?; where_clause = Some(input.parse()?); } Ok(Options { generics, from, state, state_type, where_clause, }) } } } /// Returns the token stream corresponding to an attribute (if it /// exists), stripping parenthesis already. 
fn tokens_of_attrs<'a>( attr_name: &'a str, attrs: &'a Vec, ) -> impl Iterator + 'a { attrs .iter() .filter(|attr| attr.path.is_ident(attr_name)) .map(|attr| attr.clone().tokens.into()) .flat_map(strip_parenthesis) .map(|x| x.into()) } fn parse_attrs<'a, T: syn::parse::Parse>( attr_name: &'a str, attrs: &'a Vec, ) -> impl Iterator + 'a { tokens_of_attrs(attr_name, attrs).map(move |x| { syn::parse::(x.clone().into()) .expect(format!("expected attribtue {}", attr_name).as_str()) }) } /// Parse an attribute as a T if it exists. fn parse_attr(attr_name: &str, attrs: &Vec) -> Option { parse_attrs(attr_name, attrs).next() } /* TODO: add `ensure_no_attr` calls to forbid meaningless attributes fn ensure_no_attr(context: &str, attr: &str, attrs: &Vec) { if attrs.iter().any(|a| a.path.is_ident(attr)) { panic!("Illegal attribute {} {}", attr, context) } } */ /// Create a match arm that corresponds to a given set of fields. /// This can be used for named fields as well as unnamed ones. fn fields_to_arm( from_record_name: proc_macro2::TokenStream, to_record_name: proc_macro2::TokenStream, fields: Vec, full_span: proc_macro2::Span, prepend: proc_macro2::TokenStream, used_fields: Vec, state: syn::Ident, ) -> proc_macro2::TokenStream { if fields.is_empty() { return quote_spanned! 
{full_span=> #from_record_name => #to_record_name, }; } let is_struct = fields.iter().any(|f| f.ident.is_some()); let is_tuple = fields.iter().any(|f| f.ident.is_none()); if is_tuple && is_struct { panic!("Impossibe: variant with both named and unamed fields") } let data = fields.iter().enumerate().map(|(i, field)| { let attrs = &field.attrs; let name_destination = field.ident.clone().unwrap_or(syn::Ident::new( format!("value_{}", i).as_str(), field.span(), )); let span = field.span(); let field_name_span = field.clone().ident.map(|x| x.span()).unwrap_or(span); let name_source = parse_attr::("from", attrs).unwrap_or(name_destination.clone()); let value = parse_attr::("value", attrs); let not_in_source = value.is_some() || attrs.iter().any(|attr| attr.path.is_ident("not_in_source")); let typ = &field.ty; let point = syn::Ident::new("x", field_name_span); let translation = parse_attr::("map", attrs).or(value).unwrap_or( syn::parse::((quote_spanned! {typ.span()=> #point.sinto(#state)}).into()) .expect("Could not default [translation]") ); let mapped_value = if not_in_source { quote_spanned! {span=> {#translation}} } else { quote_spanned! {span=> {#[allow(unused_variables)] let #point = #name_source; #translation}} }; let prefix = if is_struct { quote_spanned! {field_name_span=> #name_destination:} } else { quote! {} }; ( if not_in_source { quote! {} } else { quote_spanned! {span=> #name_source, } }, quote_spanned! {span=> #prefix #mapped_value, }, ) }); let bindings: proc_macro2::TokenStream = data .clone() .map(|(x, _)| x) .chain(used_fields.iter().map(|f| quote! {#f,})) .collect(); let fields: proc_macro2::TokenStream = data.clone().map(|(_, x)| x).collect(); if is_struct { quote_spanned! {full_span=> #from_record_name { #bindings .. } => {#prepend #to_record_name { #fields }}, } } else { quote_spanned! {full_span=> #from_record_name ( #bindings ) => {#prepend #to_record_name ( #fields )}, } } } /// Extracts a vector of Field out of a Fields. 
/// This function discard the Unnamed / Named variants. fn field_vec_of_fields(fields: syn::Fields) -> Vec { match fields { syn::Fields::Unit => vec![], syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => fields.into_iter().collect(), } } /// Given a variant, produce a match arm. fn variant_to_arm( typ_from: proc_macro2::TokenStream, typ_to: proc_macro2::TokenStream, variant: syn::Variant, state: syn::Ident, ) -> proc_macro2::TokenStream { let attrs = &variant.attrs; let to_variant = variant.clone().ident; if attrs.iter().any(|attr| attr.path.is_ident("todo")) { return quote!(); } let disable_mapping = attrs .iter() .any(|attr| attr.path.is_ident("disable_mapping")); let custom_arm = tokens_of_attrs("custom_arm", attrs).next(); // TODO: either complete map or drop it let map = parse_attr::("map", attrs); // ensure_no_attr( // format!("on the variant {}::{}", typ_to, to_variant).as_str(), // "map", // attrs, // ); let from_variant = parse_attr::("from", attrs); if disable_mapping && (map.is_some() || custom_arm.is_some() || from_variant.is_some()) { println!("Warning: `disable_mapping` makes `map`, `custom_arm` and `from_variant` inert") } if custom_arm.is_some() && (map.is_some() || from_variant.is_some()) { println!("Warning: `custom_arm` makes `map` and `from` inert") } if disable_mapping { return quote! {}; } if let Some(custom_arm) = custom_arm { return custom_arm.into(); } let from_variant = from_variant.unwrap_or(to_variant.clone()); let to_variant = quote! { #typ_to::#to_variant }; let from_variant = quote! 
{ #typ_from::#from_variant }; let fields = field_vec_of_fields(variant.clone().fields); if let Some(map) = map { let names: proc_macro2::TokenStream = fields .iter() .filter(|f| { let attrs = &f.attrs; !(parse_attr::("value", attrs).is_some() || attrs.iter().any(|attr| attr.path.is_ident("not_in_source"))) }) .enumerate() .map(|(nth, f)| { f.clone() .ident .unwrap_or(syn::Ident::new(format!("x{}", nth).as_str(), f.span())) }) .map(|name| quote! {#name, }) .collect(); if fields.iter().any(|f| f.ident.is_some()) { quote_spanned!(variant.span()=> #from_variant {#names ..} => #map,) } else { quote_spanned!(variant.span()=> #from_variant (#names) => #map,) } } else { fields_to_arm( from_variant, to_variant, fields, variant.span(), tokens_of_attrs("prepend", attrs).collect(), parse_attrs("use_field", attrs).collect(), state, ) } } /// [`AdtInto`] derives a /// [`SInto`](../hax_frontend_exporter/trait.SInto.html) /// instance. This helps at transporting a algebraic data type `A` to /// another ADT `B` when `A` and `B` shares a lot of structure. 
#[proc_macro_derive(
    AdtInto,
    attributes(
        map,
        from,
        custom_arm,
        disable_mapping,
        use_field,
        prepend,
        append,
        args,
        todo,
        not_in_source,
        value,
    )
)]
pub fn adt_into(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let dinput = {
        let input = input.clone();
        parse_macro_input!(input as DeriveInput)
    };
    let attrs = &dinput.attrs;
    let span = dinput.clone().span().clone();
    let to = dinput.ident;
    let to_generics = dinput.generics;
    let Options {
        generics,
        from: from_with_generics,
        state,
        state_type,
        where_clause,
    } = parse_attr("args", attrs).expect("An [args] attribute was expected");
    // The impl generics are the target type's generics followed by the
    // ones given in `#[args(...)]`, lifetimes first.
    let generics = {
        let mut generics = generics;
        generics.params = merge_generic_params(
            to_generics.params.clone().into_iter(),
            generics.params.into_iter(),
        )
        .collect();
        generics
    };
    // Helper to strip bounds/defaults so the params can be reused as
    // type *arguments* (e.g. `#to #to_generics`).
    trait DropBounds {
        fn drop_bounds(&mut self);
    }
    impl DropBounds for syn::GenericParam {
        fn drop_bounds(&mut self) {
            use syn::GenericParam::*;
            match self {
                Lifetime(lf) => {
                    lf.colon_token = None;
                    lf.bounds.clear()
                }
                Type(t) => {
                    t.colon_token = None;
                    t.bounds.clear();
                    t.eq_token = None;
                    t.default = None;
                }
                Const(c) => {
                    c.eq_token = None;
                    c.default = None;
                }
            }
        }
    }
    impl DropBounds for syn::Generics {
        fn drop_bounds(&mut self) {
            self.params.iter_mut().for_each(DropBounds::drop_bounds);
        }
    }
    let to_generics = {
        let mut to_generics = to_generics;
        to_generics.drop_bounds();
        to_generics
    };
    let from = drop_generics(from_with_generics.clone());
    let append: proc_macro2::TokenStream = tokens_of_attrs("append", &dinput.attrs)
        .next()
        .unwrap_or((quote! {}).into())
        .into();
    let body = match &dinput.data {
        Data::Union(..) => panic!("Union types are not supported"),
        Data::Struct(syn::DataStruct { fields, .. }) => {
            let arm = fields_to_arm(
                quote! {#from},
                quote! {#to},
                field_vec_of_fields(fields.clone()),
                span,
                tokens_of_attrs("prepend", attrs).collect(),
                parse_attrs("use_field", attrs).collect(),
                state.clone(),
            );
            quote! {
                match self {
                    #arm
                    #append
                }
            }
        }
        Data::Enum(syn::DataEnum { variants, .. }) => {
            let arms: proc_macro2::TokenStream = variants
                .iter()
                .cloned()
                .map(|variant| {
                    variant_to_arm(quote! {#from}, quote! {#to}, variant, state.clone())
                })
                .collect();
            // A `#[todo]` variant generates a catch-all arm stuffing
            // the debug representation of the value into the variant.
            let todo = variants
                .iter()
                .find_map(|variant| {
                    let attrs = &variant.attrs;
                    let to_variant = variant.clone().ident;
                    if attrs.iter().any(|attr| attr.path.is_ident("todo")) {
                        Some(quote_spanned! {variant.span()=>
                            x => TO_TYPE::#to_variant(format!("{:?}", x)),
                        })
                    } else {
                        None
                    }
                })
                .unwrap_or(quote! {});
            let append = quote! { #append #todo };
            quote! {
                match self {
                    #arms
                    #append
                }
            }
        }
    };
    quote! {
        #[cfg(feature = "rustc")]
        const _ : () = {
            use #from as FROM_TYPE;
            use #to as TO_TYPE;
            impl #generics SInto<#state_type, #to #to_generics> for #from_with_generics
                #where_clause
            {
                #[tracing::instrument(level = "trace", skip(#state))]
                fn sinto(&self, #state: &#state_type) -> #to #to_generics {
                    tracing::trace!("Enters sinto ({})", stringify!(#from_with_generics));
                    #body
                }
            }
        };
    }
    .into()
}

/// Merge two collections of generic params, with params from [a]
/// before the ones from [b]. This function ensures lifetimes
/// appear before anything else.
fn merge_generic_params(
    a: impl Iterator<Item = syn::GenericParam>,
    b: impl Iterator<Item = syn::GenericParam>,
) -> impl Iterator<Item = syn::GenericParam> {
    fn partition(
        a: impl Iterator<Item = syn::GenericParam>,
    ) -> (Vec<syn::GenericParam>, Vec<syn::GenericParam>) {
        a.partition(|g| matches!(g, syn::GenericParam::Lifetime(_)))
    }
    let (a_lt, a_others) = partition(a);
    let (b_lt, b_others) = partition(b);
    let h = |x: Vec<_>, y: Vec<_>| x.into_iter().chain(y.into_iter());
    h(a_lt, b_lt).chain(h(a_others, b_others))
}

/// Drops every angle-bracketed argument list from a type path:
/// `a::B<T>::c` becomes `a::B::c`.
fn drop_generics(type_path: syn::TypePath) -> syn::TypePath {
    syn::TypePath {
        path: syn::Path {
            segments: type_path
                .path
                .segments
                .into_iter()
                .map(|s| PathSegment {
                    ident: s.ident,
                    arguments: match s.arguments {
                        PathArguments::AngleBracketed(_) => PathArguments::None,
                        _ => s.arguments,
                    },
                })
                .collect(),
            ..type_path.path
        },
        ..type_path
    }
}

/// A proc macro unrelated to `adt-into`: it is useful in hax
/// and we don't want a whole crate only for that helper.
/// /// This proc macro defines some groups of derive clauses that /// we reuse all the time. #[proc_macro_attribute] pub fn derive_group( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let item: proc_macro2::TokenStream = item.into(); let groups = format!("{attr}"); let groups = groups.split(",").map(|s| s.trim()); let mut errors = vec![]; let result: proc_macro2::TokenStream = groups .map(|group| match group { "Serializers" => quote! { #[derive(::serde::Serialize, ::serde::Deserialize)] }, _ => { errors.push(quote! { const _: () = compile_error!(concat!( "derive_group: `", stringify!(#group), "` is not a recognized group name" )); }); quote! {} } }) .collect(); quote! {#(#errors)* #result #item}.into() } ================================================ FILE: frontend/exporter/adt-into/tests/lib.rs ================================================ /// For the example, let's assume we are working with `Literal`, an /// ADT that represents literal values. Suppose strings are /// represented via an identifier stored in a state `State`. pub mod source { use std::collections::HashMap; #[derive(Clone, Debug)] pub struct State(pub HashMap); #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub struct StringId(u32); #[derive(Clone, Debug)] pub enum Literal { Integer(u32), String(StringId), } } /// Here, we mirror the same data type `Literal`, but with a small /// difference: there is no `StringId` any longer: we define a `impl` /// of `SInto` specifically for `StringId`, that ships with a stateful /// lookup. Magically, everytime a mirrored datatype annotated with /// `AdtInto` will have a field or a variant of type String while the /// original type was `StringId`, the lookup will be done /// automatically. 
mod mirrored { use super::{sinto::*, source}; use hax_adt_into::*; #[derive(AdtInto)] #[args(<>, from: source::Literal, state: source::State as s)] pub enum Literal { Integer(u32), String(String), } impl SInto for source::StringId { fn sinto(&self, s: &source::State) -> String { s.0.get(self).unwrap().clone() } } } /// Definition of the `sinto` trait used by the `AdtInto` macro pub mod sinto { pub trait SInto { fn sinto(&self, s: &S) -> To; } /// Default implementation for type implementing Copy impl SInto for T { fn sinto(&self, _s: &S) -> T { *self } } } ================================================ FILE: frontend/exporter/default.nix ================================================ { craneLib, stdenv, makeWrapper, lib, rustc, gcc, }: let commonArgs = { version = "0.0.1"; src = craneLib.cleanCargoSource ./.; }; pname = "hax-rust-frontend"; cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { pname = "${pname}-deps"; }); in craneLib.buildPackage (commonArgs // { inherit cargoArtifacts pname; }) # hax // { # passthru = hax.passthru or {} // { # wrapped = hax-engine: stdenv.mkDerivation { # name = "hax"; # buildInputs = [ makeWrapper ]; # phases = ["installPhase"]; # installPhase = '' # mkdir -p $out/bin # makeWrapper ${hax}/bin/cargo-hax $out/bin/cargo-hax \ # --prefix PATH : ${ # lib.makeBinPath [ # hax # hax-engine # rustc gcc # ] # } # ''; # meta.mainProgram = "cargo-hax"; # }; # }; # } ================================================ FILE: frontend/exporter/options/Cargo.toml ================================================ [package] name = "hax-frontend-exporter-options" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "The options the `hax-frontend-exporter` crate is sensible to." 
[dependencies] serde.workspace = true serde_json.workspace = true schemars.workspace = true hax-adt-into.workspace = true ================================================ FILE: frontend/exporter/options/src/lib.rs ================================================ use hax_adt_into::derive_group; use schemars::JsonSchema; #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema)] pub enum Glob { One, // * Many, // ** } impl ToString for Glob { fn to_string(&self) -> String { match self { Self::One => "*", Self::Many => "**", } .to_string() } } #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema)] pub enum NamespaceChunk { Glob(Glob), Exact(String), } impl ToString for NamespaceChunk { fn to_string(&self) -> String { match self { Self::Glob(glob) => glob.to_string(), Self::Exact(string) => string.to_string(), } } } impl std::convert::From<&str> for NamespaceChunk { fn from(s: &str) -> Self { match s { "*" => NamespaceChunk::Glob(Glob::One), "**" => NamespaceChunk::Glob(Glob::Many), _ => NamespaceChunk::Exact(String::from(s)), } } } #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema)] pub struct Namespace { pub chunks: Vec, } impl ToString for Namespace { fn to_string(&self) -> String { self.chunks .iter() .map(NamespaceChunk::to_string) .collect::>() .join("::") .to_string() } } impl std::convert::From for Namespace { fn from(s: String) -> Self { Namespace { chunks: s .split("::") .filter(|s| !s.is_empty()) .map(NamespaceChunk::from) .collect(), } } } impl Namespace { pub fn matches(&self, path: &Vec) -> bool { fn aux(pattern: &[NamespaceChunk], path: &[String]) -> bool { match (pattern, path) { ([], []) => true, ([NamespaceChunk::Exact(x), pattern @ ..], [y, path @ ..]) => { x == y && aux(pattern, path) } ([NamespaceChunk::Glob(Glob::One), pattern @ ..], [_, path @ ..]) => { aux(pattern, path) } ([NamespaceChunk::Glob(Glob::Many), pattern @ ..], []) => aux(pattern, path), ([NamespaceChunk::Glob(Glob::Many), pattern_tl @ ..], [_path_hd, 
path_tl @ ..]) => { aux(pattern_tl, path) || aux(pattern, path_tl) } _ => false, } } aux(self.chunks.as_slice(), path.as_slice()) } } #[derive(Debug, Clone)] pub struct Options { /// Whether we should evaluate and inline the value of anonymous constants (inline `const {}` /// blocks or advanced constant expressions as in `[T; N+1]`), or refer to them as /// `GlobalName`s. pub inline_anon_consts: bool, /// Options related to bounds. pub bounds_options: BoundsOptions, /// Resolve definition identifiers to their concrete impl counterpart when possible in `ItemRef::translate`. pub item_ref_use_concrete_impl: bool, } #[derive(Debug, Clone, Copy)] pub struct BoundsOptions { /// Add `T: Destruct` bounds to every type generic, so that we can build `ImplExpr`s to know /// what code is run on drop. pub resolve_destruct: bool, /// Prune `T: Sized` and `T: MetaSized` predicates. pub prune_sized: bool, } ================================================ FILE: frontend/exporter/src/body.rs ================================================ pub use module::*; #[cfg(not(feature = "rustc"))] mod module { pub trait IsBody: Sized + Clone + 'static {} impl IsBody for T {} } #[cfg(feature = "rustc")] mod module { pub use crate::prelude::*; pub use rustc_hir::{ def_id::{DefId as RDefId, LocalDefId as RLocalDefId}, hir_id::OwnerId as ROwnerId, }; use rustc_middle::ty; mod store { //! This module helps at store bodies to avoid stealing. //! `rustc_data_structures::steal::Steal` is a box for which the content can be stolen, for performance reasons. //! The query system of Rust creates and steal such boxes, resulting in hax trying to borrow the value of a Steal while some query stole it already. //! This module provides an ad-hoc global cache and query overrides to deal with this issue. 
use rustc_hir::def_id::LocalDefId; use rustc_middle::mir::Body; use rustc_middle::query::plumbing::IntoQueryParam; use rustc_middle::thir::{ExprId, Thir}; use std::cell::RefCell; use std::collections::HashMap; use std::rc::Rc; thread_local! { static THIR_BODY: RefCell>, ExprId)>> = RefCell::new(HashMap::new()); static MIR_BUILT: RefCell>>> = RefCell::new(HashMap::new()); } /// Register overrides for rustc queries. /// This will clone and store bodies for THIR and MIR (built) in an ad-hoc global cache. pub fn override_queries_store_body(providers: &mut rustc_middle::query::Providers) { providers.thir_body = |tcx, def_id| { let (steal, expr_id) = (rustc_interface::DEFAULT_QUERY_PROVIDERS.thir_body)(tcx, def_id)?; let body = steal.borrow().clone(); let body: Thir<'static> = unsafe { std::mem::transmute(body) }; THIR_BODY.with(|map| map.borrow_mut().insert(def_id, (Rc::new(body), expr_id))); Ok((steal, expr_id)) }; providers.mir_built = |tcx, def_id| { let steal = (rustc_interface::DEFAULT_QUERY_PROVIDERS.mir_built)(tcx, def_id); let body = steal.borrow().clone(); let body: Body<'static> = unsafe { std::mem::transmute(body) }; MIR_BUILT.with(|map| map.borrow_mut().insert(def_id, Rc::new(body))); steal }; } /// Extension trait that provides non-stealing variants of `thir_body` and `mir_built`. /// Those methods requires rustc queries to be overriden with the helper function `register` above. #[extension_traits::extension(pub trait SafeTyCtxtBodies)] impl<'tcx> rustc_middle::ty::TyCtxt<'tcx> { fn thir_body_safe( &self, key: impl IntoQueryParam, ) -> Result<(Rc>, ExprId), rustc_span::ErrorGuaranteed> { let key = key.into_query_param(); if !THIR_BODY.with(|map| map.borrow().contains_key(&key)) { // Compute a body, which will insert a body in `THIR_BODIES`. 
let _ = self.thir_body(key); } THIR_BODY.with(|map| { let (body, expr) = map .borrow_mut() .get(&key) .expect("Did we forgot to call `register`?") .clone(); let body: Rc> = unsafe { std::mem::transmute(body) }; Ok((body, expr)) }) } fn mir_built_safe( &self, key: impl IntoQueryParam, ) -> Rc> { let key = key.into_query_param(); if !MIR_BUILT.with(|map| map.borrow().contains_key(&key)) { // Compute a body, which will insert a body in `MIR_BODIES`. let _ = self.mir_built(key); } MIR_BUILT.with(|map| { let body = map .borrow_mut() .get(&key) .expect("Did we forgot to call `register`?") .clone(); unsafe { std::mem::transmute(body) } }) } } } pub use store::*; pub fn get_thir<'tcx, S: BaseState<'tcx>>( did: RLocalDefId, s: &S, ) -> ( Rc>, rustc_middle::thir::ExprId, ) { let tcx = s.base().tcx; // The `type_of` anon constants isn't available directly, it needs to be fed by some // other query. This hack ensures this happens, otherwise `thir_body` returns an error. // See https://rust-lang.zulipchat.com/#narrow/channel/182449-t-compiler.2Fhelp/topic/Change.20in.20THIR.20of.20anonymous.20constants.3F/near/509764021 . let hir_id = tcx.local_def_id_to_hir_id(did); for (parent_id, parent) in tcx.hir_parent_iter(hir_id) { if let rustc_hir::Node::Item(..) = parent { let _ = tcx.check_well_formed(parent_id.owner.def_id); break; } } let msg = |_| fatal!(s[tcx.def_span(did)], "THIR not found for {:?}", did); tcx.thir_body_safe(did).as_ref().unwrap_or_else(msg).clone() } pub trait IsBody: Sized + std::fmt::Debug + Clone + std::any::Any + Send + Sync + 'static { fn body<'tcx, S: UnderOwnerState<'tcx>>( s: &S, did: RDefId, instantiate: Option>, ) -> Option; /// Reuse a MIR body we already got. Panic if that's impossible. 
fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(
    _s: &S,
    _body: rustc_middle::mir::Body<'tcx>,
) -> Option<Self> {
    None
}
}

/// Builds a `FnDef` (parameters, return type, body, signature span and
/// header) for a function item, using its THIR.
pub fn make_fn_def<'tcx, Body: IsBody, S: BaseState<'tcx>>(
    fn_sig: &rustc_hir::FnSig,
    body_id: &rustc_hir::BodyId,
    s: &S,
) -> FnDef<Body> {
    let hir_id = body_id.hir_id;
    let ldid = hir_id.owner.def_id;

    let (thir, expr_entrypoint) = get_thir(ldid, s);
    let s = &s.with_owner_id(ldid.to_def_id()).with_thir(thir.clone());
    FnDef {
        params: thir.params.raw.sinto(s),
        ret: thir.exprs[expr_entrypoint].ty.sinto(s),
        body: Body::body(s, ldid.to_def_id(), None).s_unwrap(s),
        sig_span: fn_sig.span.sinto(s),
        header: fn_sig.header.sinto(s),
    }
}

/// Exports the body identified by `id` as a `Body`.
pub fn body_from_id<'tcx, Body: IsBody, S: UnderOwnerState<'tcx>>(
    id: rustc_hir::BodyId,
    s: &S,
) -> Body {
    // **Important:**
    // We need a local id here, and we get it from the owner id, which must
    // be local. It is safe to do so, because if we have access to HIR objects,
    // it necessarily means we are exploring a local item (we don't have
    // access to the HIR of external objects, only their MIR).
    Body::body(s, s.base().tcx.hir_body_owner_def_id(id).to_def_id(), None).s_unwrap(s)
}

mod implementations {
    use super::*;

    // The trivial body: carries no information at all.
    impl IsBody for () {
        fn body<'tcx, S: UnderOwnerState<'tcx>>(
            _s: &S,
            _did: RDefId,
            _instantiate: Option<ty::GenericArgsRef<'tcx>>,
        ) -> Option<Self> {
            Some(())
        }
        fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(
            _s: &S,
            _body: rustc_middle::mir::Body<'tcx>,
        ) -> Option<Self> {
            Some(())
        }
    }
    impl IsBody for ThirBody {
        fn body<'tcx, S: BaseState<'tcx>>(
            s: &S,
            did: RDefId,
            instantiate: Option<ty::GenericArgsRef<'tcx>>,
        ) -> Option<Self> {
            let did = did.as_local()?;
            // The following returns `None` if did refers to something that has no body (avoids a crash in the call to `thir_body`)
            s.base().tcx.hir_maybe_body_owned_by(did)?;
            let (thir, expr) = get_thir(did, s);
            assert!(instantiate.is_none(), "monomorphized thir isn't supported");
            let s = &s.with_owner_id(did.to_def_id()).with_thir(thir.clone());
            let params = thir.params.raw.sinto(s);
            let expr = if *CORE_EXTRACTION_MODE {
                // In core-extraction mode, replace the body by a unit
                // expression (keeping its type and span).
                let expr = &thir.exprs[expr];
                Decorated {
                    contents: Box::new(ExprKind::Tuple { fields: vec![] }),
                    hir_id: None,
                    attributes: vec![],
                    ty: expr.ty.sinto(s),
                    span: expr.span.sinto(s),
                }
            } else {
                expr.sinto(&s.with_thir(thir))
            };
            Some(Self { expr, params })
        }
    }
    impl<A: IsBody, B: IsBody> IsBody for (A, B) {
        fn body<'tcx, S: UnderOwnerState<'tcx>>(
            s: &S,
            did: RDefId,
            instantiate: Option<ty::GenericArgsRef<'tcx>>,
        ) -> Option<Self> {
            Some((A::body(s, did, instantiate)?, B::body(s, did, instantiate)?))
        }
    }

    // NOTE(review): generic header reconstructed from the body's use of
    // `MirKind::get_mir` — confirm bounds against upstream.
    impl<MirKind: IsMirKind> IsBody for MirBody<MirKind> {
        fn body<'tcx, S: UnderOwnerState<'tcx>>(
            s: &S,
            did: RDefId,
            instantiate: Option<ty::GenericArgsRef<'tcx>>,
        ) -> Option<Self> {
            let tcx = s.base().tcx;
            let typing_env = s.typing_env();
            MirKind::get_mir(tcx, did, |body| {
                let body = substitute(tcx, typing_env, instantiate, body.clone());
                let body = Rc::new(body);
                body.sinto(&s.with_mir(body.clone()))
            })
        }
        fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(
            s: &S,
            body: rustc_middle::mir::Body<'tcx>,
        ) -> Option<Self> {
            let body = Rc::new(body.clone());
            let s = &s.with_mir(body.clone());
            Some(body.sinto(s))
        }
    }
}

impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto<S, Body> for rustc_hir::BodyId {
    fn sinto(&self, s: &S) -> Body {
        body_from_id::<Body, _>(*self, s)
    }
}
}

================================================
FILE: frontend/exporter/src/comments.rs
================================================
use crate::prelude::*;
use rustc_lexer::TokenKind;
use std::fs;

/// Returns a list of (spanned) comments found in file `path`, or an
/// error if the file at `path` could not be open.
pub fn comments_of_file(path: PathBuf) -> std::io::Result<Vec<(Span, String)>> {
    // Strips comment delimiters (`//`, `/* */`) and a leading `!`.
    fn clean_comment(comment: &str) -> &str {
        let comment = if let Some(comment) = comment.strip_prefix("/*") {
            comment
                .strip_suffix("*/")
                .expect("A comment that starts with `/*` should always ends with `*/`")
        } else {
            comment
                .strip_prefix("//")
                .expect("A comment has to start with `//` or `/*`")
        };
        comment.strip_prefix("!").unwrap_or(comment)
    }
    let source = &fs::read_to_string(&path)?;
    let mut comments = vec![];
    // Track byte position and (line, col) while walking the tokens.
    let (mut pos, mut line, mut col) = (0, 0, 0);
    for token in rustc_lexer::tokenize(source, rustc_lexer::FrontmatterAllowed::Yes) {
        let len = token.len as usize;
        let sub = &source[pos..(pos + len)];
        let lo = Loc { line, col };
        line += sub.chars().filter(|c| matches!(c, '\n')).count();
        pos += len;
        if lo.line != line {
            // The token spans several lines: the new column is the
            // length of its last line.
            col = sub.chars().rev().take_while(|c| !matches!(c, '\n')).count();
        } else {
            col += len;
        }
        if let TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } = token.kind {
            // Skip doc comments (`///`, `/**`): those are attributes.
            if !sub.starts_with("///") && !sub.starts_with("/**") {
                let span = Span {
                    lo,
                    hi: Loc { line, col },
                    filename: FileName::Real(RealFileName::LocalPath(path.clone())),
                    rust_span_data: None,
                };
                comments.push((span, clean_comment(sub).to_string()));
            }
        }
    }
    Ok(comments)
}

================================================
FILE: frontend/exporter/src/constant_utils/uneval.rs
================================================
//! Reconstruct structured expressions from rustc's various constant representations.
use super::*; use rustc_const_eval::interpret::{InterpResult, interp_ok}; use rustc_middle::mir::interpret; use rustc_middle::{mir, ty}; impl ConstantLiteral { /// Rustc always represents string constants as `&[u8]`, but this /// is not nice to consume. This associated function interpret /// bytes as an unicode string, and as a byte string otherwise. fn byte_str(bytes: Vec) -> Self { match String::from_utf8(bytes.clone()) { Ok(s) => Self::Str(s), Err(_) => Self::ByteStr(bytes), } } } #[tracing::instrument(level = "trace", skip(s))] pub(crate) fn scalar_int_to_constant_literal<'tcx, S: UnderOwnerState<'tcx>>( s: &S, x: rustc_middle::ty::ScalarInt, ty: rustc_middle::ty::Ty<'tcx>, ) -> ConstantLiteral { match ty.kind() { ty::Char => ConstantLiteral::Char( char::try_from(x).s_expect(s, "scalar_int_to_constant_literal: expected a char"), ), ty::Bool => ConstantLiteral::Bool( x.try_to_bool() .s_expect(s, "scalar_int_to_constant_literal: expected a bool"), ), ty::Int(kind) => { let v = x.to_int(x.size()); ConstantLiteral::Int(ConstantInt::Int(v, kind.sinto(s))) } ty::Uint(kind) => { let v = x.to_uint(x.size()); ConstantLiteral::Int(ConstantInt::Uint(v, kind.sinto(s))) } ty::Float(kind) => { let v = x.to_bits_unchecked(); bits_and_type_to_float_constant_literal(v, kind.sinto(s)) } _ => { let ty_sinto: Ty = ty.sinto(s); supposely_unreachable_fatal!( s, "scalar_int_to_constant_literal_ExpectedLiteralType"; { ty, ty_sinto, x } ) } } } /// Converts a bit-representation of a float of type `ty` to a constant literal fn bits_and_type_to_float_constant_literal(bits: u128, ty: FloatTy) -> ConstantLiteral { use rustc_apfloat::{Float, ieee}; let string = match &ty { FloatTy::F16 => ieee::Half::from_bits(bits).to_string(), FloatTy::F32 => ieee::Single::from_bits(bits).to_string(), FloatTy::F64 => ieee::Double::from_bits(bits).to_string(), FloatTy::F128 => ieee::Quad::from_bits(bits).to_string(), }; ConstantLiteral::Float(string, ty) } impl ConstantExprKind { pub fn decorate(self, ty: 
Ty, span: Span) -> Decorated { Decorated { contents: Box::new(self), hir_id: None, attributes: vec![], ty, span, } } } /// Whether a `DefId` is a `AnonConst`. An anonymous constant is /// generated by Rustc, hoisting every constat bits from items as /// separate top-level items. This AnonConst mechanism is internal to /// Rustc; we don't want to reflect that, instead we prefer inlining /// those. `is_anon_const` is used to detect such AnonConst so that we /// can evaluate and inline them. pub(crate) fn is_anon_const( did: rustc_span::def_id::DefId, tcx: rustc_middle::ty::TyCtxt<'_>, ) -> bool { matches!( tcx.def_kind(did), rustc_hir::def::DefKind::AnonConst | rustc_hir::def::DefKind::InlineConst ) } /// Attempts to translate a `ty::UnevaluatedConst` into a constant expression. This handles cases /// of references to top-level or associated constants. Returns `None` if the input was not a named /// constant. pub fn translate_constant_reference<'tcx>( s: &impl UnderOwnerState<'tcx>, span: rustc_span::Span, ucv: rustc_middle::ty::UnevaluatedConst<'tcx>, ) -> Option { let tcx = s.base().tcx; if s.base().options.inline_anon_consts && is_anon_const(ucv.def, tcx) { return None; } let typing_env = s.typing_env(); let ty = s.base().tcx.type_of(ucv.def).instantiate(tcx, ucv.args); let ty = tcx .try_normalize_erasing_regions(typing_env, ty) .unwrap_or(ty); let kind = if let Some(assoc) = s.base().tcx.opt_associated_item(ucv.def) && matches!( assoc.container, ty::AssocContainer::Trait | ty::AssocContainer::TraitImpl(..) ) { // This is an associated constant in a trait. let name = assoc.name().to_string(); let impl_expr = self_clause_for_item(s, ucv.def, ucv.args).unwrap(); ConstantExprKind::TraitConst { impl_expr, name } } else { let item = translate_item_ref(s, ucv.def, ucv.args); ConstantExprKind::GlobalName(item) }; let cv = kind.decorate(ty.sinto(s), span.sinto(s)); Some(cv) } /// Evaluate a `ty::Const`. 
pub fn eval_ty_constant<'tcx, S: UnderOwnerState<'tcx>>( s: &S, uv: rustc_middle::ty::UnevaluatedConst<'tcx>, ) -> Option> { use ty::TypeVisitableExt; let tcx = s.base().tcx; let typing_env = s.typing_env(); if uv.has_non_region_param() { return None; } let span = tcx.def_span(uv.def); let erased_uv = tcx.erase_and_anonymize_regions(uv); let val = tcx .const_eval_resolve_for_typeck(typing_env, erased_uv, span) .ok()? .ok()?; let ty = tcx.type_of(uv.def).instantiate(tcx, uv.args); Some(ty::Const::new_value(tcx, val, ty)) } /// Evaluate a `mir::Const`. pub fn eval_mir_constant<'tcx, S: UnderOwnerState<'tcx>>( s: &S, c: mir::Const<'tcx>, ) -> Option> { let evaluated = c .eval(s.base().tcx, s.typing_env(), rustc_span::DUMMY_SP) .ok()?; let evaluated = mir::Const::Val(evaluated, c.ty()); (evaluated != c).then_some(evaluated) } impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Const<'tcx> { #[tracing::instrument(level = "trace", skip(s))] fn sinto(&self, s: &S) -> ConstantExpr { use rustc_middle::query::Key; let span = self.default_span(s.base().tcx); match self.kind() { ty::ConstKind::Param(p) => { let ty = p.find_const_ty_from_env(s.param_env()); let kind = ConstantExprKind::ConstRef { id: p.sinto(s) }; kind.decorate(ty.sinto(s), span.sinto(s)) } ty::ConstKind::Infer(..) => { fatal!(s[span], "ty::ConstKind::Infer node? 
{:#?}", self) } ty::ConstKind::Unevaluated(ucv) => match translate_constant_reference(s, span, ucv) { Some(val) => val, None => match eval_ty_constant(s, ucv) { Some(val) => val.sinto(s), // TODO: This is triggered when compiling using `generic_const_exprs` None => supposely_unreachable_fatal!(s, "TranslateUneval"; {self, ucv}), }, }, ty::ConstKind::Value(val) => valtree_to_constant_expr(s, val.valtree, val.ty, span), ty::ConstKind::Error(_) => fatal!(s[span], "ty::ConstKind::Error"), ty::ConstKind::Expr(e) => fatal!(s[span], "ty::ConstKind::Expr {:#?}", e), ty::ConstKind::Bound(i, bound) => { supposely_unreachable_fatal!(s[span], "ty::ConstKind::Bound"; {i, bound}) } _ => fatal!(s[span], "unexpected case"), } } } impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Value<'tcx> { #[tracing::instrument(level = "trace", skip(s))] fn sinto(&self, s: &S) -> ConstantExpr { valtree_to_constant_expr(s, self.valtree, self.ty, rustc_span::DUMMY_SP) } } #[tracing::instrument(level = "trace", skip(s))] pub(crate) fn valtree_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>( s: &S, valtree: rustc_middle::ty::ValTree<'tcx>, ty: rustc_middle::ty::Ty<'tcx>, span: rustc_span::Span, ) -> ConstantExpr { let kind = match (&*valtree, ty.kind()) { (_, ty::Ref(_, inner_ty, _)) => { ConstantExprKind::Borrow(valtree_to_constant_expr(s, valtree, *inner_ty, span)) } (ty::ValTreeKind::Branch(valtrees), ty::Str) => { let bytes = valtrees .iter() .map(|x| match &***x { ty::ValTreeKind::Leaf(leaf) => leaf.to_u8(), _ => fatal!( s[span], "Expected a flat list of leaves while translating \ a str literal, got a arbitrary valtree." ), }) .collect(); ConstantExprKind::Literal(ConstantLiteral::byte_str(bytes)) } ( ty::ValTreeKind::Branch(_), ty::Array(..) | ty::Slice(..) | ty::Tuple(..) 
| ty::Adt(..),
        ) => {
            let tcx = s.base().tcx;
            let contents: rustc_middle::ty::DestructuredConst =
                tcx.destructure_const(ty::Const::new_value(s.base().tcx, valtree, ty));
            let fields = contents.fields.iter().copied();
            match ty.kind() {
                ty::Slice(inner_ty) => {
                    // A slice constant is a borrow of an array constant: build the
                    // `[T; N]` type of the borrowed array from the field count.
                    let array_ty = {
                        let size = rustc_middle::ty::ScalarInt::try_from_target_usize(
                            fields.len() as u128,
                            tcx,
                        )
                        .s_unwrap(s);
                        let valtree = rustc_middle::ty::ValTree::from_scalar_int(tcx, size);
                        let value = rustc_middle::ty::Value {
                            ty: tcx.types.usize,
                            valtree,
                        };
                        let len = tcx.mk_ct_from_kind(rustc_middle::ty::ConstKind::Value(value));
                        tcx.mk_ty_from_kind(rustc_middle::ty::TyKind::Array(*inner_ty, len))
                    };
                    let array = ConstantExprKind::Array {
                        fields: fields.map(|field| field.sinto(s)).collect(),
                    }
                    .decorate(array_ty.sinto(s), span.sinto(s));
                    ConstantExprKind::Borrow(array)
                }
                ty::Array(_, _) => ConstantExprKind::Array {
                    fields: fields.map(|field| field.sinto(s)).collect(),
                },
                ty::Tuple(_) => ConstantExprKind::Tuple {
                    fields: fields.map(|field| field.sinto(s)).collect(),
                },
                ty::Adt(def, _) => {
                    let variant_idx = contents
                        .variant
                        .s_expect(s, "destructed const of adt without variant idx");
                    let variant_def = &def.variant(variant_idx);
                    ConstantExprKind::Adt {
                        info: get_variant_information(def, variant_idx, s),
                        fields: fields
                            .into_iter()
                            .zip(&variant_def.fields)
                            .map(|(value, field)| ConstantFieldExpr {
                                field: field.did.sinto(s),
                                value: value.sinto(s),
                            })
                            .collect(),
                    }
                }
                _ => unreachable!(),
            }
        }
        (ty::ValTreeKind::Leaf(x), ty::RawPtr(_, _)) => {
            use crate::rustc_type_ir::inherent::Ty;
            // A provenance-free raw pointer: represented as a `usize` literal cast
            // to the pointer type.
            let raw_address = x.to_bits_unchecked();
            let uint_ty = UintTy::Usize;
            let usize_ty = rustc_middle::ty::Ty::new_usize(s.base().tcx).sinto(s);
            let lit = ConstantLiteral::Int(ConstantInt::Uint(raw_address, uint_ty));
            ConstantExprKind::Cast {
                source: ConstantExprKind::Literal(lit).decorate(usize_ty, span.sinto(s)),
            }
        }
        (ty::ValTreeKind::Leaf(x), _) => {
            ConstantExprKind::Literal(scalar_int_to_constant_literal(s, *x, ty))
        }
        _ => supposely_unreachable_fatal!(
            s[span],
            "valtree_to_expr";
            {valtree, ty}
        ),
    };
    kind.decorate(ty.sinto(s), span.sinto(s))
}

/// Use the const-eval interpreter to convert an evaluated operand back to a structured
/// constant expression.
fn op_to_const<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    span: rustc_span::Span,
    ecx: &rustc_const_eval::const_eval::CompileTimeInterpCx<'tcx>,
    op: rustc_const_eval::interpret::OpTy<'tcx>,
) -> InterpResult<'tcx, ConstantExpr> {
    use crate::rustc_const_eval::interpret::Projectable;
    // Code inspired from `try_destructure_mir_constant_for_user_output` and
    // `const_eval::eval_queries::op_to_const`.
    let tcx = s.base().tcx;
    let ty = op.layout.ty;
    // Helper for struct-likes.
    let read_fields = |of: rustc_const_eval::interpret::OpTy<'tcx>, field_count| {
        (0..field_count).map(move |i| {
            let field_op = ecx.project_field(&of, rustc_abi::FieldIdx::from_usize(i))?;
            op_to_const(s, span, &ecx, field_op)
        })
    };
    let kind = match ty.kind() {
        // Detect statics
        _ if let Some(place) = op.as_mplace_or_imm().left()
            && let ptr = place.ptr()
            && let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr, 0)?
            && let interpret::GlobalAlloc::Static(did) = tcx.global_alloc(alloc_id) =>
        {
            let item = translate_item_ref(s, did, ty::GenericArgsRef::default());
            ConstantExprKind::GlobalName(item)
        }
        ty::Char | ty::Bool | ty::Uint(_) | ty::Int(_) | ty::Float(_) => {
            let scalar = ecx.read_scalar(&op)?;
            let scalar_int = scalar.try_to_scalar_int().unwrap();
            let lit = scalar_int_to_constant_literal(s, scalar_int, ty);
            ConstantExprKind::Literal(lit)
        }
        ty::Adt(adt_def, ..) if adt_def.is_union() => {
            ConstantExprKind::Todo("Cannot translate constant of union type".into())
        }
        ty::Adt(adt_def, ..)
=> {
            let variant = ecx.read_discriminant(&op)?;
            let down = ecx.project_downcast(&op, variant)?;
            let field_count = adt_def.variants()[variant].fields.len();
            let fields = read_fields(down, field_count)
                .zip(&adt_def.variant(variant).fields)
                .map(|(value, field)| {
                    interp_ok(ConstantFieldExpr {
                        field: field.did.sinto(s),
                        value: value?,
                    })
                })
                .collect::<InterpResult<'tcx, Vec<_>>>()?;
            let variants_info = get_variant_information(adt_def, variant, s);
            ConstantExprKind::Adt {
                info: variants_info,
                fields,
            }
        }
        ty::Closure(def_id, args) => {
            // A closure is essentially an adt with funky generics and some builtin impls.
            let def_id: DefId = def_id.sinto(s);
            let field_count = args.as_closure().upvar_tys().len();
            let fields = read_fields(op, field_count)
                .map(|value| {
                    interp_ok(ConstantFieldExpr {
                        // HACK: Closure fields don't have their own def_id, but Charon doesn't use
                        // field DefIds so we put a dummy one.
                        field: def_id.clone(),
                        value: value?,
                    })
                })
                .collect::<InterpResult<'tcx, Vec<_>>>()?;
            let variants_info = VariantInformations {
                type_namespace: def_id.parent.clone().unwrap(),
                typ: def_id.clone(),
                variant: def_id,
                kind: VariantKind::Struct { named: false },
            };
            ConstantExprKind::Adt {
                info: variants_info,
                fields,
            }
        }
        ty::Tuple(args) => {
            let fields = read_fields(op, args.len()).collect::<InterpResult<'tcx, Vec<_>>>()?;
            ConstantExprKind::Tuple { fields }
        }
        ty::Array(..) | ty::Slice(..) => {
            let len = op.len(ecx)?;
            let fields = (0..len)
                .map(|i| {
                    let op = ecx.project_index(&op, i)?;
                    op_to_const(s, span, ecx, op)
                })
                .collect::<InterpResult<'tcx, Vec<_>>>()?;
            ConstantExprKind::Array { fields }
        }
        ty::Str => {
            let str = ecx.read_str(&op.assert_mem_place())?;
            ConstantExprKind::Literal(ConstantLiteral::Str(str.to_owned()))
        }
        ty::FnDef(def_id, args) => {
            let item = translate_item_ref(s, *def_id, args);
            ConstantExprKind::FnPtr(item)
        }
        ty::RawPtr(..) | ty::Ref(..) => {
            if let Some(op) = ecx.deref_pointer(&op).discard_err() {
                // Valid pointer case
                let val = op_to_const(s, span, ecx, op.into())?;
                match ty.kind() {
                    ty::Ref(..) => ConstantExprKind::Borrow(val),
                    ty::RawPtr(.., mutability) => ConstantExprKind::RawBorrow {
                        arg: val,
                        mutability: mutability.sinto(s),
                    },
                    _ => unreachable!(),
                }
            } else {
                // Invalid pointer; try reading it as a raw address
                let scalar = ecx.read_scalar(&op)?;
                let scalar_int = scalar.try_to_scalar_int().unwrap();
                let v = scalar_int.to_uint(scalar_int.size());
                let lit = ConstantLiteral::PtrNoProvenance(v);
                ConstantExprKind::Literal(lit)
            }
        }
        ty::FnPtr(..)
        | ty::Dynamic(..)
        | ty::Foreign(..)
        | ty::Pat(..)
        | ty::UnsafeBinder(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..) => ConstantExprKind::Todo("Unhandled constant type".into()),
        ty::Alias(..) | ty::Param(..) | ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) => {
            fatal!(s[span], "Encountered evaluated constant of non-monomorphic type"; {op})
        }
        ty::Never | ty::Error(..) => {
            fatal!(s[span], "Encountered evaluated constant of invalid type"; {ty})
        }
    };
    let val = kind.decorate(ty.sinto(s), span.sinto(s));
    interp_ok(val)
}

/// Build an interpreter context for `val` (a raw `ConstValue` of type `ty`) and
/// translate it into a structured constant expression.
pub fn const_value_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    ty: rustc_middle::ty::Ty<'tcx>,
    val: mir::ConstValue,
    span: rustc_span::Span,
) -> InterpResult<'tcx, ConstantExpr> {
    let tcx = s.base().tcx;
    let typing_env = s.typing_env();
    let (ecx, op) =
        rustc_const_eval::const_eval::mk_eval_cx_for_const_val(tcx.at(span), typing_env, val, ty)
            .unwrap();
    op_to_const(s, span, &ecx, op)
}

================================================
FILE: frontend/exporter/src/constant_utils.rs
================================================

use crate::prelude::*;

#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum ConstantInt {
    Int(
        #[serde(with = "serialize_int::signed")]
        #[schemars(with = "String")]
        i128,
        IntTy,
    ),
    Uint(
        #[serde(with = "serialize_int::unsigned")]
        #[schemars(with = "String")]
        u128,
        UintTy,
    ),
}

#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum ConstantLiteral { Bool(bool), Char(char), Float(String, FloatTy), Int(ConstantInt), PtrNoProvenance(u128), Str(String), ByteStr(Vec), } /// The subset of [Expr] that corresponds to constants. #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum ConstantExprKind { Literal(ConstantLiteral), // Adts (structs, enums, unions) or closures. Adt { info: VariantInformations, fields: Vec, }, Array { fields: Vec, }, Tuple { fields: Vec, }, /// A top-level constant or a constant appearing in an impl block. /// /// Remark: constants *can* have generic parameters. /// Example: /// ```text /// struct V { /// x: [T; N], /// } /// /// impl V { /// const LEN: usize = N; // This has generics /// } /// ``` /// /// If `options.inline_anon_consts` is `false`, this is also used for inline const blocks and /// advanced const generics expressions. GlobalName(ItemRef), /// A trait constant /// /// Ex.: /// ```text /// impl Foo for Bar { /// const C : usize = 32; // <- /// } /// ``` TraitConst { impl_expr: ImplExpr, name: String, }, /// A shared reference to a static variable. Borrow(ConstantExpr), /// A raw borrow (`*const` or `*mut`). RawBorrow { mutability: Mutability, arg: ConstantExpr, }, /// A cast ` as `, `` is stored as the type of /// the current constant expression. Currently, this is only used /// to represent `lit as *mut T` or `lit as *const T`, where `lit` /// is a `usize` literal. Cast { source: ConstantExpr, }, ConstRef { id: ParamConst, }, FnPtr(ItemRef), /// A blob of memory containing the byte representation of the value. This can occur when /// evaluating MIR constants. Interpreting this back to a structured value is left as an /// exercice to the consumer. 
Memory(Vec), Todo(String), } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ConstantFieldExpr { pub field: DefId, pub value: ConstantExpr, } /// Rustc has different representation for constants: one for MIR /// ([`rustc_middle::mir::Const`]), one for the type system /// ([`rustc_middle::ty::ConstKind`]). For simplicity hax maps those /// two construct to one same `ConstantExpr` type. pub type ConstantExpr = Decorated; // For ConstantKind we merge all the cases (Ty, Val, Unevaluated) into one pub type ConstantKind = ConstantExpr; impl From for FieldExpr { fn from(c: ConstantFieldExpr) -> FieldExpr { FieldExpr { value: c.value.into(), field: c.field, } } } impl From for Expr { fn from(c: ConstantExpr) -> Expr { use ConstantExprKind::*; let kind = match *c.contents { Literal(lit) => { use ConstantLiteral::*; let mut neg = false; let node = match lit { Bool(b) => LitKind::Bool(b), Char(c) => LitKind::Char(c), Int(i) => { use LitIntType::*; match i { ConstantInt::Uint(v, t) => LitKind::Int(v, Unsigned(t)), ConstantInt::Int(v, t) => { neg = v.is_negative(); LitKind::Int(v.abs_diff(0), Signed(t)) } } } Float(f, ty) => LitKind::Float(f, LitFloatType::Suffixed(ty)), PtrNoProvenance(p) => LitKind::Int(p, LitIntType::Unsigned(UintTy::Usize)), ByteStr(raw) => LitKind::ByteStr(raw, StrStyle::Cooked), Str(raw) => LitKind::Str(raw, StrStyle::Cooked), }; let span = c.span.clone(); let lit = Spanned { span, node }; ExprKind::Literal { lit, neg } } Adt { info, fields } => ExprKind::Adt(AdtExpr { info, fields: fields.into_iter().map(|field| field.into()).collect(), base: AdtExprBase::None, user_ty: None, }), GlobalName(item) => ExprKind::GlobalName { item, constructor: None, }, Borrow(e) => ExprKind::Borrow { borrow_kind: BorrowKind::Shared, arg: e.into(), }, RawBorrow { mutability, arg } => ExprKind::RawBorrow { mutability, arg: arg.into(), }, ConstRef { id } => ExprKind::ConstRef { id }, Array { fields } => 
ExprKind::Array {
                fields: fields.into_iter().map(|field| field.into()).collect(),
            },
            Tuple { fields } => ExprKind::Tuple {
                fields: fields.into_iter().map(|field| field.into()).collect(),
            },
            Cast { source } => ExprKind::Cast {
                source: source.into(),
            },
            // These constant kinds have no expression counterpart yet.
            kind @ (FnPtr { .. } | TraitConst { .. } | Memory { .. }) => {
                ExprKind::Todo(format!("Unsupported constant kind. kind={:#?}", kind))
            }
            Todo(msg) => ExprKind::Todo(msg),
        };
        Decorated {
            contents: Box::new(kind),
            ty: c.ty,
            span: c.span,
            hir_id: c.hir_id,
            attributes: c.attributes,
        }
    }
}

#[cfg(feature = "rustc")]
pub use self::uneval::*;
#[cfg(feature = "rustc")]
mod uneval;

================================================
FILE: frontend/exporter/src/deterministic_hash.rs
================================================

//! Stolen from a third-party deterministic-hash implementation.
//! NOTE(review): the original source URL was angle-bracketed and lost in
//! extraction — restore the `<https://…>` link.

use core::hash::Hasher;

/// Wrapper around any hasher to make it deterministic.
#[derive(Default)]
pub struct DeterministicHasher<T: Hasher>(T);

/// Implementation of hasher that forces all bytes written to be platform agnostic.
impl<T: Hasher> core::hash::Hasher for DeterministicHasher<T> {
    fn finish(&self) -> u64 {
        self.0.finish()
    }
    // Raw byte slices are forwarded untouched to the inner hasher.
    fn write(&mut self, bytes: &[u8]) {
        self.0.write(bytes);
    }
    // All multi-byte integers are written in little-endian order so the resulting
    // hash does not depend on the host's endianness.
    fn write_u8(&mut self, i: u8) {
        self.write(&i.to_le_bytes())
    }
    fn write_u16(&mut self, i: u16) {
        self.write(&i.to_le_bytes())
    }
    fn write_u32(&mut self, i: u32) {
        self.write(&i.to_le_bytes())
    }
    fn write_u64(&mut self, i: u64) {
        self.write(&i.to_le_bytes())
    }
    fn write_u128(&mut self, i: u128) {
        self.write(&i.to_le_bytes())
    }
    // `usize` is widened to 64 bits so 32- and 64-bit hosts agree.
    fn write_usize(&mut self, i: usize) {
        self.write(&(i as u64).to_le_bytes())
    }
    fn write_i8(&mut self, i: i8) {
        self.write_u8(i as u8)
    }
    fn write_i16(&mut self, i: i16) {
        self.write_u16(i as u16)
    }
    fn write_i32(&mut self, i: i32) {
        self.write_u32(i as u32)
    }
    fn write_i64(&mut self, i: i64) {
        self.write_u64(i as u64)
    }
    fn write_i128(&mut self, i: i128) {
        self.write_u128(i as u128)
    }
    fn write_isize(&mut self, i: isize) {
        self.write_usize(i as usize)
    }
}

================================================
FILE: frontend/exporter/src/id_table.rs
================================================

/// This module provides a notion of table, identifiers and nodes. A
/// `Node<T>` is a `Arc<T>` bundled with a unique identifier such that
/// there exists an entry in a table for that identifier.
///
/// The type `WithTable<T>` bundles a table with a value of type
/// `T`. That value of type `T` may hold an arbitrary number of
/// `Node<_>`s. In the context of a `WithTable<T>`, the type `Node<_>`
/// serializes and deserializes using a table as a state. In this
/// case, serializing a `Node<U>` produces only an identifier, without
/// any data of type `U`. Deserializing a `Node<U>` under a
/// `WithTable<T>` will recover `U` data from the table held by
/// `WithTable<T>`.
///
/// Serde is not designed for stateful (de)serialization. There is no
/// way of deriving `serde::de::DeserializeSeed` systematically. This
/// module thus makes use of global state to achieve serialization and
/// deserialization. This modules provides an API that hides this
/// global state.
use crate::prelude::*;
use std::{
    hash::{Hash, Hasher},
    sync::{Arc, LazyLock, Mutex, MutexGuard, atomic::Ordering},
};

/// Unique IDs in a ID table.
#[derive_group(Serializers)]
#[derive(Default, Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[serde(transparent)]
pub struct Id {
    id: u32,
}

/// A session providing fresh IDs for ID table.
#[derive(Default, Debug)]
pub struct Session {
    next_id: Id,
    table: Table,
}

impl Session {
    pub fn table(&self) -> &Table {
        &self.table
    }
}

/// The different types of values one can store in an ID table.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub enum Value {
    Ty(Arc<TyKind>),
    DefId(Arc<DefIdContents>),
    ItemRef(Arc<ItemRefContents>),
}

impl SupportedType<Value> for TyKind {
    fn to_types(value: Arc<Self>) -> Value {
        Value::Ty(value)
    }
    fn from_types(t: &Value) -> Option<Arc<Self>> {
        match t {
            Value::Ty(value) => Some(value.clone()),
            _ => None,
        }
    }
}

impl SupportedType<Value> for DefIdContents {
    fn to_types(value: Arc<Self>) -> Value {
        Value::DefId(value)
    }
    fn from_types(t: &Value) -> Option<Arc<Self>> {
        match t {
            Value::DefId(value) => Some(value.clone()),
            _ => None,
        }
    }
}

impl SupportedType<Value> for ItemRefContents {
    fn to_types(value: Arc<Self>) -> Value {
        Value::ItemRef(value)
    }
    fn from_types(t: &Value) -> Option<Arc<Self>> {
        match t {
            Value::ItemRef(value) => Some(value.clone()),
            _ => None,
        }
    }
}

/// A node is a bundle of an ID with a value.
// NOTE(review): the generic arguments of `serde_repr::NodeRepr` in the two
// attributes below were lost in extraction — TODO confirm against `serde_repr`.
#[derive(Deserialize, Serialize, Debug, JsonSchema, PartialOrd, Ord)]
#[serde(into = "serde_repr::NodeRepr")]
#[serde(try_from = "serde_repr::NodeRepr")]
pub struct Node<T: SupportedType<Value>> {
    id: Id,
    value: Arc<T>,
}

impl<T: SupportedType<Value>> std::ops::Deref for Node<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.value.as_ref()
    }
}

/// Hax relies on hashes being deterministic for predicates
/// ids. Identifiers are not deterministic: we implement hash for
/// `Node` manually, discarding the field `id`.
impl + Hash> Hash for Node { fn hash(&self, state: &mut H) { self.value.as_ref().hash(state); } } impl + Eq> Eq for Node {} impl + PartialEq> PartialEq for Node { fn eq(&self, other: &Self) -> bool { self.value == other.value } } /// Manual implementation of `Clone` that doesn't require a `Clone` /// bound on `T`. impl> Clone for Node { fn clone(&self) -> Self { Self { id: self.id.clone(), value: self.value.clone(), } } } /// A table is a map from IDs to `Value`s. When serialized, we /// represent a table as a *sorted* vector. Indeed, the values stored /// in the table might reference each other, without cycle, so the /// order matters. #[derive(Default, Debug, Clone, Deserialize, Serialize)] #[serde(into = "serde_repr::SortedIdValuePairs")] #[serde(from = "serde_repr::SortedIdValuePairs")] pub struct Table(HeterogeneousMap); mod heterogeneous_map { //! This module provides an heterogenous map that can store types //! that implement the trait `SupportedType`. use std::collections::HashMap; use std::hash::Hash; use std::sync::Arc; #[derive(Clone, Debug)] /// An heterogenous map is a map from `Key` to `Value`. It provide /// the methods `insert` and `get` for any type `T` that /// implements `SupportedType`. pub struct HeterogeneousMap(HashMap); impl Default for HeterogeneousMap { fn default() -> Self { Self(HashMap::default()) } } impl HeterogeneousMap { pub(super) fn insert(&mut self, key: Key, value: Arc) where T: SupportedType, { self.insert_raw_value(key, T::to_types(value)); } pub(super) fn insert_raw_value(&mut self, key: Key, value: Value) { self.0.insert(key, value); } pub(super) fn from_iter(it: impl Iterator) -> Self { Self(HashMap::from_iter(it)) } pub(super) fn into_iter(self) -> impl Iterator { self.0.into_iter() } pub(super) fn get(&self, key: &Key) -> Option>> where T: SupportedType, { self.0.get(key).map(T::from_types) } } /// A type that can be mapped to `Value` and optionally /// reconstructed back. 
pub trait SupportedType: std::fmt::Debug { fn to_types(value: Arc) -> Value; fn from_types(t: &Value) -> Option>; } } use heterogeneous_map::*; impl Session { fn fresh_id(&mut self) -> Id { let id = self.next_id.id; self.next_id.id += 1; Id { id } } } impl> Node { pub fn new(value: T, session: &mut Session) -> Self { let id = session.fresh_id(); let value = Arc::new(value); session.table.0.insert(id.clone(), value.clone()); Self { id, value } } pub fn inner(&self) -> &Arc { &self.value } pub fn id(&self) -> Id { self.id } } /// Wrapper for a type `T` that creates a bundle containing both a ID /// table and a value `T`. That value may contains `Node` values /// inside it. Serializing `WithTable` will serialize IDs only, /// skipping values. Deserialization of a `WithTable` will /// automatically use the table and IDs to reconstruct skipped values. #[derive(Debug)] pub struct WithTable { table: Table, value: T, } /// The state used for deserialization: a table. static DESERIALIZATION_STATE: LazyLock> = LazyLock::new(|| Mutex::new(Table::default())); static DESERIALIZATION_STATE_LOCK: LazyLock> = LazyLock::new(|| Mutex::new(())); /// The mode of serialization: should `Node` ship values of type `T` or not? static SERIALIZATION_MODE_USE_IDS: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false); fn serialize_use_id() -> bool { SERIALIZATION_MODE_USE_IDS.load(Ordering::Relaxed) } impl WithTable { /// Runs `f` with a `WithTable` created out of `map` and /// `value`. Any serialization of values of type `Node<_>` will /// skip the field `value`. 
pub fn run(map: Table, value: T, f: impl FnOnce(&Self) -> R) -> R { if serialize_use_id() { panic!( "CACHE_MAP_LOCK: only one WithTable serialization can occur at a time (nesting is forbidden)" ) } SERIALIZATION_MODE_USE_IDS.store(true, Ordering::Relaxed); let result = f(&Self { table: map, value }); SERIALIZATION_MODE_USE_IDS.store(false, Ordering::Relaxed); result } pub fn destruct(self) -> (T, Table) { let Self { value, table: map } = self; (value, map) } } impl Serialize for WithTable { fn serialize(&self, serializer: S) -> Result { let mut ts = serializer.serialize_tuple_struct("WithTable", 2)?; use serde::ser::SerializeTupleStruct; ts.serialize_field(&self.table)?; ts.serialize_field(&self.value)?; ts.end() } } /// The deserializer of `WithTable` is special. We first decode the /// table in order: each `(Id, Value)` pair of the table populates the /// global table state found in `DESERIALIZATION_STATE`. Only then we /// can decode the value itself, knowing `DESERIALIZATION_STATE` is /// complete. 
impl<'de, T: Deserialize<'de>> serde::Deserialize<'de> for WithTable { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let _lock: MutexGuard<_> = DESERIALIZATION_STATE_LOCK.try_lock().expect("CACHE_MAP_LOCK: only one WithTable deserialization can occur at a time (nesting is forbidden)"); use serde_repr::WithTableRepr; let previous = std::mem::take(&mut *DESERIALIZATION_STATE.lock().unwrap()); let with_table_repr = WithTableRepr::deserialize(deserializer); *DESERIALIZATION_STATE.lock().unwrap() = previous; let WithTableRepr(table, value) = with_table_repr?; Ok(Self { table, value }) } } /// Defines representations for various types when serializing or/and /// deserializing via serde mod serde_repr { use super::*; #[derive(Serialize, Deserialize, JsonSchema, Debug)] pub(super) struct NodeRepr { id: Id, value: Option>, } #[derive(Serialize)] pub(super) struct Pair(Id, Value); pub(super) type SortedIdValuePairs = Vec; #[derive(Serialize, Deserialize)] pub(super) struct WithTableRepr(pub(super) Table, pub(super) T); impl> Into> for Node { fn into(self) -> NodeRepr { let value = if serialize_use_id() { None } else { Some(self.value.clone()) }; let id = self.id; NodeRepr { value, id } } } impl> TryFrom> for Node { type Error = serde::de::value::Error; fn try_from(cached: NodeRepr) -> Result { use serde::de::Error; let table = DESERIALIZATION_STATE.lock().unwrap(); let id = cached.id; let kind = if let Some(kind) = cached.value { kind } else { table .0 .get(&id) .ok_or_else(|| { Self::Error::custom(&format!( "Stateful deserialization failed for id {:?}: not found in cache", id )) })? .ok_or_else(|| { Self::Error::custom(&format!( "Stateful deserialization failed for id {:?}: wrong type", id )) })? 
}; Ok(Self { value: kind, id }) } } impl<'de> serde::Deserialize<'de> for Pair { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let (id, v) = <(Id, Value)>::deserialize(deserializer)?; DESERIALIZATION_STATE .lock() .unwrap() .0 .insert_raw_value(id.clone(), v.clone()); Ok(Pair(id, v)) } } impl Into for Table { fn into(self) -> SortedIdValuePairs { let mut vec: Vec<_> = self.0.into_iter().map(|(x, y)| Pair(x, y)).collect(); vec.sort_by_key(|o| o.0.clone()); vec } } impl From for Table { fn from(t: SortedIdValuePairs) -> Self { Self(HeterogeneousMap::from_iter( t.into_iter().map(|Pair(x, y)| (x, y)), )) } } } ================================================ FILE: frontend/exporter/src/index_vec.rs ================================================ use crate::prelude::*; #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct IndexVec { pub raw: Vec, _marker: std::marker::PhantomData, } impl IndexVec { pub fn into_iter(self) -> impl DoubleEndedIterator + ExactSizeIterator { self.raw.into_iter() } } #[cfg(feature = "rustc")] impl IndexVec { pub fn into_iter_enumerated( self, ) -> impl DoubleEndedIterator + ExactSizeIterator { rustc_index::IndexVec::from_raw(self.raw).into_iter_enumerated() } } #[cfg(feature = "rustc")] impl std::ops::Deref for IndexVec { type Target = rustc_index::IndexSlice; fn deref(&self) -> &Self::Target { Self::Target::from_raw(&self.raw) } } #[cfg(feature = "rustc")] impl std::ops::DerefMut for IndexVec { fn deref_mut(&mut self) -> &mut Self::Target { Self::Target::from_raw_mut(&mut self.raw) } } #[cfg(feature = "rustc")] impl From> for IndexVec { fn from(val: rustc_index::IndexVec) -> Self { IndexVec { raw: val.raw, _marker: std::marker::PhantomData, } } } #[cfg(feature = "rustc")] impl, U: Clone, T: SInto> SInto> for rustc_index::IndexSlice { fn sinto(&self, s: &S) -> IndexVec { IndexVec { raw: self.raw.sinto(s), _marker: std::marker::PhantomData, } } } 
#[cfg(feature = "rustc")]
impl<I, T> FromIterator<T> for IndexVec<I, T>
where
    I: rustc_index::Idx,
{
    #[inline]
    fn from_iter<It: IntoIterator<Item = T>>(iter: It) -> Self {
        Self {
            raw: Vec::from_iter(iter),
            _marker: std::marker::PhantomData,
        }
    }
}

/// Generates a serializable single-variant wrapper enum around `usize`
/// mirroring a rustc index type, together with (under the `rustc`
/// feature) an `Idx` impl and an `SInto` bridge from the original type.
macro_rules! make_idx_wrapper {
    ($($mod:ident)::+, $type:ident) => {
        #[derive_group(Serializers)]
        #[derive(Copy, Clone, Eq, Debug, Hash, PartialEq, PartialOrd, Ord, JsonSchema)]
        #[serde(untagged)]
        pub enum $type {
            $type(usize),
        }
        #[cfg(feature = "rustc")]
        const _: () = {
            use rustc_index::Idx;
            type OriginalType = $($mod::)+$type;
            impl Idx for $type {
                fn new(idx: usize) -> Self {
                    $type::$type(idx)
                }
                fn index(self) -> usize {
                    let $type::$type(x) = self;
                    x.index()
                }
            }
            impl<S> SInto<S, $type> for OriginalType {
                fn sinto(&self, _s: &S) -> $type {
                    $type::new(self.index())
                }
            }
        };
    };
}
pub(crate) use make_idx_wrapper;

================================================ FILE: frontend/exporter/src/lib.rs ================================================

#![allow(rustdoc::private_intra_doc_links)]
#![cfg_attr(feature = "rustc", feature(if_let_guard))]
#![cfg_attr(feature = "rustc", feature(macro_metavar_expr))]
#![cfg_attr(feature = "rustc", feature(rustc_private))]
#![cfg_attr(feature = "rustc", feature(sized_hierarchy))]
#![cfg_attr(feature = "rustc", feature(trait_alias))]
#![cfg_attr(feature = "rustc", feature(type_changing_struct_update))]

/// Wraps every item in `#[cfg(feature = "rustc")]`, so the rustc-only
/// parts of the crate can be declared in one block.
macro_rules! cfg_feature_rustc {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "rustc")]
            $item
        )*
    }
}

cfg_feature_rustc! {
    // When the feature `rustc` is enabled, we enable the bridges
    // between rustc ASTs, which are defined in the crates
We thus need to import them with `extern crate // rustc_*` extern crate rustc_abi; extern crate rustc_ast; extern crate rustc_ast_pretty; extern crate rustc_apfloat; extern crate rustc_const_eval; extern crate rustc_data_structures; extern crate rustc_driver; extern crate rustc_hashes; extern crate rustc_errors; extern crate rustc_hir; extern crate rustc_hir_analysis; extern crate rustc_index; extern crate rustc_infer; extern crate rustc_interface; extern crate rustc_middle; extern crate rustc_mir_build; extern crate rustc_session; extern crate rustc_span; extern crate rustc_target; extern crate rustc_trait_selection; extern crate rustc_type_ir; extern crate rustc_lexer; mod rustc_utils; pub mod state; mod utils; mod deterministic_hash; pub mod comments; } mod body; mod constant_utils; pub mod id_table; mod types; mod index_vec; mod prelude; pub use hax_frontend_exporter_options as options; pub use prelude::*; mod sinto; mod traits; pub use hax_adt_into::AdtInto; pub use sinto::SInto; ================================================ FILE: frontend/exporter/src/prelude.rs ================================================ pub use crate::*; pub use schemars::{JsonSchema, schema_for}; pub use serde::{Deserialize, Serialize}; pub use std::collections::HashMap; pub use std::path::PathBuf; pub use std::rc::Rc; pub use crate::body::*; pub use crate::constant_utils::*; pub use crate::id_table; pub use crate::index_vec::*; pub use crate::traits::*; pub use crate::types::*; #[cfg(feature = "rustc")] pub use self::rustc::*; #[cfg(feature = "rustc")] pub mod rustc { pub use crate::rustc_utils::*; pub use crate::state::*; pub use crate::utils::*; } pub(crate) use hax_adt_into::derive_group; ================================================ FILE: frontend/exporter/src/rustc_utils.rs ================================================ use crate::prelude::*; use rustc_hir::def::DefKind as RDefKind; use rustc_middle::{mir, ty}; pub fn inst_binder<'tcx, T>( tcx: ty::TyCtxt<'tcx>, 
typing_env: ty::TypingEnv<'tcx>, args: Option>, x: ty::EarlyBinder<'tcx, T>, ) -> T where T: ty::TypeFoldable> + Clone, { match args { None => x.instantiate_identity(), Some(args) => tcx.normalize_erasing_regions(typing_env, x.instantiate(tcx, args)), } } pub fn substitute<'tcx, T>( tcx: ty::TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, args: Option>, x: T, ) -> T where T: ty::TypeFoldable>, { inst_binder(tcx, typing_env, args, ty::EarlyBinder::bind(x)) } #[extension_traits::extension(pub trait SubstBinder)] impl<'tcx, T: ty::TypeFoldable>> ty::Binder<'tcx, T> { fn subst( self, tcx: ty::TyCtxt<'tcx>, generics: &[ty::GenericArg<'tcx>], ) -> ty::Binder<'tcx, T> { ty::EarlyBinder::bind(self).instantiate(tcx, generics) } } /// Whether the item can have generic parameters. pub(crate) fn can_have_generics<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> bool { use RDefKind::*; match get_def_kind(tcx, def_id) { Mod | ConstParam | TyParam | LifetimeParam | Macro(..) | ExternCrate | Use | ForeignMod | GlobalAsm => false, _ => true, } } #[tracing::instrument(skip(s))] pub(crate) fn get_variant_information<'s, S: UnderOwnerState<'s>>( adt_def: &ty::AdtDef<'s>, variant_index: rustc_abi::VariantIdx, s: &S, ) -> VariantInformations { fn is_named<'s, I: std::iter::Iterator + Clone>(it: I) -> bool { it.clone() .any(|field| field.name.to_ident_string().parse::().is_err()) } let variant_def = adt_def.variant(variant_index); let variant = variant_def.def_id; let constructs_type: DefId = adt_def.did().sinto(s); let kind = if adt_def.is_struct() { let named = is_named(adt_def.all_fields()); VariantKind::Struct { named } } else if adt_def.is_union() { VariantKind::Union } else { let named = is_named(variant_def.fields.iter()); let index = variant_index.into(); VariantKind::Enum { index, named } }; VariantInformations { typ: constructs_type.clone(), variant: variant.sinto(s), kind, type_namespace: match &constructs_type.parent { Some(parent) => parent.clone(), None => { let span = 
s.base().tcx.def_span(variant); fatal!( s[span], "Type {:#?} appears to have no parent", constructs_type ) } }, } } #[tracing::instrument(skip(sess))] pub fn translate_span(span: rustc_span::Span, sess: &rustc_session::Session) -> Span { let smap: &rustc_span::source_map::SourceMap = sess.psess.source_map(); let filename = smap.span_to_filename(span); let lo = smap.lookup_char_pos(span.lo()); let hi = smap.lookup_char_pos(span.hi()); Span { lo: lo.into(), hi: hi.into(), filename: filename.sinto(&()), rust_span_data: Some(span.data()), } } pub trait HasParamEnv<'tcx> { fn param_env(&self) -> ty::ParamEnv<'tcx>; fn typing_env(&self) -> ty::TypingEnv<'tcx>; } impl<'tcx, S: UnderOwnerState<'tcx>> HasParamEnv<'tcx> for S { fn param_env(&self) -> ty::ParamEnv<'tcx> { let tcx = self.base().tcx; let def_id = self.owner_id(); if can_have_generics(tcx, def_id) { tcx.param_env(def_id) } else { ty::ParamEnv::empty() } } fn typing_env(&self) -> ty::TypingEnv<'tcx> { ty::TypingEnv { param_env: self.param_env(), typing_mode: ty::TypingMode::PostAnalysis, } } } #[tracing::instrument(skip(s))] pub(crate) fn attribute_from_scope<'tcx, S: ExprState<'tcx>>( s: &S, scope: &rustc_middle::middle::region::Scope, ) -> (Option, Vec) { let owner = s.owner_id(); let tcx = s.base().tcx; let scope_tree = tcx.region_scope_tree(owner); let hir_id = scope.hir_id(scope_tree); let tcx = s.base().tcx; let attributes = hir_id .map(|hir_id| tcx.hir_attrs(hir_id).sinto(s)) .unwrap_or_default(); (hir_id, attributes) } /// Gets the closest ancestor of `id` that is the id of a type. pub fn get_closest_parent_type( tcx: &ty::TyCtxt, id: rustc_span::def_id::DefId, ) -> rustc_span::def_id::DefId { match tcx.def_kind(id) { rustc_hir::def::DefKind::Union | rustc_hir::def::DefKind::Struct | rustc_hir::def::DefKind::Enum => id, _ => get_closest_parent_type(tcx, tcx.parent(id)), } } /// Gets the visibility (`pub` or not) of the definition. Returns `None` for defs that don't have a /// meaningful visibility. 
pub fn get_def_visibility<'tcx>( tcx: ty::TyCtxt<'tcx>, def_id: RDefId, def_kind: RDefKind, ) -> Option { use RDefKind::*; match def_kind { AssocConst | AssocFn | Const | Enum | Field | Fn | ForeignTy | Macro { .. } | Mod | Static { .. } | Struct | Trait | TraitAlias | TyAlias { .. } | Union | Use | Variant => Some(tcx.visibility(def_id).is_public()), // These kinds don't have visibility modifiers (which would cause `visibility` to panic). AnonConst | AssocTy | Closure | ConstParam | Ctor { .. } | ExternCrate | ForeignMod | GlobalAsm | Impl { .. } | InlineConst | LifetimeParam | OpaqueTy | SyntheticCoroutineBody | TyParam => None, } } /// Gets the attributes of the definition. pub fn get_def_attrs<'tcx>( tcx: ty::TyCtxt<'tcx>, def_id: RDefId, def_kind: RDefKind, ) -> &'tcx [rustc_hir::Attribute] { if let Some(ldid) = def_id.as_local() { tcx.hir_attrs(tcx.local_def_id_to_hir_id(ldid)) } else { match def_kind { // These kinds cause `get_attrs` to panic. RDefKind::ConstParam | RDefKind::LifetimeParam | RDefKind::ForeignMod | RDefKind::TyParam => &[], _ => tcx.attrs_for_def(def_id), } } } /// Gets the children of a module. pub fn get_mod_children<'tcx>( tcx: ty::TyCtxt<'tcx>, def_id: RDefId, ) -> Vec<(Option, RDefId)> { match def_id.as_local() { Some(ldid) => match tcx.hir_node_by_def_id(ldid) { rustc_hir::Node::Crate(m) | rustc_hir::Node::Item(&rustc_hir::Item { kind: rustc_hir::ItemKind::Mod(_, m), .. }) => m .item_ids .iter() .map(|&item_id| { let opt_ident = tcx.hir_item(item_id).kind.ident(); let def_id = item_id.owner_id.to_def_id(); (opt_ident, def_id) }) .collect(), node => panic!("DefKind::Module is an unexpected node: {node:?}"), }, None => tcx .module_children(def_id) .iter() .map(|child| (Some(child.ident), child.res.def_id())) .collect(), } } /// Gets the children of an `extern` block. Empty if the block is not defined in the current crate. 
pub fn get_foreign_mod_children<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> Vec { match def_id.as_local() { Some(ldid) => tcx .hir_node_by_def_id(ldid) .expect_item() .expect_foreign_mod() .1 .iter() .map(|foreign_item_ref| foreign_item_ref.owner_id.to_def_id()) .collect(), None => vec![], } } /// The signature of a method impl may be a subtype of the one expected from the trait decl, as in /// the example below. For correctness, we must be able to map from the method generics declared in /// the trait to the actual method generics. Because this would require type inference, we instead /// simply return the declared signature. This will cause issues if it is possible to use such a /// more-specific implementation with its more-specific type, but we have a few other issues with /// lifetime-generic function pointers anyway so this is unlikely to cause problems. /// /// ```ignore /// trait MyCompare: Sized { /// fn compare(self, other: Other) -> bool; /// } /// impl<'a> MyCompare<&'a ()> for &'a () { /// // This implementation is more general because it works for non-`'a` refs. Note that only /// // late-bound vars may differ in this way. /// // `<&'a () as MyCompare<&'a ()>>::compare` has type `fn<'b>(&'a (), &'b ()) -> bool`, /// // but type `fn(&'a (), &'a ()) -> bool` was expected from the trait declaration. /// fn compare<'b>(self, _other: &'b ()) -> bool { /// true /// } /// } /// ``` pub fn get_method_sig<'tcx>( tcx: ty::TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, def_id: RDefId, method_args: Option>, ) -> ty::PolyFnSig<'tcx> { let real_sig = inst_binder(tcx, typing_env, method_args, tcx.fn_sig(def_id)); let item = tcx.associated_item(def_id); let ty::AssocContainer::TraitImpl(Ok(decl_method_id)) = item.container else { return real_sig; }; let declared_sig = tcx.fn_sig(decl_method_id); // TODO(Nadrieril): Temporary hack: if the signatures have the same number of bound vars, we // keep the real signature. 
While the declared signature is more correct, it is also less // normalized and we can't normalize without erasing regions but regions are crucial in // function signatures. Hence we cheat here, until charon gains proper normalization // capabilities. if declared_sig.skip_binder().bound_vars().len() == real_sig.bound_vars().len() { return real_sig; } let impl_def_id = item.container_id(tcx); let method_args = method_args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id)); // The trait predicate that is implemented by the surrounding impl block. let implemented_trait_ref = tcx .impl_trait_ref(impl_def_id) .instantiate(tcx, method_args); // Construct arguments for the declared method generics in the context of the implemented // method generics. let decl_args = method_args.rebase_onto(tcx, impl_def_id, implemented_trait_ref.args); let sig = declared_sig.instantiate(tcx, decl_args); // Avoids accidentally using the same lifetime name twice in the same scope // (once in impl parameters, second in the method declaration late-bound vars). let sig = tcx.anonymize_bound_vars(sig); normalize(tcx, typing_env, sig) } /// Generates a list of `::Ty` type aliases for each non-gat associated type of the /// given trait and its parents, in a specific order. pub fn assoc_tys_for_trait<'tcx>( tcx: ty::TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, tref: ty::TraitRef<'tcx>, ) -> Vec> { fn gather_assoc_tys<'tcx>( tcx: ty::TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, assoc_tys: &mut Vec>, tref: ty::TraitRef<'tcx>, ) { assoc_tys.extend( tcx.associated_items(tref.def_id) .in_definition_order() .filter(|assoc| matches!(assoc.kind, ty::AssocKind::Type { .. 
})) .filter(|assoc| tcx.generics_of(assoc.def_id).own_params.is_empty()) .map(|assoc| ty::AliasTy::new(tcx, assoc.def_id, tref.args)), ); for clause in tcx .explicit_super_predicates_of(tref.def_id) .map_bound(|clauses| clauses.iter().map(|(clause, _span)| *clause)) .iter_instantiated(tcx, tref.args) { if let Some(pred) = clause.as_trait_clause() { let tref = erase_and_norm(tcx, typing_env, pred.skip_binder().trait_ref); gather_assoc_tys(tcx, typing_env, assoc_tys, tref); } } } let mut ret = vec![]; gather_assoc_tys(tcx, typing_env, &mut ret, tref); ret } /// Generates a `dyn Trait::Ty..>` type for the given trait ref. pub fn dyn_self_ty<'tcx>( tcx: ty::TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>, tref: ty::TraitRef<'tcx>, ) -> Option> { let re_erased = tcx.lifetimes.re_erased; if !tcx.is_dyn_compatible(tref.def_id) { return None; } // The main `Trait` predicate. let main_pred = ty::Binder::dummy(ty::ExistentialPredicate::Trait( ty::ExistentialTraitRef::erase_self_ty(tcx, tref), )); let ty_constraints = assoc_tys_for_trait(tcx, typing_env, tref) .into_iter() .map(|alias_ty| { let proj = ty::ProjectionPredicate { projection_term: alias_ty.into(), term: ty::Ty::new_alias(tcx, ty::Projection, alias_ty).into(), }; let proj = ty::ExistentialProjection::erase_self_ty(tcx, proj); ty::Binder::dummy(ty::ExistentialPredicate::Projection(proj)) }); let preds = { // Stable sort predicates to prevent platform-specific ordering issues let mut preds: Vec<_> = [main_pred].into_iter().chain(ty_constraints).collect(); preds.sort_by(|a, b| { use crate::rustc_middle::ty::ExistentialPredicateStableCmpExt; a.skip_binder().stable_cmp(tcx, &b.skip_binder()) }); tcx.mk_poly_existential_predicates(&preds) }; let ty = tcx.mk_ty_from_kind(ty::Dynamic(preds, re_erased)); let ty = normalize(tcx, typing_env, ty); Some(ty) } pub fn closure_once_shim<'tcx>( tcx: ty::TyCtxt<'tcx>, closure_ty: ty::Ty<'tcx>, ) -> Option> { let ty::Closure(def_id, args) = closure_ty.kind() else { unreachable!() }; 
let instance = match args.as_closure().kind() { ty::ClosureKind::Fn | ty::ClosureKind::FnMut => { ty::Instance::fn_once_adapter_instance(tcx, *def_id, args) } ty::ClosureKind::FnOnce => return None, }; let mir = tcx.instance_mir(instance.def).clone(); let mir = ty::EarlyBinder::bind(mir).instantiate(tcx, instance.args); Some(mir) } pub fn drop_glue_shim<'tcx>( tcx: ty::TyCtxt<'tcx>, def_id: RDefId, instantiate: Option>, ) -> Option> { let drop_in_place = tcx.require_lang_item(rustc_hir::LangItem::DropInPlace, rustc_span::DUMMY_SP); let ty = tcx.type_of(def_id); let ty = match instantiate { None => { if !tcx.generics_of(def_id).is_empty() { // Hack: layout code panics if it can't fully normalize types, which can happen e.g. with a // trait associated type. For now we only translate the glue for monomorphic types. return None; } ty.instantiate_identity() } Some(args) => ty.instantiate(tcx, args), }; let instance_kind = ty::InstanceKind::DropGlue(drop_in_place, Some(ty)); let mir = tcx.instance_mir(instance_kind).clone(); Some(mir) } ================================================ FILE: frontend/exporter/src/sinto.rs ================================================ #[cfg(not(feature = "rustc"))] pub trait SInto { fn sinto(&self, s: &S) -> To; } #[cfg(feature = "rustc")] pub trait SInto: std::marker::PointeeSized { fn sinto(&self, s: &S) -> To; } #[macro_export] macro_rules! sinto_todo { ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)? as $renamed:ident) => { #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum $renamed { $type { todo: String }, } #[cfg(feature = "rustc")] impl<$($($lts,)*)? S> SInto for $($mod)::+::$type$(<$($lts,)*>)? { fn sinto(&self, _: &S) -> $renamed { $renamed::$type{todo: format!("{:?}", self)} } } }; ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)?) => { sinto_todo!($($mod)::+, $type$(<$($lts),*>)? as $type); } } #[macro_export] macro_rules! 
sinto_as_usize {
    ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)?) => {
        pub type $type = usize;
        #[cfg(feature = "rustc")]
        impl<$($($lts,)*)? S> SInto<S, $type> for $($mod)::+::$type$(<$($lts,)*>)? {
            fn sinto(&self, _: &S) -> $type {
                self.as_usize()
            }
        }
    }
}

// Structural `SInto` instances: pairs, triples, `Option`, smart
// pointers and sequences convert component-wise.
impl<S, LL, L: SInto<S, LL>, RR, R: SInto<S, RR>> SInto<S, (LL, RR)> for (L, R) {
    fn sinto(&self, s: &S) -> (LL, RR) {
        (self.0.sinto(s), self.1.sinto(s))
    }
}
impl<S, AA, A: SInto<S, AA>, BB, B: SInto<S, BB>, CC, C: SInto<S, CC>> SInto<S, (AA, BB, CC)>
    for (A, B, C)
{
    fn sinto(&self, s: &S) -> (AA, BB, CC) {
        (self.0.sinto(s), self.1.sinto(s), self.2.sinto(s))
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, Option<D>> for Option<T> {
    fn sinto(&self, s: &S) -> Option<D> {
        self.as_ref().map(|x| x.sinto(s))
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, D> for Box<T> {
    fn sinto(&self, s: &S) -> D {
        (**self).sinto(s)
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, D> for &T {
    fn sinto(&self, s: &S) -> D {
        (**self).sinto(s)
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, Vec<D>> for [T] {
    fn sinto(&self, s: &S) -> Vec<D> {
        self.iter().map(|x| x.sinto(s)).collect()
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, Vec<D>> for Box<[T]> {
    fn sinto(&self, s: &S) -> Vec<D> {
        self.into_iter().map(|x| x.sinto(s)).collect()
    }
}

impl<S, D, T: SInto<S, D>> SInto<S, Vec<D>> for Vec<T> {
    fn sinto(&self, s: &S) -> Vec<D> {
        self.iter().map(|x| x.sinto(s)).collect()
    }
}

/// Generates identity `SInto` instances (via `clone`) for plain types.
macro_rules! sinto_clone {
    ($t:ty) => {
        impl<S> SInto<S, $t> for $t {
            fn sinto(&self, _: &S) -> $t {
                self.clone()
            }
        }
    };
    ($t:ty, $($rest:tt)*) => {
        sinto_clone!($t);
        sinto_clone!($($rest)+);
    };
    () => {};
}
sinto_clone!(bool, String, char);
sinto_clone!(u8, u16, u32, u64, u128, usize);
sinto_clone!(i8, i16, i32, i64, i128, isize);

================================================ FILE: frontend/exporter/src/state.rs ================================================

use crate::prelude::*;
use paste::paste;

/// Helper for `mk!`: generates, for one state field, a `Has<Field>`
/// accessor trait and its implementation on the `State` struct.
macro_rules! mk_aux {
    ($state:ident {$($lts:lifetime)*} $field:ident {$($field_type:tt)+} {$($gen:tt)*} {$($gen_full:tt)*} {$($params:tt)*} {$($fields:tt)*}) => {
        paste! {
            pub trait [<Has $field:camel>]<$($lts,)*> {
                fn $field(self: &Self) -> $($field_type)+<$($lts)*>;
            }
            impl<$($lts,)*$($gen)*> [<Has $field:camel>]<$($lts,)*> for $state<$($params)*> {
                fn $field(self: &Self) -> $($field_type)+<$($lts)*> {
                    self.$field.clone()
                }
            }
        }
    };
}

macro_rules!
// `mk!` builds the `State` struct plus one `Has*` accessor trait per
// field (via `mk_aux!`), using an internal `__inner_helper!` macro to
// thread generic parameters. Incremental tt-muncher: the `@` arms
// accumulate fields, the `@@` arm emits the final items.
// NOTE(review): several `<...>` generic argument lists in this region
// appear to have been lost in extraction (e.g. `HashMap,`,
// `State, (), ...`); tokens are kept as-is — confirm against upstream.
mk {
    (struct $state:ident<$($glts:lifetime),*>
        {$($field:ident : {$($lts:lifetime),*} $field_type:ty),*$(,)?}
    ) => {
        mk!(@$state {} {$($field)*} {$($field: {$($lts),*} {$field_type},)*});
    };
    // Munch one field, pushing it onto the accumulator.
    (@$state:ident {$($acc:tt)*} $fields:tt {
        $field:ident : $lts:tt $field_type:tt
        $(,$($rest:tt)*)?
    }) => {mk!(@$state {
        $($acc)* $fields $field: $lts $field_type,
    } $fields {$($($rest)*)?} );};
    // No fields left: emit.
    (@$state:ident $body:tt $fields:tt {$(,)?}) => { mk! (@@ $state $body ); };
    (@@$state:ident {$({$($fields:tt)*} $field:ident : {$($lts:lifetime)*} {$($field_type:tt)+},)*}) => {
        paste! {
            #[derive(Clone)]
            pub struct $state<$([<$field:camel>],)*>{
                $(pub $field: [<$field:camel>],)*
            }
        }
        $(
            // `__inner_helper!` computes, per field, the generic
            // parameter lists needed by `mk_aux!` ($$ escapes one
            // level of macro expansion).
            macro_rules! __inner_helper {
                ($gen:tt {$$($full_gen:tt)*} {$$($params:tt)*} $field $$($rest:tt)*) => {
                    paste! {__inner_helper!(
                        $gen {$$($full_gen)*[<$field:camel>],}
                        {$$($params)*$($field_type)+<$($lts,)*>,}
                        $$($rest)*
                    );}
                };
                ({$$($gen:tt)*} {$$($full_gen:tt)*} {$$($params:tt)*} $i:ident $$($rest:tt)*) => {
                    paste! {__inner_helper!(
                        {$$($gen)*[<$i:camel>],}
                        {$$($full_gen)*[<$i:camel>],}
                        {$$($params)*[<$i:camel>],}
                        $$($rest)*
                    );}
                };
                ($gen:tt $full_gen:tt $params:tt $$(,)?) => {
                    mk_aux!($state {$($lts)*} $field {$($field_type)+} $gen $full_gen $params {$($fields)*});
                };
            }
            __inner_helper!({} {} {} $($fields)*);
        )*
    };
}

mod types {
    use crate::prelude::*;
    use rustc_middle::ty;
    use std::{cell::RefCell, sync::Arc};

    // Per-body local variable naming context.
    pub struct LocalContextS {
        // NOTE(review): key/value types of this map were stripped by
        // extraction — confirm against upstream.
        pub vars: HashMap,
    }

    impl Default for LocalContextS {
        fn default() -> Self {
            Self::new()
        }
    }

    impl LocalContextS {
        pub fn new() -> LocalContextS {
            LocalContextS {
                vars: HashMap::new(),
            }
        }
    }

    /// Global caches
    #[derive(Default)]
    pub struct GlobalCache<'tcx> {
        /// Cache the `Span` translations.
        pub spans: HashMap,
        /// Per-item cache.
        pub per_item: HashMap>,
        /// A ID table session, providing fresh IDs.
        pub id_table_session: id_table::Session,
        /// Map that recovers rustc args for a given `ItemRef`.
        pub reverse_item_refs_map: HashMap>,
        /// We create some artificial items; their def_ids are stored here. See the
        /// `synthetic_items` module.
        pub synthetic_def_ids: HashMap,
        pub reverse_synthetic_map: HashMap,
    }

    /// Defines a mapping from types to types, for use with `TypeMap`.
    pub struct FullDefMapper;
    impl TypeMapper for FullDefMapper {
        type Value = Arc>;
    }

    /// Per-item cache
    #[derive(Default)]
    pub struct ItemCache<'tcx> {
        /// The translated `DefId`.
        pub def_id: Option,
        /// The translated definitions, generic in the Body kind.
        /// Each rustc `DefId` gives several hax `DefId`s: one for each promoted constant (if any),
        /// and the base one represented by `None`. Moreover we can instantiate definitions with
        /// generic arguments.
        pub full_defs: HashMap<(Option, Option>), TypeMap>,
        /// Cache the `Ty` translations.
        pub tys: HashMap, Ty>,
        /// Cache the `ItemRef` translations. This is fast because `GenericArgsRef` is interned.
        pub item_refs: HashMap<(RDefId, ty::GenericArgsRef<'tcx>), ItemRef>,
        /// Cache the trait resolution engine for each item.
        pub predicate_searcher: Option>,
        /// Cache of trait refs to resolved impl expressions.
        pub impl_exprs: HashMap, crate::traits::ImplExpr>,
    }

    // The base state shared by every translation: options, caches and
    // the type context.
    #[derive(Clone)]
    pub struct Base<'tcx> {
        pub options: Rc,
        pub local_ctx: Rc>,
        pub opt_def_id: Option,
        pub cache: Rc>>,
        pub tcx: ty::TyCtxt<'tcx>,
        /// Silence the warnings in case of trait resolution failure.
        pub silence_resolution_errors: bool,
    }

    impl<'tcx> Base<'tcx> {
        pub fn new(
            tcx: rustc_middle::ty::TyCtxt<'tcx>,
            options: hax_frontend_exporter_options::Options,
        ) -> Self {
            Self {
                tcx,
                cache: Default::default(),
                options: Rc::new(options),
                // Always prefer `s.owner_id()` to `s.base().opt_def_id`.
                // `opt_def_id` is used in `utils` for error reporting
                opt_def_id: None,
                local_ctx: Rc::new(RefCell::new(LocalContextS::new())),
                silence_resolution_errors: false,
            }
        }
    }

    pub type MacroCalls = Rc>;
    pub type RcThir<'tcx> = Rc>;
    pub type RcMir<'tcx> = Rc>;
    pub type UnitBinder<'tcx> = rustc_middle::ty::Binder<'tcx, ()>;
}

// Generate `State` and the `HasBase`/`HasOwnerId`/`HasThir`/… traits.
mk!(
    struct State<'tcx> {
        base: {'tcx} types::Base,
        owner_id: {} rustc_hir::def_id::DefId,
        thir: {'tcx} types::RcThir,
        mir: {'tcx} types::RcMir,
        binder: {'tcx} types::UnitBinder,
        ty: {'tcx} rustc_middle::ty::Ty,
    }
);

pub use self::types::*;

// Concrete `State` instantiations, with `()` for absent components.
pub type StateWithBase<'tcx> = State, (), (), (), (), ()>;
pub type StateWithOwner<'tcx> = State, rustc_hir::def_id::DefId, (), (), (), ()>;
pub type StateWithBinder<'tcx> =
    State, rustc_hir::def_id::DefId, (), (), types::UnitBinder<'tcx>, ()>;
pub type StateWithThir<'tcx> =
    State, rustc_hir::def_id::DefId, types::RcThir<'tcx>, (), (), ()>;
pub type StateWithThirAndTy<'tcx> = State<
    Base<'tcx>,
    rustc_hir::def_id::DefId,
    types::RcThir<'tcx>,
    (),
    (),
    rustc_middle::ty::Ty<'tcx>,
>;
pub type StateWithMir<'tcx> =
    State, rustc_hir::def_id::DefId, (), types::RcMir<'tcx>, (), ()>;

impl<'tcx> StateWithBase<'tcx> {
    pub fn new(
        tcx: rustc_middle::ty::TyCtxt<'tcx>,
        options: hax_frontend_exporter_options::Options,
    ) -> Self {
        Self {
            base: Base::new(tcx, options),
            owner_id: (),
            thir: (),
            mir: (),
            binder: (),
            ty: (),
        }
    }
}

pub trait BaseState<'tcx>: HasBase<'tcx> + Clone {
    /// Updates the OwnerId in a state, making sure to override `opt_def_id` in base as well.
    fn with_owner_id(&self, owner_id: rustc_hir::def_id::DefId) -> StateWithOwner<'tcx> {
        let mut base = self.base();
        base.opt_def_id = Some(owner_id);
        State {
            owner_id,
            base,
            thir: (),
            mir: (),
            binder: (),
            ty: (),
        }
    }
}
impl<'tcx, T: HasBase<'tcx> + Clone> BaseState<'tcx> for T {}

/// State of anything below an `owner`.
pub trait UnderOwnerState<'tcx>: BaseState<'tcx> + HasOwnerId { fn with_base(&self, base: types::Base<'tcx>) -> StateWithOwner<'tcx> { State { owner_id: self.owner_id(), base, thir: (), mir: (), binder: (), ty: (), } } fn with_binder(&self, binder: types::UnitBinder<'tcx>) -> StateWithBinder<'tcx> { State { base: self.base(), owner_id: self.owner_id(), binder, thir: (), mir: (), ty: (), } } fn with_thir(&self, thir: types::RcThir<'tcx>) -> StateWithThir<'tcx> { State { base: self.base(), owner_id: self.owner_id(), thir, mir: (), binder: (), ty: (), } } fn with_mir(&self, mir: types::RcMir<'tcx>) -> StateWithMir<'tcx> { State { base: self.base(), owner_id: self.owner_id(), mir, thir: (), binder: (), ty: (), } } } impl<'tcx, T: BaseState<'tcx> + HasOwnerId> UnderOwnerState<'tcx> for T {} /// State of anything below a binder. pub trait UnderBinderState<'tcx> = UnderOwnerState<'tcx> + HasBinder<'tcx>; /// While translating expressions, we expect to always have a THIR /// body and an `owner_id` in the state pub trait ExprState<'tcx>: UnderOwnerState<'tcx> + HasThir<'tcx> { fn with_ty(&self, ty: rustc_middle::ty::Ty<'tcx>) -> StateWithThirAndTy<'tcx> { State { base: self.base(), owner_id: self.owner_id(), thir: self.thir(), mir: (), binder: (), ty, } } } impl<'tcx, T> ExprState<'tcx> for T where T: UnderOwnerState<'tcx> + HasThir<'tcx> {} pub trait WithGlobalCacheExt<'tcx>: BaseState<'tcx> { /// Access the global cache. You must not call `sinto` within this function as this will likely /// result in `BorrowMut` panics. fn with_global_cache(&self, f: impl FnOnce(&mut GlobalCache<'tcx>) -> T) -> T { let base = self.base(); let mut cache = base.cache.borrow_mut(); f(&mut *cache) } /// Access the cache for a given item. You must not call `sinto` within this function as this /// will likely result in `BorrowMut` panics. 
fn with_item_cache(&self, def_id: RDefId, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T { self.with_global_cache(|cache| f(cache.per_item.entry(def_id).or_default())) } } impl<'tcx, S: BaseState<'tcx>> WithGlobalCacheExt<'tcx> for S {} pub trait WithItemCacheExt<'tcx>: UnderOwnerState<'tcx> { /// Access the cache for the current item. You must not call `sinto` within this function as /// this will likely result in `BorrowMut` panics. fn with_cache(&self, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T { self.with_item_cache(self.owner_id(), f) } fn with_predicate_searcher(&self, f: impl FnOnce(&mut PredicateSearcher<'tcx>) -> T) -> T { self.with_cache(|cache| { f(cache.predicate_searcher.get_or_insert_with(|| { PredicateSearcher::new_for_owner( self.base().tcx, self.owner_id(), self.base().options.bounds_options, ) })) }) } } impl<'tcx, S: UnderOwnerState<'tcx>> WithItemCacheExt<'tcx> for S {} impl ImplInfos { fn from<'tcx, S: BaseState<'tcx>>(s: &S, did: rustc_hir::def_id::DefId) -> Self { let tcx = s.base().tcx; let s = &s.with_owner_id(did); Self { generics: tcx.generics_of(did).sinto(s), typ: tcx.type_of(did).instantiate_identity().sinto(s), trait_ref: match tcx.def_kind(did) { rustc_hir::def::DefKind::Impl { of_trait: true } => { Some(tcx.impl_trait_ref(did).instantiate_identity().sinto(s)) } _ => None, }, clauses: predicates_defined_on(tcx, did).as_ref().sinto(s), } } } /// Returns a map from every implementation (`Impl`) `DefId`s to the /// type they implement, plus the bounds. 
pub fn impl_def_ids_to_impled_types_and_bounds<'tcx, S: BaseState<'tcx>>(
    s: &S,
) -> HashMap {
    let tcx = s.base().tcx;
    // Start from every item we translated (keys of the per-item cache)...
    let def_ids: Vec<_> = s.with_global_cache(|cache| cache.per_item.keys().copied().collect());
    // ...and close the set under "parent of": impls may only appear as ancestors.
    let with_parents = |mut did: rustc_hir::def_id::DefId| {
        let mut acc = vec![did];
        while let Some(parent) = tcx.opt_parent(did) {
            did = parent;
            acc.push(did);
        }
        acc.into_iter()
    };
    use itertools::Itertools;
    def_ids
        .into_iter()
        .flat_map(with_parents)
        .unique()
        .filter(|&did| {
            // keep only DefIds that correspond to implementations
            matches!(
                tcx.def_path(did).data.last(),
                Some(rustc_hir::definitions::DisambiguatedDefPathData {
                    data: rustc_hir::definitions::DefPathData::Impl,
                    ..
                })
            )
        })
        .map(|did| (did.sinto(s), ImplInfos::from(s, did)))
        .collect()
}



================================================
FILE: frontend/exporter/src/traits/resolution.rs
================================================
//! Trait resolution: given a trait reference, we track which local clause caused it to be true.
//! This module is independent from the rest of hax, in particular it doesn't use its
//! state-tracking machinery.

use hax_frontend_exporter_options::BoundsOptions;
use itertools::{Either, Itertools};
use std::collections::{HashMap, hash_map::Entry};

use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_middle::traits::CodegenObligationError;
use rustc_middle::ty::{self, *};
use rustc_trait_selection::traits::ImplSource;

use super::utils::{
    self, ToPolyTraitRef, erase_and_norm, implied_predicates, normalize_bound_val,
    required_predicates, self_predicate,
};

/// One step in the path from a local clause to the predicate it proves
/// (either going through an associated item or through a parent predicate).
#[derive(Debug, Clone)]
pub enum PathChunk<'tcx> {
    AssocItem {
        item: AssocItem,
        /// The arguments provided to the item (for GATs). Includes trait args.
        generic_args: GenericArgsRef<'tcx>,
        /// The implemented predicate.
        predicate: PolyTraitPredicate<'tcx>,
        /// The index of this predicate in the list returned by `implied_predicates`.
        index: usize,
    },
    Parent {
        /// The implemented predicate.
        predicate: PolyTraitPredicate<'tcx>,
        /// The index of this predicate in the list returned by `implied_predicates`.
        index: usize,
    },
}
pub type Path<'tcx> = Vec>;

#[derive(Debug, Clone)]
pub enum ImplExprAtom<'tcx> {
    /// A concrete `impl Trait for Type {}` item.
    Concrete {
        def_id: DefId,
        generics: GenericArgsRef<'tcx>,
    },
    /// A context-bound clause like `where T: Trait`.
    LocalBound {
        predicate: Predicate<'tcx>,
        /// The nth (non-self) predicate found for this item. We use predicates from
        /// `required_predicates` starting from the parentmost item.
        index: usize,
        r#trait: PolyTraitRef<'tcx>,
        path: Path<'tcx>,
    },
    /// The automatic clause `Self: Trait` present inside a `impl Trait for Type {}` item.
    SelfImpl {
        r#trait: PolyTraitRef<'tcx>,
        path: Path<'tcx>,
    },
    /// `dyn Trait` is a wrapped value with a virtual table for trait
    /// `Trait`. In other words, a value `dyn Trait` is a dependent
    /// triple that gathers a type τ, a value of type τ and an
    /// instance of type `Trait`.
    /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that
    /// built-in implementation.
    Dyn,
    /// A built-in trait whose implementation is computed by the compiler, such as `FnMut`. This
    /// morally points to an invisible `impl` block; as such it contains the information we may
    /// need from one.
    Builtin {
        /// Extra data for the given trait.
        trait_data: BuiltinTraitData<'tcx>,
        /// The `ImplExpr`s required to satisfy the implied predicates on the trait declaration.
        /// E.g. since `FnMut: FnOnce`, a built-in `T: FnMut` impl would have an `ImplExpr` for `T:
        /// FnOnce`.
        impl_exprs: Vec>,
        /// The values of the associated types for this trait.
        types: Vec<(DefId, Ty<'tcx>, Vec>)>,
    },
    /// An error happened while resolving traits.
    Error(String),
}

#[derive(Debug, Clone)]
pub enum BuiltinTraitData<'tcx> {
    /// A virtual `Destruct` implementation.
    /// `Destruct` is implemented automatically for all types. For our purposes, we chose to attach
    /// the information about `drop_in_place` to that trait. This data tells us what kind of
    /// `drop_in_place` the target type has.
    Destruct(DestructData<'tcx>),
    /// Some other builtin trait.
    Other,
}

#[derive(Debug, Clone)]
pub enum DestructData<'tcx> {
    /// A drop that does nothing, e.g. for scalars and pointers.
    Noop,
    /// An implicit `Destruct` local clause, if the `resolve_destruct_bounds` option is `false`. If
    /// that option is `true`, we'll add `Destruct` bounds to every type param, and use that to
    /// resolve `Destruct` impls of generics. If it's `false`, we use this variant to indicate that
    /// the clause comes from a generic or associated type.
    Implicit,
    /// The `drop_in_place` is known and non-trivial.
    Glue {
        /// The type we're generating glue for.
        ty: Ty<'tcx>,
    },
}

#[derive(Clone, Debug)]
pub struct ImplExpr<'tcx> {
    /// The trait this is an impl for.
    pub r#trait: PolyTraitRef<'tcx>,
    /// The kind of implementation of the root of the tree.
    pub r#impl: ImplExprAtom<'tcx>,
}

/// Items have various predicates in scope. `path_to` uses them as a starting point for trait
/// resolution. This tracks where each of them comes from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum BoundPredicateOrigin {
    /// The `Self: Trait` predicate implicitly present within trait declarations (note: we
    /// don't add it for trait implementations, should we?).
    SelfPred,
    /// The nth (non-self) predicate found for this item. We use predicates from
    /// `required_predicates` starting from the parentmost item.
    Item(usize),
}

/// A trait predicate together with the record of where it came from.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct AnnotatedTraitPred<'tcx> {
    pub origin: BoundPredicateOrigin,
    pub clause: PolyTraitPredicate<'tcx>,
}

/// Returns the predicate to resolve as `Self`, if that makes sense in the current item.
/// Currently this predicate is only used inside trait declarations and their associated types.
fn initial_self_pred<'tcx>(
    tcx: TyCtxt<'tcx>,
    def_id: rustc_span::def_id::DefId,
) -> Option> {
    use DefKind::*;
    let trait_def_id = match tcx.def_kind(def_id) {
        Trait | TraitAlias => def_id,
        // Associated types can refer to the implicit `Self` clause. For methods and associated
        // consts we pass an explicit `Self: Trait` clause to make the corresponding item
        // reusable.
        AssocTy => tcx.parent(def_id),
        _ => return None,
    };
    let self_pred = self_predicate(tcx, trait_def_id).upcast(tcx);
    Some(self_pred)
}

/// The predicates to use as a starting point for resolving trait references within this item. This
/// includes the `required_predicates` of this item and all its parents.
fn local_bound_predicates<'tcx>(
    tcx: TyCtxt<'tcx>,
    def_id: rustc_span::def_id::DefId,
    options: BoundsOptions,
) -> Vec> {
    // Recursive worker: pushes parent predicates first, so that the parentmost
    // predicates come first in the output (matching the indexing used by
    // `BoundPredicateOrigin::Item`).
    fn acc_predicates<'tcx>(
        tcx: TyCtxt<'tcx>,
        def_id: rustc_span::def_id::DefId,
        options: BoundsOptions,
        predicates: &mut Vec>,
    ) {
        use DefKind::*;
        match tcx.def_kind(def_id) {
            // These inherit predicates from their parent.
            AssocTy | AssocFn | AssocConst | Closure | Ctor(..) | Variant => {
                let parent = tcx.parent(def_id);
                acc_predicates(tcx, parent, options, predicates);
            }
            _ => {}
        }
        predicates.extend(
            required_predicates(tcx, def_id, options)
                .iter()
                .map(|(clause, _span)| *clause)
                .filter_map(|clause| clause.as_trait_clause()),
        );
    }
    let mut predicates = vec![];
    acc_predicates(tcx, def_id, options, &mut predicates);
    predicates
}

/// The trait predicates implied by `pred` (its parent/supertrait predicates),
/// instantiated so that they make sense in the context where `pred` holds.
#[tracing::instrument(level = "trace", skip(tcx))]
fn parents_trait_predicates<'tcx>(
    tcx: TyCtxt<'tcx>,
    pred: PolyTraitPredicate<'tcx>,
    options: BoundsOptions,
) -> Vec> {
    let self_trait_ref = pred.to_poly_trait_ref();
    implied_predicates(tcx, pred.def_id(), options)
        .iter()
        .map(|(clause, _span)| *clause)
        // Substitute with the `self` args so that the clause makes sense in the
        // outside context.
        .map(|clause| clause.instantiate_supertrait(tcx, self_trait_ref))
        .filter_map(|pred| pred.as_trait_clause())
        .collect()
}

/// A candidate projects `self` along a path reaching some predicate. A candidate is
/// selected when its predicate is the one expected, aka `target`.
#[derive(Debug, Clone)]
struct Candidate<'tcx> {
    /// The path (parent/associated-item steps) from the origin clause to `pred`.
    path: Path<'tcx>,
    /// The predicate this candidate proves.
    pred: PolyTraitPredicate<'tcx>,
    /// The local clause this candidate starts from.
    origin: AnnotatedTraitPred<'tcx>,
}

impl<'tcx> Candidate<'tcx> {
    /// Converts a selected candidate into the corresponding `ImplExprAtom`,
    /// depending on whether its origin is the implicit `Self` clause or a numbered item clause.
    fn into_impl_expr(self, tcx: TyCtxt<'tcx>) -> ImplExprAtom<'tcx> {
        let path = self.path;
        let r#trait = self.origin.clause.to_poly_trait_ref();
        match self.origin.origin {
            BoundPredicateOrigin::SelfPred => ImplExprAtom::SelfImpl { r#trait, path },
            BoundPredicateOrigin::Item(index) => ImplExprAtom::LocalBound {
                predicate: self.origin.clause.upcast(tcx),
                index,
                r#trait,
                path,
            },
        }
    }
}

/// Stores a set of predicates along with where they came from.
#[derive(Clone)]
pub struct PredicateSearcher<'tcx> {
    tcx: TyCtxt<'tcx>,
    typing_env: rustc_middle::ty::TypingEnv<'tcx>,
    /// Local clauses available in the current context.
    candidates: HashMap, Candidate<'tcx>>,
    /// Resolution options.
    options: BoundsOptions,
    /// Count the number of bound clauses in scope; used to identify clauses uniquely.
    bound_clause_count: usize,
}

impl<'tcx> PredicateSearcher<'tcx> {
    /// Initialize the elaborator with the predicates accessible within this item.
    pub fn new_for_owner(tcx: TyCtxt<'tcx>, owner_id: DefId, options: BoundsOptions) -> Self {
        let mut out = Self {
            tcx,
            typing_env: TypingEnv {
                param_env: tcx.param_env(owner_id),
                typing_mode: TypingMode::PostAnalysis,
            },
            candidates: Default::default(),
            options,
            bound_clause_count: 0,
        };
        // The implicit `Self: Trait` clause, when applicable, goes in first.
        out.insert_predicates(initial_self_pred(tcx, owner_id).map(|clause| AnnotatedTraitPred {
            origin: BoundPredicateOrigin::SelfPred,
            clause,
        }));
        out.insert_bound_predicates(local_bound_predicates(tcx, owner_id, options));
        out
    }

    /// Insert the bound clauses in the search context. Prefer inserting them all at once as this
    /// will give priority to shorter resolution paths. Bound clauses are numbered from `0` in
    /// insertion order.
    pub fn insert_bound_predicates(
        &mut self,
        clauses: impl IntoIterator>,
    ) {
        let mut count = usize::MAX;
        // Swap to avoid borrow conflicts: the closure below needs `&mut count` while
        // `insert_predicates` borrows `self` mutably; we swap the updated count back afterwards.
        std::mem::swap(&mut count, &mut self.bound_clause_count);
        self.insert_predicates(clauses.into_iter().map(|clause| {
            let i = count;
            count += 1;
            AnnotatedTraitPred {
                origin: BoundPredicateOrigin::Item(i),
                clause,
            }
        }));
        std::mem::swap(&mut count, &mut self.bound_clause_count);
    }

    /// Override the param env; we use this when resolving `dyn` predicates to add more clauses to
    /// the scope.
    pub fn set_param_env(&mut self, param_env: ParamEnv<'tcx>) {
        self.typing_env.param_env = param_env;
    }

    /// Insert annotated predicates in the search context. Prefer inserting them all at once as
    /// this will give priority to shorter resolution paths.
    fn insert_predicates(&mut self, preds: impl IntoIterator>) {
        self.insert_candidates(preds.into_iter().map(|clause| Candidate {
            path: vec![],
            pred: clause.clause,
            origin: clause,
        }))
    }

    /// Insert new candidates and all their parent predicates. This deduplicates predicates
    /// to avoid divergence.
    fn insert_candidates(&mut self, candidates: impl IntoIterator>) {
        let tcx = self.tcx;
        // Filter out duplicated candidates.
        let mut new_candidates = Vec::new();
        for mut candidate in candidates {
            // Normalize and erase all lifetimes.
            candidate.pred = normalize_bound_val(tcx, self.typing_env, candidate.pred);
            // First insertion wins: an already-present predicate keeps its (shorter) path.
            if let Entry::Vacant(entry) = self.candidates.entry(candidate.pred) {
                entry.insert(candidate.clone());
                new_candidates.push(candidate);
            }
        }
        if !new_candidates.is_empty() {
            // Insert the parents all at once.
            self.insert_candidate_parents(new_candidates);
        }
    }

    /// Add the parents of these candidates. This is a separate function to avoid
    /// polymorphic recursion due to the closures capturing the type parameters of this
    /// function.
    fn insert_candidate_parents(&mut self, new_candidates: Vec>) {
        let tcx = self.tcx;
        // Then recursively add their parents. This way ensures a breadth-first order,
        // which means we select the shortest path when looking up predicates.
        let options = self.options;
        self.insert_candidates(new_candidates.into_iter().flat_map(|candidate| {
            parents_trait_predicates(tcx, candidate.pred, options)
                .into_iter()
                .enumerate()
                .map(move |(index, parent_pred)| {
                    let mut parent_candidate = Candidate {
                        pred: parent_pred,
                        path: candidate.path.clone(),
                        origin: candidate.origin,
                    };
                    // Extend the path with the parent step we just took.
                    parent_candidate.path.push(PathChunk::Parent {
                        predicate: parent_pred,
                        index,
                    });
                    parent_candidate
                })
        }));
    }

    /// If the type is a trait associated type, we add any relevant bounds to our context.
    fn add_associated_type_refs(
        &mut self,
        ty: Binder<'tcx, Ty<'tcx>>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result<(), String> {
        let tcx = self.tcx;
        // Note: We skip a binder but rebind it just after.
        let TyKind::Alias(AliasTyKind::Projection, alias_ty) = ty.skip_binder().kind() else {
            return Ok(());
        };
        let trait_ref = ty.rebind(alias_ty.trait_ref(tcx)).upcast(tcx);
        // The predicate we're looking for is `<T as Trait>::Type: OtherTrait`. We look up `T as
        // Trait` in the current context and add all the bounds on `Trait::Type` to our context.
        let Some(trait_candidate) = self.resolve_local(trait_ref, warn)? else {
            return Ok(());
        };
        // The bounds that hold on the associated type.
        let item_bounds = implied_predicates(tcx, alias_ty.def_id, self.options);
        let item_bounds = item_bounds
            .iter()
            .map(|(clause, _span)| *clause)
            .filter_map(|pred| pred.as_trait_clause())
            // Substitute the item generics
            .map(|pred| EarlyBinder::bind(pred).instantiate(tcx, alias_ty.args))
            .enumerate();
        // Add all the bounds on the corresponding associated item.
        self.insert_candidates(item_bounds.map(|(index, pred)| {
            let mut candidate = Candidate {
                path: trait_candidate.path.clone(),
                pred,
                origin: trait_candidate.origin,
            };
            candidate.path.push(PathChunk::AssocItem {
                item: tcx.associated_item(alias_ty.def_id),
                generic_args: alias_ty.args,
                predicate: pred,
                index,
            });
            candidate
        }));
        Ok(())
    }

    /// Resolve a local clause by looking it up in this set. If the predicate applies to an
    /// associated type, we add the relevant implied associated type bounds to the set as well.
    fn resolve_local(
        &mut self,
        target: PolyTraitPredicate<'tcx>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result>, String> {
        tracing::trace!("Looking for {target:?}");
        // Look up the predicate
        let ret = self.candidates.get(&target).cloned();
        if ret.is_some() {
            return Ok(ret);
        }
        // Add clauses related to associated type in the `Self` type of the predicate,
        // then retry the lookup.
        self.add_associated_type_refs(target.self_ty(), warn)?;
        let ret = self.candidates.get(&target).cloned();
        if ret.is_none() {
            tracing::trace!(
                "Couldn't find {target:?} in: [\n{}]",
                self.candidates
                    .iter()
                    .map(|(_, c)| format!(" - {:?}\n", c.pred))
                    .join("")
            );
        }
        Ok(ret)
    }

    /// Resolve the given trait reference in the local context.
    #[tracing::instrument(level = "trace", skip(self, warn))]
    pub fn resolve(
        &mut self,
        tref: &PolyTraitRef<'tcx>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result, String> {
        use rustc_trait_selection::traits::{
            BuiltinImplSource, ImplSource, ImplSourceUserDefinedData,
        };
        let tcx = self.tcx;
        let destruct_trait = tcx.lang_items().destruct_trait().unwrap();
        let erased_tref = normalize_bound_val(self.tcx, self.typing_env, *tref);
        let trait_def_id = erased_tref.skip_binder().def_id;

        // Helper: warn, then return a non-fatal `Error` atom carrying the message.
        let error = |msg: String| {
            warn(&msg);
            Ok(ImplExpr {
                r#impl: ImplExprAtom::Error(msg),
                r#trait: *tref,
            })
        };

        let impl_source = shallow_resolve_trait_ref(tcx, self.typing_env.param_env, erased_tref);
        let atom = match impl_source {
            // A concrete `impl` block was selected.
            Ok(ImplSource::UserDefined(ImplSourceUserDefinedData {
                impl_def_id,
                args: generics,
                ..
            })) => ImplExprAtom::Concrete {
                def_id: impl_def_id,
                generics,
            },
            // The impl comes from a clause in scope; find which one.
            Ok(ImplSource::Param(_)) => {
                match self.resolve_local(erased_tref.upcast(self.tcx), warn)? {
                    Some(candidate) => candidate.into_impl_expr(tcx),
                    None => {
                        let msg = format!(
                            "Could not find a clause for `{tref:?}` in the item parameters"
                        );
                        return error(msg);
                    }
                }
            }
            Ok(ImplSource::Builtin(BuiltinImplSource::Object { .. }, _)) => ImplExprAtom::Dyn,
            Ok(ImplSource::Builtin(_, _)) => {
                // Resolve the predicates implied by the trait.
                // If we wanted to not skip this binder, we'd have to instantiate the bound
                // regions, solve, then wrap the result in a binder. And track higher-kinded
                // clauses better all over.
                let impl_exprs = self.resolve_item_implied_predicates(
                    trait_def_id,
                    erased_tref.skip_binder().args,
                    warn,
                )?;
                // Compute the values of the trait's associated types for this built-in impl.
                let types = tcx
                    .associated_items(trait_def_id)
                    .in_definition_order()
                    .filter(|assoc| matches!(assoc.kind, AssocKind::Type { .. }))
                    .filter_map(|assoc| {
                        let ty =
                            Ty::new_projection(tcx, assoc.def_id, erased_tref.skip_binder().args);
                        let ty = erase_and_norm(tcx, self.typing_env, ty);
                        if let TyKind::Alias(_, alias_ty) = ty.kind() {
                            if alias_ty.def_id == assoc.def_id {
                                // Couldn't normalize the type to anything different than itself;
                                // this must be a built-in associated type such as
                                // `DiscriminantKind::Discriminant`.
                                // We can't return the unnormalized associated type as that would
                                // make the trait ref contain itself, which would make hax's
                                // `sinto` infrastructure loop. That's ok because we can't provide
                                // a value for this type other than the associate type alias
                                // itself.
                                return None;
                            }
                        }
                        let impl_exprs = self
                            .resolve_item_implied_predicates(
                                assoc.def_id,
                                erased_tref.skip_binder().args,
                                warn,
                            )
                            .ok()?;
                        Some((assoc.def_id, ty, impl_exprs))
                    })
                    .collect();
                // Special-case `Destruct`: attach drop-glue information to the impl.
                let trait_data = if erased_tref.skip_binder().def_id == destruct_trait {
                    let ty = erased_tref.skip_binder().args[0].as_type().unwrap();
                    // Source of truth are `ty::needs_drop_components` and `tcx.needs_drop_raw`.
                    let destruct_data = match ty.kind() {
                        // TODO: Does `UnsafeBinder` drop its contents?
                        ty::Bool
                        | ty::Char
                        | ty::Int(..)
                        | ty::Uint(..)
                        | ty::Float(..)
                        | ty::Foreign(..)
                        | ty::Str
                        | ty::RawPtr(..)
                        | ty::Ref(..)
                        | ty::FnDef(..)
                        | ty::FnPtr(..)
                        | ty::UnsafeBinder(..)
                        | ty::Never => Either::Left(DestructData::Noop),
                        ty::Tuple(tys) if tys.is_empty() => Either::Left(DestructData::Noop),
                        ty::Array(..)
                        | ty::Pat(..)
                        | ty::Slice(..)
                        | ty::Tuple(..)
                        | ty::Adt(..)
                        | ty::Closure(..)
                        | ty::Coroutine(..)
                        | ty::CoroutineClosure(..)
                        | ty::CoroutineWitness(..) => Either::Left(DestructData::Glue { ty }),
                        // Every `dyn` has a `drop_in_place` in its vtable, ergo we pretend that every
                        // `dyn` has `Destruct` in its list of traits.
                        ty::Dynamic(..) => Either::Right(ImplExprAtom::Dyn),
                        ty::Param(..) | ty::Alias(..) | ty::Bound(..) => {
                            if self.options.resolve_destruct {
                                // We've added `Destruct` impls on everything, we should be able to resolve
                                // it.
                                match self.resolve_local(erased_tref.upcast(self.tcx), warn)? {
                                    Some(candidate) => Either::Right(candidate.into_impl_expr(tcx)),
                                    None => {
                                        let msg = format!(
                                            "Cannot find virtual `Destruct` clause: `{tref:?}`"
                                        );
                                        return error(msg);
                                    }
                                }
                            } else {
                                Either::Left(DestructData::Implicit)
                            }
                        }
                        ty::Placeholder(..) | ty::Infer(..) | ty::Error(..) => {
                            let msg = format!(
                                "Cannot resolve clause `{tref:?}` \
                                because of a type error"
                            );
                            return error(msg);
                        }
                    };
                    destruct_data.map_left(BuiltinTraitData::Destruct)
                } else {
                    Either::Left(BuiltinTraitData::Other)
                };
                match trait_data {
                    Either::Left(trait_data) => ImplExprAtom::Builtin {
                        trait_data,
                        impl_exprs,
                        types,
                    },
                    Either::Right(atom) => atom,
                }
            }
            Err(e) => {
                let msg = format!(
                    "Could not find a clause for `{tref:?}` \
                    in the current context: `{e:?}`"
                );
                return error(msg);
            }
        };

        Ok(ImplExpr {
            r#impl: atom,
            r#trait: *tref,
        })
    }

    /// Resolve the predicates required by the given item.
    pub fn resolve_item_required_predicates(
        &mut self,
        def_id: DefId,
        generics: GenericArgsRef<'tcx>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result>, String> {
        let tcx = self.tcx;
        self.resolve_predicates(
            generics,
            required_predicates(tcx, def_id, self.options),
            warn,
        )
    }

    /// Resolve the predicates implied by the given item.
    pub fn resolve_item_implied_predicates(
        &mut self,
        def_id: DefId,
        generics: GenericArgsRef<'tcx>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result>, String> {
        let tcx = self.tcx;
        self.resolve_predicates(
            generics,
            implied_predicates(tcx, def_id, self.options),
            warn,
        )
    }

    /// Apply the given generics to the provided clauses and resolve the trait references in the
    /// current context.
    pub fn resolve_predicates(
        &mut self,
        generics: GenericArgsRef<'tcx>,
        predicates: utils::Predicates<'tcx>,
        // Call back into hax-related code to display a nice warning.
        warn: &impl Fn(&str),
    ) -> Result>, String> {
        let tcx = self.tcx;
        predicates
            .iter()
            .map(|(clause, _span)| *clause)
            .filter_map(|clause| clause.as_trait_clause())
            .map(|trait_pred| trait_pred.map_bound(|p| p.trait_ref))
            // Substitute the item generics
            .map(|trait_ref| EarlyBinder::bind(trait_ref).instantiate(tcx, generics))
            // Resolve
            .map(|trait_ref| self.resolve(&trait_ref, warn))
            .collect()
    }
}

/// Attempts to resolve an obligation to an `ImplSource`. The result is a shallow `ImplSource`
/// resolution, meaning that we do not resolve all nested obligations on the impl. Note that type
/// check should guarantee to us that all nested obligations *could be* resolved if we wanted to.
///
/// This expects that `trait_ref` is fully normalized.
///
/// This is based on `rustc_traits::codegen::codegen_select_candidate` in rustc.
pub fn shallow_resolve_trait_ref<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    trait_ref: PolyTraitRef<'tcx>,
) -> Result, CodegenObligationError> {
    use rustc_infer::infer::TyCtxtInferExt;
    use rustc_middle::traits::CodegenObligationError;
    use rustc_middle::ty::TypeVisitableExt;
    use rustc_trait_selection::traits::{
        Obligation, ObligationCause, ObligationCtxt, SelectionContext, SelectionError,
    };
    // Do the initial selection for the obligation. This yields the
    // shallow result we are looking for -- that is, what specific impl.
    let infcx = tcx
        .infer_ctxt()
        .ignoring_regions()
        .build(TypingMode::PostAnalysis);
    let mut selcx = SelectionContext::new(&infcx);

    let obligation_cause = ObligationCause::dummy();
    let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref);

    let selection = match selcx.poly_select(&obligation) {
        Ok(Some(selection)) => selection,
        Ok(None) => return Err(CodegenObligationError::Ambiguity),
        Err(SelectionError::Unimplemented) => return Err(CodegenObligationError::Unimplemented),
        Err(_) => return Err(CodegenObligationError::Ambiguity),
    };

    // Currently, we use a fulfillment context to completely resolve
    // all nested obligations. This is because they can inform the
    // inference of the impl's type parameters.
    // FIXME(-Znext-solver): Doesn't need diagnostics if new solver.
    let ocx = ObligationCtxt::new(&infcx);
    let impl_source = selection.map(|obligation| {
        ocx.register_obligation(obligation.clone());
        ()
    });
    let errors = ocx.evaluate_obligations_error_on_ambiguity();
    if !errors.is_empty() {
        return Err(CodegenObligationError::Ambiguity);
    }
    let impl_source = infcx.resolve_vars_if_possible(impl_source);
    let impl_source = tcx.erase_and_anonymize_regions(impl_source);
    if impl_source.has_infer() {
        // Unused lifetimes on an impl get replaced with inference vars, but never resolved.
        return Err(CodegenObligationError::Ambiguity);
    }
    Ok(impl_source)
}



================================================
FILE: frontend/exporter/src/traits/utils.rs
================================================
//! Each item can involve three kinds of predicates:
//! - input aka required predicates: the predicates required to mention the item. These are usually `where`
//!   clauses (or equivalent) on the item:
//!   ```ignore
//!   struct Foo { ... }
//!   trait Foo where T: Clone { ... }
//!   fn function() where I: Iterator, I::Item: Clone { ... }
//!   ```
//! - output aka implied predicates: the predicates that are implied by the presence of this item in a
//!   signature.
//!   This is mostly trait parent predicates:
//!   ```ignore
//!   trait Foo: Clone { ... }
//!   fn bar() {
//!       // from `T: Foo` we can deduce `T: Clone`
//!   }
//!   ```
//!   This could also include implied predicates such as `&'a T` implying `T: 'a` but we don't
//!   consider these.
//! - "self" predicate: that's the special `Self: Trait` predicate in scope within a trait
//!   declaration or implementation for trait `Trait`.
//!
//! Note that within a given item the polarity is reversed: input predicates are the ones that can
//! be assumed to hold and output predicates must be proven to hold. The "self" predicate is both
//! assumed and proven within an impl block, and just assumed within a trait declaration block.
//!
//! The current implementation considers all predicates on traits to be outputs, which has the
//! benefit of reducing the size of signatures. Moreover, the rules on which bounds are required vs
//! implied are subtle. We may change this if this proves to be a problem.
use hax_frontend_exporter_options::BoundsOptions;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_middle::ty::*;
use rustc_span::def_id::DefId;
use rustc_span::{DUMMY_SP, Span};
use std::borrow::Cow;

/// Clauses paired with their source spans; borrowed from the tcx when possible.
pub type Predicates<'tcx> = Cow<'tcx, [(Clause<'tcx>, Span)]>;

/// Returns a list of type predicates for the definition with ID `def_id`, including inferred
/// lifetime constraints. This is the basic list of predicates we use for essentially all items.
pub fn predicates_defined_on(tcx: TyCtxt<'_>, def_id: DefId) -> Predicates<'_> {
    // Stay borrowed unless we actually have outlives clauses to append.
    let mut result = Cow::Borrowed(tcx.explicit_predicates_of(def_id).predicates);
    let inferred_outlives = tcx.inferred_outlives_of(def_id);
    if !inferred_outlives.is_empty() {
        result.to_mut().extend(
            inferred_outlives
                .iter()
                .map(|(clause, span)| ((*clause).upcast(tcx), *span)),
        );
    }
    result
}

/// Add `T: Destruct` bounds for every generic parameter of the given item.
fn add_destruct_bounds<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, predicates: &mut Vec<(Clause<'tcx>, Span)>, ) { let def_kind = tcx.def_kind(def_id); if matches!(def_kind, DefKind::Closure) { // Closures have fictitious weird type parameters in their `own_args` that we don't want to // add `Destruct` bounds for. return; } // Add a `T: Destruct` bound for every generic. let destruct_trait = tcx.lang_items().destruct_trait().unwrap(); let extra_bounds = tcx .generics_of(def_id) .own_params .iter() .filter(|param| matches!(param.kind, GenericParamDefKind::Type { .. })) .map(|param| tcx.mk_param_from_def(param)) .map(|ty| Binder::dummy(TraitRef::new(tcx, destruct_trait, [ty]))) .map(|tref| tref.upcast(tcx)) .map(|clause| (clause, DUMMY_SP)); predicates.extend(extra_bounds); } /// The predicates that must hold to mention this item. E.g. /// /// ```ignore /// // `U: OtherTrait` is required, `Self: Sized` is implied. /// trait Trait: Sized { /// // `T: Clone` is required, `Self::Type: Debug` is implied. /// type Type: Debug; /// } /// ``` /// /// If `add_drop` is true, we add a `T: Drop` bound for every type generic. pub fn required_predicates<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, options: BoundsOptions, ) -> Predicates<'tcx> { use DefKind::*; let def_kind = tcx.def_kind(def_id); let mut predicates = match def_kind { AssocConst | AssocFn | AssocTy | Const | Enum | Fn | ForeignTy | Impl { .. } | OpaqueTy | Static { .. } | Struct | TyAlias | Union => predicates_defined_on(tcx, def_id), // We consider all predicates on traits to be outputs Trait | TraitAlias => Default::default(), // `predicates_defined_on` ICEs on other def kinds. _ => Default::default(), }; // For methods and assoc consts in trait definitions, we add an explicit `Self: Trait` clause. // Associated types get to use the implicit `Self: Trait` clause instead. 
if !matches!(def_kind, AssocTy) && let Some(trait_def_id) = tcx.trait_of_assoc(def_id) { let self_clause = self_predicate(tcx, trait_def_id).upcast(tcx); predicates.to_mut().insert(0, (self_clause, DUMMY_SP)); } if options.resolve_destruct && !matches!(def_kind, Trait | TraitAlias) { // Add a `T: Destruct` bound for every generic. For traits we consider these predicates // implied instead of required. add_destruct_bounds(tcx, def_id, predicates.to_mut()); } if options.prune_sized { prune_sized_predicates(tcx, &mut predicates); } predicates } /// The special "self" predicate on a trait. pub fn self_predicate<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> PolyTraitRef<'tcx> { // Copied from the code of `tcx.predicates_of()`. Binder::dummy(TraitRef::identity(tcx, def_id)) } /// The predicates that can be deduced from the presence of this item in a signature. We only /// consider predicates implied by traits here, not implied bounds such as `&'a T` implying `T: /// 'a`. E.g. /// /// ```ignore /// // `U: OtherTrait` is required, `Self: Sized` is implied. /// trait Trait: Sized { /// // `T: Clone` is required, `Self::Type: Debug` is implied. /// type Type: Debug; /// } /// ``` /// /// If `add_drop` is true, we add a `T: Drop` bound for every type generic and associated type. pub fn implied_predicates<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, options: BoundsOptions, ) -> Predicates<'tcx> { use DefKind::*; let parent = tcx.opt_parent(def_id); let mut predicates = match tcx.def_kind(def_id) { // We consider all predicates on traits to be outputs Trait | TraitAlias => { let mut predicates = predicates_defined_on(tcx, def_id); if options.resolve_destruct { // Add a `T: Drop` bound for every generic, unless the current trait is `Drop` itself, or a // built-in marker trait that we know doesn't need the bound. 
if !matches!( tcx.as_lang_item(def_id), Some( LangItem::Destruct | LangItem::Sized | LangItem::MetaSized | LangItem::PointeeSized | LangItem::DiscriminantKind | LangItem::PointeeTrait | LangItem::Tuple ) ) { add_destruct_bounds(tcx, def_id, predicates.to_mut()); } } predicates } AssocTy if matches!(tcx.def_kind(parent.unwrap()), Trait) => { // `skip_binder` is for the GAT `EarlyBinder` let mut predicates = Cow::Borrowed(tcx.explicit_item_bounds(def_id).skip_binder()); if options.resolve_destruct { // Add a `Drop` bound to the assoc item. let destruct_trait = tcx.lang_items().destruct_trait().unwrap(); let ty = Ty::new_projection(tcx, def_id, GenericArgs::identity_for_item(tcx, def_id)); let tref = Binder::dummy(TraitRef::new(tcx, destruct_trait, [ty])); predicates.to_mut().push((tref.upcast(tcx), DUMMY_SP)); } predicates } _ => Predicates::default(), }; if options.prune_sized { prune_sized_predicates(tcx, &mut predicates); } predicates } /// Normalize a value. pub fn normalize<'tcx, T>(tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, value: T) -> T where T: TypeFoldable> + Clone, { use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::traits::ObligationCause; use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt; let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env); infcx .at(&ObligationCause::dummy(), param_env) .query_normalize(value.clone()) // We ignore the generated outlives relations. Unsure what we should do with them. .map(|x| x.value) .unwrap_or(value) } /// Erase free regions from the given value. Largely copied from `tcx.erase_and_anonymize_regions`, but also /// erases bound regions that are bound outside `value`, so we can call this function inside a /// `Binder`. 
pub fn erase_free_regions<'tcx, T>(tcx: TyCtxt<'tcx>, value: T) -> T where T: TypeFoldable>, { use rustc_middle::ty; struct RegionEraserVisitor<'tcx> { tcx: TyCtxt<'tcx>, depth: u32, } impl<'tcx> TypeFolder> for RegionEraserVisitor<'tcx> { fn cx(&self) -> TyCtxt<'tcx> { self.tcx } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { ty.super_fold_with(self) } fn fold_binder(&mut self, t: ty::Binder<'tcx, T>) -> ty::Binder<'tcx, T> where T: TypeFoldable>, { let t = self.tcx.anonymize_bound_vars(t); self.depth += 1; let t = t.super_fold_with(self); self.depth -= 1; t } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { // We don't erase bound regions that are bound inside the expression we started with, // but we do erase those that point "outside of it". match r.kind() { ty::ReBound(BoundVarIndexKind::Bound(dbid), _) if dbid.as_u32() < self.depth => r, _ => self.tcx.lifetimes.re_erased, } } } value.fold_with(&mut RegionEraserVisitor { tcx, depth: 0 }) } // Normalize and erase lifetimes, erasing more lifetimes than normal because we might be already // inside a binder and rustc doesn't like that. pub fn erase_and_norm<'tcx, T>(tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, x: T) -> T where T: TypeFoldable> + Copy, { erase_free_regions( tcx, tcx.try_normalize_erasing_regions(typing_env, x) .unwrap_or(x), ) } /// Given our currently hacky handling of binders, in order for trait resolution to work we must /// empty out the binders of trait refs. Specifically it's so that we can reconnect associated type /// constraints with the trait ref they come from, given that the projection in question doesn't /// track the right binder currently. pub fn normalize_bound_val<'tcx, T>( tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, x: Binder<'tcx, T>, ) -> Binder<'tcx, T> where T: TypeFoldable> + Copy, { Binder::dummy(erase_and_norm(tcx, typing_env, x.skip_binder())) } /// Returns true whenever `def_id` is `MetaSized`, `Sized` or `PointeeSized`. 
pub fn is_sized_related_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { use rustc_hir::lang_items::LangItem; let lang_item = tcx.as_lang_item(def_id); matches!( lang_item, Some(LangItem::PointeeSized | LangItem::MetaSized | LangItem::Sized) ) } /// Given a `GenericPredicates`, prune every occurence of a sized-related clause. /// Prunes bounds of the shape `T: MetaSized`, `T: Sized` or `T: PointeeSized`. fn prune_sized_predicates<'tcx>(tcx: TyCtxt<'tcx>, generic_predicates: &mut Predicates<'tcx>) { let predicates: Vec<(Clause<'tcx>, rustc_span::Span)> = generic_predicates .iter() .filter(|(clause, _)| { clause.as_trait_clause().is_none_or(|trait_predicate| { !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id()) }) }) .copied() .collect(); if predicates.len() != generic_predicates.len() { *generic_predicates.to_mut() = predicates; } } pub trait ToPolyTraitRef<'tcx> { fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>; } impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> { fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> { self.map_bound_ref(|trait_pred| trait_pred.trait_ref) } } ================================================ FILE: frontend/exporter/src/traits.rs ================================================ use crate::prelude::*; #[cfg(feature = "rustc")] pub mod resolution; #[cfg(feature = "rustc")] mod utils; #[cfg(feature = "rustc")] pub use utils::{ Predicates, ToPolyTraitRef, erase_and_norm, erase_free_regions, implied_predicates, normalize, predicates_defined_on, required_predicates, self_predicate, }; #[cfg(feature = "rustc")] pub use resolution::PredicateSearcher; #[cfg(feature = "rustc")] use rustc_middle::ty; #[cfg(feature = "rustc")] use rustc_span::def_id::DefId as RDefId; #[cfg(feature = "rustc")] pub use utils::is_sized_related_trait; #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum ImplExprPathChunk { AssocItem { /// Reference to the item, with generics 
(for GATs), e.g. the `T` and `T: Clone` `ImplExpr` /// in the following example: /// ```ignore /// trait Foo { /// type Type: Debug; /// } /// ``` item: ItemRef, assoc_item: AssocItem, /// The implemented predicate. predicate: Binder, predicate_id: PredicateId, /// The index of this predicate in the list returned by `implied_predicates`. index: usize, }, Parent { /// The implemented predicate. predicate: Binder, predicate_id: PredicateId, /// The index of this predicate in the list returned by `implied_predicates`. index: usize, }, } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>> SInto for resolution::PathChunk<'tcx> { fn sinto(&self, s: &S) -> ImplExprPathChunk { match self { resolution::PathChunk::AssocItem { item, generic_args, predicate, index, .. } => ImplExprPathChunk::AssocItem { item: translate_item_ref(s, item.def_id, generic_args), assoc_item: AssocItem::sfrom(s, item), predicate: predicate.sinto(s), predicate_id: <_ as SInto<_, Clause>>::sinto(predicate, s).id, index: index.sinto(s), }, resolution::PathChunk::Parent { predicate, index, .. } => ImplExprPathChunk::Parent { predicate: predicate.sinto(s), predicate_id: <_ as SInto<_, Clause>>::sinto(predicate, s).id, index: index.sinto(s), }, } } } /// The source of a particular trait implementation. Most often this is either `Concrete` for a /// concrete `impl Trait for Type {}` item, or `LocalBound` for a context-bound `where T: Trait`. #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExprAtom<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum ImplExprAtom { /// A concrete `impl Trait for Type {}` item. #[custom_arm(FROM_TYPE::Concrete { def_id, generics } => TO_TYPE::Concrete( translate_item_ref(s, *def_id, generics), ),)] Concrete(ItemRef), /// A context-bound clause like `where T: Trait`. LocalBound { #[not_in_source] #[value({ let Self::LocalBound { predicate, .. 
} = self else { unreachable!() }; predicate.sinto(s).id })] predicate_id: PredicateId, /// The nth (non-self) predicate found for this item. We use predicates from /// `required_predicates` starting from the parentmost item. index: usize, r#trait: Binder, path: Vec, }, /// The implicit `Self: Trait` clause present inside a `trait Trait {}` item. // TODO: should we also get that clause for trait impls? SelfImpl { r#trait: Binder, path: Vec, }, /// `dyn Trait` is a wrapped value with a virtual table for trait /// `Trait`. In other words, a value `dyn Trait` is a dependent /// triple that gathers a type τ, a value of type τ and an /// instance of type `Trait`. /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that /// built-in implementation. Dyn, /// A built-in trait whose implementation is computed by the compiler, such as `FnMut`. This /// morally points to an invisible `impl` block; as such it contains the information we may /// need from one. Builtin { /// Extra data for the given trait. trait_data: BuiltinTraitData, /// The `ImplExpr`s required to satisfy the implied predicates on the trait declaration. /// E.g. since `FnMut: FnOnce`, a built-in `T: FnMut` impl would have an `ImplExpr` for `T: /// FnOnce`. impl_exprs: Vec, /// The values of the associated types for this trait. types: Vec<(DefId, Ty, Vec)>, }, /// An error happened while resolving traits. Error(String), } #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::BuiltinTraitData<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum BuiltinTraitData { /// A virtual `Destruct` implementation. /// `Destruct` is implemented automatically for all types. For our purposes, we chose to attach /// the information about `drop_in_place` to that trait. This data tells us what kind of /// `drop_in_place` the target type has. 
Destruct(DestructData), /// Some other builtin trait. Other, } #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::DestructData<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum DestructData { /// A drop that does nothing, e.g. for scalars and pointers. Noop, /// An implicit `Destruct` local clause, if the `resolve_destruct_bounds` option is `false`. If /// that option is `true`, we'll add `Destruct` bounds to every type param, and use that to /// resolve `Destruct` impls of generics. If it's `false`, we use this variant to indicate that /// the clause comes from a generic or associated type. Implicit, /// The `drop_in_place` is known and non-trivial. Glue { /// The type we're generating glue for. ty: Ty, }, } /// An `ImplExpr` describes the full data of a trait implementation. Because of generics, this may /// need to combine several concrete trait implementation items. For example, `((1u8, 2u8), /// "hello").clone()` combines the generic implementation of `Clone` for `(A, B)` with the /// concrete implementations for `u8` and `&str`, represented as a tree. #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema, AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExpr<'tcx>, state: S as s)] pub struct ImplExpr { /// The trait this is an impl for. pub r#trait: Binder, /// The kind of implemention of the root of the tree. pub r#impl: ImplExprAtom, } /// Given a clause `clause` in the context of some impl block `impl_did`, susbts correctly `Self` /// from `clause` and (1) derive a `Clause` and (2) resolve an `ImplExpr`. 
#[cfg(feature = "rustc")]
pub fn super_clause_to_clause_and_impl_expr<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    impl_did: rustc_span::def_id::DefId,
    clause: rustc_middle::ty::Clause<'tcx>,
    span: rustc_span::Span,
) -> Option<(Clause, ImplExpr, Span)> {
    let tcx = s.base().tcx;
    // Only trait impls carry super clauses; return `None` for inherent impls and other kinds.
    if !matches!(
        tcx.def_kind(impl_did),
        rustc_hir::def::DefKind::Impl { of_trait: true }
    ) {
        return None;
    }
    let impl_trait_ref =
        rustc_middle::ty::Binder::dummy(tcx.impl_trait_ref(impl_did).instantiate_identity());
    let original_predicate_id = {
        // We don't want the id of the substituted clause id, but the
        // original clause id (with, i.e., `Self`)
        let s = &s.with_owner_id(impl_trait_ref.def_id());
        clause.sinto(s).id
    };
    // Substitute `Self` in the clause with the impl's trait ref.
    let new_clause = clause.instantiate_supertrait(tcx, impl_trait_ref);
    // Non-trait clauses (e.g. outlives) are dropped here via the `?`.
    let impl_expr = solve_trait(
        s,
        new_clause
            .as_predicate()
            .as_trait_clause()?
            .to_poly_trait_ref(),
    );
    // Report the substituted clause but keep the *original* (un-substituted) predicate id.
    let mut new_clause_no_binder = new_clause.sinto(s);
    new_clause_no_binder.id = original_predicate_id;
    Some((new_clause_no_binder, impl_expr, span.sinto(s)))
}

/// This is the entrypoint of the solving.
#[cfg(feature = "rustc")]
#[tracing::instrument(level = "trace", skip(s))]
pub fn solve_trait<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    trait_ref: rustc_middle::ty::PolyTraitRef<'tcx>,
) -> ImplExpr {
    // Resolution warnings are suppressed when the `silence_resolution_errors` option is set.
    let warn = |msg: &str| {
        if !s.base().silence_resolution_errors {
            crate::warning!(s, "{}", msg)
        }
    };
    // Results are memoized per trait ref in the state's cache.
    if let Some(impl_expr) = s.with_cache(|cache| cache.impl_exprs.get(&trait_ref).cloned()) {
        return impl_expr;
    }
    let resolved =
        s.with_predicate_searcher(|pred_searcher| pred_searcher.resolve(&trait_ref, &warn));
    let impl_expr = match resolved {
        Ok(x) => x.sinto(s),
        // Resolution failure is fatal: we cannot produce a meaningful `ImplExpr`.
        Err(e) => crate::fatal!(s, "{}", e),
    };
    s.with_cache(|cache| cache.impl_exprs.insert(trait_ref, impl_expr.clone()));
    impl_expr
}

/// Translate a reference to an item, resolving the appropriate trait clauses as needed.
#[cfg(feature = "rustc")]
#[tracing::instrument(level = "trace", skip(s), ret)]
pub fn translate_item_ref<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    def_id: RDefId,
    generics: ty::GenericArgsRef<'tcx>,
) -> ItemRef {
    ItemRef::translate(s, def_id, generics)
}

/// Solve the trait obligations for a specific item use (for example, a method call, an ADT, etc.)
/// in the current context. Just like generic args include generics of parent items, this includes
/// impl exprs for parent items.
#[cfg(feature = "rustc")]
#[tracing::instrument(level = "trace", skip(s), ret)]
pub fn solve_item_required_traits<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    def_id: RDefId,
    generics: ty::GenericArgsRef<'tcx>,
) -> Vec<ImplExpr> {
    // Walk up to the parent first so parent impl exprs come before the item's own, mirroring
    // the ordering of generic args.
    fn accumulate<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        def_id: RDefId,
        generics: ty::GenericArgsRef<'tcx>,
        impl_exprs: &mut Vec<ImplExpr>,
    ) {
        let tcx = s.base().tcx;
        use rustc_hir::def::DefKind::*;
        match tcx.def_kind(def_id) {
            AssocTy | AssocFn | AssocConst | Closure | Ctor(..) | Variant => {
                let parent = tcx.parent(def_id);
                accumulate(s, parent, generics, impl_exprs);
            }
            _ => {}
        }
        let predicates = required_predicates(tcx, def_id, s.base().options.bounds_options);
        impl_exprs.extend(solve_item_traits_inner(s, generics, predicates));
    }
    let mut impl_exprs = vec![];
    accumulate(s, def_id, generics, &mut impl_exprs);
    impl_exprs
}

/// Solve the trait obligations for implementing a trait (or for trait associated type bounds) in
/// the current context.
#[cfg(feature = "rustc")]
#[tracing::instrument(level = "trace", skip(s), ret)]
pub fn solve_item_implied_traits<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    def_id: RDefId,
    generics: ty::GenericArgsRef<'tcx>,
) -> Vec<ImplExpr> {
    let predicates = implied_predicates(s.base().tcx, def_id, s.base().options.bounds_options);
    solve_item_traits_inner(s, generics, predicates)
}

/// Apply the given generics to the provided clauses and resolve the trait references in the
/// current context.
#[cfg(feature = "rustc")] fn solve_item_traits_inner<'tcx, S: UnderOwnerState<'tcx>>( s: &S, generics: ty::GenericArgsRef<'tcx>, predicates: utils::Predicates<'tcx>, ) -> Vec { let tcx = s.base().tcx; let typing_env = s.typing_env(); predicates .iter() .map(|(clause, _span)| *clause) .filter_map(|clause| clause.as_trait_clause()) .map(|clause| clause.to_poly_trait_ref()) // Substitute the item generics .map(|trait_ref| ty::EarlyBinder::bind(trait_ref).instantiate(tcx, generics)) // We unfortunately don't have a way to normalize without erasing regions. .map(|trait_ref| { tcx.try_normalize_erasing_regions(typing_env, trait_ref) .unwrap_or(trait_ref) }) // Resolve .map(|trait_ref| solve_trait(s, trait_ref)) .collect() } /// Retrieve the `Self: Trait` clause for a trait associated item. #[cfg(feature = "rustc")] pub fn self_clause_for_item<'tcx, S: UnderOwnerState<'tcx>>( s: &S, def_id: RDefId, generics: rustc_middle::ty::GenericArgsRef<'tcx>, ) -> Option { let tcx = s.base().tcx; let tr_def_id = tcx.trait_of_assoc(def_id)?; // The "self" predicate in the context of the trait. let self_pred = self_predicate(tcx, tr_def_id); // Substitute to be in the context of the current item. let generics = generics.truncate_to(tcx, tcx.generics_of(tr_def_id)); let self_pred = ty::EarlyBinder::bind(self_pred).instantiate(tcx, generics); // Resolve Some(solve_trait(s, self_pred)) } /// Solve the `T: Sized` predicate. #[cfg(feature = "rustc")] pub fn solve_sized<'tcx, S: UnderOwnerState<'tcx>>(s: &S, ty: ty::Ty<'tcx>) -> ImplExpr { let tcx = s.base().tcx; let sized_trait = tcx.lang_items().sized_trait().unwrap(); let ty = erase_free_regions(tcx, ty); let tref = ty::Binder::dummy(ty::TraitRef::new(tcx, sized_trait, [ty])); solve_trait(s, tref) } ================================================ FILE: frontend/exporter/src/types/attributes.rs ================================================ //! Copies of the types related to attributes. //! 
Such types are mostly contained in the crate `rustc_hir::attrs`. use crate::prelude::*; /// Reflects [`rustc_hir::attrs::AttributeKind`] #[derive(AdtInto)] #[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::attrs::AttributeKind, state: S as tcx)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum AttributeKind { Align { align: Align, span: Span, }, AutomaticallyDerived(Span), Deprecation { deprecation: Deprecation, span: Span, }, DocComment { style: AttrStyle, kind: CommentKind, span: Span, comment: Symbol, }, Ignore { span: Span, reason: Option, }, Marker(Span), MayDangle(Span), MustUse { span: Span, reason: Option, }, Path(Symbol, Span), #[todo] Todo(String), } /// Reflects [`rustc_hir::attrs::Deprecation`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S>, from: rustc_hir::attrs::Deprecation, state: S as _s)] pub struct Deprecation { pub since: DeprecatedSince, pub note: Option, pub suggestion: Option, } /// Reflects [`rustc_hir::attrs::DeprecatedSince`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S>, from: rustc_hir::attrs::DeprecatedSince, state: S as _s)] pub enum DeprecatedSince { RustcVersion(RustcVersion), Future, NonStandard(Symbol), Unspecified, Err, } /// Reflects [`rustc_hir::RustcVersion`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S>, from: rustc_hir::RustcVersion, state: S as _s)] pub struct RustcVersion { pub major: u16, pub minor: u16, pub patch: u16, } /// Reflects [`rustc_hir::attrs::InlineAttr`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::attrs::InlineAttr, state: S as _s)] pub enum InlineAttr { None, Hint, Always, Never, 
Force { attr_span: Span, reason: Option, }, } ================================================ FILE: frontend/exporter/src/types/def_id.rs ================================================ //! This module contains the type definition for `DefId` and the types //! `DefId` depends on. //! //! This is purposely a very small isolated module: //! `hax-engine-names-extract` uses those types, but we don't want //! `hax-engine-names-extract` to have a build dependency on the whole //! frontend, that double the build times for the Rust part of hax. //! //! The feature `extract_names_mode` exists only in the crate //! `hax-engine-names-extract`, and is used to turn off the derive //! attributes `AdtInto` and `JsonSchema`. use hax_adt_into::derive_group; #[cfg(feature = "rustc")] use crate::prelude::*; #[cfg(not(feature = "extract_names_mode"))] use crate::{AdtInto, JsonSchema}; #[cfg(feature = "rustc")] use {rustc_hir as hir, rustc_hir::def_id::DefId as RDefId, rustc_middle::ty}; pub type Symbol = String; #[cfg(not(feature = "extract_names_mode"))] pub type ByteSymbol = Vec; #[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] impl<'t, S> SInto for rustc_span::symbol::Symbol { fn sinto(&self, _s: &S) -> Symbol { self.to_ident_string() } } #[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] impl<'t, S> SInto for rustc_span::symbol::ByteSymbol { fn sinto(&self, _s: &S) -> ByteSymbol { self.as_byte_str().to_owned() } } /// Reflects [`hir::Safety`] #[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))] #[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::Safety, state: S as _s))] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum Safety { Unsafe, Safe, } pub type Mutability = bool; #[cfg(not(feature = "extract_names_mode"))] pub type Pinnedness = bool; /// Reflects [`hir::def::CtorKind`] #[derive_group(Serializers)] #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, 
PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] #[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::def::CtorKind, state: S as _s))] pub enum CtorKind { Fn, Const, } /// Reflects [`hir::def::CtorOf`] #[derive_group(Serializers)] #[derive(Debug, Copy, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] #[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::def::CtorOf, state: S as _s))] pub enum CtorOf { Struct, Variant, } /// The id of a promoted MIR constant. /// /// Reflects [`rustc_middle::mir::Promoted`]. #[derive_group(Serializers)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] #[cfg_attr(not(feature = "extract_names_mode"), args(, from: rustc_middle::mir::Promoted, state: S as _s))] pub struct PromotedId { #[cfg_attr(not(feature = "extract_names_mode"), value(self.as_u32()))] pub id: u32, } #[cfg(feature = "rustc")] impl PromotedId { pub fn as_rust_promoted_id(&self) -> rustc_middle::mir::Promoted { rustc_middle::mir::Promoted::from_u32(self.id) } } /// Reflects [`rustc_hir::def::DefKind`] #[derive_group(Serializers)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] #[cfg_attr(not(feature = "extract_names_mode"), args(, from: rustc_hir::def::DefKind, state: S as tcx))] #[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)] pub enum DefKind { Mod, Struct, Union, Enum, Variant, Trait, TyAlias, ForeignTy, TraitAlias, AssocTy, TyParam, Fn, Const, ConstParam, Static { safety: Safety, mutability: Mutability, nested: bool, }, Ctor(CtorOf, CtorKind), AssocFn, AssocConst, Macro(MacroKinds), ExternCrate, Use, ForeignMod, AnonConst, InlineConst, #[cfg_attr(not(feature = "extract_names_mode"), disable_mapping)] /// Added by hax: promoted constants don't have def_ids in rustc but they do in hax. 
PromotedConst, OpaqueTy, Field, LifetimeParam, GlobalAsm, Impl { of_trait: bool, }, Closure, SyntheticCoroutineBody, } #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema))] pub struct MacroKinds { bang: bool, attr: bool, derive: bool, } #[cfg(feature = "rustc")] impl SInto for rustc_hir::def::MacroKinds { fn sinto(&self, _s: &S) -> MacroKinds { MacroKinds { bang: self.contains(Self::BANG), attr: self.contains(Self::ATTR), derive: self.contains(Self::DERIVE), } } } /// Reflects [`rustc_hir::def_id::DefId`], augmented to also give ids to promoted constants (which /// have their own ad-hoc numbering scheme in rustc for now). #[derive_group(Serializers)] #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema))] pub struct DefId { pub(crate) contents: crate::id_table::Node, } #[derive_group(Serializers)] #[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema))] pub struct DefIdContents { pub krate: String, pub path: Vec, pub parent: Option, /// Stores rustc's `CrateNum`, `DefIndex` and `Promoted` raw indices. This can be useful if one /// needs to convert a [`DefId`] into a [`rustc_hir::def_id::DefId`]. If the promoted id is /// `Some`, then this `DefId` indicates the nth promoted constant associated with the item, /// which doesn't have a real `rustc::DefId`. /// /// **Warning: this `index` field might not be safe to use**. They are valid only for one Rustc /// sesssion. Please do not rely on those indices unless you cannot do otherwise. pub index: (u32, u32, Option), pub is_local: bool, /// The kind of definition this `DefId` points to. 
pub kind: crate::DefKind, }

// NOTE(review): throughout this chunk, angle-bracketed generic arguments appear to
// have been lost in text extraction (e.g. `Option` below was presumably
// `Option<RDefId>`, `SInto` presumably `SInto<S, DefId>`) — confirm against the
// repository before building.

#[cfg(feature = "rustc")]
impl DefIdContents {
    /// Intern these contents in the global id table, producing a hax `DefId`.
    pub fn make_def_id<'tcx, S: BaseState<'tcx>>(self, s: &S) -> DefId {
        let contents =
            s.with_global_cache(|cache| id_table::Node::new(self, &mut cache.id_table_session));
        DefId { contents }
    }
}

/// Returns the [`SyntheticItem`] encoded by a [rustc `DefId`](RDefId), if any.
#[cfg(feature = "rustc")]
pub fn def_id_as_synthetic<'tcx>(
    def_id: RDefId,
    s: &impl BaseState<'tcx>,
) -> Option {
    // Reverse lookup in the session-global synthetic-item map.
    s.with_global_cache(|c| c.reverse_synthetic_map.get(&def_id).copied())
}

#[cfg(feature = "rustc")]
impl DefId {
    /// The rustc def_id corresponding to this item, if there is one. Promoted constants don't have
    /// a rustc def_id.
    pub fn as_rust_def_id(&self) -> Option {
        let (_, _, promoted) = self.index;
        match promoted {
            None => Some(self.underlying_rust_def_id()),
            Some(_) => None,
        }
    }
    /// The def_id of this item or its parent if this is a promoted constant.
    pub fn underlying_rust_def_id(&self) -> RDefId {
        let (krate, index, _) = self.index;
        RDefId {
            krate: rustc_hir::def_id::CrateNum::from_u32(krate),
            index: rustc_hir::def_id::DefIndex::from_u32(index),
        }
    }
    /// Returns the [`SyntheticItem`] encoded by a [rustc `DefId`](RDefId), if
    /// any.
    ///
    /// Note that this method relies on rustc indexes, which are session
    /// specific. See [`Self`] documentation.
    pub fn as_synthetic<'tcx>(&self, s: &impl BaseState<'tcx>) -> Option {
        def_id_as_synthetic(self.underlying_rust_def_id(), s)
    }
    /// Iterate over this element and its parents.
    pub fn ancestry(&self) -> impl Iterator {
        // Walks the `parent` chain until a crate root (whose `parent` is `None`).
        std::iter::successors(Some(self), |def| def.parent.as_ref())
    }
    /// The `PathItem` corresponding to this item.
    pub fn path_item(&self) -> DisambiguatedDefPathItem {
        // An empty path means this *is* the crate root; synthesize its path item.
        self.path
            .last()
            .cloned()
            .unwrap_or_else(|| DisambiguatedDefPathItem {
                disambiguator: 0,
                data: DefPathItem::CrateRoot {
                    name: self.krate.clone(),
                },
            })
    }
    /// Construct a hax `DefId` for the nth promoted constant of the current item. That `DefId` has
    /// no corresponding rustc `DefId`.
    pub fn make_promoted_child<'tcx, S: BaseState<'tcx>>(
        &self,
        s: &S,
        promoted_id: PromotedId,
    ) -> Self {
        let mut path = self.path.clone();
        path.push(DisambiguatedDefPathItem {
            data: DefPathItem::PromotedConst,
            // Reuse the promoted id as disambiguator, like for inline consts.
            disambiguator: promoted_id.id,
        });
        let (krate, index, _) = self.index;
        let contents = DefIdContents {
            krate: self.krate.clone(),
            path,
            parent: Some(self.clone()),
            is_local: self.is_local,
            index: (krate, index, Some(promoted_id)),
            kind: DefKind::PromotedConst,
        };
        contents.make_def_id(s)
    }
}

impl DefId {
    /// The promoted-constant id stored in the third component of `index`, if any.
    pub fn promoted_id(&self) -> Option {
        let (_, _, promoted) = self.index;
        promoted
    }
}

impl std::ops::Deref for DefId {
    type Target = DefIdContents;
    fn deref(&self) -> &Self::Target {
        &self.contents
    }
}

#[cfg(not(feature = "rustc"))]
impl std::fmt::Debug for DefId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Without rustc available, print only the stable parts: crate and path.
        f.debug_struct("DefId")
            .field("krate", &self.krate)
            .field("path", &self.path)
            .finish()
    }
}

#[cfg(feature = "rustc")]
impl std::fmt::Debug for DefId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Use the more legible rustc debug implementation.
        write!(f, "{:?}", self.underlying_rust_def_id())?;
        if let Some(promoted) = self.promoted_id() {
            write!(f, "::promoted#{}", promoted.id)?;
        }
        Ok(())
    }
}

impl std::hash::Hash for DefId {
    fn hash(&self, state: &mut H) {
        // A `DefId` is basically an interned path; we only hash the path, discarding the rest of
        // the information.
        self.krate.hash(state);
        self.path.hash(state);
        self.promoted_id().hash(state);
    }
}

/// Gets the kind of the definition. Can't use `def_kind` directly because this crashes on the
/// crate root.
#[cfg(feature = "rustc")]
pub(crate) fn get_def_kind<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> hir::def::DefKind {
    if def_id == rustc_span::def_id::CRATE_DEF_ID.to_def_id() {
        // Horrible hack: without this, `def_kind` crashes on the crate root. Presumably some table
        // isn't properly initialized otherwise.
        let _ = tcx.def_span(def_id);
    };
    tcx.def_kind(def_id)
}

/// The crate name under which synthetic items are exported under.
#[cfg(any(feature = "extract_names_mode", feature = "rustc"))]
pub(super) const SYNTHETIC_CRATE_NAME: &str = "";

/// Translate a rustc `DefId` into a hax `DefId`, interning the result.
#[cfg(feature = "rustc")]
fn translate_def_id<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) -> DefId {
    let tcx = s.base().tcx;
    let path = {
        // Set the def_id so the `CrateRoot` path item can fetch the crate name.
        let state_with_id = s.with_owner_id(def_id);
        tcx.def_path(def_id)
            .data
            .iter()
            .map(|x| x.sinto(&state_with_id))
            .collect()
    };
    let contents = DefIdContents {
        path,
        // Synthetic items are exported under the dedicated synthetic crate name.
        krate: if def_id_as_synthetic(def_id, s).is_some() {
            SYNTHETIC_CRATE_NAME.to_string()
        } else {
            tcx.crate_name(def_id.krate).to_string()
        },
        parent: tcx.opt_parent(def_id).sinto(s),
        index: (
            rustc_hir::def_id::CrateNum::as_u32(def_id.krate),
            rustc_hir::def_id::DefIndex::as_u32(def_id.index),
            None,
        ),
        is_local: def_id.is_local(),
        kind: get_def_kind(tcx, def_id).sinto(s),
    };
    contents.make_def_id(s)
}

#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))]
impl<'s, S: BaseState<'s>> SInto for RDefId {
    fn sinto(&self, s: &S) -> DefId {
        // Per-item memoization: translate each rustc `DefId` at most once.
        if let Some(def_id) = s.with_item_cache(*self, |cache| cache.def_id.clone()) {
            return def_id;
        }
        let def_id = translate_def_id(s, *self);
        s.with_item_cache(*self, |cache| cache.def_id = Some(def_id.clone()));
        def_id
    }
}

#[cfg(not(feature = "extract_names_mode"))]
pub type Path = Vec;

#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))]
impl std::convert::From for Path {
    fn from(v: DefId) -> Vec {
        // Keep only the named path segments (type/value/macro/lifetime namespaces),
        // prefixed by the crate name; disambiguators and anonymous items are dropped.
        std::iter::once(&v.krate)
            .chain(v.path.iter().filter_map(|item| match &item.data {
                DefPathItem::TypeNs(s)
                | DefPathItem::ValueNs(s)
                | DefPathItem::MacroNs(s)
                | DefPathItem::LifetimeNs(s) => Some(s),
                _ => None,
            }))
            .cloned()
            .collect()
    }
}

#[cfg(not(feature = "extract_names_mode"))]
pub type GlobalIdent = DefId;

#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))]
impl<'tcx, S: BaseState<'tcx>> SInto for rustc_hir::def_id::LocalDefId {
    fn sinto(&self, st: &S) -> DefId {
        self.to_def_id().sinto(st)
    }
}

/// Reflects [`rustc_hir::definitions::DefPathData`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))]
#[cfg_attr(not(feature = "extract_names_mode"), args(<'ctx, S: UnderOwnerState<'ctx>>, from: rustc_hir::definitions::DefPathData, state: S as s))]
pub enum DefPathItem {
    CrateRoot {
        #[cfg_attr(not(feature = "extract_names_mode"), value(s.base().tcx.crate_name(s.owner_id().krate).sinto(s)))]
        name: Symbol,
    },
    Impl,
    ForeignMod,
    Use,
    GlobalAsm,
    TypeNs(Symbol),
    ValueNs(Symbol),
    MacroNs(Symbol),
    LifetimeNs(Symbol),
    Closure,
    Ctor,
    LateAnonConst,
    AnonConst,
    // Hax-only variant (no rustc counterpart), hence the disabled mapping.
    #[cfg_attr(not(feature = "extract_names_mode"), disable_mapping)]
    PromotedConst,
    DesugaredAnonymousLifetime,
    OpaqueTy,
    OpaqueLifetime(Symbol),
    AnonAssocTy(Symbol),
    SyntheticCoroutineBody,
    NestedStatic,
}

#[derive_group(Serializers)]
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))]
#[cfg_attr(not(feature = "extract_names_mode"), args(<'a, S: UnderOwnerState<'a>>, from: rustc_hir::definitions::DisambiguatedDefPathData, state: S as s))]
/// Reflects [`rustc_hir::definitions::DisambiguatedDefPathData`]
pub struct DisambiguatedDefPathItem {
    pub data: DefPathItem,
    pub disambiguator: u32,
}

================================================
FILE: frontend/exporter/src/types/hir.rs
================================================
//! Copies of the relevant `HIR` types. HIR represents the code of a rust crate post-macro
//! expansion. It is close to the parsed AST, modulo some desugarings (and macro expansion).
//!
//! This module also includes some `rustc_ast` definitions when they show up in HIR.
use crate::prelude::*;
use crate::sinto_todo;
#[cfg(feature = "rustc")]
use rustc_ast::ast;
#[cfg(feature = "rustc")]
use rustc_hir as hir;
#[cfg(feature = "rustc")]
use rustc_middle::ty;

// NOTE(review): several generic arguments in this section appear stripped by text
// extraction (e.g. `Vec`, `Option`, `SInto>`) — confirm against the repository.

/// Reflects [`hir::hir_id::HirId`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: BaseState<'tcx>>, from: hir::hir_id::HirId, state: S as gstate)]
pub struct HirId {
    owner: DefId,
    local_id: usize,
    // attrs: String
}
// TODO: If not working: See original

#[cfg(feature = "rustc")]
impl<'tcx, S: BaseState<'tcx>> SInto for hir::hir_id::OwnerId {
    fn sinto(&self, s: &S) -> DefId {
        self.to_def_id().sinto(s)
    }
}

/// Reflects [`ast::LitFloatType`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: BaseState<'tcx>>, from: ast::LitFloatType, state: S as gstate)]
pub enum LitFloatType {
    Suffixed(FloatTy),
    Unsuffixed,
}

/// Reflects [`hir::Movability`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S>, from: hir::Movability, state: S as _s)]
pub enum Movability {
    Static,
    Movable,
}

#[cfg(feature = "rustc")]
impl SInto for hir::Mutability {
    fn sinto(&self, _s: &S) -> Mutability {
        // Hax models mutability as a plain bool: `Mut` => true, `Not` => false.
        match self {
            Self::Mut => true,
            Self::Not => false,
        }
    }
}

#[cfg(feature = "rustc")]
impl SInto for hir::Pinnedness {
    fn sinto(&self, _s: &S) -> Pinnedness {
        // Likewise, pinnedness is modeled as a bool.
        match self {
            Self::Pinned => true,
            Self::Not => false,
        }
    }
}

/// Reflects [`hir::RangeEnd`]
#[derive(AdtInto)]
#[args(, from: hir::RangeEnd, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum RangeEnd {
    Included,
    Excluded,
}

/// Reflects [`hir::ImplicitSelfKind`]
#[derive(AdtInto)]
#[args(, from: hir::ImplicitSelfKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ImplicitSelfKind {
    Imm,
    Mut,
    RefImm,
    RefMut,
    None,
}

/// Reflects [`hir::FnDecl`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnDecl<'tcx>, state: S as tcx)]
pub struct FnDecl {
    pub inputs: Vec,
    pub output: FnRetTy,
    pub c_variadic: bool,
    pub implicit_self: ImplicitSelfKind,
    pub lifetime_elision_allowed: bool,
}

/// Reflects [`hir::FnSig`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnSig<'tcx>, state: S as tcx)]
pub struct FnSig {
    pub header: FnHeader,
    pub decl: FnDecl,
    pub span: Span,
}

/// Reflects [`hir::HeaderSafety`].
#[derive(AdtInto, JsonSchema)]
#[args(, from: hir::HeaderSafety, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum HeaderSafety {
    SafeTargetFeatures,
    Normal(Safety),
}

/// Reflects [`hir::FnHeader`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::FnHeader, state: S as tcx)]
pub struct FnHeader {
    pub safety: HeaderSafety,
    pub constness: Constness,
    pub asyncness: IsAsync,
    pub abi: ExternAbi,
}

/// Reflects [`rustc_abi::ExternAbi`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_abi::ExternAbi, state: S as s)]
pub enum ExternAbi {
    Rust,
    C {
        unwind: bool,
    },
    #[todo]
    Other(String),
}

/// Function definition
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct FnDef {
    pub header: FnHeader,
    pub params: Vec,
    pub ret: Ty,
    pub body: Body,
    pub sig_span: Span,
}

#[cfg(feature = "rustc")]
impl<'x: 'tcx, 'tcx, S: UnderOwnerState<'tcx>> SInto for hir::Ty<'x> {
    fn sinto(self: &hir::Ty<'x>, s: &S) -> Ty {
        // **Important:**
        // We need a local id here, and we get it from the owner id, which must
        // be local. It is safe to do so, because if we have access to a HIR ty,
        // it necessarily means we are exploring a local item (we don't have
        // access to the HIR of external objects, only their MIR).
        rustc_hir_analysis::lower_ty(s.base().tcx, self).sinto(s)
    }
}

/// Reflects [`hir::UseKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: hir::UseKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum UseKind {
    Single(Ident),
    Glob,
    ListStem,
}

/// Reflects [`hir::IsAuto`]
#[derive(AdtInto)]
#[args(, from: hir::IsAuto, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum IsAuto {
    Yes,
    No,
}

/// Reflects [`hir::Defaultness`]
#[derive(AdtInto)]
#[args(, from: hir::Defaultness, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum Defaultness {
    Default { has_value: bool },
    Final,
}

/// Reflects [`hir::ImplPolarity`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ImplPolarity, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ImplPolarity {
    Positive,
    Negative(Span),
}

/// Reflects [`hir::Constness`]
#[derive(AdtInto)]
#[args(, from: hir::Constness, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum Constness {
    Const,
    NotConst,
}

/// Reflects [`hir::Generics`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Generics<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct Generics {
    pub params: Vec>,
    // Bounds are recomputed from the current owner rather than copied from HIR.
    #[value(region_bounds_at_current_owner(tcx))]
    pub bounds: GenericBounds,
    pub has_where_clause_predicates: bool,
    pub where_clause_span: Span,
    pub span: Span,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for hir::ImplItemId {
    fn sinto(&self, s: &S) -> ImplItem {
        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;
        let impl_item = tcx.hir_impl_item(*self);
        // Re-root the state at the impl item before translating it.
        let s = s.with_owner_id(impl_item.owner_id.to_def_id());
        impl_item.sinto(&s)
    }
}

/// Reflects [`hir::ParamName`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ParamName {
    Plain(LocalIdent),
    Fresh,
    Error,
}

/// Reflects [`hir::LifetimeParamKind`]
#[derive(AdtInto)]
#[args(, from: hir::LifetimeParamKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum LifetimeParamKind {
    Explicit,
    Elided(MissingLifetimeKind),
    Error,
}

/// Reflects [`hir::AnonConst`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: BaseState<'tcx>>, from: hir::AnonConst, state: S as s)]
pub struct AnonConst {
    pub hir_id: HirId,
    pub def_id: GlobalIdent,
    #[map({ body_from_id::(*x, &s.with_owner_id(hir_id.owner.to_def_id())) })]
    pub body: Body,
}

/// Reflects [`hir::ConstArg`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ConstArg<'tcx>, state: S as s)]
pub struct ConstArg {
    pub hir_id: HirId,
    pub kind: ConstArgKind,
}

/// Reflects [`hir::ConstArgKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ConstArgKind<'tcx>, state: S as s)]
pub enum ConstArgKind {
    Path(QPath),
    Anon(AnonConst),
    #[todo]
    Infer(String),
}

/// Reflects [`hir::GenericParamKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParamKind<'tcx>, state: S as tcx)]
pub enum GenericParamKind {
    Lifetime {
        kind: LifetimeParamKind,
    },
    Type {
        /// On use site, Rust always give us all the generic
        /// parameters, no matter the defaultness. This information is
        /// thus not so useful. At the same time, as discussed in
        /// https://github.com/hacspec/hax/issues/310, extracting this
        /// default type causes failures when querying Rust for trait
        /// resolution. We thus decided to disable this feature. If
        /// this default type information is useful to you, please
        /// open an issue on https://github.com/hacspec/hax.
        #[map(x.map(|_ty| ()))]
        default: Option<()>,
        synthetic: bool,
    },
    Const {
        ty: Ty,
        default: Option>,
    },
}

/// Reflects [`hir::GenericParam`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParam<'tcx>, state: S as s)]
pub struct GenericParam {
    pub hir_id: HirId,
    pub def_id: GlobalIdent,
    #[map(match x {
        hir::ParamName::Plain(loc_ident) =>
            ParamName::Plain(LocalIdent {
                name: loc_ident.as_str().to_string(),
                id: self.hir_id.sinto(s)
            }),
        hir::ParamName::Fresh =>
            ParamName::Fresh,
        hir::ParamName::Error { .. } =>
            ParamName::Error,
    })]
    pub name: ParamName,
    pub span: Span,
    pub pure_wrt_drop: bool,
    pub kind: GenericParamKind,
    pub colon_span: Option,
    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]
    attributes: Vec,
}

/// Reflects [`hir::ImplItem`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItem<'tcx>, state: S as s)]
pub struct ImplItem {
    pub ident: Ident,
    pub owner_id: DefId,
    pub generics: Generics,
    pub kind: ImplItemKind,
    pub span: Span,
    // Removed fields. If these are used, will need to provide `#[value(..)]` implementations.
    // pub defaultness: Defaultness,
    // pub vis_span: Span,
    #[value(ItemAttributes::from_owner_id(s, *owner_id))]
    /// the attributes on this impl item
    pub attributes: ItemAttributes,
}

/// Reflects [`hir::ImplItemKind`], inlining the body of the items.
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItemKind<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ImplItemKind {
    Const(Ty, Body),
    #[custom_arm(hir::ImplItemKind::Fn(sig, body) => {
        ImplItemKind::Fn(make_fn_def::(sig, body, s))
    },)]
    Fn(FnDef),
    #[custom_arm(hir::ImplItemKind::Type(t) => {
        // Collect the bounds declared on the corresponding trait's associated
        // type, paired with the impl exprs that discharge them for this impl.
        let parent_bounds = {
            let (tcx, owner_id) = (s.base().tcx, s.owner_id());
            let assoc_item = tcx.opt_associated_item(owner_id).unwrap();
            let impl_did = assoc_item.impl_container(tcx).unwrap();
            tcx.explicit_item_bounds(assoc_item.trait_item_def_id().unwrap())
                .skip_binder() // Skips an `EarlyBinder`, likely for GATs
                .iter()
                .copied()
                .filter(|(clause, _)| clause.as_trait_clause().is_some_and(|trait_predicate| {
                    !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())
                }))
                .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, impl_did, clause, span))
                .collect::>()
        };
        ImplItemKind::Type { ty: t.sinto(s), parent_bounds }
    },)]
    /// An associated type with its parent bounds inlined.
    Type {
        ty: Ty,
        parent_bounds: Vec<(Clause, ImplExpr, Span)>,
    },
}

/// Reflects [`hir::Impl`].
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Impl<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct Impl {
    // Inherent impls carry no safety marker; default them to `Safe`.
    #[value(of_trait.map(|trait_impl_header| trait_impl_header.safety).unwrap_or(rustc_hir::Safety::Safe).sinto(s))]
    pub safety: Safety,
    // Removed fields. If these are used, will need to provide `#[value(..)]` implementations.
    // pub polarity: ImplPolarity,
    // pub defaultness: Defaultness,
    pub generics: Generics,
    #[map({
        x.map(|_| s.base().tcx
            .impl_trait_ref(s.owner_id())
            .instantiate_identity()
            .sinto(s)
        )
    })]
    pub of_trait: Option,
    pub self_ty: Ty,
    pub items: Vec>,
    #[value({
        let (tcx, owner_id) = (s.base().tcx, s.owner_id());
        if self.of_trait.is_some() {
            let trait_did = tcx.impl_trait_id(owner_id);
            tcx.explicit_super_predicates_of(trait_did)
                .iter_identity_copied()
                .filter(|(clause, _)| clause.as_trait_clause().is_some_and(|trait_predicate| {
                    !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())
                }))
                .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, owner_id, clause, span))
                .collect::>()
        } else {
            vec![]
        }
    })]
    /// The clauses and impl expressions corresponding to the impl's
    /// trait (if not inherent) super bounds (if any).
    pub parent_bounds: Vec<(Clause, ImplExpr, Span)>,
}

/// Reflects [`hir::IsAsync`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::IsAsync, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum IsAsync {
    Async(Span),
    NotAsync,
}

/// Reflects [`hir::FnRetTy`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnRetTy<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum FnRetTy {
    DefaultReturn(Span),
    Return(Ty),
}

/// Reflects [`hir::VariantData`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::VariantData<'tcx>, state: S as tcx)]
pub enum VariantData {
    Struct {
        fields: Vec,
        recovered: bool,
    },
    Tuple(Vec, HirId, GlobalIdent),
    Unit(HirId, GlobalIdent),
}

#[cfg(feature = "rustc")]
impl SInto for ast::Recovered {
    fn sinto(&self, _s: &S) -> bool {
        // `Recovered` (parse-error recovery marker) is modeled as a plain bool.
        match self {
            Self::Yes(_) => true,
            Self::No => false,
        }
    }
}

/// Reflects [`hir::FieldDef`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FieldDef<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct HirFieldDef {
    pub span: Span,
    pub vis_span: Span,
    pub ident: Ident,
    pub hir_id: HirId,
    pub def_id: GlobalIdent,
    pub ty: Ty,
    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]
    attributes: Vec,
}

/// Reflects [`hir::Variant`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Variant<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct Variant {
    pub ident: Ident,
    pub hir_id: HirId,
    pub def_id: GlobalIdent,
    #[map(x.sinto(&s.with_owner_id(self.def_id.to_def_id())))]
    pub data: VariantData,
    pub disr_expr: Option>,
    #[value({
        // Look the variant up in the ADT definition to recover its discriminant.
        let tcx = s.base().tcx;
        let variant = tcx
            .adt_def(s.owner_id())
            .variants()
            .into_iter()
            .find(|v| v.def_id == self.def_id.into()).unwrap();
        variant.discr.sinto(s)
    })]
    pub discr: DiscriminantDefinition,
    pub span: Span,
    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]
    pub attributes: Vec,
}

/// Reflects [`hir::UsePath`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::UsePath<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct UsePath {
    pub span: Span,
    #[map(x.iter().map(|res| res.sinto(s)).collect())]
    pub res: Vec>,
    pub segments: Vec,
    // Recover a `use ... as name` rename, if any, from the enclosing item.
    #[value(self.segments.iter().last().and_then(|segment| {
        match s.base().tcx.hir_node_by_def_id(segment.hir_id.owner.def_id) {
            hir::Node::Item(hir::Item {
                kind: hir::ItemKind::Use(_, hir::UseKind::Single(ident)),
                ..
            }) => Some(ident.name.to_ident_string()),
            _ => None,
        }
    }))]
    pub rename: Option,
}

/// Reflects [`hir::def::Res`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::def::Res, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum Res {
    Def(DefKind, DefId),
    PrimTy(PrimTy),
    SelfTyParam {
        trait_: DefId,
    },
    SelfTyAlias {
        alias_to: DefId,
        forbid_generic: bool,
        is_trait_impl: bool,
    },
    SelfCtor(DefId),
    Local(HirId),
    ToolMod,
    NonMacroAttr(NonMacroAttrKind),
    Err,
}

/// Reflects [`hir::PrimTy`]
#[derive(AdtInto)]
#[args(, from: hir::PrimTy, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum PrimTy {
    Int(IntTy),
    Uint(UintTy),
    Float(FloatTy),
    Str,
    Bool,
    Char,
}

/// Reflects [`hir::def::NonMacroAttrKind`]
#[derive(AdtInto)]
#[args(, from: hir::def::NonMacroAttrKind, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum NonMacroAttrKind {
    Builtin(Symbol),
    Tool,
    DeriveHelper,
    DeriveHelperCompat,
}

/// Reflects [`hir::PathSegment`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::PathSegment<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct PathSegment {
    pub ident: Ident,
    pub hir_id: HirId,
    pub res: Res,
    #[map(args.map(|args| args.sinto(s)))]
    pub args: Option,
    pub infer_args: bool,
}

/// Reflects [`hir::ItemKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ItemKind<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ItemKind {
    ExternCrate(Option, Ident),
    Use(UsePath, UseKind),
    Static(Mutability, Ident, Ty, Body),
    Const(Ident, Generics, Ty, Body),
    #[custom_arm(
        hir::ItemKind::Fn{ ident, sig, generics, body, .. } => {
            ItemKind::Fn {
                ident: ident.sinto(s),
                generics: generics.sinto(s),
                def: make_fn_def::(sig, body, s),
            }
        }
    )]
    Fn {
        ident: Ident,
        generics: Generics,
        def: FnDef,
    },
    Macro(Ident, MacroDef, MacroKinds),
    Mod(Ident, Vec>),
    ForeignMod {
        abi: ExternAbi,
        items: Vec>,
    },
    GlobalAsm {
        asm: InlineAsm,
    },
    TyAlias(
        Ident,
        Generics,
        #[map({
            // Rust doesn't enforce bounds on generic parameters in type aliases. Thus, when
            // translating type aliases, we need to disable trait resolution errors. For more
            // details, please see https://github.com/hacspec/hax/issues/707.
            let s = &s.with_base(Base {
                silence_resolution_errors: true,
                ..s.base()
            });
            x.sinto(s)
        })]
        Ty,
    ),
    Enum(
        Ident,
        Generics,
        EnumDef,
        #[value({
            let tcx = s.base().tcx;
            tcx.repr_options_of_def(s.owner_id().expect_local()).sinto(s)
        })]
        ReprOptions,
    ),
    Struct(Ident, Generics, VariantData),
    Union(Ident, Generics, VariantData),
    Trait(
        Constness,
        IsAuto,
        Safety,
        Ident,
        Generics,
        GenericBounds,
        Vec>,
    ),
    TraitAlias(Constness, Ident, Generics, GenericBounds),
    Impl(Impl),
}

pub type EnumDef = Vec>;

/// Reflects [`hir::TraitItemKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItemKind<'tcx>, state: S as tcx)]
#[derive(Clone, Debug, JsonSchema)]
#[derive_group(Serializers)]
pub enum TraitItemKind {
    Const(Ty, Option),
    #[custom_arm(
        hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(id)) => {
            TraitItemKind::RequiredFn(sig.sinto(tcx), id.sinto(tcx))
        }
    )]
    /// Reflects a required [`hir::TraitItemKind::Fn`]
    RequiredFn(FnSig, Vec>),
    #[custom_arm(
        hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body)) => {
            TraitItemKind::ProvidedFn(sig.sinto(tcx), make_fn_def::(sig, body, tcx))
        }
    )]
    /// Reflects a provided [`hir::TraitItemKind::Fn`]
    ProvidedFn(FnSig, FnDef),
    #[custom_arm(
        hir::TraitItemKind::Type(b, ty) => {
            TraitItemKind::Type(b.sinto(tcx), ty.map(|t| t.sinto(tcx)))
        }
    )]
    Type(GenericBounds, Option),
}

/// Reflects [`hir::TraitItem`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItem<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct TraitItem {
    pub ident: Ident,
    pub owner_id: DefId,
    pub generics: Generics,
    pub kind: TraitItemKind,
    pub span: Span,
    pub defaultness: Defaultness,
    #[value(ItemAttributes::from_owner_id(s, *owner_id))]
    /// The attributes on this trait item
    pub attributes: ItemAttributes,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto> for hir::EnumDef<'tcx> {
    fn sinto(&self, s: &S) -> EnumDef {
        self.variants.iter().map(|v| v.sinto(s)).collect()
    }
}

#[cfg(feature = "rustc")]
impl<'a, S: BaseState<'a>, Body: IsBody> SInto> for hir::TraitItemId {
    fn sinto(&self, s: &S) -> TraitItem {
        // Re-root the state at the trait item before translating it.
        let s = s.with_owner_id(self.owner_id.to_def_id());
        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;
        tcx.hir_trait_item(*self).sinto(&s)
    }
}

#[cfg(feature = "rustc")]
impl<'a, 'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto>> for hir::Mod<'a> {
    fn sinto(&self, s: &S) -> Vec> {
        let tcx = s.base().tcx;
        self.item_ids
            .iter()
            .map(|id| tcx.hir_item(*id).sinto(s))
            .collect()
    }
}

/// Reflects [`hir::ForeignItemKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItemKind<'tcx>, state: S as tcx)]
#[derive(Clone, Debug, JsonSchema)]
#[derive_group(Serializers)]
pub enum ForeignItemKind {
    Fn(FnSig, Vec>, Generics),
    Static(Ty, Mutability, Safety),
    Type,
}

/// Reflects [`hir::ForeignItem`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItem<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct ForeignItem {
    pub ident: Ident,
    pub kind: ForeignItemKind,
    pub owner_id: DefId,
    pub span: Span,
    pub vis_span: Span,
}

#[cfg(feature = "rustc")]
impl<'a, S: UnderOwnerState<'a>, Body: IsBody> SInto> for hir::ForeignItemId {
    fn sinto(&self, s: &S) -> ForeignItem {
        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;
        tcx.hir_foreign_item(*self).sinto(s)
    }
}

/// Reflects [`hir::GenericBounds`]
type GenericBounds = Vec;

/// Compute the bounds for the owner registed in the state `s`
#[cfg(feature = "rustc")]
fn region_bounds_at_current_owner<'tcx, S: UnderOwnerState<'tcx>>(s: &S) -> GenericBounds {
    let tcx = s.base().tcx;
    // According to what kind of node we are looking at, we should
    // either call `predicates_defined_on` or `item_bounds`
    let use_item_bounds = {
        if let Some(oid) = s.owner_id().as_local() {
            let hir_id = tcx.local_def_id_to_hir_id(oid);
            let node = tcx.hir_node(hir_id);
            // Associated types in traits and opaque types carry their bounds
            // as item bounds rather than as predicates.
            matches!(
                node,
                hir::Node::TraitItem(hir::TraitItem {
                    kind: hir::TraitItemKind::Type(..),
                    ..
                }) | hir::Node::OpaqueTy(..),
            )
        } else {
            false
        }
    };
    let clauses: Vec> = if use_item_bounds {
        tcx.explicit_item_bounds(s.owner_id())
            .map_bound(|clauses| {
                clauses
                    .iter()
                    .map(|(x, _span)| x)
                    .copied()
                    .collect::>()
            })
            .instantiate_identity()
    } else {
        predicates_defined_on(tcx, s.owner_id())
            .iter()
            .map(|(x, _span)| x)
            .copied()
            .collect()
    };
    // Sized-related trait bounds are filtered out of the exported bounds.
    clauses
        .into_iter()
        .filter(|clause| {
            clause.as_trait_clause().is_none_or(|trait_predicate| {
                !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())
            })
        })
        .collect::>()
        .sinto(s)
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for hir::GenericBounds<'tcx> {
    fn sinto(&self, s: &S) -> GenericBounds {
        // The HIR bounds themselves are ignored; bounds are recomputed from the owner.
        region_bounds_at_current_owner(s)
    }
}

/// Reflects [`rustc_ast::tokenstream::TokenStream`] as a plain
/// string. If you need to reshape that into Rust tokens or construct,
/// please use, e.g., `syn`.
pub type TokenStream = String;
#[cfg(feature = "rustc")]
impl<'t, S> SInto for rustc_ast::tokenstream::TokenStream {
    fn sinto(&self, _: &S) -> String {
        // Pretty-print the token stream; consumers re-parse with e.g. `syn` if needed.
        rustc_ast_pretty::pprust::tts_to_string(self)
    }
}

/// Reflects [`rustc_ast::token::Delimiter`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::token::Delimiter, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
    Invisible(InvisibleOrigin),
}

sinto_todo!(rustc_ast::token, InvisibleOrigin);

/// Reflects [`rustc_ast::ast::DelimArgs`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::ast::DelimArgs, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct DelimArgs {
    pub dspan: DelimSpan,
    pub delim: Delimiter,
    pub tokens: TokenStream,
}

sinto_todo!(rustc_ast::tokenstream, DelimSpan);

/// Reflects [`ast::MacroDef`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: ast::MacroDef, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct MacroDef {
    pub body: DelimArgs,
    pub macro_rules: bool,
}

/// Reflects [`hir::Item`] (and [`hir::ItemId`])
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct Item {
    pub def_id: Option,
    pub owner_id: DefId,
    pub span: Span,
    pub vis_span: Span,
    pub kind: ItemKind,
    pub attributes: ItemAttributes,
    pub visibility: Visibility,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for hir::Item<'tcx> {
    fn sinto(&self, s: &S) -> Item {
        use hir::ItemKind::*;
        // TODO: Not all items have an identifier; return `Option` here, or even better: use the
        // ident in the `ItemKind`.
        let name = match self.kind {
            ExternCrate(_, i)
            | Use(_, hir::UseKind::Single(i))
            | Static(_, i, ..)
            | Const(i, ..)
            | Fn { ident: i, .. }
            | Macro(i, ..)
            | Mod(i, ..)
            | TyAlias(i, ..)
            | Enum(i, ..)
            | Struct(i, ..)
            | Union(i, ..)
            | Trait(_, _, _, i, ..)
            | TraitAlias(_, i, ..) => i.name.to_ident_string(),
            // Remaining kinds have no identifier of their own.
            Use(..) | ForeignMod { .. } | GlobalAsm { .. } | Impl { .. } => String::new(),
        };
        let s = &s.with_owner_id(self.owner_id.to_def_id());
        let tcx = s.base().tcx;
        let owner_id: DefId = self.owner_id.sinto(s);
        // Only keep `def_id` when the owner's path actually ends with this name.
        let def_id = Path::from(owner_id.clone())
            .ends_with(&[name])
            .then(|| owner_id.clone());
        Item {
            def_id,
            owner_id,
            span: self.span.sinto(s),
            vis_span: self.span.sinto(s),
            kind: self.kind.sinto(s),
            attributes: ItemAttributes::from_owner_id(s, self.owner_id),
            visibility: tcx.visibility(self.owner_id).sinto(s),
        }
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for hir::ItemId {
    fn sinto(&self, s: &S) -> Item {
        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;
        tcx.hir_item(*self).sinto(s)
    }
}

/// Reflects [`rustc_span::symbol::Ident`]
pub type Ident = (Symbol, Span);

#[cfg(feature = "rustc")]
impl<'tcx, S: BaseState<'tcx>> SInto for rustc_span::symbol::Ident {
    fn sinto(&self, s: &S) -> Ident {
        (self.name.sinto(s), self.span.sinto(s))
    }
}

/// Reflects [`rustc_ast::AttrStyle`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(, from: rustc_ast::AttrStyle, state: S as _s)]
pub enum AttrStyle {
    Outer,
    Inner,
}

/// Reflects [`rustc_ast::Attribute`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::Attribute, state: S as gstate)]
pub enum Attribute {
    Parsed(AttributeKind),
    Unparsed(AttrItem),
}

/// Reflects [`rustc_ast::ast::BindingMode`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::ast::BindingMode, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct BindingMode {
    #[value(self.0.sinto(s))]
    pub by_ref: ByRef,
    #[value(self.1.sinto(s))]
    pub mutability: Mutability,
}

/// Reflects [`rustc_ast::ast::ByRef`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::ast::ByRef, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ByRef {
    Yes(Pinnedness, Mutability),
    No,
}

/// Reflects [`rustc_ast::ast::StrStyle`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::ast::StrStyle, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum StrStyle {
    Cooked,
    Raw(u8),
}

/// Reflects [`rustc_ast::ast::LitKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitKind, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum LitKind {
    Str(Symbol, StrStyle),
    ByteStr(ByteSymbol, StrStyle),
    CStr(ByteSymbol, StrStyle),
    Byte(u8),
    Char(char),
    Int(
        // u128 does not fit in common JSON number types; serialize as a string.
        #[serde(with = "serialize_int::unsigned")]
        #[schemars(with = "String")]
        u128,
        LitIntType,
    ),
    Float(Symbol, LitFloatType),
    Bool(bool),
    Err(ErrorGuaranteed),
}

#[cfg(feature = "rustc")]
impl SInto for rustc_data_structures::packed::Pu128 {
    fn sinto(&self, _s: &S) -> u128 {
        self.0
    }
}

/// Reflects [`rustc_ast::token::CommentKind`]
#[derive(AdtInto)]
#[args(, from: rustc_ast::token::CommentKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum CommentKind {
    Line,
    Block,
}

/// Reflects [`rustc_hir::AttrArgs`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::AttrArgs, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum AttrArgs {
    Empty,
    Delimited(DelimArgs),
    Eq { eq_span: Span, expr: MetaItemLit },
}

/// Reflects [`rustc_ast::MetaItemLit`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::MetaItemLit, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct MetaItemLit {
    pub symbol: Symbol,
    pub suffix: Option,
    pub kind: LitKind,
    pub span: Span,
}

/// Reflects [`rustc_hir::AttrItem`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::AttrItem, state: S as gstate)]
pub struct AttrItem {
    // The attribute path is flattened to a plain string.
    #[map(x.to_string())]
    pub path: String,
    pub args: AttrArgs,
    pub span: Span,
}

#[cfg(feature = "rustc")]
impl SInto for rustc_ast::tokenstream::LazyAttrTokenStream {
    fn sinto(&self, st: &S) -> String {
        // Force the lazy stream, then reuse the `TokenStream` -> `String` instance.
        rustc_ast::tokenstream::TokenStream::new(self.to_attr_token_stream().to_token_trees())
            .sinto(st)
    }
}

sinto_todo!(rustc_hir, GenericArgs<'a> as HirGenericArgs);
sinto_todo!(rustc_hir, InlineAsm<'a>);
sinto_todo!(rustc_hir, MissingLifetimeKind);
sinto_todo!(rustc_hir, QPath<'tcx>);
sinto_todo!(rustc_hir, WhereRegionPredicate<'tcx>);
sinto_todo!(rustc_hir, WhereEqPredicate<'tcx>);
sinto_todo!(rustc_hir, OwnerId);

================================================
FILE: frontend/exporter/src/types/mir.rs
================================================
//! Copies of the relevant `MIR` types. MIR represents a rust (function) body as a CFG. It's a
//! semantically rich representation that contains no high-level control-flow operations like loops
//! or patterns; instead the control flow is entirely described by gotos and switches on integer
//! values.
use crate::prelude::*;

#[cfg(feature = "rustc")]
use rustc_middle::{mir, ty};

#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::SourceInfo, state: S as s)]
pub struct SourceInfo {
    pub span: Span,
    pub scope: SourceScope,
}

#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::LocalDecl<'tcx>, state: S as s)]
pub struct LocalDecl {
    pub mutability: Mutability,
    pub ty: Ty,
    pub source_info: SourceInfo,
    #[value(None)]
    // This information is contextual, thus the SInto instance initializes it to None, and then we
    // fill it while `SInto`ing MirBody
    pub name: Option,
}

pub type BasicBlocks = IndexVec;

/// Look up the user-facing name of a MIR local in the body's debug info: we
/// search for a `VarDebugInfo` entry whose value is the bare local itself
/// (no projections) and render its identifier.
#[cfg(feature = "rustc")]
fn name_of_local(
    local: rustc_middle::mir::Local,
    var_debug_info: &Vec,
) -> Option {
    var_debug_info
        .iter()
        .find(|info| {
            if let mir::VarDebugInfoContents::Place(place) = info.value {
                place.projection.is_empty() && place.local == local
            } else {
                false
            }
        })
        .map(|dbg| dbg.name.to_ident_string())
}

/// Enumerates the kinds of Mir bodies. TODO: use const generics
/// instead of an open list of types.
pub mod mir_kinds {
    use crate::prelude::{JsonSchema, derive_group};

    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct Built;

    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct Promoted;

    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct Elaborated;

    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct Optimized;

    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct CTFE;

    /// MIR of unknown origin. `body()` returns `None`; this is used to get the bodies provided via
    /// `from_mir` but not attempt to get MIR for functions etc.
    #[derive_group(Serializers)]
    #[derive(Clone, Copy, Debug, JsonSchema)]
    pub struct Unknown;

    #[cfg(feature = "rustc")]
    pub use rustc::*;
    #[cfg(feature = "rustc")]
    mod rustc {
        use super::*;
        use rustc_middle::mir::Body;
        use rustc_middle::ty::TyCtxt;
        use rustc_span::def_id::DefId;

        /// Maps each body-kind marker type to the rustc query that produces
        /// that flavor of MIR.
        pub trait IsMirKind: Clone + std::fmt::Debug + std::any::Any + Send + Sync {
            // CPS to deal with stealable bodies cleanly.
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option;
        }

        // `mir_built` only exists for local items and its result can be stolen by later
        // queries, hence the two guards below.
        impl IsMirKind for Built {
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                let id = id.as_local()?;
                let steal = tcx.mir_built(id);
                if steal.is_stolen() {
                    None
                } else {
                    Some(f(&steal.borrow()))
                }
            }
        }

        impl IsMirKind for Promoted {
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                let id = id.as_local()?;
                let (steal, _) = tcx.mir_promoted(id);
                if steal.is_stolen() {
                    None
                } else {
                    Some(f(&steal.borrow()))
                }
            }
        }

        impl IsMirKind for Elaborated {
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                let id = id.as_local()?;
                let steal = tcx.mir_drops_elaborated_and_const_checked(id);
                if steal.is_stolen() {
                    None
                } else {
                    Some(f(&steal.borrow()))
                }
            }
        }

        // `optimized_mir` and `mir_for_ctfe` are not stealable and work cross-crate.
        impl IsMirKind for Optimized {
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                Some(f(tcx.optimized_mir(id)))
            }
        }

        impl IsMirKind for CTFE {
            fn get_mir<'tcx, T>(
                tcx: TyCtxt<'tcx>,
                id: DefId,
                f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                Some(f(tcx.mir_for_ctfe(id)))
            }
        }

        // `Unknown` deliberately never fetches a body.
        impl IsMirKind for Unknown {
            fn get_mir<'tcx, T>(
                _tcx: TyCtxt<'tcx>,
                _id: DefId,
                _f: impl FnOnce(&Body<'tcx>) -> T,
            ) -> Option {
                None
            }
        }
    }
}

#[cfg(feature = "rustc")]
pub use mir_kinds::IsMirKind;

/// The contents of `Operand::Const`.
#[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ConstOperand { pub span: Span, pub ty: Ty, pub kind: ConstOperandKind, } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum ConstOperandKind { /// An evaluated constant represented as an expression. Value(ConstantExpr), /// Part of a MIR body that was promoted to be a constant. May not be evaluatable because of /// generics. /// It's a reference to the `DefId` of the constant. Note that rustc does not give a `DefId` to /// promoted constants, but we do in hax. Promoted(ItemRef), } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>> SInto for mir::ConstOperand<'tcx> { fn sinto(&self, s: &S) -> ConstOperand { let kind = translate_mir_const(s, self.span, self.const_); ConstOperand { span: self.span.sinto(s), ty: self.const_.ty().sinto(s), kind, } } } /// Retrieve the MIR for a promoted body. #[cfg(feature = "rustc")] pub fn get_promoted_mir<'tcx>( tcx: ty::TyCtxt<'tcx>, def_id: RDefId, promoted_id: mir::Promoted, ) -> mir::Body<'tcx> { if let Some(local_def_id) = def_id.as_local() { let (_, promoteds) = tcx.mir_promoted(local_def_id); if !promoteds.is_stolen() { promoteds.borrow()[promoted_id].clone() } else { tcx.promoted_mir(def_id)[promoted_id].clone() } } else { tcx.promoted_mir(def_id)[promoted_id].clone() } } #[cfg(feature = "rustc")] /// Translate a MIR constant. 
fn translate_mir_const<'tcx, S: UnderOwnerState<'tcx>>( s: &S, span: rustc_span::Span, konst: mir::Const<'tcx>, ) -> ConstOperandKind { use ConstOperandKind::{Promoted, Value}; use rustc_middle::mir::Const; let tcx = s.base().tcx; match konst { Const::Val(const_value, ty) => { let evaluated = const_value_to_constant_expr(s, ty, const_value, span); match evaluated.report_err() { Ok(val) => Value(val), Err(err) => { warning!( s[span], "Couldn't convert constant back to an expression"; {const_value, ty, err} ); Value( ConstantExprKind::Todo("ConstEvalVal".into()) .decorate(ty.sinto(s), span.sinto(s)), ) } } } Const::Ty(_ty, c) => Value(c.sinto(s)), Const::Unevaluated(ucv, ty) => { use crate::rustc_middle::query::Key; let span = span.substitute_dummy( tcx.def_ident_span(ucv.def) .unwrap_or_else(|| ucv.def.default_span(tcx)), ); match ucv.promoted { Some(promoted) => { let item = translate_item_ref(s, ucv.def, ucv.args); let item = item.mutate_def_id(s, |def_id| { // Construct a def_id for the promoted constant. *def_id = def_id.make_promoted_child(s, promoted.sinto(s)); }); Promoted(item) } None => match translate_constant_reference(s, span, ucv.shrink()) { Some(val) => Value(val), None => match eval_mir_constant(s, konst) { Some(val) => translate_mir_const(s, span, val), // TODO: This is triggered when compiling using `generic_const_exprs`. We // might be able to get a MIR body from the def_id. 
None => Value( ConstantExprKind::Todo("TranslateUneval".into()) .decorate(ty.sinto(s), span.sinto(s)), ), }, }, } } } } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Body<'tcx>, state: S as s)] pub struct MirBody { pub span: Span, #[map({ x.iter_enumerated().map(|(local, local_decl)| { let mut local_decl = local_decl.sinto(s); local_decl.name = name_of_local(local, &self.var_debug_info); local_decl }).collect() })] pub local_decls: IndexVec, pub arg_count: usize, pub basic_blocks: BasicBlocks, pub source_scopes: IndexVec, pub tainted_by_errors: Option, #[value(std::marker::PhantomData)] pub _kind: std::marker::PhantomData, } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::SourceScopeData<'tcx>, state: S as s)] pub struct SourceScopeData { pub span: Span, pub parent_scope: Option, pub inlined_parent_scope: Option, } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Operand<'tcx>, state: S as s)] pub enum Operand { Copy(Place), Move(Place), Constant(ConstOperand), } #[cfg(feature = "rustc")] impl Operand { pub(crate) fn ty(&self) -> &Ty { match self { Operand::Copy(p) | Operand::Move(p) => &p.ty, Operand::Constant(c) => &c.ty, } } } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Terminator<'tcx>, state: S as s)] pub struct Terminator { pub source_info: SourceInfo, pub kind: TerminatorKind, } #[cfg(feature = "rustc")] fn translate_terminator_kind_call<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>( s: &S, terminator: &rustc_middle::mir::TerminatorKind<'tcx>, ) -> TerminatorKind { let tcx = s.base().tcx; let mir::TerminatorKind::Call { func, args, destination, 
target, unwind, fn_span, .. } = terminator else { unreachable!() }; let ty = func.ty(&s.mir().local_decls, tcx); let hax_ty: crate::Ty = ty.sinto(s); let sig = match hax_ty.kind() { TyKind::Arrow(sig) => sig, TyKind::FnDef { fn_sig, .. } => fn_sig, TyKind::Closure(args) => &args.fn_sig, _ => supposely_unreachable_fatal!( s, "TerminatorKind_Call_expected_fn_type"; { ty } ), }; let fun_op = if let ty::TyKind::FnDef(def_id, generics) = ty.kind() { // The type of the value is one of the singleton types that corresponds to each function, // which is enough information. let item = translate_item_ref(s, *def_id, *generics); FunOperand::Static(item) } else { use mir::Operand; match func { Operand::Constant(_) => { unimplemented!("{:?}", func); } Operand::Move(place) => { // Function pointer or closure. let place = place.sinto(s); FunOperand::DynamicMove(place) } Operand::Copy(_place) => { unimplemented!("{:?}", func); } } }; let late_bound_generics = sig .bound_vars .iter() .map(|var| match var { BoundVariableKind::Region(r) => r, BoundVariableKind::Ty(..) | BoundVariableKind::Const => { supposely_unreachable_fatal!( s, "non_lifetime_late_bound"; { var } ) } }) .map(|_| { GenericArg::Lifetime(Region { kind: RegionKind::ReErased, }) }) .collect(); TerminatorKind::Call { fun: fun_op, late_bound_generics, args: args.sinto(s), destination: destination.sinto(s), target: target.sinto(s), unwind: unwind.sinto(s), fn_span: fn_span.sinto(s), } } #[cfg(feature = "rustc")] fn translate_terminator_kind_drop<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>( s: &S, terminator: &rustc_middle::mir::TerminatorKind<'tcx>, ) -> TerminatorKind { let tcx = s.base().tcx; let mir::TerminatorKind::Drop { place, target, unwind, .. 
} = terminator else { unreachable!() }; let local_decls = &s.mir().local_decls; let place_ty = place.ty(local_decls, tcx).ty; let destruct_trait = tcx.lang_items().destruct_trait().unwrap(); let impl_expr = solve_trait( s, ty::Binder::dummy(ty::TraitRef::new(tcx, destruct_trait, [place_ty])), ); TerminatorKind::Drop { place: place.sinto(s), impl_expr, target: target.sinto(s), unwind: unwind.sinto(s), } } // We don't use the LitIntType on purpose (we don't want the "unsuffixed" case) #[derive_group(Serializers)] #[derive(Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum ScalarTy { Bool, Int(IntTy), Uint(UintTy), Char, } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ScalarInt { /// Little-endian representation of the integer pub data_le_bytes: [u8; 16], pub int_ty: ScalarTy, } /// Translate a `SwitchInt` terminator. #[cfg(feature = "rustc")] fn translate_switchint<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>( s: &S, discr: &mir::Operand<'tcx>, targets: &mir::SwitchTargets, ) -> TerminatorKind { let discr = discr.sinto(s); let ty = match discr.ty().kind() { TyKind::Bool => ScalarTy::Bool, TyKind::Int(ty) => ScalarTy::Int(*ty), TyKind::Uint(ty) => ScalarTy::Uint(*ty), TyKind::Char => ScalarTy::Char, ty => fatal!(s, "Unexpected switch_ty: {:?}", ty), }; // Convert all the test values to the proper values. let otherwise = targets.otherwise().sinto(s); let targets_vec: Vec<(ScalarInt, BasicBlock)> = targets .iter() .map(|(v, b)| { let v = ScalarInt { data_le_bytes: v.to_le_bytes(), int_ty: ty, }; (v, b.sinto(s)) }) .collect(); TerminatorKind::SwitchInt { discr, ty, targets: targets_vec, otherwise, } } /// A value of type `fn<...> A -> B` that can be called. #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum FunOperand { /// Call to a statically-known function. Static(ItemRef), /// Use of a closure or a function pointer value. Counts as a move from the given place. 
    DynamicMove(Place),
}

/// Reflects [`rustc_middle::mir::UnwindAction`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::mir::UnwindAction, state: S as _s)]
pub enum UnwindAction {
    Continue,
    Unreachable,
    Terminate(UnwindTerminateReason),
    Cleanup(BasicBlock),
}

/// Reflects [`rustc_middle::mir::TerminatorKind`]. The `SwitchInt`, `Drop` and `Call`
/// variants are translated by dedicated helpers (see the `custom_arm` attributes).
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::TerminatorKind<'tcx>, state: S as s)]
pub enum TerminatorKind {
    Goto {
        target: BasicBlock,
    },
    #[custom_arm(
        rustc_middle::mir::TerminatorKind::SwitchInt { discr, targets } => {
            translate_switchint(s, discr, targets)
        }
    )]
    SwitchInt {
        /// The value being switched on.
        discr: Operand,
        /// The type that is being switched on.
        ty: ScalarTy,
        /// Possible success cases.
        targets: Vec<(ScalarInt, BasicBlock)>,
        /// If none of the `targets` match, branch to that block.
        otherwise: BasicBlock,
    },
    Return,
    Unreachable,
    #[custom_arm(
        x @ rustc_middle::mir::TerminatorKind::Drop { .. } => {
            translate_terminator_kind_drop(s, x)
        }
    )]
    Drop {
        place: Place,
        /// Implementation of `place.ty(): Drop`.
        impl_expr: ImplExpr,
        target: BasicBlock,
        unwind: UnwindAction,
    },
    #[custom_arm(
        x @ rustc_middle::mir::TerminatorKind::Call { .. } => {
            translate_terminator_kind_call(s, x)
        }
    )]
    Call {
        fun: FunOperand,
        /// A `FunOperand` is a value of type `fn<...> A -> B`. The generics in `<...>` are called
        /// "late-bound" and are instantiated anew at each call site. This list provides the
        /// generics used at this call-site. They are all lifetimes and at the time of writing are
        /// all erased lifetimes.
        late_bound_generics: Vec,
        args: Vec>,
        destination: Place,
        target: Option,
        unwind: UnwindAction,
        fn_span: Span,
    },
    TailCall {
        func: Operand,
        args: Vec>,
        fn_span: Span,
    },
    Assert {
        cond: Operand,
        expected: bool,
        msg: AssertMessage,
        target: BasicBlock,
        unwind: UnwindAction,
    },
    Yield {
        value: Operand,
        resume: BasicBlock,
        resume_arg: Place,
        drop: Option,
    },
    CoroutineDrop,
    FalseEdge {
        real_target: BasicBlock,
        imaginary_target: BasicBlock,
    },
    FalseUnwind {
        real_target: BasicBlock,
        unwind: UnwindAction,
    },
    UnwindResume,
    UnwindTerminate(UnwindTerminateReason),
    InlineAsm {
        template: Vec,
        operands: Vec,
        options: InlineAsmOptions,
        line_spans: Vec,
        targets: Vec,
        unwind: UnwindAction,
    },
}

/// Reflects [`rustc_middle::mir::Statement`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Statement<'tcx>, state: S as s)]
pub struct Statement {
    pub source_info: SourceInfo,
    // Boxed to keep `Statement` small; the kind enum is large.
    #[map(Box::new(x.sinto(s)))]
    pub kind: Box,
}

/// Reflects [`rustc_middle::mir::StatementKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::StatementKind<'tcx>, state: S as s)]
pub enum StatementKind {
    Assign((Place, Rvalue)),
    FakeRead((FakeReadCause, Place)),
    SetDiscriminant {
        place: Place,
        variant_index: VariantIdx,
    },
    StorageLive(Local),
    StorageDead(Local),
    Retag(RetagKind, Place),
    PlaceMention(Place),
    AscribeUserType((Place, UserTypeProjection), Variance),
    Coverage(CoverageKind),
    Intrinsic(NonDivergingIntrinsic),
    ConstEvalCounter,
    BackwardIncompatibleDropHint {
        place: Place,
    },
    Nop,
}

/// Reflects [`rustc_middle::mir::NonDivergingIntrinsic`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::NonDivergingIntrinsic<'tcx>, state: S as s)]
pub enum NonDivergingIntrinsic {
    Assume(Operand),
    CopyNonOverlapping(CopyNonOverlapping),
}

#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> +
HasMir<'tcx>>, from: rustc_middle::mir::CopyNonOverlapping<'tcx>, state: S as s)]
pub struct CopyNonOverlapping {
    pub src: Operand,
    pub dst: Operand,
    pub count: Operand,
}

#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct Place {
    /// The type of the element on which we apply the projection given by `kind`
    pub ty: Ty,
    pub kind: PlaceKind,
}

// Unlike rustc's flat projection list, hax nests places: each projection step wraps the
// previous place together with its type.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum PlaceKind {
    Local(Local),
    Projection {
        place: Box,
        kind: ProjectionElem,
    },
}

#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ProjectionElemFieldKind {
    Tuple(FieldIdx),
    Adt {
        typ: DefId,
        variant: Option,
        index: FieldIdx,
    },
    /// Get access to one of the fields of the state of a closure
    ClosureState(FieldIdx),
}

#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ProjectionElem {
    Deref,
    Field(ProjectionElemFieldKind),
    Index(Local),
    ConstantIndex {
        offset: u64,
        min_length: u64,
        from_end: bool,
    },
    Subslice {
        from: u64,
        to: u64,
        from_end: bool,
    },
    Downcast(Option, VariantIdx),
    OpaqueCast,
}

// refactor
//
// Folds rustc's projection list into the nested `Place` representation, threading the
// `PlaceTy` through each step so every intermediate place carries its type. `Downcast`
// steps do not produce a node: the variant is tracked in `PlaceTy` and consumed by the
// following `Field` step.
#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>> SInto for rustc_middle::mir::Place<'tcx> {
    #[tracing::instrument(level = "info", skip(s))]
    fn sinto(&self, s: &S) -> Place {
        let tcx = s.base().tcx;
        let local_decls = &s.mir().local_decls;
        let mut place_ty: mir::PlaceTy = mir::Place::from(self.local).ty(local_decls, tcx);
        let mut place = Place {
            ty: place_ty.ty.sinto(s),
            kind: PlaceKind::Local(self.local.sinto(s)),
        };
        for elem in self.projection.as_slice() {
            use rustc_middle::mir::ProjectionElem::*;
            let projected_place_ty = place_ty.projection_ty(tcx, *elem);
            if matches!(elem, Downcast { .. }) {
                // We keep the same `Place`, the variant is tracked in the `PlaceTy` and we can
                // access it next loop iteration.
            } else {
                let elem_kind = match elem {
                    Deref => ProjectionElem::Deref,
                    Field(index, _) => {
                        // The meaning of a field projection depends on the base type:
                        // ADT field, tuple component, or captured closure state.
                        let field_pj = match place_ty.ty.kind() {
                            ty::Adt(adt_def, _) => {
                                let variant = place_ty.variant_index;
                                // Structs/unions carry no variant index; enums must have one.
                                assert!(
                                    ((adt_def.is_struct() || adt_def.is_union()) && variant.is_none())
                                        || (adt_def.is_enum() && variant.is_some())
                                );
                                ProjectionElemFieldKind::Adt {
                                    typ: adt_def.did().sinto(s),
                                    variant: variant.map(|id| id.sinto(s)),
                                    index: index.sinto(s),
                                }
                            }
                            ty::Tuple(_types) => ProjectionElemFieldKind::Tuple(index.sinto(s)),
                            // We get there when we access one of the fields of the state
                            // captured by a closure.
                            ty::Closure(..) => {
                                ProjectionElemFieldKind::ClosureState(index.sinto(s))
                            }
                            ty_kind => supposely_unreachable_fatal!(
                                s, "ProjectionElemFieldBadType";
                                {index, ty_kind, &place_ty, &place}
                            ),
                        };
                        ProjectionElem::Field(field_pj)
                    }
                    Index(local) => ProjectionElem::Index(local.sinto(s)),
                    ConstantIndex {
                        offset,
                        min_length,
                        from_end,
                    } => ProjectionElem::ConstantIndex {
                        offset: *offset,
                        min_length: *min_length,
                        from_end: *from_end,
                    },
                    Subslice { from, to, from_end } => ProjectionElem::Subslice {
                        from: *from,
                        to: *to,
                        from_end: *from_end,
                    },
                    OpaqueCast(..) => ProjectionElem::OpaqueCast,
                    // Handled by the `matches!` guard above.
                    Downcast { .. } => unreachable!(),
                    UnwrapUnsafeBinder {
                        ..
                    } => panic!("unsupported feature: unsafe binders"),
                };
                place = Place {
                    ty: projected_place_ty.ty.sinto(s),
                    kind: PlaceKind::Projection {
                        place: Box::new(place),
                        kind: elem_kind,
                    },
                };
            }
            place_ty = projected_place_ty;
        }
        place
    }
}

/// Reflects [`rustc_middle::mir::AggregateKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::AggregateKind<'tcx>, state: S as s)]
pub enum AggregateKind {
    Array(Ty),
    Tuple,
    #[custom_arm(rustc_middle::mir::AggregateKind::Adt(def_id, vid, generics, annot, fid) => {
        let adt_kind = s.base().tcx.adt_def(def_id).adt_kind().sinto(s);
        let item = translate_item_ref(s, *def_id, generics);
        AggregateKind::Adt(
            item,
            vid.sinto(s),
            adt_kind,
            annot.sinto(s),
            fid.sinto(s),
        )
    })]
    Adt(
        ItemRef,
        VariantIdx,
        AdtKind,
        Option,
        Option,
    ),
    #[custom_arm(rustc_middle::mir::AggregateKind::Closure(def_id, generics) => {
        let closure = generics.as_closure();
        let args = ClosureArgs::sfrom(s, *def_id, closure);
        AggregateKind::Closure(args)
    })]
    Closure(ClosureArgs),
    #[custom_arm(FROM_TYPE::Coroutine(def_id, generics) => TO_TYPE::Coroutine(translate_item_ref(s, *def_id, generics)),)]
    Coroutine(ItemRef),
    #[custom_arm(FROM_TYPE::CoroutineClosure(def_id, generics) => TO_TYPE::CoroutineClosure(translate_item_ref(s, *def_id, generics)),)]
    CoroutineClosure(ItemRef),
    RawPtr(Ty, Mutability),
}

/// Reflects [`rustc_middle::mir::CastKind`]; translated by `CastKind::sfrom` below
/// because the `PointerCoercion` case needs the source and target types.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum CastKind {
    PointerExposeProvenance,
    PointerWithExposedProvenance,
    PointerCoercion(PointerCoercion, CoercionSource),
    IntToInt,
    FloatToInt,
    FloatToFloat,
    IntToFloat,
    PtrToPtr,
    FnPtrToPtr,
    Transmute,
    Subtype,
}

#[cfg(feature = "rustc")]
impl CastKind {
    fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        kind: mir::CastKind,
        src_ty: ty::Ty<'tcx>,
        tgt_ty: ty::Ty<'tcx>,
    ) -> CastKind {
        match kind {
            mir::CastKind::PointerExposeProvenance => CastKind::PointerExposeProvenance,
            mir::CastKind::PointerWithExposedProvenance => CastKind::PointerWithExposedProvenance,
            mir::CastKind::PointerCoercion(coercion, y) => {
                // The coercion itself needs the source/target types to be translated.
                let coercion = PointerCoercion::sfrom(s, coercion, src_ty, tgt_ty);
                CastKind::PointerCoercion(coercion, y.sinto(s))
            }
            mir::CastKind::IntToInt => CastKind::IntToInt,
            mir::CastKind::FloatToInt => CastKind::FloatToInt,
            mir::CastKind::FloatToFloat => CastKind::FloatToFloat,
            mir::CastKind::IntToFloat => CastKind::IntToFloat,
            mir::CastKind::PtrToPtr => CastKind::PtrToPtr,
            mir::CastKind::FnPtrToPtr => CastKind::FnPtrToPtr,
            mir::CastKind::Transmute => CastKind::Transmute,
            mir::CastKind::Subtype => CastKind::Subtype,
        }
    }
}

/// Reflects [`rustc_middle::mir::CoercionSource`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S>, from: rustc_middle::mir::CoercionSource, state: S as _s)]
pub enum CoercionSource {
    AsCast,
    Implicit,
}

/// Reflects [`rustc_middle::mir::NullOp`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::NullOp<'tcx>, state: S as s)]
pub enum NullOp {
    OffsetOf(Vec<(VariantIdx, FieldIdx)>),
    UbChecks,
    ContractChecks,
}

/// Reflects [`rustc_middle::mir::Rvalue`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Rvalue<'tcx>, state: S as s)]
pub enum Rvalue {
    Use(Operand),
    Repeat(Operand, ConstantExpr),
    Ref(Region, BorrowKind, Place),
    ThreadLocalRef(DefId),
    RawPtr(RawPtrKind, Place),
    // Casts are translated via `CastKind::sfrom`, which needs the operand's source type.
    #[custom_arm(
        FROM_TYPE::Cast(kind, op, tgt_ty) => {
            let src_ty = op.ty(&*s.mir(), s.base().tcx);
            let kind = CastKind::sfrom(s, *kind, src_ty, *tgt_ty);
            TO_TYPE::Cast(kind, op.sinto(s), tgt_ty.sinto(s))
        },
    )]
    Cast(CastKind, Operand, Ty),
    BinaryOp(BinOp, (Operand, Operand)),
    NullaryOp(NullOp, Ty),
    UnaryOp(UnOp, Operand),
    Discriminant(Place),
    Aggregate(AggregateKind, IndexVec),
    ShallowInitBox(Operand, Ty),
    CopyForDeref(Place),
    WrapUnsafeBinder(Operand, Ty),
}

/// Reflects [`rustc_middle::mir::RawPtrKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::mir::RawPtrKind, state: S as _s)]
pub enum RawPtrKind {
    Mut,
    Const,
    FakeForPtrMetadata,
}

/// Reflects [`rustc_middle::mir::BasicBlockData`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::BasicBlockData<'tcx>, state: S as s)]
pub struct BasicBlockData {
    pub statements: Vec,
    pub terminator: Option,
    pub is_cleanup: bool,
}

// Newtype wrappers around rustc's index types.
make_idx_wrapper!(rustc_middle::mir, BasicBlock);
make_idx_wrapper!(rustc_middle::mir, SourceScope);
make_idx_wrapper!(rustc_middle::mir, Local);
make_idx_wrapper!(rustc_middle::ty, UserTypeAnnotationIndex);
make_idx_wrapper!(rustc_abi, FieldIdx);

/// Reflects [`rustc_middle::mir::UnOp`]
#[derive_group(Serializers)]
#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]
#[args(<'slt, S: UnderOwnerState<'slt>>, from: mir::UnOp, state: S as _s)]
pub enum UnOp {
    Not,
    Neg,
    PtrMetadata,
}

/// Reflects [`rustc_middle::mir::BinOp`]
#[derive_group(Serializers)]
#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]
#[args(<'slt, S: UnderOwnerState<'slt>>, from: mir::BinOp, state: S as _s)]
pub enum BinOp {
    Add,
    AddUnchecked,
    AddWithOverflow,
    Sub,
    SubUnchecked,
    SubWithOverflow,
    Mul,
    MulUnchecked,
    MulWithOverflow,
    Div,
    Rem,
    BitXor,
    BitAnd,
    BitOr,
    Shl,
    ShlUnchecked,
    Shr,
    ShrUnchecked,
    Eq,
    Lt,
    Le,
    Ne,
    Ge,
    Gt,
    Cmp,
    Offset,
}

/// Reflects [`rustc_middle::mir::AssignOp`]
#[derive_group(Serializers)]
#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: BaseState<'tcx>>, from: mir::AssignOp, state: S as _s)]
pub enum AssignOp {
    AddAssign,
    SubAssign,
    MulAssign,
    DivAssign,
    RemAssign,
    BitXorAssign,
    BitAndAssign,
    BitOrAssign,
    ShlAssign,
    ShrAssign,
}

/// Reflects [`rustc_middle::mir::BorrowKind`]
#[derive(AdtInto)]
#[args(, from: mir::BorrowKind, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum BorrowKind {
    Shared,
    Fake(FakeBorrowKind),
    Mut { kind: MutBorrowKind },
}

/// Reflects [`rustc_middle::mir::MutBorrowKind`]
#[derive(AdtInto)]
#[args(, from:
rustc_middle::mir::MutBorrowKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum MutBorrowKind {
    Default,
    TwoPhaseBorrow,
    ClosureCapture,
}

/// Reflects [`rustc_middle::mir::FakeBorrowKind`]
#[derive(AdtInto)]
#[args(, from: rustc_middle::mir::FakeBorrowKind, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum FakeBorrowKind {
    /// A shared (deep) borrow. Data must be immutable and is aliasable.
    Deep,
    /// The immediately borrowed place must be immutable, but projections from
    /// it don't need to be. This is used to prevent match guards from replacing
    /// the scrutinee. For example, a fake borrow of `a.b` doesn't
    /// conflict with a mutable borrow of `a.b.c`.
    Shallow,
}

// Types we have not translated yet: `sinto_todo!` generates placeholder SInto impls.
sinto_todo!(rustc_ast::ast, InlineAsmTemplatePiece);
sinto_todo!(rustc_ast::ast, InlineAsmOptions);
sinto_todo!(rustc_middle::mir, InlineAsmOperand<'tcx>);
sinto_todo!(rustc_middle::mir, AssertMessage<'tcx>);
sinto_todo!(rustc_middle::mir, FakeReadCause);
sinto_todo!(rustc_middle::mir, RetagKind);
sinto_todo!(rustc_middle::mir, UserTypeProjection);
sinto_todo!(rustc_middle::mir::coverage, CoverageKind);

================================================
FILE: frontend/exporter/src/types/mod.rs
================================================
mod attributes;
mod def_id;
mod hir;
mod mir;
mod new;
pub(crate) mod serialize_int;
mod span;
mod thir;
mod ty;

pub use attributes::*;
pub use def_id::*;
pub use hir::*;
pub use mir::*;
pub use new::*;
pub use span::*;
pub use thir::*;
pub use ty::*;

================================================
FILE: frontend/exporter/src/types/new/full_def.rs
================================================
use crate::prelude::*;

#[cfg(feature = "rustc")]
use rustc_hir::def::DefKind as RDefKind;
#[cfg(feature = "rustc")]
use rustc_middle::ty;
#[cfg(feature = "rustc")]
use rustc_span::def_id::DefId as RDefId;
#[cfg(feature = "rustc")]
use std::sync::Arc;

/// Hack: charon used to rely on the old `()` default everywhere. To avoid big merge conflicts with
/// in-flight PRs we're changing the default here. Eventually this should be removed.
type DefaultFullDefBody = MirBody;

/// Gathers a lot of definition information about a [`rustc_hir::def_id::DefId`].
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct FullDef {
    /// A reference to the current item. If the item was provided with generic args, they are
    /// stored here; otherwise the args are the identity_args for this item.
    pub this: ItemRef,
    /// The span of the definition of this item (e.g. for a function this is its signature).
    pub span: Span,
    /// The span of the whole definition (including e.g. the function body).
    pub source_span: Option,
    /// The text of the whole definition.
    pub source_text: Option,
    /// Attributes on this definition, if applicable.
    pub attributes: Vec,
    /// Visibility of the definition, for definitions where this makes sense.
    pub visibility: Option,
    /// If this definition is a lang item, we store the identifier, e.g. `sized`.
    pub lang_item: Option,
    /// If this definition is a diagnostic item, we store the identifier, e.g. `box_new`.
    pub diagnostic_item: Option,
    pub kind: FullDefKind,
}

#[cfg(feature = "rustc")]
/// Construct the `FullDefKind` for this item. If `args` is `Some`, the returned `FullDef` will be
/// instantiated with the provided generics.
fn translate_full_def<'tcx, S, Body>(
    s: &S,
    def_id: &DefId,
    args: Option>,
) -> FullDef
where
    S: UnderOwnerState<'tcx>,
    Body: IsBody + TypeMappable,
{
    let tcx = s.base().tcx;
    let rust_def_id = def_id.underlying_rust_def_id();
    // These are filled in by one of the three branches below: synthetic item,
    // promoted constant, or a regular rustc definition.
    let source_span;
    let attributes;
    let visibility;
    let lang_item;
    let diagnostic_item;
    let kind;
    if let Some(item) = def_id.as_synthetic(s) {
        // Synthetic items (arrays, slices, tuples) are modeled as ADTs that hax
        // fabricates itself; rustc has no real definition to query.
        let adt_kind = match item {
            SyntheticItem::Array => AdtKind::Array,
            SyntheticItem::Slice => AdtKind::Slice,
            SyntheticItem::Tuple(..) => AdtKind::Tuple,
        };
        let param_env = get_param_env(s, args);
        let destruct_impl = {
            let destruct_trait = tcx.lang_items().destruct_trait().unwrap();
            let type_of_self = inst_binder(tcx, s.typing_env(), args, tcx.type_of(rust_def_id));
            virtual_impl_for(s, ty::TraitRef::new(tcx, destruct_trait, [type_of_self]))
        };
        kind = FullDefKind::Adt {
            param_env,
            adt_kind,
            // Synthetic ADTs carry no variants.
            variants: [].into_iter().collect(),
            flags: AdtFlags::AdtFlags {
                todo: String::new(),
            },
            repr: ReprOptions {
                int_specified: false,
                typ: Ty::new(s, TyKind::Int(IntTy::Isize)),
                align: None,
                pack: None,
                flags: Default::default(),
            },
            drop_glue: get_drop_glue_shim(s, args),
            destruct_impl,
        };
        source_span = None;
        attributes = Default::default();
        visibility = Default::default();
        lang_item = Default::default();
        diagnostic_item = Default::default();
    } else if let Some(promoted_id) = def_id.promoted_id() {
        // Promoted constants: derive the param env from the parent item, since the
        // promoted has no generics or predicates of its own (empty `params`/`predicates`).
        let parent_def = def_id
            .parent
            .as_ref()
            .unwrap()
            .full_def_maybe_instantiated::<_, Body>(s, args);
        let parent_param_env = parent_def.param_env().unwrap();
        let param_env = ParamEnv {
            generics: TyGenerics {
                parent: def_id.parent.clone(),
                parent_count: parent_param_env.generics.count_total_params(),
                params: vec![],
                has_self: false,
                has_late_bound_regions: None,
            },
            predicates: GenericPredicates { predicates: vec![] },
            parent: Some(parent_def.this().clone()),
        };
        let body = get_promoted_mir(tcx, rust_def_id, promoted_id.as_rust_promoted_id());
        let body = substitute(tcx, s.typing_env(), args, body);
        source_span = Some(body.span);
        let
        // The promoted constant's type is read off its MIR: local zero is the
        // return place of the body.
        ty: Ty = body.local_decls[rustc_middle::mir::Local::ZERO].ty.sinto(s);
        kind = FullDefKind::Const {
            param_env,
            ty,
            kind: ConstKind::PromotedConst,
            body: Body::from_mir(s, body),
            value: None,
        };
        // None of these make sense for a promoted constant.
        attributes = Default::default();
        visibility = Default::default();
        lang_item = Default::default();
        diagnostic_item = Default::default();
    } else {
        // Regular definition: query rustc for every piece of metadata.
        kind = translate_full_def_kind(s, rust_def_id, args);
        let def_kind = get_def_kind(tcx, rust_def_id);
        source_span = rust_def_id.as_local().map(|ldid| tcx.source_span(ldid));
        attributes = get_def_attrs(tcx, rust_def_id, def_kind).sinto(s);
        visibility = get_def_visibility(tcx, rust_def_id, def_kind);
        lang_item = s
            .base()
            .tcx
            .as_lang_item(rust_def_id)
            .map(|litem| litem.name())
            .sinto(s);
        diagnostic_item = tcx.get_diagnostic_name(rust_def_id).sinto(s);
    }
    // Only snippet spans from the root syntax context (i.e. not macro-expanded).
    let source_text = source_span
        .filter(|source_span| source_span.ctxt().is_root())
        .and_then(|source_span| tcx.sess.source_map().span_to_snippet(source_span).ok());
    let this = if can_have_generics(tcx, rust_def_id) {
        let args_or_default = args.unwrap_or_else(|| {
            if matches!(def_id.kind, DefKind::Closure) {
                // For closures we use the args of their parent. Otherwise closure items get some
                // special generics used for inference that we don't care about.
                ty::GenericArgs::identity_for_item(tcx, tcx.typeck_root_def_id(rust_def_id))
            } else {
                ty::GenericArgs::identity_for_item(tcx, rust_def_id)
            }
        });
        let item = translate_item_ref(s, rust_def_id, args_or_default);
        // Tricky: hax's DefId has more info (could be a promoted const), we must be careful to use
        // the input DefId instead of the one derived from `rust_def_id`.
        item.with_def_id(s, def_id)
    } else {
        ItemRef::dummy_without_generics(s, def_id.clone())
    };
    FullDef {
        this,
        span: def_id.def_span(s),
        source_span: source_span.sinto(s),
        source_text,
        attributes,
        visibility,
        lang_item,
        diagnostic_item,
        kind,
    }
}

#[cfg(feature = "rustc")]
impl DefId {
    /// Get the span of the definition of this item. This is the span used in diagnostics when
    /// referring to the item.
    ///
    /// Falls back to `DUMMY_SP` for the cases where asking rustc for a span would panic
    /// (foreign modules and synthetic HIR nodes).
    pub fn def_span<'tcx>(&self, s: &impl BaseState<'tcx>) -> Span {
        use DefKind::*;
        let tcx = s.base().tcx;
        let def_id = self.underlying_rust_def_id();
        if let ForeignMod = &self.kind {
            // This kind causes `def_span` to panic.
            rustc_span::DUMMY_SP
        } else if let Some(ldid) = def_id.as_local() {
            let hir_id = tcx.local_def_id_to_hir_id(ldid);
            if matches!(tcx.hir_node(hir_id), rustc_hir::Node::Synthetic) {
                // Synthetic items (those we create ourselves) make `def_span` panic.
                rustc_span::DUMMY_SP
            } else {
                // Unlike `tcx.def_span`, `tcx.hir_span_with_body` returns the full span of the
                // item, not only of its header.
                tcx.hir_span_with_body(hir_id)
            }
        } else {
            tcx.def_span(def_id)
        }
        .sinto(s)
    }

    /// Get the full definition of this item (polymorphic: no generic instantiation).
    pub fn full_def<'tcx, S, Body>(&self, s: &S) -> Arc>
    where
        Body: IsBody + TypeMappable,
        S: BaseState<'tcx>,
    {
        self.full_def_maybe_instantiated(s, None)
    }

    /// Get the full definition of this item, instantiated if `args` is `Some`.
    pub fn full_def_maybe_instantiated<'tcx, S, Body>(
        &self,
        s: &S,
        args: Option>,
    ) -> Arc>
    where
        Body: IsBody + TypeMappable,
        S: BaseState<'tcx>,
    {
        let rust_def_id = self.underlying_rust_def_id();
        let s = &s.with_owner_id(rust_def_id);
        // Results are memoized per (promoted-const id, instantiation args) in the
        // state's cache; translation only runs on a cache miss.
        let cache_key = (self.promoted_id(), args);
        if let Some(def) =
            s.with_cache(|cache| cache.full_defs.entry(cache_key).or_default().get().cloned())
        {
            return def;
        }
        let def = Arc::new(translate_full_def(s, self, args));
        s.with_cache(|cache| {
            cache
                .full_defs
                .entry(cache_key)
                .or_default()
                .insert(def.clone());
        });
        def
    }
}

#[cfg(feature = "rustc")]
impl ItemRef {
    /// Get the full definition of the item, instantiated with the provided generics.
    pub fn instantiated_full_def<'tcx, S, Body>(&self, s: &S) -> Arc>
    where
        Body: IsBody + TypeMappable,
        S: BaseState<'tcx>,
    {
        let args = self.rustc_args(s);
        self.def_id.full_def_maybe_instantiated(s, Some(args))
    }
}

/// The combination of type generics and related predicates.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct ParamEnv {
    /// Generic parameters of the item.
    pub generics: TyGenerics,
    /// Required predicates for the item (see `traits::utils::required_predicates`).
    pub predicates: GenericPredicates,
    /// A reference to the parent of this item, with appropriate args.
    pub parent: Option,
}

/// The kind of a constant item.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum ConstKind {
    /// Top-level constant: `const CONST: usize = 42;`
    TopLevel,
    /// Anonymous constant, e.g. the `1 + 2` in `[u8; 1 + 2]`
    AnonConst,
    /// An inline constant, e.g. `const { 1 + 2 }`
    InlineConst,
    /// A promoted constant, e.g. the `1 + 2` in `&(1 + 2)`
    PromotedConst,
}

/// Imbues [`rustc_hir::def::DefKind`] with a lot of extra information.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum FullDefKind {
    // Types
    /// ADTs (`Struct`, `Enum` and `Union` map to this variant).
    Adt {
        param_env: ParamEnv,
        /// Whether this is a struct, enum, union, or a synthetic array/slice/tuple.
        adt_kind: AdtKind,
        /// The variants of the ADT, indexed by variant id.
        variants: IndexVec,
        flags: AdtFlags,
        repr: ReprOptions,
        /// MIR body of the builtin `drop` impl.
        drop_glue: Option,
        /// Info required to construct a virtual `Drop` impl for this adt.
        destruct_impl: Box,
    },
    /// Type alias: `type Foo = Bar;`
    TyAlias {
        param_env: ParamEnv,
        ty: Ty,
    },
    /// Type from an `extern` block.
    ForeignTy,
    /// Associated type: `trait MyTrait { type Assoc; }`
    AssocTy {
        param_env: ParamEnv,
        implied_predicates: GenericPredicates,
        associated_item: AssocItem,
        value: Option,
    },
    /// Opaque type, aka `impl Trait`.
    OpaqueTy,
    // Traits
    Trait {
        param_env: ParamEnv,
        implied_predicates: GenericPredicates,
        /// The special `Self: Trait` clause.
        self_predicate: TraitPredicate,
        /// Associated items, in definition order.
        items: Vec,
        /// `dyn Trait::Ty..>` for this trait. This is `Some` iff this
        /// trait is dyn-compatible.
        dyn_self: Option,
        /// Whether it's a `unsafe trait`, or just a `trait`.
        safety: Safety,
    },
    /// Trait alias: `trait IntIterator = Iterator;`
    TraitAlias {
        param_env: ParamEnv,
        implied_predicates: GenericPredicates,
        /// The special `Self: Trait` clause.
        self_predicate: TraitPredicate,
        /// `dyn Trait::Ty..>` for this trait. This is `Some` iff this
        /// trait is dyn-compatible.
        dyn_self: Option,
    },
    TraitImpl {
        param_env: ParamEnv,
        /// The trait that is implemented by this impl block.
        trait_pred: TraitPredicate,
        /// `dyn Trait::Ty..>` for the implemented trait. This is
        /// `Some` iff the trait is dyn-compatible.
        dyn_self: Option,
        /// The `ImplExpr`s required to satisfy the predicates on the trait declaration. E.g.:
        /// ```ignore
        /// trait Foo: Bar {}
        /// impl Foo for () {} // would supply an `ImplExpr` for `Self: Bar`.
        /// ```
        implied_impl_exprs: Vec,
        /// Associated items, in the order of the trait declaration. Includes defaulted items.
        items: Vec,
    },
    InherentImpl {
        param_env: ParamEnv,
        /// The type to which this block applies.
        ty: Ty,
        /// Associated items, in definition order.
        items: Vec,
    },
    // Functions
    /// A free function.
    Fn {
        param_env: ParamEnv,
        inline: InlineAttr,
        is_const: bool,
        sig: PolyFnSig,
        body: Option,
    },
    /// Associated function: `impl MyStruct { fn associated() {} }` or `trait Foo { fn associated()
    /// {} }`
    AssocFn {
        param_env: ParamEnv,
        associated_item: AssocItem,
        inline: InlineAttr,
        is_const: bool,
        /// The function signature when this method is used in a vtable. `None` if this method is not
        /// vtable safe. `Some(sig)` if it is vtable safe, where `sig` is the trait method declaration's
        /// signature with `Self` replaced by `dyn Trait` and associated types normalized.
        vtable_sig: Option,
        sig: PolyFnSig,
        body: Option,
    },
    /// A closure, coroutine, or coroutine-closure.
    ///
    /// Note: the (early-bound) generics of a closure are the same as those of the item in which it
    /// is defined.
    Closure {
        args: ClosureArgs,
        is_const: bool,
        /// Info required to construct a virtual `FnOnce` impl for this closure.
        fn_once_impl: Box,
        /// Info required to construct a virtual `FnMut` impl for this closure.
        fn_mut_impl: Option>,
        /// Info required to construct a virtual `Fn` impl for this closure.
        fn_impl: Option>,
        /// For `FnMut`&`Fn` closures: the MIR for the `call_once` method; it simply calls
        /// `call_mut`.
        once_shim: Option,
        /// MIR body of the builtin `drop` impl.
        drop_glue: Option,
        /// Info required to construct a virtual `Drop` impl for this closure.
        destruct_impl: Box,
    },
    // Constants
    Const {
        param_env: ParamEnv,
        ty: Ty,
        kind: ConstKind,
        body: Option,
        value: Option,
    },
    /// Associated constant: `trait MyTrait { const ASSOC: usize; }`
    AssocConst {
        param_env: ParamEnv,
        associated_item: AssocItem,
        ty: Ty,
        body: Option,
        value: Option,
    },
    Static {
        param_env: ParamEnv,
        /// Whether it's a `unsafe static`, `safe static` (inside extern only) or just a `static`.
        safety: Safety,
        /// Whether it's a `static mut` or just a `static`.
        mutability: Mutability,
        /// Whether it's an anonymous static generated for nested allocations.
        nested: bool,
        ty: Ty,
        body: Option,
    },
    // Crates and modules
    ExternCrate,
    /// A `use` item; the payload is `Some` only when the HIR node is available locally.
    Use(Option<(UsePath, UseKind)>),
    Mod {
        items: Vec<(Option, DefId)>,
    },
    /// An `extern` block.
    ForeignMod {
        items: Vec,
    },
    // Type-level parameters
    /// Type parameter: the `T` in `struct Vec { ... }`
    TyParam,
    /// Constant generic parameter: `struct Foo { ... }`
    ConstParam,
    /// Lifetime parameter: the `'a` in `struct Foo<'a> { ... }`
    LifetimeParam,
    // ADT parts
    /// Refers to the variant definition, [`DefKind::Ctor`] refers to its constructor if it exists.
    Variant,
    /// The constructor function of a tuple/unit struct or tuple/unit enum variant.
    Ctor {
        adt_def_id: DefId,
        ctor_of: CtorOf,
        variant_id: VariantIdx,
        fields: IndexVec,
        output_ty: Ty,
    },
    /// A field in a struct, enum or union. e.g.
    /// - `bar` in `struct Foo { bar: u8 }`
    /// - `Foo::Bar::0` in `enum Foo { Bar(u8) }`
    Field,
    // Others
    /// Macros
    Macro(MacroKinds),
    /// A use of `global_asm!`.
    GlobalAsm,
    /// A synthetic coroutine body created by the lowering of a coroutine-closure, such as an async
    /// closure.
    SyntheticCoroutineBody,
}

#[cfg(feature = "rustc")]
/// Compute the vtable-adjusted signature for the method owned by `s` (see
/// `FullDefKind::AssocFn::vtable_sig`). Returns `None` when the method is not
/// vtable safe or has no owning trait.
fn gen_vtable_sig<'tcx>(
    // The state that owns the method DefId
    s: &impl UnderOwnerState<'tcx>,
    args: Option>,
) -> Option {
    let method_def_id = s.owner_id();
    let tcx = s.base().tcx;
    let assoc_item = tcx.associated_item(method_def_id);
    let container_id = assoc_item.container_id(tcx);
    // Get the original trait method id.
    let method_decl_id = match assoc_item.container {
        ty::AssocContainer::TraitImpl(Ok(id)) => id,
        ty::AssocContainer::Trait => method_def_id,
        _ => return None,
    };
    let trait_id = tcx.trait_of_assoc(method_decl_id)?;
    let decl_assoc_item = tcx.associated_item(method_decl_id);
    if !rustc_trait_selection::traits::is_vtable_safe_method(tcx, trait_id, decl_assoc_item) {
        return None;
    }
    // Move into the context of the container (trait decl or impl) instead of the method.
    let s = &s.with_owner_id(container_id);
    let args = {
        // Drop the method's own generics; keep only the container's.
        let container_generics = tcx.generics_of(container_id);
        args.map(|args| args.truncate_to(tcx, container_generics))
    };
    let dyn_self = match assoc_item.container {
        ty::AssocContainer::Trait => get_trait_decl_dyn_self_ty(s, args),
        ty::AssocContainer::TraitImpl(..) => {
            // For impl methods, compute concrete dyn_self from the impl's trait reference
            let impl_def_id = assoc_item.container_id(tcx);
            let impl_trait_ref = tcx.impl_trait_ref(impl_def_id);
            // Get the concrete trait reference by rebasing the impl's trait ref args onto `container_args`
            let concrete_trait_ref = inst_binder(tcx, s.typing_env(), args, impl_trait_ref);
            dyn_self_ty(tcx, s.typing_env(), concrete_trait_ref)
        }
        ty::AssocContainer::InherentImpl => {
            // Excluded above: inherent-impl methods return `None` before reaching here.
            unreachable!()
        }
    }?;
    // dyn_self is of form `dyn Trait`, we extract the trait args
    let ty::Dynamic(preds, _) = dyn_self.kind() else {
        panic!("Unexpected dyn_self: {:?}", dyn_self);
    };
    // Safe to use `skip_binder` because we know the predicate we built in dyn_self_ty has no bound
    // vars.
    let ty::ExistentialPredicate::Trait(trait_ref) = preds[0].skip_binder() else {
        panic!("No principal trait found in dyn_self: {:?}", dyn_self);
    };
    // Build a full list of args for the trait: dyn_self + trait args
    // Note: trait_ref.args doesn't include Self (it's existential), so we prepend dyn_self
    let mut full_args = vec![ty::GenericArg::from(dyn_self)];
    full_args.extend(trait_ref.args.iter());
    let trait_args = tcx.mk_args(&full_args);
    // Instantiate and normalize the signature.
    let method_decl_sig = tcx.fn_sig(method_decl_id).instantiate(tcx, trait_args);
    let normalized_sig = normalize(tcx, s.typing_env(), method_decl_sig);
    Some(normalized_sig.sinto(s))
}

#[cfg(feature = "rustc")]
/// Construct the `FullDefKind` for this item.
///
/// If `args` is `Some`, instantiate the whole definition with these generics; otherwise keep the
/// polymorphic definition.
// Note: this is tricky to get right, we have to make sure to isntantiate every single field that // may contain a type/const/trait reference. fn translate_full_def_kind<'tcx, S, Body>( s: &S, def_id: RDefId, args: Option>, ) -> FullDefKind where S: BaseState<'tcx>, Body: IsBody + TypeMappable, { let s = &s.with_owner_id(def_id); let tcx = s.base().tcx; let type_of_self = || inst_binder(tcx, s.typing_env(), args, tcx.type_of(def_id)); let args_or_default = || args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id)); match get_def_kind(tcx, def_id) { RDefKind::Struct { .. } | RDefKind::Union { .. } | RDefKind::Enum { .. } => { let def = tcx.adt_def(def_id); let variants = def .variants() .iter_enumerated() .map(|(variant_idx, variant)| { let discr = if def.is_enum() { def.discriminant_for_variant(tcx, variant_idx) } else { // Structs and unions have a single variant. assert_eq!(variant_idx.index(), 0); ty::util::Discr { val: 0, ty: tcx.types.isize, } }; VariantDef::sfrom(s, variant, discr, args) }) .collect(); let destruct_trait = tcx.lang_items().destruct_trait().unwrap(); FullDefKind::Adt { param_env: get_param_env(s, args), adt_kind: def.adt_kind().sinto(s), variants, flags: def.flags().sinto(s), repr: def.repr().sinto(s), drop_glue: get_drop_glue_shim(s, args), destruct_impl: virtual_impl_for( s, ty::TraitRef::new(tcx, destruct_trait, [type_of_self()]), ), } } RDefKind::TyAlias { .. } => { let s = &s.with_base(Base { // Rust doesn't enforce bounds on generic parameters in type aliases. Thus, when // translating type aliases, we need to disable trait resolution errors. For more // details, please see https://github.com/hacspec/hax/issues/707. silence_resolution_errors: true, ..s.base() }); FullDefKind::TyAlias { param_env: get_param_env(s, args), ty: type_of_self().sinto(s), } } RDefKind::ForeignTy => FullDefKind::ForeignTy, RDefKind::AssocTy { .. 
        } => FullDefKind::AssocTy {
            param_env: get_param_env(s, args),
            implied_predicates: get_implied_predicates(s, args),
            associated_item: AssocItem::sfrom_instantiated(s, &tcx.associated_item(def_id), args),
            // Only defaulted/implemented associated types have a value.
            value: if tcx.defaultness(def_id).has_value() {
                Some(type_of_self().sinto(s))
            } else {
                None
            },
        },
        RDefKind::OpaqueTy => FullDefKind::OpaqueTy,
        RDefKind::Trait { .. } => FullDefKind::Trait {
            param_env: get_param_env(s, args),
            implied_predicates: get_implied_predicates(s, args),
            self_predicate: get_self_predicate(s, args),
            dyn_self: get_trait_decl_dyn_self_ty(s, args).sinto(s),
            items: tcx
                .associated_items(def_id)
                .in_definition_order()
                .map(|assoc| {
                    // Rebase each item's identity args onto the trait's args so the
                    // item is instantiated consistently with the trait.
                    let item_args = args.map(|args| {
                        let item_identity_args =
                            ty::GenericArgs::identity_for_item(tcx, assoc.def_id);
                        let item_args = item_identity_args.rebase_onto(tcx, def_id, args);
                        tcx.mk_args(item_args)
                    });
                    AssocItem::sfrom_instantiated(s, assoc, item_args)
                })
                .collect::>(),
            safety: tcx.trait_def(def_id).safety.sinto(s),
        },
        RDefKind::TraitAlias { .. } => FullDefKind::TraitAlias {
            param_env: get_param_env(s, args),
            implied_predicates: get_implied_predicates(s, args),
            self_predicate: get_self_predicate(s, args),
            dyn_self: get_trait_decl_dyn_self_ty(s, args).sinto(s),
        },
        RDefKind::Impl {
            of_trait,
            ..
        } => {
            use std::collections::HashMap;
            let param_env = get_param_env(s, args);
            if !of_trait {
                // Inherent impl: just the self type plus its associated items.
                let ty = tcx.type_of(def_id);
                let ty = inst_binder(tcx, s.typing_env(), args, ty);
                let items = tcx
                    .associated_items(def_id)
                    .in_definition_order()
                    .map(|assoc| {
                        let item_args = args.map(|args| {
                            let item_identity_args =
                                ty::GenericArgs::identity_for_item(tcx, assoc.def_id);
                            let item_args = item_identity_args.rebase_onto(tcx, def_id, args);
                            tcx.mk_args(item_args)
                        });
                        AssocItem::sfrom_instantiated(s, assoc, item_args)
                    })
                    .collect::>();
                FullDefKind::InherentImpl {
                    param_env,
                    ty: ty.sinto(s),
                    items,
                }
            } else {
                let trait_ref = tcx.impl_trait_ref(def_id);
                let trait_ref = inst_binder(tcx, s.typing_env(), args, trait_ref);
                let polarity = tcx.impl_polarity(def_id);
                let trait_pred = TraitPredicate {
                    trait_ref: trait_ref.sinto(s),
                    is_positive: matches!(polarity, ty::ImplPolarity::Positive),
                };
                let dyn_self = dyn_self_ty(tcx, s.typing_env(), trait_ref).sinto(s);
                // Impl exprs required by the trait.
                let required_impl_exprs =
                    solve_item_implied_traits(s, trait_ref.def_id, trait_ref.args);
                // Map each trait-declared item to the impl item providing it; entries
                // are removed as they're matched, so the map must be empty at the end.
                let mut item_map: HashMap = tcx
                    .associated_items(def_id)
                    .in_definition_order()
                    .map(|assoc| (assoc.trait_item_def_id().unwrap(), assoc))
                    .collect();
                let items = tcx
                    .associated_items(trait_ref.def_id)
                    .in_definition_order()
                    .map(|decl_assoc| {
                        let decl_def_id = decl_assoc.def_id;
                        // Impl exprs required by the item.
                        let required_impl_exprs;
                        let value = match item_map.remove(&decl_def_id) {
                            Some(impl_assoc) => {
                                required_impl_exprs = {
                                    let item_args =
                                        ty::GenericArgs::identity_for_item(tcx, impl_assoc.def_id);
                                    // Subtlety: we have to add the GAT arguments (if any) to the trait ref arguments.
                                    let args = item_args.rebase_onto(tcx, def_id, trait_ref.args);
                                    let state_with_id = s.with_owner_id(impl_assoc.def_id);
                                    solve_item_implied_traits(&state_with_id, decl_def_id, args)
                                };
                                ImplAssocItemValue::Provided {
                                    def_id: impl_assoc.def_id.sinto(s),
                                    is_override: decl_assoc.defaultness(tcx).has_value(),
                                }
                            }
                            None => {
                                // The impl doesn't provide this item: fall back to the
                                // trait declaration's default.
                                required_impl_exprs = if tcx.generics_of(decl_def_id).is_own_empty()
                                {
                                    // Non-GAT case.
                                    let item_args =
                                        ty::GenericArgs::identity_for_item(tcx, decl_def_id);
                                    let args = item_args.rebase_onto(tcx, def_id, trait_ref.args);
                                    // TODO: is it the right `def_id`?
                                    let state_with_id = s.with_owner_id(def_id);
                                    solve_item_implied_traits(&state_with_id, decl_def_id, args)
                                } else {
                                    // FIXME: For GATs, we need a param_env that has the arguments of
                                    // the impl plus those of the associated type, but there's no
                                    // def_id with that param_env.
                                    vec![]
                                };
                                match decl_assoc.kind {
                                    ty::AssocKind::Type { .. } => {
                                        let ty = tcx
                                            .type_of(decl_def_id)
                                            .instantiate(tcx, trait_ref.args)
                                            .sinto(s);
                                        ImplAssocItemValue::DefaultedTy { ty }
                                    }
                                    ty::AssocKind::Fn { .. } => {
                                        let sig = if tcx.generics_of(decl_def_id).is_own_empty() {
                                            // The method doesn't have generics of its own, so
                                            // we can instantiate it with just the trait
                                            // generics.
                                            let sig = tcx
                                                .fn_sig(decl_def_id)
                                                .instantiate(tcx, trait_ref.args)
                                                .sinto(s);
                                            Some(sig)
                                        } else {
                                            None
                                        };
                                        ImplAssocItemValue::DefaultedFn { sig }
                                    }
                                    ty::AssocKind::Const { .. } => {
                                        ImplAssocItemValue::DefaultedConst {}
                                    }
                                }
                            }
                        };
                        ImplAssocItem {
                            name: decl_assoc.opt_name().sinto(s),
                            value,
                            required_impl_exprs,
                            decl_def_id: decl_def_id.sinto(s),
                        }
                    })
                    .collect();
                // Every impl item must correspond to a trait-declared item.
                assert!(item_map.is_empty());
                FullDefKind::TraitImpl {
                    param_env,
                    trait_pred,
                    dyn_self,
                    implied_impl_exprs: required_impl_exprs,
                    items,
                }
            }
        }
        RDefKind::Fn {
            ..
        } => FullDefKind::Fn {
            param_env: get_param_env(s, args),
            inline: tcx.codegen_fn_attrs(def_id).inline.sinto(s),
            is_const: tcx.constness(def_id) == rustc_hir::Constness::Const,
            sig: inst_binder(tcx, s.typing_env(), args, tcx.fn_sig(def_id)).sinto(s),
            body: get_body(s, args),
        },
        RDefKind::AssocFn { .. } => {
            let item = tcx.associated_item(def_id);
            FullDefKind::AssocFn {
                param_env: get_param_env(s, args),
                associated_item: AssocItem::sfrom_instantiated(s, &item, args),
                inline: tcx.codegen_fn_attrs(def_id).inline.sinto(s),
                is_const: tcx.constness(def_id) == rustc_hir::Constness::Const,
                vtable_sig: gen_vtable_sig(s, args),
                sig: get_method_sig(tcx, s.typing_env(), def_id, args).sinto(s),
                body: get_body(s, args),
            }
        }
        RDefKind::Closure { .. } => {
            use ty::ClosureKind::{Fn, FnMut};
            let closure_ty = type_of_self();
            let ty::TyKind::Closure(_, closure_args) = closure_ty.kind() else {
                unreachable!()
            };
            let closure = closure_args.as_closure();
            // We lose lifetime information here. Eventually would be nice not to.
let input_ty = erase_free_regions(tcx, closure.sig().input(0).skip_binder()); let trait_args = [closure_ty, input_ty]; let fn_once_trait = tcx.lang_items().fn_once_trait().unwrap(); let fn_mut_trait = tcx.lang_items().fn_mut_trait().unwrap(); let fn_trait = tcx.lang_items().fn_trait().unwrap(); let destruct_trait = tcx.lang_items().destruct_trait().unwrap(); FullDefKind::Closure { is_const: tcx.constness(def_id) == rustc_hir::Constness::Const, args: ClosureArgs::sfrom(s, def_id, closure), once_shim: get_closure_once_shim(s, closure_ty), drop_glue: get_drop_glue_shim(s, args), destruct_impl: virtual_impl_for( s, ty::TraitRef::new(tcx, destruct_trait, [type_of_self()]), ), fn_once_impl: virtual_impl_for( s, ty::TraitRef::new(tcx, fn_once_trait, trait_args), ), fn_mut_impl: matches!(closure.kind(), FnMut | Fn) .then(|| virtual_impl_for(s, ty::TraitRef::new(tcx, fn_mut_trait, trait_args))), fn_impl: matches!(closure.kind(), Fn) .then(|| virtual_impl_for(s, ty::TraitRef::new(tcx, fn_trait, trait_args))), } } kind @ (RDefKind::Const { .. } | RDefKind::AnonConst { .. } | RDefKind::InlineConst { .. }) => { let kind = match kind { RDefKind::Const { .. } => ConstKind::TopLevel, RDefKind::AnonConst { .. } => ConstKind::AnonConst, RDefKind::InlineConst { .. } => ConstKind::InlineConst, _ => unreachable!(), }; FullDefKind::Const { param_env: get_param_env(s, args), ty: type_of_self().sinto(s), kind, body: get_body(s, args), value: const_value(s, def_id, args_or_default()), } } RDefKind::AssocConst { .. } => FullDefKind::AssocConst { param_env: get_param_env(s, args), associated_item: AssocItem::sfrom_instantiated(s, &tcx.associated_item(def_id), args), ty: type_of_self().sinto(s), body: get_body(s, args), value: const_value(s, def_id, args_or_default()), }, RDefKind::Static { safety, mutability, nested, .. 
} => FullDefKind::Static { param_env: get_param_env(s, args), safety: safety.sinto(s), mutability: mutability.sinto(s), nested: nested.sinto(s), ty: type_of_self().sinto(s), body: get_body(s, args), }, RDefKind::ExternCrate => FullDefKind::ExternCrate, RDefKind::Use => FullDefKind::Use( if let Some(ldid) = def_id.as_local() && let rustc_hir::Node::Item(item) = tcx.hir_node_by_def_id(ldid) && let rustc_hir::ItemKind::Use(use_path, use_kind) = item.kind { Some((use_path.sinto(s), use_kind.sinto(s))) } else { None }, ), RDefKind::Mod { .. } => FullDefKind::Mod { items: get_mod_children(tcx, def_id).sinto(s), }, RDefKind::ForeignMod { .. } => FullDefKind::ForeignMod { items: get_foreign_mod_children(tcx, def_id).sinto(s), }, RDefKind::TyParam => FullDefKind::TyParam, RDefKind::ConstParam => FullDefKind::ConstParam, RDefKind::LifetimeParam => FullDefKind::LifetimeParam, RDefKind::Variant => FullDefKind::Variant, RDefKind::Ctor(ctor_of, _) => { let args = args_or_default(); let ctor_of = ctor_of.sinto(s); // The def_id of the adt this ctor belongs to. let adt_def_id = match ctor_of { CtorOf::Struct => tcx.parent(def_id), CtorOf::Variant => tcx.parent(tcx.parent(def_id)), }; let adt_def = tcx.adt_def(adt_def_id); let variant_id = adt_def.variant_index_with_ctor_id(def_id); let fields = adt_def .variant(variant_id) .fields .iter() .map(|f| FieldDef::sfrom(s, f, args)) .collect(); let output_ty = ty::Ty::new_adt(tcx, adt_def, args).sinto(s); FullDefKind::Ctor { adt_def_id: adt_def_id.sinto(s), ctor_of, variant_id: variant_id.sinto(s), fields, output_ty, } } RDefKind::Field => FullDefKind::Field, RDefKind::Macro(kinds) => FullDefKind::Macro(kinds.sinto(s)), RDefKind::GlobalAsm => FullDefKind::GlobalAsm, RDefKind::SyntheticCoroutineBody => FullDefKind::SyntheticCoroutineBody, } } /// An associated item in a trait impl. This can be an item provided by the trait impl, or an item /// that reuses the trait decl default value. 
#[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ImplAssocItem { /// This is `None` for RPTITs. pub name: Option, /// The definition of the item from the trait declaration. This is an `AssocTy`, `AssocFn` or /// `AssocConst`. pub decl_def_id: DefId, /// The `ImplExpr`s required to satisfy the predicates on the associated type. E.g.: /// ```ignore /// trait Foo { /// type Type: Clone, /// } /// impl Foo for () { /// type Type: Arc; // would supply an `ImplExpr` for `Arc: Clone`. /// } /// ``` /// Empty if this item is an associated const or fn. pub required_impl_exprs: Vec, /// The value of the implemented item. pub value: ImplAssocItemValue, } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum ImplAssocItemValue { /// The item is provided by the trait impl. Provided { /// The definition of the item in the trait impl. This is an `AssocTy`, `AssocFn` or /// `AssocConst`. def_id: DefId, /// Whether the trait had a default value for this item (which is therefore overriden). is_override: bool, }, /// This is an associated type that reuses the trait declaration default. DefaultedTy { /// The default type, with generics properly instantiated. Note that this can be a GAT; /// relevant generics and predicates can be found in `decl_def`. ty: Ty, }, /// This is a non-overriden default method. /// FIXME: provide properly instantiated generics. DefaultedFn { /// The signature of the method, if we could translate it. `None` if the method as generics /// of its own, because then we'd need to resolve traits but the method doesn't have it's /// own `DefId`. sig: Option, }, /// This is an associated const that reuses the trait declaration default. The default const /// value can be found in `decl_def`. DefaultedConst, } /// Partial data for a trait impl, used for fake trait impls that we generate ourselves such as /// `FnOnce` and `Drop` impls. 
#[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct VirtualTraitImpl { /// The trait that is implemented by this impl block. pub trait_pred: TraitPredicate, /// The `ImplExpr`s required to satisfy the predicates on the trait declaration. pub implied_impl_exprs: Vec, /// The associated types and their predicates, in definition order. pub types: Vec<(Ty, Vec)>, } impl FullDef { pub fn def_id(&self) -> &DefId { &self.this.def_id } /// Reference to the item itself. pub fn this(&self) -> &ItemRef { &self.this } pub fn kind(&self) -> &FullDefKind { &self.kind } /// Returns the generics and predicates for definitions that have those. pub fn param_env(&self) -> Option<&ParamEnv> { use FullDefKind::*; match self.kind() { Adt { param_env, .. } | Trait { param_env, .. } | TraitAlias { param_env, .. } | TyAlias { param_env, .. } | AssocTy { param_env, .. } | Fn { param_env, .. } | AssocFn { param_env, .. } | Const { param_env, .. } | AssocConst { param_env, .. } | Static { param_env, .. } | TraitImpl { param_env, .. } | InherentImpl { param_env, .. } => Some(param_env), _ => None, } } /// Return the parent of this item if the item inherits the typing context from its parent. #[cfg(feature = "rustc")] pub fn typing_parent<'tcx>(&self, s: &impl BaseState<'tcx>) -> Option { use FullDefKind::*; match self.kind() { AssocTy { .. } | AssocFn { .. } | AssocConst { .. } | Const { kind: ConstKind::AnonConst | ConstKind::InlineConst | ConstKind::PromotedConst, .. } => self.param_env().unwrap().parent.clone(), Closure { .. } | Ctor { .. } | Variant { .. } => { let parent = self.def_id().parent.as_ref().unwrap(); // The parent has the same generics as this item. Some(self.this().with_def_id(s, parent)) } _ => None, } } /// Whether the item has any generics at all (including parent generics). 
pub fn has_any_generics(&self) -> bool { match self.param_env() { Some(p) => p.generics.parent_count != 0 || !p.generics.params.is_empty(), None => false, } } /// Whether the item has any generics of its own (ignoring parent generics). pub fn has_own_generics(&self) -> bool { match self.param_env() { Some(p) => !p.generics.params.is_empty(), None => false, } } /// Whether the item has any generics or predicates of its own (ignoring parent /// generics/predicates). pub fn has_own_generics_or_predicates(&self) -> bool { match self.param_env() { Some(p) => { let has_predicates = if let FullDefKind::AssocFn { .. } | FullDefKind::AssocConst { .. } = self.kind() { // Assoc fns and consts have a special `Self: Trait` predicate inserted, which // we don't want to consider as an "own predicate". p.predicates.predicates.len() > 1 } else { !p.predicates.predicates.is_empty() }; !p.generics.params.is_empty() || has_predicates } None => false, } } /// Lists the children of this item that can be named, in the way of normal rust paths. For /// types, this includes inherent items. #[cfg(feature = "rustc")] pub fn nameable_children<'tcx>(&self, s: &impl BaseState<'tcx>) -> Vec<(Symbol, DefId)> { let mut children = match self.kind() { FullDefKind::Mod { items } => items .iter() .filter_map(|(opt_ident, def_id)| { Some((opt_ident.as_ref()?.0.clone(), def_id.clone())) }) .collect(), FullDefKind::Adt { adt_kind: AdtKind::Enum, variants, .. } => variants .iter() .map(|variant| (variant.name.clone(), variant.def_id.clone())) .collect(), FullDefKind::InherentImpl { items, .. } | FullDefKind::Trait { items, .. } => items .iter() .filter_map(|item| Some((item.name.clone()?, item.def_id.clone()))) .collect(), FullDefKind::TraitImpl { items, .. } => items .iter() .filter_map(|item| Some((item.name.clone()?, item.def_id().clone()))) .collect(), _ => vec![], }; // Add inherent impl items if any. 
if let Some(rust_def_id) = self.def_id().as_rust_def_id() { let tcx = s.base().tcx; for impl_def_id in tcx.inherent_impls(rust_def_id) { children.extend( tcx.associated_items(impl_def_id) .in_definition_order() .filter_map(|assoc| Some((assoc.opt_name()?, assoc.def_id).sinto(s))), ); } } children } /// Gives the list of DefIds for associated items when self is a container pub fn associated_def_ids(&self) -> Vec { match self.kind() { FullDefKind::InherentImpl { items, .. } | FullDefKind::Trait { items, .. } => { items.iter().map(|item| item.def_id.clone()).collect() } FullDefKind::TraitImpl { items, .. } => { items.iter().map(|item| item.def_id().clone()).collect() } _ => vec![], } } } impl ImplAssocItem { /// The relevant definition: the provided implementation if any, otherwise the default /// declaration from the trait declaration. pub fn def_id(&self) -> &DefId { match &self.value { ImplAssocItemValue::Provided { def_id, .. } => def_id, _ => &self.decl_def_id, } } } #[cfg(feature = "rustc")] fn get_self_predicate<'tcx, S: UnderOwnerState<'tcx>>( s: &S, args: Option>, ) -> TraitPredicate { use ty::Upcast; let tcx = s.base().tcx; let typing_env = s.typing_env(); let pred: ty::TraitPredicate = crate::traits::self_predicate(tcx, s.owner_id()) .no_bound_vars() .unwrap() .upcast(tcx); let pred = substitute(tcx, typing_env, args, pred); pred.sinto(s) } /// Generates a `dyn Trait::Ty..>` type for this trait. #[cfg(feature = "rustc")] fn get_trait_decl_dyn_self_ty<'tcx, S: UnderOwnerState<'tcx>>( s: &S, args: Option>, ) -> Option> { let tcx = s.base().tcx; let typing_env = s.typing_env(); let def_id = s.owner_id(); let self_tref = ty::TraitRef::new_from_args( tcx, def_id, args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id)), ); rustc_utils::dyn_self_ty(tcx, typing_env, self_tref).map(|ty| { let ty = if args.is_some() { erase_free_regions(tcx, ty) } else { ty }; ty }) } /// Do the trait resolution necessary to create a new impl for the given trait_ref. 
Used when we /// generate fake trait impls e.g. for `FnOnce` and `Drop`. #[cfg(feature = "rustc")] fn virtual_impl_for<'tcx, S>(s: &S, trait_ref: ty::TraitRef<'tcx>) -> Box where S: UnderOwnerState<'tcx>, { let tcx = s.base().tcx; let trait_pred = TraitPredicate { trait_ref: trait_ref.sinto(s), is_positive: true, }; // Impl exprs required by the trait. let required_impl_exprs = solve_item_implied_traits(s, trait_ref.def_id, trait_ref.args); let types = tcx .associated_items(trait_ref.def_id) .in_definition_order() .filter(|assoc| matches!(assoc.kind, ty::AssocKind::Type { .. })) .map(|assoc| { // This assumes non-GAT because this is for builtin-trait (that don't // have GATs). let ty = ty::Ty::new_projection(tcx, assoc.def_id, trait_ref.args).sinto(s); // Impl exprs required by the type. let required_impl_exprs = solve_item_implied_traits(s, assoc.def_id, trait_ref.args); (ty, required_impl_exprs) }) .collect(); Box::new(VirtualTraitImpl { trait_pred, implied_impl_exprs: required_impl_exprs, types, }) } #[cfg(feature = "rustc")] fn get_body<'tcx, S, Body>(s: &S, args: Option>) -> Option where S: UnderOwnerState<'tcx>, Body: IsBody + TypeMappable, { let def_id = s.owner_id(); Body::body(s, def_id, args) } #[cfg(feature = "rustc")] fn get_closure_once_shim<'tcx, S, Body>(s: &S, closure_ty: ty::Ty<'tcx>) -> Option where S: UnderOwnerState<'tcx>, Body: IsBody + TypeMappable, { let tcx = s.base().tcx; let mir = crate::closure_once_shim(tcx, closure_ty)?; let body = Body::from_mir(s, mir)?; Some(body) } #[cfg(feature = "rustc")] fn get_drop_glue_shim<'tcx, S, Body>(s: &S, args: Option>) -> Option where S: UnderOwnerState<'tcx>, Body: IsBody + TypeMappable, { let tcx = s.base().tcx; let mir = crate::drop_glue_shim(tcx, s.owner_id(), args)?; let body = Body::from_mir(s, mir)?; Some(body) } #[cfg(feature = "rustc")] fn get_param_env<'tcx, S: UnderOwnerState<'tcx>>( s: &S, args: Option>, ) -> ParamEnv { let tcx = s.base().tcx; let def_id = s.owner_id(); let generics = 
tcx.generics_of(def_id).sinto(s); let parent = generics.parent.as_ref().map(|parent| { let parent = parent.underlying_rust_def_id(); let args = args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id)); let parent_args = args.truncate_to(tcx, tcx.generics_of(parent)); translate_item_ref(s, parent, parent_args) }); match args { None => ParamEnv { generics, predicates: required_predicates(tcx, def_id, s.base().options.bounds_options).sinto(s), parent, }, // An instantiated item is monomorphic. Some(_) => ParamEnv { generics: TyGenerics { parent_count: 0, params: Default::default(), ..generics }, predicates: GenericPredicates::default(), parent, }, } } #[cfg(feature = "rustc")] fn get_implied_predicates<'tcx, S: UnderOwnerState<'tcx>>( s: &S, args: Option>, ) -> GenericPredicates { use std::borrow::Cow; let tcx = s.base().tcx; let def_id = s.owner_id(); let typing_env = s.typing_env(); let mut implied_predicates = implied_predicates(tcx, def_id, s.base().options.bounds_options); if args.is_some() { implied_predicates = Cow::Owned( implied_predicates .iter() .copied() .map(|(clause, span)| { let clause = substitute(tcx, typing_env, args, clause); (clause, span) }) .collect(), ); } implied_predicates.sinto(s) } #[cfg(feature = "rustc")] fn const_value<'tcx, S: UnderOwnerState<'tcx>>( s: &S, def_id: RDefId, args: ty::GenericArgsRef<'tcx>, ) -> Option { let uneval = ty::UnevaluatedConst::new(def_id, args); let c = eval_ty_constant(s, uneval)?; match c.kind() { ty::ConstKind::Error(..) 
=> None, _ => Some(c.sinto(s)), } } ================================================ FILE: frontend/exporter/src/types/new/impl_infos.rs ================================================ use crate::prelude::*; /// Meta-informations about an `impl TRAIT for /// TYPE where PREDICATES {}` #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ImplInfos { pub generics: TyGenerics, pub clauses: Vec<(Clause, Span)>, pub typ: Ty, pub trait_ref: Option, } ================================================ FILE: frontend/exporter/src/types/new/item_attributes.rs ================================================ use crate::prelude::*; #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ItemAttributes { pub attributes: Vec, pub parent_attributes: Vec, } impl Default for ItemAttributes { fn default() -> Self { Self::new() } } impl ItemAttributes { pub fn new() -> Self { ItemAttributes { attributes: vec![], parent_attributes: vec![], } } } #[cfg(feature = "rustc")] lazy_static::lazy_static! 
{
    // When `HAX_CORE_EXTRACTION_MODE=on`, attribute extraction is disabled entirely
    // (`ItemAttributes::from_owner_id` short-circuits below).
    pub static ref CORE_EXTRACTION_MODE: bool =
        std::env::var_os("HAX_CORE_EXTRACTION_MODE") == Some("on".into());
}

#[cfg(feature = "rustc")]
impl ItemAttributes {
    /// Collect the attributes attached to `oid` itself, plus the attributes of each of its
    /// parent owners (enclosing items/modules), reported separately.
    pub fn from_owner_id<'tcx, S: BaseState<'tcx>>(
        s: &S,
        oid: rustc_hir::hir_id::OwnerId,
    ) -> ItemAttributes {
        if *CORE_EXTRACTION_MODE {
            return ItemAttributes::new();
        }
        use rustc_hir::hir_id::HirId;
        let tcx = s.base().tcx;
        let attrs_of = |id| tcx.hir_attrs(HirId::from(id)).sinto(s);
        ItemAttributes {
            attributes: attrs_of(oid),
            parent_attributes: tcx
                .hir_parent_owner_iter(HirId::from(oid))
                .map(|(oid, _)| oid)
                .flat_map(attrs_of)
                .collect(),
        }
    }

    /// Like [`Self::from_owner_id`], but starting from a `DefId`. Attributes are only available
    /// for local definitions; non-local ids yield an empty set.
    pub fn from_def_id<'tcx, S: BaseState<'tcx>>(
        s: &S,
        did: rustc_span::def_id::DefId,
    ) -> ItemAttributes {
        if let Some(def_id) = did.as_local() {
            Self::from_owner_id(s, rustc_hir::hir_id::OwnerId { def_id })
        } else {
            ItemAttributes::new()
        }
    }
}

================================================ FILE: frontend/exporter/src/types/new/mod.rs ================================================
//! This module contains type definitions that have no equivalent in
//! Rustc.

mod full_def;
mod impl_infos;
mod item_attributes;
mod predicate_id;
mod synthetic_items;
mod variant_infos;

pub use full_def::*;
pub use impl_infos::*;
pub use item_attributes::*;
pub use predicate_id::*;
pub use synthetic_items::*;
pub use variant_infos::*;

================================================ FILE: frontend/exporter/src/types/new/predicate_id.rs ================================================
use crate::prelude::*;

#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[serde(transparent)]
/// A `PredicateId` is a unique identifier for a clause or a
/// predicate. It is computed by hashing predicates and clauses in a
/// uniform and deterministic way.
pub struct PredicateId(pub u64); #[cfg(feature = "rustc")] mod rustc { use super::*; impl<'tcx> Binder { #[tracing::instrument(level = "trace")] pub fn predicate_id(&self) -> PredicateId { // Here, we need to be careful about not hashing a `crate::Predicate`, // but `crate::Binder` instead, // otherwise we would get into a infinite recursion. PredicateId(deterministic_hash(self)) } } /// A `PredicateId` can be mapped to itself via SInto. This is useful /// for mirroring the type [`traits::search_clause::PathChunk`] as /// [`traits::ImplExprPathChunk`]. impl<'tcx, S: UnderOwnerState<'tcx>> SInto for PredicateId { fn sinto(&self, _s: &S) -> PredicateId { *self } } /// We need identifiers that are stable across different /// architectures, different paths (which are observable from /// `Span`s), etc. /// Rustc's stable hash is not doing what we want here: it is sensible /// to the environment. Instead, we first `sinto` and then hash with /// `deterministic_hash` below. fn deterministic_hash(x: &T) -> u64 { use crate::deterministic_hash::DeterministicHasher; use std::collections::hash_map::DefaultHasher; use std::hash::BuildHasher; use std::hash::BuildHasherDefault; >>::default().hash_one(x) } } ================================================ FILE: frontend/exporter/src/types/new/synthetic_items.rs ================================================ #[cfg(feature = "rustc")] use crate::prelude::*; #[cfg(feature = "rustc")] use { rustc_hir::definitions::DisambiguatorState, rustc_middle::ty, rustc_span::{DUMMY_SP, Symbol, def_id::DefId as RDefId}, rustc_type_ir::Upcast, }; /// We create some extra `DefId`s to represent things that rustc doesn't have a `DefId` for. This /// makes the pipeline much easier to have "real" def_ids for them. /// We generate fake struct-like items for each of: arrays, slices, and tuples. This makes it /// easier to emit trait impls for these types, especially with monomorphization. This enum tracks /// identifies these builtin types. 
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)] pub enum SyntheticItem { /// Fake ADT representing the `[T; N]` type. Array, /// Fake ADT representing the `[T]` type. Slice, /// Fake ADT representing the length-n tuple `(A, B, ...)`. Tuple(usize), } #[cfg(feature = "rustc")] impl<'tcx> GlobalCache<'tcx> { pub fn get_synthetic_def_id( &mut self, s: &impl BaseState<'tcx>, item: SyntheticItem, ) -> RDefId { if let Some(def_id) = self.synthetic_def_ids.get(&item) { return *def_id; } let tcx = s.base().tcx; let mut disambiguator_state = DisambiguatorState::new(); let name = match item { SyntheticItem::Array => "", SyntheticItem::Slice => "", SyntheticItem::Tuple(n) => &format!(""), }; // Create a fake item, to which we'll assign generics and a param_env, which we can // then use to generate the `FullDefKind` we want. let feed = tcx.create_def( rustc_span::def_id::CRATE_DEF_ID, Some(Symbol::intern(name)), rustc_hir::def::DefKind::Struct, None, &mut disambiguator_state, ); let def_id = feed.def_id().to_def_id(); // Insert the def_ids early so we record them even if we panic later in this function. self.reverse_synthetic_map.insert(def_id, item); self.synthetic_def_ids.insert(item, def_id); let mut generics = ty::Generics { parent: None, parent_count: 0, own_params: Default::default(), param_def_id_to_index: Default::default(), has_self: false, has_late_bound_regions: None, }; let mut mk_param = |name: &str, def_kind, kind| { let name = Symbol::intern(name); let param_feed = tcx.create_def( feed.def_id(), Some(name), def_kind, None, &mut disambiguator_state, ); param_feed.feed_hir(); // Avoid panics on `local_def_id_to_hir_id`. 
let param_def_id = param_feed.def_id().into(); let index = generics.own_params.len() as u32; let param_def = ty::GenericParamDef { name, def_id: param_def_id, index, kind, pure_wrt_drop: true, }; let arg = tcx.mk_param_from_def(¶m_def); generics.own_params.push(param_def); generics.param_def_id_to_index.insert(param_def_id, index); (arg, param_feed) }; let mut clauses = vec![]; let sized_trait = tcx.lang_items().sized_trait().unwrap(); match item { SyntheticItem::Array => { let (t_arg, _) = mk_param( "T", rustc_hir::def::DefKind::TyParam, ty::GenericParamDefKind::Type { has_default: false, synthetic: false, }, ); let (n_arg, n_feed) = mk_param( "N", rustc_hir::def::DefKind::ConstParam, ty::GenericParamDefKind::Const { has_default: false }, ); n_feed.type_of(ty::EarlyBinder::bind(tcx.types.usize)); let item_ty = t_arg.as_type().unwrap(); let len = n_arg.as_const().unwrap(); let type_of = ty::Ty::new_array_with_const_len(tcx, item_ty, len); feed.type_of(ty::EarlyBinder::bind(type_of)); let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [item_ty]); clauses.push(ty_is_sized.upcast(tcx)); let len_is_usize = ty::ClauseKind::ConstArgHasType(len, tcx.types.usize); clauses.push(len_is_usize.upcast(tcx)); } SyntheticItem::Slice => { let (t_arg, _) = mk_param( "T", rustc_hir::def::DefKind::TyParam, ty::GenericParamDefKind::Type { has_default: false, synthetic: false, }, ); let item_ty = t_arg.as_type().unwrap(); let type_of = ty::Ty::new_slice(tcx, item_ty); feed.type_of(ty::EarlyBinder::bind(type_of)); let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [item_ty]); clauses.push(ty_is_sized.upcast(tcx)); } SyntheticItem::Tuple(len) => { let tys = (0..len).into_iter().map(|i| { let name: String = if i < 26 { format!("{}", (b'A' + i as u8) as char) } else { format!("T{i}") }; let (arg, _) = mk_param( &name, rustc_hir::def::DefKind::TyParam, ty::GenericParamDefKind::Type { has_default: false, synthetic: false, }, ); arg.as_type().unwrap() }); let tys = 
tcx.arena.alloc_from_iter(tys); let type_of = ty::Ty::new_tup(tcx, tys); feed.type_of(ty::EarlyBinder::bind(type_of)); // All types except the last one are sized. for ty in tys.iter().rev().skip(1).rev() { let arg: ty::GenericArg = (*ty).into(); let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [arg]); clauses.push(ty_is_sized.upcast(tcx)); } } } feed.generics_of(generics); feed.explicit_predicates_of(ty::GenericPredicates { parent: None, predicates: tcx .arena .alloc_from_iter(clauses.iter().map(|cl| (*cl, DUMMY_SP))), }); feed.param_env(ty::ParamEnv::new( tcx.mk_clauses_from_iter(clauses.into_iter()), )); feed.feed_hir(); def_id } } ================================================ FILE: frontend/exporter/src/types/new/variant_infos.rs ================================================ use crate::prelude::*; use crate::sinto_as_usize; /// Describe the kind of a variant #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum VariantKind { /// The variant is the only variant of a `struct` type Struct { /// Are the fields on this struct all named? named: bool, }, /// The variant is the only variant of a `union` type Union, /// The variant is one of the many variants of a `enum` type Enum { /// The index of this variant in the `enum` index: VariantIdx, /// Are the fields on this struct all named? named: bool, }, } sinto_as_usize!(rustc_abi, VariantIdx); /// Describe a variant #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct VariantInformations { pub type_namespace: DefId, pub typ: DefId, pub variant: DefId, pub kind: VariantKind, } ================================================ FILE: frontend/exporter/src/types/serialize_int.rs ================================================ //! This module provides serde manual serializes/deserializers as //! strings for u128 and i128: those types are not well supported in //! 
serde (see https://github.com/serde-rs/json/issues/625). use serde::{Deserializer, Serializer, de::Visitor, ser::Serialize}; pub mod unsigned { use super::*; pub fn serialize(value: &u128, serializer: S) -> Result where S: Serializer, { value.to_string().serialize(serializer) } pub fn deserialize<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, { deserializer.deserialize_any(IntScalarVisitor) } #[derive(Debug)] struct IntScalarVisitor; impl<'de> Visitor<'de> for IntScalarVisitor { type Value = u128; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { dbg!(self); formatter.write_str("expect to receive integer") } fn visit_str(self, v: &str) -> Result where E: serde::de::Error, { v.parse().map_err(serde::de::Error::custom) } fn visit_u64(self, v: u64) -> Result where E: serde::de::Error, { Ok(v as u128) } } } pub mod signed { use super::*; pub fn serialize(value: &i128, serializer: S) -> Result where S: Serializer, { value.to_string().serialize(serializer) } pub fn deserialize<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, { deserializer.deserialize_any(IntScalarVisitor) } #[derive(Debug)] struct IntScalarVisitor; impl<'de> Visitor<'de> for IntScalarVisitor { type Value = i128; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { dbg!(self); formatter.write_str("expect to receive integer") } fn visit_str(self, v: &str) -> Result where E: serde::de::Error, { v.parse().map_err(serde::de::Error::custom) } fn visit_u64(self, v: u64) -> Result where E: serde::de::Error, { Ok(v as i128) } fn visit_i64(self, v: i64) -> Result where E: serde::de::Error, { Ok(v as i128) } } } ================================================ FILE: frontend/exporter/src/types/span.rs ================================================ use crate::prelude::*; use crate::sinto_todo; /// Reflects [`rustc_span::Loc`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] 
pub struct Loc { pub line: usize, pub col: usize, } /// Reflects [`rustc_span::Span`] #[derive(::serde::Serialize, ::serde::Deserialize, Clone, Debug, JsonSchema, Eq, Ord)] pub struct Span { pub lo: Loc, pub hi: Loc, pub filename: FileName, /// Original rustc span; can be useful for reporting rustc /// diagnostics (this is used in Charon) #[cfg(feature = "rustc")] #[serde(skip)] pub rust_span_data: Option, #[cfg(not(feature = "rustc"))] #[serde(skip)] pub rust_span_data: Option<()>, } const _: () = { // `rust_span_data` is a metadata that should *not* be taken into // account while hashing or comparing impl std::hash::Hash for Span { fn hash(&self, state: &mut H) { self.lo.hash(state); self.hi.hash(state); self.filename.hash(state); } } impl PartialEq for Span { fn eq(&self, other: &Self) -> bool { self.lo == other.lo && self.hi == other.hi && self.filename == other.filename } } impl PartialOrd for Span { fn partial_cmp(&self, other: &Self) -> Option { Some( self.lo.partial_cmp(&other.lo)?.then( self.hi .partial_cmp(&other.hi)? 
.then(self.filename.partial_cmp(&other.filename)?), ), ) } } }; #[cfg(feature = "rustc")] impl From for Loc { fn from(val: rustc_span::Loc) -> Self { Loc { line: val.line, col: val.col_display, } } } #[cfg(feature = "rustc")] impl<'tcx, S: BaseState<'tcx>> SInto for rustc_span::Span { fn sinto(&self, s: &S) -> Span { if let Some(span) = s.with_global_cache(|cache| cache.spans.get(self).cloned()) { return span; } let span = translate_span(*self, s.base().tcx.sess); s.with_global_cache(|cache| cache.spans.insert(*self, span.clone())); span } } /// Reflects [`rustc_span::source_map::Spanned`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Spanned { pub node: T, pub span: Span, } #[cfg(feature = "rustc")] impl<'s, S: UnderOwnerState<'s>, T: SInto, U> SInto> for rustc_span::source_map::Spanned { fn sinto<'a>(&self, s: &S) -> Spanned { Spanned { node: self.node.sinto(s), span: self.span.sinto(s), } } } impl<'tcx, S> SInto for PathBuf { fn sinto(&self, _: &S) -> PathBuf { self.clone() } } /// Reflects [`rustc_span::RealFileName`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] #[args(, from: rustc_span::RealFileName, state: S as _s)] pub enum RealFileName { LocalPath(PathBuf), Remapped { local_path: Option, virtual_name: PathBuf, }, } #[cfg(feature = "rustc")] impl SInto for rustc_hashes::Hash64 { fn sinto(&self, _: &S) -> u64 { self.as_u64() } } /// Reflects [`rustc_span::FileName`] #[derive(AdtInto)] #[args(, from: rustc_span::FileName, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum FileName { Real(RealFileName), CfgSpec(u64), Anon(u64), MacroExpansion(u64), ProcMacroSourceCode(u64), CliCrateAttr(u64), Custom(String), // #[map(FileName::DocTest(x.0.to_str().unwrap().into()))] #[custom_arm(FROM_TYPE::DocTest(x, _) => TO_TYPE::DocTest(x.to_str().unwrap().into()),)] DocTest(String), 
InlineAsm(u64), } impl FileName { pub fn to_string(&self) -> String { match self { Self::Real(RealFileName::LocalPath(path)) | Self::Real(RealFileName::Remapped { local_path: Some(path), .. }) | Self::Real(RealFileName::Remapped { virtual_name: path, .. }) => format!("{}", path.display()), _ => format!("{:?}", self), } } pub fn to_path(&self) -> Option<&std::path::Path> { match self { Self::Real(RealFileName::LocalPath(path)) | Self::Real(RealFileName::Remapped { local_path: Some(path), .. }) | Self::Real(RealFileName::Remapped { virtual_name: path, .. }) => Some(path), _ => None, } } } sinto_todo!(rustc_span, ErrorGuaranteed); ================================================ FILE: frontend/exporter/src/types/thir.rs ================================================ //! Copies of the relevant `THIR` types. THIR represents a HIR (function) body augmented with type //! information and lightly desugared. use crate::prelude::*; #[cfg(feature = "rustc")] use rustc_middle::thir; /// Reflects [`thir::LogicalOp`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'a, S>, from: thir::LogicalOp, state: S as _s)] pub enum LogicalOp { And, Or, } /// Reflects [`thir::LintLevel`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'slt, S: UnderOwnerState<'slt> + HasThir<'slt>>, from: thir::LintLevel, state: S as gstate)] pub enum LintLevel { Inherited, Explicit(HirId), } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::FruInfo<'tcx>, state: S as gstate)] /// Field Record Update (FRU) informations, this reflects [`thir::FruInfo`] pub struct FruInfo { /// The base, e.g. `Foo {x: 1, .. 
base}` pub base: Expr, pub field_types: Vec, } #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::AdtExprBase<'tcx>, state: S as gstate)] pub enum AdtExprBase { None, Base(FruInfo), DefaultFields(Vec), } /// A field expression: a field name along with a value #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct FieldExpr { pub field: DefId, pub value: Expr, } /// Reflects [`thir::AdtExpr`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct AdtExpr { pub info: VariantInformations, pub user_ty: Option, pub fields: Vec, pub base: AdtExprBase, } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::AdtExpr<'tcx> { fn sinto(&self, s: &S) -> AdtExpr { let variants = self.adt_def.variants(); let variant: &rustc_middle::ty::VariantDef = &variants[self.variant_index]; AdtExpr { info: get_variant_information(&self.adt_def, self.variant_index, s), fields: self .fields .iter() .map(|f| FieldExpr { field: variant.fields[f.name].did.sinto(s), value: f.expr.sinto(s), }) .collect(), base: self.base.sinto(s), user_ty: self.user_ty.sinto(s), } } } /// Reflects [`thir::LocalVarId`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct LocalIdent { pub name: String, pub id: HirId, } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>> SInto for thir::LocalVarId { fn sinto(&self, s: &S) -> LocalIdent { LocalIdent { name: s .base() .local_ctx .borrow() .vars .get(self) .s_unwrap(s) .to_string(), id: self.0.sinto(s), } } } #[cfg(feature = "rustc")] impl SInto for rustc_middle::mir::interpret::AllocId { fn sinto(&self, _: &S) -> u64 { self.0.get() } } /// Reflects [`thir::BlockSafety`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S>, from: thir::BlockSafety, state: S as _s)] pub enum BlockSafety { Safe, BuiltinUnsafe, #[custom_arm(FROM_TYPE::ExplicitUnsafe{..} => 
BlockSafety::ExplicitUnsafe,)] ExplicitUnsafe, } /// Reflects [`rustc_middle::middle::region::ScopeData`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::ScopeData, state: S as gstate)] pub enum ScopeData { Node, CallSite, Arguments, Destruction, IfThen, IfThenRescope, MatchGuard, Remainder(FirstStatementIndex), } sinto_as_usize!(rustc_middle::middle::region, FirstStatementIndex); /// Reflects [`rustc_middle::middle::region::Scope`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::Scope, state: S as gstate)] pub struct Scope { pub local_id: ItemLocalId, pub data: ScopeData, } sinto_as_usize!(rustc_hir::hir_id, ItemLocalId); /// Reflects [`thir::Block`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::Block, state: S as gstate)] pub struct Block { pub targeted_by_break: bool, pub region_scope: Scope, pub span: Span, pub stmts: Vec, pub expr: Option, pub safety_mode: BlockSafety, } /// Reflects [`thir::Stmt`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::Stmt<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Stmt { pub kind: StmtKind, } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::BlockId { fn sinto(&self, s: &S) -> Block { s.thir().blocks[*self].sinto(s) } } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::StmtId { fn sinto(&self, s: &S) -> Stmt { s.thir().stmts[*self].sinto(s) } } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::Expr<'tcx> { fn sinto(&self, s: &S) -> Expr { let s = &s.with_ty(self.ty); let (hir_id, attributes) = self.hir_id_and_attributes(s); let hir_id = hir_id.map(|hir_id| hir_id.index()); let 
unrolled = self.unroll_scope(s); let thir::Expr { span, kind, ty, .. } = unrolled; let contents = match kind { // Introduce intermediate `Cast` from `T` to `U` when casting from a `#[repr(T)]` enum to `U` thir::ExprKind::Cast { source } => { if let rustc_middle::ty::TyKind::Adt(adt, _) = s.thir().exprs[source].ty.kind() { let tcx = s.base().tcx; let contents = kind.sinto(s); let repr_type = if adt.is_enum() { use crate::rustc_middle::ty::util::IntTypeExt; adt.repr().discr_type().to_ty(tcx) } else { ty }; if repr_type == ty { contents } else { ExprKind::Cast { source: Decorated { ty: repr_type.sinto(s), span: span.sinto(s), contents: Box::new(contents), hir_id, attributes: vec![], }, } } } else { kind.sinto(s) } } thir::ExprKind::NonHirLiteral { lit, .. } => { let cexpr: ConstantExpr = (ConstantExprKind::Literal(scalar_int_to_constant_literal(s, lit, ty))) .decorate(ty.sinto(s), span.sinto(s)); return cexpr.into(); } thir::ExprKind::ZstLiteral { .. } => { if ty.is_phantom_data() { let rustc_middle::ty::Adt(def, _) = ty.kind() else { supposely_unreachable_fatal!(s[span], "PhantomDataNotAdt"; {kind, ty}) }; let adt_def = AdtExpr { info: get_variant_information(def, rustc_abi::FIRST_VARIANT, s), user_ty: None, base: AdtExprBase::None, fields: vec![], }; return Expr { contents: Box::new(ExprKind::Adt(adt_def)), span: self.span.sinto(s), ty: ty.sinto(s), hir_id, attributes, }; } let (def_id, generics) = match ty.kind() { rustc_middle::ty::Adt(adt_def, generics) => { // Here, we should only get `struct Name;` structs. s_assert!(s, adt_def.variants().len() == 1); s_assert!(s, generics.is_empty()); (adt_def.did(), generics) } rustc_middle::ty::TyKind::FnDef(def_id, generics) => (*def_id, generics), ty_kind => { let ty_kind = ty_kind.sinto(s); supposely_unreachable_fatal!( s[span], "ZstLiteral ty≠FnDef(...) 
or PhantomData or naked Struct"; {kind, span, ty, ty_kind} ); } }; let item = translate_item_ref(s, def_id, generics); let tcx = s.base().tcx; let constructor = if tcx.is_constructor(def_id) { let adt_def = tcx.adt_def(rustc_utils::get_closest_parent_type(&tcx, def_id)); let variant_index = adt_def.variant_index_with_id(tcx.parent(def_id)); Some(rustc_utils::get_variant_information( &adt_def, variant_index, s, )) } else { None }; return Expr { contents: Box::new(ExprKind::GlobalName { item, constructor }), span: self.span.sinto(s), ty: ty.sinto(s), hir_id, attributes, }; } thir::ExprKind::Field { lhs, variant_index, name, } => { let lhs_ty = s.thir().exprs[lhs].ty.kind(); let idx = variant_index.index(); if idx != 0 { let _ = supposely_unreachable!( s[span], "ExprKindFieldIdxNonZero"; { kind, span, ty, ty.kind() } ); }; match lhs_ty { rustc_middle::ty::TyKind::Adt(adt_def, _generics) => { let variant = adt_def.variant(variant_index); ExprKind::Field { field: variant.fields[name].did.sinto(s), lhs: lhs.sinto(s), } } rustc_middle::ty::TyKind::Tuple(..) 
=> ExprKind::TupleField { field: name.index(), lhs: lhs.sinto(s), }, _ => supposely_unreachable_fatal!( s[span], "ExprKindFieldBadTy"; { kind, span, ty.kind(), lhs_ty } ), } } _ => kind.sinto(s), }; Decorated { ty: ty.sinto(s), span: span.sinto(s), contents: Box::new(contents), hir_id, attributes, } } } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::ExprId { fn sinto(&self, s: &S) -> Expr { s.thir().exprs[*self].sinto(s) } } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::Pat<'tcx> { fn sinto(&self, s: &S) -> Pat { let thir::Pat { span, kind, ty } = self; let contents = match kind { thir::PatKind::Leaf { subpatterns } => match ty.kind() { rustc_middle::ty::TyKind::Adt(adt_def, args) => (thir::PatKind::Variant { adt_def: *adt_def, args, variant_index: rustc_abi::VariantIdx::from_usize(0), subpatterns: subpatterns.clone(), }) .sinto(s), rustc_middle::ty::TyKind::Tuple(tys) => { // Build a full-arity vector, filling unmatched positions with wildcards // so that tuple patterns with `..` (ellipsis) are correctly expanded. 
let mut full_subpatterns: Vec = tys .iter() .map(|elem_ty| Decorated { ty: elem_ty.sinto(s), span: span.sinto(s), contents: Box::new(PatKind::Wild), hir_id: None, attributes: vec![], }) .collect(); for field_pat in subpatterns.iter() { full_subpatterns[field_pat.field.index()] = field_pat.pattern.sinto(s); } PatKind::Tuple { subpatterns: full_subpatterns, } } _ => supposely_unreachable_fatal!( s[span], "PatLeafNonAdtTy"; {ty.kind(), kind} ), }, _ => kind.sinto(s), }; Decorated { ty: ty.sinto(s), span: span.sinto(s), contents: Box::new(contents), hir_id: None, attributes: vec![], } } } #[cfg(feature = "rustc")] impl<'tcx, S: ExprState<'tcx>> SInto for thir::ArmId { fn sinto(&self, s: &S) -> Arm { s.thir().arms[*self].sinto(s) } } /// Reflects [`thir::StmtKind`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::StmtKind<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum StmtKind { Expr { scope: Scope, expr: Expr, }, Let { remainder_scope: Scope, init_scope: Scope, pattern: Pat, initializer: Option, else_block: Option, lint_level: LintLevel, #[value(attribute_from_scope(gstate, init_scope).1)] /// The attribute on this `let` binding attributes: Vec, }, } /// Reflects [`thir::Ascription`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: thir::Ascription<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Ascription { pub annotation: CanonicalUserTypeAnnotation, pub variance: Variance, } /// Reflects [`thir::PatRange`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct PatRange { pub lo: PatRangeBoundary, pub hi: PatRangeBoundary, pub end: RangeEnd, } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>> SInto for thir::PatRange<'tcx> { fn sinto(&self, s: &S) -> PatRange { let sinto_bdy = |bdy| match bdy { thir::PatRangeBoundary::Finite(valtree) => PatRangeBoundary::Finite( 
valtree_to_constant_expr(s, valtree, self.ty, rustc_span::DUMMY_SP), ), thir::PatRangeBoundary::NegInfinity => PatRangeBoundary::NegInfinity, thir::PatRangeBoundary::PosInfinity => PatRangeBoundary::PosInfinity, }; PatRange { lo: sinto_bdy(self.lo), hi: sinto_bdy(self.hi), end: self.end.sinto(s), } } } /// Reflects [`thir::PatRangeBoundary`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum PatRangeBoundary { Finite(ConstantExpr), NegInfinity, PosInfinity, } /// A field pattern: a field name along with a pattern #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct FieldPat { pub field: DefId, pub pattern: Pat, } pub type Pat = Decorated; /// Reflects [`thir::PatKind`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::PatKind<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] #[append(thir::PatKind::Leaf {..} => fatal!(gstate, "PatKind::Leaf: should never come up"),)] pub enum PatKind { Wild, Missing, AscribeUserType { ascription: Ascription, subpattern: Pat, }, #[custom_arm( thir::PatKind::Binding {name, mode, var, ty, subpattern, is_primary, ..} => { let local_ctx = gstate.base().local_ctx; local_ctx.borrow_mut().vars.insert(*var, name.to_string()); PatKind::Binding { mode: mode.sinto(gstate), var: var.sinto(gstate), ty: ty.sinto(gstate), subpattern: subpattern.sinto(gstate), is_primary: is_primary.sinto(gstate), } } )] Binding { mode: BindingMode, var: LocalIdent, // name VS var? 
TODO ty: Ty, subpattern: Option, is_primary: bool, }, #[custom_arm( FROM_TYPE::Variant { adt_def, variant_index, args, subpatterns } => { let variant_def_id = adt_def.variant(*variant_index).def_id; let item = translate_item_ref(gstate, variant_def_id, args); let variants = adt_def.variants(); let variant: &rustc_middle::ty::VariantDef = &variants[*variant_index]; let tcx = gstate.base().tcx; // Build a map from field index to explicit pattern, so we can // fill in wildcards for fields omitted by `..` (ellipsis). let explicit: std::collections::HashMap<_, _> = subpatterns .iter() .map(|f| (f.field, &f.pattern)) .collect(); TO_TYPE::Variant { item, info: get_variant_information(adt_def, *variant_index, gstate), subpatterns: variant.fields.iter_enumerated() .map(|(field_idx, field_def)| { let pattern = if let Some(pat) = explicit.get(&field_idx) { pat.sinto(gstate) } else { Decorated { ty: field_def.ty(tcx, args).sinto(gstate), span: rustc_span::DUMMY_SP.sinto(gstate), contents: Box::new(PatKind::Wild), hir_id: None, attributes: vec![], } }; FieldPat { field: field_def.did.sinto(gstate), pattern, } }) .collect(), } } )] Variant { /// Reference to variant item definition, with appropriate generics. item: ItemRef, /// Extra info about the variant. 
info: VariantInformations, subpatterns: Vec, }, #[disable_mapping] Tuple { subpatterns: Vec, }, Deref { subpattern: Pat, }, DerefPattern { subpattern: Pat, }, Constant { value: ConstantExpr, }, ExpandedConstant { def_id: DefId, subpattern: Pat, }, Range(PatRange), Slice { prefix: Vec, slice: Option, suffix: Vec, }, Array { prefix: Vec, slice: Option, suffix: Vec, }, Or { pats: Vec, }, Never, Error(ErrorGuaranteed), } /// Reflects [`thir::Arm`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::Arm<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Arm { pub pattern: Pat, pub guard: Option, pub body: Expr, pub lint_level: LintLevel, pub scope: Scope, pub span: Span, #[value(attribute_from_scope(gstate, scope).1)] attributes: Vec, } /// Reflects [`thir::Param`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx>>, from: thir::Param<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Param { pub pat: Option, pub ty: Ty, pub ty_span: Option, pub self_kind: Option, pub hir_id: Option, #[value(hir_id.map(|id| { s.base().tcx.hir_attrs(id).sinto(s) }).unwrap_or(vec![]))] /// attributes on this parameter pub attributes: Vec, } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct ThirBody { pub expr: Expr, pub params: Vec, } pub type Expr = Decorated; /// Reflects [`thir::ExprKind`] #[derive(AdtInto)] #[args(<'tcx, S: ExprState<'tcx> + HasTy<'tcx>>, from: thir::ExprKind<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] #[append( thir::ExprKind::Scope {..} => { fatal!(gstate, "Scope should have been eliminated at this point"); }, thir::ExprKind::Field {..} => { fatal!(gstate, "Field should have been eliminated at this point"); }, thir::ExprKind::NonHirLiteral {..} => { fatal!(gstate, "NonHirLiteral should have been eliminated at this point"); }, )] pub enum ExprKind { Box { value: Expr, }, /// 
Resugared macros calls. This is deprecated: see /// . If { if_then_scope: Scope, cond: Expr, then: Expr, else_opt: Option, }, #[map({ let e = gstate.thir().exprs[*fun].unroll_scope(gstate); let fun = match e.ty.kind() { rustc_middle::ty::TyKind::FnDef(def_id, generics) => { let (hir_id, attributes) = e.hir_id_and_attributes(gstate); let hir_id = hir_id.map(|hir_id| hir_id.index()); let item = translate_item_ref(gstate, *def_id, generics); let contents = Box::new(ExprKind::GlobalName { item, constructor: None }); Expr { contents, span: e.span.sinto(gstate), ty: e.ty.sinto(gstate), hir_id, attributes, } }, rustc_middle::ty::TyKind::FnPtr(..) => { e.sinto(gstate) }, ty_kind => { let ty_norm: Ty = gstate.base().tcx.normalize_erasing_regions(gstate.typing_env(), *ty).sinto(gstate); let ty_kind_sinto = ty_kind.sinto(gstate); supposely_unreachable_fatal!( gstate[e.span], "CallNotTyFnDef"; {e, ty_kind, ty_kind_sinto, ty_norm} ); } }; TO_TYPE::Call { ty: ty.sinto(gstate), args: args.sinto(gstate), from_hir_call: from_hir_call.sinto(gstate), fn_span: fn_span.sinto(gstate), fun, } })] /// A call to a function or a method. /// /// Example: `f(0i8)`, where `f` has signature `fn f(t: T) -> ()`. Call { /// The type of the function, substitution applied. /// /// Example: for the call `f(0i8)`, this is `i8 -> ()`. ty: Ty, /// The function itself. This can be something else than a name, e.g. a closure. /// /// Example: for the call `f(0i8)`, this is `f`. /// /// In the case of a call to a function that's not a closure/fn pointer, the expression /// will be a `GlobalName` that contains all the information about generics and whether /// this is a direct call or a method call. fun: Expr, // TODO: can [ty] and [fun.ty] be different? /// The arguments given to the function. /// /// Example: for the call `f(0i8)`, this is `[0i8]`. 
args: Vec, from_hir_call: bool, fn_span: Span, }, Deref { arg: Expr, }, Binary { op: BinOp, lhs: Expr, rhs: Expr, }, LogicalOp { op: LogicalOp, lhs: Expr, rhs: Expr, }, Unary { op: UnOp, arg: Expr, }, Cast { source: Expr, }, Use { source: Expr, }, // Use a lexpr to get a vexpr. NeverToAny { source: Expr, }, #[custom_arm( &FROM_TYPE::PointerCoercion { cast, source, .. } => { let source = &gstate.thir().exprs[source]; let src_ty = source.ty; let tgt_ty = gstate.ty(); TO_TYPE::PointerCoercion { cast: PointerCoercion::sfrom(gstate, cast, src_ty, tgt_ty), source: source.sinto(gstate), } }, )] PointerCoercion { cast: PointerCoercion, source: Expr, }, Loop { body: Expr, }, Match { scrutinee: Expr, arms: Vec, }, Let { expr: Expr, pat: Pat, }, Block { #[serde(flatten)] block: Block, }, Assign { lhs: Expr, rhs: Expr, }, AssignOp { op: AssignOp, lhs: Expr, rhs: Expr, }, #[disable_mapping] Field { field: DefId, lhs: Expr, }, #[disable_mapping] TupleField { field: usize, lhs: Expr, }, Index { lhs: Expr, index: Expr, }, VarRef { id: LocalIdent, }, #[disable_mapping] ConstRef { id: ParamConst, }, #[disable_mapping] GlobalName { item: ItemRef, constructor: Option, }, UpvarRef { closure_def_id: DefId, var_hir_id: LocalIdent, }, Borrow { borrow_kind: BorrowKind, arg: Expr, }, RawBorrow { mutability: Mutability, arg: Expr, }, Break { label: Scope, value: Option, }, Continue { label: Scope, }, Return { value: Option, }, #[custom_arm(FROM_TYPE::ConstBlock { did, args } => TO_TYPE::ConstBlock(translate_item_ref(gstate, *did, args)),)] ConstBlock(ItemRef), Repeat { value: Expr, count: ConstantExpr, }, Array { fields: Vec, }, Tuple { fields: Vec, }, Adt(AdtExpr), PlaceTypeAscription { source: Expr, user_ty: Option, }, ValueTypeAscription { source: Expr, user_ty: Option, }, #[custom_arm(FROM_TYPE::Closure(e) => { let (thir, expr_entrypoint) = get_thir(e.closure_id, gstate); let s = &gstate.with_thir(thir.clone()); TO_TYPE::Closure { params: thir.params.raw.sinto(s), body: 
expr_entrypoint.sinto(s), upvars: e.upvars.sinto(gstate), movability: e.movability.sinto(gstate) } }, )] Closure { params: Vec, body: Expr, upvars: Vec, movability: Option, }, Literal { lit: Spanned, neg: bool, // TODO }, //zero space type // This is basically used for functions! e.g. `::from` ZstLiteral { user_ty: Option, }, #[custom_arm(FROM_TYPE::NamedConst { def_id, args, user_ty } => TO_TYPE::NamedConst { item: translate_item_ref(gstate, *def_id, args), user_ty: user_ty.sinto(gstate), },)] NamedConst { item: ItemRef, user_ty: Option, }, ConstParam { param: ParamConst, def_id: GlobalIdent, }, StaticRef { alloc_id: u64, ty: Ty, def_id: GlobalIdent, }, Yield { value: Expr, }, #[todo] Todo(String), } #[cfg(feature = "rustc")] pub trait ExprKindExt<'tcx> { fn hir_id_and_attributes>( &self, s: &S, ) -> (Option, Vec); fn unroll_scope + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx>; } #[cfg(feature = "rustc")] impl<'tcx> ExprKindExt<'tcx> for thir::Expr<'tcx> { fn hir_id_and_attributes>( &self, s: &S, ) -> (Option, Vec) { match &self.kind { thir::ExprKind::Scope { region_scope: scope, .. } => attribute_from_scope(s, scope), _ => (None, vec![]), } } fn unroll_scope + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx> { // TODO: when we see a loop, we should lookup its label! label is actually a scope id // we remove scopes here, whence the TODO match self.kind { thir::ExprKind::Scope { value, .. } => s.thir().exprs[value].unroll_scope(s), _ => self.clone(), } } } #[cfg(feature = "rustc")] pub trait HirIdExt { fn index(&self) -> (usize, usize); } #[cfg(feature = "rustc")] impl HirIdExt for rustc_hir::HirId { fn index(&self) -> (usize, usize) { use crate::rustc_index::Idx; (self.owner.def_id.index(), self.local_id.index()) } } ================================================ FILE: frontend/exporter/src/types/ty.rs ================================================ //! Copies of the relevant type-level types. These are semantically-rich representations of //! 
type-level concepts such as types and trait references. use crate::prelude::*; use crate::sinto_as_usize; use crate::sinto_todo; use std::sync::Arc; #[cfg(feature = "rustc")] use rustc_middle::ty; /// Generic container for decorating items with a type, a span, /// attributes and other meta-data. #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Decorated { pub ty: Ty, pub span: Span, pub contents: Box, pub hir_id: Option<(usize, usize)>, pub attributes: Vec, } /// Reflects [`ty::ParamTy`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ParamTy, state: S as gstate)] pub struct ParamTy { pub index: u32, pub name: Symbol, } /// Reflects [`ty::ParamConst`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(, from: ty::ParamConst, state: S as gstate)] pub struct ParamConst { pub index: u32, pub name: Symbol, } /// A predicate without `Self`, for use in `dyn Trait`. /// /// Reflects [`ty::ExistentialPredicate`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ExistentialPredicate<'tcx>, state: S as state)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum ExistentialPredicate { /// E.g. `From`. Note that this isn't `T: From` with a given `T`, this is just /// `From`. Could be written `?: From`. Trait(ExistentialTraitRef), /// E.g. `Iterator::Item = u64`. Could be written `::Item = u64`. Projection(ExistentialProjection), /// E.g. `Send`. 
AutoTrait(DefId), } /// Reflects [`rustc_type_ir::ExistentialTraitRef`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialTraitRef>, state: S as state)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ExistentialTraitRef { pub def_id: DefId, pub args: Vec, } /// Reflects [`rustc_type_ir::ExistentialProjection`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialProjection>, state: S as state)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ExistentialProjection { pub def_id: DefId, pub args: Vec, pub term: Term, } /// Reflects [`ty::BoundTyKind`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTyKind, state: S as s)] pub enum BoundTyKind { Anon, #[custom_arm(&FROM_TYPE::Param(def_id) => TO_TYPE::Param(def_id.sinto(s), s.base().tcx.item_name(def_id).sinto(s)),)] Param(DefId, Symbol), } /// Reflects [`ty::BoundTy`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTy, state: S as s)] pub struct BoundTy { pub var: BoundVar, pub kind: BoundTyKind, } sinto_as_usize!(rustc_middle::ty, BoundVar); /// Reflects [`ty::BoundRegionKind`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegionKind, state: S as s)] pub enum BoundRegionKind { Anon, NamedAnon(Symbol), #[custom_arm(&FROM_TYPE::Named(def_id) => { let tcx = s.base().tcx; TO_TYPE::Named { def_id: def_id.sinto(s), name: tcx.item_name(def_id).sinto(s), span: tcx.def_span(def_id).sinto(s), attributes: get_def_attrs(tcx, def_id, get_def_kind(tcx, 
def_id)).sinto(s), } })] Named { def_id: DefId, name: Symbol, span: Span, attributes: Vec, }, ClosureEnv, } /// Reflects [`ty::BoundRegion`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegion, state: S as s)] pub struct BoundRegion { pub var: BoundVar, pub kind: BoundRegionKind, } /// Reflects [`ty::PlaceholderRegion`] pub type PlaceholderRegion = Placeholder; /// Reflects [`ty::PlaceholderConst`] pub type PlaceholderConst = Placeholder; /// Reflects [`ty::PlaceholderType`] pub type PlaceholderType = Placeholder; /// Reflects [`ty::Placeholder`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Placeholder { pub bound: T, } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> for ty::Placeholder { fn sinto(&self, s: &S) -> Placeholder { Placeholder { bound: self.bound.sinto(s), } } } /// Reflects [`rustc_middle::infer::canonical::Canonical`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct Canonical { pub value: T, } /// Reflects [`ty::CanonicalUserType`] pub type CanonicalUserType = Canonical; #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> for rustc_middle::infer::canonical::Canonical<'tcx, T> { fn sinto(&self, s: &S) -> Canonical { Canonical { value: self.value.sinto(s), } } } /// Reflects [`ty::UserSelfTy`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserSelfTy<'tcx>, state: S as gstate)] pub struct UserSelfTy { pub impl_def_id: DefId, pub self_ty: Ty, } /// Reflects [`ty::UserArgs`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserArgs<'tcx>, state: S as gstate)] pub struct UserArgs { pub args: Vec, pub user_self_ty: Option, } 
/// Reflects [`ty::UserType`]: this is currently
/// disabled, and everything is printed as debug in the
/// [`UserType::Todo`] variant.
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserType<'tcx>, state: S as _s)]
pub enum UserType {
    // TODO: for now, we don't use user types at all.
    // We disable it for now, since it cause the following to fail:
    //
    //    pub const MY_VAL: u16 = 5;
    //    pub type Alias = MyStruct; // Using the literal 5, it goes through
    //
    //    pub struct MyStruct {}
    //
    //    impl MyStruct {
    //        pub const MY_CONST: u16 = VAL;
    //    }
    //
    //    pub fn do_something() -> u32 {
    //        u32::from(Alias::MY_CONST)
    //    }
    //
    // (NOTE(review): generic parameters in this example were lost in extraction;
    // the struct/impl presumably carried a `const VAL: u16` parameter — confirm upstream.)
    //
    // In this case, we get a [ty::ConstKind::Bound] in
    // [do_something], which we are not able to translate.
    // See: https://github.com/hacspec/hax/pull/209
    // Ty(Ty),
    // TypeOf(DefId, UserArgs),
    #[todo]
    Todo(String),
}

/// Reflects [`ty::VariantDiscr`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::VariantDiscr, state: S as s)]
pub enum DiscriminantDefinition {
    // An explicit discriminant also records the span of its definition site.
    #[custom_arm(FROM_TYPE::Explicit(did) => TO_TYPE::Explicit { def_id: did.sinto(s), span: s.base().tcx.def_span(did).sinto(s) },)]
    Explicit { def_id: DefId, span: Span, },
    Relative(u32),
}

/// Reflects [`ty::util::Discr`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::util::Discr<'tcx>, state: S as gstate)]
pub struct DiscriminantValue {
    pub val: u128,
    pub ty: Ty,
}

/// Reflects [`ty::Visibility`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum Visibility {
    Public,
    Restricted(Id),
}

#[cfg(feature = "rustc")]
impl, U> SInto> for ty::Visibility {
    fn sinto(&self, s: &S) -> Visibility {
        use ty::Visibility as T;
        match self {
            T::Public => Visibility::Public,
            T::Restricted(id) => Visibility::Restricted(id.sinto(s)),
        }
    }
}

/// Reflects [`ty::FieldDef`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct FieldDef {
    pub did: DefId,
    /// Field definition of [tuple
    /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types)
    /// are anonymous, in that case `name` is [`None`].
    pub name: Option,
    pub vis: Visibility,
    pub ty: Ty,
    pub span: Span,
    pub attributes: Vec,
}

#[cfg(feature = "rustc")]
impl FieldDef {
    /// Builds a [`FieldDef`] from rustc's field definition, instantiating the
    /// field type with the given generic arguments.
    pub fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        fdef: &ty::FieldDef,
        instantiate: ty::GenericArgsRef<'tcx>,
    ) -> FieldDef {
        let tcx = s.base().tcx;
        let ty = fdef.ty(tcx, instantiate).sinto(s);
        let name = {
            let name = fdef.name.sinto(s);
            let is_user_provided = {
                // SH: Note that the only way I found of checking if the user wrote the name or if it
                // is just an integer generated by rustc is by checking if it is just made of
                // numerals...
                name.parse::().is_err()
            };
            // Purely-numeric names are rustc-generated tuple-field indices → `None`.
            is_user_provided.then_some(name)
        };
        FieldDef {
            did: fdef.did.sinto(s),
            name,
            vis: fdef.vis.sinto(s),
            ty,
            span: tcx.def_span(fdef.did).sinto(s),
            attributes: get_def_attrs(tcx, fdef.did, get_def_kind(tcx, fdef.did)).sinto(s),
        }
    }
}

/// Reflects [`ty::VariantDef`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct VariantDef {
    pub def_id: DefId,
    pub ctor: Option<(CtorKind, DefId)>,
    pub name: Symbol,
    pub discr_def: DiscriminantDefinition,
    pub discr_val: DiscriminantValue,
    /// The definitions of the fields on this variant. In case of [tuple
    /// structs/variants](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types),
    /// the fields are anonymous, otherwise fields are named.
    pub fields: IndexVec,
    /// Span of the definition of the variant
    pub span: Span,
    pub attributes: Vec,
}

#[cfg(feature = "rustc")]
impl VariantDef {
    /// Builds a [`VariantDef`]; when `instantiate` is `None`, the identity
    /// generic arguments of the variant's item are used.
    pub(crate) fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        def: &ty::VariantDef,
        discr_val: ty::util::Discr<'tcx>,
        instantiate: Option>,
    ) -> Self {
        let tcx = s.base().tcx;
        let instantiate = instantiate.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def.def_id));
        VariantDef {
            def_id: def.def_id.sinto(s),
            ctor: def.ctor.sinto(s),
            name: def.name.sinto(s),
            discr_def: def.discr.sinto(s),
            discr_val: discr_val.sinto(s),
            fields: def
                .fields
                .iter()
                .map(|f| FieldDef::sfrom(s, f, instantiate))
                .collect(),
            span: s.base().tcx.def_span(def.def_id).sinto(s),
            attributes: get_def_attrs(tcx, def.def_id, get_def_kind(tcx, def.def_id)).sinto(s),
        }
    }
}

/// Reflects [`ty::EarlyParamRegion`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::EarlyParamRegion, state: S as s)]
pub struct EarlyParamRegion {
    pub index: u32,
    pub name: Symbol,
}

/// Reflects [`ty::LateParamRegion`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::LateParamRegion, state: S as s)]
pub struct LateParamRegion {
    pub scope: DefId,
    pub kind: LateParamRegionKind,
}

/// Reflects [`ty::LateParamRegionKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::LateParamRegionKind, state: S as s)]
pub enum LateParamRegionKind {
    Anon(u32),
    NamedAnon(u32, Symbol),
    // Recover the region's name from the `DefId` via `item_name`.
    #[custom_arm(&FROM_TYPE::Named(def_id) => TO_TYPE::Named(def_id.sinto(s), s.base().tcx.item_name(def_id).sinto(s)),)]
    Named(DefId, Symbol),
    ClosureEnv,
}

/// Reflects [`ty::RegionKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq,
Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::RegionKind<'tcx>, state: S as gstate)]
pub enum RegionKind {
    ReEarlyParam(EarlyParamRegion),
    ReBound(BoundVarIndexKind, BoundRegion),
    ReLateParam(LateParamRegion),
    ReStatic,
    ReVar(RegionVid),
    RePlaceholder(PlaceholderRegion),
    ReErased,
    ReError(ErrorGuaranteed),
}

/// Reflects [`ty::BoundVarIndexKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundVarIndexKind, state: S as gstate)]
pub enum BoundVarIndexKind {
    Bound(DebruijnIndex),
    Canonical,
}

// These rustc index newtypes are translated as plain `usize`.
sinto_as_usize!(rustc_middle::ty, DebruijnIndex);
sinto_as_usize!(rustc_middle::ty, RegionVid);

/// Reflects [`ty::Region`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Region<'tcx>, state: S as s)]
pub struct Region {
    #[value(self.kind().sinto(s))]
    pub kind: RegionKind,
}

/// Reflects both [`ty::GenericArg`] and [`ty::GenericArgKind`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericArgKind<'tcx>, state: S as s)]
pub enum GenericArg {
    Lifetime(Region),
    Type(Ty),
    Const(ConstantExpr),
}

/// Contents of `ItemRef`.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ItemRefContents {
    /// The item being refered to.
    pub def_id: DefId,
    /// The generics passed to the item. If `in_trait` is `Some`, these are only the generics of
    /// the method/type/const itself; generics for the traits are available in
    /// `in_trait.unwrap().trait`.
    pub generic_args: Vec,
    /// Witnesses of the trait clauses required by the item, e.g. `T: Sized` for `Option` or `B:
    /// ToOwned` for `Cow<'a, B>`.
    /// Same as above, for associated items this only includes clauses
    /// for the item itself.
    pub impl_exprs: Vec,
    /// If we're referring to a trait associated item, this gives the trait clause/impl we're
    /// referring to.
    pub in_trait: Option,
    /// Whether this contains any reference to a type/lifetime/const parameter.
    pub has_param: bool,
    /// Whether this contains any reference to a type/const parameter.
    pub has_non_lt_param: bool,
}

/// Reference to an item, with generics. Basically any mention of an item (function, type, etc)
/// uses this.
///
/// This can refer to a top-level item or to a trait associated item. Example:
/// ```ignore
/// trait MyTrait {
///     fn meth(...) {...}
/// }
/// fn example_call>(x: SelfType) {
///     x.meth::(...)
/// }
/// ```
/// (NOTE(review): the generic arguments in this doc example were lost in extraction;
/// judging by the `generic_args` below, the trait presumably took a type and a const
/// parameter and `meth` a `String` turbofish — confirm upstream.)
///
/// Here, in the call `x.meth::(...)` we will build an `ItemRef` that looks like:
/// ```ignore
/// ItemRef {
///     def_id = MyTrait::meth,
///     generic_args = [String],
///     impl_exprs = [],
///     in_trait = Some(`>,
/// }
/// ```
/// The `in_trait` `ImplExpr` will have in its `trait` field a representation of the `SelfType:
/// MyTrait` predicate, which looks like:
/// ```ignore
/// ItemRef {
///     def_id = MyTrait,
///     generic_args = [SelfType, TraitType, 12],
///     impl_exprs = [],
///     in_trait = None,
/// }
/// ```
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[serde(transparent)]
pub struct ItemRef {
    pub(crate) contents: id_table::Node,
}

impl ItemRefContents {
    /// Hash-cons these contents into the global id table, producing a shared `ItemRef`.
    #[cfg(feature = "rustc")]
    fn intern<'tcx, S: BaseState<'tcx>>(self, s: &S) -> ItemRef {
        s.with_global_cache(|cache| {
            let table_session = &mut cache.id_table_session;
            let contents = id_table::Node::new(self, table_session);
            ItemRef { contents }
        })
    }
}

impl ItemRef {
    /// The main way to obtain an `ItemRef`: from a `def_id` and generics.
    #[cfg(feature = "rustc")]
    pub fn translate<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        def_id: RDefId,
        generics: ty::GenericArgsRef<'tcx>,
    ) -> ItemRef {
        // Whether to resolve to concrete impls is driven by a user-facing option.
        Self::translate_maybe_resolve_impl(
            s,
            s.base().options.item_ref_use_concrete_impl,
            def_id,
            generics,
        )
    }

    /// Makes a `ItemRef` from a `def_id` and generics.
    ///
    /// If `resolve_impl == true` and `(def_id, generics)` points to a trait item that
    /// can be resolved to a specific `impl`, `translate` rewrites `def_id` to the
    /// concrete associated item from that `impl` and re-bases the generics.
    ///
    /// For instance, [`<u32 as From<u16>>::from`] produces a [`ItemRef`] with a
    /// [`DefId`] looking like `core::convert::num::Impl#42::from` when
    /// `resolve_impl` is `true`, `core::convert::From::from` otherwise.
    #[cfg(feature = "rustc")]
    fn translate_maybe_resolve_impl<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        resolve_impl: bool,
        mut def_id: RDefId,
        mut generics: ty::GenericArgsRef<'tcx>,
    ) -> ItemRef {
        use rustc_infer::infer::canonical::ir::TypeVisitableExt;
        // Memoized: `(def_id, generics)` pairs are translated at most once.
        let key = (def_id, generics);
        if let Some(item) = s.with_cache(|cache| cache.item_refs.get(&key).cloned()) {
            return item;
        }
        let tcx = s.base().tcx;
        // If this is an associated item, resolve the trait reference.
        let mut trait_info = self_clause_for_item(s, def_id, generics);
        // If the reference is a known trait impl and the impl implements the target item, we can
        // point directly to the implemented item.
        if resolve_impl
            && let Some(tinfo) = &trait_info
            && let ImplExprAtom::Concrete(impl_ref) = &tinfo.r#impl
            && let impl_def_id = impl_ref.def_id.as_rust_def_id().unwrap()
            && let Some(implemented_item) = tcx
                .associated_items(impl_def_id)
                .in_definition_order()
                .find(|item| item.trait_item_def_id() == Some(def_id))
        {
            // Retarget to the impl's own associated item and re-base the generics
            // from the trait's parameters onto the impl's parameters.
            let trait_def_id = tcx.parent(def_id);
            def_id = implemented_item.def_id;
            generics = generics.rebase_onto(tcx, trait_def_id, impl_ref.rustc_args(s));
            trait_info = None;
        }
        let hax_def_id = def_id.sinto(s);
        let mut hax_generics = generics.sinto(s);
        let mut impl_exprs = solve_item_required_traits(s, def_id, generics);
        // Fixup the generics.
        if let Some(tinfo) = &trait_info {
            // The generics are split in two: the arguments of the trait and the arguments of the
            // method/associated item.
            //
            // For instance, if we have (NOTE(review): generic parameters in this example
            // were lost in extraction; reconstructed illustratively — confirm upstream):
            // ```
            // trait Foo {
            //     fn baz(...) { ... }
            // }
            //
            // fn test(x: T) {
            //     x.baz(...);
            //     ...
            // }
            // ```
            // The generics for the call to `baz` will be the concatenation: ``, which we
            // split into `` and ``.
            let trait_ref = tinfo.r#trait.hax_skip_binder_ref();
            let num_trait_generics = trait_ref.generic_args.len();
            hax_generics.drain(0..num_trait_generics);
            let mut num_trait_trait_clauses = trait_ref.impl_exprs.len();
            // Items other than associated types get an extra `Self: Trait` clause as the first
            // clause, we skip that one too. Note: that clause is the same as `tinfo`.
            if !matches!(hax_def_id.kind, DefKind::AssocTy) {
                num_trait_trait_clauses += 1;
            };
            impl_exprs.drain(0..num_trait_trait_clauses);
        }
        let content = ItemRefContents {
            def_id: hax_def_id,
            generic_args: hax_generics,
            impl_exprs,
            in_trait: trait_info,
            // `has_param` is broader than `has_non_lt_param`: it also counts
            // escaping bound vars and free regions.
            has_param: generics.has_param() || generics.has_escaping_bound_vars() || generics.has_free_regions(),
            has_non_lt_param: generics.has_param(),
        };
        let item = content.intern(s);
        // Cache both directions: key → item, and item id → original rustc generics.
        s.with_cache(|cache| {
            cache.item_refs.insert(key, item.clone());
        });
        s.with_global_cache(|cache| {
            cache.reverse_item_refs_map.insert(item.id(), generics);
        });
        item
    }

    /// Construct an `ItemRef` for items that can't have generics (e.g. modules).
    #[cfg(feature = "rustc")]
    pub fn dummy_without_generics<'tcx, S: BaseState<'tcx>>(s: &S, def_id: DefId) -> ItemRef {
        let content = ItemRefContents {
            def_id,
            generic_args: Default::default(),
            impl_exprs: Default::default(),
            in_trait: Default::default(),
            has_param: false,
            has_non_lt_param: false,
        };
        let item = content.intern(s);
        // Register empty generics so `rustc_args` keeps working for this item.
        s.with_global_cache(|cache| {
            cache
                .reverse_item_refs_map
                .insert(item.id(), ty::GenericArgsRef::default());
        });
        item
    }

    /// For an `ItemRef` that refers to a trait, this returns values for each of the non-gat
    /// associated types of this trait and its parents, in a fixed order.
    ///
    /// # Panics
    /// Panics if `self` does not refer to a trait or trait alias.
    #[cfg(feature = "rustc")]
    pub fn trait_associated_types<'tcx, S: UnderOwnerState<'tcx>>(&self, s: &S) -> Vec {
        if !matches!(self.def_id.kind, DefKind::Trait | DefKind::TraitAlias) {
            panic!("`ItemRef::trait_associated_types` expected a trait")
        }
        let tcx = s.base().tcx;
        let typing_env = s.typing_env();
        let def_id = self.def_id.as_rust_def_id().unwrap();
        let generics = self.rustc_args(s);
        let tref = ty::TraitRef::new(tcx, def_id, generics);
        // Project each associated type, normalize, then translate.
        rustc_utils::assoc_tys_for_trait(tcx, typing_env, tref)
            .into_iter()
            .map(|alias_ty| ty::Ty::new_alias(tcx, ty::Projection, alias_ty))
            .map(|ty| normalize(tcx, typing_env, ty))
            .map(|ty| ty.sinto(s))
            .collect()
    }

    /// Erase lifetimes from the generic arguments of this item.
    #[cfg(feature = "rustc")]
    pub fn erase<'tcx, S: UnderOwnerState<'tcx>>(&self, s: &S) -> Self {
        let def_id = self.def_id.underlying_rust_def_id();
        let args = self.rustc_args(s);
        let args = erase_and_norm(s.base().tcx, s.typing_env(), args);
        // Re-translate with erased args, but keep the original (hax) `DefId`.
        Self::translate(s, def_id, args).with_def_id(s, &self.def_id)
    }

    pub fn contents(&self) -> &ItemRefContents {
        &self.contents
    }

    /// Get a unique id identifying this `ItemRef`.
    pub fn id(&self) -> id_table::Id {
        self.contents.id()
    }

    /// Recover the original rustc args that generated this `ItemRef`. Will panic if the `ItemRef`
    /// was built by hand instead of using `translate_item_ref`.
    #[cfg(feature = "rustc")]
    pub fn rustc_args<'tcx, S: BaseState<'tcx>>(&self, s: &S) -> ty::GenericArgsRef<'tcx> {
        s.with_global_cache(|cache| *cache.reverse_item_refs_map.get(&self.id()).unwrap())
    }

    /// Mutate the `DefId`, keeping the same generic args.
    #[cfg(feature = "rustc")]
    pub fn mutate_def_id<'tcx, S: BaseState<'tcx>>(
        &self,
        s: &S,
        f: impl FnOnce(&mut DefId),
    ) -> Self {
        let args = self.rustc_args(s);
        let mut contents = self.contents().clone();
        f(&mut contents.def_id);
        let new = contents.intern(s);
        // The new interned node inherits the original rustc generics.
        s.with_global_cache(|cache| {
            cache.reverse_item_refs_map.insert(new.id(), args);
        });
        new
    }

    /// Set the `DefId`, keeping the same generic args.
    #[cfg(feature = "rustc")]
    pub fn with_def_id<'tcx, S: BaseState<'tcx>>(&self, s: &S, def_id: &DefId) -> Self {
        self.mutate_def_id(s, |d| *d = def_id.clone())
    }
}

// Convenience: expose `ItemRefContents` fields directly on `ItemRef`.
impl std::ops::Deref for ItemRef {
    type Target = ItemRefContents;
    fn deref(&self) -> &Self::Target {
        self.contents()
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::GenericArg<'tcx> {
    fn sinto(&self, s: &S) -> GenericArg {
        self.kind().sinto(s)
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto> for ty::GenericArgsRef<'tcx> {
    fn sinto(&self, s: &S) -> Vec {
        self.iter().map(|v| v.kind().sinto(s)).collect()
    }
}

/// Reflects [`rustc_ast::ast::LitIntType`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitIntType, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum LitIntType {
    Signed(IntTy),
    Unsigned(UintTy),
    Unsuffixed,
}

/// Reflects partially [`ty::InferTy`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S>, from: ty::InferTy, state: S as gstate)]
pub enum InferTy {
    // The inference-variable payloads are dropped in the translation.
    #[custom_arm(FROM_TYPE::TyVar(..) => TO_TYPE::TyVar,)]
    TyVar, /*TODO?*/
    #[custom_arm(FROM_TYPE::IntVar(..) => TO_TYPE::IntVar,)]
    IntVar, /*TODO?*/
    #[custom_arm(FROM_TYPE::FloatVar(..)
    => TO_TYPE::FloatVar,)]
    FloatVar, /*TODO?*/
    FreshTy(u32),
    FreshIntTy(u32),
    FreshFloatTy(u32),
}

/// Reflects [`rustc_type_ir::IntTy`]
#[derive(AdtInto)]
#[args(, from: rustc_type_ir::IntTy, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum IntTy {
    Isize,
    I8,
    I16,
    I32,
    I64,
    I128,
}

/// Reflects [`rustc_type_ir::FloatTy`]
#[derive(AdtInto)]
#[args(, from: rustc_type_ir::FloatTy, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum FloatTy {
    F16,
    F32,
    F64,
    F128,
}

/// Reflects [`rustc_type_ir::UintTy`]
#[derive(AdtInto)]
#[args(, from: rustc_type_ir::UintTy, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum UintTy {
    Usize,
    U8,
    U16,
    U32,
    U64,
    U128,
}

// Renders the Rust surface syntax name of the integer type (e.g. "i32").
impl ToString for IntTy {
    fn to_string(&self) -> String {
        use IntTy::*;
        match self {
            Isize => "isize".to_string(),
            I8 => "i8".to_string(),
            I16 => "i16".to_string(),
            I32 => "i32".to_string(),
            I64 => "i64".to_string(),
            I128 => "i128".to_string(),
        }
    }
}

// Renders the Rust surface syntax name of the unsigned type (e.g. "u32").
impl ToString for UintTy {
    fn to_string(&self) -> String {
        use UintTy::*;
        match self {
            Usize => "usize".to_string(),
            U8 => "u8".to_string(),
            U16 => "u16".to_string(),
            U32 => "u32".to_string(),
            U64 => "u64".to_string(),
            U128 => "u128".to_string(),
        }
    }
}

/// Reflects [`ty::TypeAndMut`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TypeAndMut<'tcx>, state: S as gstate)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct TypeAndMut {
    pub ty: Box,
    pub mutbl: Mutability,
}

// Element-wise translation of rustc interned lists.
#[cfg(feature = "rustc")]
impl> SInto> for ty::List {
    fn sinto(&self, s: &S) -> Vec {
        self.iter().map(|x| x.sinto(s)).collect()
    }
}

/// Reflects [`ty::Variance`]
#[derive(AdtInto)]
#[args(, from: ty::Variance, state: S as _s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug,
JsonSchema)]
pub enum Variance {
    Covariant,
    Invariant,
    Contravariant,
    Bivariant,
}

/// Reflects [`ty::GenericParamDef`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericParamDef, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct GenericParamDef {
    pub name: Symbol,
    pub def_id: DefId,
    pub index: u32,
    pub pure_wrt_drop: bool,
    #[value(
        match self.kind {
            ty::GenericParamDefKind::Lifetime => GenericParamDefKind::Lifetime,
            ty::GenericParamDefKind::Type { has_default, synthetic } => GenericParamDefKind::Type { has_default, synthetic },
            ty::GenericParamDefKind::Const { has_default, .. } => {
                // Const parameters additionally record their type.
                let ty = s.base().tcx.type_of(self.def_id).instantiate_identity().sinto(s);
                GenericParamDefKind::Const { has_default, ty }
            },
        }
    )]
    pub kind: GenericParamDefKind,
    /// Variance of this type parameter, if sensible.
    #[value({
        use rustc_hir::def::DefKind::*;
        let tcx = s.base().tcx;
        let parent = tcx.parent(self.def_id);
        // Variances are only computed by rustc for these kinds of parents.
        match tcx.def_kind(parent) {
            Fn
            | AssocFn
            | Enum
            | Struct
            | Union
            | Ctor(..)
            | OpaqueTy => {
                tcx.variances_of(parent).get(self.index as usize).sinto(s)
            }
            _ => None
        }
    })]
    pub variance: Option,
    #[value(s.base().tcx.def_span(self.def_id).sinto(s))]
    pub span: Span,
    #[value({
        let tcx = s.base().tcx;
        get_def_attrs(tcx, self.def_id, get_def_kind(tcx, self.def_id)).sinto(s)
    })]
    pub attributes: Vec,
}

/// Reflects [`ty::GenericParamDefKind`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum GenericParamDefKind {
    Lifetime,
    Type { has_default: bool, synthetic: bool },
    Const { has_default: bool, ty: Ty },
}

/// Reflects [`ty::Generics`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Generics, state: S as state)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub struct TyGenerics {
    pub parent: Option,
    pub parent_count: usize,
    #[from(own_params)]
    pub params: Vec,
    // pub param_def_id_to_index: FxHashMap,
    pub has_self: bool,
    pub has_late_bound_regions: Option,
}

#[cfg(feature = "rustc")]
impl TyGenerics {
    /// Total parameter count: the parent's parameters plus this item's own.
    pub(crate) fn count_total_params(&self) -> usize {
        self.parent_count + self.params.len()
    }
}

/// This type merges the information from
/// `rustc_type_ir::AliasKind` and `ty::AliasTy`
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Alias {
    pub kind: AliasKind,
    pub args: Vec,
    pub def_id: DefId,
}

/// Reflects [`ty::AliasKind`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum AliasKind {
    /// The projection of a trait type: `<Ty as Trait>::Type<...>`
    Projection {
        /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`.
        impl_expr: ImplExpr,
        /// The `Type` in `Ty: Trait<..., Type = U>`.
        assoc_item: AssocItem,
    },
    /// An associated type in an inherent impl.
    Inherent,
    /// An `impl Trait` opaque type.
    Opaque {
        /// The real type hidden inside this opaque type.
        hidden_ty: Ty,
    },
    /// A type alias that references opaque types. Likely to always be normalized away.
    Free,
}

#[cfg(feature = "rustc")]
impl Alias {
    /// Translates a rustc alias type into a [`TyKind`], normalizing it first;
    /// if normalization removes the alias entirely, the normalized kind is returned.
    #[tracing::instrument(level = "trace", skip(s))]
    fn from<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        alias_kind: &rustc_type_ir::AliasTyKind,
        alias_ty: &ty::AliasTy<'tcx>,
    ) -> TyKind {
        let tcx = s.base().tcx;
        let typing_env = s.typing_env();
        use rustc_type_ir::AliasTyKind as RustAliasKind;
        // Try to normalize the alias first.
        let ty = ty::Ty::new_alias(tcx, *alias_kind, *alias_ty);
        let ty = crate::traits::normalize(tcx, typing_env, ty);
        let ty::Alias(alias_kind, alias_ty) = ty.kind() else {
            // Normalization got rid of the alias: translate the result directly.
            let ty: Ty = ty.sinto(s);
            return ty.kind().clone();
        };
        let kind = match alias_kind {
            RustAliasKind::Projection => {
                let trait_ref = alias_ty.trait_ref(tcx);
                // In a case like (NOTE(review): generic arguments in this comment example
                // were lost in extraction; `Result` presumably carried its parameters):
                // ```
                // impl Trait for Result
                // where
                //     for<'a> &'a Result: IntoIterator,
                //     for<'a> <&'a Result as IntoIterator>::Item: Copy,
                // {}
                // ```
                // the `&'a Result as IntoIterator` trait ref has escaping bound variables
                // yet we dont have a binder around (could even be several). Binding this correctly
                // is therefore difficult. Since our trait resolution ignores lifetimes anyway, we
                // just erase them. See also https://github.com/hacspec/hax/issues/747.
                let trait_ref = crate::traits::erase_free_regions(tcx, trait_ref);
                let item = tcx.associated_item(alias_ty.def_id);
                AliasKind::Projection {
                    assoc_item: AssocItem::sfrom(s, &item),
                    impl_expr: solve_trait(s, ty::Binder::dummy(trait_ref)),
                }
            }
            RustAliasKind::Inherent => AliasKind::Inherent,
            RustAliasKind::Opaque => {
                // Reveal the underlying `impl Trait` type.
                let ty = tcx.type_of(alias_ty.def_id).instantiate(tcx, alias_ty.args);
                AliasKind::Opaque {
                    hidden_ty: ty.sinto(s),
                }
            }
            RustAliasKind::Free => AliasKind::Free,
        };
        TyKind::Alias(Alias {
            kind,
            args: alias_ty.args.sinto(s),
            def_id: alias_ty.def_id.sinto(s),
        })
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto> for ty::Ty<'tcx> {
    fn sinto(&self, s: &S) -> Box {
        Box::new(self.sinto(s))
    }
}

/// Reflects [`rustc_middle::ty::Ty`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[serde(transparent)]
pub struct Ty {
    pub(crate) kind: id_table::Node,
}

impl Ty {
    /// Hash-cons a `TyKind` into the global id table.
    #[cfg(feature = "rustc")]
    pub fn new<'tcx, S: BaseState<'tcx>>(s: &S, kind: TyKind) -> Self {
        s.with_global_cache(|cache| {
            let table_session = &mut cache.id_table_session;
            let kind = id_table::Node::new(kind, table_session);
            Ty { kind }
        })
    }

    pub fn inner(&self) -> &Arc {
        self.kind.inner()
    }

    pub fn kind(&self) -> &TyKind {
        self.inner().as_ref()
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Ty<'tcx> {
    fn sinto(&self, s: &S) -> Ty {
        // Memoized per rustc `Ty`: translate each interned type at most once.
        if let Some(ty) = s.with_cache(|cache| cache.tys.get(self).cloned()) {
            return ty;
        }
        let kind: TyKind = self.kind().sinto(s);
        let ty = Ty::new(s, kind);
        s.with_cache(|cache| {
            cache.tys.insert(*self, ty.clone());
        });
        ty
    }
}

/// Reflects [`ty::TyKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TyKind<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum TyKind {
    Bool,
    Char,
    Int(IntTy),
    Uint(UintTy),
    Float(FloatTy),
    #[custom_arm(
        ty::TyKind::FnDef(fun_id, generics) => {
            // A `FnDef` carries both a reference to the function item and its signature.
            let item = translate_item_ref(s, *fun_id, generics);
            let tcx = s.base().tcx;
            let fn_sig = tcx.fn_sig(*fun_id).instantiate(tcx, generics);
            let fn_sig = Box::new(fn_sig.sinto(s));
            TyKind::FnDef { item, fn_sig }
        },
    )]
    /// Reflects [`ty::TyKind::FnDef`]
    FnDef {
        item: ItemRef,
        fn_sig: Box,
    },
    #[custom_arm(
        ty::TyKind::FnPtr(tys, header) => {
            // Re-assemble a full `FnSig` from the split signature parts and header.
            let sig = tys.map_bound(|tys| ty::FnSig {
                inputs_and_output: tys.inputs_and_output,
                c_variadic: header.c_variadic,
                safety: header.safety,
                abi: header.abi,
            });
            TyKind::Arrow(Box::new(sig.sinto(s)))
        },
    )]
    /// Reflects [`ty::TyKind::FnPtr`]
    Arrow(Box),
    #[custom_arm(
        ty::TyKind::Closure (def_id, generics) => {
            let closure = generics.as_closure();
            TyKind::Closure(ClosureArgs::sfrom(s, *def_id, closure))
        },
    )]
    Closure(ClosureArgs),
    #[custom_arm(FROM_TYPE::Adt(adt_def, generics) => TO_TYPE::Adt(translate_item_ref(s, adt_def.did(), generics)),)]
    Adt(ItemRef),
    #[custom_arm(FROM_TYPE::Foreign(def_id) => TO_TYPE::Foreign(translate_item_ref(s, *def_id, Default::default())),)]
    Foreign(ItemRef),
    /// The `ItemRef` uses the fake `Array` def_id.
    #[custom_arm(FROM_TYPE::Array(ty, len) => TO_TYPE::Array({
        // Arrays are encoded as a synthetic item applied to `(element type, length)`.
        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Array));
        let args = s.base().tcx.mk_args(&[(*ty).into(), (*len).into()]);
        ItemRef::translate(s, def_id, args)
    }),)]
    Array(ItemRef),
    /// The `ItemRef` uses the fake `Slice` def_id.
    #[custom_arm(FROM_TYPE::Slice(ty) => TO_TYPE::Slice({
        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Slice));
        let args = s.base().tcx.mk_args(&[(*ty).into()]);
        ItemRef::translate(s, def_id, args)
    }),)]
    Slice(ItemRef),
    /// The `ItemRef` uses the fake `Tuple` def_id.
    #[custom_arm(FROM_TYPE::Tuple(tys) => TO_TYPE::Tuple({
        // One synthetic def_id per tuple arity.
        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Tuple(tys.len())));
        let args = s.base().tcx.mk_args_from_iter(tys.into_iter().map(ty::GenericArg::from));
        ItemRef::translate(s, def_id, args)
    }),)]
    Tuple(ItemRef),
    Str,
    RawPtr(Box, Mutability),
    Ref(Region, Box, Mutability),
    #[custom_arm(FROM_TYPE::Dynamic(preds, region) => make_dyn(s, preds, region),)]
    Dynamic(
        /// Fresh type parameter that we use as the `Self` type in the prediates below.
        ParamTy,
        /// Clauses that define the trait object.
        /// These clauses use the fresh type parameter above
        /// as `Self` type.
        GenericPredicates,
        Region,
    ),
    #[custom_arm(FROM_TYPE::Coroutine(def_id, generics) => TO_TYPE::Coroutine(translate_item_ref(s, *def_id, generics)),)]
    Coroutine(ItemRef),
    Never,
    #[custom_arm(FROM_TYPE::Alias(alias_kind, alias_ty) => Alias::from(s, alias_kind, alias_ty),)]
    Alias(Alias),
    Param(ParamTy),
    Bound(BoundVarIndexKind, BoundTy),
    Placeholder(PlaceholderType),
    Infer(InferTy),
    #[custom_arm(FROM_TYPE::Error(..) => TO_TYPE::Error,)]
    Error,
    #[todo]
    Todo(String),
}

/// Transform existential predicates into properly resolved predicates.
#[cfg(feature = "rustc")]
fn make_dyn<'tcx, S: UnderOwnerState<'tcx>>(
    s: &S,
    epreds: &'tcx ty::List>>,
    region: &ty::Region<'tcx>,
) -> TyKind {
    let tcx = s.base().tcx;
    let def_id = s.owner_id();
    let span = rustc_span::DUMMY_SP.sinto(s);
    // Pretend there's an extra type in the environment.
    let new_param_ty = {
        let generics = tcx.generics_of(def_id);
        let param_count = generics.parent_count + generics.own_params.len();
        ty::ParamTy::new(param_count as u32 + 1, rustc_span::Symbol::intern("_dyn"))
    };
    let new_ty = new_param_ty.to_ty(tcx);
    // Set the new type as the `Self` parameter of our predicates.
    let clauses: Vec> = epreds
        .iter()
        .map(|epred| epred.with_self_ty(tcx, new_ty))
        .collect();
    // Populate a predicate searcher that knows about the `dyn` clauses.
    let mut predicate_searcher = s.with_predicate_searcher(|ps| ps.clone());
    predicate_searcher
        .insert_bound_predicates(clauses.iter().filter_map(|clause| clause.as_trait_clause()));
    predicate_searcher.set_param_env(rustc_trait_selection::traits::normalize_param_env_or_error(
        tcx,
        ty::ParamEnv::new(
            tcx.mk_clauses_from_iter(
                s.param_env()
                    .caller_bounds()
                    .iter()
                    .chain(clauses.iter().copied()),
            ),
        ),
        rustc_trait_selection::traits::ObligationCause::dummy(),
    ));
    // Using the predicate searcher, translate the predicates. Only the projection predicates need
    // to be handled specially.
let predicates = clauses .into_iter() .map(|clause| { let clause = match clause.as_projection_clause() { // Translate normally None => clause.sinto(s), // Translate by hand using our predicate searcher. This does the same as // `clause.sinto(s)` except that it uses our predicate searcher to resolve the // projection `ImplExpr`. Some(proj) => { let bound_vars = proj.bound_vars().sinto(s); let proj = { let alias_ty = &proj.skip_binder().projection_term.expect_ty(tcx); let impl_expr = { let poly_trait_ref = proj.rebind(alias_ty.trait_ref(tcx)); predicate_searcher .resolve(&poly_trait_ref, &|_| {}) .s_unwrap(s) .sinto(s) }; let Term::Ty(ty) = proj.skip_binder().term.sinto(s) else { unreachable!() }; let item = tcx.associated_item(alias_ty.def_id); ProjectionPredicate { impl_expr, assoc_item: AssocItem::sfrom(s, &item), ty, } }; let kind = Binder { value: ClauseKind::Projection(proj), bound_vars, }; let id = kind.clone().map(PredicateKind::Clause).predicate_id(); Clause { kind, id } } }; (clause, span.clone()) }) .collect(); let predicates = GenericPredicates { predicates }; let param_ty = new_param_ty.sinto(s); let region = region.sinto(s); TyKind::Dynamic(param_ty, predicates, region) } /// Reflects [`ty::CanonicalUserTypeAnnotation`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CanonicalUserTypeAnnotation<'tcx>, state: S as gstate)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub struct CanonicalUserTypeAnnotation { pub user_ty: CanonicalUserType, pub span: Span, pub inferred_ty: Ty, } /// Reflects [`ty::AdtKind`] #[derive_group(Serializers)] #[derive(Copy, Clone, Debug, JsonSchema)] pub enum AdtKind { Struct, Union, Enum, /// We sometimes pretend arrays are an ADT and generate a `FullDef` for them. Array, /// We sometimes pretend slices are an ADT and generate a `FullDef` for them. Slice, /// We sometimes pretend tuples are an ADT and generate a `FullDef` for them. 
Tuple, } #[cfg(feature = "rustc")] impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::AdtKind { fn sinto(&self, _s: &S) -> AdtKind { match self { ty::AdtKind::Struct => AdtKind::Struct, ty::AdtKind::Union => AdtKind::Union, ty::AdtKind::Enum => AdtKind::Enum, } } } sinto_todo!(rustc_middle::ty, AdtFlags); /// Reflects [`ty::ReprOptions`] #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_abi::ReprOptions, state: S as s)] pub struct ReprOptions { /// Whether an explicit integer representation was specified. #[value(self.int.is_some())] pub int_specified: bool, /// The actual discriminant type resulting from the representation options. #[value({ use crate::rustc_middle::ty::util::IntTypeExt; self.discr_type().to_ty(s.base().tcx).sinto(s) })] pub typ: Ty, pub align: Option, pub pack: Option, #[value(ReprFlags { is_c: self.c(), is_transparent: self.transparent(), is_simd: self.simd() })] pub flags: ReprFlags, } /// The representation flags without the ones irrelevant outside of rustc. #[derive_group(Serializers)] #[derive(Default, Clone, Debug, JsonSchema)] pub struct ReprFlags { pub is_c: bool, pub is_transparent: bool, pub is_simd: bool, } /// Reflects [`ty::Align`], but directly stores the number of bytes as a u64. #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] #[args(<'tcx, S: BaseState<'tcx>>, from: rustc_abi::Align, state: S as _s)] pub struct Align { #[value({ self.bytes() })] pub bytes: u64, } /// Reflects [`ty::adjustment::PointerCoercion`] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum PointerCoercion { ReifyFnPointer, UnsafeFnPointer, ClosureFnPointer(Safety), MutToConstPointer, ArrayToPointer, Unsize(UnsizingMetadata), } /// The metadata to attach to the newly-unsized ptr. 
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema)]
pub enum UnsizingMetadata {
    /// Unsizing `[T; N] -> [T]`: the metadata is the array length.
    Length(ConstantExpr),
    /// Unsizing to `dyn Trait`: the metadata is the vtable, represented as the
    /// impl witnessing the principal trait bound.
    VTablePtr(ImplExpr),
    /// Any unsizing we don't model precisely.
    Unknown,
}

#[cfg(feature = "rustc")]
impl PointerCoercion {
    /// Manual translation: the `Unsize` variant needs the source and target
    /// types to compute the metadata of the resulting wide pointer.
    pub fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(
        s: &S,
        coercion: ty::adjustment::PointerCoercion,
        src_ty: ty::Ty<'tcx>,
        tgt_ty: ty::Ty<'tcx>,
    ) -> PointerCoercion {
        match coercion {
            ty::adjustment::PointerCoercion::ReifyFnPointer => PointerCoercion::ReifyFnPointer,
            ty::adjustment::PointerCoercion::UnsafeFnPointer => PointerCoercion::UnsafeFnPointer,
            ty::adjustment::PointerCoercion::ClosureFnPointer(x) => {
                PointerCoercion::ClosureFnPointer(x.sinto(s))
            }
            ty::adjustment::PointerCoercion::MutToConstPointer => {
                PointerCoercion::MutToConstPointer
            }
            ty::adjustment::PointerCoercion::ArrayToPointer => PointerCoercion::ArrayToPointer,
            ty::adjustment::PointerCoercion::Unsize => {
                // We only support unsizing behind references, pointers and boxes for now.
                let meta = match (src_ty.builtin_deref(true), tgt_ty.builtin_deref(true)) {
                    (Some(src_ty), Some(tgt_ty)) => {
                        let tcx = s.base().tcx;
                        let typing_env = s.typing_env();
                        // Walk both pointee types in lockstep down to the field
                        // whose type actually changes during the coercion.
                        let (src_ty, tgt_ty) = tcx.struct_lockstep_tails_raw(src_ty, tgt_ty, |ty| {
                            normalize(tcx, typing_env, ty)
                        });
                        match tgt_ty.kind() {
                            ty::Slice(_) | ty::Str => match src_ty.kind() {
                                ty::Array(_, len) => {
                                    let len = len.sinto(s);
                                    UnsizingMetadata::Length(len)
                                }
                                _ => UnsizingMetadata::Unknown,
                            },
                            ty::Dynamic(preds, ..) => {
                                // The principal trait is always listed first in
                                // the existential predicates of a `dyn` type.
                                let pred = preds[0].with_self_ty(tcx, src_ty);
                                let clause = pred.as_trait_clause().expect(
                                    "the first `ExistentialPredicate` of `TyKind::Dynamic` \
                                     should be a trait clause",
                                );
                                let tref = clause.rebind(clause.skip_binder().trait_ref);
                                let impl_expr = solve_trait(s, tref);
                                UnsizingMetadata::VTablePtr(impl_expr)
                            }
                            _ => UnsizingMetadata::Unknown,
                        }
                    }
                    _ => UnsizingMetadata::Unknown,
                };
                PointerCoercion::Unsize(meta)
            }
        }
    }
}

/// Reflects [`ty::FnSig`]
#[derive_group(Serializers)]
#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::FnSig<'tcx>, state: S as s)]
pub struct TyFnSig {
    #[value(self.inputs().sinto(s))]
    pub inputs: Vec,
    #[value(self.output().sinto(s))]
    pub output: Ty,
    pub c_variadic: bool,
    pub safety: Safety,
    pub abi: ExternAbi,
}

/// Reflects [`ty::PolyFnSig`]
pub type PolyFnSig = Binder;

/// Reflects [`ty::TraitRef`]
/// Contains the def_id and arguments passed to the trait. The first type argument is the `Self`
/// type. The `ImplExprs` are the _required_ predicate for this trait; currently they are always
/// empty because we consider all trait predicates as implied.
/// `self.in_trait` is always `None` because a trait can't be associated to another one.
pub type TraitRef = ItemRef;

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::TraitRef<'tcx> {
    fn sinto(&self, s: &S) -> TraitRef {
        translate_item_ref(s, self.def_id, self.args)
    }
}

/// Reflects [`ty::TraitPredicate`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TraitPredicate<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct TraitPredicate {
    pub trait_ref: TraitRef,
    // Collapse rustc's polarity enum into a boolean: `true` iff the predicate
    // is a positive (`T: Trait`) bound.
    #[map(*x == ty::PredicatePolarity::Positive)]
    #[from(polarity)]
    pub is_positive: bool,
}

/// Reflects [`ty::OutlivesPredicate`] as a named struct
/// instead of a tuple struct.
/// This is because the script converting
/// JSONSchema types to OCaml doesn't support tuple structs, and this
/// is the only tuple struct in the whole AST.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct OutlivesPredicate {
    pub lhs: T,
    pub rhs: Region,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>, T, U> SInto> for ty::OutlivesPredicate<'tcx, T>
where
    T: SInto,
{
    fn sinto(&self, s: &S) -> OutlivesPredicate
    where
    {
        // Field 0 is the outlived entity (a region or a type), field 1 the
        // region it must outlive.
        OutlivesPredicate {
            lhs: self.0.sinto(s),
            rhs: self.1.sinto(s),
        }
    }
}

/// Reflects [`ty::RegionOutlivesPredicate`]
pub type RegionOutlivesPredicate = OutlivesPredicate;
/// Reflects [`ty::TypeOutlivesPredicate`]
pub type TypeOutlivesPredicate = OutlivesPredicate;

/// Reflects [`ty::Term`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum Term {
    Ty(Ty),
    Const(ConstantExpr),
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Term<'tcx> {
    fn sinto(&self, s: &S) -> Term {
        use ty::TermKind;
        match self.kind() {
            TermKind::Ty(ty) => Term::Ty(ty.sinto(s)),
            TermKind::Const(c) => Term::Const(c.sinto(s)),
        }
    }
}

/// Expresses a constraint over an associated type.
///
/// For instance:
/// ```text
/// fn f>(...)
///      ^^^^^^^^^^
/// ```
/// (provided the trait `Foo` has an associated type `S`).
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectionPredicate {
    /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`.
    pub impl_expr: ImplExpr,
    /// The `Type` in `Ty: Trait<..., Type = U>`.
    pub assoc_item: AssocItem,
    /// The type `U` in `Ty: Trait<..., Type = U>`.
    pub ty: Ty,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderBinderState<'tcx>> SInto for ty::ProjectionPredicate<'tcx> {
    fn sinto(&self, s: &S) -> ProjectionPredicate {
        let tcx = s.base().tcx;
        let alias_ty = &self.projection_term.expect_ty(tcx);
        // Re-attach the binder from the ambient state to the trait ref so the
        // trait can be resolved to an `ImplExpr`.
        let poly_trait_ref = s.binder().rebind(alias_ty.trait_ref(tcx));
        // A projection predicate constrains a type, never a const, so
        // `Term::Ty` cannot fail here.
        let Term::Ty(ty) = self.term.sinto(s) else {
            unreachable!()
        };
        let item = tcx.associated_item(alias_ty.def_id);
        ProjectionPredicate {
            impl_expr: solve_trait(s, poly_trait_ref),
            assoc_item: AssocItem::sfrom(s, &item),
            ty,
        }
    }
}

/// Reflects [`ty::ClauseKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::ClauseKind<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum ClauseKind {
    Trait(TraitPredicate),
    RegionOutlives(RegionOutlivesPredicate),
    TypeOutlives(TypeOutlivesPredicate),
    Projection(ProjectionPredicate),
    ConstArgHasType(ConstantExpr, Ty),
    WellFormed(Term),
    ConstEvaluatable(ConstantExpr),
    HostEffect(HostEffectPredicate),
    UnstableFeature(Symbol),
}

sinto_todo!(rustc_middle::ty, HostEffectPredicate<'tcx>);

/// Reflects [`ty::Clause`] and adds a hash-consed predicate identifier.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Clause {
    pub kind: Binder,
    /// Stable identifier derived by hashing the (binder-wrapped) kind; lets
    /// backends refer to predicates without re-serializing them.
    pub id: PredicateId,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Clause<'tcx> {
    fn sinto(&self, s: &S) -> Clause {
        let kind = self.kind().sinto(s);
        // The id is computed over the clause viewed as a full predicate.
        let id = kind.clone().map(PredicateKind::Clause).predicate_id();
        Clause { kind, id }
    }
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::PolyTraitPredicate<'tcx> {
    fn sinto(&self, s: &S) -> Clause {
        // Translate the binder, then wrap the inner trait predicate as a
        // `ClauseKind::Trait` before computing the id.
        let kind: Binder<_> = self.sinto(s);
        let kind: Binder = kind.map(ClauseKind::Trait);
        let id = kind.clone().map(PredicateKind::Clause).predicate_id();
        Clause { kind, id }
    }
}

/// Reflects [`ty::Predicate`] and adds a hash-consed predicate identifier.
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Predicate {
    pub kind: Binder,
    pub id: PredicateId,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Predicate<'tcx> {
    fn sinto(&self, s: &S) -> Predicate {
        let kind = self.kind().sinto(s);
        let id = kind.predicate_id();
        Predicate { kind, id }
    }
}

/// Reflects [`ty::BoundVariableKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundVariableKind, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum BoundVariableKind {
    Ty(BoundTyKind),
    Region(BoundRegionKind),
    Const,
}

/// Reflects [`ty::Binder`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Binder {
    pub value: T,
    pub bound_vars: Vec,
}

impl Binder {
    /// Borrowing view of the binder; clones only the bound-vars list.
    pub fn as_ref(&self) -> Binder<&T> {
        Binder {
            value: &self.value,
            bound_vars: self.bound_vars.clone(),
        }
    }
    /// Discard the binder and return the inner value (named to mirror rustc's
    /// `skip_binder`, prefixed to avoid confusion with it).
    pub fn hax_skip_binder(self) -> T {
        self.value
    }
    pub fn hax_skip_binder_ref(&self) -> &T {
        &self.value
    }
    /// Map the bound value while keeping the same bound variables.
    pub fn map(self, f: impl FnOnce(T) -> U) -> Binder {
        Binder {
            value: f(self.value),
            bound_vars: self.bound_vars,
        }
    }
    pub fn inner_mut(&mut self) -> &mut T {
        &mut self.value
    }
    /// Wrap `value` under the same bound variables as `self`.
    pub fn rebind(&self, value: U) -> Binder {
        self.as_ref().map(|_| value)
    }
}

/// Reflects [`ty::GenericPredicates`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericPredicates<'tcx>, state: S as s)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, Default, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct GenericPredicates {
    #[value(self.predicates.iter().map(|x| x.sinto(s)).collect())]
    pub predicates: Vec<(Clause, Span)>,
}

#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>> SInto for crate::traits::Predicates<'tcx> {
    fn sinto(&self, s: &S) -> GenericPredicates {
        GenericPredicates {
            predicates: self.as_ref().sinto(s),
        }
    }
}

// Generic translation of `ty::Binder`: translate the bound vars, then
// translate the contents under a state that records the binder.
#[cfg(feature = "rustc")]
impl<'tcx, S: UnderOwnerState<'tcx>, T1, T2> SInto> for ty::Binder<'tcx, T1>
where
    T1: SInto, T2>,
{
    fn sinto(&self, s: &S) -> Binder {
        let bound_vars = self.bound_vars().sinto(s);
        let value = {
            // Enter the binder (with unit payload) so nested translations know
            // which binder their bound variables refer to.
            let under_binder_s = &s.with_binder(self.as_ref().map_bound(|_| ()));
            self.as_ref().skip_binder().sinto(under_binder_s)
        };
        Binder { value, bound_vars }
    }
}

/// Reflects [`ty::SubtypePredicate`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::SubtypePredicate<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct SubtypePredicate {
    pub a_is_expected: bool,
    pub a: Ty,
    pub b: Ty,
}

/// Reflects [`ty::CoercePredicate`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CoercePredicate<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct CoercePredicate {
    pub a: Ty,
    pub b: Ty,
}

/// Reflects [`ty::AliasRelationDirection`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::AliasRelationDirection, state: S as _tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum AliasRelationDirection {
    Equate,
    Subtype,
}

/// Reflects [`ty::ClosureArgs`]
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]
#[derive_group(Serializers)]
pub struct ClosureArgs {
    pub item: ItemRef,
    /// The base kind of this closure. The kinds are ordered by inclusion: any `Fn` works as an
    /// `FnMut`, and any `FnMut` works as an `FnOnce`.
    pub kind: ClosureKind,
    /// The signature of the function that the closure implements, e.g. `fn(A, B, C) -> D`.
    pub fn_sig: PolyFnSig,
    /// The set of captured variables. Together they form the state of the closure.
    pub upvar_tys: Vec,
}

impl ClosureArgs {
    /// Iterate over the upvars that are borrows with erased regions. These may require allocating
    /// fresh regions.
    pub fn iter_upvar_borrows(&self) -> impl Iterator {
        self.upvar_tys.iter().filter(|ty| {
            matches!(
                ty.kind(),
                TyKind::Ref(
                    Region {
                        kind: RegionKind::ReErased
                    },
                    ..
                )
            )
        })
    }
}

#[cfg(feature = "rustc")]
impl ClosureArgs {
    // Manual implementation because we need the `def_id` of the closure.
    pub(crate) fn sfrom<'tcx, S>(
        s: &S,
        def_id: RDefId,
        from: ty::ClosureArgs>,
    ) -> Self
    where
        S: UnderOwnerState<'tcx>,
    {
        let tcx = s.base().tcx;
        let sig = from.sig();
        let item = {
            // The closure has no generics of its own: it inherits its parent generics and could
            // have late-bound args but these are part of the signature.
            let parent_args = tcx.mk_args(from.parent_args());
            translate_item_ref(s, def_id, parent_args)
        };
        ClosureArgs {
            item,
            kind: from.kind().sinto(s),
            // Translate the signature as a plain (safe) fn signature, dropping
            // the closure-specific wrapping.
            fn_sig: tcx
                .signature_unclosure(sig, rustc_hir::Safety::Safe)
                .sinto(s),
            upvar_tys: from.upvar_tys().sinto(s),
        }
    }
}

/// Reflects [`ty::ClosureKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ClosureKind, state: S as _tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum ClosureKind {
    Fn,
    FnMut,
    FnOnce,
}

sinto_todo!(rustc_middle::ty, NormalizesTo<'tcx>);

/// Reflects [`ty::PredicateKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::PredicateKind<'tcx>, state: S as tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum PredicateKind {
    Clause(ClauseKind),
    DynCompatible(DefId),
    Subtype(SubtypePredicate),
    Coerce(CoercePredicate),
    ConstEquate(ConstantExpr, ConstantExpr),
    Ambiguous,
    AliasRelate(Term, Term, AliasRelationDirection),
    NormalizesTo(NormalizesTo),
}

/// Reflects [`ty::AssocItem`]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct AssocItem {
    pub def_id: DefId,
    /// This is `None` for RPTITs.
    pub name: Option,
    pub kind: AssocKind,
    pub container: AssocItemContainer,
    /// Whether this item has a value (e.g. this is `false` for trait methods without default
    /// implementations).
    pub has_value: bool,
}

#[cfg(feature = "rustc")]
impl AssocItem {
    pub fn sfrom<'tcx, S: BaseState<'tcx>>(s: &S, item: &ty::AssocItem) -> AssocItem {
        Self::sfrom_instantiated(s, item, None)
    }

    /// Translate an `AssocItem` and optionally instantiate it with the provided arguments.
    /// When `item_args` is `None`, the item's identity arguments are used.
    pub fn sfrom_instantiated<'tcx, S: BaseState<'tcx>>(
        s: &S,
        item: &ty::AssocItem,
        item_args: Option>,
    ) -> AssocItem {
        let tcx = s.base().tcx;
        // We want to solve traits in the context of this item.
        let s = &s.with_owner_id(item.def_id);
        let item_args =
            item_args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, item.def_id));
        let container_id = item.container_id(tcx);
        // The container (trait or impl) only takes a prefix of the item's args.
        let container_args = item_args.truncate_to(tcx, tcx.generics_of(container_id));
        let container = match item.container {
            ty::AssocContainer::Trait => {
                let trait_ref =
                    ty::TraitRef::new_from_args(tcx, container_id, container_args).sinto(s);
                AssocItemContainer::TraitContainer { trait_ref }
            }
            ty::AssocContainer::TraitImpl(implemented_item_id) => {
                let implemented_item_id = implemented_item_id.unwrap();
                let item = translate_item_ref(s, container_id, container_args);
                let implemented_trait_ref = tcx
                    .impl_trait_ref(container_id)
                    .instantiate(tcx, container_args);
                // Re-express the item's args in terms of the implemented
                // trait's generics to reference the trait-side declaration.
                let implemented_trait_item = translate_item_ref(
                    s,
                    implemented_item_id,
                    item_args.rebase_onto(tcx, container_id, implemented_trait_ref.args),
                );
                AssocItemContainer::TraitImplContainer {
                    impl_: item,
                    implemented_trait_ref: implemented_trait_ref.sinto(s),
                    implemented_trait_item,
                    overrides_default: tcx.defaultness(implemented_item_id).has_value(),
                }
            }
            ty::AssocContainer::InherentImpl => AssocItemContainer::InherentImplContainer {
                impl_id: container_id.sinto(s),
            },
        };
        AssocItem {
            def_id: item.def_id.sinto(s),
            name: item.opt_name().sinto(s),
            kind: item.kind.sinto(s),
            container,
            has_value: item.defaultness(tcx).has_value(),
        }
    }
}

/// Reflects [`ty::AssocKind`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: ty::AssocKind, state: S as _tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum AssocKind {
    Const { name: Symbol },
    Fn { name: Symbol, has_self: bool },
    Type { data: AssocTypeData },
}

/// Reflects [`ty::AssocTypeData`]
#[derive(AdtInto)]
#[args(<'tcx, S: BaseState<'tcx>>, from: ty::AssocTypeData, state: S as _tcx)]
#[derive_group(Serializers)]
#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum AssocTypeData {
    Normal(Symbol),
Rpitit(ImplTraitInTraitData), } #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum AssocItemContainer { TraitContainer { trait_ref: TraitRef, }, TraitImplContainer { /// Reference to the def_id of the impl block. impl_: ItemRef, /// The trait ref implemented by the impl block. implemented_trait_ref: TraitRef, /// The the associated item (in the trait declaration) that is being implemented. implemented_trait_item: ItemRef, /// Whether the corresponding trait item had a default (and therefore this one overrides /// it). overrides_default: bool, }, InherentImplContainer { impl_id: DefId, }, } /// Reflects [`ty::ImplTraitInTraitData`] #[derive(AdtInto)] #[args(<'tcx, S: BaseState<'tcx>>, from: ty::ImplTraitInTraitData, state: S as _s)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum ImplTraitInTraitData { Trait { fn_def_id: DefId, opaque_def_id: DefId, }, Impl { fn_def_id: DefId, }, } ================================================ FILE: frontend/exporter/src/utils/error_macros.rs ================================================ macro_rules! format_with_context { ($format_str:expr $(,$arg:expr)* $(; {$($x:expr),*})?) => { format!( concat!( $format_str $(, "\n\nContext:\n", $(concat!(" - ", stringify!($x), ": "), "{:#?}", "\n",)*)? ), $($arg,)* $($($x,)*)? ) }; ($($tt:tt)*) => {format!($($tt)*)}; } mod internal_helpers { macro_rules! _verb { (fatal, $o:expr, $message:expr) => { $o.struct_fatal($message) }; (error, $o:expr, $message:expr) => { $o.struct_err($message) }; (warn, $o:expr, $message:expr) => { $o.struct_warn($message) }; } macro_rules! 
_span_verb_base { ($verb:ident, $s:ident, $span:expr, $message:expr) => {{ let backtrace = std::backtrace::Backtrace::capture(); eprintln!("{}", backtrace); let mut builder = $crate::utils::_verb!($verb, $s.base().tcx.dcx(), $message); if let Some(span) = $span { builder.span(span.clone()); } builder.code(rustc_errors::codes::ErrCode::MAX); builder.note( "⚠️ This is a bug in Hax's frontend. Please report this error to https://github.com/hacspec/hax/issues with some context (e.g. the current crate)!", ); builder.emit() }}; } pub(crate) use _span_verb_base; pub(crate) use _verb; } macro_rules! report { ($verb:ident, $s:ident [$span:expr], $($tt:tt)*) => { $crate::utils::_span_verb_base!($verb, $s, Some($span), $crate::utils::format_with_context!($($tt)*)) }; ($verb:ident, $s:ident, $($tt:tt)*) => { $crate::utils::_span_verb_base!( $verb, $s, $s.base().opt_def_id.map(|did| $s.base().tcx.def_span(did)), $crate::utils::format_with_context!($($tt)*) ) }; } macro_rules! error { ($($tt:tt)*) => {$crate::utils::report!(error, $($tt)*)} } #[allow(unused_macros)] macro_rules! warning { ($($tt:tt)*) => {$crate::utils::report!(warn, $($tt)*)} } macro_rules! fatal { ($($tt:tt)*) => {$crate::utils::report!(fatal, $($tt)*)} } pub(crate) use format_with_context; pub(crate) use internal_helpers::_span_verb_base; pub(crate) use internal_helpers::_verb; pub(crate) use report; macro_rules! supposely_unreachable_message { ($label:literal) => { concat!( "Supposely unreachable place in the Rust AST. The label is ", stringify!($label), ".\nThis error report happend because some assumption about the Rust AST was broken." ) }; } macro_rules! supposely_unreachable { ($s:ident $([$span:expr])?, $label:literal $($tt:tt)*) => { { $crate::utils::error!($s$([$span])?, $crate::utils::supposely_unreachable_message!($label) $($tt)+) } }; } macro_rules! 
supposely_unreachable_fatal { ($s:ident $([$span:expr])?, $label:literal $($tt:tt)*) => { $crate::utils::fatal!($s$([$span])?, $crate::utils::supposely_unreachable_message!($label) $($tt)+) }; } pub(crate) use error; pub(crate) use fatal; pub(crate) use supposely_unreachable; pub(crate) use supposely_unreachable_fatal; pub(crate) use supposely_unreachable_message; #[allow(unused_imports)] pub(crate) use warning; pub trait SExpect: Sized { type Output; fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output; fn s_unwrap<'tcx, S: crate::BaseState<'tcx>>(self, s: &S) -> Self::Output { self.s_expect(s, "") } } mod s_expect_impls { use super::*; struct Dummy; impl std::fmt::Debug for Dummy { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "...") } } fn s_expect_error<'tcx>( s: &impl crate::BaseState<'tcx>, expected: impl std::fmt::Debug, got: impl std::fmt::Debug, message: &str, ) -> ! { fatal!( s, "s_expect: expected {:?}, got {:?}. {}", expected, got, message ) } impl SExpect for Option { type Output = T; fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output { self.unwrap_or_else(|| s_expect_error(s, Some(Dummy), None::<()>, message)) } } impl SExpect for Result { type Output = T; fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output { self.unwrap_or_else(|e| s_expect_error(s, Ok::<_, ()>(Dummy), Err::<(), _>(e), message)) } } } macro_rules! 
s_assert { ($s:ident, $assertion:expr) => {{ if !($assertion) { fatal!($s, "assertion failed: {}", stringify!($assertion)) } }}; } pub(crate) use s_assert; ================================================ FILE: frontend/exporter/src/utils/mod.rs ================================================ mod error_macros; mod type_map; pub use error_macros::*; pub use type_map::*; ================================================ FILE: frontend/exporter/src/utils/type_map.rs ================================================ use std::{ any::{Any, TypeId}, collections::HashMap, marker::PhantomData, }; pub trait TypeMappable = Any + Send + Sync; /// Defines a mapping from types to types. pub trait TypeMapper { type Value: TypeMappable; } /// A map that maps types to values in a generic manner: we store for each type `T` a value of /// type `M::Value`. pub struct TypeMap { data: HashMap>, phantom: PhantomData, } impl TypeMap { pub fn get(&self) -> Option<&M::Value> { self.data .get(&TypeId::of::()) // We must be careful to not accidentally cast the box itself as `dyn Any`. .map(|val: &Box| &**val) .and_then(|val: &dyn TypeMappable| (val as &dyn Any).downcast_ref()) } pub fn get_mut(&mut self) -> Option<&mut M::Value> { self.data .get_mut(&TypeId::of::()) // We must be careful to not accidentally cast the box itself as `dyn Any`. 
.map(|val: &mut Box| &mut **val) .and_then(|val: &mut dyn TypeMappable| (val as &mut dyn Any).downcast_mut()) } pub fn or_default(&mut self) -> &mut M::Value where M::Value: Default, { if self.get::().is_none() { self.insert::(Default::default()); } self.get_mut().unwrap() } pub fn insert(&mut self, val: M::Value) -> Option>> { self.data .insert(TypeId::of::(), Box::new(val)) .and_then(|val: Box| (val as Box).downcast().ok()) } } impl Default for TypeMap { fn default() -> Self { Self { data: Default::default(), phantom: Default::default(), } } } ================================================ FILE: hax-bounded-integers/Cargo.toml ================================================ [package] name = "hax-bounded-integers" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "Newtypes for working with bounded integers with hax" [dependencies] duplicate = "1.0.0" hax-lib.workspace = true paste = "1.0.15" ================================================ FILE: hax-bounded-integers/proofs/fstar/extraction/Hax_bounded_integers.Num_traits.fst ================================================ module Hax_bounded_integers.Num_traits #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core_models open FStar.Mul class t_BitOps (v_Self: Type0) = { f_Output:Type0; f_count_ones_pre:v_Self -> bool; f_count_ones_post:v_Self -> u32 -> bool; f_count_ones:x0: v_Self -> Prims.Pure u32 (f_count_ones_pre x0) (fun result -> f_count_ones_post x0 result); f_count_zeros_pre:v_Self -> bool; f_count_zeros_post:v_Self -> u32 -> bool; f_count_zeros:x0: v_Self -> Prims.Pure u32 (f_count_zeros_pre x0) (fun result -> f_count_zeros_post x0 result); f_leading_ones_pre:v_Self -> bool; f_leading_ones_post:v_Self -> u32 -> bool; f_leading_ones:x0: v_Self -> Prims.Pure u32 (f_leading_ones_pre x0) (fun result -> f_leading_ones_post x0 result); f_leading_zeros_pre:v_Self -> 
bool; f_leading_zeros_post:v_Self -> u32 -> bool; f_leading_zeros:x0: v_Self -> Prims.Pure u32 (f_leading_zeros_pre x0) (fun result -> f_leading_zeros_post x0 result); f_trailing_ones_pre:v_Self -> bool; f_trailing_ones_post:v_Self -> u32 -> bool; f_trailing_ones:x0: v_Self -> Prims.Pure u32 (f_trailing_ones_pre x0) (fun result -> f_trailing_ones_post x0 result); f_trailing_zeros_pre:v_Self -> bool; f_trailing_zeros_post:v_Self -> u32 -> bool; f_trailing_zeros:x0: v_Self -> Prims.Pure u32 (f_trailing_zeros_pre x0) (fun result -> f_trailing_zeros_post x0 result); f_rotate_left_pre:v_Self -> u32 -> bool; f_rotate_left_post:v_Self -> u32 -> f_Output -> bool; f_rotate_left:x0: v_Self -> x1: u32 -> Prims.Pure f_Output (f_rotate_left_pre x0 x1) (fun result -> f_rotate_left_post x0 x1 result); f_rotate_right_pre:v_Self -> u32 -> bool; f_rotate_right_post:v_Self -> u32 -> f_Output -> bool; f_rotate_right:x0: v_Self -> x1: u32 -> Prims.Pure f_Output (f_rotate_right_pre x0 x1) (fun result -> f_rotate_right_post x0 x1 result); f_from_be_pre:v_Self -> bool; f_from_be_post:v_Self -> f_Output -> bool; f_from_be:x0: v_Self -> Prims.Pure f_Output (f_from_be_pre x0) (fun result -> f_from_be_post x0 result); f_from_le_pre:v_Self -> bool; f_from_le_post:v_Self -> f_Output -> bool; f_from_le:x0: v_Self -> Prims.Pure f_Output (f_from_le_pre x0) (fun result -> f_from_le_post x0 result); f_to_be_pre:v_Self -> bool; f_to_be_post:v_Self -> f_Output -> bool; f_to_be:x0: v_Self -> Prims.Pure f_Output (f_to_be_pre x0) (fun result -> f_to_be_post x0 result); f_to_le_pre:v_Self -> bool; f_to_le_post:v_Self -> f_Output -> bool; f_to_le:x0: v_Self -> Prims.Pure f_Output (f_to_le_pre x0) (fun result -> f_to_le_post x0 result); f_pow_pre:v_Self -> u32 -> bool; f_pow_post:v_Self -> u32 -> f_Output -> bool; f_pow:x0: v_Self -> x1: u32 -> Prims.Pure f_Output (f_pow_pre x0 x1) (fun result -> f_pow_post x0 x1 result) } class t_CheckedAdd (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; 
f_checked_add_pre:v_Self -> v_Rhs -> bool; f_checked_add_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool; f_checked_add:x0: v_Self -> x1: v_Rhs -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_checked_add_pre x0 x1) (fun result -> f_checked_add_post x0 x1 result) } class t_CheckedDiv (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_checked_div_pre:v_Self -> v_Rhs -> bool; f_checked_div_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool; f_checked_div:x0: v_Self -> x1: v_Rhs -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_checked_div_pre x0 x1) (fun result -> f_checked_div_post x0 x1 result) } class t_CheckedMul (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_checked_mul_pre:v_Self -> v_Rhs -> bool; f_checked_mul_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool; f_checked_mul:x0: v_Self -> x1: v_Rhs -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_checked_mul_pre x0 x1) (fun result -> f_checked_mul_post x0 x1 result) } class t_CheckedNeg (v_Self: Type0) = { f_Output:Type0; f_checked_neg_pre:v_Self -> bool; f_checked_neg_post:v_Self -> Core_models.Option.t_Option f_Output -> bool; f_checked_neg:x0: v_Self -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_checked_neg_pre x0) (fun result -> f_checked_neg_post x0 result) } class t_CheckedSub (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_checked_sub_pre:v_Self -> v_Rhs -> bool; f_checked_sub_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool; f_checked_sub:x0: v_Self -> x1: v_Rhs -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_checked_sub_pre x0 x1) (fun result -> f_checked_sub_post x0 x1 result) } class t_FromBytes (v_Self: Type0) = { f_BYTES:Type0; f_from_le_bytes_pre:f_BYTES -> bool; f_from_le_bytes_post:f_BYTES -> v_Self -> bool; f_from_le_bytes:x0: f_BYTES -> Prims.Pure v_Self (f_from_le_bytes_pre x0) (fun result -> f_from_le_bytes_post x0 result); f_from_be_bytes_pre:f_BYTES -> bool; 
f_from_be_bytes_post:f_BYTES -> v_Self -> bool; f_from_be_bytes:x0: f_BYTES -> Prims.Pure v_Self (f_from_be_bytes_pre x0) (fun result -> f_from_be_bytes_post x0 result) } class t_NumOps (v_Self: Type0) (v_Rhs: Type0) (v_Output: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_9126539072073536218:Core_models.Ops.Arith.t_Add v_Self v_Rhs; [@@@ FStar.Tactics.Typeclasses.no_method]_super_9784678892199232396:Core_models.Ops.Arith.t_Sub v_Self v_Rhs; [@@@ FStar.Tactics.Typeclasses.no_method]_super_7005199110250618039:Core_models.Ops.Arith.t_Mul v_Self v_Rhs; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12366019628759357413:Core_models.Ops.Arith.t_Div v_Self v_Rhs; [@@@ FStar.Tactics.Typeclasses.no_method]_super_11859756759858186302:Core_models.Ops.Arith.t_Rem v_Self v_Rhs } class t_One (v_Self: Type0) = { f_one_pre:Prims.unit -> bool; f_one_post:Prims.unit -> v_Self -> bool; f_one:x0: Prims.unit -> Prims.Pure v_Self (f_one_pre x0) (fun result -> f_one_post x0 result) } class t_ToBytes (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_3732703090464998751:t_FromBytes v_Self; f_to_le_bytes_pre:v_Self -> bool; f_to_le_bytes_post:v_Self -> v_3732703090464998751.f_BYTES -> bool; f_to_le_bytes:x0: v_Self -> Prims.Pure v_3732703090464998751.f_BYTES (f_to_le_bytes_pre x0) (fun result -> f_to_le_bytes_post x0 result); f_to_be_bytes_pre:v_Self -> bool; f_to_be_bytes_post:v_Self -> v_3732703090464998751.f_BYTES -> bool; f_to_be_bytes:x0: v_Self -> Prims.Pure v_3732703090464998751.f_BYTES (f_to_be_bytes_pre x0) (fun result -> f_to_be_bytes_post x0 result) } class t_WrappingAdd (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_wrapping_add_pre:v_Self -> v_Rhs -> bool; f_wrapping_add_post:v_Self -> v_Rhs -> f_Output -> bool; f_wrapping_add:x0: v_Self -> x1: v_Rhs -> Prims.Pure f_Output (f_wrapping_add_pre x0 x1) (fun result -> f_wrapping_add_post x0 x1 result) } class t_WrappingDiv (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; 
f_wrapping_div_pre:v_Self -> v_Rhs -> bool; f_wrapping_div_post:v_Self -> v_Rhs -> f_Output -> bool; f_wrapping_div:x0: v_Self -> x1: v_Rhs -> Prims.Pure f_Output (f_wrapping_div_pre x0 x1) (fun result -> f_wrapping_div_post x0 x1 result) } class t_WrappingMul (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_wrapping_mul_pre:v_Self -> v_Rhs -> bool; f_wrapping_mul_post:v_Self -> v_Rhs -> f_Output -> bool; f_wrapping_mul:x0: v_Self -> x1: v_Rhs -> Prims.Pure f_Output (f_wrapping_mul_pre x0 x1) (fun result -> f_wrapping_mul_post x0 x1 result) } class t_WrappingSub (v_Self: Type0) (v_Rhs: Type0) = { f_Output:Type0; f_wrapping_sub_pre:v_Self -> v_Rhs -> bool; f_wrapping_sub_post:v_Self -> v_Rhs -> f_Output -> bool; f_wrapping_sub:x0: v_Self -> x1: v_Rhs -> Prims.Pure f_Output (f_wrapping_sub_pre x0 x1) (fun result -> f_wrapping_sub_post x0 x1 result) } class t_Zero (v_Self: Type0) = { f_zero_pre:Prims.unit -> bool; f_zero_post:Prims.unit -> v_Self -> bool; f_zero:x0: Prims.unit -> Prims.Pure v_Self (f_zero_pre x0) (fun result -> f_zero_post x0 result) } class t_MachineInt (v_Self: Type0) (v_Output: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_11581440318597584651:Core_models.Marker.t_Copy v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12866954522599331834:Core_models.Cmp.t_PartialOrd v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_13035911912416111195:Core_models.Cmp.t_Ord v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12632649257025169145:Core_models.Cmp.t_PartialEq v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_8099741844003281729:Core_models.Cmp.t_Eq v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_9841570312332416173:t_Zero v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12668241202577409386:t_One v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_9487321769118300762:Core_models.Ops.Bit.t_Not v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_1980884762883925305:t_NumOps 
v_Self v_Self v_Output; [@@@ FStar.Tactics.Typeclasses.no_method]_super_13929479875548649875:Core_models.Ops.Bit.t_BitAnd v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_1708325062211865233:Core_models.Ops.Bit.t_BitOr v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_1501688608269502122:Core_models.Ops.Bit.t_BitXor v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_15083490293093561556:Core_models.Ops.Bit.t_Shl v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_9065931548762825726:Core_models.Ops.Bit.t_Shr v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_5052970308637232515:t_CheckedAdd v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_739902999637339236:t_CheckedSub v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_15323401662629887609:t_CheckedMul v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_8119502507145032897:t_CheckedDiv v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12846047806852469117:t_WrappingAdd v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12408554086330550784:t_WrappingSub v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_8633193508996485932:t_WrappingMul v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_16339457892016115661:t_WrappingDiv v_Self v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_12348120774285878195:t_BitOps v_Self } ================================================ FILE: hax-bounded-integers/proofs/fstar/extraction/Hax_bounded_integers.fst ================================================ module Hax_bounded_integers #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core_models open FStar.Mul ///Bounded i128 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedI128 (v_MIN v_MAX: i128) = x: i128{x >=. v_MIN && x <=. v_MAX} ///Bounded i16 integers. 
This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedI16 (v_MIN v_MAX: i16) = x: i16{x >=. v_MIN && x <=. v_MAX} ///Bounded i32 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedI32 (v_MIN v_MAX: i32) = x: i32{x >=. v_MIN && x <=. v_MAX} ///Bounded i64 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedI64 (v_MIN v_MAX: i64) = x: i64{x >=. v_MIN && x <=. v_MAX} ///Bounded i8 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedI8 (v_MIN v_MAX: i8) = x: i8{x >=. v_MIN && x <=. v_MAX} ///Bounded isize integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedIsize (v_MIN v_MAX: isize) = x: isize{x >=. v_MIN && x <=. v_MAX} ///Bounded u128 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedU128 (v_MIN v_MAX: u128) = x: u128{x >=. v_MIN && x <=. v_MAX} ///Bounded u16 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedU16 (v_MIN v_MAX: u16) = x: u16{x >=. v_MIN && x <=. v_MAX} ///Bounded u32 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedU32 (v_MIN v_MAX: u32) = x: u32{x >=. v_MIN && x <=. v_MAX} ///Bounded u64 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedU64 (v_MIN v_MAX: u64) = x: u64{x >=. v_MIN && x <=. v_MAX} ///Bounded u8 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. 
v_MIN && x <=. v_MAX} ///Bounded usize integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`. let t_BoundedUsize (v_MIN v_MAX: usize) = x: usize{x >=. v_MIN && x <=. v_MAX} ================================================ FILE: hax-bounded-integers/src/lib.rs ================================================ use hax_lib::Refinement; pub mod num_traits; pub mod _macro_utils { pub use duplicate; pub use paste; } #[doc(hidden)] #[macro_export] macro_rules! derivate_binop_for_bounded { ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident}; $($tt:tt)*) => { $crate::derivate_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, get, Self::Output}; $($tt)*) ; }; ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty};) => {}; ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty}; ($trait:ident, $meth:ident), $($tt:tt)*) => { $crate::derivate_binop_for_bounded!(@$t, $bounded_t, $trait, $meth, $get, $out, $(<$(const $cst_name:$cst_ty),*>)?); $crate::derivate_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, $get, $out}; $($tt)*); }; (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty$(,)?) 
=> { $crate::derivate_binop_for_bounded!( @$t, $bounded_t, $trait, $meth, $get, $out, ); }; (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty, <$(const $cst_name:ident : $cst_ty:ty),*> ) => { $crate::_macro_utils::paste::paste!{ // BoundedT BoundedT impl<$(const [< $cst_name _LHS >]: $cst_ty,)* $(const [< $cst_name _RHS >]: $cst_ty,)*> $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $bounded_t<$([< $cst_name _LHS >],)*> { type Output = $t; #[inline(always)] fn $meth(self, other: $bounded_t<$([< $cst_name _RHS >],)*>) -> $out { (self.$get()).$meth(other.$get()) } } // BoundedT T impl<$(const $cst_name: $cst_ty,)*> $trait<$t> for $bounded_t<$($cst_name,)*> { type Output = $t; #[inline(always)] fn $meth(self, other: $t) -> $out { (self.$get()).$meth(other) } } // T BoundedT impl<$(const $cst_name: $cst_ty,)*> $trait<$bounded_t<$($cst_name,)*>> for $t { type Output = $t; #[inline(always)] fn $meth(self, other: $bounded_t<$($cst_name,)*>) -> $out { (self).$meth(other.$get()) } } } }; } #[doc(hidden)] #[macro_export] macro_rules! derivate_assign_binop_for_bounded { ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident}; $($tt:tt)*) => { $crate::derivate_assign_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, get, Self::Output}; $($tt)*) ; }; ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty};) => {}; ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty}; ($trait:ident, $meth:ident), $($tt:tt)*) => { $crate::derivate_assign_binop_for_bounded!(@$t, $bounded_t, $trait, $meth, $get, $out, $(<$(const $cst_name:$cst_ty),*>)?); $crate::derivate_assign_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, $get, $out}; $($tt)*); }; (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty$(,)?) 
=> { $crate::derivate_assign_binop_for_bounded!( @$t, $bounded_t, $trait, $meth, $get, $out, ); }; (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty, <$(const $cst_name:ident : $cst_ty:ty),*> ) => { $crate::_macro_utils::paste::paste!{ // BoundedT BoundedT impl<$(const [< $cst_name _LHS >]: $cst_ty,)* $(const [< $cst_name _RHS >]: $cst_ty,)*> $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $bounded_t<$([< $cst_name _LHS >],)*> { #[inline(always)] fn $meth(&mut self, other: $bounded_t<$([< $cst_name _RHS >],)*>) { self.get_mut().$meth(other.$get()) } } // BoundedT $t impl<$(const [< $cst_name _LHS >]: $cst_ty,)*> $trait<$t> for $bounded_t<$([< $cst_name _LHS >],)*> { #[inline(always)] fn $meth(&mut self, other: $t) { self.get_mut().$meth(other) } } // $t BoundedT impl<$(const [< $cst_name _RHS >]: $cst_ty,)*> $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $t { #[inline(always)] fn $meth(&mut self, other: $bounded_t<$([< $cst_name _RHS >],)*>) { self.$meth(other.get()) } } } }; } #[doc(hidden)] #[macro_export] macro_rules! derivate_operations_for_bounded { ($bounded_t:ident($t: ident $($bytes:expr)?)$(,)? 
<$(const $cst_name:ident : $cst_ty:ty),*> ) => { #[$crate::_macro_utils::duplicate::duplicate_item( INTRO_CONSTANTS USE_CONSTANTS; [ $(const $cst_name:$cst_ty),* ] [ $($cst_name),* ]; )] #[hax_lib::exclude] const _: () = { use ::core::ops::*; use $crate::num_traits::*; use ::hax_lib::Refinement; $crate::derivate_assign_binop_for_bounded!( {$t, $bounded_t}; (AddAssign, add_assign), (SubAssign, sub_assign), (MulAssign, mul_assign), (DivAssign, div_assign), (RemAssign, rem_assign), (ShlAssign, shl_assign), (ShrAssign, shr_assign), (BitAndAssign, bitand_assign), (BitOrAssign, bitor_assign), (BitXorAssign, bitxor_assign), ); $crate::derivate_binop_for_bounded!( {$t, $bounded_t}; (Add, add), (Sub, sub), (Mul, mul), (Div, div), (Rem, rem), (BitOr, bitor), (BitAnd, bitand), (BitXor, bitxor), (Shl, shl), (Shr, shr), (WrappingAdd, wrapping_add), (WrappingSub, wrapping_sub), (WrappingMul, wrapping_mul), (WrappingDiv, wrapping_div), ); $crate::derivate_binop_for_bounded!( {$t, $bounded_t, get, Option}; (CheckedAdd, checked_add), (CheckedSub, checked_sub), (CheckedMul, checked_mul), (CheckedDiv, checked_div), ); impl CheckedNeg for $bounded_t { type Output = $t; #[inline(always)] fn checked_neg(&self) -> Option<$t> { self.deref().checked_neg() } } impl Not for $bounded_t { type Output = $t; #[inline(always)] fn not(self) -> Self::Output { self.deref().not() } } impl NumOps for $bounded_t {} // impl Bounded for $bounded_t { // #[inline(always)] // fn min_value() -> Self { // Self::new(MIN) // } // #[inline(always)] // fn max_value() -> Self { // Self::new(MAX) // } // } $( impl FromBytes for $bounded_t { type BYTES = [u8; $bytes]; #[inline(always)] fn from_le_bytes(bytes: Self::BYTES) -> Self { Self::new($t::from_le_bytes(bytes)) } #[inline(always)] fn from_be_bytes(bytes: Self::BYTES) -> Self { Self::new($t::from_be_bytes(bytes)) } } impl ToBytes for $bounded_t { #[inline(always)] fn to_le_bytes(self) -> Self::BYTES { self.get().to_le_bytes() } #[inline(always)] fn 
to_be_bytes(self) -> Self::BYTES { self.get().to_be_bytes() } } )? impl Zero for $bounded_t { #[inline(always)] fn zero() -> Self { Self::new(0) } } impl One for $bounded_t { #[inline(always)] fn one() -> Self { Self::new(1) } } impl MachineInt<$t> for $bounded_t { } impl BitOps for $bounded_t { type Output = $t; #[inline(always)] fn count_ones(self) -> u32 { self.get().count_ones() } #[inline(always)] fn count_zeros(self) -> u32 { self.get().count_zeros() } #[inline(always)] fn leading_ones(self) -> u32 { self.get().leading_ones() } #[inline(always)] fn leading_zeros(self) -> u32 { self.get().leading_zeros() } #[inline(always)] fn trailing_ones(self) -> u32 { self.get().trailing_ones() } #[inline(always)] fn trailing_zeros(self) -> u32 { self.get().trailing_zeros() } #[inline(always)] fn rotate_left(self, n: u32) -> Self::Output { self.get().rotate_left(n) } #[inline(always)] fn rotate_right(self, n: u32) -> Self::Output { self.get().rotate_right(n) } #[inline(always)] fn from_be(x: Self) -> Self::Output { Self::Output::from_be(x.get()) } #[inline(always)] fn from_le(x: Self) -> Self::Output { Self::Output::from_le(x.get()) } #[inline(always)] fn to_be(self) -> Self::Output { Self::Output::to_be(self.get()) } #[inline(always)] fn to_le(self) -> Self::Output { Self::Output::to_le(self.get()) } #[inline(always)] fn pow(self, exp: u32) -> Self::Output { Self::Output::pow(self.get(), exp) } } }; } } #[doc(hidden)] #[macro_export] macro_rules! mk_bounded { ($(#$attr:tt)* $bounded_t:ident<$(const $cst_name:ident : $cst_ty:ty),*>($t: ident $($bytes:expr)?, |$x:ident| $body:expr)$(,)?) => { #[hax_lib::refinement_type(|$x| $body)] #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] $(#$attr)* pub struct $bounded_t<$(const $cst_name : $cst_ty),*>($t); $crate::derivate_operations_for_bounded!($bounded_t($t$($bytes)?)<$(const $cst_name : $cst_ty),*>); }; ($bounded_t:ident($t: ident $($bytes:expr)?)$(,)?) 
=> { $crate::mk_bounded!( #[doc = concat!("Bounded ", stringify!($t)," integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.")] $bounded_t($t $($bytes)?, |x| x >= MIN && x <= MAX) ); }; ($bounded_t:ident($t: ident $($bytes:expr)?), $($tt:tt)+) => { $crate::mk_bounded!($bounded_t($t $($bytes)?)); $crate::mk_bounded!($($tt)+); }; } mk_bounded!( BoundedI8(i8 1), BoundedI16(i16 2), BoundedI32(i32 4), BoundedI64(i64 8), BoundedI128(i128 16), BoundedIsize(isize), BoundedU8(u8 1), BoundedU16(u16 2), BoundedU32(u32 4), BoundedU64(u64 8), BoundedU128(u128 16), BoundedUsize(usize), ); /// Makes a refined new type in a very similar way to /// `hax_lib::refinement_tyoe`, but derives the various traits an /// integer type is expected to implement. /// /// Examples: /// ```rust /// # use hax_bounded_integers::refinement_int; /// refinement_int!(BoundedAbsI16(i16, 2, |x| x >= -(B as i16) && x <= (B as i16))); /// refinement_int!(BoundedAbsIsize(isize, |x| x >= -(B as isize) && x <= (B as isize))); /// ``` #[macro_export] macro_rules! refinement_int { ($(#$attr:tt)* $bounded_t:ident$(<$(const $cst_name:ident : $cst_ty:ty),*$(,)?>)?($t: ident, $($bytes:literal,)? |$x:ident| $body:expr)$(,)?) 
=> { $crate::mk_bounded!($(#$attr)* $bounded_t<$($(const $cst_name:$cst_ty),*)?>($t $($bytes)?, |$x| $body)); }; } #[hax_lib::exclude] const _: () = { impl core::ops::Index> for [T] { type Output = T; #[inline(always)] fn index(&self, index: BoundedUsize) -> &Self::Output { &self[index.get()] } } impl core::ops::IndexMut> for [T] { #[inline(always)] fn index_mut(&mut self, index: BoundedUsize) -> &mut Self::Output { &mut self[index.get()] } } }; #[test] fn tests() { refinement_int!( Test(i16, 2, |x| B < 32768 && x >= -(B as i16) && x <= (B as i16)) ); use hax_lib::*; let mut zzz: Test<123> = (-122).into_checked(); zzz += 32; let x: BoundedU8<0, 5> = 2.into_checked(); let y: BoundedU8<5, 10> = (x + x).into_checked(); let _ = x >> 3; let _ = x >> BoundedU8::<0, 5>::new(3); let _ = x / y; let _ = x * y; let _ = x + y; let _ = y - x; let _ = x / 1; let _ = x * 1; let _ = x + 1; let _ = x - 1; let _ = 4 / y; let _ = 4 * y; let _ = 4 + y; let _ = 4 - y; } ================================================ FILE: hax-bounded-integers/src/num_traits.rs ================================================ //! This module provides traits for generic mathematics. This is a //! smaller and more opinionated version of //! [num_traits](https://docs.rs/num-traits/latest/num_traits/). //! //! This module is designed to make bounded integers ergonomic to use: //! virtually every operation on bounded integers maps to their //! underlying type. We also want binary operators to be sufficiently //! polymophic to allow any combination: for instance, we want the //! addition of differently bounded u8, or bounded u8 with u8 or vice //! versa to be possible. //! //! Also, the traits in this module are designed to work with types //! that implement `Copy`. 
use core::ops::*;

// NOTE(review): the `<...>` generic parameter lists in this file were stripped
// by the repository extraction (e.g. `WrappingAdd { ... v: Rhs ... }` referred
// to an undeclared `Rhs`). They are restored below following the pattern of the
// num-traits crate that this module is a smaller version of — confirm against
// the upstream hax repository.

/// Additive identity for a numeric type.
pub trait Zero: Sized {
    fn zero() -> Self;
}

/// Multiplicative identity for a numeric type.
pub trait One: Sized {
    fn one() -> Self;
}

/// Bundles the five basic arithmetic operators. `Rhs` and `Output` are left
/// polymorphic so that, e.g., differently bounded integers can be combined.
pub trait NumOps<Rhs = Self, Output = Self>:
    Add<Rhs, Output = Output>
    + Sub<Rhs, Output = Output>
    + Mul<Rhs, Output = Output>
    + Div<Rhs, Output = Output>
    + Rem<Rhs, Output = Output>
{
}

// pub trait Bounded {
//     fn min_value() -> Self;
//     fn max_value() -> Self;
// }

/// Wrapping (modular) addition.
pub trait WrappingAdd<Rhs = Self> {
    type Output;
    fn wrapping_add(self, v: Rhs) -> Self::Output;
}

/// Wrapping (modular) subtraction.
pub trait WrappingSub<Rhs = Self> {
    type Output;
    fn wrapping_sub(self, v: Rhs) -> Self::Output;
}

/// Wrapping (modular) multiplication.
pub trait WrappingMul<Rhs = Self> {
    type Output;
    fn wrapping_mul(self, v: Rhs) -> Self::Output;
}

/// Wrapping (modular) division.
pub trait WrappingDiv<Rhs = Self> {
    type Output;
    fn wrapping_div(self, v: Rhs) -> Self::Output;
}

/// Addition returning `None` on overflow.
pub trait CheckedAdd<Rhs = Self> {
    type Output;
    fn checked_add(self, v: Rhs) -> Option<Self::Output>;
}

/// Subtraction returning `None` on overflow.
pub trait CheckedSub<Rhs = Self> {
    type Output;
    fn checked_sub(self, v: Rhs) -> Option<Self::Output>;
}

/// Multiplication returning `None` on overflow.
pub trait CheckedMul<Rhs = Self> {
    type Output;
    fn checked_mul(self, v: Rhs) -> Option<Self::Output>;
}

/// Division returning `None` on division by zero or overflow.
pub trait CheckedDiv<Rhs = Self> {
    type Output;
    fn checked_div(self, v: Rhs) -> Option<Self::Output>;
}

/// Negation returning `None` on overflow.
pub trait CheckedNeg {
    type Output;
    fn checked_neg(&self) -> Option<Self::Output>;
}

/// Conversion from a fixed-size byte representation.
pub trait FromBytes {
    /// The byte-array type (e.g. `[u8; 4]` for a 32-bit integer).
    type BYTES;
    fn from_le_bytes(bytes: Self::BYTES) -> Self;
    fn from_be_bytes(bytes: Self::BYTES) -> Self;
}

/// Conversion to the same fixed-size byte representation as [`FromBytes`].
pub trait ToBytes: FromBytes {
    fn to_le_bytes(self) -> Self::BYTES;
    fn to_be_bytes(self) -> Self::BYTES;
}

/// Catch-all trait for machine integers: ordering, identities, and the full
/// set of arithmetic, bitwise, checked and wrapping operations, all producing
/// the same `Output` type.
pub trait MachineInt<Output = Self>:
    Copy
    // + Bounded
    + PartialOrd
    + Ord
    + PartialEq
    + Eq
    + Zero
    + One
    + Not<Output = Output>
    + NumOps<Self, Output>
    + BitAnd<Output = Output>
    + BitOr<Output = Output>
    + BitXor<Output = Output>
    + Shl<Output = Output>
    + Shr<Output = Output>
    + CheckedAdd<Self, Output = Output>
    + CheckedSub<Self, Output = Output>
    + CheckedMul<Self, Output = Output>
    + CheckedDiv<Self, Output = Output>
    + WrappingAdd<Self, Output = Output>
    + WrappingSub<Self, Output = Output>
    + WrappingMul<Self, Output = Output>
    + WrappingDiv<Self, Output = Output>
    + BitOps<Output = Output>
{
}

/// Bit-level inspection and manipulation operations, mirroring the inherent
/// methods of the primitive integer types.
pub trait BitOps {
    type Output;
    fn count_ones(self) -> u32;
    fn count_zeros(self) -> u32;
    fn leading_ones(self) -> u32;
    fn leading_zeros(self) -> u32;
    fn trailing_ones(self) -> u32;
    fn trailing_zeros(self) -> u32;
    fn rotate_left(self, n: u32) -> Self::Output;
    fn rotate_right(self, n: u32) -> Self::Output;
    fn from_be(x: Self) -> Self::Output;
    fn from_le(x: Self) -> Self::Output;
    fn to_be(self) -> Self::Output;
    fn to_le(self) -> Self::Output;
    fn pow(self, exp: u32) -> Self::Output;
}

// ================================================
FILE: hax-lib/Cargo.toml ================================================ [package] name = "hax-lib" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition = "2021" repository.workspace = true readme = "README.md" description = "Hax-specific helpers for Rust programs" [target.'cfg(hax)'.dependencies] num-bigint = { version = "0.4", default-features = false } num-traits = { version = "0.2", default-features = false } [dependencies] hax-lib-macros = { workspace = true, optional = true } [features] default = ["macros"] macros = ["dep:hax-lib-macros"] [lints.rust] unexpected_cfgs = { level = "warn", check-cfg = ['cfg(hax)'] } [package.metadata."docs.rs"] rustdoc-args = ["--cfg", "doc_cfg", "--cfg", "hax"] ================================================ FILE: hax-lib/README.md ================================================ # hax library This crate contains helpers that can be used when writing Rust code that is proven through the hax toolchain. 
**⚠️ The code in this crate has no effect when compiled without the `--cfg hax`.** ## Examples: ```rust fn sum(x: Vec, y: Vec) -> Vec { hax_lib::assume!(x.len() == y.len()); hax_lib::assert!(hax_lib::forall(|i: usize| hax_lib::implies(i < x.len(), || x[i] < 4242))); hax_lib::debug_assert!(hax_lib::exists(|i: usize| hax_lib::implies(i < x.len(), || x[i] > 123))); x.into_iter().zip(y.into_iter()).map(|(x, y)| x + y).collect() } ``` ================================================ FILE: hax-lib/build.rs ================================================ use std::env; use std::fs; use std::path::Path; const FSTAR_EXTRA: &str = r" pub use hax_lib_macros::fstar_options as options; pub use hax_lib_macros::fstar_verification_status as verification_status; pub use hax_lib_macros::fstar_smt_pat as smt_pat; pub use hax_lib_macros::fstar_postprocess_with as postprocess_with; "; const LEAN_EXTRA: &str = r" pub use hax_lib_macros::lean_proof as proof; pub use hax_lib_macros::lean_pure_requires_proof as pure_requires_proof; pub use hax_lib_macros::lean_pure_ensures_proof as pure_ensures_proof; pub mod proof_method { pub use hax_lib_macros::lean_proof_method_grind as grind; pub use hax_lib_macros::lean_proof_method_bv_decide as bv_decide; } "; fn main() { let code = |backend: &str, extra: &str| { format!( r#" pub use hax_lib_macros::{backend}_expr as {backend}; #[doc(hidden)] pub use hax_lib_macros::{backend}_unsafe_expr; #[doc(hidden)] pub use hax_lib_macros::{backend}_prop_expr; /// Procedular macros that have an effect only for the backend {backend}. 
pub mod {backend} {{ #[doc(hidden)] pub use hax_lib_macros::{backend}_unsafe_expr as unsafe_expr; pub use hax_lib_macros::{backend}_prop_expr as prop; pub use hax_lib_macros::{backend}_after as after; pub use hax_lib_macros::{backend}_before as before; pub use hax_lib_macros::{backend}_replace as replace; pub use hax_lib_macros::{backend}_replace_body as replace_body; {extra} }} "# ) }; let out_dir = env::var_os("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("proc_macros_generated.rs"); fs::write( &dest_path, [ code("fstar", FSTAR_EXTRA), code("proverif", ""), code("coq", ""), code("lean", LEAN_EXTRA), ] .join("\n"), ) .unwrap(); println!("cargo::rerun-if-changed=build.rs"); } ================================================ FILE: hax-lib/core-models/.gitignore ================================================ proofs ================================================ FILE: hax-lib/core-models/Cargo.toml ================================================ [package] name = "core-models" version = "0.1.0" edition = "2024" [dependencies] pastey = "0.1.1" rust_primitives = { "path" = "rust_primitives" } hax-lib.workspace = true [workspace.dependencies] hax-lib = { "path" = ".." } # Workaround for https://github.com/rust-lang/cargo/issues/6745 [workspace] members = ["alloc", "rand_core", "rust_primitives", "std"] [lints.rust] unexpected_cfgs = { level = "warn", check-cfg = ['cfg(hax)', 'cfg(hax_compilation)', 'cfg(hax_backend_fstar)'] } ================================================ FILE: hax-lib/core-models/README.md ================================================ This crate contains a partial model of Rust core that should preserve the same types signatures and behaviour as the original Rust core library. It only contains code that can be extracted with hax and used in `proof-libs` to give a model to Rust core items in the different hax backends. 
## Contributing Currently the only backend supported is F*, and the extracted models coexist with hand-written F* models. When a new module is added, the hand-written version should be deleted and replaced by the generated one. `.hax.sh extract` takes care of extracting and placing the result in `proof-libs`. ## Style considerations Here is a list of things to pay attention to when contributing to the models: * When using the `Fn` traits, the syntax shortcuts `Fn(T) -> U` are not available for the model traits. We need to write `Fn` * The `core::mem::take`, `core::mem::swap`, etc. functions cannot be given a good model that fits the Rust interface, we can only use unsafe or the original version, or change the interface to something corresponding to the interface of translated code (state passing instead of `&mut`). ## Adding new models To add new models, you should place yourself in the right module (create it if it doesn't already exist) corresponding to where it is located in Rust core. Then create the items with the same interface as in Rust (the Rust documentation is a good source of information, or sometimes the actual code). The interface can be slightly modified sometimes (removing `const`, or traits that we erase with hax). The code you write for the body can also be based on the real code if it is simple enough, or you can write something new that models the behaviour. ## Tests This is a work in progress. All models should be executable, then the test strategy will be to test the model against its reference (probably with a property-based testing framework). Once the infrastructure is in place, all new models should come with tests. The extracted code should also be tested in each backend (to make sure the naming is correct, and basic proofs using the items can work). ## Relying on primitives Some primitive operations are easier to model directly using the backend's language (integers, arithmetic, sequence-like data structures, etc.). 
This can happen in two different ways: - Implicitly: integer types and arithmetic operations, array and slice types can be used directly. Hax has a special treatment of them, so any use in the core models implicitly refers to their implementation in Rust primitives (implemented manually for each backend) - Explicitly: some more specific arithmetic operations, sequences, etc. are available in the rust_primitives crate. This crate provides all the other definitions that need a manual model in each backend. The definitions from this crate can be used in core models, but the crate itself is not extracted. ## Example The `core::options` module is a good example. It mostly contains the definition of the `Option` enum which can be copied: ```Rust pub enum Option { Some(T), None, } ``` Most functions can be defined in a very similar way to the original versions like: ```Rust pub fn is_some(&self) -> bool { matches!(*self, Some(_)) } ``` The definition is exactly the same except that it is not `const`, and the attributes have been removed. Whenever we take functions/closures as argument there is a bit more modification to be done. Indeed, we must use the `FnOnce` trait from our models and not the original one. 
For example: ```rust pub const fn is_some_and(self, f: impl [const] FnOnce(T) -> bool + [const] Destruct) -> bool { match self { None => false, Some(x) => f(x), } } ``` becomes ```rust pub fn is_some_and>(self, f: F) -> bool { match self { None => false, Some(x) => f.call_once(x), } } ``` ================================================ FILE: hax-lib/core-models/alloc/Cargo.toml ================================================ [package] name = "alloc" version = "0.1.0" edition = "2024" [dependencies] rust_primitives = {path = "../rust_primitives"} hax-lib.workspace = true ================================================ FILE: hax-lib/core-models/alloc/src/lib.rs ================================================ mod alloc { pub struct Global; } mod borrow { struct Cow(T); pub trait ToOwned { fn to_owned(self) -> Self; } impl ToOwned for T { fn to_owned(self) -> Self { self } } } mod boxed { pub struct Box(pub T); impl Box { // Hax removes boxes, so this should be the identity fn new(v: T) -> T { v } } } mod collections { // All implementations are dummy (for interfaces only) mod binary_heap { #[hax_lib::fstar::before("open Rust_primitives.Notations")] use crate::vec::*; struct BinaryHeap(Vec); impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} impl BinaryHeap<(), ()> {} #[hax_lib::attributes] impl BinaryHeap { fn new() -> BinaryHeap { BinaryHeap(Vec( rust_primitives::sequence::seq_empty(), std::marker::PhantomData::
, )) } #[hax_lib::requires(self.len() < core::primitive::usize::MAX)] fn push(&mut self, v: T) { self.0.push(v) } #[hax_lib::ensures(|res| (self.len() > 0) == res.is_some())] fn pop(&mut self) -> Option { let mut max: Option<&T> = None; let mut index = 0; for i in 0..self.len() { hax_lib::loop_invariant!(|i: usize| (i > 0) == max.is_some()); if max.is_none_or(|max| self.0[i] > *max) { max = Some(&self.0[i]); index = i; } } if max.is_some() { Some(self.0.remove(index)) } else { None } } } #[hax_lib::attributes] impl BinaryHeap { fn len(&self) -> usize { self.0.len() } #[hax_lib::ensures(|res| (self.len() > 0) == res.is_some())] fn peek(&self) -> Option<&T> { let mut max: Option<&T> = None; for i in 0..self.len() { hax_lib::loop_invariant!(|i: usize| (i > 0) == max.is_some()); if max.is_none_or(|max| self.0[i] > *max) { max = Some(&self.0[i]); } } max } } #[hax_lib::fstar::after(" assume val lemma_peek_pop: #t:Type -> (#a: Type) -> (#i: Core_models.Cmp.t_Ord t) -> h: t_BinaryHeap t a -> Lemma (impl_11__peek h == snd (impl_10__pop h)) [SMTPat (impl_11__peek #t #a h)] ")] use core::*; } mod btree { mod set { #[hax_lib::opaque] struct BTreeSet(Option, Option); impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet<(), ()> {} impl BTreeSet { fn new() -> BTreeSet { BTreeSet(None, None) } } } } mod vec_deque { use rust_primitives::sequence::*; pub struct VecDeque(pub Seq, std::marker::PhantomData); impl VecDeque<(), ()> {} impl VecDeque<(), ()> {} impl VecDeque<(), ()> {} impl VecDeque<(), ()> {} impl VecDeque<(), ()> {} impl VecDeque { #[hax_lib::opaque] fn push_back(&mut self, x: T) {} fn len(&self) -> usize { seq_len(&self.0) } fn pop_front(&mut self) -> Option { if self.len() == 0 { None } else { Some(seq_last(&self.0)) } } } impl std::ops::Index for VecDeque { 
type Output = T; fn index(&self, i: usize) -> &T { seq_index(&self.0, i) } } } } mod fmt { #[hax_lib::opaque] fn format(args: core::fmt::Arguments) -> String { String::new() } } mod slice { #[hax_lib::exclude] struct Dummy(T); use super::vec::Vec; use rust_primitives::sequence::*; impl Dummy { fn to_vec(s: &[T]) -> Vec { Vec( seq_from_slice(s), std::marker::PhantomData::, ) } fn into_vec(s: Box<&[T]>) -> Vec { Vec(seq_from_slice(*s), std::marker::PhantomData::) } #[hax_lib::opaque] fn sort_by core::cmp::Ordering>(s: &mut [T], compare: F) {} } } mod string { use rust_primitives::string::*; struct String(&'static str); impl String { fn new() -> Self { String("") } fn push_str(&mut self, other: &'static str) { *self = String(str_concat(self.0, other)) } fn push(&mut self, c: char) { *self = String(str_concat(self.0, str_of_char(c))) } fn pop(&mut self) -> Option { let l = self.0.len(); if l > 0 { *self = String(str_sub(self.0, 0, l - 1)); Some(str_index(self.0, l - 1)) } else { None } } } } pub mod vec { // TODO drain (to be done with iterators) use hax_lib::ToInt; use rust_primitives::sequence::*; pub struct Vec(pub Seq, pub std::marker::PhantomData); fn from_elem(item: T, len: usize) -> Vec { Vec( seq_create(item, len), std::marker::PhantomData::, ) } #[hax_lib::attributes] impl Vec { pub fn new() -> Vec { Vec( seq_empty(), std::marker::PhantomData::, ) } pub fn with_capacity(_c: usize) -> Vec { Vec::new() } } #[hax_lib::attributes] impl Vec { pub fn len(&self) -> usize { seq_len(&self.0) } #[hax_lib::requires(seq_len(&self.0) < usize::MAX)] pub fn push(&mut self, x: T) { seq_concat(&mut self.0, &seq_one(x)) } pub fn pop(&mut self) -> Option { if seq_len(&self.0) > 0 { let last = seq_last(&self.0); self.0 = seq_slice(&self.0, 0, seq_len(&self.0) - 1); Some(last) } else { None } } pub fn is_empty(&self) -> bool { seq_len(&self.0) == 0 } #[hax_lib::requires(index <= seq_len(&self.0) && seq_len(&self.0) < usize::MAX)] pub fn insert(&mut self, index: usize, element: T) 
{ let mut left = seq_slice(&self.0, 0, index); let right = seq_slice(&self.0, index, seq_len(&self.0)); seq_concat(&mut left, &seq_one(element)); seq_concat(&mut left, &right); self.0 = left; } pub fn as_slice(&self) -> &[T] { seq_to_slice(&self.0) } #[hax_lib::opaque] pub fn truncate(&mut self, n: usize) {} #[hax_lib::opaque] pub fn swap_remove(&mut self, n: usize) -> T { seq_last(&self.0) } #[hax_lib::opaque] #[hax_lib::ensures(|_| future(self).len() == new_size)] pub fn resize(&mut self, new_size: usize, value: &T) {} #[hax_lib::opaque] pub fn remove(&mut self, index: usize) -> T { seq_last(&self.0) } #[hax_lib::opaque] pub fn clear(&mut self) {} #[hax_lib::requires(self.len().to_int() + other.len().to_int() <= usize::MAX.to_int())] pub fn append(&mut self, other: &mut Vec) { seq_concat(&mut self.0, &other.0); other.0 = seq_empty() } #[hax_lib::opaque] pub fn drain */>(&mut self, _range: R) -> drain::Drain { drain::Drain( seq_slice(&self.0, 0, self.len()), std::marker::PhantomData::, ) // TODO use range bounds } } pub mod drain { use rust_primitives::sequence::*; pub struct Drain(pub Seq, pub std::marker::PhantomData); impl Iterator for Drain { type Item = T; fn next(&mut self) -> Option { if seq_len(&self.0) == 0 { Option::None } else { let res = seq_first(&self.0); self.0 = seq_slice(&self.0, 1, seq_len(&self.0)); Option::Some(res) } } } } #[hax_lib::attributes] impl Vec { #[hax_lib::requires(seq_len(&s.0).to_int() + other.len().to_int() <= usize::MAX.to_int())] fn extend_from_slice(s: &mut Vec, other: &[T]) { seq_concat(&mut s.0, &seq_from_slice(other)) } } #[hax_lib::attributes] impl std::ops::Index for Vec { type Output = T; #[hax_lib::requires(i < self.len())] fn index(&self, i: usize) -> &T { seq_index(&self.0, i) } } #[hax_lib::attributes] impl core::ops::Deref for Vec { type Target = [T]; fn deref(&self) -> &[T] { self.as_slice() } } #[hax_lib::attributes] #[hax_lib::opaque] impl std::iter::FromIterator for Vec { fn from_iter(iter: I) -> Self where I: 
IntoIterator, { let mut res = Vec::new(); for el in iter { res.push(el) } res } } } ================================================ FILE: hax-lib/core-models/hax.sh ================================================ #!/usr/bin/env bash set -e function extract_fstar() { go_to "./" HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax into fstar --interfaces '+!core_models::str::* +!**::num::error +!**::panicking::internal +!core_models::borrow +!core_models::default +!core_models::error +!core_models::hash +!core_models::hint +!core_models::ops::bit +!core_models::ops::arith +!core_models::fmt +!core_models::fmt::rt +!core_models::mem +!core_models::mem::*' cp proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p std \; into -i '-core_models::**' fstar --interfaces '+!**' cp std/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p alloc \; into fstar --interfaces '+!**::collections::btree::** +!**::collections::vec_deque::**' cp alloc/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p rand_core \; into fstar --interfaces '+!**' cp rand_core/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core } function extract_lean() { go_to "./" LEAN_FILTERS="" LEAN_FILTERS+=" -core_models::result::**::unwrap" # Issue #1818 LEAN_FILTERS+=" -core_models::result::**::expect" # Issue #1818 LEAN_FILTERS+=" -core_models::option::**::expect" # Issue #1818 LEAN_FILTERS+=" -core_models::option::**::unwrap" # Issue #1818 LEAN_FILTERS+=" -core_models::num::**::saturating_add" LEAN_FILTERS+=" -core_models::num::**::overflowing_add" LEAN_FILTERS+=" -core_models::num::**::saturating_sub" LEAN_FILTERS+=" -core_models::num::**::overflowing_sub" LEAN_FILTERS+=" -core_models::num::**::saturating_mul" LEAN_FILTERS+=" -core_models::num::**::overflowing_mul" LEAN_FILTERS+=" -core_models::num::**::count_ones" LEAN_FILTERS+=" 
-core_models::num::**::rem_euclid" LEAN_FILTERS+=" -core_models::num::**::abs" LEAN_FILTERS+=" -core_models::num::**::checked_add" LEAN_FILTERS+=" -core_models::num::**::checked_sub" LEAN_FILTERS+=" -core_models::num::**::checked_mul" LEAN_FILTERS+=" -core_models::num::**::MIN" LEAN_FILTERS+=" -core_models::num::**::MAX" LEAN_FILTERS+=" -core_models::num::**::BITS" LEAN_FILTERS+=" -core_models::num::**::from_be_bytes" LEAN_FILTERS+=" -core_models::num::**::from_le_bytes" LEAN_FILTERS+=" -core_models::num::**::to_be_bytes" LEAN_FILTERS+=" -core_models::num::**::to_le_bytes" LEAN_FILTERS+=" -core_models::num::**::rotate_left" LEAN_FILTERS+=" -core_models::num::**::rotate_right" LEAN_FILTERS="$(echo "$LEAN_FILTERS" | xargs)" HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax into -i "$LEAN_FILTERS" lean OUT="proofs/lean/extraction/core_models.lean" sed -i 's/import Hax/import Hax.core_models.prologue\nimport Hax.Tactic.HaxSpec/g' "$OUT" cp "$OUT" ../proof-libs/lean/Hax/core_models/core_models.lean } function init_vars() { SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")" SCRIPT_PATH="${SCRIPT_DIR}/${SCRIPT_NAME}" if [ -t 1 ]; then BLUE='\033[34m' GREEN='\033[32m' BOLD='\033[1m' RESET='\033[0m' else BLUE='' GREEN='' BOLD='' RESET='' fi } function go_to() { ROOT="$SCRIPT_DIR" cd "$ROOT" cd "$1" } function msg() { echo -e "$1[$SCRIPT_NAME]$RESET $2" } function help() { echo "Script to extract to F* or Lean and place the result in proof-libs" echo "" echo "Usage: $0 [COMMAND]" echo "" echo "Commands:" echo "" grep '[#]>' "$SCRIPT_PATH" | sed 's/[)] #[>]/\t/g' echo "" } function cli() { if [ -z "$1" ]; then help exit 1 fi # Check if an argument was provided case "$1" in --help) #> Show help message help;; extract) #> Extract the F* code and copy it to proof-libs. 
Use `extract fstar` for F*, `extract lean` for Lean, or `extract` for both case "$2" in "") # no subcommand -> run both extract_fstar extract_lean msg "$GREEN" "done" ;; fstar) extract_fstar msg "$GREEN" "done" ;; lean) extract_lean msg "$GREEN" "done" ;; *) echo "Invalid option for extract: $2" help exit 1 ;; esac ;; *) echo "Invalid option: $1" help exit 1;; esac } init_vars cli "$@" ================================================ FILE: hax-lib/core-models/rand_core/Cargo.toml ================================================ [package] name = "rand_core" version = "0.1.0" edition = "2024" [dependencies] ================================================ FILE: hax-lib/core-models/rand_core/src/lib.rs ================================================ pub trait RngCore { // Required methods fn next_u32(&mut self) -> u32; fn next_u64(&mut self) -> u64; fn fill_bytes(&mut self, dst: &mut [u8]); } pub trait CryptoRng: RngCore {} mod os { pub struct OsRng; // Dummy impl impl super::RngCore for OsRng { fn next_u32(&mut self) -> u32 { 0 } fn next_u64(&mut self) -> u64 { 0 } fn fill_bytes(&mut self, dst: &mut [u8]) {} } impl super::CryptoRng for OsRng {} } ================================================ FILE: hax-lib/core-models/rust_primitives/Cargo.toml ================================================ [package] name = "rust_primitives" version = "0.1.0" edition = "2024" [dependencies] pastey = "0.1.1" hax-lib.workspace = true ================================================ FILE: hax-lib/core-models/rust_primitives/src/lib.rs ================================================ #![allow(unused_variables)] pub mod slice { pub fn slice_length(s: &[T]) -> usize { unimplemented!("This is a stub that is implemented in each backend") } #[hax_lib::requires(mid <= slice_length(s))] pub fn slice_split_at(s: &[T], mid: usize) -> (&[T], &[T]) { unimplemented!("This is a stub that is implemented in each backend") } pub fn slice_contains(s: &[T], v: T) -> bool { unimplemented!("This is a 
stub that is implemented in each backend") } #[hax_lib::requires(i < slice_length(s))] pub fn slice_index(s: &[T], i: usize) -> &T { unimplemented!("This is a stub that is implemented in each backend") } pub fn slice_slice(s: &[T], b: usize, e: usize) -> &[T] { unimplemented!("This is a stub that is implemented in each backend") } // In the following two functions, F is actually a function type. // Not constraining that here allows to call it with closures, // or to pass parameters that implement the `Fn` trait for core_models. // Each backend can type `f` as needed. pub fn array_from_fn(f: F) -> [T; N] { unimplemented!("This is a stub that is implemented in each backend") } pub fn array_map(s: [T; N], f: F) -> [U; N] { unimplemented!("This is a stub that is implemented in each backend") } pub fn array_as_slice(s: &[T; N]) -> &[T] { unimplemented!("This is a stub that is implemented in each backend") } pub fn array_slice(a: &[T; N], b: usize, e: usize) -> &[T] { unimplemented!("This is a stub that is implemented in each backend") } pub fn array_index(a: &[T; N], i: usize) -> &T { unimplemented!("This is a stub that is implemented in each backend") } } pub mod sequence { pub struct Seq(Option); pub fn seq_empty() -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_from_slice(_s: &[T]) -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_from_array(_s: [T; N]) -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_to_slice(_s: &Seq) -> &[T] { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_concat(s1: &mut Seq, s2: &Seq) { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_one(x: T) -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_create(x: T, n: usize) -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_len(s: 
&Seq) -> usize { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_slice(s: &Seq, b: usize, e: usize) -> Seq { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_last(s: &Seq) -> T { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_first(s: &Seq) -> T { unimplemented!("This is a stub that is implemented in each backend") } pub fn seq_index(s: &Seq, i: usize) -> &T { unimplemented!("This is a stub that is implemented in each backend") } } pub mod string { pub fn str_concat(s1: &'static str, s2: &'static str) -> &'static str { unimplemented!("This is a stub that is implemented in each backend") } pub fn str_of_char(c: char) -> &'static str { unimplemented!("This is a stub that is implemented in each backend") } pub fn str_sub(s: &'static str, b: usize, e: usize) -> &'static str { unimplemented!("This is a stub that is implemented in each backend") } pub fn str_index(s: &'static str, i: usize) -> char { unimplemented!("This is a stub that is implemented in each backend") } } pub mod mem { pub fn replace<'a, T: ?Sized>(dest: &'a mut T, src: &'a T) -> &'a T { unimplemented!("This is a stub that is implemented in each backend") } pub fn copy(x: &T) -> T { unimplemented!("This is a stub that is implemented in each backend") } } pub mod arithmetic { use pastey::paste; macro_rules! 
arithmetic_ops { ( types: $t:ident, ops: $($op:ident)*, overflowing_ops: $($ov_op:ident)*, ) => { paste!{ $(pub fn [<$op _ $t>](x: $t, y: $t) -> $t { unimplemented!("This is a stub that is implemented in each backend") })* $(pub fn [<$ov_op _ $t>](x: $t, y: $t) -> ($t, bool) { unimplemented!("This is a stub that is implemented in each backend") })* } }; ( types: $first_t:ident $($t:ident)+, ops: $($op:ident)*, overflowing_ops: $($ov_op:ident)*, ) => { arithmetic_ops!(types: $first_t, ops: $($op)*, overflowing_ops: $($ov_op)*,); arithmetic_ops!(types: $($t)*, ops: $($op)*, overflowing_ops: $($ov_op)*,); }; } macro_rules! all_ops { ( $($Self: ident)*, $($Bytes: expr)*, ) => { paste! { $( pub fn [](x: $Self, exp: u32) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](x: $Self) -> u32 { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](x: $Self, n: u32) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](x: $Self, n: u32) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](x: $Self) -> u32 { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](x: $Self) -> u32 { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](bytes: [u8; $Bytes]) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](bytes: [u8; $Bytes]) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](bytes: $Self) -> [u8; $Bytes] { unimplemented!("This is a stub that is implemented in each backend") } pub fn [](bytes: $Self) -> [u8; $Bytes] { unimplemented!("This is a stub that is implemented in each backend") })* } } } macro_rules! signed_ops { ($($Self: ident)*) => { paste! 
{ $( pub fn [](x: $Self) -> $Self { unimplemented!("This is a stub that is implemented in each backend") } )* } } } // Rust inlines these values, for now we model usize by u64 // eventually we could try to define in the backend as 32 or 64 pub const SIZE_BYTES: usize = 8; pub const SIZE_BITS: u32 = 64; pub const USIZE_MAX: usize = u64::MAX as usize; pub const ISIZE_MAX: isize = i64::MAX as isize; pub const ISIZE_MIN: isize = i64::MIN as isize; arithmetic_ops! { types: u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize, ops: wrapping_add saturating_add wrapping_sub saturating_sub wrapping_mul saturating_mul rem_euclid, overflowing_ops: overflowing_add overflowing_sub overflowing_mul, } all_ops! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize, 1 2 4 8 16 SIZE_BYTES 1 2 4 8 16 SIZE_BYTES, } signed_ops! { i8 i16 i32 i64 i128 isize } } ================================================ FILE: hax-lib/core-models/src/core/array.rs ================================================ use rust_primitives::{sequence::*, slice::*}; pub struct TryFromSliceError; // Dummy type to allow impls #[hax_lib::exclude] struct Dummy([T; N]); // Dummy impls to get the right disambiguator (https://github.com/cryspen/hax/issues/828) impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy {} impl Dummy { pub fn map, U>( s: [T; N], f: fn(T) -> U, // We cannot use type `F` because it is incompatible with `array_map` ) -> [U; N] { array_map(s, f) } pub fn as_slice(s: &[T; N]) -> &[T] { array_as_slice(s) } } pub fn from_fn>( f: fn(usize) -> T, // We cannot use type `F` because it is incompatible with `array_from_fn` ) -> [T; N] { array_from_fn(f) } #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl 
crate::iter::traits::collect::IntoIterator for [T; N] { type IntoIter = iter::IntoIter; fn into_iter(self) -> iter::IntoIter { iter::IntoIter(seq_from_array(self)) } } use crate::ops::{ index::Index, range::{Range, RangeFrom, RangeFull, RangeTo}, }; #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index for [T; N] { type Output = T; #[hax_lib::requires(i < self.len())] fn index(&self, i: usize) -> &T { rust_primitives::slice::array_index(self, i) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for [T; N] { type Output = [T]; #[hax_lib::requires(i.start <= i.end && i.end <= self.len())] fn index(&self, i: Range) -> &[T] { array_slice(self, i.start, i.end) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for [T; N] { type Output = [T]; #[hax_lib::requires(i.end <= self.len())] fn index(&self, i: RangeTo) -> &[T] { array_slice(self, 0, i.end) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for [T; N] { type Output = [T]; #[hax_lib::requires(i.start <= self.len())] fn index(&self, i: RangeFrom) -> &[T] { array_slice(self, i.start, N) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index for [T; N] { type Output = [T]; fn index(&self, i: RangeFull) -> &[T] { array_slice(self, 0, N) } } mod iter { use crate::option::Option; use rust_primitives::sequence::*; pub struct IntoIter(pub Seq); #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl crate::iter::traits::iterator::Iterator for IntoIter { type Item = T; fn next(&mut self) -> Option { if seq_len(&self.0) == 0 { Option::None } else { let res = seq_first(&self.0); self.0 = seq_slice(&self.0, 1, seq_len(&self.0)); Option::Some(res) } } } } ================================================ FILE: hax-lib/core-models/src/core/borrow.rs ================================================ trait Borrow { fn borrow(&self) -> Borrowed; } 
================================================
FILE: hax-lib/core-models/src/core/clone.rs
================================================
// In F* we replace the definition to have equality between a value
// and its clone.
// We need to consume self, instead of taking a reference, otherwise Rust would
// not allow returning an owned Self. This is the same after going through hax.
#[hax_lib::fstar::replace(
    "class t_Clone self = { f_clone_pre: self -> Type0; f_clone_post: self -> self -> Type0; f_clone: x:self -> r:self {x == r} }"
)]
pub trait Clone {
    /// Returns a copy of the value. Modeled as consuming `self` (see note above).
    fn clone(self) -> Self;
}

// In our model, everything is clonable
impl<T> Clone for T {
    fn clone(self) -> Self {
        self
    }
}

================================================
FILE: hax-lib/core-models/src/core/cmp.rs
================================================
use crate::option::Option;

/// Model of `core::cmp::PartialEq`. `requires(true)` attaches a trivial
/// precondition in the extracted backends.
#[hax_lib::attributes]
pub trait PartialEq<Rhs = Self>
where
    Rhs: ?Sized,
{
    #[hax_lib::requires(true)]
    fn eq(&self, other: &Rhs) -> bool;
}

pub trait Eq: PartialEq {}

/// Model of `core::cmp::Ordering`, keeping core's discriminant values.
pub enum Ordering {
    Less = -1,
    Equal = 0,
    Greater = 1,
}

#[hax_lib::attributes]
pub trait PartialOrd<Rhs = Self>: PartialEq<Rhs>
where
    Rhs: ?Sized,
{
    #[hax_lib::requires(true)]
    fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
}

// These methods in core are provided using trait defaults, but this is not supported by hax
// so we have to define them in a different way.
#[hax_lib::attributes] trait Neq { #[hax_lib::requires(true)] fn neq(&self, y: &Rhs) -> bool; } impl> Neq for T { fn neq(&self, y: &T) -> bool { // Not using negation is a workaround for the F* lib self.eq(y) == false } } #[hax_lib::attributes] trait PartialOrdDefaults { #[hax_lib::requires(true)] fn lt(&self, y: &Rhs) -> bool where Self: PartialOrd; #[hax_lib::requires(true)] fn le(&self, y: &Rhs) -> bool where Self: PartialOrd; #[hax_lib::requires(true)] fn gt(&self, y: &Rhs) -> bool where Self: PartialOrd; #[hax_lib::requires(true)] fn ge(&self, y: &Rhs) -> bool where Self: PartialOrd; } impl> PartialOrdDefaults for T { fn lt(&self, y: &T) -> bool where T: PartialOrd, { matches!(self.partial_cmp(y), Option::Some(Ordering::Less)) } fn le(&self, y: &T) -> bool where T: PartialOrd, { matches!( self.partial_cmp(y), Option::Some(Ordering::Less | Ordering::Equal) ) } fn gt(&self, y: &T) -> bool where T: PartialOrd, { matches!(self.partial_cmp(y), Option::Some(Ordering::Greater)) } fn ge(&self, y: &T) -> bool where T: PartialOrd, { matches!( self.partial_cmp(y), Option::Some(Ordering::Greater | Ordering::Equal) ) } } #[hax_lib::attributes] pub trait Ord: Eq + PartialOrd { #[hax_lib::requires(true)] fn cmp(&self, other: &Self) -> Ordering; } pub fn max(v1: T, v2: T) -> T { match v1.cmp(&v2) { Ordering::Greater => v1, _ => v2, } } pub fn min(v1: T, v2: T) -> T { match v1.cmp(&v2) { Ordering::Greater => v2, _ => v1, } } pub struct Reverse(pub T); impl> PartialOrd> for Reverse { fn partial_cmp(&self, other: &Reverse) -> Option { other.0.partial_cmp(&self.0) } } impl> PartialEq> for Reverse { fn eq(&self, other: &Reverse) -> bool { other.0.eq(&self.0) } } impl Eq for Reverse {} impl Ord for Reverse { fn cmp(&self, other: &Reverse) -> Ordering { other.0.cmp(&self.0) } } macro_rules! 
int_impls { ($($t:ty)*) => ($( #[hax_lib::attributes] impl PartialOrd<$t> for $t { #[hax_lib::ensures(|res| { match res { Option::Some(Ordering::Less) => self < other, Option::Some(Ordering::Equal) => self == other, Option::Some(Ordering::Greater) => self > other, Option::None => false } })] fn partial_cmp(&self, other: &Self) -> Option { if self < other {Option::Some(Ordering::Less)} else if self > other {Option::Some(Ordering::Greater)} else {Option::Some(Ordering::Equal)} } } #[hax_lib::attributes] impl Ord for $t { #[hax_lib::ensures(|res| { match res { Ordering::Less => self < other, Ordering::Equal => self == other, Ordering::Greater => self > other, } })] fn cmp(&self, other: &Self) -> Ordering { if self < other {Ordering::Less} else if self > other {Ordering::Greater} else {Ordering::Equal} } } impl PartialEq<$t> for $t { fn eq(&self, other: &Self) -> bool { self == other } } impl Eq for $t {} )*) } int_impls! { u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize } ================================================ FILE: hax-lib/core-models/src/core/convert.rs ================================================ use super::result::Result; #[hax_lib::attributes] trait TryInto { type Error; #[hax_lib::requires(true)] fn try_into(self) -> Result; } #[hax_lib::attributes] trait Into { #[hax_lib::requires(true)] fn into(self) -> T; } #[hax_lib::attributes] trait From { #[hax_lib::requires(true)] fn from(x: T) -> Self; } #[hax_lib::attributes] trait TryFrom: Sized { type Error; #[hax_lib::requires(true)] fn try_from(x: T) -> Result; } impl> Into for T { fn into(self) -> U { U::from(self) } } pub struct Infallible; impl> TryFrom for U { type Error = Infallible; fn try_from(x: T) -> Result { Result::Ok(U::from(x)) } } use crate::array::TryFromSliceError; #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl TryFrom<&[T]> for [T; N] { type Error = TryFromSliceError; fn try_from(x: &[T]) -> Result<[T; N], TryFromSliceError> { if x.len() == N { 
Result::Ok(rust_primitives::slice::array_from_fn(|i| { *rust_primitives::slice::slice_index(x, i) })) } else { Result::Err(TryFromSliceError) } } } impl> TryInto for T { type Error = U::Error; fn try_into(self) -> Result { U::try_from(self) } } impl From for T { fn from(x: T) -> Self { x } } #[hax_lib::attributes] trait AsRef { #[hax_lib::requires(true)] fn as_ref(self) -> T; } impl AsRef for T { fn as_ref(self) -> T { self } } macro_rules! int_from { ( $($From_t: ident)*, $($To_t: ident)*, ) => { $( #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl From<$From_t> for $To_t { fn from(x: $From_t) -> $To_t { x as $To_t } } )* } } use super::num::error::TryFromIntError; macro_rules! int_try_from { ( $($From_t: ident)*, $($To_t: ident)*, ) => { $( #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl TryFrom<$From_t> for $To_t { type Error = TryFromIntError; fn try_from(x: $From_t) -> Result<$To_t, TryFromIntError> { if x > ($To_t::MAX as $From_t) || x < ($To_t::MIN as $From_t) { Result::Err(TryFromIntError(())) } else { Result::Ok(x as $To_t) } } } )* } } int_from! { u8 u8 u16 u8 u16 u32 u8 u16 u32 u64 usize u8 u16, u16 u32 u32 u64 u64 u64 u128 u128 u128 u128 u128 usize usize, } int_from! { i8 i8 i16 i8 i16 i32 i8 i16 i32 i64 isize i8 i16, i16 i32 i32 i64 i64 i64 i128 i128 i128 i128 i128 isize isize, } int_try_from! { u16 u32 u32 u32 u64 u64 u64 u64 u128 u128 u128 u128 u128 usize usize usize usize, u8 u8 u16 usize u8 u16 u32 usize u8 u16 u32 u64 usize u8 u16 u32 u64, } int_try_from! 
{
    i16 i32 i32 i32 i64 i64 i64 i64 i128 i128 i128 i128 i128 isize isize isize isize,
    i8 i8 i16 isize i8 i16 i32 isize i8 i16 i32 i64 isize i8 i16 i32 i64,
}

================================================
FILE: hax-lib/core-models/src/core/default.rs
================================================
#[hax_lib::attributes]
pub trait Default {
    #[hax_lib::requires(true)]
    fn default() -> Self;
}

================================================
FILE: hax-lib/core-models/src/core/error.rs
================================================
use super::fmt::{Debug, Display};

pub trait Error: Display + Debug {}

================================================
FILE: hax-lib/core-models/src/core/f32.rs
================================================
#[allow(non_camel_case_types)]
#[hax_lib::exclude]
struct f32;

impl f32 {
    // NOTE(review): this stub takes/returns `f64` inside the `f32` model —
    // presumably copied from an f64 model; confirm upstream whether it
    // should read `f32` instead.
    #[hax_lib::opaque]
    fn abs(x: f64) -> f64 {
        panic!()
    }
}

================================================
FILE: hax-lib/core-models/src/core/fmt.rs
================================================
#![allow(unused_variables)]

pub struct Error;
pub type Result = super::result::Result<(), Error>;
pub struct Formatter;

pub trait Display {
    fn fmt(&self, f: &mut Formatter) -> Result;
}

pub trait Debug {
    fn dbg_fmt(&self, f: &mut Formatter) -> Result;
}

pub struct Arguments<'a>(&'a ());

// In this model, every type is trivially `Debug`: formatting is a no-op.
impl<T> Debug for T {
    fn dbg_fmt(&self, f: &mut Formatter) -> Result {
        Result::Ok(())
    }
}

// Empty impls, presumably to keep the impl disambiguators aligned with core
// (cf. the `Dummy` impls in array.rs and https://github.com/cryspen/hax/issues/828)
// — confirm before removing any of them.
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {}
impl<'a> Arguments<'a> {
    fn write_fmt(f: &mut Formatter, args: Arguments) -> Result {
        Result::Ok(())
    }
}

mod rt {
    #[hax_lib::opaque] // The internals of this are not important in this model
    enum ArgumentType<'a> {
        Placeholder {
            /* value: NonNull<()>,
            formatter: unsafe fn(NonNull<()>, &mut Formatter<'_>) -> Result, */
            _lifetime:
std::marker::PhantomData<&'a ()>, }, /* Count(u16), */ } pub struct Argument<'a> { ty: ArgumentType<'a>, } impl Argument<'_> { #[hax_lib::opaque] fn new_display(x: &T) -> Self { crate::panicking::internal::panic() } #[hax_lib::opaque] fn new_debug(x: &T) -> Self { crate::panicking::internal::panic() } #[hax_lib::opaque] fn new_lower_hex(x: &T) -> Self { crate::panicking::internal::panic() } } impl<'a> Argument<'a> { #[hax_lib::opaque] fn new_binary(x: &T) -> Self { crate::panicking::internal::panic() } #[hax_lib::opaque] fn new_const(x: &T, y: &U) -> super::Arguments<'a> { crate::panicking::internal::panic() } #[hax_lib::opaque] fn new_v1(x: &T, y: &U, z: &V, t: &W) -> super::Arguments<'a> { crate::panicking::internal::panic() } fn none() -> [Self; 0] { [] } #[hax_lib::opaque] fn new_v1_formatted(x: &T, y: &U, z: &V) -> super::Arguments<'a> { crate::panicking::internal::panic() } } enum Count { Is(u16), Param(u16), Implied, } struct Placeholder { position: usize, flags: u32, precision: Count, width: Count, } struct UnsafeArg; } ================================================ FILE: hax-lib/core-models/src/core/hash.rs ================================================ pub trait Hasher {} #[hax_lib::attributes] pub trait Hash { #[hax_lib::requires(true)] fn hash(&self, h: H) -> H; } // Temporary impl Hash for T { fn hash(&self, h: H) -> H { crate::panicking::internal::panic() } } ================================================ FILE: hax-lib/core-models/src/core/hint.rs ================================================ #[hax_lib::ensures(|res| fstar!("$res == $dummy"))] pub fn black_box(dummy: T) -> T { dummy } #[hax_lib::ensures(|res| fstar!("$res == $value"))] pub fn must_use(value: T) -> T { value } ================================================ FILE: hax-lib/core-models/src/core/iter.rs ================================================ // This model of iterators doesn't respect the signatures of the original definitions in Rust core. 
// We avoid default implementations for trait methods, and instead provide them as external to the trait. // This means overriding them is not possible. // We also avoid the coinductivity between `IntoIter` and `Iterator`. pub mod traits { pub mod iterator { use super::super::adapters::{ enumerate::Enumerate, flat_map::FlatMap, flatten::Flatten, map::Map, step_by::StepBy, take::Take, zip::Zip, }; use crate::ops::function::*; use crate::option::Option; #[hax_lib::attributes] pub trait Iterator { type Item; #[hax_lib::requires(true)] fn next(&mut self) -> Option; } // This trait is an addition to deal with the default methods that the F* backend doesn't handle trait IteratorMethods: Iterator { fn fold>(self, init: B, f: F) -> B; fn enumerate(self) -> Enumerate where Self: Sized; fn step_by(self, step: usize) -> StepBy where Self: Sized; fn map>(self, f: F) -> Map where Self: Sized; fn all>(self, f: F) -> bool; fn take(self, n: usize) -> Take where Self: Sized; fn flat_map>( self, f: F, ) -> FlatMap where Self: Sized; fn flatten(self) -> Flatten where Self::Item: Iterator, Self: Sized; fn zip(self, it2: I2) -> Zip where Self: Sized; } impl IteratorMethods for I { fn fold>(mut self, init: B, f: F) -> B { let mut accum = init; /* while let Option::Some(x) = self.next() { accum = f.call_once((accum, x)); } */ accum } fn enumerate(self) -> Enumerate { Enumerate::new(self) } fn step_by(self, step: usize) -> StepBy { StepBy::new(self, step) } fn map>(self, f: F) -> Map { Map::new(self, f) } fn all>(mut self, f: F) -> bool { /* while let Option::Some(x) = self.next() { if !f.call_once(x) { return false; } } */ true } fn take(self, n: usize) -> Take { Take::new(self, n) } fn flat_map>( self, f: F, ) -> FlatMap { FlatMap::new(self, f) } fn flatten(self) -> Flatten where I::Item: Iterator, { Flatten::new(self) } fn zip(self, it2: I2) -> Zip { Zip::new(self, it2) } } impl super::collect::IntoIterator for I { type IntoIter = Self; fn into_iter(self) -> Self { self } } // TODO 
rev: DoubleEndedIterator? } pub mod collect { pub trait IntoIterator { // Ignoring type Item, and trait bound Iterator to avoid coinduction // type Item; type IntoIter; //: Iterator fn into_iter(self) -> Self::IntoIter; } #[hax_lib::attributes] pub trait FromIterator: Sized { #[hax_lib::requires(true)] fn from_iter(iter: T) -> Self; } } } pub mod adapters { pub mod enumerate { use super::super::traits::iterator::Iterator; use crate::option::Option; pub struct Enumerate { iter: I, count: usize, } impl Enumerate { pub fn new(iter: I) -> Enumerate { Enumerate { iter, count: 0 } } } impl Iterator for Enumerate { type Item = (usize, ::Item); fn next(&mut self) -> Option<(usize, ::Item)> { match self.iter.next() { Option::Some(a) => { let i = self.count; // TODO check what to do here. It would be bad to have an iterator with // more than usize::MAX elements, this could be a requirement (but hard to formulate). hax_lib::assume!(self.count < usize::MAX); self.count += 1; Option::Some((i, a)) } Option::None => Option::None, } } } } pub mod step_by { use super::super::traits::iterator::Iterator; use crate::option::Option; pub struct StepBy { iter: I, step: usize, } impl StepBy { pub fn new(iter: I, step: usize) -> Self { StepBy { iter, step } } } #[hax_lib::opaque] impl Iterator for StepBy { type Item = ::Item; fn next(&mut self) -> Option<::Item> { for _ in 1..self.step { if let Option::None = self.iter.next() { return Option::None; } } self.iter.next() } } } pub mod map { pub struct Map { iter: I, f: F, } impl Map { pub fn new(iter: I, f: F) -> Self { Self { iter, f } } } use super::super::traits::iterator::Iterator; use crate::ops::function::*; use crate::option::Option; impl> Iterator for Map { type Item = O; fn next(&mut self) -> Option { match self.iter.next() { Option::Some(v) => Option::Some(self.f.call_once(v)), Option::None => Option::None, } } } } pub mod take { use super::super::traits::iterator::Iterator; use crate::option::Option; pub struct Take { iter: I, n: 
usize, } impl Take { pub fn new(iter: I, n: usize) -> Take { Take { iter, n } } } impl Iterator for Take { type Item = ::Item; fn next(&mut self) -> Option<::Item> { if self.n != 0 { self.n -= 1; self.iter.next() } else { Option::None } } } } pub mod flat_map { use super::super::traits::iterator::Iterator; use crate::option::Option; pub struct FlatMap { it: I, f: F, current: Option, } impl> FlatMap { pub fn new(it: I, f: F) -> Self { Self { it, f, current: Option::None, } } } use crate::ops::function::*; #[hax_lib::opaque] impl> Iterator for FlatMap { type Item = U::Item; fn next(&mut self) -> Option { loop { if let Option::Some(current_it) = &mut self.current && let Option::Some(v) = current_it.next() { return Option::Some(v); } else { match self.it.next() { Option::Some(c) => self.current = Option::Some(self.f.call_once(c)), Option::None => return Option::None, } } } } } } pub mod flatten { use super::super::traits::iterator::Iterator; use crate::option::Option; #[hax_lib::fstar::before("noeq")] // https://github.com/cryspen/hax/issues/1810 pub struct Flatten where I::Item: Iterator, { it: I, current: Option, } impl Flatten where I::Item: Iterator, { pub fn new(it: I) -> Self { Self { it, current: Option::None, } } } #[hax_lib::opaque] impl Iterator for Flatten where I::Item: Iterator, { type Item = <::Item as Iterator>::Item; fn next(&mut self) -> Option<<::Item as Iterator>::Item> { loop { if let Option::Some(current_it) = &mut self.current && let Option::Some(v) = current_it.next() { return Option::Some(v); } else { match self.it.next() { Option::Some(c) => self.current = Option::Some(c), Option::None => return Option::None, } } } } } } pub mod zip { use super::super::traits::iterator::Iterator; use crate::option::Option; pub struct Zip { it1: I1, it2: I2, } impl Zip { pub fn new(it1: I1, it2: I2) -> Self { Self { it1, it2 } } } #[hax_lib::opaque] impl Iterator for Zip { type Item = (I1::Item, I2::Item); fn next(&mut self) -> Option { match self.it1.next() { 
Option::Some(v1) => match self.it2.next() { Option::Some(v2) => Option::Some((v1, v2)), Option::None => Option::None, }, Option::None => Option::None, } } } } } ================================================ FILE: hax-lib/core-models/src/core/marker.rs ================================================ use super::clone::Clone; pub trait Copy: Clone {} pub trait Send {} pub trait Sync {} pub trait Sized {} pub trait StructuralPartialEq {} // In our models, all types implement those marker traits impl Send for T {} impl Sync for T {} impl Sized for T {} impl Copy for T {} #[hax_lib::fstar::replace("type t_PhantomData (v_T: Type0) = | PhantomData : t_PhantomData v_T")] #[hax_lib::lean::replace("structure PhantomData (T : Type) where")] struct PhantomData(T); ================================================ FILE: hax-lib/core-models/src/core/mem.rs ================================================ #![allow(unused_variables)] use super::marker::Copy; #[hax_lib::opaque] pub fn forget(t: T) { panic!() } #[hax_lib::opaque] pub fn forget_unsized(t: T) { panic!() } #[hax_lib::opaque] pub fn size_of() -> usize { panic!() } #[hax_lib::opaque] pub fn size_of_val(val: &T) -> usize { panic!() } #[hax_lib::opaque] pub fn min_align_of() -> usize { panic!() } #[hax_lib::opaque] pub fn min_align_of_val(val: &T) -> usize { panic!() } #[hax_lib::opaque] pub fn align_of() -> usize { panic!() } #[hax_lib::opaque] pub fn align_of_val(val: &T) -> usize { panic!() } #[hax_lib::opaque] pub unsafe fn align_of_val_raw(val: T) -> usize { panic!() } #[hax_lib::opaque] pub fn needs_drop() -> bool { panic!() } #[hax_lib::opaque] pub unsafe fn uninitialized() -> T { panic!() } #[hax_lib::opaque] pub fn swap(x: &mut T, y: &mut T) { panic!() } #[hax_lib::opaque] pub fn replace(dest: &mut T, src: T) -> T { panic!() } #[hax_lib::opaque] pub fn drop(_x: T) {} pub fn copy(x: &T) -> T { rust_primitives::mem::copy(x) } #[hax_lib::opaque] pub unsafe fn take(x: &mut T) -> T { panic!() } #[hax_lib::opaque] pub 
unsafe fn transmute_copy(src: &Src) -> Dst { panic!() } #[hax_lib::opaque] pub fn variant_count() -> usize { panic!() } #[hax_lib::opaque] pub unsafe fn zeroed() -> T { panic!() } #[hax_lib::opaque] pub unsafe fn transmute(src: Src) -> Dst { panic!() } mod manually_drop { pub struct ManuallyDrop { value: T, } } ================================================ FILE: hax-lib/core-models/src/core/num/error.rs ================================================ //! Error types for conversion to integral types. #![allow(unused_variables)] pub struct TryFromIntError(pub(crate) ()); pub struct ParseIntError { pub(super) kind: IntErrorKind, } // Because of representations, enums bring a dependency to isize. // TODO Fix the dependency issue and add `IntErrorKind` /* pub enum IntErrorKind { Empty, InvalidDigit, PosOverflow, NegOverflow, Zero, } */ pub struct IntErrorKind; ================================================ FILE: hax-lib/core-models/src/core/num/mod.rs ================================================ #![allow(non_camel_case_types, unused_variables)] use crate::result::Result; use pastey::paste; pub mod error; use rust_primitives::arithmetic::*; macro_rules! uint_impl { ( $Self: ty, $Name: ty, $Max: expr, $Bits: expr, $Bytes: expr, ) => { #[hax_lib::attributes] impl $Name { pub const MIN: $Self = 0; pub const MAX: $Self = $Max; pub const BITS: core::primitive::u32 = $Bits; fn wrapping_add(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_add(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn overflowing_add(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_add(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() + y.to_int() && x.to_int() + y.to_int() <= Self::MAX.to_int() { Option::Some(x + y) } else { Option::None } } fn wrapping_sub(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_sub(x: $Self, y: $Self) -> $Self { paste! 
{ [](x, y) } } fn overflowing_sub(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_sub(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() - y.to_int() && x.to_int() - y.to_int() <= Self::MAX.to_int() { Option::Some(x - y) } else { Option::None } } fn wrapping_mul(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_mul(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn overflowing_mul(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_mul(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() * y.to_int() && x.to_int() * y.to_int() <= Self::MAX.to_int() { Option::Some(x * y) } else { Option::None } } #[hax_lib::requires(y != 0)] fn rem_euclid(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn pow(x: $Self, exp: core::primitive::u32) -> $Self { paste! { [](x, exp) } } fn count_ones(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::opaque] fn rotate_right(x: $Self, n: core::primitive::u32) -> $Self { paste! { [](x, n) } } #[hax_lib::opaque] fn rotate_left(x: $Self, n: core::primitive::u32) -> $Self { paste! { [](x, n) } } #[hax_lib::opaque] fn leading_zeros(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::opaque] fn ilog2(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::opaque] fn from_str_radix( src: &str, radix: core::primitive::u32, ) -> Result<$Self, error::ParseIntError> { crate::panicking::internal::panic() } #[hax_lib::opaque] fn from_be_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self { paste! { [](bytes) } } #[hax_lib::opaque] fn from_le_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self { paste! { [](bytes) } } #[hax_lib::opaque] fn to_be_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] { paste! { [](bytes) } } #[hax_lib::opaque] fn to_le_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] { paste! { [](bytes) } } } }; } use crate::option::Option; use hax_lib::int::ToInt; macro_rules! 
iint_impl { ( $Self: ty, $Name: ty, $Max: expr, $Min: expr, $Bits: expr, $Bytes: expr, ) => { #[hax_lib::attributes] impl $Name { pub const MIN: $Self = $Min; pub const MAX: $Self = $Max; pub const BITS: core::primitive::u32 = $Bits; fn wrapping_add(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_add(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn overflowing_add(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_add(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() + y.to_int() && x.to_int() + y.to_int() <= Self::MAX.to_int() { Option::Some(x + y) } else { Option::None } } fn wrapping_sub(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_sub(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn overflowing_sub(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_sub(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() - y.to_int() && x.to_int() - y.to_int() <= Self::MAX.to_int() { Option::Some(x - y) } else { Option::None } } fn wrapping_mul(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn saturating_mul(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn overflowing_mul(x: $Self, y: $Self) -> ($Self, bool) { paste! { [](x, y) } } fn checked_mul(x: $Self, y: $Self) -> Option<$Self> { if Self::MIN.to_int() <= x.to_int() * y.to_int() && x.to_int() * y.to_int() <= Self::MAX.to_int() { Option::Some(x * y) } else { Option::None } } #[hax_lib::requires(y != 0)] fn rem_euclid(x: $Self, y: $Self) -> $Self { paste! { [](x, y) } } fn pow(x: $Self, exp: core::primitive::u32) -> $Self { paste! { [](x, exp) } } fn count_ones(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::requires(x > $Self::MIN)] fn abs(x: $Self) -> $Self { paste! { [](x) } } #[hax_lib::opaque] fn rotate_right(x: $Self, n: core::primitive::u32) -> $Self { paste! 
{ [](x, n) } } #[hax_lib::opaque] fn rotate_left(x: $Self, n: core::primitive::u32) -> $Self { paste! { [](x, n) } } #[hax_lib::opaque] fn leading_zeros(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::opaque] fn ilog2(x: $Self) -> core::primitive::u32 { paste! { [](x) } } #[hax_lib::opaque] fn from_str_radix( src: &str, radix: core::primitive::u32, ) -> Result<$Self, error::ParseIntError> { crate::panicking::internal::panic() } #[hax_lib::opaque] fn from_be_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self { paste! { [](bytes) } } #[hax_lib::opaque] fn from_le_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self { paste! { [](bytes) } } #[hax_lib::opaque] fn to_be_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] { paste! { [](bytes) } } #[hax_lib::opaque] fn to_le_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] { paste! { [](bytes) } } } }; } // These types are a trick to define impls on the right names as // it is forbidden to do it on primitive types #[hax_lib::exclude] pub struct u8; #[hax_lib::exclude] pub struct u16; #[hax_lib::exclude] pub struct u32; #[hax_lib::exclude] pub struct u64; #[hax_lib::exclude] pub struct u128; #[hax_lib::exclude] pub struct usize; #[hax_lib::exclude] pub struct i8; #[hax_lib::exclude] pub struct i16; #[hax_lib::exclude] pub struct i32; #[hax_lib::exclude] pub struct i64; #[hax_lib::exclude] pub struct i128; #[hax_lib::exclude] pub struct isize; // Placeholders to get the same impl numbering as in core: #[hax_lib::attributes] impl i8 {} #[hax_lib::attributes] impl i16 {} #[hax_lib::attributes] impl i32 {} #[hax_lib::attributes] impl i64 {} #[hax_lib::attributes] impl i128 {} #[hax_lib::attributes] impl isize {} uint_impl! { core::primitive::u8, u8, 255, 8, 1, } uint_impl! { core::primitive::u16, u16, 65535, 16, 2, } uint_impl! { core::primitive::u32, u32, 4294967295, 32, 4, } uint_impl! { core::primitive::u64, u64, 18446744073709551615, 64, 8, } uint_impl! 
{ core::primitive::u128, u128, 340282366920938463463374607431768211455, 128, 16, } uint_impl! { core::primitive::usize, usize, USIZE_MAX, SIZE_BITS, SIZE_BYTES, } iint_impl! { core::primitive::i8, i8, 127, -128, 8, 1, } iint_impl! { core::primitive::i16, i16, 32767, -32768, 16, 2, } iint_impl! { core::primitive::i32, i32, 2147483647, -2147483648, 32, 4, } iint_impl! { core::primitive::i64, i64, 9223372036854775807, -9223372036854775808, 64, 8, } iint_impl! { core::primitive::i128, i128, 170141183460469231731687303715884105727, -170141183460469231731687303715884105728, 128, 16, } iint_impl! { core::primitive::isize, isize, ISIZE_MAX, ISIZE_MIN, SIZE_BITS, SIZE_BYTES, } macro_rules! impl_default_for_int { ($($t:ty),*) => { $( #[hax_lib::attributes] impl crate::default::Default for $t { fn default() -> $t { 0 } } )* }; } impl_default_for_int!( core::primitive::u8, core::primitive::u16, core::primitive::u32, core::primitive::u64, core::primitive::u128, core::primitive::usize, core::primitive::i8, core::primitive::i16, core::primitive::i32, core::primitive::i64, core::primitive::i128, core::primitive::isize ); ================================================ FILE: hax-lib/core-models/src/core/ops.rs ================================================ pub mod arith { pub trait Add { type Output; fn add(self, rhs: Rhs) -> Self::Output; } pub trait Sub { type Output; fn sub(self, rhs: Rhs) -> Self::Output; } pub trait Mul { type Output; fn mul(self, rhs: Rhs) -> Self::Output; } pub trait Div { type Output; fn div(self, rhs: Rhs) -> Self::Output; } pub trait Neg { type Output; fn neg(self) -> Self::Output; } pub trait Rem { type Output; fn rem(self, rhs: Rhs) -> Self::Output; } pub trait AddAssign { fn add_assign(&mut self, rhs: Rhs); } pub trait SubAssign { fn sub_assign(&mut self, rhs: Rhs); } pub trait MulAssign { fn mul_assign(&mut self, rhs: Rhs); } pub trait DivAssign { fn div_assign(&mut self, rhs: Rhs); } pub trait RemAssign { fn rem_assign(&mut self, rhs: Rhs); } 
macro_rules! int_trait_impls { ($($Self:ty)*) => { use hax_lib::ToInt; $( #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl crate::ops::arith::AddAssign<$Self> for $Self { #[hax_lib::requires(self.to_int() + rhs.to_int() <= $Self::MAX.to_int())] fn add_assign(&mut self, rhs: $Self) { *self = *self + rhs } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl crate::ops::arith::SubAssign<$Self> for $Self { #[hax_lib::requires(self.to_int() - rhs.to_int() >= 0.to_int())] fn sub_assign(&mut self, rhs: $Self) { *self = *self - rhs } })* } } int_trait_impls!(u8 u16 u32 u64); } pub mod bit { trait Shr { type Output; fn shr(self, rhs: Rhs) -> Self::Output; } trait Shl { type Output; fn shl(self, rhs: Rhs) -> Self::Output; } trait BitXor { type Output; fn bitxor(self, rhs: Rhs) -> Self::Output; } trait BitAnd { type Output; fn bitand(self, rhs: Rhs) -> Self::Output; } trait BitOr { type Output; fn bitor(self, rhs: Rhs) -> Self::Output; } } pub mod control_flow { pub enum ControlFlow { Continue(C), Break(B), } } pub mod index { pub trait Index { type Output: ?Sized; fn index(&self, i: Idx) -> &Self::Output; } } pub mod function { #[hax_lib::attributes] pub trait FnOnce { type Output; #[hax_lib::requires(true)] fn call_once(&self, args: Args) -> Self::Output; } #[hax_lib::attributes] pub trait Fn: FnOnce { #[hax_lib::requires(true)] fn call(&self, args: Args) -> Self::Output; } /* These instances provide implementations of the F* type classes corresponding to Fn traits for anonymous functions. 
This ensures that passing a closure where something implementing Fn works when translated to F* */ #[hax_lib::fstar::after( "unfold instance fnonce_arrow_binder t u : t_FnOnce (_:t -> u) t = { f_Output = u; f_call_once_pre = (fun _ _ -> true); f_call_once_post = (fun (x0: (_:t -> u)) (x1: t) (res: u) -> res == x0 x1); f_call_once = (fun (x0: (_:t -> u)) (x1: t) -> x0 x1); }" )] impl FnOnce for fn(Arg) -> Out { type Output = Out; fn call_once(&self, arg: Arg) -> Out { self(arg) } } impl FnOnce<(Arg1, Arg2)> for fn(Arg1, Arg2) -> Out { type Output = Out; fn call_once(&self, arg: (Arg1, Arg2)) -> Out { self(arg.0, arg.1) } } impl FnOnce<(Arg1, Arg2, Arg3)> for fn(Arg1, Arg2, Arg3) -> Out { type Output = Out; fn call_once(&self, arg: (Arg1, Arg2, Arg3)) -> Out { self(arg.0, arg.1, arg.2) } } } mod try_trait { trait FromResidual { fn from_residual(x: R) -> Self; } trait Try { type Output; type Residual; fn from_output(x: Self::Output) -> Self; fn branch(&self) -> super::control_flow::ControlFlow; } } mod deref { pub trait Deref { type Target: ?Sized; fn deref(&self) -> &Self::Target; } impl Deref for &T { type Target = T; fn deref(&self) -> &T { &self } } } mod drop { trait Drop { fn drop(&mut self); } } pub mod range { pub struct RangeTo { pub end: T, } pub struct RangeFrom { pub start: T, } pub struct Range { pub start: T, pub end: T, } pub struct RangeFull; macro_rules! 
impl_iterator_range_int { ($($int_type: ident)*) => { use crate::option::Option; $( #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl crate::iter::traits::iterator::Iterator for Range<$int_type> { type Item = $int_type; fn next(&mut self) -> Option<$int_type> { if self.start >= self.end { Option::None } else { let res = self.start; self.start += 1; Option::Some(res) } } } )* } } impl_iterator_range_int!(u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize); } ================================================ FILE: hax-lib/core-models/src/core/option.rs ================================================ pub enum Option { Some(T), None, } use super::default::Default; use super::ops::function::*; use super::result::Result::*; use super::result::*; use Option::*; #[hax_lib::attributes] impl Option { #[hax_lib::ensures(|res| hax_lib::Prop::implies(res.into(), fstar!("Option_Some? self")))] pub fn is_some(&self) -> bool { matches!(*self, Some(_)) } pub fn is_some_and>(self, f: F) -> bool { match self { None => false, Some(x) => f.call_once(x), } } pub fn is_none(&self) -> bool { self.is_some() == false } pub fn is_none_or>(self, f: F) -> bool { match self { None => true, Some(x) => f.call_once(x), } } pub const fn as_ref(&self) -> Option<&T> { match *self { Some(ref x) => Some(x), None => None, } } #[hax_lib::requires(self.is_some())] pub fn expect(self, _msg: &str) -> T { match self { Some(val) => val, None => super::panicking::internal::panic(), } } #[hax_lib::requires(self.is_some())] pub fn unwrap(self) -> T { match self { Some(val) => val, None => super::panicking::internal::panic(), } } pub fn unwrap_or(self, default: T) -> T { match self { Some(x) => x, None => default, } } pub fn unwrap_or_else>(self, f: F) -> T { match self { Some(x) => x, None => f.call_once(()), } } pub fn unwrap_or_default(self) -> T where T: Default, { match self { Some(x) => x, None => T::default(), } } pub fn map(self, f: F) -> Option where F: FnOnce, { match self { Some(x) => 
Some(f.call_once(x)),
            None => None,
        }
    }

    /// Applies `f` to the contained value, or returns `default` on `None`.
    pub fn map_or<U, F>(self, default: U, f: F) -> U
    where
        F: FnOnce<T, Output = U>,
    {
        match self {
            Some(t) => f.call_once(t),
            None => default,
        }
    }

    /// Applies `f` to the contained value, or lazily computes a default.
    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
    where
        F: FnOnce<T, Output = U>,
        D: FnOnce<(), Output = U>,
    {
        match self {
            Some(t) => f.call_once(t),
            None => default.call_once(()),
        }
    }

    /// Applies `f` to the contained value, or returns `U::default()`.
    pub fn map_or_default<U, F>(self, f: F) -> U
    where
        F: FnOnce<T, Output = U>,
        U: Default,
    {
        match self {
            Some(t) => f.call_once(t),
            None => U::default(),
        }
    }

    /// Transforms `Option<T>` into `Result<T, E>`, mapping `None` to `Err(err)`.
    pub fn ok_or<E>(self, err: E) -> Result<T, E> {
        match self {
            Some(v) => Ok(v),
            None => Err(err),
        }
    }

    /// Like `ok_or`, with a lazily computed error.
    pub fn ok_or_else<E, F: FnOnce<(), Output = E>>(self, err: F) -> Result<T, E> {
        match self {
            Some(v) => Ok(v),
            None => Err(err.call_once(())),
        }
    }

    /// Monadic bind: `None` stays `None`, `Some(x)` becomes `f(x)`.
    pub fn and_then<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce<T, Output = Option<U>>,
    {
        match self {
            Some(x) => f.call_once(x),
            None => None,
        }
    }

    // The interface in Rust is wrong, but is good after extraction.
    // We cannot make a useful model with the right interface, so we lose executability.
    pub fn take(self) -> (Option<T>, Option<T>) {
        (None, self)
    }
}

================================================
FILE: hax-lib/core-models/src/core/panicking.rs
================================================
/// Diverging panic; the `false` precondition makes it unreachable in verified code.
#[hax_lib::opaque]
#[hax_lib::requires(false)]
pub fn panic_explicit() -> ! {
    panic!()
}

/// Panic with an (ignored) message; unreachable under verification.
#[hax_lib::opaque]
#[hax_lib::requires(false)]
pub fn panic(_msg: &str) -> ! {
    panic!()
}

/// Panic with (ignored) formatted arguments; unreachable under verification.
#[hax_lib::opaque]
#[hax_lib::requires(false)]
pub fn panic_fmt(_fmt: super::fmt::Arguments) -> !
{ panic!() }

pub mod internal {
    // This module is used to break a dependency cycle (other core modules have
    // panics and this brings a dependency on core::fmt that we need to avoid)
    #[hax_lib::opaque]
    #[hax_lib::requires(false)]
    pub fn panic<T>() -> T {
        panic!("")
    }
}

================================================
FILE: hax-lib/core-models/src/core/result.rs
================================================
/// Model of `core::result::Result`.
// NOTE(review): the generic parameter lists in this file were stripped by the
// extraction; they are reconstructed below to match the `core` API.
pub enum Result<T, E> {
    Ok(T),
    Err(E),
}

use super::ops::function::*;
use super::option::Option;
use Result::*;

#[hax_lib::attributes]
impl<T, E> Result<T, E> {
    /// Extracts the `Ok` value; the precondition rules out `Err`.
    #[hax_lib::requires(self.is_ok())]
    pub fn unwrap(self) -> T {
        match self {
            Ok(t) => t,
            Err(_) => super::panicking::internal::panic(),
        }
    }

    /// Returns the `Ok` value or `default`.
    pub fn unwrap_or(self, default: T) -> T {
        match self {
            Ok(t) => t,
            Err(_) => default,
        }
    }

    /// Like `unwrap`, with an (ignored) diagnostic message.
    #[hax_lib::requires(self.is_ok())]
    pub fn expect(self, _msg: &str) -> T {
        match self {
            Ok(t) => t,
            Err(_) => super::panicking::internal::panic(),
        }
    }

    /// Maps `Ok(t)` to `Ok(op(t))`, passing `Err` through.
    pub fn map<U, F>(self, op: F) -> Result<U, E>
    where
        F: FnOnce<T, Output = U>,
    {
        match self {
            Ok(t) => Ok(op.call_once(t)),
            Err(e) => Err(e),
        }
    }

    /// Applies `f` to the `Ok` value, or returns `default` on `Err`.
    pub fn map_or<U, F>(self, default: U, f: F) -> U
    where
        F: FnOnce<T, Output = U>,
    {
        match self {
            Ok(t) => f.call_once(t),
            Err(_e) => default,
        }
    }

    /// Applies `f` to the `Ok` value, or `default` to the `Err` value.
    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
    where
        F: FnOnce<T, Output = U>,
        D: FnOnce<E, Output = U>,
    {
        match self {
            Ok(t) => f.call_once(t),
            Err(e) => default.call_once(e),
        }
    }

    /// Maps `Err(e)` to `Err(op(e))`, passing `Ok` through.
    pub fn map_err<F, O>(self, op: O) -> Result<T, F>
    where
        O: FnOnce<E, Output = F>,
    {
        match self {
            Ok(t) => Ok(t),
            Err(e) => Err(op.call_once(e)),
        }
    }

    /// Returns `true` iff `self` is `Ok(_)`.
    pub fn is_ok(&self) -> bool {
        matches!(*self, Ok(_))
    }

    /// Monadic bind: `Ok(t)` becomes `op(t)`, `Err` passes through.
    pub fn and_then<U, F>(self, op: F) -> Result<U, E>
    where
        F: FnOnce<T, Output = Result<U, E>>,
    {
        match self {
            Ok(t) => op.call_once(t),
            Err(e) => Err(e),
        }
    }

    /// Converts to `Option`, discarding the error.
    pub fn ok(self) -> Option<T> {
        match self {
            Ok(x) => Option::Some(x),
            Err(_) => Option::None,
        }
    }
}

================================================
FILE: hax-lib/core-models/src/core/slice.rs
================================================
use crate::result::Result;

// Dummy type to allow impls
#[hax_lib::exclude]
struct Slice<T>(T);

pub mod iter {
    use crate::option::Option;
    use
rust_primitives::{sequence::*, slice::*};

    /// Iterator over `cs`-sized chunks of a slice (the last chunk may be shorter).
    pub struct Chunks<'a, T> {
        cs: usize,
        elements: &'a [T],
    }

    impl<'a, T> Chunks<'a, T> {
        pub fn new(cs: usize, elements: &'a [T]) -> Chunks<'a, T> {
            Chunks { cs, elements }
        }
    }

    /// Iterator over exactly `cs`-sized chunks; a shorter tail is never yielded.
    pub struct ChunksExact<'a, T> {
        cs: usize,
        elements: &'a [T],
    }

    impl<'a, T> ChunksExact<'a, T> {
        pub fn new(cs: usize, elements: &'a [T]) -> ChunksExact<'a, T> {
            ChunksExact { cs, elements }
        }
    }

    /// By-value iterator over the elements of a slice, backed by a sequence.
    pub struct Iter<T>(pub Seq<T>);

    // NOTE(review): generics reconstructed after extraction stripping; `T` may
    // need a `Clone`/`Copy` bound depending on `seq_first` — confirm against
    // `rust_primitives`.
    impl<T> crate::iter::traits::iterator::Iterator for Iter<T> {
        type Item = T;
        fn next(&mut self) -> Option<T> {
            if seq_len(&self.0) == 0 {
                Option::None
            } else {
                let res = seq_first(&self.0);
                self.0 = seq_slice(&self.0, 1, seq_len(&self.0));
                Option::Some(res)
            }
        }
    }

    impl<'a, T> crate::iter::traits::iterator::Iterator for Chunks<'a, T> {
        type Item = &'a [T];
        fn next(&mut self) -> Option<&'a [T]> {
            if slice_length(self.elements) == 0 {
                Option::None
            } else if slice_length(self.elements) < self.cs {
                // Shorter tail: yield it and leave an empty slice behind.
                let res = self.elements;
                self.elements = slice_slice(self.elements, 0, 0);
                Option::Some(res)
            } else {
                let (res, new_elements) = slice_split_at(self.elements, self.cs);
                self.elements = new_elements;
                Option::Some(res)
            }
        }
    }

    impl<'a, T> crate::iter::traits::iterator::Iterator for ChunksExact<'a, T> {
        type Item = &'a [T];
        fn next(&mut self) -> Option<&'a [T]> {
            if slice_length(self.elements) < self.cs {
                Option::None
            } else {
                let (res, new_elements) = slice_split_at(self.elements, self.cs);
                self.elements = new_elements;
                Option::Some(res)
            }
        }
    }
}

// Static methods modelling the inherent slice API (`<[T]>::len`, …).
#[hax_lib::attributes]
impl<T> Slice<T> {
    fn len(s: &[T]) -> usize {
        rust_primitives::slice::slice_length(s)
    }

    fn chunks<'a>(s: &'a [T], cs: usize) -> iter::Chunks<'a, T> {
        iter::Chunks::new(cs, s)
    }

    fn iter(s: &[T]) -> iter::Iter<T> {
        iter::Iter(rust_primitives::sequence::seq_from_slice(s))
    }

    fn chunks_exact<'a>(s: &'a [T], cs: usize) -> iter::ChunksExact<'a, T> {
        iter::ChunksExact::new(cs, s)
    }

    #[hax_lib::requires(Slice::len(s) == Slice::len(src))]
    fn copy_from_slice(s: &mut [T], src: &[T])
    where
        T: crate::marker::Copy,
    {
        rust_primitives::mem::replace(s, src);
    }

    #[hax_lib::requires(Slice::len(s) == Slice::len(src))]
    fn clone_from_slice(s: &mut [T], src: &[T])
    where
        T: crate::clone::Clone,
    {
        rust_primitives::mem::replace(s, src);
    }

    #[hax_lib::requires(mid <= Slice::len(s))]
    fn split_at(s: &[T], mid: usize) -> (&[T], &[T]) {
        rust_primitives::slice::slice_split_at(s, mid)
    }

    fn split_at_checked(s: &[T], mid: usize) -> Option<(&[T], &[T])> {
        if mid <= Slice::len(s) {
            Option::Some(Self::split_at(s, mid))
        } else {
            Option::None
        }
    }

    fn is_empty(s: &[T]) -> bool {
        Self::len(s) == 0
    }

    #[hax_lib::opaque]
    fn contains(s: &[T], v: T) -> bool {
        rust_primitives::slice::slice_contains(s, v)
    }

    // NOTE(review): `<R>` reconstructed; upstream presumably bounds it by
    // `RangeBounds<usize>` — confirm.
    #[hax_lib::opaque]
    fn copy_within<R>(s: &[T], src: R, dest: usize) -> &[T]
    where
        T: Copy,
    {
        todo!()
    }

    // NOTE(review): `Result<usize, usize>` reconstructed to match `core`'s
    // `binary_search` signature.
    #[hax_lib::opaque]
    fn binary_search(s: &[T], x: &T) -> Result<usize, usize> /* where T: super::ops::Ord */ {
        todo!()
    }

    fn get<I: SliceIndex<[T]>>(s: &[T], index: I) -> Option<&<I as SliceIndex<[T]>>::Output> {
        index.get(s)
    }
}

#[hax_lib::attributes]
#[cfg_attr(hax_backend_lean, hax_lib::exclude)]
impl<T> crate::iter::traits::collect::IntoIterator for &[T] {
    type IntoIter = iter::Iter<T>;
    fn into_iter(self) -> Self::IntoIter {
        Slice::iter(self)
    }
}

use crate::option::Option;
use rust_primitives::slice::*;

/// Model of `core::slice::SliceIndex`: types usable to index into `T`.
#[hax_lib::attributes]
pub trait SliceIndex<T: ?Sized> {
    type Output: ?Sized;
    #[hax_lib::requires(true)]
    fn get(self, slice: &T) -> Option<&Self::Output>;
    /* fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
    unsafe fn get_unchecked(self, slice: *const T) -> *const Self::Output;
    unsafe fn get_unchecked_mut(self, slice: *mut T) -> *mut Self::Output;
    fn index(self, slice: &T) -> &Self::Output;
    fn index_mut(self, slice: &mut T) -> &mut Self::Output; */
}

#[hax_lib::attributes]
#[cfg_attr(hax_backend_lean, hax_lib::exclude)]
impl<T> SliceIndex<[T]> for usize {
    type Output = T;
    fn get(self, slice: &[T]) -> Option<&T> {
        if self < slice.len() {
            Option::Some(slice_index(slice, self))
        } else {
            Option::None
        }
    }
}

#[hax_lib::attributes]
#[cfg_attr(hax_backend_lean, hax_lib::exclude)]
impl SliceIndex<[T]> for crate::ops::range::RangeFull { type Output = [T]; fn get(self, slice: &[T]) -> Option<&[T]> { Option::Some(slice) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl SliceIndex<[T]> for crate::ops::range::RangeFrom { type Output = [T]; fn get(self, slice: &[T]) -> Option<&[T]> { if self.start < slice.len() { Option::Some(slice_slice(slice, self.start, slice.len())) } else { Option::None } } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl SliceIndex<[T]> for crate::ops::range::RangeTo { type Output = [T]; fn get(self, slice: &[T]) -> Option<&[T]> { if self.end <= slice.len() { Option::Some(slice_slice(slice, 0, self.end)) } else { Option::None } } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl SliceIndex<[T]> for crate::ops::range::Range { type Output = [T]; fn get(self, slice: &[T]) -> Option<&[T]> { if self.start < self.end && self.end <= slice.len() { Option::Some(slice_slice(slice, self.start, self.end)) } else { Option::None } } } use crate::ops::{ index::Index, range::{Range, RangeFrom, RangeFull, RangeTo}, }; #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for &[T] { type Output = [T]; #[hax_lib::requires(i.start <= i.end && i.end <= self.len())] fn index(&self, i: Range) -> &[T] { slice_slice(self, i.start, i.end) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for &[T] { type Output = [T]; #[hax_lib::requires(i.end <= self.len())] fn index(&self, i: RangeTo) -> &[T] { slice_slice(self, 0, i.end) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index> for &[T] { type Output = [T]; #[hax_lib::requires(i.start <= self.len())] fn index(&self, i: RangeFrom) -> &[T] { slice_slice(self, i.start, slice_length(self)) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl Index for &[T] { type Output = [T]; fn index(&self, i: 
RangeFull) -> &[T] { slice_slice(self, 0, slice_length(self)) } } #[hax_lib::attributes] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl crate::ops::index::Index for &[T] { type Output = T; #[hax_lib::requires(i < self.len())] fn index(&self, i: usize) -> &T { rust_primitives::slice::slice_index(self, i) } } ================================================ FILE: hax-lib/core-models/src/core/str.rs ================================================ mod converts { #[hax_lib::opaque] fn from_utf8(s: &[u8]) -> crate::result::Result<&str, super::error::Utf8Error> { panic!() } } mod error { pub struct Utf8Error; } mod iter { struct Split(T); } mod traits { trait FromStr: Sized { type Err; fn from_str(s: &str) -> crate::result::Result; } #[hax_lib::opaque] #[cfg_attr(hax_backend_lean, hax_lib::exclude)] impl FromStr for u64 { type Err = u64; fn from_str(s: &str) -> crate::result::Result { panic!() } } } ================================================ FILE: hax-lib/core-models/src/lib.rs ================================================ //! `core-models`: A Rust Model for the `core` Library //! //! `core-models` is a simplified, self-contained model of Rust’s `core` library. It aims to provide //! a purely Rust-based specification of `core`'s fundamental operations, making them easier to //! understand, analyze, and formally verify. Unlike `core`, which may rely on platform-specific //! intrinsics and compiler magic, `core-models` expresses everything in plain Rust, prioritizing //! clarity and explicitness over efficiency. //! //! ## Key Features //! //! - **Partial Modeling**: `core-models` includes only a subset of `core`, focusing on modeling //! fundamental operations rather than providing a complete replacement. //! - **Exact Signatures**: Any item that exists in both `core-models` and `core` has the same type signature, //! ensuring compatibility with formal verification efforts. //! 
- **Purely Functional Approach**: Where possible, `core-models` favors functional programming principles, //! avoiding unnecessary mutation and side effects to facilitate formal reasoning. //! - **Explicit Implementations**: Even low-level operations, such as SIMD, are modeled explicitly using //! Rust constructs like bit arrays and partial maps. //! - **Extra Abstractions**: `core-models` includes additional helper types and functions to support //! modeling. These extra items are marked appropriately to distinguish them from `core` definitions. //! //! ## Intended Use //! //! `core-models` is designed as a reference model for formal verification and reasoning about Rust programs. //! By providing a readable, well-specified version of `core`'s behavior, it serves as a foundation for //! proof assistants and other verification tools. #![allow(dead_code)] #[path = "core/array.rs"] pub mod array; #[path = "core/borrow.rs"] pub mod borrow; #[path = "core/clone.rs"] pub mod clone; #[path = "core/cmp.rs"] pub mod cmp; #[path = "core/convert.rs"] pub mod convert; #[path = "core/default.rs"] pub mod default; #[path = "core/error.rs"] pub mod error; #[path = "core/f32.rs"] pub mod f32; #[path = "core/fmt.rs"] pub mod fmt; #[path = "core/hash.rs"] pub mod hash; #[path = "core/hint.rs"] pub mod hint; #[path = "core/iter.rs"] pub mod iter; #[path = "core/marker.rs"] pub mod marker; #[path = "core/mem.rs"] pub mod mem; #[path = "core/num/mod.rs"] pub mod num; #[path = "core/ops.rs"] pub mod ops; #[path = "core/option.rs"] pub mod option; #[path = "core/panicking.rs"] pub mod panicking; #[path = "core/result.rs"] pub mod result; #[path = "core/slice.rs"] pub mod slice; #[path = "core/str.rs"] pub mod str; ================================================ FILE: hax-lib/core-models/std/Cargo.toml ================================================ [package] name = "std" version = "0.1.0" edition = "2024" [dependencies] hax-lib.workspace = true core-models = {path = ".."} 
================================================ FILE: hax-lib/core-models/std/src/lib.rs ================================================ mod collections { mod hash { mod map { #[hax_lib::opaque] struct HashMap(Option, Option, Option); impl HashMap { fn new() -> HashMap { HashMap(None, None, None) } } // Dummy impl for disambiguator (https://github.com/cryspen/hax/issues/828) impl HashMap {} impl HashMap { fn get(m: HashMap, k: K) -> core_models::option::Option { core_models::panicking::internal::panic() } fn insert( m: HashMap, k: K, v: V, ) -> (HashMap, core_models::option::Option) { core_models::panicking::internal::panic() } } } } } mod f64 { #[hax_lib::exclude] #[allow(non_camel_case_types)] struct f64; impl f64 { fn powf(x: core::primitive::f64, y: core::primitive::f64) -> core::primitive::f64 { core_models::panicking::internal::panic() } } } pub mod hash { pub mod random { pub struct RandomState; } } mod io { #[hax_lib::attributes] pub trait Read { // Required method #[hax_lib::requires(true)] #[hax_lib::ensures(|_| future(buf).len() == buf.len())] fn read(&mut self, buf: &mut [u8]) -> Result; // Provided methods (not provided in this model as hax doesn't support default methods) /* fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result; fn is_read_vectored(&self) -> bool; fn read_to_end(&mut self, buf: &mut Vec) -> Result; fn read_to_string(&mut self, buf: &mut String) -> Result; */ #[hax_lib::requires(true)] #[hax_lib::ensures(|_| future(buf).len() == buf.len())] fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), error::Error>; /* fn read_buf(&mut self, buf: BorrowedCursor<'_>) -> Result<()>; fn read_buf_exact(&mut self, cursor: BorrowedCursor<'_>) -> Result<()>; fn by_ref(&mut self) -> &mut Self where Self: Sized; fn bytes(self) -> Bytes where Self: Sized; fn chain(self, next: R) -> Chain where Self: Sized; fn take(self, limit: u64) -> Take where Self: Sized; */ } #[hax_lib::attributes] pub trait Write { // Required methods 
#[hax_lib::requires(true)] fn write(&mut self, buf: &[u8]) -> Result; #[hax_lib::requires(true)] fn flush(&mut self) -> Result<(), error::Error>; // Provided methods (not provided in this model as hax doesn't support default methods) /* fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result; fn is_write_vectored(&self) -> bool; */ #[hax_lib::requires(true)] fn write_all(&mut self, buf: &[u8]) -> Result<(), error::Error>; /* fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> Result<()>; fn write_fmt(&mut self, args: Arguments<'_>) -> Result<()>; fn by_ref(&mut self) -> &mut Self where Self: Sized; */ } pub mod error { pub struct Error; pub enum ErrorKind { NotFound, PermissionDenied, ConnectionRefused, ConnectionReset, HostUnreachable, NetworkUnreachable, ConnectionAborted, NotConnected, AddrInUse, AddrNotAvailable, NetworkDown, BrokenPipe, AlreadyExists, WouldBlock, NotADirectory, IsADirectory, DirectoryNotEmpty, ReadOnlyFilesystem, FilesystemLoop, StaleNetworkFileHandle, InvalidInput, InvalidData, TimedOut, WriteZero, StorageFull, NotSeekable, QuotaExceeded, FileTooLarge, ResourceBusy, ExecutableFileBusy, Deadlock, CrossesDevices, TooManyLinks, InvalidFilename, ArgumentListTooLong, Interrupted, Unsupported, UnexpectedEof, OutOfMemory, InProgress, Other, } impl Error { #[hax_lib::opaque] fn kind(&self) -> ErrorKind { core_models::panicking::internal::panic() } } } mod impls { impl super::Read for &[u8] { fn read(&mut self, buf: &mut [u8]) -> Result { let amt = core::cmp::min(buf.len(), self.len()); let (a, b) = self.split_at(amt); buf[..amt].copy_from_slice(a); *self = b; Ok(amt) } fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), super::error::Error> { if buf.len() > self.len() { // `read_exact` makes no promise about the content of `buf` if it // fails so don't bother about that. 
*self = &self[self.len()..]; return Err(super::error::Error); } let (a, b) = self.split_at(buf.len()); buf.copy_from_slice(a); *self = b; Ok(()) } } impl super::Write for Vec { fn write(&mut self, buf: &[u8]) -> Result { self.extend_from_slice(buf); Ok(buf.len()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), super::error::Error> { self.extend_from_slice(buf); Ok(()) } fn flush(&mut self) -> Result<(), super::error::Error> { Ok(()) } } } mod stdio { fn e_print(args: core::fmt::Arguments) {} } } ================================================ FILE: hax-lib/macros/Cargo.toml ================================================ [package] name = "hax-lib-macros" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition = "2021" repository.workspace = true readme = "README.md" description = "Hax-specific proc-macros for Rust programs" [lib] proc-macro = true [target.'cfg(hax)'.dependencies] proc-macro-error2 = { version = "2.0" } hax-lib-macros-types = { workspace = true } syn = { version = "2.0", features = ["full", "visit-mut", "visit"] } [dependencies] syn = { version = "2.0", features = ["full", "visit", "visit-mut"] } proc-macro2 = { workspace = true } quote = { workspace = true } [dev-dependencies] hax-lib = { path = ".." } [lints.rust] unexpected_cfgs = { level = "warn", check-cfg = ['cfg(hax)', 'cfg(doc_cfg)'] } ================================================ FILE: hax-lib/macros/README.md ================================================ # hax proc macros Hax-specific proc-macros for Rust programs. This crate defines proc macros to be used in Rust programs that are extracted with hax. It provides proc macros such as `requires` and `ensures` to define pre- and post-conditions for functions. 
================================================
FILE: hax-lib/macros/src/dummy.rs
================================================
mod hax_paths;
use hax_paths::*;

use proc_macro::{TokenStream, TokenTree};
use quote::quote;
use syn::{visit_mut::VisitMut, *};

// Defines one no-op attribute proc-macro per name: outside of hax extraction
// these attributes simply return the item unchanged.
macro_rules! identity_proc_macro_attribute {
    ($($name:ident),*$(,)?) => {
        $(
            #[proc_macro_attribute]
            pub fn $name(_attr: TokenStream, item: TokenStream) -> TokenStream {
                item
            }
        )*
    }
}

identity_proc_macro_attribute!(
    fstar_options,
    fstar_verification_status,
    include,
    exclude,
    requires,
    ensures,
    decreases,
    pv_handwritten,
    pv_constructor,
    protocol_messages,
    process_init,
    process_write,
    process_read,
    opaque,
    opaque_type,
    transparent,
    refinement_type,
    fstar_replace,
    coq_replace,
    lean_replace,
    proverif_replace,
    fstar_replace_body,
    coq_replace_body,
    lean_replace_body,
    proverif_replace_body,
    fstar_before,
    coq_before,
    lean_before,
    proverif_before,
    fstar_after,
    coq_after,
    lean_after,
    proverif_after,
    fstar_smt_pat,
    fstar_postprocess_with,
    lean_proof,
    lean_pure_requires_proof,
    lean_pure_ensures_proof,
    lean_proof_method_grind,
    lean_proof_method_bv_decide,
);

// Backend expression macros are units outside of extraction.
#[proc_macro]
pub fn fstar_expr(_payload: TokenStream) -> TokenStream {
    quote! { () }.into()
}

#[proc_macro]
pub fn coq_expr(_payload: TokenStream) -> TokenStream {
    quote! { () }.into()
}

#[proc_macro]
pub fn lean_expr(_payload: TokenStream) -> TokenStream {
    quote! { () }.into()
}

#[proc_macro]
pub fn proverif_expr(_payload: TokenStream) -> TokenStream {
    quote! { () }.into()
}

#[proc_macro_attribute]
pub fn lemma(_attr: TokenStream, _item: TokenStream) -> TokenStream {
    quote! {}.into()
}

fn unsafe_expr() -> TokenStream {
    // `*_unsafe_expr("")` are macro generating a Rust expression of any type, that will be replaced by `` in the backends.
    // This should be used solely in hax-only contexts.
    // If this macro is used, that means the user broke this rule.
    quote! { ::std::compile_error!("`hax_lib::unsafe_expr` has no meaning outside of hax extraction, please use it solely on hax-only places.") }.into()
}

#[proc_macro]
pub fn fstar_unsafe_expr(_payload: TokenStream) -> TokenStream {
    unsafe_expr()
}

#[proc_macro]
pub fn coq_unsafe_expr(_payload: TokenStream) -> TokenStream {
    unsafe_expr()
}

#[proc_macro]
pub fn lean_unsafe_expr(_payload: TokenStream) -> TokenStream {
    unsafe_expr()
}

#[proc_macro]
pub fn proverif_unsafe_expr(_payload: TokenStream) -> TokenStream {
    unsafe_expr()
}

// Backend proposition macros degrade to the trivially-true proposition.
#[proc_macro]
pub fn fstar_prop_expr(_payload: TokenStream) -> TokenStream {
    quote! {::hax_lib::Prop::from_bool(true)}.into()
}

#[proc_macro]
pub fn coq_prop_expr(_payload: TokenStream) -> TokenStream {
    quote! {::hax_lib::Prop::from_bool(true)}.into()
}

#[proc_macro]
pub fn lean_prop_expr(_payload: TokenStream) -> TokenStream {
    quote! {::hax_lib::Prop::from_bool(true)}.into()
}

#[proc_macro]
pub fn proverif_prop_expr(_payload: TokenStream) -> TokenStream {
    quote! {::hax_lib::Prop::from_bool(true)}.into()
}

/// `true` iff `attr` is NOT a hax decoration attribute (requires/ensures/…).
fn not_hax_attribute(attr: &syn::Attribute) -> bool {
    if let Meta::List(ml) = &attr.meta {
        !matches!(expects_path_decoration(&ml.path), Ok(Some(_)))
    } else {
        true
    }
}

/// `true` iff `attr` is NOT a hax field attribute (`refine`/`order`).
fn not_field_attribute(attr: &syn::Attribute) -> bool {
    if let Meta::List(ml) = &attr.meta {
        !(matches!(expects_refine(&ml.path), Ok(Some(_)))
            || matches!(expects_order(&ml.path), Ok(Some(_))))
    } else {
        true
    }
}

/// Outside of extraction, `#[attributes]` strips hax-specific attributes from
/// trait/impl methods and struct fields so the item compiles as plain Rust.
#[proc_macro_attribute]
pub fn attributes(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let item: Item = parse_macro_input!(item);
    struct AttrVisitor;
    use syn::visit_mut;
    impl VisitMut for AttrVisitor {
        fn visit_item_trait_mut(&mut self, item: &mut ItemTrait) {
            for ti in item.items.iter_mut() {
                if let TraitItem::Fn(fun) = ti {
                    fun.attrs.retain(not_hax_attribute)
                }
            }
            visit_mut::visit_item_trait_mut(self, item);
        }
        fn visit_type_mut(&mut self, _type: &mut Type) {}
        fn visit_item_impl_mut(&mut self, item: &mut ItemImpl) {
            for ii in item.items.iter_mut() {
                if let ImplItem::Fn(fun) = ii {
                    fun.attrs.retain(not_hax_attribute)
                }
            }
            visit_mut::visit_item_impl_mut(self, item);
        }
        fn visit_item_mut(&mut self, item: &mut Item) {
            visit_mut::visit_item_mut(self, item);
            match item {
                Item::Struct(s) => {
                    for field in s.fields.iter_mut() {
                        field.attrs.retain(not_field_attribute)
                    }
                }
                _ => (),
            }
        }
    }
    let mut item = item;
    AttrVisitor.visit_item_mut(&mut item);
    quote! { #item }.into()
}

/// `int!(-42)` expands to a `hax_lib::int::Int` literal.
// NOTE(review): `collect::<Vec<_>>()` turbofish reconstructed (stripped by
// extraction).
#[proc_macro]
pub fn int(payload: TokenStream) -> TokenStream {
    let mut tokens = payload.into_iter().peekable();
    let negative = matches!(tokens.peek(), Some(TokenTree::Punct(p)) if p.as_char() == '-');
    if negative {
        tokens.next();
    }
    let [lit @ TokenTree::Literal(_)] = &tokens.collect::<Vec<_>>()[..] else {
        return quote! { ::std::compile_error!("Expected exactly one numeric literal") }.into();
    };
    let lit: proc_macro2::TokenStream = TokenStream::from(lit.clone()).into();
    quote! {::hax_lib::int::Int(#lit)}.into()
}

#[proc_macro_attribute]
pub fn impl_fn_decoration(_attr: TokenStream, _item: TokenStream) -> TokenStream {
    quote! { ::std::compile_error!("`impl_fn_decoration` is an internal macro and should never be used directly.") }.into()
}

#[proc_macro_attribute]
pub fn trait_fn_decoration(_attr: TokenStream, _item: TokenStream) -> TokenStream {
    quote! { ::std::compile_error!("`trait_fn_decoration` is an internal macro and should never be used directly.") }.into()
}

#[proc_macro]
pub fn loop_invariant(_predicate: TokenStream) -> TokenStream {
    quote! {}.into()
}

#[proc_macro]
pub fn loop_decreases(_predicate: TokenStream) -> TokenStream {
    quote! {}.into()
}

================================================
FILE: hax-lib/macros/src/hax_paths.rs
================================================
//! This module defines the `ImplFnDecoration` structure and utils
//! around it.
use syn::spanned::Spanned; use syn::*; fn expect_simple_path(path: &Path) -> Option> { let mut chunks = vec![]; if path.leading_colon.is_some() { chunks.push(String::new()) } for segment in &path.segments { chunks.push(format!("{}", segment.ident)); if !matches!(segment.arguments, PathArguments::None) { return None; } } Some(chunks) } /// The various strings allowed as decoration kinds. pub const DECORATION_KINDS: &[&str] = &["decreases", "ensures", "requires"]; /// Expects a `Path` to be a decoration kind: `::hax_lib::`, /// `hax_lib::` or `` in (with `KIND` in /// `DECORATION_KINDS`). pub fn expects_path_decoration(path: &Path) -> Result> { expects_hax_path(DECORATION_KINDS, path) } /// Expects a path to be `[[::]hax_lib]::refine` pub fn expects_refine(path: &Path) -> Result> { expects_hax_path(&["refine"], path) } /// Expects a path to be `[[::]hax_lib]::order` pub fn expects_order(path: &Path) -> Result> { expects_hax_path(&["order"], path) } /// Expects a `Path` to be a hax path: `::hax_lib::`, /// `hax_lib::` or `` in (with `KW` in `allowlist`). pub fn expects_hax_path(allowlist: &[&str], path: &Path) -> Result> { let path_span = path.span(); let path = expect_simple_path(path) .ok_or_else(|| Error::new(path_span, "Expected a simple path, with no `<...>`."))?; Ok( match path .iter() .map(|x| x.as_str()) .collect::>() .as_slice() { [kw] | ["", "hax_lib", kw] | ["hax_lib", kw] if allowlist.contains(kw) => { Some(kw.to_string()) } _ => None, }, ) } ================================================ FILE: hax-lib/macros/src/impl_fn_decoration.rs ================================================ //! This module defines the `ImplFnDecoration` structure and utils //! around it. use crate::prelude::*; use crate::utils::*; /// Supporting structure that holds the data required by the internal /// macro `impl_fn_decoration`. 
pub struct ImplFnDecoration { pub kind: FnDecorationKind, pub phi: Expr, pub generics: Generics, pub self_ty: Type, } impl parse::Parse for ImplFnDecoration { fn parse(input: parse::ParseStream) -> Result { let parse_next = || -> Result<_> { input.parse::()?; let mut generics = input.parse::()?; input.parse::()?; generics.where_clause = input.parse::>()?; input.parse::()?; let self_ty = input.parse::()?; input.parse::()?; Ok((generics, self_ty)) }; let path = input.parse::()?; let path_span = path.span(); let kind = match expects_path_decoration(&path)? { Some(s) => match s.as_str() { "decreases" => FnDecorationKind::Decreases, "requires" => FnDecorationKind::Requires, "ensures" => { let (generics, self_ty) = parse_next()?; let ExprClosure1 { arg, body } = input.parse::()?; input.parse::()?; return Ok(ImplFnDecoration { kind: FnDecorationKind::Ensures { ret_binder: arg }, phi: body, generics, self_ty, }); } _ => unreachable!(), } None => Err(Error::new(path_span, "Expected `::hax_lib::`, `hax_lib::` or `` with `KIND` in {DECORATION_KINDS:?}"))?, }; let (generics, self_ty) = parse_next()?; let phi = input.parse::()?; input.parse::()?; Ok(ImplFnDecoration { kind, phi, generics, self_ty, }) } } ================================================ FILE: hax-lib/macros/src/implementation.rs ================================================ mod hax_paths; mod impl_fn_decoration; mod quote; mod rewrite_self; mod syn_ext; mod utils; mod prelude { pub use crate::hax_paths::*; pub use crate::syn_ext::*; pub use proc_macro as pm; pub use proc_macro_error2::*; pub use proc_macro2::*; pub use quote::*; pub use std::collections::HashSet; pub use syn::spanned::Spanned; pub use syn::{visit_mut::VisitMut, *}; pub use AttrPayload::Language as AttrHaxLang; pub use hax_lib_macros_types::*; pub type FnLike = syn::ImplItemFn; } use impl_fn_decoration::*; use prelude::*; use utils::*; /// When extracting to F*, wrap this item in `#push-options "..."` and /// `#pop-options`. 
#[proc_macro_error] #[proc_macro_attribute] pub fn fstar_options(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: TokenStream = item.into(); let lit_str = parse_macro_input!(attr as LitStr); let payload = format!(r#"#push-options "{}""#, lit_str.value()); let payload = LitStr::new(&payload, lit_str.span()); quote! { #[::hax_lib::fstar::before(#payload)] #[::hax_lib::fstar::after(r#"#pop-options"#)] #item } .into() } /// Add an invariant to a loop which deals with an index. The /// invariant cannot refer to any variable introduced within the /// loop. An invariant is a closure that takes one argument, the /// index, and returns a proposition. /// /// Note that loop invariants are unstable (this will be handled in a /// better way in the future, see /// https://github.com/hacspec/hax/issues/858) and only supported on /// specific `for` loops with specific iterators: /// /// - `for i in start..end {...}` /// - `for i in (start..end).step_by(n) {...}` /// - `for i in slice.enumerate() {...}` /// - `for i in slice.chunks_exact(n).enumerate() {...}` /// /// This function must be called on the first line of a loop body to /// be effective. Note that in the invariant expression, `forall`, /// `exists`, and `BACKEND!` (`BACKEND` can be `fstar`, `proverif`, /// `coq`...) are in scope. #[proc_macro] pub fn loop_invariant(predicate: pm::TokenStream) -> pm::TokenStream { let predicate2: TokenStream = predicate.clone().into(); let predicate_expr: syn::Expr = parse_macro_input!(predicate); let (invariant_f, predicate) = match predicate_expr { syn::Expr::Closure(_) => (quote!(hax_lib::_internal_loop_invariant), predicate2), _ => ( quote!(hax_lib::_internal_while_loop_invariant), quote!(::hax_lib::Prop::from(#predicate2)), ), }; let ts: pm::TokenStream = quote! { #[cfg(#HaxCfgOptionName)] { #invariant_f({ #HaxQuantifiers #predicate }) } } .into(); ts } /// Must be used to prove termination of while loops. 
This takes an /// expression that should be a usize that decreases at every iteration /// /// This function must be called just after `loop_invariant`, or at the first /// line of the loop if there is no invariant. #[proc_macro] pub fn loop_decreases(predicate: pm::TokenStream) -> pm::TokenStream { let predicate: TokenStream = predicate.into(); let ts: pm::TokenStream = quote! { #[cfg(#HaxCfgOptionName)] { hax_lib::_internal_loop_decreases({ #HaxQuantifiers use ::hax_lib::int::ToInt; (#predicate).to_int() }) } } .into(); ts } /// When extracting to F*, inform about what is the current /// verification status for an item. It can either be `lax` or /// `panic_free`. #[proc_macro_error] #[proc_macro_attribute] pub fn fstar_verification_status(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let action = format!("{}", parse_macro_input!(attr as Ident)); match action.as_str() { "lax" => { let item: TokenStream = item.into(); quote! { #[::hax_lib::fstar::options("--admit_smt_queries true")] #item } } "panic_free" => { let mut item = parse_macro_input!(item as FnLike); if let Some(last) = item .block .stmts .iter_mut() .rev() .find(|stmt| matches!(stmt, syn::Stmt::Expr(_, None))) .as_mut() { **last = syn::Stmt::Expr( parse_quote! { {let result = #last; ::hax_lib::fstar!("_hax_panic_freedom_admit_"); result} }, None, ); } else { item.block.stmts.push(syn::Stmt::Expr( parse_quote! {::hax_lib::fstar!("_hax_panic_freedom_admit_")}, None, )); } quote! { #item } } _ => abort_call_site!(format!("Expected `lax` or `panic_free`")), } .into() } /// Postprocess an item with a given tactic. This macro takes the tactic in /// parameter: this may be a Rust identifier or a raw snippet of F* code as a /// string literal. 
#[proc_macro_error] #[proc_macro_attribute] pub fn fstar_postprocess_with(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: TokenStream = item.into(); let payload: String = if let Ok(s) = syn::parse::(attr.clone()) { s.value() } else { let e = parse_macro_input!(attr as Expr); format!(" ${{ {} }} ", e.to_token_stream()) }; let payload = format!("[@@FStar.Tactics.postprocess_with ({payload})]"); let payload: Lit = Lit::Str(syn::LitStr::new(&payload, Span::call_site())); quote! {#[::hax_lib::fstar::before(#payload)] #item}.into() } /// Include this item in the Hax translation. This overrides any exclusion resulting of `-i` flag. #[proc_macro_error] #[proc_macro_attribute] pub fn include(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: TokenStream = item.into(); let _ = parse_macro_input!(attr as parse::Nothing); let attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: false }); quote! {#attr #item}.into() } /// Exclude this item from the Hax translation. #[proc_macro_error] #[proc_macro_attribute] pub fn exclude(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: TokenStream = item.into(); let _ = parse_macro_input!(attr as parse::Nothing); let attr = AttrPayload::ItemStatus(ItemStatus::Excluded { modeled_by: None }); quote! {#attr #item}.into() } /* TODO: no support in any backends (see #297) /// Exclude this item from the Hax translation, and replace it with a /// axiomatized model in each backends. The path of the axiomatized /// model should be given in Rust syntax. 
/// /// # Example /// /// ``` /// use hax_lib_macros::*; /// #[modeled_by(FStar::IO::debug_print_string)] /// fn f(line: String) { /// println!("{}", line) /// } /// ``` #[proc_macro_error] #[proc_macro_attribute] pub fn modeled_by(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { use quote::ToTokens; let model_path = parse_macro_input!(attr as syn::Path).to_token_stream(); let item: TokenStream = item.into(); let attr = AttrPayload::ItemStatus(ItemStatus::Excluded { modeled_by: Some(model_path.to_string()), }); quote! {#attr #item}.into() } */ /// Mark a `Proof<{STATEMENT}>`-returning function as a lemma, where /// `STATEMENT` is a `Prop` expression capturing any input /// variable. /// In the backends, this will generate a lemma with an empty proof. /// /// # Example /// /// ``` /// use hax_lib_macros::*; // #[decreases((m, n))] (TODO: see #297) /// pub fn ackermann(m: u64, n: u64) -> u64 { /// match (m, n) { /// (0, _) => n + 1, /// (_, 0) => ackermann(m - 1, 1), /// _ => ackermann(m - 1, ackermann(m, n - 1)), /// } /// } /// /// #[lemma] /// /// $`\forall n \in \mathbb{N}, \textrm{ackermann}(2, n) = 2 (n + 3) - 3`$ /// pub fn ackermann_property_m1(n: u64) -> Proof<{ ackermann(2, n) == 2 * (n + 3) - 3 }> {} /// ``` #[proc_macro_error] #[proc_macro_attribute] pub fn lemma(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let mut item: syn::ItemFn = parse_macro_input!(item as ItemFn); use syn::{GenericArgument, PathArguments, ReturnType, spanned::Spanned}; fn add_allow_unused_variables_to_args(func: &mut syn::ItemFn) { let attr: syn::Attribute = parse_quote!(#[allow(unused_variables)]); for input in &mut func.sig.inputs { if let FnArg::Typed(pat_type) = input { pat_type.attrs.push(attr.clone()); } } } /// Parses a `syn::Type` of the shape `Proof<{FORMULA}>`. 
fn parse_proof_type(r#type: syn::Type) -> Option { let syn::Type::Path(syn::TypePath { qself: None, path: syn::Path { leading_colon: None, segments, }, }) = r#type else { return None; }; let ps = (segments.len() == 1).then_some(()).and(segments.first())?; (ps.ident == "Proof").then_some(())?; let PathArguments::AngleBracketed(args) = &ps.arguments else { None? }; let args = args.args.clone(); let GenericArgument::Const(e) = (args.len() == 1).then_some(()).and(args.first())? else { None? }; Some(e.clone()) } let _ = parse_macro_input!(attr as parse::Nothing); let attr = &AttrPayload::Lemma; add_allow_unused_variables_to_args(&mut item); if let ReturnType::Type(_, r#type) = &item.sig.output { if let Some(ensures_clause) = parse_proof_type(*r#type.clone()) { use AttrPayload::NeverErased; item.sig.output = ReturnType::Default; return ensures( quote! {|_| #ensures_clause}.into(), quote! { #attr #NeverErased #item }.into(), ); } } abort!( item.sig.output.span(), "A lemma is expected to return a `Proof<{STATEMENT}>`, where {STATEMENT} is a `Prop` expression." ) } /// Provide a measure for a function: this measure will be used once /// extracted in a backend for checking termination. The expression /// that decreases can be of any type. (TODO: this is probably as it /// is true only for F*, see #297) /// /// # Example /// /// ``` /// use hax_lib_macros::*; /// #[decreases((m, n))] /// pub fn ackermann(m: u64, n: u64) -> u64 { /// match (m, n) { /// (0, _) => n + 1, /// (_, 0) => ackermann(m - 1, 1), /// _ => ackermann(m - 1, ackermann(m, n - 1)), /// } /// } /// ``` #[proc_macro_error] #[proc_macro_attribute] pub fn decreases(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let phi: syn::Expr = parse_macro_input!(attr); let item: FnLike = parse_macro_input!(item); let (requires, attr) = make_fn_decoration( phi, item.sig.clone(), FnDecorationKind::Decreases, None, None, ); quote! {#requires #attr #item}.into() } /// Allows to add SMT patterns to a lemma. 
/// For more information about SMT patterns, please take a look here: https://fstar-lang.org/tutorial/book/under_the_hood/uth_smt.html#designing-a-library-with-smt-patterns.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn fstar_smt_pat(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let phi: syn::Expr = parse_macro_input!(attr);
    let item: FnLike = parse_macro_input!(item);
    // Reuse the generic function-decoration machinery with the SMT-pattern kind.
    let (requires, attr) =
        make_fn_decoration(phi, item.sig.clone(), FnDecorationKind::SMTPat, None, None);
    quote! {#requires #attr #item}.into()
}

/// Add a logical precondition to a function.
// Note you can use the `forall` and `exists` operators. (TODO: commented out for now, see #297)
/// In the case of a function that has one or more `&mut` inputs, in
/// the `ensures` clause, you can refer to such an `&mut` input `x` as
/// `x` for its "past" value and `future(x)` for its "future" value.
///
/// You can use the (unqualified) macro `fstar!` (`BACKEND!` for any
/// backend `BACKEND`) to inline F* (or Coq, ProVerif, etc.) code in
/// the precondition, e.g. `fstar!("true")`.
///
/// # Example
///
/// ```
/// use hax_lib_macros::*;
/// #[requires(x.len() == y.len())]
// #[requires(x.len() == y.len() && forall(|i: usize| i >= x.len() || y[i] > 0))] (TODO: commented out for now, see #297)
/// pub fn div_pairwise(x: Vec<u64>, y: Vec<u64>) -> Vec<u64> {
///     x.iter()
///         .copied()
///         .zip(y.iter().copied())
///         .map(|(x, y)| x / y)
///         .collect()
/// }
/// ```
#[proc_macro_error]
#[proc_macro_attribute]
pub fn requires(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let phi: syn::Expr = parse_macro_input!(attr);
    let item: FnLike = parse_macro_input!(item);
    let (requires, attr) = make_fn_decoration(
        phi.clone(),
        item.sig.clone(),
        FnDecorationKind::Requires,
        None,
        None,
    );
    // Debug variant: check the precondition at runtime with `debug_assert!`.
    // (Currently unused: see the commented-out cfg attributes below.)
    let mut item_with_debug = item.clone();
    item_with_debug
        .block
        .stmts
        .insert(0, parse_quote! {debug_assert!(#phi);});
    quote! {
        #requires #attr
        // TODO: disable `assert!`s for now (see #297)
        #item
        // #[cfg( all(not(#HaxCfgOptionName), debug_assertions )) ] #item_with_debug
        // #[cfg(not(all(not(#HaxCfgOptionName), debug_assertions )))] #item
    }
    .into()
}

/// Add a logical postcondition to a function. Note you can use the
/// `forall` and `exists` operators.
///
/// You can use the (unqualified) macro `fstar!` (`BACKEND!` for any
/// backend `BACKEND`) to inline F* (or Coq, ProVerif, etc.) code in
/// the postcondition, e.g. `fstar!("true")`.
///
/// # Example
///
/// ```
/// use hax_lib_macros::*;
/// #[ensures(|result| result == x * 2)]
/// pub fn twice(x: u64) -> u64 {
///     x + x
/// }
/// ```
#[proc_macro_error]
#[proc_macro_attribute]
pub fn ensures(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    // The attribute payload is a one-argument closure `|result| <phi>`.
    let ExprClosure1 {
        arg: ret_binder,
        body: phi,
    } = parse_macro_input!(attr);
    let item: FnLike = parse_macro_input!(item);
    let kind = FnDecorationKind::Ensures {
        ret_binder: ret_binder.clone(),
    };
    let (ensures, attr) = make_fn_decoration(phi.clone(), item.sig.clone(), kind, None, None);
    // Debug variant: bind the result, check the postcondition at runtime,
    // then return it. (Currently unused: see commented-out cfgs below.)
    let mut item_with_debug = item.clone();
    let body = item.block.clone();
    item_with_debug.block.stmts =
        parse_quote!(let #ret_binder = #body; debug_assert!(#phi); #ret_binder);
    quote! {
        #ensures #attr
        // TODO: disable `assert!`s for now (see #297)
        #item
        // #[cfg( all(not(#HaxCfgOptionName), debug_assertions )) ] #item_with_debug
        // #[cfg(not(all(not(#HaxCfgOptionName), debug_assertions )))] #item
    }
    .into()
}

// Custom keywords recognized inside hax attribute payloads.
mod kw {
    syn::custom_keyword!(hax_lib);
    syn::custom_keyword!(decreases);
    syn::custom_keyword!(ensures);
    syn::custom_keyword!(requires);
    syn::custom_keyword!(refine);
}

/// Internal macro for dealing with function decorations
/// (`#[decreases(...)]`, `#[ensures(...)]`, `#[requires(...)]`) on
/// `fn` items within an `impl` block.
/// There is special handling since
/// such functions might have a `self` argument: in such cases, we
/// rewrite function decorations as `#[impl_fn_decoration(<kind>,
/// <generics>, <where clause>, <self type>, <payload>)]`.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn impl_fn_decoration(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let ImplFnDecoration {
        kind,
        phi,
        generics,
        self_ty,
    } = parse_macro_input!(attr);
    let mut item: FnLike = parse_macro_input!(item);
    // Build the generated decoration item and the attribute that links the
    // original fn to it.
    let (decoration, attr) =
        make_fn_decoration(phi, item.sig.clone(), kind, Some(generics), Some(self_ty));
    // The decoration is prepended verbatim as the first statement of the body.
    let decoration = Stmt::Item(Item::Verbatim(decoration));
    item.block.stmts.insert(0, decoration);
    quote! {#attr #item}.into()
}

/// Counterpart of `impl_fn_decoration` for `fn` items inside `trait` blocks.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn trait_fn_decoration(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let ImplFnDecoration {
        kind,
        phi,
        generics,
        self_ty,
    } = parse_macro_input!(attr);
    let mut item: syn::TraitItemFn = parse_macro_input!(item);
    let (decoration, attr) =
        make_fn_decoration(phi, item.sig.clone(), kind, Some(generics), Some(self_ty));
    let decoration = Stmt::Item(Item::Verbatim(decoration));
    // A trait fn may have no body to host the decoration statement, so the
    // decoration is carried in a `[(); {#decoration 0}]:` where-clause
    // predicate instead. NOTE(review): presumably the hax engine recognizes
    // and strips this synthetic predicate — confirm against the frontend.
    item.sig
        .generics
        .where_clause
        .get_or_insert(parse_quote! {where})
        .predicates
        .push(parse_quote! {[(); {#decoration 0}]:});
    quote! {#attr #item}.into()
}

/// Enable the following attributes in the annotated item and sub-items.
///
/// ### `refine` (on a field in a struct)
/// Refine a type with a logical formula.
///
/// ### `order` (on a field in a struct or an enum)
/// Reorders a field in the extracted code.
///
/// Rust fields order matters for bit-level representation. Similarly, in some
/// situations, fields order matters in the backends: for instance in F*, one
/// may refine a field with a formula referring to a later field.
///
/// Those two orders may conflict. Adding `#[hax_lib::order(n)]` on a field will
/// override its order at extraction time.
///
/// By default, the order of a field is its index, e.g.
the first field has /// order 0, the i-th field has order i+1. /// /// ### `decreases`, `ensures` and `requires` (on a `fn` in an `impl`) /// `decreases`, `ensures`, `requires`: behave exactly as documented above on /// the proc attributes of the same name. /// /// # Example /// /// ``` /// #[hax_lib_macros::attributes] /// mod foo { /// pub struct Hello { /// pub x: u32, /// #[refine(y > 3)] /// pub y: u32, /// #[refine(y + x + z > 3)] /// pub z: u32, /// } /// impl Hello { /// fn sum(&self) -> u32 { /// self.x + self.y + self.z /// } /// #[ensures(|result| result - n == self.sum())] /// fn plus(self, n: u32) -> u32 { /// self.sum() + n /// } /// } /// } /// ``` #[proc_macro_error] #[proc_macro_attribute] pub fn attributes(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: Item = parse_macro_input!(item); #[derive(Default)] struct AttrVisitor { extra_items: Vec, } use syn::visit_mut; impl VisitMut for AttrVisitor { fn visit_item_trait_mut(&mut self, item: &mut ItemTrait) { let span = item.span(); for ti in item.items.iter_mut() { if let TraitItem::Fn(fun) = ti { for attr in &mut fun.attrs { let Meta::List(ml) = attr.meta.clone() else { continue; }; let Ok(Some(decoration)) = expects_path_decoration(&ml.path) else { continue; }; let decoration = syn::Ident::new(&decoration, ml.path.span()); let mut generics = item.generics.clone(); let predicate = WherePredicate::Type(PredicateType { lifetimes: None, bounded_ty: parse_quote! {Self_}, colon_token: Token![:](span), bounds: item.supertraits.clone(), }); let mut where_clause = generics .where_clause .clone() .unwrap_or(parse_quote! {where}); where_clause.predicates.push(predicate.clone()); generics.where_clause = Some(where_clause.clone()); let self_ty: Type = parse_quote! {Self_}; let tokens = ml.tokens.clone(); let generics = merge_generics(parse_quote! {}, generics); let ImplFnDecoration { kind, phi, self_ty, .. } = parse_quote! 
{#decoration, #generics, where, #self_ty, #tokens}; let (decoration, relation_attr) = make_fn_decoration( phi, fun.sig.clone(), kind, Some(generics), Some(self_ty), ); *attr = parse_quote! {#relation_attr}; self.extra_items.push(decoration); } } } visit_mut::visit_item_trait_mut(self, item); } fn visit_type_mut(&mut self, _type: &mut Type) {} fn visit_item_impl_mut(&mut self, item: &mut ItemImpl) { for ii in item.items.iter_mut() { if let ImplItem::Fn(fun) = ii { for attr in fun.attrs.iter_mut() { if let Meta::List(ml) = &mut attr.meta { let Ok(Some(decoration)) = expects_path_decoration(&ml.path) else { continue; }; let decoration = syn::Ident::new(&decoration, ml.path.span()); let tokens = ml.tokens.clone(); let (generics, self_ty) = (&item.generics, &item.self_ty); let where_clause = &generics.where_clause; ml.tokens = quote! {#decoration, #generics, #where_clause, #self_ty, #tokens}; ml.path = parse_quote! {::hax_lib::impl_fn_decoration}; } } } } visit_mut::visit_item_impl_mut(self, item); } fn visit_fields_named_mut(&mut self, fields_named: &mut FieldsNamed) { visit_mut::visit_fields_named_mut(self, fields_named); fn handle_reorder_attribute(attrs: &mut [Attribute], errors: &mut Vec) { let Some((attr, order)) = attrs.iter_mut().find_map(|attr| { if let Ok(Some(_)) = expects_order(attr.path()) { let lit: LitInt = attr.parse_args().ok()?; Some((attr, lit)) } else { None } }) else { return; }; let Ok(n) = order.base10_parse() else { errors.push(parse_quote!{const _: () = {compile_error!("Expected a (base 10) i32 literal.")};}); return; }; let payload = AttrPayload::Order(n); *attr = parse_quote!(#payload); } for field in &mut fields_named.named { handle_reorder_attribute(&mut field.attrs, &mut self.extra_items); } } fn visit_item_mut(&mut self, item: &mut Item) { visit_mut::visit_item_mut(self, item); let mut extra: Vec = vec![]; match item { Item::Struct(s) => { let only_one_field = s.fields.len() == 1; let idents: Vec<_> = s .fields .iter() .enumerate() 
.map(|(i, field)| { let ident = field.ident.clone().unwrap_or(if only_one_field { format_ident!("x") } else { format_ident!("x{}", i) }); (ident, field.ty.clone()) }) .collect(); for (i, field) in s.fields.iter_mut().enumerate() { let prev = &idents[0..=i]; let refine: Option<(&mut Attribute, Expr)> = field.attrs.iter_mut().find_map(|attr| { if let Ok(Some(_)) = expects_refine(attr.path()) { let payload = attr.parse_args().ok()?; Some((attr, payload)) } else { None } }); if let Some((attr, refine)) = refine { let binders: TokenStream = prev .iter() .map(|(name, ty)| quote! {#name: #ty, }) .collect(); let uid = ItemUid::fresh(); let uid_attr = AttrPayload::Uid(uid.clone()); let assoc_attr = AttrPayload::AssociatedItem { role: AssociationRole::Refine, item: uid, }; *attr = syn::parse_quote! { #assoc_attr }; let status_attr = &AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true }); extra.push(syn::parse_quote! { #[cfg(#HaxCfgOptionName)] #status_attr const _: () = { #uid_attr #status_attr fn refinement(#binders) -> ::hax_lib::Prop { ::hax_lib::Prop::from(#refine) } }; }) } } } _ => (), } let extra: TokenStream = extra.iter().map(|extra| quote! {#extra}).collect(); *item = Item::Verbatim(quote! {#extra #item}); } } let mut v = AttrVisitor::default(); let mut item = item; v.visit_item_mut(&mut item); let extra_items = v.extra_items; quote! { #item #(#extra_items)* }.into() } /// Mark an item opaque: the extraction will assume the /// type without revealing its definition. #[proc_macro_error] #[proc_macro_attribute] #[deprecated(note = "Please use 'opaque' instead")] pub fn opaque_type(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { opaque(attr, item) } /// Mark an item opaque: the extraction will assume the /// type without revealing its definition. 
#[proc_macro_error]
#[proc_macro_attribute]
pub fn opaque(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    // Validate that the input is an item, then prefix it with the `Erased`
    // payload attribute so the extraction assumes it without a definition.
    let parsed = parse_macro_input!(item as Item);
    let marker = AttrPayload::Erased;
    quote! {#marker #parsed}.into()
}

/// Mark an item transparent: the extraction will not
/// make it opaque regardless of the `-i` flag default.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn transparent(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    // Prefix the item with the `NeverErased` payload attribute.
    let parsed = parse_macro_input!(item as Item);
    let marker = AttrPayload::NeverErased;
    quote! {#marker #parsed}.into()
}

/// A marker indicating a `fn` as a ProVerif process read.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn process_read(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    // Only functions may carry this marker: parse as `ItemFn` to enforce it.
    let function = parse_macro_input!(item as ItemFn);
    let marker = AttrPayload::ProcessRead;
    quote! {#marker #function}.into()
}

/// A marker indicating a `fn` as a ProVerif process write.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn process_write(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let function = parse_macro_input!(item as ItemFn);
    let marker = AttrPayload::ProcessWrite;
    quote! {#marker #function}.into()
}

/// A marker indicating a `fn` as a ProVerif process initialization.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn process_init(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let function = parse_macro_input!(item as ItemFn);
    let marker = AttrPayload::ProcessInit;
    quote! {#marker #function}.into()
}

/// A marker indicating an `enum` as describing the protocol messages.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn protocol_messages(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    // Only enums may carry this marker: parse as `ItemEnum` to enforce it.
    let message_enum = parse_macro_input!(item as ItemEnum);
    let marker = AttrPayload::ProtocolMessages;
    quote! {#marker #message_enum}.into()
}

/// A marker indicating a `fn` should be automatically translated to a ProVerif constructor.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn pv_constructor(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let attr = AttrPayload::PVConstructor;
    quote! {#attr #item}.into()
}

/// A marker indicating a `fn` requires manual modelling in ProVerif.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn pv_handwritten(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let attr = AttrPayload::PVHandwritten;
    quote! {#attr #item}.into()
}

/// Create a mathematical integer. This macro expects a Rust integer
/// literal without suffix.
///
/// ## Examples:
/// - `int!(0x101010)`
/// - `int!(42)`
/// - `int!(0o52)`
/// - `int!(0x2A)`
#[proc_macro_error]
#[proc_macro]
pub fn int(payload: pm::TokenStream) -> pm::TokenStream {
    let n: LitInt = parse_macro_input!(payload);
    let suffix = n.suffix();
    // A suffix (e.g. `42u8`) would pin a machine type, which makes no sense
    // for a mathematical integer: reject it.
    if !suffix.is_empty() {
        abort_call_site!("The literal suffix `{suffix}` was unexpected.")
    }
    let digits = n.base10_digits();
    quote! {::hax_lib::int::Int::_unsafe_from_str(#digits)}.into()
}

/// This macro inserts a verbatim Lean proof into the extracted code.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn lean_proof(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let payload = parse_macro_input!(payload as LitStr).value();
    let attr = AttrPayload::Proof(payload);
    quote! {#attr #item}.into()
}

/// This macro inserts a verbatim Lean proof showing that the `requires`-condition is panic-free.
/// The proof is inserted into the `pureRequires` field of the Lean spec.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn lean_pure_requires_proof(
    payload: pm::TokenStream,
    item: pm::TokenStream,
) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let payload = parse_macro_input!(payload as LitStr).value();
    let attr = AttrPayload::PureRequiresProof(payload);
    quote! {#attr #item}.into()
}

/// This macro inserts a verbatim Lean proof showing that the `ensures`-condition is panic-free.
/// The proof is inserted into the `pureEnsures` field of the Lean spec.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn lean_pure_ensures_proof(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let payload = parse_macro_input!(payload as LitStr).value();
    let attr = AttrPayload::PureEnsuresProof(payload);
    quote! {#attr #item}.into()
}

/// Use the proof method `grind`. This influences the tactic and spec set used by Lean.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn lean_proof_method_grind(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let attr = AttrPayload::ProofMethod(hax_lib_macros_types::ProofMethod::Grind);
    quote! {#attr #item}.into()
}

/// Use the proof method `bv_decide`. This influences the tactic and spec set used by Lean.
#[proc_macro_error]
#[proc_macro_attribute]
pub fn lean_proof_method_bv_decide(
    _attr: pm::TokenStream,
    item: pm::TokenStream,
) -> pm::TokenStream {
    let item: ItemFn = parse_macro_input!(item);
    let attr = AttrPayload::ProofMethod(hax_lib_macros_types::ProofMethod::BvDecide);
    quote! {#attr #item}.into()
}

// Generates one item-level quoting attribute macro (e.g. `fstar_before`,
// `fstar_after`) for the given backend, position and cfg flag.
macro_rules! make_quoting_item_proc_macro {
    ($backend:ident, $macro_name:ident, $position:expr, $cfg_name:ident) => {
        #[doc = concat!("This macro inlines verbatim ", stringify!($backend)," code before a Rust item.")]
        ///
        /// This macro takes a string literal containing backend
        /// code. Just as backend expression macros, this literal can
        /// contain dollar-prefixed Rust names.
        ///
        /// Note: when targeting F*, you can prepend a first
        /// comma-separated argument: `interface`, `impl` or
        /// `both`. This controls where the code will appear: in the
        /// `fst` or `fsti` files or both.
        #[proc_macro_error]
        #[proc_macro_attribute]
        pub fn $macro_name(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
            let mut fstar_options = None;
            let item: TokenStream = item.into();
            let payload = {
                let mut tokens = payload.into_iter().peekable();
                // Optional leading `interface`/`impl`/`both` ident followed by
                // a comma selects the F* destination file(s).
                if let Some(pm::TokenTree::Ident(ident)) = tokens.peek() {
                    let ident_str = format!("{}", ident);
                    fstar_options = Some(ItemQuoteFStarOpts {
                        intf: ident_str == "interface" || ident_str == "both",
                        r#impl: ident_str == "impl" || ident_str == "both",
                    });
                    if !matches!(ident_str.as_str(), "impl" | "both" | "interface") {
                        proc_macro_error2::abort!(
                            ident.span(),
                            "Expected `impl`, `both` or `interface`"
                        );
                    }
                    // Consume the ident
                    let _ = tokens.next();
                    // Expect a comma, fail otherwise
                    let comma = pm::TokenStream::from_iter(tokens.next().into_iter());
                    let _: syn::token::Comma = parse_macro_input!(comma);
                }
                pm::TokenStream::from_iter(tokens)
            };
            let ts: TokenStream = quote::item(
                ItemQuote {
                    position: $position,
                    fstar_options,
                },
                quote! {#[cfg($cfg_name)]},
                payload,
                quote! {#item}.into(),
            )
            .into();
            ts.into()
        }
    };
}

// Generates the full family of quoting macros (`<backend>_expr`,
// `<backend>_prop_expr`, `<backend>_unsafe_expr`, `<backend>_before/after`,
// `<backend>_replace`, `<backend>_replace_body`) for each backend.
macro_rules! make_quoting_proc_macro {
    ($backend:ident) => {
        #[doc = concat!("Embed ", stringify!($backend), " expression inside a Rust expression. This macro takes only one argument: some raw ", stringify!($backend), " code as a string literal.")]
        ///
        /// While it is possible to directly write raw backend code,
        /// sometimes it can be inconvenient. For example, referencing
        /// Rust names can be a bit cumbersome: for example, the name
        /// `my_crate::my_module::CONSTANT` might be translated
        /// differently in a backend (e.g. in the F* backend, it will
        /// probably be `My_crate.My_module.v_CONSTANT`).
        ///
        /// To facilitate this, you can write Rust names directly,
        /// using the prefix `$`: `f $my_crate::my_module__CONSTANT + 3`
        /// will be replaced with `f My_crate.My_module.v_CONSTANT + 3`
        /// in the F* backend for instance.
        /// If you want to refer to the Rust constructor
        /// `Enum::Variant`, you should write `$$Enum::Variant` (note
        /// the double dollar).
        /// If the name refers to something polymorphic, you need to
        /// signal it by adding _any_ type informations,
        /// e.g. `${my_module::function<()>}`. The curly braces are
        /// needed for such more complex expressions.
        /// You can also write Rust patterns with the `$?{SYNTAX}`
        /// syntax, where `SYNTAX` is a Rust pattern. The syntax
        /// `${EXPR}` also allows any Rust expressions
        /// `EXPR` to be embedded.
        /// Types can be refered to with the syntax `$:{TYPE}`.
        #[proc_macro]
        pub fn ${concat($backend, _expr)}(payload: pm::TokenStream) -> pm::TokenStream {
            let ts: TokenStream = quote::expression(quote::InlineExprType::Unit, payload).into();
            // The inlined code only exists when extracting to this backend.
            quote!{{
                #[cfg(${concat(hax_backend_, $backend)})]
                {
                    #ts
                }
            }}.into()
        }

        #[doc = concat!("The `Prop` version of `", stringify!($backend), "_expr`.")]
        #[proc_macro]
        pub fn ${concat($backend, _prop_expr)}(payload: pm::TokenStream) -> pm::TokenStream {
            let ts: TokenStream = quote::expression(quote::InlineExprType::Prop, payload).into();
            // Outside this backend, the proposition degrades to `true`.
            quote!{{
                #[cfg(${concat(hax_backend_, $backend)})]
                {
                    #ts
                }
                #[cfg(not(${concat(hax_backend_, $backend)}))]
                {
                    ::hax_lib::Prop::from_bool(true)
                }
            }}.into()
        }

        #[doc = concat!("The unsafe (because polymorphic: even computationally relevant code can be inlined!)
version of `", stringify!($backend), "_expr`.")]
        #[proc_macro]
        #[doc(hidden)]
        pub fn ${concat($backend, _unsafe_expr)}(payload: pm::TokenStream) -> pm::TokenStream {
            let ts: TokenStream = quote::expression(quote::InlineExprType::Anything, payload).into();
            quote!{{
                #[cfg(${concat(hax_backend_, $backend)})]
                {
                    #ts
                }
            }}.into()
        }

        make_quoting_item_proc_macro!($backend, ${concat($backend, _before)}, ItemQuotePosition::Before, ${concat(hax_backend_, $backend)});
        make_quoting_item_proc_macro!($backend, ${concat($backend, _after)}, ItemQuotePosition::After, ${concat(hax_backend_, $backend)});

        #[doc = concat!("Replaces a Rust item with some verbatim ", stringify!($backend)," code.")]
        #[proc_macro_error]
        #[proc_macro_attribute]
        pub fn ${concat($backend, _replace)}(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
            let item: TokenStream = item.into();
            let payload: TokenStream = payload.into();
            // Under this backend's cfg the original item is kept but skipped
            // late, with the verbatim payload emitted before it; otherwise the
            // item stays untouched.
            let attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });
            quote! {
                #[cfg(${concat(hax_backend_, $backend)})]
                #[::hax_lib::$backend::before(#payload)]
                #attr
                #item

                #[cfg(not(${concat(hax_backend_, $backend)}))]
                #item
            }
            .into()
        }

        #[doc = concat!("Replaces the body of a Rust function with some verbatim ", stringify!($backend)," code.")]
        #[proc_macro_error]
        #[proc_macro_attribute]
        pub fn ${concat($backend, _replace_body)}(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {
            let payload: TokenStream = payload.into();
            let item: ItemFn = parse_macro_input!(item);
            // Clone the fn and swap its body for an inline backend expression;
            // the original fn remains for all other configurations.
            let mut hax_item = item.clone();
            *hax_item.block.as_mut() = parse_quote!{
                {
                    ::hax_lib::$backend::unsafe_expr!(#payload)
                }
            };
            quote!{
                #[cfg(${concat(hax_backend_, $backend)})]
                #hax_item

                #[cfg(not(${concat(hax_backend_, $backend)}))]
                #item
            }.into()
        }
    };
    ($($backend:ident)*) => {
        $(make_quoting_proc_macro!($backend);)*
    }
}

make_quoting_proc_macro!(fstar coq proverif lean);

/// Marks a newtype `struct RefinedT(T);` as a refinement type.
The /// struct should have exactly one unnamed private field. /// /// This macro takes one argument: a `Prop` proposition that refines /// values of type `SomeType`. /// /// For example, the following type defines bounded `u64` integers. /// /// ``` /// #[hax_lib::refinement_type(|x| x >= MIN && x <= MAX)] /// pub struct BoundedU64(u64); /// ``` /// /// This macro will generate an implementation of the [`Deref`] trait /// and of the [`hax_lib::Refinement`] type. Those two traits are /// the only interface to this newtype: one is allowed only to /// construct or destruct refined type via those smart constructors /// and destructors, ensuring the abstraction. /// /// A refinement of a type `T` with a formula `f` can be seen as a box /// that contains a value of type `T` and a proof that this value /// satisfies the formula `f`. /// /// In debug mode, the refinement will be checked at run-time. This /// requires the base type `T` to implement `Clone`. Pass a first /// parameter `no_debug_runtime_check` to disable this behavior. /// /// When extracted via hax, this is interpreted in the backend as a /// refinement type: the use of such a type yields static proof /// obligations. #[proc_macro_error] #[proc_macro_attribute] pub fn refinement_type(mut attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let mut item = parse_macro_input!(item as syn::ItemStruct); let syn::Fields::Unnamed(fields) = &item.fields else { proc_macro_error2::abort!( item.generics.span(), "Expected a newtype (a struct with one unnamed field), got one or more named field" ); }; let paren_token = fields.paren_token; let fields = fields.unnamed.iter().collect::>(); let [field] = &fields[..] 
else { proc_macro_error2::abort!( item.generics.span(), "Expected a newtype (a struct with one unnamed field), got {} fields", fields.len() ); }; if !matches!(field.vis, syn::Visibility::Inherited) { proc_macro_error2::abort!(field.vis.span(), "This field was expected to be private"); } let no_debug_assert = { let mut tokens = attr.clone().into_iter(); if let (Some(pm::TokenTree::Ident(ident)), Some(pm::TokenTree::Punct(comma))) = (tokens.next(), tokens.next()) { if ident.to_string() != "no_debug_runtime_check" { proc_macro_error2::abort!(ident.span(), "Expected 'no_debug_runtime_check'"); } if comma.as_char() != ',' { proc_macro_error2::abort!(ident.span(), "Expected a comma"); } attr = pm::TokenStream::from_iter(tokens); true } else { false } }; let ExprClosure1 { arg: ret_binder, body: phi, } = parse_macro_input!(attr); let kind = FnDecorationKind::Ensures { ret_binder: ret_binder.clone(), }; let sig = syn::Signature { constness: None, asyncness: None, unsafety: None, abi: None, variadic: None, fn_token: syn::Token![fn](item.span()), ident: parse_quote! {dummy}, generics: item.generics.clone(), paren_token, inputs: syn::punctuated::Punctuated::new(), output: syn::ReturnType::Type(parse_quote! {->}, Box::new(field.ty.clone())), }; let ident = &item.ident; let generics = &item.generics; let vis = item.vis.clone(); let generics_args: syn::punctuated::Punctuated<_, syn::token::Comma> = item .generics .params .iter() .map(|g| match g { syn::GenericParam::Lifetime(p) => { let i = &p.lifetime; quote! { #i } } syn::GenericParam::Type(p) => { let i = &p.ident; quote! { #i } } syn::GenericParam::Const(p) => { let i = &p.ident; quote! { #i } } }) .collect(); let inner_ty = &field.ty; let (refinement_item, refinement_attr) = make_fn_decoration(phi.clone(), sig, kind, None, None); let module_ident = syn::Ident::new( &format!("hax__autogenerated_refinement__{}", ident), ident.span(), ); item.vis = parse_quote! {pub}; let debug_assert = no_debug_assert.then_some(quote! 
{::core::debug_assert!(Self::invariant(x.clone()));}); let newtype_as_ref_attr = AttrPayload::NewtypeAsRefinement; quote! { #[allow(non_snake_case)] mod #module_ident { #[allow(unused_imports)] use super::*; #refinement_item #newtype_as_ref_attr #refinement_attr #item #[::hax_lib::exclude] impl #generics ::hax_lib::Refinement for #ident <#generics_args> { type InnerType = #inner_ty; fn new(x: Self::InnerType) -> Self { #debug_assert Self(x) } fn get(self) -> Self::InnerType { self.0 } fn get_mut(&mut self) -> &mut Self::InnerType { &mut self.0 } fn invariant(#ret_binder: Self::InnerType) -> ::hax_lib::Prop { ::hax_lib::Prop::from(#phi) } } #[::hax_lib::exclude] impl #generics ::std::ops::Deref for #ident <#generics_args> { type Target = #inner_ty; fn deref(&self) -> &Self::Target { &self.0 } } #[::hax_lib::exclude] impl #generics ::hax_lib::RefineAs<#ident <#generics_args>> for #inner_ty { fn into_checked(self) -> #ident <#generics_args> { use ::hax_lib::Refinement; #ident::new(self) } } } #vis use #module_ident::#ident; } .into() } ================================================ FILE: hax-lib/macros/src/lib.rs ================================================ // Proc-macros must "reside in the root of the crate": whence the use // of `std::include!` instead of proper module declaration. #![cfg_attr(hax, feature(macro_metavar_expr_concat))] #[cfg(hax)] std::include!("implementation.rs"); #[cfg(not(hax))] std::include!("dummy.rs"); ================================================ FILE: hax-lib/macros/src/quote.rs ================================================ //! This module provides the logic for the quotation macros, which //! allow for quoting F*/Coq/... code directly from Rust. //! //! In a F*/Coq/... quote, one can write antiquotations, that is, //! embedded Rust snippets. The syntax is `$`. The //! payload `` should be a Rust path, or a group with //! arbitrary contents `{...contents...}`. //! //! The `` describes the kind of the antiquotation: //! 
//! - empty prefix, the antiquotation is an expression;
//! - `?`, the antiquotation is a pattern;
//! - `$`, the antiquotation is a constructor name;
//! - `:`, the antiquotation is a type.

use crate::prelude::*;

/// Marker that indicates a place where an antiquotation will be inserted
const SPLIT_MARK: &str = "SPLIT_QUOTE";

/// The different kinds of antiquotations
enum AntiquoteKind {
    Expr,
    Constructor,
    Pat,
    Ty,
}

impl ToTokens for AntiquoteKind {
    // Renders the kind as the name of the binding introduced in the generated
    // Rust block (`_expr`, `_constructor`, `_pat`, `_ty`).
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend([match self {
            Self::Expr => quote! {_expr},
            Self::Constructor => quote! {_constructor},
            Self::Pat => quote! {_pat},
            Self::Ty => quote! {_ty},
        }])
    }
}

/// An antiquotation
struct Antiquote {
    // Raw tokens of the Rust snippet embedded in the quote.
    ts: pm::TokenStream,
    // What the snippet denotes: expression, constructor, pattern or type.
    kind: AntiquoteKind,
}

impl ToTokens for Antiquote {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let ts = TokenStream::from(self.ts.clone());
        // Wraps a pattern so that it can sit in an expression position:
        // `match None { Some(<pat>) => (), _ => () }` lets rustc resolve and
        // typecheck the pattern without ever executing it.
        fn wrap_pattern(pat: TokenStream) -> TokenStream {
            quote! {{#[allow(unreachable_code)] match None { Some(#pat) => (), _ => () } }}
        }
        let ts = match self.kind {
            AntiquoteKind::Expr => ts,
            // A constructor is checked as the pattern `Ctor {..}`.
            AntiquoteKind::Constructor => wrap_pattern(quote! {#ts {..}}),
            AntiquoteKind::Pat => wrap_pattern(ts),
            // A type is checked via a typed `None::<T>` expression.
            AntiquoteKind::Ty => quote! {None::<#ts>},
        };
        tokens.extend([ts])
    }
}

/// Extract antiquotations (`$[?][$][:]...`, `$[?][$][:]{...}`) and parses them.
fn process_string(s: &str) -> std::result::Result<(String, Vec<Antiquote>), String> {
    let mut chars = s.chars().peekable();
    let mut antiquotations = vec![];
    let mut output = String::new();
    while let Some(ch) = chars.next() {
        match ch {
            '$' => {
                let mut s = String::new();
                // An optional one-character prefix selects the antiquotation
                // kind; the default (no prefix) is an expression.
                let mut kind = AntiquoteKind::Expr;
                if let Some(prefix) = chars.next_if(|ch| *ch == '?' || *ch == '$' || *ch == ':') {
                    kind = match prefix {
                        '?' => AntiquoteKind::Pat,
                        '$' => AntiquoteKind::Constructor,
                        ':' => AntiquoteKind::Ty,
                        _ => unreachable!(),
                    };
                }
                // If the first character is `{`, we parse the block
                if let Some('{') = chars.peek() {
                    chars.next(); // Consume `{`
                    // Track brace nesting; the antiquotation ends when the
                    // opening `{` is closed (level drops below zero).
                    let mut level = 0;
                    for ch in chars.by_ref() {
                        level += match ch {
                            '{' => 1,
                            '}' => -1,
                            _ => 0,
                        };
                        if level < 0 {
                            break;
                        }
                        s.push(ch);
                    }
                } else {
                    // Bare antiquotation: consume a path-like run of characters
                    // up to the first delimiter.
                    while let Some(ch) = chars.next_if(|ch| {
                        !matches!(ch, ' ' | '\t' | '\n' | '(' | '{' | ')' | ';' | '!' | '?')
                    }) {
                        s.push(ch)
                    }
                }
                if s.is_empty() {
                    return Err(format!(
                        "Empty antiquotation just before `{}`",
                        chars.collect::<String>()
                    ));
                }
                // Leave a marker in the backend string; the consumer re-associates
                // markers with the parsed antiquotations by order.
                output += SPLIT_MARK;
                // See https://github.com/rust-lang/rust/issues/58736
                let ts: std::result::Result<TokenStream, String> = syn::parse_str(&s)
                    .map_err(|err| format!("Could not parse antiquotation `{s}`: got error {err}"));
                if let Err(message) = &ts {
                    // If we don't panic, the error won't show up,
                    // this is because `parse_str` is not only
                    // panicking, but also makes rustc to exit earlier.
                    panic!("{message}");
                }
                let ts: pm::TokenStream = ts?.into();
                antiquotations.push(Antiquote { ts, kind })
            }
            _ => output.push(ch),
        }
    }
    Ok((output, antiquotations))
}

/// Produces the companion items implementing an item quote: the original item,
/// plus an anonymous `const` holding a function whose body is the quote
/// expression, tied together with a fresh uid.
pub(super) fn item(
    kind: ItemQuote,
    attribute_to_inject: TokenStream,
    payload: pm::TokenStream,
    item: pm::TokenStream,
) -> pm::TokenStream {
    let expr = TokenStream::from(expression(InlineExprType::Unit, payload));
    let item = TokenStream::from(item);
    // A fresh uid associates the user item with the generated quote item.
    let uid = ItemUid::fresh();
    let uid_attr = AttrPayload::Uid(uid.clone());
    let assoc_attr = AttrPayload::AssociatedItem {
        role: AssociationRole::ItemQuote,
        item: uid,
    };
    let kind_attr = AttrPayload::ItemQuote(kind);
    let status_attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });
    use AttrPayload::NeverErased;
    // The quote is smuggled as the body of a function nested in an anonymous
    // `const`, so rustc resolves and typechecks the antiquotations.
    quote! {
        #assoc_attr
        #item
        #attribute_to_inject
        #status_attr
        const _: () = {
            #NeverErased
            #uid_attr
            #kind_attr
            fn quote_contents() {
                #expr
            }
        };
    }
    .into()
}

/// Returns whether a `future(..)` call appears in the expression `e`.
pub(super) fn detect_future_node_in_expression(e: &syn::Expr) -> bool {
    struct Visitor(bool);
    use syn::visit::*;
    impl<'a> Visit<'a> for Visitor {
        // NOTE(review): this override does not delegate to
        // `visit_expr(self, e)`, so only the expression handed to
        // `visit_expr` below is inspected — `future(..)` nested deeper in a
        // larger expression would not be detected. Confirm this is intended.
        fn visit_expr(&mut self, e: &'a Expr) {
            if let Some(Ok(_)) = crate::utils::expect_future_expr(e) {
                self.0 = true;
            }
        }
    }
    let mut visitor = Visitor(false);
    visitor.visit_expr(e);
    visitor.0
}

/// The type ascribed to an inline quote expression.
pub(super) enum InlineExprType {
    // Statement-like quote of type unit.
    Unit,
    // Quote denoting a `hax_lib::Prop`.
    Prop,
    // Quote of arbitrary type (unsafe: computationally relevant code can leak in).
    Anything,
}

/// Expands a quote string literal into Rust code that binds every
/// antiquotation (so rustc checks them) and hands the backend string to the
/// appropriate `hax_lib` inline function.
pub(super) fn expression(typ: InlineExprType, payload: pm::TokenStream) -> pm::TokenStream {
    let (mut backend_code, antiquotes) = {
        let payload = parse_macro_input!(payload as LitStr).value();
        // The split marker is an implementation detail: refuse payloads that
        // happen to contain it.
        if payload.contains(SPLIT_MARK) {
            return quote! {std::compile_error!(std::concat!($SPLIT_MARK, " is reserved"))}.into();
        }
        let (string, antiquotes) = match process_string(&payload) {
            Ok(x) => x,
            Err(message) => return quote! {std::compile_error!(#message)}.into(),
        };
        let string = proc_macro2::Literal::string(&string);
        let string: TokenStream = [proc_macro2::TokenTree::Literal(string)]
            .into_iter()
            .collect();
        (quote! {#string}, antiquotes)
    };

    for user in antiquotes.iter().rev() {
        // `future(..)` is only meaningful in `ensures` clauses: reject it in
        // non-unit quotes with a targeted error.
        if !matches!(typ, InlineExprType::Unit)
            && syn::parse(user.ts.clone())
                .as_ref()
                .map(detect_future_node_in_expression)
                .unwrap_or(false)
        {
            let ts: proc_macro2::TokenStream = user.ts.clone().into();
            return quote! {
                ::std::compile_error!(concat!("The `future` operator cannot be used within a quote. Hint: move `", stringify!(#ts), "` to a let binding and use the binding name instead."))
            }.into();
        }
        // Bind each antiquotation to a name derived from its kind so that
        // rustc resolves/typechecks it; iterating in reverse nests the
        // bindings in source order.
        let kind = &user.kind;
        backend_code = quote! {
            let #kind = #user;
            #backend_code
        };
    }

    let function = match typ {
        InlineExprType::Unit => quote! {inline},
        InlineExprType::Prop => quote! {inline_unsafe::<::hax_lib::Prop>},
        InlineExprType::Anything => quote! {inline_unsafe},
    };
    quote! {
        ::hax_lib::#function(#[allow(unused_variables)]{#backend_code})
    }
    .into()
}

================================================
FILE: hax-lib/macros/src/rewrite_self.rs
================================================
use crate::syn_ext::*;
use proc_macro2::Span;
use syn::spanned::Spanned;
use syn::*;

/// The `RewriteSelf` structure is hidden in a module so that only its
/// method can mutate its fields.
mod rewrite_self {
    use super::*;
    use std::collections::HashSet;

    /// Small & dirty wrapper around spans to make them `Eq`,
    /// `PartialEq` and `Hash`
    #[derive(Clone, Debug)]
    struct SpanWrapper(Span);
    const _: () = {
        impl Eq for SpanWrapper {}
        impl PartialEq for SpanWrapper {
            // Spans are compared via their `Debug` rendering.
            fn eq(&self, other: &Self) -> bool {
                format!("{self:?}") == format!("{other:?}")
            }
        }
        use std::hash::*;
        impl Hash for SpanWrapper {
            fn hash<H: Hasher>(&self, state: &mut H) {
                format!("{self:?}").hash(state)
            }
        }
    };

    /// A struct that carries informations for substituting `self` and
    /// `Self`. Note `typ` is an option:
    #[must_use]
    pub struct RewriteSelf {
        // The type substituted for `Self`, when known.
        typ: Option<Type>,
        // The identifier substituted for the value `self`.
        ident: Ident,
        // Spans of every `self`/`Self` occurrence, kept for error reporting.
        self_spans: HashSet<SpanWrapper>,
    }
    impl RewriteSelf {
        /// Consumes `RewriteSelf`, optionally outputing errors.
        pub fn get_error(self) -> Option<proc_macro2::TokenStream> {
            // No error when the `Self` type is known or `self` never occurred.
            if self.typ.is_some() || self.self_spans.is_empty() {
                return None;
            }
            let mut error = Error::new(Span::call_site(), "This macro doesn't work on trait or impl items: you need to add a `#[hax_lib::attributes]` on the enclosing impl block or trait.");
            // Attach one secondary error per `self` use site.
            for SpanWrapper(span) in self.self_spans {
                let use_site = Error::new(
                    span,
                    "Here, the function you are trying to annotate has a `Self`.",
                );
                error.combine(use_site);
            }
            Some(error.to_compile_error())
        }
        fn self_detected(&mut self, span: Span) {
            self.self_spans.insert(SpanWrapper(span));
        }
        /// Requests the ident with which `self` should be substituted.
        pub fn self_ident(&mut self, span: Span) -> &Ident {
            self.self_detected(span);
            &self.ident
        }
        /// Requests the type with which `Self` should be substituted with.
        pub fn self_ty(&mut self, span: Span) -> Type {
            self.self_detected(span);
            // Fall back to literal `Self` when no substitution type is known.
            self.typ.clone().unwrap_or_else(|| {
                parse_quote! {Self}
            })
        }
        /// Construct a rewritter
        pub fn new(ident: Ident, typ: Option<Type>) -> Self {
            Self {
                typ,
                ident,
                self_spans: HashSet::new(),
            }
        }
    }
}
pub use rewrite_self::*;

impl visit_mut::VisitMut for RewriteSelf {
    fn visit_expr_mut(&mut self, e: &mut Expr) {
        visit_mut::visit_expr_mut(self, e);
        // Replace the expression `self` with the substitution identifier.
        if e.is_ident("self") {
            let into = self.self_ident(e.span()).clone();
            *e = parse_quote! {#into}
        }
    }
    fn visit_type_mut(&mut self, ty: &mut Type) {
        visit_mut::visit_type_mut(self, ty);
        // Replace the type `Self` with the substitution type.
        if ty.is_ident("Self") {
            *ty = self.self_ty(ty.span())
        }
    }
    fn visit_fn_arg_mut(&mut self, arg: &mut FnArg) {
        visit_mut::visit_fn_arg_mut(self, arg);
        let arg_span = arg.span();
        // Turn a `self`/`&self`/`&mut self` receiver into an ordinary typed
        // argument `<ident>: [&['lt] [mut]] <SelfTy>`.
        if let FnArg::Receiver(r) = arg {
            let span = r.self_token.span();
            *arg = FnArg::Typed(PatType {
                attrs: r.attrs.clone(),
                pat: Box::new(Pat::Ident(PatIdent {
                    attrs: vec![],
                    by_ref: None,
                    mutability: None,
                    ident: self.self_ident(span).clone(),
                    subpat: None,
                })),
                colon_token: token::Colon(arg_span),
                ty: Box::new({
                    let ty = self.self_ty(span);
                    let (reference, lt) = r
                        .reference
                        .clone()
                        .map(|(r, lt)| (Some(r), lt))
                        .unwrap_or((None, None));
                    // `mut` is only kept for reference receivers (`&mut self`);
                    // a by-value `mut self` needs no type-level `mut`.
                    let mutability = reference.and(r.mutability.clone());
                    parse_quote! {#reference #lt #mutability #ty}
                }),
            });
        }
    }
    fn visit_item_impl_mut(&mut self, _i: &mut ItemImpl) {
        // Do nothing! We allow user to write self if it's nested in a impl block
    }
}

================================================
FILE: hax-lib/macros/src/syn_ext.rs
================================================
use crate::prelude::*;
use syn::parse::*;
use syn::punctuated::Punctuated;

/// A closure expression of arity 1, e.g.
`|x| x + 3` pub struct ExprClosure1 { pub arg: Pat, pub body: Expr, } impl Parse for ExprClosure1 { fn parse(ps: ParseStream) -> Result { let closure: ExprClosure = Parse::parse(ps as ParseStream)?; let inputs = closure.inputs; if inputs.len() != 1 { Err(Error::new(inputs.span(), "Expected exactly one argument"))?; } Ok(ExprClosure1 { arg: inputs[0].clone(), body: *closure.body.clone(), }) } } /// Utility trait to extract an `Ident` from various syn types pub trait ExpectIdent { /// Is `self` an `Ident`? fn expect_ident(&self) -> Option; /// Is `self` a specific ident named `name`? fn is_ident(&self, name: &str) -> bool { self.expect_ident() .filter(|ident| &ident.to_string() == name) .is_some() } } impl ExpectIdent for Box { fn expect_ident(&self) -> Option { let this: &T = self; this.expect_ident() } } fn expect_punctuated_1(x: &Punctuated) -> Option { (x.len() == 1).then(|| x.first().unwrap().clone()) } impl ExpectIdent for Path { fn expect_ident(&self) -> Option { expect_punctuated_1(&self.segments).map(|s| s.ident) } } impl ExpectIdent for Expr { fn expect_ident(&self) -> Option { match self { Expr::Path(ExprPath { qself: None, path, .. }) => path.expect_ident(), _ => None, } } } impl ExpectIdent for Type { fn expect_ident(&self) -> Option { match self { Type::Path(TypePath { qself: None, path, .. }) => path.expect_ident(), _ => None, } } } impl ExpectIdent for Pat { fn expect_ident(&self) -> Option { match self { Pat::Ident(PatIdent { by_ref: None, mutability: None, ident, subpat: None, .. }) => Some(ident.clone()), _ => None, } } } ================================================ FILE: hax-lib/macros/src/utils.rs ================================================ use syn::visit::Visit; use crate::prelude::*; use crate::rewrite_self::*; /// `HaxQuantifiers` makes polymorphic expression inlining functions available pub struct HaxQuantifiers; impl ToTokens for HaxQuantifiers { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { quote! 
{ use ::hax_lib::fstar::prop as fstar; use ::hax_lib::coq::prop as coq; use ::hax_lib::lean::prop as lean; use ::hax_lib::proverif::prop as proverif; } .to_tokens(tokens) } } /// Meta informations about functions decorations pub enum FnDecorationKind { Requires, Ensures { ret_binder: Pat }, Decreases, SMTPat, } impl ToString for FnDecorationKind { fn to_string(&self) -> String { match self { FnDecorationKind::Requires => "requires".to_string(), FnDecorationKind::Ensures { .. } => "ensures".to_string(), FnDecorationKind::Decreases { .. } => "decreases".to_string(), FnDecorationKind::SMTPat { .. } => "SMTPat".to_string(), } } } impl From for AssociationRole { fn from(kind: FnDecorationKind) -> Self { match &kind { FnDecorationKind::Requires => AssociationRole::Requires, FnDecorationKind::Ensures { .. } => AssociationRole::Ensures, FnDecorationKind::Decreases => AssociationRole::Decreases, FnDecorationKind::SMTPat => AssociationRole::SMTPat, } } } /// Merge two `syn::Generics`, respecting lifetime orders pub(crate) fn merge_generics(x: Generics, y: Generics) -> Generics { Generics { lt_token: x.lt_token.or(y.lt_token), gt_token: x.gt_token.or(y.gt_token), params: { let lts = x .lifetimes() .chain(y.lifetimes()) .cloned() .map(GenericParam::Lifetime); let not_lts = x .params .clone() .into_iter() .filter(|p| !matches!(p, GenericParam::Lifetime(_))) .chain( y.params .clone() .into_iter() .filter(|p| !matches!(p, GenericParam::Lifetime(_))), ); lts.chain(not_lts).collect() }, where_clause: match (x.where_clause, y.where_clause) { (Some(wx), Some(wy)) => Some(syn::WhereClause { where_token: wx.where_token, predicates: wx.predicates.into_iter().chain(wy.predicates).collect(), }), (Some(w), None) | (None, Some(w)) => Some(w), (None, None) => None, }, } } /// Transform every `x: &mut T` input into `x: &T` in a signature, and /// returns a list of such transformed `x: &T` inputs fn unmut_references_in_inputs(sig: &mut Signature) -> Vec { let mut mutable_inputs = vec![]; for 
input in &mut sig.inputs { if let Some(mutability) = match input { FnArg::Receiver(syn::Receiver { reference: Some(_), mutability, .. }) => Some(mutability), FnArg::Typed(syn::PatType { ty, .. }) => { use std::borrow::BorrowMut; if let syn::Type::Reference(syn::TypeReference { mutability, .. }) = ty.borrow_mut() { Some(mutability) } else { None } } _ => None, } { if mutability.is_some() { *mutability = None; mutable_inputs.push(input.clone()); } } } mutable_inputs } /// Expects a `FnArg` to be a simple variable pattern fn expect_fn_arg_var_pat(arg: &FnArg) -> Option<(String, syn::Type)> { match arg { FnArg::Receiver(recv) => Some(("self".into(), *recv.ty.clone())), FnArg::Typed(pat_type) => match &*pat_type.pat { syn::Pat::Wild(_) => Some(("".into(), *pat_type.ty.clone())), syn::Pat::Ident(pat_ident) => { Some((format!("{}", pat_ident.ident), *pat_type.ty.clone())) } _ => None, }, } } pub(crate) enum NotFutureExpr { BadNumberOfArgs, ArgNotIdent, } /// `expect_future_expr(e)` tries to match the pattern /// `future()` in expression `e` pub(crate) fn expect_future_expr(e: &Expr) -> Option> { if let Expr::Call(call) = e { if call.func.is_ident("future") { return Some(match call.args.iter().collect::>().as_slice() { [arg] => arg.expect_ident().ok_or(NotFutureExpr::ArgNotIdent), _ => Err(NotFutureExpr::BadNumberOfArgs), }); } } None } #[derive(Default)] pub struct IdentCollector { pub idents: Vec, } impl<'ast> syn::visit::Visit<'ast> for IdentCollector { fn visit_ident(&mut self, ident: &'ast Ident) { self.idents.push(ident.clone()); } } impl IdentCollector { /// Returns a fresh identifier with the given prefix that is not in the collected identifiers. 
pub fn fresh_ident(&self, prefix: &str) -> Ident { let idents: HashSet<&Ident> = HashSet::from_iter(self.idents.iter()); let mk = |s| Ident::new(s, Span::call_site()); std::iter::once(mk(prefix)) .chain((0u64..).map(|i| Ident::new(&format!("{}{}", prefix, i), Span::call_site()))) .find(|ident| !idents.contains(ident)) .unwrap() } } /// Rewrites `future(x)` nodes in an expression when (1) `x` is an /// ident and (2) the ident `x` is contained in the HashSet. struct RewriteFuture(HashSet); impl VisitMut for RewriteFuture { fn visit_expr_mut(&mut self, e: &mut Expr) { syn::visit_mut::visit_expr_mut(self, e); let error = match expect_future_expr(e) { Some(Ok(arg)) => { let arg = format!("{}", arg); if self.0.contains(&arg) { let arg = create_future_ident(&arg); *e = parse_quote! {#arg}; return; } Some(format!("Cannot find an input `{arg}` of type `&mut _`. In the context, `future` can be called on the following inputs: {:?}.", self.0)) } Some(Err(error_kind)) => { let message = match error_kind { NotFutureExpr::BadNumberOfArgs => { "`future` can only be called with one argument: a `&mut` input name" } NotFutureExpr::ArgNotIdent => { "`future` can only be called with an `&mut` input name" } }; let help_message = match self.0.iter().next() { None => " In the context, there is no `&mut` input.".to_string(), Some(var) => { format!(" For example, in the context you can write `future({var})`.") } }; Some(format!("{message}.{}", help_message)) } None => None, }; if let Some(error) = error { *e = parse_quote! {::std::compile_error!(#error)}; } } } fn create_future_ident(name: &str) -> syn::Ident { proc_macro2::Ident::new(&format!("{name}_future"), proc_macro2::Span::call_site()) } /// The engine translates functions of arity zero to functions that /// takes exactly one unit argument. The zero-arity functions we /// generate are translated correctly as well. 
/// But in the case of a
/// `ensures` clause, that's an issue: we produce a function of arity
/// one, whose first argument is the result of the function. Instead,
/// we need a function of arity two.
/// `fix_signature_arity` adds a `unit` if needed.
fn add_unit_to_sig_if_needed(signature: &mut Signature) {
    if signature.inputs.is_empty() {
        signature.inputs.push(parse_quote! {_: ()})
    }
}

/// Common logic when generating a function decoration
pub fn make_fn_decoration(
    mut phi: Expr,
    mut signature: Signature,
    kind: FnDecorationKind,
    mut generics: Option<Generics>,
    self_type: Option<Type>,
) -> (TokenStream, AttrPayload) {
    // A fresh name for substituting `self`, guaranteed not to collide with any
    // identifier occurring in the formula or the signature.
    let self_ident: Ident = {
        let mut idents = IdentCollector::default();
        idents.visit_expr(&phi);
        idents.visit_signature(&signature);
        idents.fresh_ident("self_")
    };
    // Rewrite `self`/`Self` everywhere; a `Self` with no known substitution
    // type turns into a compile error emitted alongside the decoration.
    let error = {
        let mut rewriter = RewriteSelf::new(self_ident, self_type);
        rewriter.visit_expr_mut(&mut phi);
        rewriter.visit_signature_mut(&mut signature);
        if let Some(generics) = generics.as_mut() {
            rewriter.visit_generics_mut(generics);
        }
        rewriter.get_error()
    };
    let uid = ItemUid::fresh();
    // `&mut T` inputs are demoted to `&T`; their final values are exposed to
    // `ensures` clauses via `<name>_future` bindings below.
    let mut_ref_inputs = unmut_references_in_inputs(&mut signature);
    let decoration = {
        let decoration_sig = {
            let mut sig = signature.clone();
            // The decoration item is named after its kind (`requires`, ...).
            sig.ident = format_ident!("{}", kind.to_string());
            if let FnDecorationKind::Ensures { ret_binder } = &kind {
                add_unit_to_sig_if_needed(&mut sig);
                let output_typ = match sig.output {
                    syn::ReturnType::Default => parse_quote! {()},
                    syn::ReturnType::Type(_, t) => t,
                };
                let mut_ref_inputs = mut_ref_inputs
                    .iter()
                    .map(|mut_ref_input| {
                        expect_fn_arg_var_pat(mut_ref_input).expect(
                            "Every `&mut` input of a function annotated with a `ensures` clause is expected to be a simple variable pattern.",
                        )
                    });
                // Replace `future(x)` by `x_future` in the formula.
                let mut rewrite_future =
                    RewriteFuture(mut_ref_inputs.clone().map(|x| x.0).collect());
                rewrite_future.visit_expr_mut(&mut phi);
                let (mut pats, mut tys): (Vec<_>, Vec<_>) = mut_ref_inputs
                    .map(|(name, ty)| {
                        (
                            create_future_ident(&name).to_token_stream(),
                            ty.to_token_stream(),
                        )
                    })
                    .unzip();
                let is_output_typ_unit = if let syn::Type::Tuple(tuple) = &*output_typ {
                    tuple.elems.is_empty()
                } else {
                    false
                };
                // The return value is passed as the last component of the extra
                // tuple argument, unless it is unit and futures are already present.
                if !is_output_typ_unit || pats.is_empty() {
                    pats.push(ret_binder.to_token_stream());
                    tys.push(quote! {#output_typ});
                }
                sig.inputs
                    .push(syn::parse_quote! {(#(#pats),*): (#(#tys),*)});
            }
            if let Some(generics) = generics {
                sig.generics = merge_generics(generics, sig.generics);
            }
            sig.output = match &kind {
                FnDecorationKind::Decreases | FnDecorationKind::SMTPat => {
                    syn::parse_quote! { -> () }
                }
                _ => syn::parse_quote! { -> impl core::convert::Into<::hax_lib::Prop> },
            };
            sig
        };
        let uid_attr = AttrPayload::Uid(uid.clone());
        let late_skip = &AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });
        if let FnDecorationKind::Decreases | FnDecorationKind::SMTPat = &kind {
            // These decorations are computationally irrelevant: coerce to unit.
            phi = parse_quote! {::hax_lib::any_to_unit(#phi)};
        };
        // `decreases` clauses do not need the quantifier helpers in scope.
        let quantifiers = if let FnDecorationKind::Decreases = &kind {
            None
        } else {
            Some(HaxQuantifiers)
        };
        // `ensures` clauses additionally get a local `future` helper so that
        // the formula typechecks before rewriting.
        let future = if let FnDecorationKind::Ensures { .. } = &kind {
            quote! {
                #late_skip
                #AttrHaxLang
                fn future<T>(x: &mut T) -> &T {
                    x
                }
            }
        } else {
            quote! {}
        };
        use AttrPayload::NeverErased;
        quote! {
            #[cfg(#DebugOrHaxCfgExpr)]
            #late_skip
            const _: () = {
                #quantifiers
                #future
                #uid_attr
                #late_skip
                #[allow(unused)]
                #NeverErased
                #decoration_sig {
                    #phi
                }
            };
        }
    };
    let assoc_attr = AttrPayload::AssociatedItem {
        role: kind.into(),
        item: uid,
    };
    (quote! {#error #decoration}, assoc_attr)
}

================================================
FILE: hax-lib/macros/types/Cargo.toml
================================================
[package]
name = "hax-lib-macros-types"
version.workspace = true
authors.workspace = true
license.workspace = true
homepage.workspace = true
edition = "2021"
repository.workspace = true
readme = "README.md"
description = "Hax-internal types"

[dependencies]
serde.workspace = true
serde_json.workspace = true
schemars = {workspace = true, optional = true}
quote.workspace = true
proc-macro2.workspace = true
uuid = { version = "1.5", features = ["v4"] }

================================================
FILE: hax-lib/macros/types/README.md
================================================
# hax internal types

A crate that defines the types of the various payloads of the
attributes produced by the crate `hax-lib-macros` and consumed
internally by the engine of hax.

================================================
FILE: hax-lib/macros/types/src/lib.rs
================================================
use serde::{Deserialize, Serialize};

/// Each item can be marked with a *u*nique *id*entifier. This is
/// useful whenever the payload of an attribute is a piece of Rust code
/// (an expression, a path, a type...). We don't want to retrieve those
/// pieces of Rust code as raw token stream: we want to let Rustc give
/// meaning to those. For instance, we want Rustc to type expressions
/// and to resolve paths.
///
/// Thus, we expand attributes with Rust-code-payloads as top-level
/// items marked with an `ItemUid`. The attributes are then replaced
/// in place with a simple reference (the `ItemUid` in stake).
///
/// Morally, we expand `struct Foo { #[refine(x > 3)] x: u32 }` to:
/// 1. `#[uuid(A_UNIQUE_ID_123)] fn refinement(x: u32) -> hax_lib::Prop {x > 3}`;
/// 2. `struct Foo { #[refined_by(A_UNIQUE_ID_123)] x: u32 }`.
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaUid")]
pub struct ItemUid {
    /// Currently, this is a UUID.
    pub uid: String,
}

impl std::fmt::Display for ItemUid {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.uid, f)
    }
}

impl ItemUid {
    /// Generates a fresh, globally unique identifier (a simple-format UUIDv4).
    pub fn fresh() -> Self {
        use uuid::Uuid;
        let uid = format!("{}", Uuid::new_v4().simple());
        ItemUid { uid }
    }
}

/// What shall Hax do with an item?
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaItemStatus")]
pub enum ItemStatus {
    /// Include this item in the translation
    Included {
        /// Should Hax drop this item just before code generation?
        late_skip: bool,
    },
    /// Exclude this item from the translation, optionally replacing it in the backends
    Excluded { modeled_by: Option<String> },
}

/// An item can be associated to another one for multiple reasons:
/// `AssociationRole` capture the nature of the (directed) relation
/// between two items
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaAssocRole")]
pub enum AssociationRole {
    /// The associated item is a `requires` pre-condition.
    Requires,
    /// The associated item is an `ensures` post-condition.
    Ensures,
    /// The associated item is a `decreases` termination measure.
    Decreases,
    /// The associated item is an F* `SMTPat` declaration.
    SMTPat,
    /// The associated item is a field/type refinement predicate.
    Refine,
    /// A quoted piece of backend code to place after or before the
    /// extraction of the marked item
    ItemQuote,
    ProcessRead,
    ProcessWrite,
    ProcessInit,
    ProtocolMessages,
}

/// Where should an item quote appear?
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaItemQuotePosition")]
pub enum ItemQuotePosition {
    /// Should appear just before the item in the extraction
    Before,
    /// Should appear right after the item in the extraction
    After,
}

/// F*-specific options for item quotes
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaItemQuoteFStarOpts")]
pub struct ItemQuoteFStarOpts {
    /// Shall we output this in F* interfaces (`*.fsti` files)?
    pub intf: bool,
    /// Shall we output this in F* implementations (`*.fst` files)?
    pub r#impl: bool,
}

/// An item quote is a verbatim piece of backend code included in
/// Rust. [`ItemQuote`] encodes the various options an item quote can
/// have.
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaItemQuote")]
pub struct ItemQuote {
    /// Whether the quote is placed before or after the item.
    pub position: ItemQuotePosition,
    /// F*-only placement options (`fst`/`fsti`), when targeting F*.
    pub fstar_options: Option<ItemQuoteFStarOpts>,
}

/// The proof method to use for verification condition generation and discharge.
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename = "HaProofMethod")]
pub enum ProofMethod {
    /// Lean's `bv_decide` bit-vector decision procedure.
    BvDecide,
    /// Lean's `grind` tactic.
    Grind,
}

/// Hax only understands one attribute: `#[hax::json(PAYLOAD)]` where
/// `PAYLOAD` is a JSON serialization of an inhabitant of
/// `AttrPayload`.
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[serde(rename = "HaPayload")] pub enum AttrPayload { ItemStatus(ItemStatus), /// Mark an item as associated with another one AssociatedItem { /// What is the nature of the association? role: AssociationRole, /// What is the identifier of the target item? item: ItemUid, }, Uid(ItemUid), /// Decides of the position of a item quote ItemQuote(ItemQuote), /// Mark an item so that hax never drop its body (this is useful /// for pre- and post- conditions of a function we dropped the /// body of: pre and post are part of type signature) NeverErased, NewtypeAsRefinement, /// Mark an item as a lemma statement to prove in the backend Lemma, Language, ProcessRead, ProcessWrite, ProcessInit, Proof(String), PureRequiresProof(String), PureEnsuresProof(String), ProofMethod(ProofMethod), ProtocolMessages, PVConstructor, PVHandwritten, TraitMethodNoPrePost, /// Make an item opaque Erased, /// In the context of a set of fields (e.g. on a `struct`), overrides its /// order. By default, the order of a field is its index, e.g. the first /// field has order 0, the i-th field has order i+1. Rust fields order /// matters: it rules how bits are represented. Once extracted, the order /// matters, but for different reasons, e.g. a field is refined with /// another, requiring a specific order. 
Order(i32), } pub const HAX_TOOL: &str = "_hax"; pub const HAX_CFG_OPTION_NAME: &str = "hax_compilation"; pub struct HaxTool; pub struct HaxCfgOptionName; pub struct DebugOrHaxCfgExpr; impl ToTokens for HaxTool { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { format_ident!("{}", HAX_TOOL).to_tokens(tokens) } } impl ToTokens for HaxCfgOptionName { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { format_ident!("{}", HAX_CFG_OPTION_NAME).to_tokens(tokens) } } impl ToTokens for DebugOrHaxCfgExpr { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { quote! {any(#HaxCfgOptionName, debug_assertions)}.to_tokens(tokens) } } use quote::*; impl From<&AttrPayload> for proc_macro2::TokenStream { fn from(payload: &AttrPayload) -> Self { let payload: String = serde_json::to_string(payload).unwrap(); quote! {#[cfg_attr(#HaxCfgOptionName, #HaxTool::json(#payload))]} } } impl ToTokens for AttrPayload { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { proc_macro2::TokenStream::from(self).to_tokens(tokens) } } ================================================ FILE: hax-lib/proof-libs/coq/coq/.gitignore ================================================ *.vo* *.aux *.glob *.cache .Makefile.d Makefile Makefile.conf ================================================ FILE: hax-lib/proof-libs/coq/coq/default.nix ================================================ { stdenv ? (import { }).stdenv , coqPackages ? 
(import { }).coqPackages_8_19, }: stdenv.mkDerivation { name = "hax-coq-generated-core"; src = ./generated-core; buildPhase = '' coq_makefile -f _CoqProject -o Makefile make ''; installPhase = '' export DESTDIR=$out make install mv $out/nix/store/*/lib $out rm -rf $out/nix ''; buildInputs = [ coqPackages.coq-record-update coqPackages.coq ]; } ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/_CoqProject ================================================ -R src/ Core -R spec/ Core -R phase_library/ Core -arg -w -arg all ./src/Core_Clone.v ./src/Core_Marker.v ./src/Core_Panicking.v ./src/Core_Ops_Function.v ./src/Core_Option.v ./src/Core_Cmp.v ./spec/Core_Base_Spec_Haxint.v ./spec/Core_Base_Spec_Unary.v ./spec/Core_Base_Spec_Binary_Positive.v ./spec/Core_Base_Spec_Binary_Pos.v ./spec/Core_Base_Spec_Binary.v ./spec/Core_Base_Spec_Z.v ./spec/Core_Base_Spec_Seq.v ./spec/Core_Base_Spec_Constants.v ./spec/Core_Base_Spec.v ./src/Core_Base_Binary.v ./src/Core_Base_Pos.v ./src/Core_Base_Z.v ./src/Core_Base_Seq.v ./src/Core_Base.v ./src/Core_Convert.v ./src/Core_Ops_Index.v ./src/Core_Ops_Bit.v ./src/Core_Ops_Arith.v ./src/Core_Ops_Range.v ./src/Core_Iter_Traits_Iterator.v ./src/Core_Ops_Index_range.v ./src/Core_Ops.v ./src/Core_Base_interface_Coerce.v ./src/Core_Base_interface_Int.v ./src/Core_Base_interface.v ./src/Core_Num_Uint_macros.v # Empty ./src/Core_Num_Int_macros.v # Empty ./src/Core_Result.v ./phase_library/ControlFlow.v # Bundles: Core_Primitive.v, ./src/Core_Array_Rec_bundle_579704328.v # ./src/Core_Primitive_Number_conversion.v # ./src/Core_Primitive_Number_conversion_i.v ./src/Core_Primitive.v ./phase_library/NumberNotation.v ./phase_library/TODO.v ./src/Core_Intrinsics.v ./src/Core_Num.v # Broken? 
./src/Core_Slice_Iter.v ./src/Core_Slice.v ./src/Core_Array_Iter.v ./src/Core_Array.v ./src/Core.v # # Extra # Core_Slice_Iter_Macros.v # ----- Core_Slice_Iter.v # Core_Slice_Index_Private_slice_index.v # Core_Slice_Index.v # ----- Core_Slice.v # ----- Core_Result.v # ----- Core_Primitive_Number_conversion_i.v # ----- Core_Primitive_Number_conversion.v # ----- Core_Primitive.v # ----- Core_Panicking.v # ----- Core_Option.v # ----- Core_Ops_Range.v # Core_Ops_Index_range.v # ----- Core_Ops_Index.v # Core_Ops_Function.v # Core_Ops_Bit_Impls_for_prims.v # ----- Core_Ops_Bit.v # Core_Ops_Arith_Impls_for_prims.v # ----- Core_Ops_Arith.v # ----- Core_Ops.v # ----- Core_Num_Uint_macros.v # ----- Core_Num_Int_macros.v # ----- Core_Num.v # ----- Core_Marker.v # Core_Iter_Traits_Marker.v # Core_Iter_Traits_Iterator.v # Core_Iter_Traits_Exact_size.v # Core_Iter_Traits_Collect.v # Core_Iter_Traits.v # Core_Iter_Range.v # Core_Iter.v # ----- Core_Intrinsics.v # Core_Fmt.v # ----- Core_Convert.v # ----- Core_Cmp.v # ----- Core_Clone.v # Core_Base_interface_Int_U8_proofs.v # Core_Base_interface_Int_U64_proofs.v # Core_Base_interface_Int_U32_proofs.v # Core_Base_interface_Int_U16_proofs.v # Core_Base_interface_Int_U128_proofs.v # Core_Base_interface_Int_I8_proofs.v # Core_Base_interface_Int_I64_proofs.v # Core_Base_interface_Int_I32_proofs.v # Core_Base_interface_Int_I16_proofs.v # Core_Base_interface_Int_I128_proofs.v # ----- Core_Base_interface_Int.v # ----- Core_Base_interface_Coerce.v # ----- Core_Base_interface.v # ----- Core_Base_Z.v # ----- Core_Base_Spec_Z.v # ----- Core_Base_Spec_Unary.v # ----- Core_Base_Spec_Seq.v # ----- Core_Base_Spec_Haxint.v # ----- Core_Base_Spec_Constants.v # ----- Core_Base_Spec_Binary_Positive.v # ----- Core_Base_Spec_Binary_Pos.v # ----- Core_Base_Spec_Binary.v # ----- Core_Base_Spec.v # ----- Core_Base_Seq.v # ----- Core_Base_Pos.v # Core_Base_Number_conversion.v # ----- Core_Base_Binary.v # ----- Core_Base.v # ----- 
Core_Array_Rec_bundle_579704328.v # ----- Core_Array_Iter.v # ----- Core_Array.v # ----- Core.v ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/phase_library/ControlFlow.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker. From Core Require Import Core_Convert. From Core Require Import Core_Base_interface_Int. From Core Require Import Core_Result. Inductive t_ControlFlow a b := | ControlFlow_Continue : a -> t_ControlFlow a b | ControlFlow_Break : b -> t_ControlFlow a b. Arguments ControlFlow_Continue {a} {b}. Arguments ControlFlow_Break {a} {b}. (* Run exception *) Definition run {a} (x : t_ControlFlow a a) : a := match x with | ControlFlow_Continue x => x | ControlFlow_Break x => x end. Definition bind_exception {a c} (x : t_ControlFlow a c) (f : forall (k : a) `{x = ControlFlow_Continue k}, t_ControlFlow a c) : t_ControlFlow a c := match x as k return x = k -> _ with | ControlFlow_Continue o => fun k => f (H := k) o | ControlFlow_Break o => fun _ => ControlFlow_Break o end eq_refl. Notation "'letb' p ':=' e 'in' rhs" := (bind_exception e (fun p _ => rhs)) (at level 100). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/phase_library/NumberNotation.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. 
Import RecordSetNotations. Require Import Core_Primitive. Export Core_Primitive. (* Handwritten *) Coercion Build_t_i8 : t_I8 >-> t_i8. Coercion Build_t_I8 : Z >-> t_I8. Coercion Build_t_i16 : t_I16 >-> t_i16. Coercion Build_t_I16 : Z >-> t_I16. Coercion Build_t_i32 : t_I32 >-> t_i32. Coercion Build_t_I32 : Z >-> t_I32. Coercion Build_t_i64 : t_I64 >-> t_i64. Coercion Build_t_I64 : Z >-> t_I64. Coercion Build_t_i128 : t_I128 >-> t_i128. Coercion Build_t_I128 : Z >-> t_I128. Coercion Build_t_isize : t_I64 >-> t_isize. Coercion Build_t_u8 : t_U8 >-> t_u8. Coercion Build_t_U8 : N >-> t_U8. Coercion Build_t_u16 : t_U16 >-> t_u16. Coercion Build_t_U16 : N >-> t_U16. Coercion Build_t_u32 : t_U32 >-> t_u32. Coercion Build_t_U32 : N >-> t_U32. Coercion Build_t_u64 : t_U64 >-> t_u64. Coercion Build_t_U64 : N >-> t_U64. Coercion Build_t_u128 : t_U128 >-> t_u128. Coercion Build_t_U128 : N >-> t_U128. Coercion Build_t_usize : t_U64 >-> t_usize. Coercion Z.to_N : Z >-> N. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/phase_library/TODO.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. Require Import Core_Primitive. Export Core_Primitive. (* Array coercions *) Coercion Build_t_Array : t_Slice >-> t_Array. Coercion Build_t_Slice : list >-> t_Slice. Definition unsize {A} (x : A) := x. Definition repeat {v_T} (a : v_T) b : t_Array v_T b := List.repeat a (N.to_nat (U64_f_v (usize_0 b))). Definition t_String := string. Definition ToString_f_to_string (x : string) : string := x. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. (* Inductive globality := | t_Global. 
*) (* Definition t_Vec T (_ : globality) : Type := list T. *) (* Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). *) (* Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). *) (* Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. *) (* Definition impl__with_capacity {A} (_ : Z) : list A := nil. *) (* Definition impl_1__push {A} l (x : A) := cons l x. *) (* Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := {| x |}. *) (* Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). *) Fixpoint build_range (l : nat) (f : nat) (a : list t_usize) : list t_usize := match f with | 0%nat => a | (S n)%nat => build_range (S l) n (cons a (Build_t_usize (Build_t_U64 (unary_to_int l)))) end. Definition fold_range {A : Type} (l : t_usize) (u : t_usize) (_ : A -> t_usize -> bool) (x : A) (f : A -> t_usize -> A) : A := List.fold_left f (build_range (unary_from_int (U64_f_v (usize_0 l))) (unary_from_int (U64_f_v (usize_0 (Sub_f_sub u l)))) nil) x. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Haxint. Export Core_Base_Spec_Haxint. From Core Require Import Core_Base_Spec_Unary. Export Core_Base_Spec_Unary. From Core Require Import Core_Base_Spec_Binary. Export Core_Base_Spec_Binary. From Core Require Import Core_Base_Spec_Z. Export Core_Base_Spec_Z. From Core Require Import Core_Base_Spec_Seq. Export Core_Base_Spec_Seq. From Core Require Import Core_Base_Spec_Constants. Export Core_Base_Spec_Constants. 
(* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Binary_Pos. Export Core_Base_Spec_Binary_Pos. From Core Require Import Core_Base_Spec_Binary_Positive. Export Core_Base_Spec_Binary_Positive. (* NotImplementedYet *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary_Pos.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Haxint. Export Core_Base_Spec_Haxint. From Core Require Import Core_Base_Spec_Binary_Positive. Export Core_Base_Spec_Binary_Positive. Notation "'t_POS'" := N. Notation "'POS_ZERO'" := N0. Notation "'POS_POS'" := Npos. Definition match_pos (s : t_HaxInt) : t_POS := s. 
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary_Positive.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Haxint. Export Core_Base_Spec_Haxint. From Core Require Import Core_Clone. Export Core_Clone. Notation "'t_Positive'" := positive. Notation "'t_POSITIVE'" := positive. Notation "'POSITIVE_XH'" := xH. Notation "'POSITIVE_XO'" := xO. Notation "'POSITIVE_XI'" := xI. Definition positive_from_int (x : t_HaxInt) `{Hpos : x <> N0} : t_Positive := match x return x <> N0 -> _ with | N0 => fun Hpos => False_rect _ (Hpos eq_refl) | Npos p => fun _ => p end Hpos. Definition positive_to_int (s : t_Positive) : t_HaxInt := Npos s. Definition xH : t_Positive := xH. Definition xI (s : t_Positive) : t_Positive := xI s. Definition xO (s : t_Positive) : t_Positive := xO s. Definition match_positive (s : t_Positive) : t_POSITIVE := s. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Constants.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Haxint. Export Core_Base_Spec_Haxint. Definition v_BITS_128_ : t_HaxInt := 128. Definition v_BITS_16_ : t_HaxInt := 16. 
Definition v_BITS_32_ : t_HaxInt := 32.
Definition v_BITS_64_ : t_HaxInt := 64.
Definition v_BITS_8_ : t_HaxInt := 8.
Definition v_WORDSIZE_128_ : t_HaxInt := N.pow 2 128.
Definition v_WORDSIZE_128_SUB_1_ : t_HaxInt := N.pow 2 128 - 1.
Definition v_WORDSIZE_16_ : t_HaxInt := N.pow 2 16.
(* Fixed: was `N.pow 2 16` (missing `- 1`), inconsistent with every other
   `*_SUB_1_` constant in this file, which are all defined as `2^n - 1`. *)
Definition v_WORDSIZE_16_SUB_1_ : t_HaxInt := N.pow 2 16 - 1.
Definition v_WORDSIZE_32_ : t_HaxInt := N.pow 2 32.
Definition v_WORDSIZE_32_SUB_1_ : t_HaxInt := N.pow 2 32 - 1.
Definition v_WORDSIZE_4_ : t_HaxInt := N.pow 2 4.
Definition v_WORDSIZE_4_SUB_1_ : t_HaxInt := N.pow 2 4 - 1.
Definition v_WORDSIZE_64_ : t_HaxInt := N.pow 2 64.
Definition v_WORDSIZE_64_SUB_1_ : t_HaxInt := N.pow 2 64 - 1.
Definition v_WORDSIZE_8_ : t_HaxInt := N.pow 2 8.
Definition v_WORDSIZE_8_SUB_1_ : t_HaxInt := N.pow 2 8 - 1.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Haxint.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* `t_HaxInt` is the unbounded natural number type backing hax integers. *)
Notation "'t_HaxInt'" := N.
Definition v_HaxInt_ONE : t_HaxInt := 1.
Definition v_HaxInt_TWO : t_HaxInt := 2.
Definition v_HaxInt_ZERO : t_HaxInt := 0.
Definition div2 (s : t_HaxInt) : t_HaxInt := s / 2.
Definition is_zero (s : t_HaxInt) : bool :=
  match s with
  | N0 => true
  | _ => false
  end.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Seq.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) Notation "'t_Seq'" := list. Notation "'t_LIST'" := list. Notation "'LIST_NIL'" := nil. Notation "'LIST_CONS'" := cons. Notation "'nil'" := nil. Notation "'cons'" := (fun x y => cons y x). Definition match_list {T} (x : t_Seq T) : t_LIST T := x. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Unary.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Haxint. Export Core_Base_Spec_Haxint. Notation "'t_Unary'" := nat. Notation "'t_UNARY'" := nat. Notation "'UNARY_ZERO'" := O. Notation "'UNARY_SUCC'" := S. Definition unary_from_int (x : t_HaxInt) : t_Unary := N.to_nat x. Definition unary_to_int (s : t_Unary) : t_HaxInt := N.of_nat s. Definition pred (x : t_Unary) : t_Unary := Nat.pred x. Definition match_unary (s : t_Unary) : t_UNARY := s. Definition succ (x : t_Unary) : t_Unary := S x. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Z.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec_Binary. 
Export Core_Base_Spec_Binary. Notation "'t_Z'" := Z. Notation "'Z_NEG'" := Zneg. Notation "'Z_ZERO'" := Z0. Notation "'Z_POS'" := Zpos. Definition v_Z_ONE : t_Z := 1%Z. Definition v_Z_TWO : t_Z := 2%Z. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Primitive. Export Core_Primitive. (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) From Core Require Import Core_Option. Export Core_Option. From Core Require Import Core_Array_Rec_bundle_579704328. Export Core_Array_Rec_bundle_579704328. From Core Require Import Core_Ops. Export Core_Ops. From Core Require Import Core_Ops_Index. Export Core_Ops_Index. From Core Require Import NumberNotation. Export NumberNotation. From Core Require Import TODO. Export TODO. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. 
Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Ops_Index. Export Core_Ops_Index. (* From Core Require Import Core_Ops_IndexMut. *) (* Export Core_Ops (t_IndexMut). *) From Core Require Import Core_Primitive. Export Core_Primitive. From Core Require Import Core_Array_Iter. Export Core_Array_Iter. Notation "'t_TryFromSliceError'" := (t_TryFromSliceError). Notation "'TryFromSliceError_0'" := (TryFromSliceError_0). (* NotImplementedYet *) (* Notation "'impl_2'" := (impl_2). *) (* Notation "'impl_1'" := (impl_1). *) (* Notation "'impl'" := (impl). *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array_Iter.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Num. Export Core_Num. From Core Require Import Core_Ops_Index_range. Export Core_Ops_Index_range. From Core Require Import Core_Ops_Range. Export Core_Ops_Range. From Core Require Import Core_Primitive. Export Core_Primitive. (* From Core Require Import Core_Iter (t_IntoIterator). *) (* Export Core_Iter (t_IntoIterator). *) From Core Require Import Core_Clone. Export Core_Clone. From Core Require Import Core_Base. Export Core_Base. (* From Core Require Import hax_lib. *) (* Export hax_lib. *) Record t_IntoIter (v_T : Type) (v_N : t_usize) `{t_Sized (v_T)} : Type := { IntoIter_f_data : t_Array ((v_T)) (v_N); IntoIter_f_alive : t_IndexRange; }. 
Arguments Build_t_IntoIter (_) (_) {_}. Arguments IntoIter_f_data {_} {_} {_}. Arguments IntoIter_f_alive {_} {_} {_}. #[export] Instance settable_t_IntoIter `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} : Settable _ := settable! (Build_t_IntoIter v_T v_N) . ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array_Rec_bundle_579704328.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker. From Core Require Import Core_Convert. From Core Require Import Core_Base_interface_Int. From Core Require Import ControlFlow. Record t_i128 : Type := { i128_0 : t_I128; }. Arguments Build_t_i128. Arguments i128_0. #[export] Instance settable_t_i128 : Settable _ := settable! (Build_t_i128) . Notation "'i128'" := Build_t_i128. #[global] Instance t_Clone_173398349 : t_Clone ((t_i128)) := { Clone_f_clone := fun (self : t_i128)=> self; }. Record t_i16 : Type := { i16_0 : t_I16; }. Arguments Build_t_i16. Arguments i16_0. #[export] Instance settable_t_i16 : Settable _ := settable! (Build_t_i16) . Notation "'i16'" := Build_t_i16. #[global] Instance t_Clone_192670426 : t_Clone ((t_i16)) := { Clone_f_clone := fun (self : t_i16)=> Build_t_i16 (Clone_f_clone (i16_0 self)); }. Record t_i32 : Type := { i32_0 : t_I32; }. Arguments Build_t_i32. Arguments i32_0. #[export] Instance settable_t_i32 : Settable _ := settable! (Build_t_i32) . Notation "'i32'" := Build_t_i32. #[global] Instance t_Clone_502683757 : t_Clone ((t_i32)) := { Clone_f_clone := fun (self : t_i32)=> Build_t_i32 (Clone_f_clone (i32_0 self)); }. Record t_i64 : Type := { i64_0 : t_I64; }. 
Arguments Build_t_i64. Arguments i64_0. #[export] Instance settable_t_i64 : Settable _ := settable! (Build_t_i64) . Notation "'i64'" := Build_t_i64. #[global] Instance t_Clone_208076318 : t_Clone ((t_i64)) := { Clone_f_clone := fun (self : t_i64)=> Build_t_i64 (Clone_f_clone (i64_0 self)); }. Record t_i8 : Type := { i8_0 : t_I8; }. Arguments Build_t_i8. Arguments i8_0. #[export] Instance settable_t_i8 : Settable _ := settable! (Build_t_i8) . Notation "'i8'" := Build_t_i8. #[global] Instance t_Clone_654126073 : t_Clone ((t_i8)) := { Clone_f_clone := fun (self : t_i8)=> Build_t_i8 (Clone_f_clone (i8_0 self)); }. Record t_isize : Type := { isize_0 : t_I64; }. Arguments Build_t_isize. Arguments isize_0. #[export] Instance settable_t_isize : Settable _ := settable! (Build_t_isize) . Notation "'isize'" := Build_t_isize. #[global] Instance t_Clone_36465747 : t_Clone ((t_isize)) := { Clone_f_clone := fun (self : t_isize)=> Build_t_isize (Clone_f_clone (isize_0 self)); }. #[global] Instance t_From_200584765 : t_From ((t_isize)) ((t_i64)) := { From_f_from := fun (x : t_i64)=> Build_t_isize (Into_f_into (i64_0 x)); }. #[global] Instance t_From_705632684 : t_From ((t_i64)) ((t_isize)) := { From_f_from := fun (x : t_isize)=> Build_t_i64 (Into_f_into (isize_0 x)); }. Record t_u128 : Type := { u128_0 : t_U128; }. Arguments Build_t_u128. Arguments u128_0. #[export] Instance settable_t_u128 : Settable _ := settable! (Build_t_u128) . Notation "'u128'" := Build_t_u128. Definition from_le715594649 (x : t_u128) : t_u128 := x. Definition to_le902648378 (self : t_u128) : t_u128 := self. Record t_u16 : Type := { u16_0 : t_U16; }. Arguments Build_t_u16. Arguments u16_0. #[export] Instance settable_t_u16 : Settable _ := settable! (Build_t_u16) . Notation "'u16'" := Build_t_u16. Definition from_le793045973 (x : t_u16) : t_u16 := x. Definition to_le1012469456 (self : t_u16) : t_u16 := self. Record t_u32 : Type := { u32_0 : t_U32; }. Arguments Build_t_u32. Arguments u32_0. 
#[export] Instance settable_t_u32 : Settable _ := settable! (Build_t_u32) . Notation "'u32'" := Build_t_u32. Definition from_le706338679 (x : t_u32) : t_u32 := x. Definition to_le724624277 (self : t_u32) : t_u32 := self. Record t_u64 : Type := { u64_0 : t_U64; }. Arguments Build_t_u64. Arguments u64_0. #[export] Instance settable_t_u64 : Settable _ := settable! (Build_t_u64) . Notation "'u64'" := Build_t_u64. Definition from_le435089922 (x : t_u64) : t_u64 := x. Definition to_le2703875 (self : t_u64) : t_u64 := self. Record t_u8 : Type := { u8_0 : t_U8; }. Arguments Build_t_u8. Arguments u8_0. #[export] Instance settable_t_u8 : Settable _ := settable! (Build_t_u8) . Notation "'u8'" := Build_t_u8. Definition from_le529489651 (x : t_u8) : t_u8 := x. Definition to_le523556665 (self : t_u8) : t_u8 := self. Record t_usize : Type := { usize_0 : t_U64; }. Arguments Build_t_usize. Arguments usize_0. #[export] Instance settable_t_usize : Settable _ := settable! (Build_t_usize) . Notation "'usize'" := Build_t_usize. Definition from_le418743864 (x : t_usize) : t_usize := x. Definition to_le946822077 (self : t_usize) : t_usize := self. #[global] Instance t_From_1035345737 : t_From ((t_usize)) ((t_u64)) := { From_f_from := fun (x : t_u64)=> Build_t_usize (Into_f_into (u64_0 x)); }. #[global] Instance t_From_478985084 : t_From ((t_u64)) ((t_usize)) := { From_f_from := fun (x : t_usize)=> Build_t_u64 (Into_f_into (usize_0 x)); }. Class v_Sealed (v_Self : Type) : Type := { }. Arguments v_Sealed (_). #[global] Instance v_Sealed_639968800 : v_Sealed ((t_usize)) := { }. #[global] Instance v_Sealed_740757788 : v_Sealed ((t_Range ((t_usize)))) := { }. (* Instance v_Sealed_1056036517 : v_Sealed ((t_RangeTo ((t_usize)))) := *) (* { *) (* }. *) (* Instance v_Sealed_277245654 : v_Sealed ((t_RangeFrom ((t_usize)))) := *) (* { *) (* }. *) (* Instance v_Sealed_1032594188 : v_Sealed ((t_RangeFull)) := *) (* { *) (* }. 
*) (* Instance v_Sealed_135080564 : v_Sealed ((t_RangeInclusive ((t_usize)))) := *) (* { *) (* }. *) (* Instance v_Sealed_919294089 : v_Sealed ((t_RangeToInclusive ((t_usize)))) := *) (* { *) (* }. *) (* Instance v_Sealed_254412259 : v_Sealed (((t_Bound ((t_usize))*t_Bound ((t_usize))))) := *) (* { *) (* }. *) (* Instance v_Sealed_463870686 : v_Sealed ((t_IndexRange)) := *) (* { *) (* }. *) Definition v_BITS80497669 : t_u32 := Build_t_u32 (impl_97__BITS). Definition v_MAX626626007 : t_i8 := Build_t_i8 (Constants_f_MAX). Definition v_MIN19747349 : t_i8 := Build_t_i8 (Constants_f_MIN). Definition v_BITS421056295 : t_u32 := Build_t_u32 (impl_83__BITS). Definition v_MAX474501300 : t_i16 := Build_t_i16 (Constants_f_MAX). Definition v_MIN776391606 : t_i16 := Build_t_i16 (Constants_f_MIN). Definition v_BITS465526498 : t_u32 := Build_t_u32 (impl_69__BITS). Definition v_MAX106630818 : t_i32 := Build_t_i32 (Constants_f_MAX). Definition v_MIN682967538 : t_i32 := Build_t_i32 (Constants_f_MIN). Definition v_BITS419886578 : t_u32 := Build_t_u32 (impl_55__BITS). Definition v_MAX527043787 : t_i64 := Build_t_i64 (Constants_f_MAX). Definition v_MIN654206259 : t_i64 := Build_t_i64 (Constants_f_MIN). Definition v_BITS992667165 : t_u32 := Build_t_u32 (impl_41__BITS). Definition v_MAX375377319 : t_i128 := Build_t_i128 (Constants_f_MAX). Definition v_MIN79612531 : t_i128 := Build_t_i128 (Constants_f_MIN). Definition v_BITS211584016 : t_u32 := Build_t_u32 (impl_55__BITS). Definition v_MAX937003029 : t_isize := Build_t_isize (Constants_f_MAX). Definition v_MIN1017039533 : t_isize := Build_t_isize (Constants_f_MIN). Definition v_BITS690311813 : t_u32 := Build_t_u32 (impl_219__BITS). Definition v_MAX310118176 : t_u8 := Build_t_u8 (Constants_f_MAX). Definition v_MIN41851434 : t_u8 := Build_t_u8 (Constants_f_MIN). Definition v_BITS277333551 : t_u32 := Build_t_u32 (impl_192__BITS). Definition v_MAX487295910 : t_u16 := Build_t_u16 (Constants_f_MAX). 
(* Continuation of the BITS / MAX / MIN constants: u16 (MIN), u32, u64, u128,
   usize.  usize shares impl_138__BITS with u64 (64-bit target modeling). *)
Definition v_MIN592300287 : t_u16 := Build_t_u16 (Constants_f_MIN).
Definition v_BITS473478051 : t_u32 := Build_t_u32 (impl_165__BITS).
Definition v_MAX826434525 : t_u32 := Build_t_u32 (Constants_f_MAX).
Definition v_MIN932777089 : t_u32 := Build_t_u32 (Constants_f_MIN).
Definition v_BITS177666292 : t_u32 := Build_t_u32 (impl_138__BITS).
Definition v_MAX815180633 : t_u64 := Build_t_u64 (Constants_f_MAX).
Definition v_MIN631333594 : t_u64 := Build_t_u64 (Constants_f_MIN).
Definition v_BITS136999051 : t_u32 := Build_t_u32 (impl_111__BITS).
Definition v_MAX404543799 : t_u128 := Build_t_u128 (Constants_f_MAX).
Definition v_MIN668621698 : t_u128 := Build_t_u128 (Constants_f_MIN).
Definition v_BITS229952196 : t_u32 := Build_t_u32 (impl_138__BITS).
Definition v_MAX750570916 : t_usize := Build_t_usize (Constants_f_MAX).
Definition v_MIN861571008 : t_usize := Build_t_usize (Constants_f_MIN).

(* Clone for the unsigned wrappers: clone the payload, re-wrap. *)
#[global] Instance t_Clone_832469823 : t_Clone ((t_u8)) :=
  {
    Clone_f_clone := fun (self : t_u8)=> Build_t_u8 (Clone_f_clone (u8_0 self));
  }.
#[global] Instance t_Clone_562622454 : t_Clone ((t_u16)) :=
  {
    Clone_f_clone := fun (self : t_u16)=> Build_t_u16 (Clone_f_clone (u16_0 self));
  }.
#[global] Instance t_Clone_1034302141 : t_Clone ((t_u32)) :=
  {
    Clone_f_clone := fun (self : t_u32)=> Build_t_u32 (Clone_f_clone (u32_0 self));
  }.
#[global] Instance t_Clone_189576787 : t_Clone ((t_u64)) :=
  {
    Clone_f_clone := fun (self : t_u64)=> Build_t_u64 (Clone_f_clone (u64_0 self));
  }.
#[global] Instance t_Clone_296673181 : t_Clone ((t_u128)) :=
  {
    Clone_f_clone := fun (self : t_u128)=> Build_t_u128 (Clone_f_clone (u128_0 self));
  }.
#[global] Instance t_Clone_466142540 : t_Clone ((t_usize)) :=
  {
    Clone_f_clone := fun (self : t_usize)=> Build_t_usize (Clone_f_clone (usize_0 self));
  }.

(* Model of core::slice::SliceIndex: an index of type v_Self into a collection
   of type v_T, producing SliceIndex_f_Output.  Implementors must be Sealed,
   mirroring Rust's sealed-trait pattern. *)
Class v_SliceIndex (v_Self : Type) (v_T : Type) `{v_Sealed (v_Self)} : Type :=
  {
    SliceIndex_f_Output : Type;
    SliceIndex_f_index : v_Self -> v_T -> SliceIndex_f_Output;
  }.
Arguments v_SliceIndex (_) (_) {_}.
(* PartialEq / PartialOrd instances for the unsigned wrappers (u8, u16, u32,
   u64, u128, usize).  Every instance delegates to the corresponding instance
   on the payload type.  The four comparison operators are each derived from
   partial_cmp by pattern-matching on the returned t_Option t_Ordering, using
   or-patterns for the non-strict variants — exactly the default-method
   derivation in Rust's PartialOrd. *)
#[global] Instance t_PartialEq_234431236 : t_PartialEq ((t_u8)) ((t_u8)) :=
  {
    PartialEq_f_eq := fun (self : t_u8) (rhs : t_u8)=> PartialEq_f_eq (u8_0 self) (u8_0 rhs);
    PartialEq_f_ne := fun (self : t_u8) (rhs : t_u8)=> negb (PartialEq_f_eq (u8_0 self) (u8_0 rhs));
  }.
#[global] Instance t_PartialOrd_835131600 : t_PartialOrd ((t_u8)) ((t_u8)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_u8) (rhs : t_u8)=> PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs);
    PartialOrd_f_lt := fun (self : t_u8) (rhs : t_u8)=> match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_u8) (rhs : t_u8)=> match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_u8) (rhs : t_u8)=> match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_u8) (rhs : t_u8)=> match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_965259828 : t_PartialEq ((t_u16)) ((t_u16)) :=
  {
    PartialEq_f_eq := fun (self : t_u16) (rhs : t_u16)=> PartialEq_f_eq (u16_0 self) (u16_0 rhs);
    PartialEq_f_ne := fun (self : t_u16) (rhs : t_u16)=> negb (PartialEq_f_eq (u16_0 self) (u16_0 rhs));
  }.
#[global] Instance t_PartialOrd_116974173 : t_PartialOrd ((t_u16)) ((t_u16)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_u16) (rhs : t_u16)=> PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs);
    PartialOrd_f_lt := fun (self : t_u16) (rhs : t_u16)=> match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_u16) (rhs : t_u16)=> match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_u16) (rhs : t_u16)=> match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_u16) (rhs : t_u16)=> match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_739399974 : t_PartialEq ((t_u32)) ((t_u32)) :=
  {
    PartialEq_f_eq := fun (self : t_u32) (rhs : t_u32)=> PartialEq_f_eq (u32_0 self) (u32_0 rhs);
    PartialEq_f_ne := fun (self : t_u32) (rhs : t_u32)=> negb (PartialEq_f_eq (u32_0 self) (u32_0 rhs));
  }.
#[global] Instance t_PartialOrd_553141371 : t_PartialOrd ((t_u32)) ((t_u32)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_u32) (rhs : t_u32)=> PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs);
    PartialOrd_f_lt := fun (self : t_u32) (rhs : t_u32)=> match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_u32) (rhs : t_u32)=> match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_u32) (rhs : t_u32)=> match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_u32) (rhs : t_u32)=> match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_464367537 : t_PartialEq ((t_u64)) ((t_u64)) :=
  {
    PartialEq_f_eq := fun (self : t_u64) (rhs : t_u64)=> PartialEq_f_eq (u64_0 self) (u64_0 rhs);
    PartialEq_f_ne := fun (self : t_u64) (rhs : t_u64)=> negb (PartialEq_f_eq (u64_0 self) (u64_0 rhs));
  }.
#[global] Instance t_PartialOrd_207997255 : t_PartialOrd ((t_u64)) ((t_u64)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_u64) (rhs : t_u64)=> PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs);
    PartialOrd_f_lt := fun (self : t_u64) (rhs : t_u64)=> match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_u64) (rhs : t_u64)=> match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_u64) (rhs : t_u64)=> match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_u64) (rhs : t_u64)=> match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_876938738 : t_PartialEq ((t_u128)) ((t_u128)) :=
  {
    PartialEq_f_eq := fun (self : t_u128) (rhs : t_u128)=> PartialEq_f_eq (u128_0 self) (u128_0 rhs);
    PartialEq_f_ne := fun (self : t_u128) (rhs : t_u128)=> negb (PartialEq_f_eq (u128_0 self) (u128_0 rhs));
  }.
#[global] Instance t_PartialOrd_566729496 : t_PartialOrd ((t_u128)) ((t_u128)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_u128) (rhs : t_u128)=> PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs);
    PartialOrd_f_lt := fun (self : t_u128) (rhs : t_u128)=> match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_u128) (rhs : t_u128)=> match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_u128) (rhs : t_u128)=> match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_u128) (rhs : t_u128)=> match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_1011013145 : t_PartialEq ((t_usize)) ((t_usize)) :=
  {
    PartialEq_f_eq := fun (self : t_usize) (rhs : t_usize)=> PartialEq_f_eq (usize_0 self) (usize_0 rhs);
    PartialEq_f_ne := fun (self : t_usize) (rhs : t_usize)=> negb (PartialEq_f_eq (usize_0 self) (usize_0 rhs));
  }.
#[global] Instance t_PartialOrd_917114071 : t_PartialOrd ((t_usize)) ((t_usize)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_usize) (rhs : t_usize)=> PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs);
    PartialOrd_f_lt := fun (self : t_usize) (rhs : t_usize)=> match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_usize) (rhs : t_usize)=> match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_usize) (rhs : t_usize)=> match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_usize) (rhs : t_usize)=> match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
(* PartialEq / PartialOrd instances for the signed wrappers (i8 .. isize),
   same delegation pattern as the unsigned block above, followed by the
   Z <-> iNN conversion instances used by integer literals. *)
#[global] Instance t_PartialOrd_610141491 : t_PartialOrd ((t_i8)) ((t_i8)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_i8) (rhs : t_i8)=> PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs);
    PartialOrd_f_lt := fun (self : t_i8) (rhs : t_i8)=> match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_i8) (rhs : t_i8)=> match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_i8) (rhs : t_i8)=> match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_i8) (rhs : t_i8)=> match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_341364762 : t_PartialEq ((t_i16)) ((t_i16)) :=
  {
    PartialEq_f_eq := fun (self : t_i16) (rhs : t_i16)=> PartialEq_f_eq (i16_0 self) (i16_0 rhs);
    PartialEq_f_ne := fun (self : t_i16) (rhs : t_i16)=> negb (PartialEq_f_eq (i16_0 self) (i16_0 rhs));
  }.
#[global] Instance t_PartialOrd_685280672 : t_PartialOrd ((t_i16)) ((t_i16)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_i16) (rhs : t_i16)=> PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs);
    PartialOrd_f_lt := fun (self : t_i16) (rhs : t_i16)=> match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_i16) (rhs : t_i16)=> match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_i16) (rhs : t_i16)=> match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_i16) (rhs : t_i16)=> match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_335582486 : t_PartialEq ((t_i32)) ((t_i32)) :=
  {
    PartialEq_f_eq := fun (self : t_i32) (rhs : t_i32)=> PartialEq_f_eq (i32_0 self) (i32_0 rhs);
    PartialEq_f_ne := fun (self : t_i32) (rhs : t_i32)=> negb (PartialEq_f_eq (i32_0 self) (i32_0 rhs));
  }.
#[global] Instance t_PartialOrd_776800970 : t_PartialOrd ((t_i32)) ((t_i32)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_i32) (rhs : t_i32)=> PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs);
    PartialOrd_f_lt := fun (self : t_i32) (rhs : t_i32)=> match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_i32) (rhs : t_i32)=> match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_i32) (rhs : t_i32)=> match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_i32) (rhs : t_i32)=> match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_1019995697 : t_PartialEq ((t_i64)) ((t_i64)) :=
  {
    PartialEq_f_eq := fun (self : t_i64) (rhs : t_i64)=> PartialEq_f_eq (i64_0 self) (i64_0 rhs);
    PartialEq_f_ne := fun (self : t_i64) (rhs : t_i64)=> negb (PartialEq_f_eq (i64_0 self) (i64_0 rhs));
  }.
#[global] Instance t_PartialOrd_354028907 : t_PartialOrd ((t_i64)) ((t_i64)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_i64) (rhs : t_i64)=> PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs);
    PartialOrd_f_lt := fun (self : t_i64) (rhs : t_i64)=> match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_i64) (rhs : t_i64)=> match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_i64) (rhs : t_i64)=> match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_i64) (rhs : t_i64)=> match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_476424898 : t_PartialEq ((t_i128)) ((t_i128)) :=
  {
    PartialEq_f_eq := fun (self : t_i128) (rhs : t_i128)=> PartialEq_f_eq (i128_0 self) (i128_0 rhs);
    PartialEq_f_ne := fun (self : t_i128) (rhs : t_i128)=> negb (PartialEq_f_eq (i128_0 self) (i128_0 rhs));
  }.
#[global] Instance t_PartialOrd_532073533 : t_PartialOrd ((t_i128)) ((t_i128)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_i128) (rhs : t_i128)=> PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs);
    PartialOrd_f_lt := fun (self : t_i128) (rhs : t_i128)=> match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_i128) (rhs : t_i128)=> match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_i128) (rhs : t_i128)=> match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_i128) (rhs : t_i128)=> match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.
#[global] Instance t_PartialEq_675022234 : t_PartialEq ((t_isize)) ((t_isize)) :=
  {
    PartialEq_f_eq := fun (self : t_isize) (rhs : t_isize)=> PartialEq_f_eq (isize_0 self) (isize_0 rhs);
    PartialEq_f_ne := fun (self : t_isize) (rhs : t_isize)=> negb (PartialEq_f_eq (isize_0 self) (isize_0 rhs));
  }.
#[global] Instance t_PartialOrd_661215608 : t_PartialOrd ((t_isize)) ((t_isize)) :=
  {
    PartialOrd_f_partial_cmp := fun (self : t_isize) (rhs : t_isize)=> PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs);
    PartialOrd_f_lt := fun (self : t_isize) (rhs : t_isize)=> match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with | Option_Some (Ordering_Less) => true | _ => false end;
    PartialOrd_f_le := fun (self : t_isize) (rhs : t_isize)=> match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with | Option_Some (Ordering_Less | Ordering_Equal) => true | _ => false end;
    PartialOrd_f_gt := fun (self : t_isize) (rhs : t_isize)=> match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with | Option_Some (Ordering_Greater) => true | _ => false end;
    PartialOrd_f_ge := fun (self : t_isize) (rhs : t_isize)=> match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with | Option_Some (Ordering_Greater | Ordering_Equal) => true | _ => false end;
  }.

(* Injection of a mathematical integer (Z) into each signed wrapper.  These
   back integer-literal elaboration; no range check is performed here —
   NOTE(review): out-of-range Z values are accepted as-is; confirm the
   normalization happens in the Build_t_INN representation. *)
#[global] Instance t_From_number_i8 : t_From t_i8 Z := { From_f_from (x : Z) := Build_t_i8 (Build_t_I8 x) }.
#[global] Instance t_From_number_i16 : t_From t_i16 Z := { From_f_from (x : Z) := Build_t_i16 (Build_t_I16 x) }.
#[global] Instance t_From_number_i32 : t_From t_i32 Z := { From_f_from (x : Z) := Build_t_i32 (Build_t_I32 x) }.
#[global] Instance t_From_number_i64 : t_From t_i64 Z := { From_f_from (x : Z) := Build_t_i64 (Build_t_I64 x) }.
#[global] Instance t_From_number_i128 : t_From t_i128 Z := { From_f_from (x : Z) := Build_t_i128 (Build_t_I128 x) }.
(* isize shares the 64-bit payload (t_I64). *)
#[global] Instance t_From_number_isize : t_From t_isize Z := { From_f_from (x : Z) := Build_t_isize (Build_t_I64 x) }.
(* Projections back to Z via the INN_f_v field accessors. *)
#[global] Instance t_From_number_Zi8 : t_From Z t_i8 := { From_f_from (x : t_i8) := I8_f_v (i8_0 x) }.
#[global] Instance t_From_number_Zi16 : t_From Z t_i16 := { From_f_from (x : t_i16) := I16_f_v (i16_0 x) }.
#[global] Instance t_From_number_Zi32 : t_From Z t_i32 := { From_f_from (x : t_i32) := I32_f_v (i32_0 x) }.
(* Remaining Z projections, the sign predicates (is_negative / is_positive /
   signum) for every signed wrapper, and the identity / widening t_From
   instances between signed wrappers.
   Fix: t_From_687588567 and t_From_257005484 were the only two instances in
   the file declared without #[global]; added the attribute for consistency
   with every other instance (and to avoid Coq's deprecation warning about
   unattributed Instance declarations / accidental section-locality). *)
#[global] Instance t_From_number_Zi64 : t_From Z t_i64 := { From_f_from (x : t_i64) := I64_f_v (i64_0 x) }.
#[global] Instance t_From_number_Zi128 : t_From Z t_i128 := { From_f_from (x : t_i128) := I128_f_v (i128_0 x) }.
#[global] Instance t_From_number_Zisize : t_From Z t_isize := { From_f_from (x : t_isize) := I64_f_v (isize_0 x) }.

(* Sign predicates, defined by comparison against the literal 0 lifted via
   Into_f_into.  signum clones self before the first comparison because the
   generated code mirrors Rust's by-value use of self. *)
Definition is_negative350273175 (self : t_i8) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive286955196 (self : t_i8) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum721334203 (self : t_i8) : t_i8 := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).

(* Identity conversions (i8 -> i8, i16 -> i16): round-trip through Z. *)
#[global] Instance t_From_687588567 : t_From ((t_i8)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Into_f_into (I8_f_v (i8_0 x));
  }.
#[global] Instance t_From_257005484 : t_From ((t_i16)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_i16 (Build_t_I16 (Into_f_into (x)));
  }.

Definition is_negative477067241 (self : t_i16) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive821581438 (self : t_i16) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum243706004 (self : t_i16) : t_i16 := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).

(* The identity From instances for i16/i32/i64/i128/isize below are disabled;
   some of the commented bodies are ill-typed (e.g. `t_i32 (...)` uses the
   type as a constructor, `I64_f_v i64_0 x` is missing parentheses), which is
   presumably why they were commented out rather than fixed. *)
(* Instance t_From_560870163 : t_From ((t_i16)) ((t_i16)) := *)
(* { *)
(* From_f_from := fun (x : t_i16)=> *)
(* Into_f_into (I16_f_v (i16_0 x)); *)
(* }. *)
(* Instance t_From_17641682 : t_From ((t_i32)) ((t_i32)) := *)
(* { *)
(* From_f_from := fun (x : t_i32)=> *)
(* t_i32 (Build_t_I32 (Into_f_into (x))); *)
(* }. *)
Definition is_negative1035644813 (self : t_i32) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive401652342 (self : t_i32) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum323641039 (self : t_i32) : t_i32 := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).
(* Instance t_From_865467252 : t_From ((t_i32)) ((t_i32)) := *)
(* { *)
(* From_f_from := fun (x : t_i32)=> *)
(* Into_f_into (I32_f_v (i32_0 x)); *)
(* }. *)
(* Instance t_From_881024429 : t_From ((t_i64)) ((t_i64)) := *)
(* { *)
(* From_f_from := fun (x : t_i64)=> *)
(* t_i64 (Build_t_I64 (Into_f_into (x))); *)
(* }. *)
Definition is_negative1066124578 (self : t_i64) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive16569358 (self : t_i64) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum582963664 (self : t_i64) : t_i64 := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).
(* Instance t_From_101582575 : t_From ((t_i64)) ((t_i64)) := *)
(* { *)
(* From_f_from := fun (x : t_i64)=> *)
(* Into_f_into (I64_f_v i64_0 x); *)
(* }. *)
(* Instance t_From_954204920 : t_From ((t_i128)) ((t_i128)) := *)
(* { *)
(* From_f_from := fun (x : t_i128)=> *)
(* t_i128 (Build_t_I128 (Into_f_into (x))); *)
(* }. *)
Definition is_negative221698470 (self : t_i128) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive883218309 (self : t_i128) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum408800799 (self : t_i128) : t_i128 := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).
(* Instance t_From_515435087 : t_From ((t_i128)) ((t_i128)) := *)
(* { *)
(* From_f_from := fun (x : t_i128)=> *)
(* Into_f_into (I128_f_v i128_0 x); *)
(* }. *)
(* Instance t_From_1044036214 : t_From ((t_isize)) ((t_isize)) := *)
(* { *)
(* From_f_from := fun (x : t_isize)=> *)
(* t_isize (Build_t_I64 (Into_f_into (x))); *)
(* }. *)
Definition is_negative693446369 (self : t_isize) : bool := PartialOrd_f_lt (self) (Into_f_into (0)).
Definition is_positive169998680 (self : t_isize) : bool := PartialOrd_f_gt (self) (Into_f_into (0)).
Definition signum91486536 (self : t_isize) : t_isize := if PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0)) then Into_f_into (-1) else if PartialEq_f_eq (self) (Into_f_into (0)) then Into_f_into (0) else Into_f_into (1).

(* isize identity conversion (the one enabled identity instance besides
   i8/i16). *)
#[global] Instance t_From_202441647 : t_From ((t_isize)) ((t_isize)) :=
  {
    From_f_from := fun (x : t_isize)=> Into_f_into (I64_f_v (isize_0 x));
  }.

(* Widening conversions from i8 and from i16: unwrap, convert the payload via
   the machine-integer Into, re-wrap at the target width. *)
#[global] Instance t_From_100016775 : t_From ((t_i16)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Build_t_i16 (Into_f_into (i8_0 x));
  }.
#[global] Instance t_From_964712142 : t_From ((t_i32)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Build_t_i32 (Into_f_into (i8_0 x));
  }.
#[global] Instance t_From_512166668 : t_From ((t_i64)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Build_t_i64 (Into_f_into (i8_0 x));
  }.
#[global] Instance t_From_95828634 : t_From ((t_i128)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Build_t_i128 (Into_f_into (i8_0 x));
  }.
#[global] Instance t_From_48986939 : t_From ((t_isize)) ((t_i8)) :=
  {
    From_f_from := fun (x : t_i8)=> Build_t_isize (Into_f_into (i8_0 x));
  }.
#[global] Instance t_From_325010041 : t_From ((t_i8)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_i8 (Into_f_into (i16_0 x));
  }.
#[global] Instance t_From_64357194 : t_From ((t_i32)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_i32 (Into_f_into (i16_0 x));
  }.
#[global] Instance t_From_840335964 : t_From ((t_i64)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_i64 (Into_f_into (i16_0 x));
  }.
(* Remaining cross-width t_From instances (from i16, i32, i64, i128, isize),
   all following the same unwrap / Into_f_into / re-wrap pattern, followed by
   the first two add_with_overflow helpers. *)
#[global] Instance t_From_601385454 : t_From ((t_i128)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_i128 (Into_f_into (i16_0 x));
  }.
#[global] Instance t_From_755383497 : t_From ((t_isize)) ((t_i16)) :=
  {
    From_f_from := fun (x : t_i16)=> Build_t_isize (Into_f_into (i16_0 x));
  }.
#[global] Instance t_From_926112880 : t_From ((t_i8)) ((t_i32)) :=
  {
    From_f_from := fun (x : t_i32)=> Build_t_i8 (Into_f_into (i32_0 x));
  }.
#[global] Instance t_From_81353160 : t_From ((t_i16)) ((t_i32)) :=
  {
    From_f_from := fun (x : t_i32)=> Build_t_i16 (Into_f_into (i32_0 x));
  }.
#[global] Instance t_From_549703007 : t_From ((t_i64)) ((t_i32)) :=
  {
    From_f_from := fun (x : t_i32)=> Build_t_i64 (Into_f_into (i32_0 x));
  }.
#[global] Instance t_From_1001458175 : t_From ((t_i128)) ((t_i32)) :=
  {
    From_f_from := fun (x : t_i32)=> Build_t_i128 (Into_f_into (i32_0 x));
  }.
#[global] Instance t_From_329934859 : t_From ((t_isize)) ((t_i32)) :=
  {
    From_f_from := fun (x : t_i32)=> Build_t_isize (Into_f_into (i32_0 x));
  }.
#[global] Instance t_From_381441019 : t_From ((t_i8)) ((t_i64)) :=
  {
    From_f_from := fun (x : t_i64)=> Build_t_i8 (Into_f_into (i64_0 x));
  }.
#[global] Instance t_From_728811179 : t_From ((t_i16)) ((t_i64)) :=
  {
    From_f_from := fun (x : t_i64)=> Build_t_i16 (Into_f_into (i64_0 x));
  }.
#[global] Instance t_From_1003839356 : t_From ((t_i32)) ((t_i64)) :=
  {
    From_f_from := fun (x : t_i64)=> Build_t_i32 (Into_f_into (i64_0 x));
  }.
#[global] Instance t_From_625109732 : t_From ((t_i128)) ((t_i64)) :=
  {
    From_f_from := fun (x : t_i64)=> Build_t_i128 (Into_f_into (i64_0 x));
  }.
#[global] Instance t_From_34424521 : t_From ((t_i8)) ((t_i128)) :=
  {
    From_f_from := fun (x : t_i128)=> Build_t_i8 (Into_f_into (i128_0 x));
  }.
#[global] Instance t_From_603602239 : t_From ((t_i16)) ((t_i128)) :=
  {
    From_f_from := fun (x : t_i128)=> Build_t_i16 (Into_f_into (i128_0 x));
  }.
#[global] Instance t_From_479038908 : t_From ((t_i32)) ((t_i128)) :=
  {
    From_f_from := fun (x : t_i128)=> Build_t_i32 (Into_f_into (i128_0 x));
  }.
#[global] Instance t_From_299745195 : t_From ((t_i64)) ((t_i128)) :=
  {
    From_f_from := fun (x : t_i128)=> Build_t_i64 (Into_f_into (i128_0 x));
  }.
#[global] Instance t_From_615821455 : t_From ((t_isize)) ((t_i128)) :=
  {
    From_f_from := fun (x : t_i128)=> Build_t_isize (Into_f_into (i128_0 x));
  }.
#[global] Instance t_From_376191918 : t_From ((t_i8)) ((t_isize)) :=
  {
    From_f_from := fun (x : t_isize)=> Build_t_i8 (Into_f_into (isize_0 x));
  }.
#[global] Instance t_From_649927535 : t_From ((t_i16)) ((t_isize)) :=
  {
    From_f_from := fun (x : t_isize)=> Build_t_i16 (Into_f_into (isize_0 x));
  }.
#[global] Instance t_From_395262437 : t_From ((t_i32)) ((t_isize)) :=
  {
    From_f_from := fun (x : t_isize)=> Build_t_i32 (Into_f_into (isize_0 x));
  }.
#[global] Instance t_From_218237752 : t_From ((t_i128)) ((t_isize)) :=
  {
    From_f_from := fun (x : t_isize)=> Build_t_i128 (Into_f_into (isize_0 x));
  }.

(* add_with_overflow: lift both operands to Z, add exactly, concretize back to
   the machine width, and report overflow when the concretized (wrapped) value
   is strictly below the exact mathematical sum. *)
Definition add_with_overflow_i128 (x : t_i128) (y : t_i128) : (t_i128*bool) :=
  let overflow := z_add (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)) in
  let res : t_I128 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_i128 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_i16 (x : t_i16) (y : t_i16) : (t_i16*bool) :=
  let overflow := z_add (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)) in
  let res : t_I16 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_i16 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).
(* Signed add_with_overflow (continued), then exact (unchecked) addition on the
   lifted Z / haxint representations, then checked_add for unsigned types, then
   unsigned add_with_overflow.
   NOTE(review): the signed overflow flag `z_lt (lift res) overflow` only fires
   when the wrapped result is BELOW the exact sum (positive overflow); negative
   overflow would wrap upward and appears not to be flagged — confirm against
   the Concretization semantics. *)
Definition add_with_overflow_i32 (x : t_i32) (y : t_i32) : (t_i32*bool) :=
  let overflow := z_add (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)) in
  let res : t_I32 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_i32 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_i64 (x : t_i64) (y : t_i64) : (t_i64*bool) :=
  let overflow := z_add (Abstraction_f_lift (i64_0 x)) (Abstraction_f_lift (i64_0 y)) in
  let res : t_I64 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_i64 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_i8 (x : t_i8) (y : t_i8) : (t_i8*bool) :=
  let overflow := z_add (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)) in
  let res : t_I8 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_i8 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_isize (x : t_isize) (y : t_isize) : (t_isize*bool) :=
  let overflow := z_add (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)) in
  let res : t_I64 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_isize (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).

(* unchecked_add: exact addition on the abstract representation (Z for signed,
   haxint for unsigned), repackaged without any wrap-around or bounds check —
   panic-freedom (no overflow) is the caller's proof obligation, matching
   Rust's unchecked_add contract. *)
Definition unchecked_add_i128 (x : t_i128) (y : t_i128) : t_i128 :=
  Build_t_i128 (Build_t_I128 (z_add (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)))).
Definition unchecked_add_i16 (x : t_i16) (y : t_i16) : t_i16 :=
  Build_t_i16 (Build_t_I16 (z_add (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)))).
Definition unchecked_add_i32 (x : t_i32) (y : t_i32) : t_i32 :=
  Build_t_i32 (Build_t_I32 (z_add (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)))).
Definition unchecked_add_i64 (x : t_i64) (y : t_i64) : t_i64 :=
  Build_t_i64 (Build_t_I64 (z_add (Abstraction_f_lift (i64_0 x)) (Abstraction_f_lift (i64_0 y)))).
Definition unchecked_add_i8 (x : t_i8) (y : t_i8) : t_i8 :=
  Build_t_i8 (Build_t_I8 (z_add (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)))).
Definition unchecked_add_isize (x : t_isize) (y : t_isize) : t_isize :=
  Build_t_isize (Build_t_I64 (z_add (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)))).
Definition unchecked_add_u128 (x : t_u128) (y : t_u128) : t_u128 :=
  Build_t_u128 (Build_t_U128 (haxint_add (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)))).
Definition unchecked_add_u16 (x : t_u16) (y : t_u16) : t_u16 :=
  Build_t_u16 (Build_t_U16 (haxint_add (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)))).
Definition unchecked_add_u32 (x : t_u32) (y : t_u32) : t_u32 :=
  Build_t_u32 (Build_t_U32 (haxint_add (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)))).
Definition unchecked_add_u64 (x : t_u64) (y : t_u64) : t_u64 :=
  Build_t_u64 (Build_t_U64 (haxint_add (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)))).
Definition unchecked_add_u8 (x : t_u8) (y : t_u8) : t_u8 :=
  Build_t_u8 (Build_t_U8 (haxint_add (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)))).
Definition unchecked_add_usize (x : t_usize) (y : t_usize) : t_usize :=
  Build_t_usize (Build_t_U64 (haxint_add (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)))).

(* checked_add for unsigned types.
   NOTE(review): these always return Option_Some and never None — overflow is
   not reported, unlike Rust's checked_add.  Presumably acceptable because the
   unchecked exact sum cannot overflow in the abstract model; confirm this is
   the intended semantics for proofs relying on the None case. *)
Definition checked_add268751055 (self : t_u8) (rhs : t_u8) : t_Option ((t_u8)) :=
  Option_Some (unchecked_add_u8 (self) (rhs)).
Definition checked_add132377399 (self : t_u16) (rhs : t_u16) : t_Option ((t_u16)) :=
  Option_Some (unchecked_add_u16 (self) (rhs)).
Definition checked_add985437730 (self : t_u32) (rhs : t_u32) : t_Option ((t_u32)) :=
  Option_Some (unchecked_add_u32 (self) (rhs)).
Definition checked_add586246465 (self : t_u64) (rhs : t_u64) : t_Option ((t_u64)) :=
  Option_Some (unchecked_add_u64 (self) (rhs)).
Definition checked_add218978451 (self : t_u128) (rhs : t_u128) : t_Option ((t_u128)) :=
  Option_Some (unchecked_add_u128 (self) (rhs)).
Definition checked_add984013567 (self : t_usize) (rhs : t_usize) : t_Option ((t_usize)) :=
  Option_Some (unchecked_add_usize (self) (rhs)).

(* Unsigned add_with_overflow: same shape as the signed version, with haxint
   arithmetic; here wrap-around always lowers the value, so haxint_lt res sum
   exactly characterizes overflow. *)
Definition add_with_overflow_u128 (x : t_u128) (y : t_u128) : (t_u128*bool) :=
  let overflow := haxint_add (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)) in
  let res : t_U128 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_u128 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_u16 (x : t_u16) (y : t_u16) : (t_u16*bool) :=
  let overflow := haxint_add (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)) in
  let res : t_U16 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_u16 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_u32 (x : t_u32) (y : t_u32) : (t_u32*bool) :=
  let overflow := haxint_add (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)) in
  let res : t_U32 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_u32 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_u64 (x : t_u64) (y : t_u64) : (t_u64*bool) :=
  let overflow := haxint_add (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)) in
  let res : t_U64 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_u64 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_u8 (x : t_u8) (y : t_u8) : (t_u8*bool) :=
  let overflow := haxint_add (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)) in
  let res : t_U8 := Concretization_f_concretize (Clone_f_clone (overflow)) in
  (Build_t_u8 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).
Definition add_with_overflow_usize (x : t_usize) (y : t_usize) : (t_usize*bool) := let overflow := haxint_add (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)) in let res : t_U64 := Concretization_f_concretize (Clone_f_clone (overflow)) in (Build_t_usize (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)). Definition unchecked_div_u128 (x : t_u128) (y : t_u128) : t_u128 := Build_t_u128 (Build_t_U128 (haxint_div (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)))). Definition unchecked_div_u16 (x : t_u16) (y : t_u16) : t_u16 := Build_t_u16 (Build_t_U16 (haxint_div (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)))). Definition unchecked_div_u32 (x : t_u32) (y : t_u32) : t_u32 := Build_t_u32 (Build_t_U32 (haxint_div (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)))). Definition unchecked_div_u64 (x : t_u64) (y : t_u64) : t_u64 := Build_t_u64 (Build_t_U64 (haxint_div (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)))). Definition unchecked_div_u8 (x : t_u8) (y : t_u8) : t_u8 := Build_t_u8 (Build_t_U8 (haxint_div (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)))). Definition unchecked_div_usize (x : t_usize) (y : t_usize) : t_usize := Build_t_usize (Build_t_U64 (haxint_div (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)))). Definition wrapping_add_i128 (a : t_i128) (b : t_i128) : t_i128 := Build_t_i128 (Add_f_add (i128_0 a) (i128_0 b)). Definition wrapping_add_i16 (a : t_i16) (b : t_i16) : t_i16 := Build_t_i16 (Add_f_add (i16_0 a) (i16_0 b)). Definition wrapping_add_i32 (a : t_i32) (b : t_i32) : t_i32 := Build_t_i32 (Add_f_add (i32_0 a) (i32_0 b)). Definition wrapping_add_i64 (a : t_i64) (b : t_i64) : t_i64 := Build_t_i64 (Add_f_add (i64_0 a) (i64_0 b)). Definition wrapping_add_i8 (a : t_i8) (b : t_i8) : t_i8 := Build_t_i8 (Add_f_add (i8_0 a) (i8_0 b)). 
Definition wrapping_add_isize (a : t_isize) (b : t_isize) : t_isize := Build_t_isize (Add_f_add (isize_0 a) (isize_0 b)).
(* [wrapping_sub_i*]: wrapping subtraction via the inner [Sub_f_sub]. *)
Definition wrapping_sub_i128 (a : t_i128) (b : t_i128) : t_i128 := Build_t_i128 (Sub_f_sub (i128_0 a) (i128_0 b)).
Definition wrapping_sub_i16 (a : t_i16) (b : t_i16) : t_i16 := Build_t_i16 (Sub_f_sub (i16_0 a) (i16_0 b)).
Definition wrapping_sub_i32 (a : t_i32) (b : t_i32) : t_i32 := Build_t_i32 (Sub_f_sub (i32_0 a) (i32_0 b)).
Definition wrapping_sub_i64 (a : t_i64) (b : t_i64) : t_i64 := Build_t_i64 (Sub_f_sub (i64_0 a) (i64_0 b)).
Definition wrapping_sub_i8 (a : t_i8) (b : t_i8) : t_i8 := Build_t_i8 (Sub_f_sub (i8_0 a) (i8_0 b)).
Definition wrapping_sub_isize (a : t_isize) (b : t_isize) : t_isize := Build_t_isize (Sub_f_sub (isize_0 a) (isize_0 b)).
(* Hash-suffixed method wrappers (one quadruple per signed width):
   wrapping_add / wrapping_sub delegate to the helpers above;
   wrapping_neg is 0 - self; wrapping_abs negates iff negative
   (so MIN stays MIN under wrapping, matching Rust's wrapping_abs). *)
Definition wrapping_add634491935 (self : t_i8) (rhs : t_i8) : t_i8 := wrapping_add_i8 (self) (rhs).
Definition wrapping_sub973428293 (self : t_i8) (rhs : t_i8) : t_i8 := wrapping_sub_i8 (self) (rhs).
Definition wrapping_neg400701205 (self : t_i8) : t_i8 := wrapping_sub973428293 (Into_f_into (0)) (self).
Definition wrapping_abs400396545 (self : t_i8) : t_i8 := if is_negative350273175 (Clone_f_clone (self)) then wrapping_neg400701205 (self) else self.
Definition wrapping_add868559108 (self : t_i16) (rhs : t_i16) : t_i16 := wrapping_add_i16 (self) (rhs).
Definition wrapping_sub189469152 (self : t_i16) (rhs : t_i16) : t_i16 := wrapping_sub_i16 (self) (rhs).
Definition wrapping_neg860505723 (self : t_i16) : t_i16 := wrapping_sub189469152 (Into_f_into (0)) (self).
Definition wrapping_abs229076826 (self : t_i16) : t_i16 := if is_negative477067241 (Clone_f_clone (self)) then wrapping_neg860505723 (self) else self.
Definition wrapping_add475006616 (self : t_i32) (rhs : t_i32) : t_i32 := wrapping_add_i32 (self) (rhs).
Definition wrapping_sub298337071 (self : t_i32) (rhs : t_i32) : t_i32 := wrapping_sub_i32 (self) (rhs).
Definition wrapping_neg636433078 (self : t_i32) : t_i32 := wrapping_sub298337071 (Into_f_into (0)) (self).
Definition wrapping_abs729536875 (self : t_i32) : t_i32 := if is_negative1035644813 (Clone_f_clone (self)) then wrapping_neg636433078 (self) else self.
Definition wrapping_add590074241 (self : t_i64) (rhs : t_i64) : t_i64 := wrapping_add_i64 (self) (rhs).
Definition wrapping_sub334584751 (self : t_i64) (rhs : t_i64) : t_i64 := wrapping_sub_i64 (self) (rhs).
Definition wrapping_neg868282938 (self : t_i64) : t_i64 := wrapping_sub334584751 (Into_f_into (0)) (self).
Definition wrapping_abs285829312 (self : t_i64) : t_i64 := if is_negative1066124578 (Clone_f_clone (self)) then wrapping_neg868282938 (self) else self.
Definition wrapping_add251385439 (self : t_i128) (rhs : t_i128) : t_i128 := wrapping_add_i128 (self) (rhs).
Definition wrapping_sub681598071 (self : t_i128) (rhs : t_i128) : t_i128 := wrapping_sub_i128 (self) (rhs).
Definition wrapping_neg446546984 (self : t_i128) : t_i128 := wrapping_sub681598071 (Into_f_into (0)) (self).
Definition wrapping_abs281925696 (self : t_i128) : t_i128 := if is_negative221698470 (Clone_f_clone (self)) then wrapping_neg446546984 (self) else self.
Definition wrapping_add226040243 (self : t_isize) (rhs : t_isize) : t_isize := wrapping_add_isize (self) (rhs).
Definition wrapping_sub698035192 (self : t_isize) (rhs : t_isize) : t_isize := wrapping_sub_isize (self) (rhs).
Definition wrapping_neg912291768 (self : t_isize) : t_isize := wrapping_sub698035192 (Into_f_into (0)) (self).
Definition wrapping_abs347300819 (self : t_isize) : t_isize := if is_negative693446369 (Clone_f_clone (self)) then wrapping_neg912291768 (self) else self.
(* Injections of raw binary naturals (N) into the unsigned wrappers,
   used for numeric literals like [Into_f_into (0%N)].
   NOTE(review): the instance names say "into" but the class is
   [t_From] — a naming inconsistency in the generator; harmless, since
   instances are usually found by resolution, not by name. *)
#[global] Instance f_into_t_u8 : t_From t_u8 N := { From_f_from (x : N) := Build_t_u8 (Build_t_U8 x) }.
#[global] Instance f_into_t_u16 : t_From t_u16 N := { From_f_from (x : N) := Build_t_u16 (Build_t_U16 x) }.
#[global] Instance f_into_t_u32 : t_From t_u32 N := { From_f_from (x : N) := Build_t_u32 (Build_t_U32 x) }. #[global] Instance f_into_t_u64 : t_From t_u64 N := { From_f_from (x : N) := Build_t_u64 (Build_t_U64 x) }. #[global] Instance f_into_t_u128 : t_From t_u128 N := { From_f_from (x : N) := Build_t_u128 (Build_t_U128 x) }. #[global] Instance f_into_t_usize : t_From t_usize N := { From_f_from (x : N) := Build_t_usize (Build_t_U64 x) }. Definition checked_div508301931 (self : t_u8) (rhs : t_u8) : t_Option ((t_u8)) := if PartialEq_f_eq (rhs) (Into_f_into 0%N) then Option_None else Option_Some (unchecked_div_u8 (self) (rhs)). Definition overflowing_add708890057 (self : t_u8) (rhs : t_u8) : (t_u8*bool) := add_with_overflow_u8 (self) (rhs). Definition checked_div614920780 (self : t_u16) (rhs : t_u16) : t_Option ((t_u16)) := if PartialEq_f_eq (rhs) (Into_f_into (0%N)) then Option_None else Option_Some (unchecked_div_u16 (self) (rhs)). Definition overflowing_add1023344178 (self : t_u16) (rhs : t_u16) : (t_u16*bool) := add_with_overflow_u16 (self) (rhs). Definition checked_div979383477 (self : t_u32) (rhs : t_u32) : t_Option ((t_u32)) := if PartialEq_f_eq (rhs) (Into_f_into (0%N)) then Option_None else Option_Some (unchecked_div_u32 (self) (rhs)). Definition overflowing_add905744292 (self : t_u32) (rhs : t_u32) : (t_u32*bool) := add_with_overflow_u32 (self) (rhs). Definition checked_div988689127 (self : t_u64) (rhs : t_u64) : t_Option ((t_u64)) := if PartialEq_f_eq (rhs) (Into_f_into (0%N)) then Option_None else Option_Some (unchecked_div_u64 (self) (rhs)). Definition overflowing_add581983607 (self : t_u64) (rhs : t_u64) : (t_u64*bool) := add_with_overflow_u64 (self) (rhs). Definition checked_div344106746 (self : t_u128) (rhs : t_u128) : t_Option ((t_u128)) := if PartialEq_f_eq (rhs) (Into_f_into (0%N)) then Option_None else Option_Some (unchecked_div_u128 (self) (rhs)). 
Definition overflowing_add458293681 (self : t_u128) (rhs : t_u128) : (t_u128*bool) := add_with_overflow_u128 (self) (rhs). Definition checked_div80223906 (self : t_usize) (rhs : t_usize) : t_Option ((t_usize)) := if PartialEq_f_eq (rhs) (Into_f_into (0%N)) then Option_None else Option_Some (unchecked_div_usize (self) (rhs)). Definition overflowing_add682280407 (self : t_usize) (rhs : t_usize) : (t_usize*bool) := add_with_overflow_usize (self) (rhs). Check t_Neg. #[global] Instance t_Neg_125588538 : t_Neg ((t_i8)) := { Neg_f_Output := t_i8; Neg_f_neg := fun (self : t_i8)=> Build_t_i8 (Neg_f_neg (i8_0 self)); }. Definition abs945505614 (self : t_i8) : t_i8 := if is_negative350273175 (Clone_f_clone (self)) then Neg_f_neg (self) else self. #[global] Instance t_Neg_977573626 : t_Neg ((t_i16)) := { Neg_f_Output := t_i16; Neg_f_neg := fun (self : t_i16)=> Build_t_i16 (Neg_f_neg (i16_0 self)); }. Definition abs581170970 (self : t_i16) : t_i16 := if is_negative477067241 (Clone_f_clone (self)) then Neg_f_neg (self) else self. #[global] Instance t_Neg_289824503 : t_Neg ((t_i32)) := { Neg_f_Output := t_i32; Neg_f_neg := fun (self : t_i32)=> Build_t_i32 (Neg_f_neg (i32_0 self)); }. Definition abs590464694 (self : t_i32) : t_i32 := if is_negative1035644813 (Clone_f_clone (self)) then Neg_f_neg (self) else self. #[global] Instance t_Neg_895800448 : t_Neg ((t_i64)) := { Neg_f_Output := t_i64; Neg_f_neg := fun (self : t_i64)=> Build_t_i64 (Neg_f_neg (i64_0 self)); }. Definition abs654781043 (self : t_i64) : t_i64 := if is_negative1066124578 (Clone_f_clone (self)) then Neg_f_neg (self) else self. #[global] Instance t_Neg_830237431 : t_Neg ((t_i128)) := { Neg_f_Output := t_i128; Neg_f_neg := fun (self : t_i128)=> Build_t_i128 (Neg_f_neg (i128_0 self)); }. Definition abs204417539 (self : t_i128) : t_i128 := if is_negative221698470 (Clone_f_clone (self)) then Neg_f_neg (self) else self. 
(* isize completes the t_Neg family. *)
#[global] Instance t_Neg_693499423 : t_Neg ((t_isize)) := { Neg_f_Output := t_isize; Neg_f_neg := fun (self : t_isize)=> Build_t_isize (Neg_f_neg (isize_0 self)); }.
Definition abs220926056 (self : t_isize) : t_isize := if is_negative693446369 (Clone_f_clone (self)) then Neg_f_neg (self) else self.
(* [t_BitOr] instances for the signed wrappers: unwrap, delegate to the
   inner [BitOr_f_bitor], re-wrap. *)
#[global] Instance t_BitOr_174929276 : t_BitOr ((t_i8)) ((t_i8)) := { BitOr_f_Output := t_i8; BitOr_f_bitor := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (BitOr_f_bitor (i8_0 self) (i8_0 other)); }.
#[global] Instance t_BitOr_162600380 : t_BitOr ((t_i16)) ((t_i16)) := { BitOr_f_Output := t_i16; BitOr_f_bitor := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (BitOr_f_bitor (i16_0 self) (i16_0 other)); }.
#[global] Instance t_BitOr_64689421 : t_BitOr ((t_i32)) ((t_i32)) := { BitOr_f_Output := t_i32; BitOr_f_bitor := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (BitOr_f_bitor (i32_0 self) (i32_0 other)); }.
#[global] Instance t_BitOr_348780956 : t_BitOr ((t_i64)) ((t_i64)) := { BitOr_f_Output := t_i64; BitOr_f_bitor := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (BitOr_f_bitor (i64_0 self) (i64_0 other)); }.
#[global] Instance t_BitOr_643690063 : t_BitOr ((t_i128)) ((t_i128)) := { BitOr_f_Output := t_i128; BitOr_f_bitor := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (BitOr_f_bitor (i128_0 self) (i128_0 other)); }.
#[global] Instance t_BitOr_1027404433 : t_BitOr ((t_isize)) ((t_isize)) := { BitOr_f_Output := t_isize; BitOr_f_bitor := fun (self : t_isize) (other : t_isize)=> Build_t_isize (BitOr_f_bitor (isize_0 self) (isize_0 other)); }.
(* [t_From] conversion matrix between unsigned wrappers, delegating to
   [Into_f_into] on the inner bounded types.
   NOTE(review): narrowing directions (e.g. u8 <- u16, u8 <- u128) are
   modeled as [t_From] too, whereas Rust only provides TryFrom for
   narrowing; presumably the inner [Into_f_into] truncates or carries a
   side condition — confirm against the bounded-type conversions. *)
#[global] Instance t_From_124503227 : t_From ((t_u16)) ((t_u8)) := { From_f_from := fun (x : t_u8)=> Build_t_u16 (Into_f_into (u8_0 x)); }.
#[global] Instance t_From_499390246 : t_From ((t_u32)) ((t_u8)) := { From_f_from := fun (x : t_u8)=> Build_t_u32 (Into_f_into (u8_0 x)); }.
#[global] Instance t_From_1040523499 : t_From ((t_u64)) ((t_u8)) := { From_f_from := fun (x : t_u8)=> Build_t_u64 (Into_f_into (u8_0 x)); }.
#[global] Instance t_From_827336555 : t_From ((t_u128)) ((t_u8)) := { From_f_from := fun (x : t_u8)=> Build_t_u128 (Into_f_into (u8_0 x)); }.
#[global] Instance t_From_1002852925 : t_From ((t_usize)) ((t_u8)) := { From_f_from := fun (x : t_u8)=> Build_t_usize (Into_f_into (u8_0 x)); }.
#[global] Instance t_From_476851440 : t_From ((t_u8)) ((t_u16)) := { From_f_from := fun (x : t_u16)=> Build_t_u8 (Into_f_into (u16_0 x)); }.
#[global] Instance t_From_590504350 : t_From ((t_u32)) ((t_u16)) := { From_f_from := fun (x : t_u16)=> Build_t_u32 (Into_f_into (u16_0 x)); }.
#[global] Instance t_From_786143320 : t_From ((t_u64)) ((t_u16)) := { From_f_from := fun (x : t_u16)=> Build_t_u64 (Into_f_into (u16_0 x)); }.
#[global] Instance t_From_98507156 : t_From ((t_u128)) ((t_u16)) := { From_f_from := fun (x : t_u16)=> Build_t_u128 (Into_f_into (u16_0 x)); }.
#[global] Instance t_From_427149512 : t_From ((t_usize)) ((t_u16)) := { From_f_from := fun (x : t_u16)=> Build_t_usize (Into_f_into (u16_0 x)); }.
#[global] Instance t_From_306676060 : t_From ((t_u8)) ((t_u32)) := { From_f_from := fun (x : t_u32)=> Build_t_u8 (Into_f_into (u32_0 x)); }.
#[global] Instance t_From_55624543 : t_From ((t_u16)) ((t_u32)) := { From_f_from := fun (x : t_u32)=> Build_t_u16 (Into_f_into (u32_0 x)); }.
#[global] Instance t_From_863285405 : t_From ((t_u64)) ((t_u32)) := { From_f_from := fun (x : t_u32)=> Build_t_u64 (Into_f_into (u32_0 x)); }.
#[global] Instance t_From_675130423 : t_From ((t_u128)) ((t_u32)) := { From_f_from := fun (x : t_u32)=> Build_t_u128 (Into_f_into (u32_0 x)); }.
#[global] Instance t_From_295642421 : t_From ((t_usize)) ((t_u32)) := { From_f_from := fun (x : t_u32)=> Build_t_usize (Into_f_into (u32_0 x)); }.
#[global] Instance t_From_690942554 : t_From ((t_u8)) ((t_u64)) := { From_f_from := fun (x : t_u64)=> Build_t_u8 (Into_f_into (u64_0 x)); }.
#[global] Instance t_From_956877210 : t_From ((t_u16)) ((t_u64)) := { From_f_from := fun (x : t_u64)=> Build_t_u16 (Into_f_into (u64_0 x)); }.
#[global] Instance t_From_124072492 : t_From ((t_u32)) ((t_u64)) := { From_f_from := fun (x : t_u64)=> Build_t_u32 (Into_f_into (u64_0 x)); }.
#[global] Instance t_From_882228220 : t_From ((t_u128)) ((t_u64)) := { From_f_from := fun (x : t_u64)=> Build_t_u128 (Into_f_into (u64_0 x)); }.
#[global] Instance t_From_1060762174 : t_From ((t_u8)) ((t_u128)) := { From_f_from := fun (x : t_u128)=> Build_t_u8 (Into_f_into (u128_0 x)); }.
#[global] Instance t_From_437123664 : t_From ((t_u16)) ((t_u128)) := { From_f_from := fun (x : t_u128)=> Build_t_u16 (Into_f_into (u128_0 x)); }.
#[global] Instance t_From_685712174 : t_From ((t_u32)) ((t_u128)) := { From_f_from := fun (x : t_u128)=> Build_t_u32 (Into_f_into (u128_0 x)); }.
#[global] Instance t_From_239215567 : t_From ((t_u64)) ((t_u128)) := { From_f_from := fun (x : t_u128)=> Build_t_u64 (Into_f_into (u128_0 x)); }.
#[global] Instance t_From_583993496 : t_From ((t_usize)) ((t_u128)) := { From_f_from := fun (x : t_u128)=> Build_t_usize (Into_f_into (u128_0 x)); }.
#[global] Instance t_From_1069835847 : t_From ((t_u8)) ((t_usize)) := { From_f_from := fun (x : t_usize)=> Build_t_u8 (Into_f_into (usize_0 x)); }.
#[global] Instance t_From_976343396 : t_From ((t_u16)) ((t_usize)) := { From_f_from := fun (x : t_usize)=> Build_t_u16 (Into_f_into (usize_0 x)); }.
#[global] Instance t_From_448121712 : t_From ((t_u32)) ((t_usize)) := { From_f_from := fun (x : t_usize)=> Build_t_u32 (Into_f_into (usize_0 x)); }.
#[global] Instance t_From_448032498 : t_From ((t_u128)) ((t_usize)) := { From_f_from := fun (x : t_usize)=> Build_t_u128 (Into_f_into (usize_0 x)); }.
(* NOTE(review): no u64 <-> usize conversion instances appear in this
   chunk even though both exist for every other pair — possibly defined
   elsewhere; worth confirming. *)
(* [unchecked_div_i*]: signed division in Z via [z_div]; divisor-nonzero
   (and MIN / -1 non-overflow) are caller-side obligations. *)
Definition unchecked_div_i128 (x : t_i128) (y : t_i128) : t_i128 := Build_t_i128 (Build_t_I128 (z_div (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)))).
(* Remaining signed unchecked divisions (isize shares the 64-bit payload). *)
Definition unchecked_div_i16 (x : t_i16) (y : t_i16) : t_i16 := Build_t_i16 (Build_t_I16 (z_div (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)))).
Definition unchecked_div_i32 (x : t_i32) (y : t_i32) : t_i32 := Build_t_i32 (Build_t_I32 (z_div (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)))).
Definition unchecked_div_i64 (x : t_i64) (y : t_i64) : t_i64 := Build_t_i64 (Build_t_I64 (z_div (Abstraction_f_lift (i64_0 x)) (Abstraction_f_lift (i64_0 y)))).
Definition unchecked_div_i8 (x : t_i8) (y : t_i8) : t_i8 := Build_t_i8 (Build_t_I8 (z_div (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)))).
Definition unchecked_div_isize (x : t_isize) (y : t_isize) : t_isize := Build_t_isize (Build_t_I64 (z_div (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)))).
(* Wrapping add/mul for unsigned and signed wrappers: delegate to the
   inner type's Add_f_add / Mul_f_mul, where modular semantics live. *)
Definition wrapping_add_u128 (a : t_u128) (b : t_u128) : t_u128 := Build_t_u128 (Add_f_add (u128_0 a) (u128_0 b)).
Definition wrapping_add_u16 (a : t_u16) (b : t_u16) : t_u16 := Build_t_u16 (Add_f_add (u16_0 a) (u16_0 b)).
Definition wrapping_add_u32 (a : t_u32) (b : t_u32) : t_u32 := Build_t_u32 (Add_f_add (u32_0 a) (u32_0 b)).
Definition wrapping_add_u64 (a : t_u64) (b : t_u64) : t_u64 := Build_t_u64 (Add_f_add (u64_0 a) (u64_0 b)).
Definition wrapping_add_u8 (a : t_u8) (b : t_u8) : t_u8 := Build_t_u8 (Add_f_add (u8_0 a) (u8_0 b)).
Definition wrapping_add_usize (a : t_usize) (b : t_usize) : t_usize := Build_t_usize (Add_f_add (usize_0 a) (usize_0 b)).
Definition wrapping_mul_i128 (a : t_i128) (b : t_i128) : t_i128 := Build_t_i128 (Mul_f_mul (i128_0 a) (i128_0 b)).
Definition wrapping_mul_i16 (a : t_i16) (b : t_i16) : t_i16 := Build_t_i16 (Mul_f_mul (i16_0 a) (i16_0 b)).
Definition wrapping_mul_i32 (a : t_i32) (b : t_i32) : t_i32 := Build_t_i32 (Mul_f_mul (i32_0 a) (i32_0 b)).
Definition wrapping_mul_i64 (a : t_i64) (b : t_i64) : t_i64 := Build_t_i64 (Mul_f_mul (i64_0 a) (i64_0 b)).
Definition wrapping_mul_i8 (a : t_i8) (b : t_i8) : t_i8 := Build_t_i8 (Mul_f_mul (i8_0 a) (i8_0 b)).
Definition wrapping_mul_isize (a : t_isize) (b : t_isize) : t_isize := Build_t_isize (Mul_f_mul (isize_0 a) (isize_0 b)).
Definition wrapping_mul_u128 (a : t_u128) (b : t_u128) : t_u128 := Build_t_u128 (Mul_f_mul (u128_0 a) (u128_0 b)).
Definition wrapping_mul_u16 (a : t_u16) (b : t_u16) : t_u16 := Build_t_u16 (Mul_f_mul (u16_0 a) (u16_0 b)).
Definition wrapping_mul_u32 (a : t_u32) (b : t_u32) : t_u32 := Build_t_u32 (Mul_f_mul (u32_0 a) (u32_0 b)).
Definition wrapping_mul_u64 (a : t_u64) (b : t_u64) : t_u64 := Build_t_u64 (Mul_f_mul (u64_0 a) (u64_0 b)).
Definition wrapping_mul_u8 (a : t_u8) (b : t_u8) : t_u8 := Build_t_u8 (Mul_f_mul (u8_0 a) (u8_0 b)).
Definition wrapping_mul_usize (a : t_usize) (b : t_usize) : t_usize := Build_t_usize (Mul_f_mul (usize_0 a) (usize_0 b)).
(* Hash-suffixed method wrappers over the helpers above (unsigned). *)
Definition wrapping_add480603777 (self : t_u8) (rhs : t_u8) : t_u8 := wrapping_add_u8 (self) (rhs).
Definition wrapping_mul885216284 (self : t_u8) (rhs : t_u8) : t_u8 := wrapping_mul_u8 (self) (rhs).
Definition wrapping_add124432709 (self : t_u16) (rhs : t_u16) : t_u16 := wrapping_add_u16 (self) (rhs).
Definition wrapping_mul14465189 (self : t_u16) (rhs : t_u16) : t_u16 := wrapping_mul_u16 (self) (rhs).
Definition wrapping_add1049665857 (self : t_u32) (rhs : t_u32) : t_u32 := wrapping_add_u32 (self) (rhs).
Definition wrapping_mul203346768 (self : t_u32) (rhs : t_u32) : t_u32 := wrapping_mul_u32 (self) (rhs).
Definition wrapping_add865565639 (self : t_u64) (rhs : t_u64) : t_u64 := wrapping_add_u64 (self) (rhs).
Definition wrapping_mul742978873 (self : t_u64) (rhs : t_u64) : t_u64 := wrapping_mul_u64 (self) (rhs).
Definition wrapping_add40844100 (self : t_u128) (rhs : t_u128) : t_u128 := wrapping_add_u128 (self) (rhs).
Definition wrapping_mul294115024 (self : t_u128) (rhs : t_u128) : t_u128 := wrapping_mul_u128 (self) (rhs).
Definition wrapping_add427637036 (self : t_usize) (rhs : t_usize) : t_usize := wrapping_add_usize (self) (rhs).
Definition wrapping_mul680896953 (self : t_usize) (rhs : t_usize) : t_usize := wrapping_mul_usize (self) (rhs).
(* [t_Add]/[t_Sub] instances for the signed wrappers: unwrap,
   delegate to the inner operation, re-wrap. *)
#[global] Instance t_Add_695878175 : t_Add ((t_i8)) ((t_i8)) := { Add_f_Output := t_i8; Add_f_add := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (Add_f_add (i8_0 self) (i8_0 other)); }.
#[global] Instance t_Add_877139857 : t_Add ((t_i16)) ((t_i16)) := { Add_f_Output := t_i16; Add_f_add := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (Add_f_add (i16_0 self) (i16_0 other)); }.
#[global] Instance t_Add_426581780 : t_Add ((t_i32)) ((t_i32)) := { Add_f_Output := t_i32; Add_f_add := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (Add_f_add (i32_0 self) (i32_0 other)); }.
#[global] Instance t_Add_113633409 : t_Add ((t_i64)) ((t_i64)) := { Add_f_Output := t_i64; Add_f_add := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (Add_f_add (i64_0 self) (i64_0 other)); }.
#[global] Instance t_Add_788236527 : t_Add ((t_i128)) ((t_i128)) := { Add_f_Output := t_i128; Add_f_add := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (Add_f_add (i128_0 self) (i128_0 other)); }.
#[global] Instance t_Add_247333017 : t_Add ((t_isize)) ((t_isize)) := { Add_f_Output := t_isize; Add_f_add := fun (self : t_isize) (other : t_isize)=> Build_t_isize (Add_f_add (isize_0 self) (isize_0 other)); }.
#[global] Instance t_Sub_756206062 : t_Sub ((t_i8)) ((t_i8)) := { Sub_f_Output := t_i8; Sub_f_sub := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (Sub_f_sub (i8_0 self) (i8_0 other)); }.
#[global] Instance t_Sub_618838212 : t_Sub ((t_i16)) ((t_i16)) := { Sub_f_Output := t_i16; Sub_f_sub := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (Sub_f_sub (i16_0 self) (i16_0 other)); }.
#[global] Instance t_Sub_44574118 : t_Sub ((t_i32)) ((t_i32)) := { Sub_f_Output := t_i32; Sub_f_sub := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (Sub_f_sub (i32_0 self) (i32_0 other)); }.
(* Tail of the signed t_Sub instances. *)
#[global] Instance t_Sub_287793174 : t_Sub ((t_i64)) ((t_i64)) := { Sub_f_Output := t_i64; Sub_f_sub := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (Sub_f_sub (i64_0 self) (i64_0 other)); }.
#[global] Instance t_Sub_837338145 : t_Sub ((t_i128)) ((t_i128)) := { Sub_f_Output := t_i128; Sub_f_sub := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (Sub_f_sub (i128_0 self) (i128_0 other)); }.
#[global] Instance t_Sub_22961567 : t_Sub ((t_isize)) ((t_isize)) := { Sub_f_Output := t_isize; Sub_f_sub := fun (self : t_isize) (other : t_isize)=> Build_t_isize (Sub_f_sub (isize_0 self) (isize_0 other)); }.
(* Unsigned wrapping subtraction helpers. *)
Definition wrapping_sub_u128 (a : t_u128) (b : t_u128) : t_u128 := Build_t_u128 (Sub_f_sub (u128_0 a) (u128_0 b)).
Definition wrapping_sub_u16 (a : t_u16) (b : t_u16) : t_u16 := Build_t_u16 (Sub_f_sub (u16_0 a) (u16_0 b)).
Definition wrapping_sub_u32 (a : t_u32) (b : t_u32) : t_u32 := Build_t_u32 (Sub_f_sub (u32_0 a) (u32_0 b)).
Definition wrapping_sub_u64 (a : t_u64) (b : t_u64) : t_u64 := Build_t_u64 (Sub_f_sub (u64_0 a) (u64_0 b)).
Definition wrapping_sub_u8 (a : t_u8) (b : t_u8) : t_u8 := Build_t_u8 (Sub_f_sub (u8_0 a) (u8_0 b)).
Definition wrapping_sub_usize (a : t_usize) (b : t_usize) : t_usize := Build_t_usize (Sub_f_sub (usize_0 a) (usize_0 b)).
(* Per-width method pairs: wrapping_sub delegates to the helper;
   wrapping_neg is 0 - self, with the zero built directly from the
   inner type's [Constants_f_ZERO] (contrast the signed variants above,
   which go through [Into_f_into (0)]). *)
Definition wrapping_sub403906422 (self : t_u8) (rhs : t_u8) : t_u8 := wrapping_sub_u8 (self) (rhs).
Definition wrapping_neg123212788 (self : t_u8) : t_u8 := wrapping_sub403906422 (Build_t_u8 (Constants_f_ZERO)) (self).
Definition wrapping_sub811251034 (self : t_u16) (rhs : t_u16) : t_u16 := wrapping_sub_u16 (self) (rhs).
Definition wrapping_neg128555595 (self : t_u16) : t_u16 := wrapping_sub811251034 (Build_t_u16 (Constants_f_ZERO)) (self).
Definition wrapping_sub708953500 (self : t_u32) (rhs : t_u32) : t_u32 := wrapping_sub_u32 (self) (rhs).
Definition wrapping_neg328220773 (self : t_u32) : t_u32 := wrapping_sub708953500 (Build_t_u32 (Constants_f_ZERO)) (self).
Definition wrapping_sub762520851 (self : t_u64) (rhs : t_u64) : t_u64 := wrapping_sub_u64 (self) (rhs).
Definition wrapping_neg617136337 (self : t_u64) : t_u64 := wrapping_sub762520851 (Build_t_u64 (Constants_f_ZERO)) (self).
Definition wrapping_sub409310259 (self : t_u128) (rhs : t_u128) : t_u128 := wrapping_sub_u128 (self) (rhs).
Definition wrapping_neg729451428 (self : t_u128) : t_u128 := wrapping_sub409310259 (Build_t_u128 (Constants_f_ZERO)) (self).
Definition wrapping_sub813101882 (self : t_usize) (rhs : t_usize) : t_usize := wrapping_sub_usize (self) (rhs).
Definition wrapping_neg342773446 (self : t_usize) : t_usize := wrapping_sub813101882 (Build_t_usize (Constants_f_ZERO)) (self).
(* [t_Add] instances for the unsigned wrappers. *)
#[global] Instance t_Add_63222257 : t_Add ((t_u8)) ((t_u8)) := { Add_f_Output := t_u8; Add_f_add := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Add_f_add (u8_0 self) (u8_0 other)); }.
#[global] Instance t_Add_568595401 : t_Add ((t_u16)) ((t_u16)) := { Add_f_Output := t_u16; Add_f_add := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Add_f_add (u16_0 self) (u16_0 other)); }.
#[global] Instance t_Add_99427071 : t_Add ((t_u32)) ((t_u32)) := { Add_f_Output := t_u32; Add_f_add := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Add_f_add (u32_0 self) (u32_0 other)); }.
#[global] Instance t_Add_963057404 : t_Add ((t_u64)) ((t_u64)) := { Add_f_Output := t_u64; Add_f_add := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Add_f_add (u64_0 self) (u64_0 other)); }.
#[global] Instance t_Add_258013445 : t_Add ((t_u128)) ((t_u128)) := { Add_f_Output := t_u128; Add_f_add := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Add_f_add (u128_0 self) (u128_0 other)); }.
#[global] Instance t_Add_192585125 : t_Add ((t_usize)) ((t_usize)) := { Add_f_Output := t_usize; Add_f_add := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Add_f_add (usize_0 self) (usize_0 other)); }.
(* [t_Mul] instances: unsigned then signed, all delegating to the inner
   [Mul_f_mul]. *)
#[global] Instance t_Mul_307943337 : t_Mul ((t_u8)) ((t_u8)) := { Mul_f_Output := t_u8; Mul_f_mul := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Mul_f_mul (u8_0 self) (u8_0 other)); }.
#[global] Instance t_Mul_579880302 : t_Mul ((t_u16)) ((t_u16)) := { Mul_f_Output := t_u16; Mul_f_mul := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Mul_f_mul (u16_0 self) (u16_0 other)); }.
#[global] Instance t_Mul_969448321 : t_Mul ((t_u32)) ((t_u32)) := { Mul_f_Output := t_u32; Mul_f_mul := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Mul_f_mul (u32_0 self) (u32_0 other)); }.
#[global] Instance t_Mul_572333733 : t_Mul ((t_u64)) ((t_u64)) := { Mul_f_Output := t_u64; Mul_f_mul := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Mul_f_mul (u64_0 self) (u64_0 other)); }.
#[global] Instance t_Mul_904691459 : t_Mul ((t_u128)) ((t_u128)) := { Mul_f_Output := t_u128; Mul_f_mul := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Mul_f_mul (u128_0 self) (u128_0 other)); }.
#[global] Instance t_Mul_490480124 : t_Mul ((t_usize)) ((t_usize)) := { Mul_f_Output := t_usize; Mul_f_mul := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Mul_f_mul (usize_0 self) (usize_0 other)); }.
#[global] Instance t_Mul_542253756 : t_Mul ((t_i8)) ((t_i8)) := { Mul_f_Output := t_i8; Mul_f_mul := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (Mul_f_mul (i8_0 self) (i8_0 other)); }.
#[global] Instance t_Mul_586956420 : t_Mul ((t_i16)) ((t_i16)) := { Mul_f_Output := t_i16; Mul_f_mul := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (Mul_f_mul (i16_0 self) (i16_0 other)); }.
#[global] Instance t_Mul_622712365 : t_Mul ((t_i32)) ((t_i32)) := { Mul_f_Output := t_i32; Mul_f_mul := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (Mul_f_mul (i32_0 self) (i32_0 other)); }.
#[global] Instance t_Mul_167399285 : t_Mul ((t_i64)) ((t_i64)) := { Mul_f_Output := t_i64; Mul_f_mul := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (Mul_f_mul (i64_0 self) (i64_0 other)); }.
#[global] Instance t_Mul_264435207 : t_Mul ((t_i128)) ((t_i128)) := { Mul_f_Output := t_i128; Mul_f_mul := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (Mul_f_mul (i128_0 self) (i128_0 other)); }.
#[global] Instance t_Mul_9915144 : t_Mul ((t_isize)) ((t_isize)) := { Mul_f_Output := t_isize; Mul_f_mul := fun (self : t_isize) (other : t_isize)=> Build_t_isize (Mul_f_mul (isize_0 self) (isize_0 other)); }.
(* [t_Div] instances plus per-width wrapping_div / wrapping_div_euclid.
   Both wrapping variants are plain division: for unsigned types
   division can never overflow and Euclidean division coincides with
   truncating division, matching Rust's u*::wrapping_div(_euclid). *)
#[global] Instance t_Div_23426959 : t_Div ((t_u8)) ((t_u8)) := { Div_f_Output := t_u8; Div_f_div := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Div_f_div (u8_0 self) (u8_0 other)); }.
Definition wrapping_div660080892 (self : t_u8) (rhs : t_u8) : t_u8 := Div_f_div (self) (rhs).
Definition wrapping_div_euclid481233436 (self : t_u8) (rhs : t_u8) : t_u8 := Div_f_div (self) (rhs).
#[global] Instance t_Div_469212879 : t_Div ((t_u16)) ((t_u16)) := { Div_f_Output := t_u16; Div_f_div := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Div_f_div (u16_0 self) (u16_0 other)); }.
Definition wrapping_div366977334 (self : t_u16) (rhs : t_u16) : t_u16 := Div_f_div (self) (rhs).
Definition wrapping_div_euclid22267888 (self : t_u16) (rhs : t_u16) : t_u16 := Div_f_div (self) (rhs).
#[global] Instance t_Div_248596974 : t_Div ((t_u32)) ((t_u32)) := { Div_f_Output := t_u32; Div_f_div := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Div_f_div (u32_0 self) (u32_0 other)); }.
Definition wrapping_div931150450 (self : t_u32) (rhs : t_u32) : t_u32 := Div_f_div (self) (rhs).
Definition wrapping_div_euclid606291997 (self : t_u32) (rhs : t_u32) : t_u32 := Div_f_div (self) (rhs).
#[global] Instance t_Div_901268642 : t_Div ((t_u64)) ((t_u64)) := { Div_f_Output := t_u64; Div_f_div := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Div_f_div (u64_0 self) (u64_0 other)); }.
Definition wrapping_div168427046 (self : t_u64) (rhs : t_u64) : t_u64 := Div_f_div (self) (rhs).
Definition wrapping_div_euclid321252086 (self : t_u64) (rhs : t_u64) : t_u64 := Div_f_div (self) (rhs).
#[global] Instance t_Div_868602092 : t_Div ((t_u128)) ((t_u128)) := { Div_f_Output := t_u128; Div_f_div := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Div_f_div (u128_0 self) (u128_0 other)); }.
Definition wrapping_div692427683 (self : t_u128) (rhs : t_u128) : t_u128 := Div_f_div (self) (rhs).
Definition wrapping_div_euclid926334515 (self : t_u128) (rhs : t_u128) : t_u128 := Div_f_div (self) (rhs).
#[global] Instance t_Div_740920454 : t_Div ((t_usize)) ((t_usize)) := { Div_f_Output := t_usize; Div_f_div := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Div_f_div (usize_0 self) (usize_0 other)); }.
Definition wrapping_div905768546 (self : t_usize) (rhs : t_usize) : t_usize := Div_f_div (self) (rhs).
Definition wrapping_div_euclid90317722 (self : t_usize) (rhs : t_usize) : t_usize := Div_f_div (self) (rhs).
(* [t_Rem] instances with the analogous wrapping_rem / wrapping_rem_euclid
   pairs (again identical to plain rem for unsigned types). *)
#[global] Instance t_Rem_485335443 : t_Rem ((t_u8)) ((t_u8)) := { Rem_f_Output := t_u8; Rem_f_rem := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Rem_f_rem (u8_0 self) (u8_0 other)); }.
Definition wrapping_rem984569721 (self : t_u8) (rhs : t_u8) : t_u8 := Rem_f_rem (self) (rhs).
Definition wrapping_rem_euclid946579345 (self : t_u8) (rhs : t_u8) : t_u8 := Rem_f_rem (self) (rhs).
#[global] Instance t_Rem_780488465 : t_Rem ((t_u16)) ((t_u16)) := { Rem_f_Output := t_u16; Rem_f_rem := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Rem_f_rem (u16_0 self) (u16_0 other)); }.
Definition wrapping_rem378598035 (self : t_u16) (rhs : t_u16) : t_u16 := Rem_f_rem (self) (rhs).
Definition wrapping_rem_euclid602402638 (self : t_u16) (rhs : t_u16) : t_u16 := Rem_f_rem (self) (rhs).
#[global] Instance t_Rem_734014529 : t_Rem ((t_u32)) ((t_u32)) := { Rem_f_Output := t_u32; Rem_f_rem := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Rem_f_rem (u32_0 self) (u32_0 other)); }.
Definition wrapping_rem292009099 (self : t_u32) (rhs : t_u32) : t_u32 := Rem_f_rem (self) (rhs).
Definition wrapping_rem_euclid1020271291 (self : t_u32) (rhs : t_u32) : t_u32 := Rem_f_rem (self) (rhs). #[global] Instance t_Rem_455480749 : t_Rem ((t_u64)) ((t_u64)) := { Rem_f_Output := t_u64; Rem_f_rem := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Rem_f_rem (u64_0 self) (u64_0 other)); }. Definition wrapping_rem390602260 (self : t_u64) (rhs : t_u64) : t_u64 := Rem_f_rem (self) (rhs). Definition wrapping_rem_euclid839264546 (self : t_u64) (rhs : t_u64) : t_u64 := Rem_f_rem (self) (rhs). #[global] Instance t_Rem_412060686 : t_Rem ((t_u128)) ((t_u128)) := { Rem_f_Output := t_u128; Rem_f_rem := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Rem_f_rem (u128_0 self) (u128_0 other)); }. Definition wrapping_rem332379920 (self : t_u128) (rhs : t_u128) : t_u128 := Rem_f_rem (self) (rhs). Definition wrapping_rem_euclid646122423 (self : t_u128) (rhs : t_u128) : t_u128 := Rem_f_rem (self) (rhs). #[global] Instance t_Rem_796467486 : t_Rem ((t_usize)) ((t_usize)) := { Rem_f_Output := t_usize; Rem_f_rem := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Rem_f_rem (usize_0 self) (usize_0 other)); }. Definition wrapping_rem333089373 (self : t_usize) (rhs : t_usize) : t_usize := Rem_f_rem (self) (rhs). Definition wrapping_rem_euclid769656504 (self : t_usize) (rhs : t_usize) : t_usize := Rem_f_rem (self) (rhs). #[global] Instance t_Shr_1061808511 : t_Shr ((t_u8)) ((t_u8)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Shr_f_shr (u8_0 self) (u8_0 other)); }. #[global] Instance t_Shr_590944100 : t_Shr ((t_u8)) ((t_u16)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_u16)=> Build_t_u8 (Shr_f_shr (u8_0 self) (u16_0 other)); }. #[global] Instance t_Shr_267395304 : t_Shr ((t_u8)) ((t_u32)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_u32)=> Build_t_u8 (Shr_f_shr (u8_0 self) (u32_0 other)); }. 
(* Shr (right-shift) instances for every (value-type, shift-type) pair over the
   unsigned wrappers {u8,u16,u32,u64,u128,usize}^2 — 36 instances in total,
   continuing the u8 row started above.  Every body follows the same generated
   pattern: project both operands to the raw machine integers, shift, rewrap in
   the left operand's newtype.  The behavior of out-of-range shift amounts is
   whatever the underlying machine-int Shr_f_shr does — not visible here. *)
#[global] Instance t_Shr_922719969 : t_Shr ((t_u8)) ((t_u64)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_u64)=> Build_t_u8 (Shr_f_shr (u8_0 self) (u64_0 other)); }. #[global] Instance t_Shr_138723873 : t_Shr ((t_u8)) ((t_u128)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_u128)=> Build_t_u8 (Shr_f_shr (u8_0 self) (u128_0 other)); }. #[global] Instance t_Shr_558887005 : t_Shr ((t_u8)) ((t_usize)) := { Shr_f_Output := t_u8; Shr_f_shr := fun (self : t_u8) (other : t_usize)=> Build_t_u8 (Shr_f_shr (u8_0 self) (usize_0 other)); }. #[global] Instance t_Shr_170693446 : t_Shr ((t_u16)) ((t_u8)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_u8)=> Build_t_u16 (Shr_f_shr (u16_0 self) (u8_0 other)); }. #[global] Instance t_Shr_899863737 : t_Shr ((t_u16)) ((t_u16)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Shr_f_shr (u16_0 self) (u16_0 other)); }. #[global] Instance t_Shr_290867596 : t_Shr ((t_u16)) ((t_u32)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_u32)=> Build_t_u16 (Shr_f_shr (u16_0 self) (u32_0 other)); }. #[global] Instance t_Shr_630800316 : t_Shr ((t_u16)) ((t_u64)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_u64)=> Build_t_u16 (Shr_f_shr (u16_0 self) (u64_0 other)); }. #[global] Instance t_Shr_51138976 : t_Shr ((t_u16)) ((t_u128)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_u128)=> Build_t_u16 (Shr_f_shr (u16_0 self) (u128_0 other)); }. #[global] Instance t_Shr_82567397 : t_Shr ((t_u16)) ((t_usize)) := { Shr_f_Output := t_u16; Shr_f_shr := fun (self : t_u16) (other : t_usize)=> Build_t_u16 (Shr_f_shr (u16_0 self) (usize_0 other)); }. #[global] Instance t_Shr_430948219 : t_Shr ((t_u32)) ((t_u8)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_u8)=> Build_t_u32 (Shr_f_shr (u32_0 self) (u8_0 other)); }. 
#[global] Instance t_Shr_157675832 : t_Shr ((t_u32)) ((t_u16)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_u16)=> Build_t_u32 (Shr_f_shr (u32_0 self) (u16_0 other)); }. #[global] Instance t_Shr_708845947 : t_Shr ((t_u32)) ((t_u32)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Shr_f_shr (u32_0 self) (u32_0 other)); }. #[global] Instance t_Shr_1060262347 : t_Shr ((t_u32)) ((t_u64)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_u64)=> Build_t_u32 (Shr_f_shr (u32_0 self) (u64_0 other)); }. #[global] Instance t_Shr_372764217 : t_Shr ((t_u32)) ((t_u128)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_u128)=> Build_t_u32 (Shr_f_shr (u32_0 self) (u128_0 other)); }. #[global] Instance t_Shr_534962338 : t_Shr ((t_u32)) ((t_usize)) := { Shr_f_Output := t_u32; Shr_f_shr := fun (self : t_u32) (other : t_usize)=> Build_t_u32 (Shr_f_shr (u32_0 self) (usize_0 other)); }. #[global] Instance t_Shr_45695168 : t_Shr ((t_u64)) ((t_u8)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_u8)=> Build_t_u64 (Shr_f_shr (u64_0 self) (u8_0 other)); }. #[global] Instance t_Shr_1027310629 : t_Shr ((t_u64)) ((t_u16)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_u16)=> Build_t_u64 (Shr_f_shr (u64_0 self) (u16_0 other)); }. #[global] Instance t_Shr_357793917 : t_Shr ((t_u64)) ((t_u32)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_u32)=> Build_t_u64 (Shr_f_shr (u64_0 self) (u32_0 other)); }. #[global] Instance t_Shr_1038705817 : t_Shr ((t_u64)) ((t_u64)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Shr_f_shr (u64_0 self) (u64_0 other)); }. #[global] Instance t_Shr_567649567 : t_Shr ((t_u64)) ((t_u128)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_u128)=> Build_t_u64 (Shr_f_shr (u64_0 self) (u128_0 other)); }. 
#[global] Instance t_Shr_380280894 : t_Shr ((t_u64)) ((t_usize)) := { Shr_f_Output := t_u64; Shr_f_shr := fun (self : t_u64) (other : t_usize)=> Build_t_u64 (Shr_f_shr (u64_0 self) (usize_0 other)); }. #[global] Instance t_Shr_555027554 : t_Shr ((t_u128)) ((t_u8)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_u8)=> Build_t_u128 (Shr_f_shr (u128_0 self) (u8_0 other)); }. #[global] Instance t_Shr_225523666 : t_Shr ((t_u128)) ((t_u16)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_u16)=> Build_t_u128 (Shr_f_shr (u128_0 self) (u16_0 other)); }. #[global] Instance t_Shr_910916464 : t_Shr ((t_u128)) ((t_u32)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_u32)=> Build_t_u128 (Shr_f_shr (u128_0 self) (u32_0 other)); }. #[global] Instance t_Shr_137291592 : t_Shr ((t_u128)) ((t_u64)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_u64)=> Build_t_u128 (Shr_f_shr (u128_0 self) (u64_0 other)); }. #[global] Instance t_Shr_1070013296 : t_Shr ((t_u128)) ((t_u128)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Shr_f_shr (u128_0 self) (u128_0 other)); }. #[global] Instance t_Shr_1009428374 : t_Shr ((t_u128)) ((t_usize)) := { Shr_f_Output := t_u128; Shr_f_shr := fun (self : t_u128) (other : t_usize)=> Build_t_u128 (Shr_f_shr (u128_0 self) (usize_0 other)); }. #[global] Instance t_Shr_94723353 : t_Shr ((t_usize)) ((t_u8)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_u8)=> Build_t_usize (Shr_f_shr (usize_0 self) (u8_0 other)); }. #[global] Instance t_Shr_18219058 : t_Shr ((t_usize)) ((t_u16)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_u16)=> Build_t_usize (Shr_f_shr (usize_0 self) (u16_0 other)); }. 
(* Final Shr row (usize as the shifted value), then the Shl (left-shift)
   instances begin, laid out on exactly the same 6x6 grid. *)
#[global] Instance t_Shr_14441839 : t_Shr ((t_usize)) ((t_u32)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_u32)=> Build_t_usize (Shr_f_shr (usize_0 self) (u32_0 other)); }. #[global] Instance t_Shr_642676920 : t_Shr ((t_usize)) ((t_u64)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_u64)=> Build_t_usize (Shr_f_shr (usize_0 self) (u64_0 other)); }. #[global] Instance t_Shr_65876869 : t_Shr ((t_usize)) ((t_u128)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_u128)=> Build_t_usize (Shr_f_shr (usize_0 self) (u128_0 other)); }. #[global] Instance t_Shr_833436714 : t_Shr ((t_usize)) ((t_usize)) := { Shr_f_Output := t_usize; Shr_f_shr := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Shr_f_shr (usize_0 self) (usize_0 other)); }. #[global] Instance t_Shl_161455974 : t_Shl ((t_u8)) ((t_u8)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Shl_f_shl (u8_0 self) (u8_0 other)); }. #[global] Instance t_Shl_861055562 : t_Shl ((t_u8)) ((t_u16)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_u16)=> Build_t_u8 (Shl_f_shl (u8_0 self) (u16_0 other)); }. #[global] Instance t_Shl_479938796 : t_Shl ((t_u8)) ((t_u32)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_u32)=> Build_t_u8 (Shl_f_shl (u8_0 self) (u32_0 other)); }. #[global] Instance t_Shl_373462431 : t_Shl ((t_u8)) ((t_u64)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_u64)=> Build_t_u8 (Shl_f_shl (u8_0 self) (u64_0 other)); }. #[global] Instance t_Shl_356733585 : t_Shl ((t_u8)) ((t_u128)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_u128)=> Build_t_u8 (Shl_f_shl (u8_0 self) (u128_0 other)); }. #[global] Instance t_Shl_138823384 : t_Shl ((t_u8)) ((t_usize)) := { Shl_f_Output := t_u8; Shl_f_shl := fun (self : t_u8) (other : t_usize)=> Build_t_u8 (Shl_f_shl (u8_0 self) (usize_0 other)); }. 
(* Shl (left-shift) instances for the remaining (value-type, shift-type) pairs,
   same generated unwrap/operate/rewrap pattern as the Shr grid above.  After the
   grid completes, the bitwise BitOr and BitXor instances follow; unlike the
   shifts these are homogeneous only (u8|u8, u16|u16, ...), matching the impls
   hax extracted from the Rust source. *)
#[global] Instance t_Shl_492599436 : t_Shl ((t_u16)) ((t_u8)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_u8)=> Build_t_u16 (Shl_f_shl (u16_0 self) (u8_0 other)); }. #[global] Instance t_Shl_254997522 : t_Shl ((t_u16)) ((t_u16)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Shl_f_shl (u16_0 self) (u16_0 other)); }. #[global] Instance t_Shl_840888059 : t_Shl ((t_u16)) ((t_u32)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_u32)=> Build_t_u16 (Shl_f_shl (u16_0 self) (u32_0 other)); }. #[global] Instance t_Shl_1017206779 : t_Shl ((t_u16)) ((t_u64)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_u64)=> Build_t_u16 (Shl_f_shl (u16_0 self) (u64_0 other)); }. #[global] Instance t_Shl_751151164 : t_Shl ((t_u16)) ((t_u128)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_u128)=> Build_t_u16 (Shl_f_shl (u16_0 self) (u128_0 other)); }. #[global] Instance t_Shl_303578486 : t_Shl ((t_u16)) ((t_usize)) := { Shl_f_Output := t_u16; Shl_f_shl := fun (self : t_u16) (other : t_usize)=> Build_t_u16 (Shl_f_shl (u16_0 self) (usize_0 other)); }. #[global] Instance t_Shl_186069032 : t_Shl ((t_u32)) ((t_u8)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_u8)=> Build_t_u32 (Shl_f_shl (u32_0 self) (u8_0 other)); }. #[global] Instance t_Shl_320616735 : t_Shl ((t_u32)) ((t_u16)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_u16)=> Build_t_u32 (Shl_f_shl (u32_0 self) (u16_0 other)); }. #[global] Instance t_Shl_325940784 : t_Shl ((t_u32)) ((t_u32)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Shl_f_shl (u32_0 self) (u32_0 other)); }. #[global] Instance t_Shl_398883535 : t_Shl ((t_u32)) ((t_u64)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_u64)=> Build_t_u32 (Shl_f_shl (u32_0 self) (u64_0 other)); }. 
#[global] Instance t_Shl_700909976 : t_Shl ((t_u32)) ((t_u128)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_u128)=> Build_t_u32 (Shl_f_shl (u32_0 self) (u128_0 other)); }. #[global] Instance t_Shl_475027367 : t_Shl ((t_u32)) ((t_usize)) := { Shl_f_Output := t_u32; Shl_f_shl := fun (self : t_u32) (other : t_usize)=> Build_t_u32 (Shl_f_shl (u32_0 self) (usize_0 other)); }. #[global] Instance t_Shl_620046856 : t_Shl ((t_u64)) ((t_u8)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_u8)=> Build_t_u64 (Shl_f_shl (u64_0 self) (u8_0 other)); }. #[global] Instance t_Shl_158077515 : t_Shl ((t_u64)) ((t_u16)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_u16)=> Build_t_u64 (Shl_f_shl (u64_0 self) (u16_0 other)); }. #[global] Instance t_Shl_1071441050 : t_Shl ((t_u64)) ((t_u32)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_u32)=> Build_t_u64 (Shl_f_shl (u64_0 self) (u32_0 other)); }. #[global] Instance t_Shl_581241894 : t_Shl ((t_u64)) ((t_u64)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Shl_f_shl (u64_0 self) (u64_0 other)); }. #[global] Instance t_Shl_916302310 : t_Shl ((t_u64)) ((t_u128)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_u128)=> Build_t_u64 (Shl_f_shl (u64_0 self) (u128_0 other)); }. #[global] Instance t_Shl_59609547 : t_Shl ((t_u64)) ((t_usize)) := { Shl_f_Output := t_u64; Shl_f_shl := fun (self : t_u64) (other : t_usize)=> Build_t_u64 (Shl_f_shl (u64_0 self) (usize_0 other)); }. #[global] Instance t_Shl_308574333 : t_Shl ((t_u128)) ((t_u8)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_u8)=> Build_t_u128 (Shl_f_shl (u128_0 self) (u8_0 other)); }. #[global] Instance t_Shl_966677877 : t_Shl ((t_u128)) ((t_u16)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_u16)=> Build_t_u128 (Shl_f_shl (u128_0 self) (u16_0 other)); }. 
#[global] Instance t_Shl_38932717 : t_Shl ((t_u128)) ((t_u32)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_u32)=> Build_t_u128 (Shl_f_shl (u128_0 self) (u32_0 other)); }. #[global] Instance t_Shl_108085956 : t_Shl ((t_u128)) ((t_u64)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_u64)=> Build_t_u128 (Shl_f_shl (u128_0 self) (u64_0 other)); }. #[global] Instance t_Shl_489587677 : t_Shl ((t_u128)) ((t_u128)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Shl_f_shl (u128_0 self) (u128_0 other)); }. #[global] Instance t_Shl_837150634 : t_Shl ((t_u128)) ((t_usize)) := { Shl_f_Output := t_u128; Shl_f_shl := fun (self : t_u128) (other : t_usize)=> Build_t_u128 (Shl_f_shl (u128_0 self) (usize_0 other)); }. #[global] Instance t_Shl_736165651 : t_Shl ((t_usize)) ((t_u8)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_u8)=> Build_t_usize (Shl_f_shl (usize_0 self) (u8_0 other)); }. #[global] Instance t_Shl_740886741 : t_Shl ((t_usize)) ((t_u16)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_u16)=> Build_t_usize (Shl_f_shl (usize_0 self) (u16_0 other)); }. #[global] Instance t_Shl_683246358 : t_Shl ((t_usize)) ((t_u32)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_u32)=> Build_t_usize (Shl_f_shl (usize_0 self) (u32_0 other)); }. #[global] Instance t_Shl_436746920 : t_Shl ((t_usize)) ((t_u64)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_u64)=> Build_t_usize (Shl_f_shl (usize_0 self) (u64_0 other)); }. #[global] Instance t_Shl_527409353 : t_Shl ((t_usize)) ((t_u128)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_u128)=> Build_t_usize (Shl_f_shl (usize_0 self) (u128_0 other)); }. 
(* Last Shl instance, then homogeneous BitOr instances for all six unsigned
   wrappers and the first two BitXor instances. *)
#[global] Instance t_Shl_982380013 : t_Shl ((t_usize)) ((t_usize)) := { Shl_f_Output := t_usize; Shl_f_shl := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Shl_f_shl (usize_0 self) (usize_0 other)); }. #[global] Instance t_BitOr_669654947 : t_BitOr ((t_u8)) ((t_u8)) := { BitOr_f_Output := t_u8; BitOr_f_bitor := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (BitOr_f_bitor (u8_0 self) (u8_0 other)); }. #[global] Instance t_BitOr_892941557 : t_BitOr ((t_u16)) ((t_u16)) := { BitOr_f_Output := t_u16; BitOr_f_bitor := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (BitOr_f_bitor (u16_0 self) (u16_0 other)); }. #[global] Instance t_BitOr_991330847 : t_BitOr ((t_u32)) ((t_u32)) := { BitOr_f_Output := t_u32; BitOr_f_bitor := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (BitOr_f_bitor (u32_0 self) (u32_0 other)); }. #[global] Instance t_BitOr_692971983 : t_BitOr ((t_u64)) ((t_u64)) := { BitOr_f_Output := t_u64; BitOr_f_bitor := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (BitOr_f_bitor (u64_0 self) (u64_0 other)); }. #[global] Instance t_BitOr_227319538 : t_BitOr ((t_u128)) ((t_u128)) := { BitOr_f_Output := t_u128; BitOr_f_bitor := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (BitOr_f_bitor (u128_0 self) (u128_0 other)); }. #[global] Instance t_BitOr_669787696 : t_BitOr ((t_usize)) ((t_usize)) := { BitOr_f_Output := t_usize; BitOr_f_bitor := fun (self : t_usize) (other : t_usize)=> Build_t_usize (BitOr_f_bitor (usize_0 self) (usize_0 other)); }. #[global] Instance t_BitXor_327788827 : t_BitXor ((t_u8)) ((t_u8)) := { BitXor_f_Output := t_u8; BitXor_f_bitxor := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (BitXor_f_bitxor (u8_0 self) (u8_0 other)); }. #[global] Instance t_BitXor_661040931 : t_BitXor ((t_u16)) ((t_u16)) := { BitXor_f_Output := t_u16; BitXor_f_bitxor := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (BitXor_f_bitxor (u16_0 self) (u16_0 other)); }. 
(* Remaining homogeneous BitXor instances, then BitAnd for all six unsigned
   wrappers — same lift-through-the-newtype pattern as every instance above. *)
#[global] Instance t_BitXor_222957020 : t_BitXor ((t_u32)) ((t_u32)) := { BitXor_f_Output := t_u32; BitXor_f_bitxor := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (BitXor_f_bitxor (u32_0 self) (u32_0 other)); }. #[global] Instance t_BitXor_530545977 : t_BitXor ((t_u64)) ((t_u64)) := { BitXor_f_Output := t_u64; BitXor_f_bitxor := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (BitXor_f_bitxor (u64_0 self) (u64_0 other)); }. #[global] Instance t_BitXor_112780081 : t_BitXor ((t_u128)) ((t_u128)) := { BitXor_f_Output := t_u128; BitXor_f_bitxor := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (BitXor_f_bitxor (u128_0 self) (u128_0 other)); }. #[global] Instance t_BitXor_969810999 : t_BitXor ((t_usize)) ((t_usize)) := { BitXor_f_Output := t_usize; BitXor_f_bitxor := fun (self : t_usize) (other : t_usize)=> Build_t_usize (BitXor_f_bitxor (usize_0 self) (usize_0 other)); }. #[global] Instance t_BitAnd_126469303 : t_BitAnd ((t_u8)) ((t_u8)) := { BitAnd_f_Output := t_u8; BitAnd_f_bitand := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (BitAnd_f_bitand (u8_0 self) (u8_0 other)); }. #[global] Instance t_BitAnd_531525101 : t_BitAnd ((t_u16)) ((t_u16)) := { BitAnd_f_Output := t_u16; BitAnd_f_bitand := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (BitAnd_f_bitand (u16_0 self) (u16_0 other)); }. #[global] Instance t_BitAnd_24728760 : t_BitAnd ((t_u32)) ((t_u32)) := { BitAnd_f_Output := t_u32; BitAnd_f_bitand := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (BitAnd_f_bitand (u32_0 self) (u32_0 other)); }. #[global] Instance t_BitAnd_35845574 : t_BitAnd ((t_u64)) ((t_u64)) := { BitAnd_f_Output := t_u64; BitAnd_f_bitand := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (BitAnd_f_bitand (u64_0 self) (u64_0 other)); }. #[global] Instance t_BitAnd_396424214 : t_BitAnd ((t_u128)) ((t_u128)) := { BitAnd_f_Output := t_u128; BitAnd_f_bitand := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (BitAnd_f_bitand (u128_0 self) (u128_0 other)); }. 
(* BitAnd for usize, Sub instances for u8/u16/u32 (the u64/u128/usize Sub
   instances appear further below), and the first rotate_left_* helpers.
   Each rotate_left_T computes (x << (shift mod BITS)) | (x >> (BITS - (shift mod BITS))),
   the textbook left-rotation; v_BITS<hash> is the generated per-type bit-width
   constant.  The `(t_Shr := _ : t_Shr _ t_u32)` annotation pins instance
   resolution to the shift-by-u32 instance for the complementary shift.
   NOTE(review): when shift mod BITS = 0 the complementary shift amount equals
   BITS itself; this relies on the underlying Shr model tolerating a full-width
   shift — confirm against the machine-integer semantics file. *)
#[global] Instance t_BitAnd_652458180 : t_BitAnd ((t_usize)) ((t_usize)) := { BitAnd_f_Output := t_usize; BitAnd_f_bitand := fun (self : t_usize) (other : t_usize)=> Build_t_usize (BitAnd_f_bitand (usize_0 self) (usize_0 other)); }. #[global] Instance t_Sub_81344668 : t_Sub ((t_u8)) ((t_u8)) := { Sub_f_Output := t_u8; Sub_f_sub := fun (self : t_u8) (other : t_u8)=> Build_t_u8 (Sub_f_sub (u8_0 self) (u8_0 other)); }. #[global] Instance t_Sub_1011801854 : t_Sub ((t_u16)) ((t_u16)) := { Sub_f_Output := t_u16; Sub_f_sub := fun (self : t_u16) (other : t_u16)=> Build_t_u16 (Sub_f_sub (u16_0 self) (u16_0 other)); }. #[global] Instance t_Sub_1070652436 : t_Sub ((t_u32)) ((t_u32)) := { Sub_f_Output := t_u32; Sub_f_sub := fun (self : t_u32) (other : t_u32)=> Build_t_u32 (Sub_f_sub (u32_0 self) (u32_0 other)); }. Definition rotate_left_u128 (x : t_u128) (shift : t_u32) : t_u128 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS136999051) in let left : t_u128 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u128 := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS136999051) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_left_u16 (x : t_u16) (shift : t_u32) : t_u16 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS277333551) in let left : t_u16 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u16 := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS277333551) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_left_u32 (x : t_u32) (shift : t_u32) : t_u32 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS473478051) in let left : t_u32 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u32 := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS473478051) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). 
(* rotate_left_* for the remaining widths and the first rotate_right_* helpers.
   rotate_right_T mirrors rotate_left_T with the shift directions swapped:
   (x >> (shift mod BITS)) | (x << (BITS - (shift mod BITS))).  The explicit
   `(t_Shl := _ : t_Shl _ t_u32)` / `(t_Shr := _ : t_Shr _ t_u32)` annotations
   select the shift-by-u32 instance from the heterogeneous grid above and must
   be kept as written.  Same full-width-shift caveat as rotate_left: when
   shift mod BITS = 0 the complementary shift amount is BITS — TODO confirm the
   underlying shift model accepts that. *)
Definition rotate_left_u64 (x : t_u64) (shift : t_u32) : t_u64 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS177666292) in let left : t_u64 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u64 := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS177666292) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_left_u8 (x : t_u8) (shift : t_u32) : t_u8 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS690311813) in let left : t_u8 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u8 := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS690311813) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_left_usize (x : t_usize) (shift : t_u32) : t_usize := let shift : t_u32 := Rem_f_rem (shift) (v_BITS229952196) in let left : t_usize := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_usize := Shr_f_shr (t_Shr := _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS229952196) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_right_u128 (x : t_u128) (shift : t_u32) : t_u128 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS136999051) in let left : t_u128 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u128 := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS136999051) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_right_u16 (x : t_u16) (shift : t_u32) : t_u16 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS277333551) in let left : t_u16 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u16 := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS277333551) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). 
(* Remaining rotate_right_* helpers, then the first generated method bodies
   (rotate_left / rotate_right for u8).  The `Program Definition ... run (letb
   hoistN := ControlFlow_Break (...) in ControlFlow_Continue (never_to_any (_)))`
   shape is hax's monadic encoding of a Rust early `return`: the Break carries
   the actual result, the Continue branch is unreachable (hence never_to_any),
   and the `_` hole is discharged by Program.  `Fail Next Obligation` then
   asserts that no proof obligations remain open. *)
Definition rotate_right_u32 (x : t_u32) (shift : t_u32) : t_u32 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS473478051) in let left : t_u32 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u32 := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS473478051) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_right_u64 (x : t_u64) (shift : t_u32) : t_u64 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS177666292) in let left : t_u64 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u64 := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS177666292) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_right_u8 (x : t_u8) (shift : t_u32) : t_u8 := let shift : t_u32 := Rem_f_rem (shift) (v_BITS690311813) in let left : t_u8 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_u8 := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS690311813) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Definition rotate_right_usize (x : t_usize) (shift : t_u32) : t_usize := let shift : t_u32 := Rem_f_rem (shift) (v_BITS229952196) in let left : t_usize := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in let right : t_usize := Shl_f_shl (t_Shl := _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS229952196) (Clone_f_clone (shift))) in BitOr_f_bitor (left) (right). Program Definition rotate_left792925914 (self : t_u8) (n : t_u32) : t_u8 := run (letb hoist1 := ControlFlow_Break (rotate_left_u8 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist1 *)))). Fail Next Obligation. Program Definition rotate_right166090082 (self : t_u8) (n : t_u32) : t_u8 := run (letb hoist2 := ControlFlow_Break (rotate_right_u8 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist2 *)))). Fail Next Obligation. 
(* Generated rotate_left / rotate_right method bodies for u16 through u128,
   all delegating to the rotate_*_T helpers above through the same
   Break/Continue early-return encoding (see the note on the u8 versions). *)
Program Definition rotate_left297034175 (self : t_u16) (n : t_u32) : t_u16 := run (letb hoist3 := ControlFlow_Break (rotate_left_u16 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist3 *)))). Fail Next Obligation. Program Definition rotate_right138522246 (self : t_u16) (n : t_u32) : t_u16 := run (letb hoist4 := ControlFlow_Break (rotate_right_u16 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist4 *)))). Fail Next Obligation. Program Definition rotate_left823573251 (self : t_u32) (n : t_u32) : t_u32 := run (letb hoist5 := ControlFlow_Break (rotate_left_u32 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist5 *)))). Fail Next Obligation. Program Definition rotate_right869195717 (self : t_u32) (n : t_u32) : t_u32 := run (letb hoist6 := ControlFlow_Break (rotate_right_u32 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist6 *)))). Fail Next Obligation. Program Definition rotate_left618936072 (self : t_u64) (n : t_u32) : t_u64 := run (letb hoist7 := ControlFlow_Break (rotate_left_u64 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist7 *)))). Fail Next Obligation. Program Definition rotate_right1041614027 (self : t_u64) (n : t_u32) : t_u64 := run (letb hoist8 := ControlFlow_Break (rotate_right_u64 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist8 *)))). Fail Next Obligation. Program Definition rotate_left1065866885 (self : t_u128) (n : t_u32) : t_u128 := run (letb hoist9 := ControlFlow_Break (rotate_left_u128 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist9 *)))). Fail Next Obligation. Program Definition rotate_right591112338 (self : t_u128) (n : t_u32) : t_u128 := run (letb hoist10 := ControlFlow_Break (rotate_right_u128 (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist10 *)))). Fail Next Obligation. 
(* usize rotate methods, the remaining Sub instances (u64/u128/usize —
   completing the u8/u16/u32 set declared earlier), and the beginning of a
   large hand-commented-out region: a disabled fold_range-based port of
   bswap/ctlz/ctpop/cttz that continues for several lines below.  Left
   untouched; presumably disabled pending a working Into_f_into/fold_range
   encoding — verify with the backend authors before reviving it. *)
Program Definition rotate_left996672710 (self : t_usize) (n : t_u32) : t_usize := run (letb hoist11 := ControlFlow_Break (rotate_left_usize (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist11 *)))). Fail Next Obligation. Program Definition rotate_right442734174 (self : t_usize) (n : t_u32) : t_usize := run (letb hoist12 := ControlFlow_Break (rotate_right_usize (self) (n)) in ControlFlow_Continue (never_to_any (_ (* hoist12 *)))). Fail Next Obligation. #[global] Instance t_Sub_788323603 : t_Sub ((t_u64)) ((t_u64)) := { Sub_f_Output := t_u64; Sub_f_sub := fun (self : t_u64) (other : t_u64)=> Build_t_u64 (Sub_f_sub (u64_0 self) (u64_0 other)); }. #[global] Instance t_Sub_1046324685 : t_Sub ((t_u128)) ((t_u128)) := { Sub_f_Output := t_u128; Sub_f_sub := fun (self : t_u128) (other : t_u128)=> Build_t_u128 (Sub_f_sub (u128_0 self) (u128_0 other)); }. #[global] Instance t_Sub_1064369889 : t_Sub ((t_usize)) ((t_usize)) := { Sub_f_Output := t_usize; Sub_f_sub := fun (self : t_usize) (other : t_usize)=> Build_t_usize (Sub_f_sub (usize_0 self) (usize_0 other)); }. (* Program Definition bswap_u128 (x : t_u128) : t_u128 := *) (* let count : t_u128 := Into_f_into (0%N) in *) (* let count := fold_range (Build_t_usize (Build_t_U64 0%N)) (Into_f_into (v_BITS136999051)) (fun count _ => *) (* true) (count) (fun (count : t_u128) (i : t_usize) => *) (* let low_bit : t_u128 := (* Into_f_into *) (BitAnd_f_bitand (t_BitAnd := _ : t_BitAnd t_u128 t_u128) (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1%N) : t_u128)) in *) (* let count : t_u128 := Add_f_add (t_Add := t_Add_258013445 : t_Add t_u128 t_u128) (Shl_f_shl (t_Shl := t_Shl_38932717 : t_Shl t_u128 t_u32) (count) (Into_f_into (1%N) : t_u32)) (low_bit) in *) (* count) in *) (* count. 
*) (* Program Definition bswap_u16 (x : t_u16) : t_u16 := *) (* let count : t_u16 := Into_f_into (0) in *) (* let count := fold_range (Build_t_usize (Build_t_U64 0%N)) (Into_f_into (v_BITS277333551)) (fun count _ => *) (* true) (count) (fun (count : t_u16) (i : t_usize) => *) (* let low_bit : t_u16 := (* Into_f_into *) (BitAnd_f_bitand (t_BitAnd := _ : t_BitAnd t_u16 t_u16) (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1%N) : t_u16)) in *) (* let count := Add_f_add (t_Add := _ : t_Add _ _) (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *) (* count) in *) (* count. *) (* Definition bswap_u32 (x : t_u32) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS473478051)) (fun count _ => *) (* true) (count) (fun count i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *) (* count) in *) (* count. *) (* Definition bswap_u64 (x : t_u64) : t_u64 := *) (* let count : t_u64 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS177666292)) (fun count _ => *) (* true) (count) (fun count i => *) (* let low_bit : t_u64 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *) (* count) in *) (* count. *) (* Definition bswap_u8 (x : t_u8) : t_u8 := *) (* let count : t_u8 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS690311813)) (fun count _ => *) (* true) (count) (fun count i => *) (* let low_bit : t_u8 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *) (* count) in *) (* count. 
*) (* Definition bswap_usize (x : t_usize) : t_usize := *) (* let count : t_usize := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS229952196)) (fun count _ => *) (* true) (count) (fun count i => *) (* let low_bit : t_usize := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *) (* count) in *) (* count. *) (* Definition ctlz_u128 (x : t_u128) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS136999051)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS136999051) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition ctlz_u16 (x : t_u16) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS277333551)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS277333551) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. 
*) (* Definition ctlz_u32 (x : t_u32) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS473478051)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS473478051) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition ctlz_u64 (x : t_u64) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS177666292)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS177666292) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. 
*) (* Definition ctlz_u8 (x : t_u8) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS690311813)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS690311813) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition ctlz_usize (x : t_usize) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS229952196)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS229952196) (Into_f_into (1))))) in *) (* if *) (* orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition ctpop_u128 (x : t_u128) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS136999051)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. 
*) (* Definition ctpop_u16 (x : t_u16) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS277333551)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. *) (* Definition ctpop_u32 (x : t_u32) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS473478051)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. *) (* Definition ctpop_u64 (x : t_u64) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS177666292)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. *) (* Definition ctpop_u8 (x : t_u8) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS690311813)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. *) (* Definition ctpop_usize (x : t_usize) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let count := fold_range (0) (Into_f_into (v_BITS229952196)) (fun count _ => *) (* true) (count) (fun count i => *) (* Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *) (* count. 
*) (* Definition cttz_u128 (x : t_u128) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS136999051)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition cttz_u16 (x : t_u16) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS277333551)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition cttz_u32 (x : t_u32) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS473478051)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. 
*) (* Definition cttz_u64 (x : t_u64) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS177666292)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition cttz_u8 (x : t_u8) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS690311813)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. *) (* Definition cttz_usize (x : t_usize) : t_u32 := *) (* let count : t_u32 := Into_f_into (0) in *) (* let done := false in *) (* let (count,done) := fold_range (0) (Into_f_into (v_BITS229952196)) (fun (count,done) _ => *) (* true) ((count,done)) (fun (count,done) i => *) (* let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *) (* if *) (* orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *) (* then *) (* let done := true in *) (* (count,done) *) (* else *) (* let count := Add_f_add (count) (Into_f_into (1)) in *) (* (count,done)) in *) (* count. 
*) (* Definition count_ones202509899 (self : t_u8) : t_u32 := *) (* run (let hoist13 := ControlFlow_Break (ctpop_u8 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist13))). *) (* Definition leading_zeros75047366 (self : t_u8) : t_u32 := *) (* run (let hoist14 := ControlFlow_Break (ctlz_u8 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist14))). *) (* Definition swap_bytes657156997 (self : t_u8) : t_u8 := *) (* Into_f_into (bswap_u8 (self)). *) (* Definition from_be746282521 (x : t_u8) : t_u8 := *) (* swap_bytes657156997 (x). *) (* Definition to_be972448780 (self : t_u8) : t_u8 := *) (* swap_bytes657156997 (self). *) (* Definition trailing_zeros572929871 (self : t_u8) : t_u32 := *) (* run (let hoist15 := ControlFlow_Break (cttz_u8 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist15))). *) (* Definition count_ones91875752 (self : t_u16) : t_u32 := *) (* run (let hoist16 := ControlFlow_Break (ctpop_u16 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist16))). *) (* Definition leading_zeros462412478 (self : t_u16) : t_u32 := *) (* run (let hoist17 := ControlFlow_Break (ctlz_u16 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist17))). *) (* Definition swap_bytes926722059 (self : t_u16) : t_u16 := *) (* Into_f_into (bswap_u16 (self)). *) (* Definition from_be510959665 (x : t_u16) : t_u16 := *) (* swap_bytes926722059 (x). *) (* Definition to_be551590602 (self : t_u16) : t_u16 := *) (* swap_bytes926722059 (self). *) (* Definition trailing_zeros421474733 (self : t_u16) : t_u32 := *) (* run (let hoist18 := ControlFlow_Break (cttz_u16 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist18))). *) (* Definition count_ones776185738 (self : t_u32) : t_u32 := *) (* run (let hoist19 := ControlFlow_Break (ctpop_u32 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist19))). 
*) (* Definition leading_zeros698221972 (self : t_u32) : t_u32 := *) (* run (let hoist20 := ControlFlow_Break (ctlz_u32 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist20))). *) (* Definition swap_bytes320480126 (self : t_u32) : t_u32 := *) (* Into_f_into (bswap_u32 (self)). *) (* Definition from_be664756649 (x : t_u32) : t_u32 := *) (* swap_bytes320480126 (x). *) (* Definition to_be82825962 (self : t_u32) : t_u32 := *) (* swap_bytes320480126 (self). *) (* Definition trailing_zeros1061560720 (self : t_u32) : t_u32 := *) (* run (let hoist21 := ControlFlow_Break (cttz_u32 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist21))). *) (* Definition count_ones235885653 (self : t_u64) : t_u32 := *) (* run (let hoist22 := ControlFlow_Break (ctpop_u64 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist22))). *) (* Definition leading_zeros338302110 (self : t_u64) : t_u32 := *) (* run (let hoist23 := ControlFlow_Break (ctlz_u64 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist23))). *) (* Definition swap_bytes722254271 (self : t_u64) : t_u64 := *) (* Into_f_into (bswap_u64 (self)). *) (* Definition from_be16013635 (x : t_u64) : t_u64 := *) (* swap_bytes722254271 (x). *) (* Definition to_be376714729 (self : t_u64) : t_u64 := *) (* swap_bytes722254271 (self). *) (* Definition trailing_zeros188346231 (self : t_u64) : t_u32 := *) (* run (let hoist24 := ControlFlow_Break (cttz_u64 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist24))). *) (* Definition count_ones926736261 (self : t_u128) : t_u32 := *) (* run (let hoist25 := ControlFlow_Break (ctpop_u128 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist25))). *) (* Definition leading_zeros19644612 (self : t_u128) : t_u32 := *) (* run (let hoist26 := ControlFlow_Break (ctlz_u128 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist26))). *) (* Definition swap_bytes420879368 (self : t_u128) : t_u128 := *) (* Into_f_into (bswap_u128 (self)). 
*) (* Definition from_be191085771 (x : t_u128) : t_u128 := *) (* swap_bytes420879368 (x). *) (* Definition to_be555075987 (self : t_u128) : t_u128 := *) (* swap_bytes420879368 (self). *) (* Definition trailing_zeros821715250 (self : t_u128) : t_u32 := *) (* run (let hoist27 := ControlFlow_Break (cttz_u128 (self)) in *) (* ControlFlow_Continue (never_to_any (hoist27))). *) (* Definition count_ones441645762 (self : t_usize) : t_u32 := *) (* run (let hoist28 := ControlFlow_Break (ctpop_usize (self)) in *) (* ControlFlow_Continue (never_to_any (hoist28))). *) (* Definition leading_zeros905233489 (self : t_usize) : t_u32 := *) (* run (let hoist29 := ControlFlow_Break (ctlz_usize (self)) in *) (* ControlFlow_Continue (never_to_any (hoist29))). *) (* Definition swap_bytes268673424 (self : t_usize) : t_usize := *) (* Into_f_into (bswap_usize (self)). *) (* Definition from_be607978059 (x : t_usize) : t_usize := *) (* swap_bytes268673424 (x). *) (* Definition to_be561847134 (self : t_usize) : t_usize := *) (* swap_bytes268673424 (self). *) (* Definition trailing_zeros42066260 (self : t_usize) : t_u32 := *) (* run (let hoist30 := ControlFlow_Break (cttz_usize (self)) in *) (* ControlFlow_Continue (never_to_any (hoist30))). *) #[global] Instance t_Div_345870802 : t_Div ((t_i8)) ((t_i8)) := { Div_f_Output := t_i8; Div_f_div := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (Div_f_div (i8_0 self) (i8_0 other)); }. #[global] Instance t_Div_69196905 : t_Div ((t_i16)) ((t_i16)) := { Div_f_Output := t_i16; Div_f_div := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (Div_f_div (i16_0 self) (i16_0 other)); }. #[global] Instance t_Div_222178666 : t_Div ((t_i32)) ((t_i32)) := { Div_f_Output := t_i32; Div_f_div := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (Div_f_div (i32_0 self) (i32_0 other)); }. 
#[global] Instance t_Div_551701934 : t_Div ((t_i64)) ((t_i64)) := { Div_f_Output := t_i64; Div_f_div := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (Div_f_div (i64_0 self) (i64_0 other)); }. #[global] Instance t_Div_650346214 : t_Div ((t_i128)) ((t_i128)) := { Div_f_Output := t_i128; Div_f_div := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (Div_f_div (i128_0 self) (i128_0 other)); }. #[global] Instance t_Div_911978922 : t_Div ((t_isize)) ((t_isize)) := { Div_f_Output := t_isize; Div_f_div := fun (self : t_isize) (other : t_isize)=> Build_t_isize (Div_f_div (isize_0 self) (isize_0 other)); }. #[global] Instance t_Rem_580678374 : t_Rem ((t_i8)) ((t_i8)) := { Rem_f_Output := t_i8; Rem_f_rem := fun (self : t_i8) (other : t_i8)=> Build_t_i8 (Rem_f_rem (i8_0 self) (i8_0 other)); }. Definition rem_euclid622298453 (self : t_i8) (rhs : t_i8) : t_i8 := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add634491935 (r) (wrapping_abs400396545 (rhs)) else r. #[global] Instance t_Rem_532407972 : t_Rem ((t_i16)) ((t_i16)) := { Rem_f_Output := t_i16; Rem_f_rem := fun (self : t_i16) (other : t_i16)=> Build_t_i16 (Rem_f_rem (i16_0 self) (i16_0 other)); }. Definition rem_euclid158017644 (self : t_i16) (rhs : t_i16) : t_i16 := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add868559108 (r) (wrapping_abs229076826 (rhs)) else r. #[global] Instance t_Rem_406274620 : t_Rem ((t_i32)) ((t_i32)) := { Rem_f_Output := t_i32; Rem_f_rem := fun (self : t_i32) (other : t_i32)=> Build_t_i32 (Rem_f_rem (i32_0 self) (i32_0 other)); }. Definition rem_euclid881249982 (self : t_i32) (rhs : t_i32) : t_i32 := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add475006616 (r) (wrapping_abs729536875 (rhs)) else r. 
#[global] Instance t_Rem_296096507 : t_Rem ((t_i64)) ((t_i64)) := { Rem_f_Output := t_i64; Rem_f_rem := fun (self : t_i64) (other : t_i64)=> Build_t_i64 (Rem_f_rem (i64_0 self) (i64_0 other)); }. Definition rem_euclid1057082210 (self : t_i64) (rhs : t_i64) : t_i64 := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add590074241 (r) (wrapping_abs285829312 (rhs)) else r. #[global] Instance t_Rem_773614977 : t_Rem ((t_i128)) ((t_i128)) := { Rem_f_Output := t_i128; Rem_f_rem := fun (self : t_i128) (other : t_i128)=> Build_t_i128 (Rem_f_rem (i128_0 self) (i128_0 other)); }. Definition rem_euclid254910751 (self : t_i128) (rhs : t_i128) : t_i128 := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add251385439 (r) (wrapping_abs281925696 (rhs)) else r. #[global] Instance t_Rem_136872616 : t_Rem ((t_isize)) ((t_isize)) := { Rem_f_Output := t_isize; Rem_f_rem := fun (self : t_isize) (other : t_isize)=> Build_t_isize (Rem_f_rem (isize_0 self) (isize_0 other)); }. Definition rem_euclid828379367 (self : t_isize) (rhs : t_isize) : t_isize := let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in if PartialOrd_f_lt (r) (Into_f_into (0)) then wrapping_add226040243 (r) (wrapping_abs347300819 (rhs)) else r. #[global] Instance t_Not_500984294 : t_Not ((t_u8)) := { Not_f_Output := t_u8; Not_f_not := fun (self : t_u8)=> Build_t_u8 (Not_f_not (u8_0 self)); }. (* Definition count_zeros558337492 (self : t_u8) : t_u32 := *) (* count_ones202509899 (Not_f_not (self)). *) (* Definition leading_ones55148479 (self : t_u8) : t_u32 := *) (* leading_zeros75047366 (Not_f_not (self)). *) (* Definition trailing_ones359778731 (self : t_u8) : t_u32 := *) (* trailing_zeros572929871 (Not_f_not (self)). *) #[global] Instance t_Not_560691647 : t_Not ((t_u16)) := { Not_f_Output := t_u16; Not_f_not := fun (self : t_u16)=> Build_t_u16 (Not_f_not (u16_0 self)); }. 
(* Definition count_zeros199825317 (self : t_u16) : t_u32 := *) (* count_ones91875752 (Not_f_not (self)). *) (* Definition leading_ones164277656 (self : t_u16) : t_u32 := *) (* leading_zeros462412478 (Not_f_not (self)). *) (* Definition trailing_ones903944727 (self : t_u16) : t_u32 := *) (* trailing_zeros421474733 (Not_f_not (self)). *) #[global] Instance t_Not_220208504 : t_Not ((t_u32)) := { Not_f_Output := t_u32; Not_f_not := fun (self : t_u32)=> Build_t_u32 (Not_f_not (u32_0 self)); }. (* Definition count_zeros942566041 (self : t_u32) : t_u32 := *) (* count_ones776185738 (Not_f_not (self)). *) (* Definition leading_ones766486760 (self : t_u32) : t_u32 := *) (* leading_zeros698221972 (Not_f_not (self)). *) (* Definition trailing_ones223371510 (self : t_u32) : t_u32 := *) (* trailing_zeros1061560720 (Not_f_not (self)). *) #[global] Instance t_Not_655044209 : t_Not ((t_u64)) := { Not_f_Output := t_u64; Not_f_not := fun (self : t_u64)=> Build_t_u64 (Not_f_not (u64_0 self)); }. (* Definition count_zeros60346158 (self : t_u64) : t_u32 := *) (* count_ones235885653 (Not_f_not (self)). *) (* Definition leading_ones404666910 (self : t_u64) : t_u32 := *) (* leading_zeros338302110 (Not_f_not (self)). *) (* Definition trailing_ones601201120 (self : t_u64) : t_u32 := *) (* trailing_zeros188346231 (Not_f_not (self)). *) #[global] Instance t_Not_851738617 : t_Not ((t_u128)) := { Not_f_Output := t_u128; Not_f_not := fun (self : t_u128)=> Build_t_u128 (Not_f_not (u128_0 self)); }. (* Definition count_zeros824862815 (self : t_u128) : t_u32 := *) (* count_ones926736261 (Not_f_not (self)). *) (* Definition leading_ones475503572 (self : t_u128) : t_u32 := *) (* leading_zeros19644612 (Not_f_not (self)). *) (* Definition trailing_ones705845381 (self : t_u128) : t_u32 := *) (* trailing_zeros821715250 (Not_f_not (self)). *) #[global] Instance t_Not_677551814 : t_Not ((t_usize)) := { Not_f_Output := t_usize; Not_f_not := fun (self : t_usize)=> Build_t_usize (Not_f_not (usize_0 self)); }. 
(* Definition count_zeros73479642 (self : t_usize) : t_u32 := *) (* count_ones441645762 (Not_f_not (self)). *) (* Definition leading_ones667660708 (self : t_usize) : t_u32 := *) (* leading_zeros905233489 (Not_f_not (self)). *) (* Definition trailing_ones979548463 (self : t_usize) : t_u32 := *) (* trailing_zeros42066260 (Not_f_not (self)). *) Record t_TryFromSliceError : Type := { TryFromSliceError_0 : unit; }. Arguments Build_t_TryFromSliceError. Arguments TryFromSliceError_0. #[export] Instance settable_t_TryFromSliceError : Settable _ := settable! (Build_t_TryFromSliceError) . Notation "'TryFromSliceError'" := Build_t_TryFromSliceError. Definition t_Seq (v_T : Type) `{t_Sized (v_T)} : Type := list v_T. #[global] Instance t_Clone_640571940 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Seq ((v_T)))) := { Clone_f_clone := fun (self : t_Seq ((v_T)))=> self; }. Definition t_LIST (v_T : Type) `{t_Sized (v_T)} : Type := list v_T. Notation "'LIST_NIL'" := nil. Notation "'LIST_CONS'" := (fun a b => cons b a). Definition nil `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} '(_ : unit) : t_Seq ((v_T)) := nil. Record t_Slice (v_T : Type) `{t_Sized (v_T)} : Type := { Slice_f_v : t_Seq ((v_T)); }. Arguments Build_t_Slice (_) {_}. Arguments Slice_f_v {_} {_}. #[export] Instance settable_t_Slice `{v_T : Type} `{t_Sized (v_T)} : Settable _ := settable! (Build_t_Slice v_T) . (* Instance t_From_692299963 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Slice ((v_T)))) ((t_Slice v_T)) := *) (* { *) (* From_f_from := fun (x : t_Slice v_T)=> *) (* t_Slice (t_Seq (impl__to_vec (x))); *) (* }. *) Record t_Array (v_T : Type) (v_N : t_usize) `{t_Sized (v_T)} : Type := { Array_f_v : t_Slice ((v_T)); }. Arguments Build_t_Array {_} {_} {_}. Arguments Array_f_v {_} {_} {_}. #[export] Instance settable_t_Array `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} : Settable _ := settable! (@Build_t_Array v_T v_N _) . 
#[global] Instance t_Clone_962303223 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Array ((v_T)) (v_N))) := { Clone_f_clone := fun (self : t_Array ((v_T)) (v_N))=> Build_t_Array (Clone_f_clone (Array_f_v self)); }. Definition cast `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Array ((v_T)) (v_N)) : t_Slice ((v_T)) := Array_f_v self. From Core Require Import Core_Ops_Index. Instance t_Index_927562605 `{v_T : Type} `{v_I : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Sized (v_I)} `{t_Clone (v_T)} `{t_Index (t_Slice ((v_T))) (v_I)} : t_Index ((t_Array ((v_T)) (v_N))) ((v_I)) := { Index_f_Output := Index_f_Output; Index_f_index := fun (self : t_Array ((v_T)) (v_N)) (index : v_I)=> Index_f_index (cast (self)) (index); }. (* Instance t_From_684363179 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Array (v_T) (v_N))) ((t_Array ((v_T)) (v_N))) := *) (* { *) (* From_f_from := fun (x : t_Array ((v_T)) (v_N))=> *) (* match TryInto_f_try_into (Seq_f_v Slice_f_v Array_f_v x) with *) (* | Result_Ok (x) => *) (* x *) (* | _ => *) (* never_to_any (panic_fmt (impl_2__new_const (["some error?"%string]))) *) (* end; *) (* }. *) #[global] Instance t_Index_324031838 `{v_T : Type} `{v_I : Type} `{t_Sized (v_T)} `{t_Sized (v_I)} `{v_SliceIndex (v_I) (t_Slice ((v_T)))} : t_Index ((t_Slice ((v_T)))) ((v_I)) := { Index_f_Output := SliceIndex_f_Output; Index_f_index := fun (self : t_Slice ((v_T))) (index : v_I)=> SliceIndex_f_index (index) (self); }. Definition cons `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (t : v_T) : t_Seq ((v_T)) := cons s t. (* Instance t_From_1005673342 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Array ((v_T)) (v_N))) ((t_Array (v_T) (v_N))) := *) (* { *) (* From_f_from := fun (x : t_Array (v_T) (v_N))=> *) (* t_Array (t_Slice (t_Seq (impl__to_vec (Index_f_index (x) (Build_t_RangeFull))))); *) (* }. 
*) (* Instance v_SliceIndex_1030023794 `{v_T : Type} `{t_Sized (v_T)} : v_SliceIndex ((t_RangeFull)) ((t_Slice ((v_T)))) := *) (* { *) (* SliceIndex_f_Output := t_Slice ((v_T)); *) (* SliceIndex_f_index := fun (self : t_RangeFull) (slice : t_Slice ((v_T)))=> *) (* slice; *) (* }. *) (* Instance t_AsRef_175264108 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_AsRef ((t_Array ((v_T)) (v_N))) ((t_Slice ((v_T)))) := *) (* { *) (* AsRef_f_as_ref := fun (self : t_Array ((v_T)) (v_N))=> *) (* Index_f_index (self) (Build_t_RangeFull); *) (* }. *) Definition match_list `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_LIST ((v_T)) := s. (* Fixpoint from_u128_binary (x : t_u128) `{PartialEq_f_ne (x) (0) = true} : t_Positive := *) (* if *) (* PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* PartialEq_f_eq (Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_u128_binary (Div_f_div (x) (Build_t_u128 (Build_t_U128 2)))) *) (* else *) (* xI (from_u128_binary (Div_f_div (x) (2))). *) (* Instance t_From_383682059 : t_From ((t_HaxInt)) ((t_u128)) := *) (* { *) (* From_f_from := fun (x : t_u128)=> *) (* if *) (* PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_u128_binary (x)); *) (* }. *) (* Instance t_From_394907254 : t_From ((t_Z)) ((t_i128)) := *) (* { *) (* From_f_from := fun (x : t_i128)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_u128_binary (impl__i128__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_u128_binary (impl__i128__unsigned_abs (x))) *) (* end; *) (* }. 
*) (* Fixpoint from_u16_binary (x : t_u16) `{ne (x) (0) = true} : t_Positive := *) (* if *) (* t_PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_u16_binary (t_Div_f_div (x) (2))) *) (* else *) (* xI (from_u16_binary (t_Div_f_div (x) (2))). *) (* Instance t_From_283547720 : t_From ((t_HaxInt)) ((t_u16)) := *) (* { *) (* From_f_from := fun (x : t_u16)=> *) (* if *) (* t_PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_u16_binary (x)); *) (* }. *) (* Instance t_From_960274744 : t_From ((t_Z)) ((t_i16)) := *) (* { *) (* From_f_from := fun (x : t_i16)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_u16_binary (impl__i16__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_u16_binary (impl__i16__unsigned_abs (x))) *) (* end; *) (* }. *) (* Fixpoint from_u32_binary (x : t_u32) `{ne (x) (0) = true} : t_Positive := *) (* if *) (* t_PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_u32_binary (t_Div_f_div (x) (2))) *) (* else *) (* xI (from_u32_binary (t_Div_f_div (x) (2))). *) (* Instance t_From_247317262 : t_From ((t_HaxInt)) ((t_u32)) := *) (* { *) (* From_f_from := fun (x : t_u32)=> *) (* if *) (* t_PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_u32_binary (x)); *) (* }. *) (* Instance t_From_1033810922 : t_From ((t_Z)) ((t_i32)) := *) (* { *) (* From_f_from := fun (x : t_i32)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_u32_binary (impl__i32__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_u32_binary (impl__i32__unsigned_abs (x))) *) (* end; *) (* }. 
*) (* Fixpoint from_u64_binary (x : t_u64) `{ne (x) (0) = true} : t_Positive := *) (* if *) (* t_PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_u64_binary (t_Div_f_div (x) (2))) *) (* else *) (* xI (from_u64_binary (t_Div_f_div (x) (2))). *) (* Instance t_From_703205527 : t_From ((t_HaxInt)) ((t_u64)) := *) (* { *) (* From_f_from := fun (x : t_u64)=> *) (* if *) (* t_PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_u64_binary (x)); *) (* }. *) (* Instance t_From_494553464 : t_From ((t_Z)) ((t_i64)) := *) (* { *) (* From_f_from := fun (x : t_i64)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_u64_binary (impl__i64__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_u64_binary (impl__i64__unsigned_abs (x))) *) (* end; *) (* }. *) (* Fixpoint from_u8_binary (x : t_u8) `{ne (x) (0) = true} : t_Positive := *) (* if *) (* t_PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_u8_binary (t_Div_f_div (x) (2))) *) (* else *) (* xI (from_u8_binary (t_Div_f_div (x) (2))). *) (* Instance t_From_421078324 : t_From ((t_HaxInt)) ((t_u8)) := *) (* { *) (* From_f_from := fun (x : t_u8)=> *) (* if *) (* t_PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_u8_binary (x)); *) (* }. *) (* Instance t_From_976104611 : t_From ((t_Z)) ((t_i8)) := *) (* { *) (* From_f_from := fun (x : t_i8)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_u8_binary (impl__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_u8_binary (impl__unsigned_abs (x))) *) (* end; *) (* }. 
*) (* Fixpoint from_usize_binary (x : t_usize) `{ne (x) (0) = true} : t_Positive := *) (* if *) (* t_PartialEq_f_eq (x) (1) *) (* then *) (* xH *) (* else *) (* if *) (* t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *) (* then *) (* xO (from_usize_binary (t_Div_f_div (x) (2))) *) (* else *) (* xI (from_usize_binary (t_Div_f_div (x) (2))). *) (* Instance t_From_226738852 : t_From ((t_HaxInt)) ((t_usize)) := *) (* { *) (* From_f_from := fun (x : t_usize)=> *) (* if *) (* t_PartialEq_f_eq (x) (0) *) (* then *) (* v_HaxInt_ZERO *) (* else *) (* positive_to_int (from_usize_binary (x)); *) (* }. *) (* Instance t_From_235021044 : t_From ((t_Z)) ((t_isize)) := *) (* { *) (* From_f_from := fun (x : t_isize)=> *) (* match Ord_f_cmp (x) (0) with *) (* | Ordering_Equal => *) (* Z_ZERO *) (* | Ordering_Less => *) (* Z_NEG (from_usize_binary (impl__isize__unsigned_abs (x))) *) (* | Ordering_Greater => *) (* Z_POS (from_usize_binary (impl__isize__unsigned_abs (x))) *) (* end; *) (* }. *) (* Fixpoint to_u128_binary (self : t_Positive) : t_u128 := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_u128_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_u128_binary (p)) (2)) (1) *) (* end. *) (* Instance t_From_312029210 : t_From ((t_u128)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_u128_binary (p) *) (* end; *) (* }. *) (* Instance t_From_166626519 : t_From ((t_i128)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_u128_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_u128_binary (x)) *) (* end; *) (* }. 
*) (* Fixpoint to_u16_binary (self : t_Positive) : t_u16 := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_u16_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_u16_binary (p)) (2)) (1) *) (* end. *) (* Instance t_From_863803022 : t_From ((t_u16)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_u16_binary (p) *) (* end; *) (* }. *) (* Instance t_From_217241508 : t_From ((t_i16)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_u16_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_u16_binary (x)) *) (* end; *) (* }. *) (* Fixpoint to_u32_binary (self : t_Positive) : t_u32 := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_u32_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_u32_binary (p)) (2)) (1) *) (* end. *) (* Instance t_From_38549956 : t_From ((t_u32)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_u32_binary (p) *) (* end; *) (* }. *) (* Instance t_From_567539816 : t_From ((t_i32)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_u32_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_u32_binary (x)) *) (* end; *) (* }. *) (* Fixpoint to_u64_binary (self : t_Positive) : t_u64 := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_u64_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_u64_binary (p)) (2)) (1) *) (* end. 
*) (* Instance t_From_100316698 : t_From ((t_u64)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_u64_binary (p) *) (* end; *) (* }. *) (* Instance t_From_99611562 : t_From ((t_i64)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_u64_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_u64_binary (x)) *) (* end; *) (* }. *) (* Fixpoint to_u8_binary (self : t_Positive) : t_u8 := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_u8_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_u8_binary (p)) (2)) (1) *) (* end. *) (* Instance t_From_360336196 : t_From ((t_u8)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_u8_binary (p) *) (* end; *) (* }. *) (* Instance t_From_168893964 : t_From ((t_i8)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_u8_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_u8_binary (x)) *) (* end; *) (* }. *) (* Fixpoint to_usize_binary (self : t_Positive) : t_usize := *) (* match match_positive (self) with *) (* | POSITIVE_XH => *) (* 1 *) (* | POSITIVE_XO (p) => *) (* t_Mul_f_mul (to_usize_binary (p)) (2) *) (* | POSITIVE_XI (p) => *) (* t_Add_f_add (t_Mul_f_mul (to_usize_binary (p)) (2)) (1) *) (* end. *) (* Instance t_From_545039540 : t_From ((t_usize)) ((t_HaxInt)) := *) (* { *) (* From_f_from := fun (x : t_HaxInt)=> *) (* match match_pos (x) with *) (* | POS_ZERO => *) (* 0 *) (* | POS_POS (p) => *) (* to_usize_binary (p) *) (* end; *) (* }. 
*) (* Instance t_From_931346405 : t_From ((t_isize)) ((t_Z)) := *) (* { *) (* From_f_from := fun (x : t_Z)=> *) (* match x with *) (* | Z_NEG (x) => *) (* sub (neg (cast (sub (to_usize_binary (x)) (1)))) (1) *) (* | Z_ZERO => *) (* 0 *) (* | Z_POS (x) => *) (* cast (to_usize_binary (x)) *) (* end; *) (* }. *) (* Instance v_SliceIndex_622480125 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : v_SliceIndex ((t_usize)) ((t_Slice ((v_T)))) := *) (* { *) (* SliceIndex_f_Output := v_T; *) (* SliceIndex_f_index := fun (self : t_usize) (slice : t_Slice ((v_T)))=> *) (* let x : t_usize := Into_f_into (U64_f_v (usize_0 self)) in *) (* Index_f_index (t_Index := _) (slice) (x); *) (* }. *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_Spec. Export Core_Base_Spec. From Core Require Import Core_Base_Binary. Export Core_Base_Binary. From Core Require Import Core_Base_Pos. Export Core_Base_Pos. From Core Require Import Core_Base_Z. Export Core_Base_Z. (* From Core Require Import Core_Base_Number_conversion. *) (* Export Core_Base_Number_conversion. *) From Core Require Import Core_Base_Seq. Export Core_Base_Seq. 
(* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *)
================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Binary.v
================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Base_Spec.
Export Core_Base_Spec.
From Core Require Import Core_Cmp.
Export Core_Cmp.
From Core Require Import Core_Option.
Export Core_Option.
From Core Require Import Core_Clone.
Export Core_Clone.

(* Comparison of binary positives by continuation, scanning from the
   low-order bit upward.  [r] carries the verdict of the bits compared so
   far; it is only returned when the remaining high bits of [x] and [y]
   are equal.  This mirrors the shape of Coq's [Pos.compare_cont] (the
   correspondence is proved by [positive_cmp_is_spec] in Core_Base_Seq.v). *)
Fixpoint positive_cmp__cmp_binary_cont (x : t_Positive) (y : t_Positive) (r : t_Ordering) : t_Ordering :=
  match match_positive (x) with
  | POSITIVE_XH =>
    (* x = 1: y decides — equal only if y is also 1, otherwise x < y. *)
    match match_positive (y) with
    | POSITIVE_XH => r
    | POSITIVE_XO (q) | POSITIVE_XI (q) => Ordering_Less
    end
  | POSITIVE_XO (p) =>
    (* x = 2p: on equal tags recurse with unchanged verdict; against an
       xI of equal length the low bit makes x smaller, hence Less. *)
    match match_positive (y) with
    | POSITIVE_XH => Ordering_Greater
    | POSITIVE_XO (q) => positive_cmp__cmp_binary_cont (p) (q) (r)
    | POSITIVE_XI (q) => positive_cmp__cmp_binary_cont (p) (q) (Ordering_Less)
    end
  | POSITIVE_XI (p) =>
    (* x = 2p+1: symmetric to the xO case. *)
    match match_positive (y) with
    | POSITIVE_XH => Ordering_Greater
    | POSITIVE_XO (q) => positive_cmp__cmp_binary_cont (p) (q) (Ordering_Greater)
    | POSITIVE_XI (q) => positive_cmp__cmp_binary_cont (p) (q) (r)
    end
  end.

(* Total comparison on positives, seeded with Equal. *)
Definition positive_cmp (lhs : t_Positive) (rhs : t_Positive) : t_Ordering :=
  positive_cmp__cmp_binary_cont (lhs) (rhs) (Ordering_Equal).

(* lhs <= rhs, derived from [positive_cmp].  The [Option_Some] wrapper is a
   generated-code artifact: the match is on a value that is always Some. *)
Definition positive_le (lhs : t_Positive) (rhs : t_Positive) : bool :=
  match Option_Some (positive_cmp (lhs) (rhs)) with
  | Option_Some (Ordering_Less | Ordering_Equal) => true
  | _ => false
  end.
(* pred_double: 2p |-> 2(2p)-1 = xI (pred_double p); 2p+1 |-> 2(2p+1)-1 = xI (xO p);
   1 |-> clipped to 1 (xH has no predecessor-double below it).
   Same recursion shape as Coq stdlib [Pos.pred_double]. *)
Fixpoint positive_pred_double (s : t_Positive) : t_Positive :=
  match match_positive (s) with
  | POSITIVE_XH => xH
  | POSITIVE_XO (p) => xI (positive_pred_double (p))
  | POSITIVE_XI (p) => xI (xO (p))
  end.

(* Successor on binary positives: 1 |-> 2; 2q |-> 2q+1; 2q+1 |-> 2(q+1). *)
Fixpoint positive_succ (s : t_Positive) : t_Positive :=
  match match_positive (s) with
  | POSITIVE_XH => xO (xH)
  | POSITIVE_XO (q) => xI (q)
  | POSITIVE_XI (q) => xO (positive_succ (q))
  end.

(* Binary addition with an explicit carry, as two mutually recursive
   functions: [__add] computes lhs + rhs, [__add_carry] computes
   lhs + rhs + 1.  Mirrors the structure of Coq's [Pos.add]/[Pos.add_carry]. *)
Fixpoint positive_add__add (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    (* 1 + rhs = succ rhs, unrolled one level. *)
    match match_positive (rhs) with
    | POSITIVE_XH => xO (xH)
    | POSITIVE_XO (q) => xI (q)
    | POSITIVE_XI (q) => xO (positive_succ (q))
    end
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xI (p)
    | POSITIVE_XO (q) => xO (positive_add__add (p) (q))
    | POSITIVE_XI (q) => xI (positive_add__add (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xO (positive_succ (p))
    | POSITIVE_XO (q) => xI (positive_add__add (p) (q))
    (* both low bits set: emit 0 and propagate a carry. *)
    | POSITIVE_XI (q) => xO (positive_add__add_carry (p) (q))
    end
  end
with positive_add__add_carry (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    (* 1 + rhs + 1 = rhs + 2. *)
    match match_positive (rhs) with
    | POSITIVE_XH => xI (xH)
    | POSITIVE_XO (q) => xO (positive_succ (q))
    | POSITIVE_XI (q) => xI (positive_succ (q))
    end
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xO (positive_succ (p))
    | POSITIVE_XO (q) => xI (positive_add__add (p) (q))
    | POSITIVE_XI (q) => xO (positive_add__add_carry (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xI (positive_succ (p))
    | POSITIVE_XO (q) => xO (positive_add__add_carry (p) (q))
    | POSITIVE_XI (q) => xI (positive_add__add_carry (p) (q))
    end
  end.

(* Public entry point: addition without incoming carry. *)
Definition positive_add (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=
  positive_add__add (lhs) (rhs).
(* Schoolbook binary multiplication: 1*r = r; (2p)*r = 2(p*r);
   (2p+1)*r = r + 2(p*r).  The [Clone_f_clone] call is a generated artifact
   of translating Rust ownership; it is the identity on the Coq side. *)
Fixpoint positive_mul (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=
  match match_positive (lhs) with
  | POSITIVE_XH => rhs
  | POSITIVE_XO (p) => xO (positive_mul (p) (rhs))
  | POSITIVE_XI (p) => positive_add (Clone_f_clone (rhs)) (xO (positive_mul (p) (rhs)))
  end.

================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Number_conversion.v
================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Base_Spec.
Export Core_Base_Spec.
From Core Require Import Core_Base.
Export Core_Base.
(* NOTE(review): the three Require/Export pairs below use the shape
   `Require Import Core (ident)`, which does not match the filtered-import
   vernacular (`Import M(ident)`), and this whole module is commented out of
   Core_Base.v — verify these lines before re-enabling the file. *)
From Core Require Import Core (t_primitive).
Export Core (t_primitive).
From Core Require Import Core (t_cmp).
Export Core (t_cmp).
From Core Require Import Core (t_convert).
Export Core (t_convert).
(* NotImplementedYet *)
(* Generated notation aliases mapping hax's numbered impl names onto the
   conversion helpers.  NOTE(review): several bodies (e.g. `impl_8 := impl_8`)
   refer to identifiers of the same name that are not defined in this file;
   presumably they are expected from an importing module — confirm before use. *)
Notation "'impl_24__from_u128_binary'" := (from_u128_binary).
Notation "'impl_8'" := (impl_8).
Notation "'impl_20'" := (impl_20).
Notation "'impl_24__from_u16_binary'" := (from_u16_binary).
Notation "'impl_2'" := (impl_2).
Notation "'impl_14'" := (impl_14).
Notation "'impl_24__from_u32_binary'" := (from_u32_binary).
Notation "'impl_4'" := (impl_4).
Notation "'impl_16'" := (impl_16).
Notation "'impl_24__from_u64_binary'" := (from_u64_binary).
Notation "'impl_6'" := (impl_6).
Notation "'impl_18'" := (impl_18).
Notation "'impl_24__from_u8_binary'" := (from_u8_binary).
Notation "'impl'" := (impl).
Notation "'impl_12'" := (impl_12).
Notation "'impl_24__from_usize_binary'" := (from_usize_binary).
Notation "'impl_10'" := (impl_10).
Notation "'impl_22'" := (impl_22).
Notation "'impl_24__to_u128_binary'" := (to_u128_binary).
(* Continuation of the generated notation aliases (to_* direction). *)
Notation "'impl_9'" := (impl_9).
Notation "'impl_21'" := (impl_21).
Notation "'impl_24__to_u16_binary'" := (to_u16_binary).
Notation "'impl_3'" := (impl_3).
Notation "'impl_15'" := (impl_15).
Notation "'impl_24__to_u32_binary'" := (to_u32_binary).
Notation "'impl_5'" := (impl_5).
Notation "'impl_17'" := (impl_17).
Notation "'impl_24__to_u64_binary'" := (to_u64_binary).
Notation "'impl_7'" := (impl_7).
Notation "'impl_19'" := (impl_19).
Notation "'impl_24__to_u8_binary'" := (to_u8_binary).
Notation "'impl_1'" := (impl_1).
Notation "'impl_13'" := (impl_13).
Notation "'impl_24__to_usize_binary'" := (to_usize_binary).
Notation "'impl_11'" := (impl_11).
Notation "'impl_23'" := (impl_23).
================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Pos.v
================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Base_Spec.
Export Core_Base_Spec.
From Core Require Import Core_Base_Binary.
Export Core_Base_Binary.
From Core Require Import Core_Cmp (t_Ordering).
Export Core_Cmp (t_Ordering).

(* Double a non-negative integer: 0 |-> 0, p |-> xO p (i.e. 2p). *)
Definition haxint_double (s : t_HaxInt) : t_HaxInt :=
  match match_pos (s) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (p) => positive_to_int (xO (p))
  end.

(* Floor-halve a non-negative integer: 0 and 1 go to 0; both 2p and 2p+1
   go to p (the low bit is dropped). *)
Definition haxint_shr__half (s : t_HaxInt) : t_HaxInt :=
  match match_pos (s) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (n) =>
    match match_positive (n) with
    | POSITIVE_XH => v_HaxInt_ZERO
    | POSITIVE_XO (p) => positive_to_int (p)
    | POSITIVE_XI (p) => positive_to_int (p)
    end
  end.
(* 2*lhs on t_HaxInt; used by subtraction to rebuild the result bit-by-bit.
   Mirrors Coq stdlib's [N.double] used in [Pos.sub_mask]. *)
Definition haxint_sub__double_mask (lhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (p) => positive_to_int (xO (p))
  end.

(* 2*lhs + 1 on t_HaxInt (0 |-> 1, p |-> xI p). *)
Definition haxint_sub__succ_double_mask (lhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => positive_to_int (xH)
  | POS_POS (p) => positive_to_int (xI (p))
  end.

(* 2*s + 1, returned as a positive (always nonzero, so the codomain can be
   t_Positive rather than t_HaxInt). *)
Definition haxint_succ_double (s : t_HaxInt) : t_Positive :=
  match match_pos (s) with
  | POS_ZERO => xH
  | POS_POS (p) => xI (p)
  end.

(* Bitwise AND of two binary positives; result may be zero, hence t_HaxInt.
   Each case ANDs the low bits and recurses on the high bits. *)
Fixpoint bitand_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    (* 1 AND rhs: keep only rhs's low bit. *)
    match match_positive (rhs) with
    | POSITIVE_XO (q) => v_HaxInt_ZERO
    | POSITIVE_XI (_) | POSITIVE_XH => v_HaxInt_ONE
    end
  | POSITIVE_XO (p) =>
    (* low bit of lhs is 0: low bit of the result is 0. *)
    match match_positive (rhs) with
    | POSITIVE_XH => v_HaxInt_ZERO
    | POSITIVE_XO (q) | POSITIVE_XI (q) => haxint_double (bitand_binary (p) (q))
    end
  | POSITIVE_XI (p) =>
    (* low bit of lhs is 1: the result's low bit is rhs's low bit. *)
    match match_positive (rhs) with
    | POSITIVE_XH => v_HaxInt_ONE
    | POSITIVE_XO (q) => haxint_double (bitand_binary (p) (q))
    | POSITIVE_XI (q) => positive_to_int (haxint_succ_double (bitand_binary (p) (q)))
    end
  end.

(* Bitwise OR of two binary positives; always positive. *)
Fixpoint bitor_binary (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    match match_positive (rhs) with
    | POSITIVE_XO (q) => xI (q)
    | POSITIVE_XH => xH
    | POSITIVE_XI (q) => xI (q)
    end
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xI (p)
    | POSITIVE_XO (q) => xO (bitor_binary (p) (q))
    | POSITIVE_XI (q) => xI (bitor_binary (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => xI (p)
    | POSITIVE_XO (q) | POSITIVE_XI (q) => xI (bitor_binary (p) (q))
    end
  end.

(* Lift bitwise AND to t_HaxInt: zero absorbs on either side. *)
Definition haxint_bitand (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => v_HaxInt_ZERO
    | POS_POS (q) => bitand_binary (p) (q)
    end
  end.
(* Lift bitwise OR to t_HaxInt: zero is the identity on either side. *)
Definition haxint_bitor (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => rhs
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => positive_to_int (p)
    | POS_POS (q) => positive_to_int (bitor_binary (p) (q))
    end
  end.

(* Bitwise XOR of two binary positives; may cancel to zero, hence t_HaxInt.
   Equal low bits yield a 0 bit (haxint_double); differing low bits yield
   a 1 bit (haxint_succ_double). *)
Fixpoint haxint_bitxor__bitxor_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    (* 1 XOR rhs: flips rhs's low bit. *)
    match match_positive (rhs) with
    | POSITIVE_XH => v_HaxInt_ZERO
    | POSITIVE_XO (q) => positive_to_int (xI (q))
    | POSITIVE_XI (q) => positive_to_int (xO (q))
    end
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => positive_to_int (xI (p))
    | POSITIVE_XO (q) => haxint_double (haxint_bitxor__bitxor_binary (p) (q))
    | POSITIVE_XI (q) => positive_to_int (haxint_succ_double (haxint_bitxor__bitxor_binary (p) (q)))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => positive_to_int (xO (p))
    | POSITIVE_XO (q) => positive_to_int (haxint_succ_double (haxint_bitxor__bitxor_binary (p) (q)))
    | POSITIVE_XI (q) => haxint_double (haxint_bitxor__bitxor_binary (p) (q))
    end
  end.

(* Lift XOR to t_HaxInt: zero is the identity on either side. *)
Definition haxint_bitxor (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => rhs
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => positive_to_int (p)
    | POS_POS (q) => haxint_bitxor__bitxor_binary (p) (q)
    end
  end.

(* Three-way comparison on t_HaxInt: zero is smaller than any positive;
   two positives defer to [positive_cmp]. *)
Definition haxint_cmp (lhs : t_HaxInt) (rhs : t_HaxInt) : t_Ordering :=
  match match_pos (lhs) with
  | POS_ZERO =>
    match match_pos (rhs) with
    | POS_ZERO => Ordering_Equal
    | POS_POS (q) => Ordering_Less
    end
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => Ordering_Greater
    | POS_POS (q) => positive_cmp (p) (q)
    end
  end.

(* lhs <= rhs on t_HaxInt.  The [Option_Some] wrapper is a generated-code
   artifact: the scrutinee is always Some. *)
Definition haxint_le (lhs : t_HaxInt) (rhs : t_HaxInt) : bool :=
  match Option_Some (haxint_cmp (lhs) (rhs)) with
  | Option_Some (Ordering_Less | Ordering_Equal) => true
  | _ => false
  end.
(* lhs < rhs on t_HaxInt; spec lemma [haxint_lt_is_spec] in Core_Base_Seq.v
   relates this to [N.ltb]. *)
Definition haxint_lt (lhs : t_HaxInt) (rhs : t_HaxInt) : bool :=
  match Option_Some (haxint_cmp (lhs) (rhs)) with
  | Option_Some (Ordering_Less) => true
  | _ => false
  end.

(* Left shift by a unary counter: double [lhs] once per successor of [rhs].
   The leading zero test short-circuits (0 << n = 0 without recursing). *)
Fixpoint haxint_shl__shl_helper (rhs : t_Unary) (lhs : t_HaxInt) : t_HaxInt :=
  if is_zero (Clone_f_clone (lhs))
  then lhs
  else
    match match_unary (rhs) with
    | UNARY_ZERO => lhs
    | UNARY_SUCC (n) => haxint_shl__shl_helper (n) (haxint_double (lhs))
    end.

(* lhs << rhs: convert the shift amount to unary, then iterate doubling. *)
Definition haxint_shl (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  haxint_shl__shl_helper (unary_from_int (rhs)) (lhs).

(* Right shift by a unary counter: halve [lhs] once per successor of [rhs];
   0 >> n short-circuits to 0. *)
Fixpoint haxint_shr__shr_helper (rhs : t_Unary) (lhs : t_HaxInt) : t_HaxInt :=
  if is_zero (Clone_f_clone (lhs))
  then lhs
  else
    match match_unary (rhs) with
    | UNARY_ZERO => lhs
    | UNARY_SUCC (n) => haxint_shr__shr_helper (n) (haxint_shr__half (lhs))
    end.

(* lhs >> rhs: convert the shift amount to unary, then iterate halving. *)
Definition haxint_shr (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  haxint_shr__shr_helper (unary_from_int (rhs)) (lhs).

(* 2*(lhs - 1) for positive lhs (1 |-> 0); subtraction helper mirroring the
   double_pred_mask step of Coq's [Pos.sub_mask_carry]. *)
Definition haxint_sub__double_pred_mask (lhs : t_Positive) : t_HaxInt :=
  match match_positive (lhs) with
  | POSITIVE_XH => v_HaxInt_ZERO
  | POSITIVE_XO (p) => positive_to_int (xO (positive_pred_double (p)))
  | POSITIVE_XI (p) => positive_to_int (xO (xO (p)))
  end.

(* 2^s as a binary positive, with the exponent in unary. *)
Fixpoint power_of_two (s : t_Unary) : t_Positive :=
  match match_unary (s) with
  | UNARY_ZERO => xH
  | UNARY_SUCC (x) => xO (power_of_two (x))
  end.

(* Addition on t_HaxInt: zero is the identity; positives use [positive_add]. *)
Definition haxint_add (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => rhs
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => positive_to_int (p)
    | POS_POS (q) => positive_to_int (positive_add (p) (q))
    end
  end.
(* Truncated binary subtraction with an explicit borrow, as two mutually
   recursive functions: [__sub_binary] computes lhs - rhs and
   [__sub_carry] computes lhs - rhs - 1, both saturating at 0 when the
   result would be non-positive.  Mirrors Coq's [Pos.sub_mask]/
   [Pos.sub_mask_carry] with t_HaxInt playing the role of the mask. *)
Fixpoint haxint_sub__sub_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=
  match match_positive (lhs) with
  | POSITIVE_XH => v_HaxInt_ZERO  (* 1 - rhs <= 0 for every positive rhs *)
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => positive_to_int (positive_pred_double (p))
    | POSITIVE_XO (q) => haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))
    (* 2p - (2q+1): low bits 0-1 borrow from the high part. *)
    | POSITIVE_XI (q) => haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => positive_to_int (xO (p))
    | POSITIVE_XO (q) => haxint_sub__succ_double_mask (haxint_sub__sub_binary (p) (q))
    | POSITIVE_XI (q) => haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))
    end
  end
with haxint_sub__sub_carry (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=
  match match_positive (lhs) with
  | POSITIVE_XH => v_HaxInt_ZERO  (* 1 - rhs - 1 <= 0 *)
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => haxint_sub__double_pred_mask (p)
    | POSITIVE_XO (q) => haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))
    | POSITIVE_XI (q) => haxint_sub__double_mask (haxint_sub__sub_carry (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => positive_to_int (positive_pred_double (p))
    (* (2p+1) - 2q - 1 = 2(p - q): the borrow is absorbed by the low bit. *)
    | POSITIVE_XO (q) => haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))
    | POSITIVE_XI (q) => haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))
    end
  end.

(* Saturating subtraction on t_HaxInt: 0 - rhs = 0; lhs - 0 = lhs. *)
Definition haxint_sub (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => positive_to_int (p)
    | POS_POS (q) => haxint_sub__sub_binary (p) (q)
    end
  end.
(* Euclidean division of positives by long division over the bits of [a],
   most-significant first (the recursion peels the low constructor, the
   shift-and-compare happens on the way back up).  Returns (quotient,
   remainder).  Mirrors the structure of Coq's [N.pos_div_eucl]. *)
Fixpoint haxint_divmod__divmod_binary (a : t_Positive) (b : t_Positive) : (t_HaxInt*t_HaxInt) :=
  match match_positive (a) with
  | POSITIVE_XH =>
    (* 1 / 1 = (1, 0); 1 / b = (0, 1) for b >= 2. *)
    match match_positive (b) with
    | POSITIVE_XH => (v_HaxInt_ONE,v_HaxInt_ZERO)
    | POSITIVE_XO (q) | POSITIVE_XI (q) => (v_HaxInt_ZERO,v_HaxInt_ONE)
    end
  | POSITIVE_XO (a___) =>
    (* a = 2a': divide a', double the remainder, then subtract b once if
       the doubled remainder reaches b. *)
    let (q,r) := haxint_divmod__divmod_binary (a___) (Clone_f_clone (b)) in
    let r___ := haxint_double (r) in
    if haxint_le (positive_to_int (Clone_f_clone (b))) (Clone_f_clone (r___))
    then (positive_to_int (haxint_succ_double (q)),haxint_sub (r___) (positive_to_int (b)))
    else (haxint_double (q),r___)
  | POSITIVE_XI (a___) =>
    (* a = 2a'+1: same, but the shifted-in bit is 1. *)
    let (q,r) := haxint_divmod__divmod_binary (a___) (Clone_f_clone (b)) in
    let r___ := positive_to_int (haxint_succ_double (r)) in
    if haxint_le (positive_to_int (Clone_f_clone (b))) (Clone_f_clone (r___))
    then (positive_to_int (haxint_succ_double (q)),haxint_sub (r___) (positive_to_int (b)))
    else (haxint_double (q),r___)
  end.

(* Division with remainder on t_HaxInt.  By convention a / 0 = (0, a),
   matching Coq's [N.div_eucl]. *)
Definition haxint_divmod (a : t_HaxInt) (b : t_HaxInt) : (t_HaxInt*t_HaxInt) :=
  match match_pos (a) with
  | POS_ZERO => (v_HaxInt_ZERO,v_HaxInt_ZERO)
  | POS_POS (p) =>
    match match_pos (b) with
    | POS_ZERO => (v_HaxInt_ZERO,positive_to_int (p))
    | POS_POS (q) => haxint_divmod__divmod_binary (p) (q)
    end
  end.

(* Quotient projection of [haxint_divmod]. *)
Definition haxint_div (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  let (q,_) := haxint_divmod (lhs) (rhs) in
  q.

(* Multiplication on t_HaxInt: zero absorbs; positives use [positive_mul]. *)
Definition haxint_mul (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  match match_pos (lhs) with
  | POS_ZERO => v_HaxInt_ZERO
  | POS_POS (p) =>
    match match_pos (rhs) with
    | POS_ZERO => v_HaxInt_ZERO
    | POS_POS (q) => positive_to_int (positive_mul (p) (q))
    end
  end.

(* Remainder projection of [haxint_divmod]. *)
Definition haxint_rem (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=
  let (_,r) := haxint_divmod (lhs) (rhs) in
  r.
================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Seq.v
================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Base_Spec.
Export Core_Base_Spec.
From Core Require Import Core_Base_Pos.
Export Core_Base_Pos.
From Core Require Import Core_Clone (t_Clone).
Export Core_Clone (t_Clone).
From Core Require Import Core_Cmp.
Export Core_Cmp.
From Core Require Import Core_Marker (t_Sized).
Export Core_Marker (t_Sized).
From Core Require Import Core_Panicking.
Export Core_Panicking.

(* Cold panic helper used by [hd]; the `{HFalse : t_Never} argument is an
   inhabitant of the empty type, so this is only callable from dead code. *)
Definition hd__panic_cold_explicit '(_ : unit) `{HFalse : t_Never} : t_Never :=
  panic_explicit (tt) HFalse.

(* Cold panic helper used by [set_index__set_index_unary]; same pattern. *)
Definition set_index__set_index_unary__panic_cold_explicit '(_ : unit) `{HFalse : t_Never} : t_Never :=
  panic_explicit (tt) HFalse.

(* True iff the sequence has no elements. *)
Definition is_empty `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : bool :=
  match match_list (s) with
  | LIST_NIL => true
  | LIST_CONS (_) (_) => false
  end.

(* First element of a non-empty sequence.  The precondition [Hpre] rules
   out the NIL branch: the dependent `as/return` match turns Hpre into a
   proof of False there, so the panic helper is never actually reached. *)
Definition hd `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) `{Hpre : negb (is_empty (s)) = true} : v_T :=
  match match_list (s) as s return negb (is_empty (s)) = true -> _ with
  | LIST_NIL => fun HFalse => never_to_any (hd__panic_cold_explicit (tt) (False_rect _ (Bool.diff_false_true HFalse)))
  | LIST_CONS (hd) (_) => fun _ => hd
  end Hpre.

(* Tail of a non-empty sequence; on NIL (excluded by Hpre) it returns nil. *)
Definition tl `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) `{Hpre : negb (is_empty (s)) = true} : t_Seq ((v_T)) :=
  match match_list (s) with
  | LIST_NIL => nil (* (tt) *)
  | LIST_CONS (_) (tl) => tl
  end.
(* Structural element-wise equality of two sequences, requiring a
   t_PartialEq instance on the element type. *)
Fixpoint eq_inner `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} `{t_PartialEq (v_T) (v_T)} (s : t_Seq ((v_T))) (other : t_Seq ((v_T))) : bool :=
  match match_list (Clone_f_clone (s)) with
  | LIST_NIL => is_empty (Clone_f_clone (other))
  | LIST_CONS (x) (xs) =>
    match match_list (Clone_f_clone (other)) with
    | LIST_NIL => false
    | LIST_CONS (y) (ys) => andb (PartialEq_f_eq (x) (y)) (eq_inner (xs) (ys))
    end
  end.

(* PartialEq instance for sequences, delegating to [eq_inner]; ne is the
   boolean negation of eq. *)
Instance t_PartialEq_126322860 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} `{t_PartialEq (v_T) (v_T)} : t_PartialEq ((t_Seq ((v_T)))) ((t_Seq ((v_T)))) :=
  {
    PartialEq_f_eq := fun (self : t_Seq ((v_T))) (other : t_Seq ((v_T)))=>
      eq_inner (Clone_f_clone (self)) (Clone_f_clone (other));
    PartialEq_f_ne := fun (self : t_Seq ((v_T))) (other : t_Seq ((v_T)))=>
      negb (eq_inner (Clone_f_clone (self)) (Clone_f_clone (other)));
  }.

(* Length of a sequence as a unary natural (one [succ] per element). *)
Fixpoint len__len_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_Unary :=
  match match_list (s) with
  | LIST_NIL => unary_from_int(v_HaxInt_ZERO)
  | LIST_CONS (_) (tl) => succ (len__len_unary (tl))
  end.

(* Length as a t_HaxInt, via the unary counter. *)
Definition len `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_HaxInt :=
  unary_to_int(len__len_unary(s)).

(* Specification lemma: [positive_cmp] agrees with the standard library's
   positive comparison [Pos.compare].  The proof reduces both sides to
   their comparison continuations and shows, by induction on p, that a
   non-Equal carried verdict can never come back as Equal on either side. *)
Lemma positive_cmp_is_spec : forall p q,
  match positive_cmp p q with
  | Ordering_Less => Lt
  | Ordering_Equal => Eq
  | Ordering_Greater => Gt
  end = (p ?= q)%positive.
{
  clear.
  intros.
  unfold positive_cmp.
  unfold "?="%positive.
  set (Ordering_Equal).
  pose (match Eq with | Lt => Ordering_Less | Gt => Ordering_Greater | Eq => Ordering_Equal end).
  replace t with t0 by reflexivity.
  clear t.
  (* Pos.compare_cont never turns a non-Eq seed into Eq. *)
  assert (forall c p q, c <> Eq -> Pos.compare_cont c p q <> Eq).
  {
    clear ; intros.
    generalize dependent c.
    generalize dependent q.
    induction p ; intros ; destruct q, c ; (easy || now apply IHp).
  }
  (* Same stability property for our continuation. *)
  assert (forall c p q, c <> Ordering_Equal -> positive_cmp__cmp_binary_cont p q c <> Ordering_Equal).
  {
    clear ; intros.
    generalize dependent c.
    generalize dependent q.
    induction p ; intros ; destruct q, c ; (easy || now apply IHp).
  }
  subst t0.
  set Eq.
  generalize dependent c.
  generalize dependent q.
  induction p ; intros.
  - destruct q.
    + apply IHp.
    + simpl.
      rewrite <- IHp.
      destruct positive_cmp__cmp_binary_cont eqn:ov.
      * reflexivity.
      * exfalso.
        refine (H0 _ p q _ ov).
        easy.
      * reflexivity.
    + reflexivity.
  - destruct q.
    + simpl.
      rewrite <- IHp.
      destruct positive_cmp__cmp_binary_cont eqn:ov.
      * reflexivity.
      * exfalso.
        refine (H0 _ p q _ ov).
        easy.
      * reflexivity.
    + apply IHp.
    + reflexivity.
  - now destruct q, c.
}
Qed.

(* Specification lemma: [haxint_lt] agrees with [N.ltb] on binary naturals. *)
Lemma haxint_lt_is_spec : forall x y, haxint_lt x y = N.ltb x y.
{
  intros.
  destruct x as [ | p], y as [ | q].
  - easy.
  - easy.
  - easy.
  - unfold haxint_lt.
    unfold haxint_cmp.
    simpl.
    unfold N.ltb.
    simpl.
    rewrite <- positive_cmp_is_spec.
    now destruct (positive_cmp).
}
Qed.

(* Bounds-checked indexing with a unary index.  The in-bounds proof [Hpre]
   is threaded through: obligation 1-2 show the list is non-empty in each
   branch, obligation 3 re-establishes the bound for the recursive call on
   the tail (via the N.ltb spec lemmas above). *)
Program Fixpoint get_index__get_index_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (l : t_Seq ((v_T))) (i : t_Unary) `{Hpre : haxint_lt(unary_to_int i) (len l) = true} : v_T :=
  match match_unary (i) with
  | UNARY_ZERO => hd (Hpre := Hpre) (l)
  | UNARY_SUCC (n) => get_index__get_index_unary (tl (Hpre := _) (l)) (n)
  end.
Next Obligation.
  unfold match_unary in Heq_anonymous.
  subst.
  now destruct l.
Qed.
Next Obligation.
  unfold match_unary in Heq_anonymous.
  subst.
  now destruct l.
Qed.
Next Obligation.
  unfold match_unary in Heq_anonymous.
  subst.
  destruct l.
  - easy.
  - simpl.
    rewrite haxint_lt_is_spec.
    epose Hpre.
    rewrite haxint_lt_is_spec in e.
    apply N.ltb_lt.
    apply N.ltb_lt in e.
    apply N.succ_lt_mono.
    unfold len ; rewrite <- !Nnat.Nat2N.inj_succ.
    apply e.
Qed.
Fail Next Obligation.

(* Public indexing on a t_HaxInt index; converts to unary and discharges
   the transported bound with a round-trip rewrite (N2Nat.id). *)
Definition get_index `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (i : t_HaxInt) {Hpre : haxint_lt (i) (len s) = true} : v_T :=
  get_index__get_index_unary (Hpre := ltac:(now rewrite Nnat.N2Nat.id)) (s) (unary_from_int (i)).
(* Build a sequence of [n] (unary) copies of [v].
   NOTE: throughout this file [cons] takes the sequence first and the new
   element second (see also [rev__rev_accum] below). *)
Fixpoint repeat__repeat_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (n : t_Unary) (v : v_T) : t_Seq ((v_T)) :=
  match match_unary (n) with
  | UNARY_ZERO => nil (* (tt) *)
  | UNARY_SUCC (m) => cons (repeat__repeat_unary (m) (Clone_f_clone (v))) v
  end.

(* [repeat n v]: n copies of v, with n given as a t_HaxInt. *)
Definition repeat `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (n : t_HaxInt) (v : v_T) : t_Seq ((v_T)) :=
  repeat__repeat_unary (unary_from_int (n)) (v).

(* Tail-recursive reversal: move elements from [s] onto [accum]. *)
Fixpoint rev__rev_accum `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (accum : t_Seq ((v_T))) : t_Seq ((v_T)) :=
  match match_list (s) with
  | LIST_NIL => accum
  | LIST_CONS (hd) (tl) => rev__rev_accum (tl) (cons (accum) (hd))
  end.

(* Reverse a sequence, starting the accumulator at nil. *)
Definition rev `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_Seq ((v_T)) :=
  rev__rev_accum (s) (nil (* (tt) *)).

(* Bounds-checked functional update at a unary index.  The NIL branch is
   dead code: obligation 1 derives a contradiction from Hpre, and
   obligation 2 re-establishes the bound for the recursive call. *)
Program Fixpoint set_index__set_index_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (x : t_Seq ((v_T))) (i : t_Unary) (v : v_T) `{Hpre : haxint_lt(unary_to_int i) (len x) = true} : t_Seq ((v_T)) :=
  match match_list (x) with
  | LIST_NIL => never_to_any (set_index__set_index_unary__panic_cold_explicit (tt) _)
  | LIST_CONS (hd) (tl) =>
    match match_unary (i) with
    | UNARY_ZERO => cons (tl) (v)
    | UNARY_SUCC (n) => cons (set_index__set_index_unary (tl) (n) (v)) (hd)
    end
  end.
Next Obligation.
  unfold match_list in Heq_anonymous.
  subst.
  now destruct i.
Qed.
Next Obligation.
  unfold match_unary in Heq_anonymous.
  subst.
  unfold match_list in Heq_anonymous0.
  subst.
  rewrite haxint_lt_is_spec.
  rewrite haxint_lt_is_spec in Hpre.
  apply N.ltb_lt.
  apply N.ltb_lt in Hpre.
  apply N.succ_lt_mono.
  unfold len ; rewrite <- !Nnat.Nat2N.inj_succ.
  apply Hpre.
Qed.
Fail Next Obligation.

(* Public update on a t_HaxInt index; converts to unary and discharges the
   transported bound with a round-trip rewrite (N2Nat.id). *)
Definition set_index `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (i : t_HaxInt) (v : v_T) `{haxint_lt (i) (len (s)) = true} : t_Seq ((v_T)) :=
  set_index__set_index_unary (s) (Hpre := ltac:(now rewrite Nnat.N2Nat.id)) (unary_from_int (i)) (v).
================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Z.v
================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Base_Spec.
Export Core_Base_Spec.
From Core Require Import Core_Base_Binary.
Export Core_Base_Binary.
From Core Require Import Core_Cmp (t_Ordering).
Export Core_Cmp (t_Ordering).
From Core Require Import Core_Base_Pos.
Export Core_Base_Pos.

(* Arithmetic negation on signed t_Z: swap the sign constructors. *)
Definition z_neg (x : t_Z) : t_Z :=
  match x with
  | Z_NEG (p) => Z_POS (p)
  | Z_ZERO => Z_ZERO
  | Z_POS (p) => Z_NEG (p)
  end.

(* Successor of a t_POS as a positive (0 |-> 1, p |-> p+1).  The POS_POS
   branch takes a unary round trip (int -> unary -> succ -> int) and
   re-validates positivity with ltac:(easy) for [positive_from_int]. *)
Definition z_bitor__n_succ (x : t_POS) : t_Positive :=
  match x with
  | POS_ZERO => xH
  | POS_POS (p) => positive_from_int (Hpos := ltac:(easy)) (unary_to_int (succ (unary_from_int (positive_to_int (p)))))
  end.

(* 2*s on t_Z, preserving the sign. *)
Definition z_add__z_double (s : t_Z) : t_Z :=
  match s with
  | Z_ZERO => Z_ZERO
  | Z_POS (p) => Z_POS (xO (p))
  | Z_NEG (p) => Z_NEG (xO (p))
  end.

(* 2*x on t_POS. *)
Definition z_bitor__haxint_ldiff__n_double (x : t_POS) : t_POS :=
  match x with
  | POS_ZERO => POS_ZERO
  | POS_POS (p) => POS_POS (xO (p))
  end.

(* 2*x + 1 on t_POS (always positive). *)
Definition z_bitor__haxint_ldiff__n_succ_double (x : t_POS) : t_POS :=
  match x with
  | POS_ZERO => POS_POS (xH)
  | POS_POS (p) => POS_POS (xI (p))
  end.

(* 2*s - 1 on t_Z: 0 |-> -1; +p |-> +(2p-1); -p |-> -(2p+1). *)
Definition z_add__z_pred_double (s : t_Z) : t_Z :=
  match s with
  | Z_ZERO => Z_NEG (xH)
  | Z_POS (p) => Z_POS (positive_pred_double (p))
  | Z_NEG (p) => Z_NEG (xI (p))
  end.

(* 2*s + 1 on t_Z: 0 |-> 1; +p |-> +(2p+1); -p |-> -(2p-1). *)
Definition z_add__z_succ_double (s : t_Z) : t_Z :=
  match s with
  | Z_ZERO => Z_POS (xH)
  | Z_POS (p) => Z_POS (xI (p))
  | Z_NEG (p) => Z_NEG (positive_pred_double (p))
  end.
(* Logical difference (lhs AND NOT rhs) of two binary positives, used to
   implement signed bitwise OR via two's-complement identities.  Mirrors
   the recursion shape of Coq's [Pos.ldiff]. *)
Fixpoint z_bitor__haxint_ldiff__positive_ldiff (lhs : t_Positive) (rhs : t_Positive) : t_POS :=
  match match_positive (lhs) with
  | POSITIVE_XH =>
    (* 1 \ rhs: keep the 1 bit only when rhs's low bit is 0. *)
    match match_positive (rhs) with
    | POSITIVE_XH => POS_ZERO
    | POSITIVE_XO (_) => POS_POS (xH)
    | POSITIVE_XI (_) => POS_ZERO
    end
  | POSITIVE_XO (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => POS_POS (xO (p))
    | POSITIVE_XO (q) => z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))
    | POSITIVE_XI (q) => z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))
    end
  | POSITIVE_XI (p) =>
    match match_positive (rhs) with
    | POSITIVE_XH => POS_POS (xO (p))
    (* lhs low bit 1, rhs low bit 0: the result keeps a 1 bit. *)
    | POSITIVE_XO (q) => z_bitor__haxint_ldiff__n_succ_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))
    | POSITIVE_XI (q) => z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))
    end
  end.

(* Lift ldiff to t_POS: 0 \ x = 0 and x \ 0 = x. *)
Definition z_bitor__haxint_ldiff (lhs : t_POS) (rhs : t_POS) : t_POS :=
  match lhs with
  | POS_ZERO => POS_ZERO
  | POS_POS (p) =>
    match rhs with
    | POS_ZERO => POS_POS (p)
    | POS_POS (q) => z_bitor__haxint_ldiff__positive_ldiff (p) (q)
    end
  end.

(* Bitwise AND on t_POS, deferring to [bitand_binary] on positives. *)
Definition z_bitor__n_and (lhs : t_POS) (rhs : t_POS) : t_POS :=
  match lhs with
  | POS_ZERO => POS_ZERO
  | POS_POS (p) =>
    match rhs with
    | POS_ZERO => POS_ZERO
    | POS_POS (q) => match_pos (bitand_binary (p) (q))
    end
  end.

(* Predecessor of a positive as a t_POS (1 |-> 0, 2p |-> 2p-1, 2p+1 |-> 2p).
   Mirrors Coq's [Pos.pred_N]. *)
Definition z_bitor__positive_pred_N (x : t_Positive) : t_POS :=
  match match_positive (x) with
  | POSITIVE_XH => POS_ZERO
  | POSITIVE_XI (p) => POS_POS (xO (p))
  | POSITIVE_XO (p) => POS_POS (positive_pred_double (p))
  end.
(* Signed bitwise OR on t_Z under two's-complement semantics: negative
   operands are rewritten via -x = -(pred x + 1), turning OR with a
   negative into ldiff/AND on magnitudes followed by a final successor.
   Same decomposition as Coq's [Z.lor]. *)
Definition z_bitor (lhs : t_Z) (rhs : t_Z) : t_Z :=
  match lhs with
  | Z_ZERO => rhs
  | Z_POS (x) =>
    match rhs with
    | Z_ZERO => Z_POS (x)
    | Z_POS (y) => Z_POS (bitor_binary (x) (y))
    | Z_NEG (y) => Z_NEG (z_bitor__n_succ (z_bitor__haxint_ldiff (z_bitor__positive_pred_N (y)) (POS_POS (x))))
    end
  | Z_NEG (x) =>
    match rhs with
    | Z_ZERO => Z_NEG (x)
    | Z_POS (y) => Z_NEG (z_bitor__n_succ (z_bitor__haxint_ldiff (z_bitor__positive_pred_N (x)) (POS_POS (y))))
    | Z_NEG (y) => Z_NEG (z_bitor__n_succ (z_bitor__n_and (z_bitor__positive_pred_N (x)) (z_bitor__positive_pred_N (y))))
    end
  end.

(* Three-way comparison on t_Z: NEG < ZERO < POS; two negatives compare as
   their magnitudes with the verdict inverted. *)
Definition z_cmp (lhs : t_Z) (rhs : t_Z) : t_Ordering :=
  match lhs with
  | Z_NEG (p) =>
    match rhs with
    | Z_NEG (q) =>
      match positive_cmp (p) (q) with
      | Ordering_Equal => Ordering_Equal
      | Ordering_Less => Ordering_Greater
      | Ordering_Greater => Ordering_Less
      end
    | _ => Ordering_Less
    end
  | Z_ZERO =>
    match rhs with
    | Z_ZERO => Ordering_Equal
    | Z_POS (_) => Ordering_Less
    | Z_NEG (_) => Ordering_Greater
    end
  | Z_POS (p) =>
    match rhs with
    | Z_POS (q) => positive_cmp (p) (q)
    | _ => Ordering_Greater
    end
  end.

(* lhs <= rhs on t_Z.  The [Option_Some] wrapper is a generated artifact. *)
Definition z_le (lhs : t_Z) (rhs : t_Z) : bool :=
  match Option_Some (z_cmp (lhs) (rhs)) with
  | Option_Some (Ordering_Less | Ordering_Equal) => true
  | _ => false
  end.

(* lhs < rhs on t_Z. *)
Definition z_lt (lhs : t_Z) (rhs : t_Z) : bool :=
  match Option_Some (z_cmp (lhs) (rhs)) with
  | Option_Some (Ordering_Less) => true
  | _ => false
  end.
(* Signed difference of two positives: x - y as a t_Z, built bit-by-bit
   with the double/succ_double/pred_double helpers.  Mirrors Coq's
   [Z.pos_sub]. *)
Fixpoint z_add__pos_z_sub (x : t_Positive) (y : t_Positive) : t_Z :=
  match match_positive (x) with
  | POSITIVE_XH =>
    (* 1 - y *)
    match match_positive (y) with
    | POSITIVE_XH => Z_ZERO
    | POSITIVE_XO (q) => Z_NEG (positive_pred_double (q))
    | POSITIVE_XI (q) => Z_NEG (xO (q))
    end
  | POSITIVE_XO (p) =>
    (* 2p - y *)
    match match_positive (y) with
    | POSITIVE_XH => Z_POS (positive_pred_double (p))
    | POSITIVE_XO (q) => z_add__z_double (z_add__pos_z_sub (p) (q))
    | POSITIVE_XI (q) => z_add__z_pred_double (z_add__pos_z_sub (p) (q))
    end
  | POSITIVE_XI (p) =>
    (* (2p+1) - y *)
    match match_positive (y) with
    | POSITIVE_XH => Z_POS (xO (p))
    | POSITIVE_XO (q) => z_add__z_succ_double (z_add__pos_z_sub (p) (q))
    | POSITIVE_XI (q) => z_add__z_double (z_add__pos_z_sub (p) (q))
    end
  end.

(* Signed addition: same signs add magnitudes; mixed signs reduce to the
   signed difference [z_add__pos_z_sub]. *)
Definition z_add (lhs : t_Z) (rhs : t_Z) : t_Z :=
  match lhs with
  | Z_NEG (p) =>
    match rhs with
    | Z_NEG (q) => Z_NEG (positive_add (p) (q))
    | Z_ZERO => Z_NEG (p)
    | Z_POS (q) => z_add__pos_z_sub (q) (p)
    end
  | Z_ZERO => rhs
  | Z_POS (p) =>
    match rhs with
    | Z_NEG (q) => z_add__pos_z_sub (p) (q)
    | Z_ZERO => Z_POS (p)
    | Z_POS (q) => Z_POS (positive_add (p) (q))
    end
  end.

(* Subtraction as addition of the negation. *)
Definition z_sub (lhs : t_Z) (rhs : t_Z) : t_Z :=
  z_add (lhs) (z_neg (rhs)).

(* Signed multiplication: multiply magnitudes, sign by the usual rule;
   zero absorbs. *)
Definition z_mul (lhs : t_Z) (rhs : t_Z) : t_Z :=
  match lhs with
  | Z_NEG (p) =>
    match rhs with
    | Z_NEG (q) => Z_POS (positive_mul (p) (q))
    | Z_ZERO => Z_ZERO
    | Z_POS (q) => Z_NEG (positive_mul (p) (q))
    end
  | Z_ZERO => Z_ZERO
  | Z_POS (p) =>
    match rhs with
    | Z_NEG (q) => Z_NEG (positive_mul (p) (q))
    | Z_ZERO => Z_ZERO
    | Z_POS (q) => Z_POS (positive_mul (p) (q))
    end
  end.
(* Euclidean division of a positive [a] by [b], digit by digit (analogue of
   Coq's [Z.pos_div_eucl]).  Only ever called with b > 0 from [z_divmod]
   below; for a = 1 it returns (0,1) when b >= 2 and (1,0) otherwise.
   At each binary digit the remainder is doubled (plus one for an XI digit)
   and reduced against b once. *)
Fixpoint pos_div_eucl (a : t_Positive) (b : t_Z) : (t_Z*t_Z) :=
  match match_positive (a) with
  | POSITIVE_XH =>
      if z_le (v_Z_TWO) (Clone_f_clone (b))
      then (Z_ZERO,v_Z_ONE)
      else (v_Z_ONE,Z_ZERO)
  | POSITIVE_XO (p) =>
      let (q,r) := pos_div_eucl (p) (Clone_f_clone (b)) in
      let r___ := z_mul (v_Z_TWO) (r) in
      if z_lt (Clone_f_clone (r___)) (Clone_f_clone (b))
      then (z_mul (v_Z_TWO) (q),r___)
      else (z_add (z_mul (v_Z_TWO) (q)) (v_Z_ONE),z_sub (r___) (b))
  | POSITIVE_XI (p) =>
      let (q,r) := pos_div_eucl (p) (Clone_f_clone (b)) in
      let r___ := z_add (z_mul (v_Z_TWO) (r)) (v_Z_ONE) in
      if z_lt (Clone_f_clone (r___)) (Clone_f_clone (b))
      then (z_mul (v_Z_TWO) (q),r___)
      else (z_add (z_mul (v_Z_TWO) (q)) (v_Z_ONE),z_sub (r___) (b))
  end.
(* (quotient, remainder) of a by b.  Signs follow truncating division
   (round toward zero): the quotient is negated when exactly one operand is
   negative, and the remainder carries the sign of the dividend [a] — the
   convention of Rust's `/` and `%`, not Coq's flooring [Z.div_eucl].
   Division by zero is total here and returns (0, a). *)
Definition z_divmod (a : t_Z) (b : t_Z) : (t_Z*t_Z) :=
  match a with
  | Z_ZERO => (Z_ZERO,Z_ZERO)
  | Z_POS (a___) =>
      match Clone_f_clone (b) with
      | Z_ZERO => (Z_ZERO,Z_POS (a___))
      | Z_POS (b___) => pos_div_eucl (a___) (b)
      | Z_NEG (b___) =>
          let (q,r) := pos_div_eucl (a___) (Z_POS (b___)) in
          (z_neg (q),r)
      end
  | Z_NEG (a___) =>
      match Clone_f_clone (b) with
      | Z_ZERO => (Z_ZERO,Z_NEG (a___))
      | Z_POS (_) =>
          let (q,r) := pos_div_eucl (a___) (Clone_f_clone (b)) in
          (z_neg (q),z_neg (r))
      | Z_NEG (b___) =>
          let (q,r) := pos_div_eucl (a___) (Z_POS (b___)) in
          (q,z_neg (r))
      end
  end.
(* Truncating quotient: first projection of [z_divmod]. *)
Definition z_div (lhs : t_Z) (rhs : t_Z) : t_Z := let (q,_) := z_divmod (lhs) (rhs) in q.
(* Truncating remainder: second projection of [z_divmod]. *)
Definition z_rem (lhs : t_Z) (rhs : t_Z) : t_Z := let (_,r) := z_divmod (lhs) (rhs) in r.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* Core_Base_interface re-exports the Int and Coerce sub-interfaces. *)
(* From Core Require Import Core. *)
From Core Require Import Core_Base_interface_Int.
Export Core_Base_interface_Int.
From Core Require Import Core_Base_interface_Coerce.
Export Core_Base_interface_Coerce.
(* NotImplementedYet *)
(* NotImplementedYet *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Coerce.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Marker.
Export Core_Marker.
(* Type class mapping an abstract value (e.g. an unbounded [t_Z]) down to a
   concrete machine-integer wrapper [v_T].  Inverse direction of
   [t_Abstraction] below. *)
Class t_Concretization (v_Self : Type) (v_T : Type) `{t_Sized (v_T)} : Type := {
  Concretization_f_concretize : v_Self -> v_T;
}.
Arguments t_Concretization (_) (_) {_}.
(* Type class mapping a machine-integer wrapper up to its abstract
   mathematical type ([Abstraction_f_AbstractType], an associated type with a
   [t_Sized] substructure field declared via the anonymous `::` member). *)
Class t_Abstraction (v_Self : Type) : Type := {
  Abstraction_f_AbstractType : Type;
  _ :: `{t_Sized (Abstraction_f_AbstractType)};
  Abstraction_f_lift : v_Self -> Abstraction_f_AbstractType;
}.
Arguments t_Abstraction (_).
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Cmp.
Export Core_Cmp.
From Core Require Import Core_Ops.
Export Core_Ops.
From Core Require Import Core_Base.
Export Core_Base.
From Core Require Import Core_Base_interface_Coerce.
Export Core_Base_interface_Coerce.
From Core Require Import Core_Option.
Export Core_Option.
From Core Require Import Core_Clone (t_Clone).
Export Core_Clone (t_Clone).
From Core Require Import Core_Convert (t_From).
Export Core_Convert (t_From).
(* Named constants every machine-integer wrapper must provide.
   Instances for each width follow further down in this file. *)
Class t_Constants (v_Self : Type) : Type := {
  Constants_f_ZERO : v_Self;
  Constants_f_ONE : v_Self;
  Constants_f_MIN : v_Self;
  Constants_f_MAX : v_Self;
}.
Arguments t_Constants (_).
(* Signed machine-integer wrappers: a single-field record around the
   unbounded signed type [t_Z].  Each record gets a RecordUpdate [Settable]
   instance so `<| ... |>` functional-update notation works. *)
Record t_I128 : Type := { I128_f_v : t_Z; }.
Arguments Build_t_I128.
Arguments I128_f_v.
#[export] Instance settable_t_I128 : Settable _ := settable! (Build_t_I128) .
(* NotImplementedYet *)
Record t_I16 : Type := { I16_f_v : t_Z; }.
Arguments Build_t_I16.
Arguments I16_f_v.
#[export] Instance settable_t_I16 : Settable _ := settable! (Build_t_I16) .
(* NotImplementedYet *)
Record t_I32 : Type := { I32_f_v : t_Z; }.
Arguments Build_t_I32.
Arguments I32_f_v.
#[export] Instance settable_t_I32 : Settable _ := settable! (Build_t_I32) .
(* NotImplementedYet *)
Record t_I64 : Type := { I64_f_v : t_Z; }.
Arguments Build_t_I64.
Arguments I64_f_v.
#[export] Instance settable_t_I64 : Settable _ := settable! (Build_t_I64) .
(* NotImplementedYet *)
Record t_I8 : Type := { I8_f_v : t_Z; }.
Arguments Build_t_I8.
Arguments I8_f_v.
#[export] Instance settable_t_I8 : Settable _ := settable! (Build_t_I8) .
(* NotImplementedYet *)
(* Unsigned machine-integer wrappers: same scheme over the unsigned
   unbounded type [t_HaxInt]. *)
Record t_U128 : Type := { U128_f_v : t_HaxInt; }.
Arguments Build_t_U128.
Arguments U128_f_v.
#[export] Instance settable_t_U128 : Settable _ := settable! (Build_t_U128) .
(* NotImplementedYet *)
Record t_U16 : Type := { U16_f_v : t_HaxInt; }.
Arguments Build_t_U16.
Arguments U16_f_v.
#[export] Instance settable_t_U16 : Settable _ := settable! (Build_t_U16) .
(* NotImplementedYet *)
Record t_U32 : Type := { U32_f_v : t_HaxInt; }.
Arguments Build_t_U32.
Arguments U32_f_v.
#[export] Instance settable_t_U32 : Settable _ := settable! (Build_t_U32) .
(* NotImplementedYet *)
Record t_U64 : Type := { U64_f_v : t_HaxInt; }.
Arguments Build_t_U64.
Arguments U64_f_v.
#[export] Instance settable_t_U64 : Settable _ := settable! (Build_t_U64) .
(* NotImplementedYet *)
Record t_U8 : Type := { U8_f_v : t_HaxInt; }.
Arguments Build_t_U8.
Arguments U8_f_v.
#[export] Instance settable_t_U8 : Settable _ := settable! (Build_t_U8) .
(* NotImplementedYet *)
(* Generator placeholders for items hax could not translate. *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
(* Concretization (t_Z -> wrapper) and Clone instances for each signed
   width.  Concretize simply wraps; no range reduction is visible here. *)
#[global] Instance t_Concretization_407178874 : t_Concretization ((t_Z)) ((t_I128)) := {
  Concretization_f_concretize := fun (self : t_Z)=> Build_t_I128 (self);
}.
#[global] Instance t_Clone_960918039 : t_Clone ((t_I128)) := {
  Clone_f_clone := fun (self : t_I128)=> Build_t_I128 (Clone_f_clone (I128_f_v self));
}.
#[global] Instance t_Concretization_1068646878 : t_Concretization ((t_Z)) ((t_I64)) := {
  Concretization_f_concretize := fun (self : t_Z)=> Build_t_I64 (self);
}.
#[global] Instance t_Clone_305340151 : t_Clone ((t_I64)) := {
  Clone_f_clone := fun (self : t_I64)=> Build_t_I64 (Clone_f_clone (I64_f_v self));
}.
#[global] Instance t_Concretization_499270091 : t_Concretization ((t_Z)) ((t_I32)) := {
  Concretization_f_concretize := fun (self : t_Z)=> Build_t_I32 (self);
}.
#[global] Instance t_Clone_774571516 : t_Clone ((t_I32)) := {
  Clone_f_clone := fun (self : t_I32)=> Build_t_I32 (Clone_f_clone (I32_f_v self));
}.
#[global] Instance t_Concretization_432063162 : t_Concretization ((t_Z)) ((t_I16)) := {
  Concretization_f_concretize := fun (self : t_Z)=> Build_t_I16 (self);
}.
#[global] Instance t_Clone_611206751 : t_Clone ((t_I16)) := {
  Clone_f_clone := fun (self : t_I16)=> Build_t_I16 (Clone_f_clone (I16_f_v self));
}.
#[global] Instance t_Concretization_232722110 : t_Concretization ((t_Z)) ((t_I8)) := {
  Concretization_f_concretize := fun (self : t_Z)=> Build_t_I8 (self);
}.
#[global] Instance t_Clone_122768833 : t_Clone ((t_I8)) := {
  Clone_f_clone := fun (self : t_I8)=> Build_t_I8 (Clone_f_clone (I8_f_v self));
}.
(* Constants for each signed width.  MIN/MAX are built from v_WORDSIZE_*
   constants defined elsewhere via [positive_from_int] (whose positivity
   side-condition is discharged by ltac:(easy)).
   NOTE(review): every signed type IN uses the half-width constant
   (I128 -> v_WORDSIZE_64_, I64 -> v_WORDSIZE_32_, ..., I8 -> v_WORDSIZE_4_)
   for MIN/MAX, while the impl_*__WORDSIZE definitions below use the
   full-width constant.  This is consistent across all widths, so it may be
   an intentional naming scheme of the generator — but verify against the
   definitions of v_WORDSIZE_*_ that I128's MIN really encodes -2^127 and
   not -2^64. *)
#[global] Instance t_Constants_572255769 : t_Constants ((t_I128)) := {
  Constants_f_ZERO := Build_t_I128 (Z_ZERO);
  Constants_f_ONE := Build_t_I128 (Z_POS (xH));
  Constants_f_MIN := Build_t_I128 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_64_)));
  Constants_f_MAX := Build_t_I128 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_64_SUB_1_)));
}.
Definition impl_41__BITS : t_U32 := Build_t_U32 (v_BITS_128_).
Definition impl_41__WORDSIZE : t_HaxInt := v_WORDSIZE_128_.
#[global] Instance t_Constants_908090553 : t_Constants ((t_I64)) := {
  Constants_f_ZERO := Build_t_I64 (Z_ZERO);
  Constants_f_ONE := Build_t_I64 (Z_POS (xH));
  Constants_f_MIN := Build_t_I64 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_32_)));
  Constants_f_MAX := Build_t_I64 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_32_SUB_1_)));
}.
Definition impl_55__BITS : t_U32 := Build_t_U32 (v_BITS_64_).
Definition impl_55__WORDSIZE : t_HaxInt := v_WORDSIZE_64_.
#[global] Instance t_Constants_99970330 : t_Constants ((t_I32)) := {
  Constants_f_ZERO := Build_t_I32 (Z_ZERO);
  Constants_f_ONE := Build_t_I32 (Z_POS (xH));
  Constants_f_MIN := Build_t_I32 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_16_)));
  Constants_f_MAX := Build_t_I32 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_16_SUB_1_)));
}.
Definition impl_69__BITS : t_U32 := Build_t_U32 (v_BITS_32_).
Definition impl_69__WORDSIZE : t_HaxInt := v_WORDSIZE_32_.
#[global] Instance t_Constants_687261461 : t_Constants ((t_I16)) := {
  Constants_f_ZERO := Build_t_I16 (Z_ZERO);
  Constants_f_ONE := Build_t_I16 (Z_POS (xH));
  Constants_f_MIN := Build_t_I16 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_8_)));
  Constants_f_MAX := Build_t_I16 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_8_SUB_1_)));
}.
Definition impl_83__BITS : t_U32 := Build_t_U32 (v_BITS_16_).
Definition impl_83__WORDSIZE : t_HaxInt := v_WORDSIZE_16_.
#[global] Instance t_Constants_636847136 : t_Constants ((t_I8)) := {
  Constants_f_ZERO := Build_t_I8 (Z_ZERO);
  Constants_f_ONE := Build_t_I8 (Z_POS (xH));
  Constants_f_MIN := Build_t_I8 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_4_)));
  Constants_f_MAX := Build_t_I8 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_4_SUB_1_)));
}.
Definition impl_97__BITS : t_U32 := Build_t_U32 (v_BITS_8_).
Definition impl_97__WORDSIZE : t_HaxInt := v_WORDSIZE_8_.
(* Constants for each unsigned width: MIN is zero, MAX is 2^N - 1
   (v_WORDSIZE_N_SUB_1_, the full-width constant this time). *)
#[global] Instance t_Constants_119702187 : t_Constants ((t_U128)) := {
  Constants_f_ZERO := Build_t_U128 (v_HaxInt_ZERO);
  Constants_f_ONE := Build_t_U128 (v_HaxInt_ONE);
  Constants_f_MIN := Build_t_U128 (v_HaxInt_ZERO);
  Constants_f_MAX := Build_t_U128 (v_WORDSIZE_128_SUB_1_);
}.
Definition impl_111__BITS : t_U32 := Build_t_U32 (v_BITS_128_).
Definition impl_111__WORDSIZE : t_HaxInt := v_WORDSIZE_128_.
#[global] Instance t_Constants_579677195 : t_Constants ((t_U64)) := {
  Constants_f_ZERO := Build_t_U64 (v_HaxInt_ZERO);
  Constants_f_ONE := Build_t_U64 (v_HaxInt_ONE);
  Constants_f_MIN := Build_t_U64 (v_HaxInt_ZERO);
  Constants_f_MAX := Build_t_U64 (v_WORDSIZE_64_SUB_1_);
}.
Definition impl_138__BITS : t_U32 := Build_t_U32 (v_BITS_64_).
Definition impl_138__WORDSIZE : t_HaxInt := v_WORDSIZE_64_.
#[global] Instance t_Constants_63564700 : t_Constants ((t_U32)) := {
  Constants_f_ZERO := Build_t_U32 (v_HaxInt_ZERO);
  Constants_f_ONE := Build_t_U32 (v_HaxInt_ONE);
  Constants_f_MIN := Build_t_U32 (v_HaxInt_ZERO);
  Constants_f_MAX := Build_t_U32 (v_WORDSIZE_32_SUB_1_);
}.
Definition impl_165__BITS : t_U32 := Build_t_U32 (v_BITS_32_).
Definition impl_165__WORDSIZE : t_HaxInt := v_WORDSIZE_32_.
#[global] Instance t_Constants_221027212 : t_Constants ((t_U16)) := {
  Constants_f_ZERO := Build_t_U16 (v_HaxInt_ZERO);
  Constants_f_ONE := Build_t_U16 (v_HaxInt_ONE);
  Constants_f_MIN := Build_t_U16 (v_HaxInt_ZERO);
  Constants_f_MAX := Build_t_U16 (v_WORDSIZE_16_SUB_1_);
}.
Definition impl_192__BITS : t_U32 := Build_t_U32 (v_BITS_16_).
Definition impl_192__WORDSIZE : t_HaxInt := v_WORDSIZE_16_.
#[global] Instance t_Constants_932070468 : t_Constants ((t_U8)) := {
  Constants_f_ZERO := Build_t_U8 (v_HaxInt_ZERO);
  Constants_f_ONE := Build_t_U8 (v_HaxInt_ONE);
  Constants_f_MIN := Build_t_U8 (v_HaxInt_ZERO);
  Constants_f_MAX := Build_t_U8 (v_WORDSIZE_8_SUB_1_);
}.
Definition impl_219__BITS : t_U32 := Build_t_U32 (v_BITS_8_).
Definition impl_219__WORDSIZE : t_HaxInt := v_WORDSIZE_8_.
(* Clone instances for the unsigned wrappers: clone the inner t_HaxInt and
   re-wrap. *)
#[global] Instance t_Clone_138729312 : t_Clone ((t_U128)) := {
  Clone_f_clone := fun (self : t_U128)=> Build_t_U128 (Clone_f_clone (U128_f_v self));
}.
#[global] Instance t_Clone_461763462 : t_Clone ((t_U64)) := {
  Clone_f_clone := fun (self : t_U64)=> Build_t_U64 (Clone_f_clone (U64_f_v self));
}.
#[global] Instance t_Clone_412151272 : t_Clone ((t_U32)) := {
  Clone_f_clone := fun (self : t_U32)=> Build_t_U32 (Clone_f_clone (U32_f_v self));
}.
#[global] Instance t_Clone_387504240 : t_Clone ((t_U16)) := {
  Clone_f_clone := fun (self : t_U16)=> Build_t_U16 (Clone_f_clone (U16_f_v self));
}.
#[global] Instance t_Clone_917943387 : t_Clone ((t_U8)) := {
  Clone_f_clone := fun (self : t_U8)=> Build_t_U8 (Clone_f_clone (U8_f_v self));
}.
#[global] Instance t_Abstraction_970113908 : t_Abstraction ((t_I128)) := { Abstraction_f_AbstractType := t_Z; Abstraction_f_lift := fun (self : t_I128)=> I128_f_v self; }. #[global] Instance t_From_330503528 : t_From ((t_I8)) ((t_I128)) := { From_f_from := fun (x : t_I128)=> Concretization_f_concretize (Abstraction_f_lift (x) : t_Z); }. #[global] Instance t_From_185067369 : t_From ((t_I16)) ((t_I128)) := { From_f_from := fun (x : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x) : t_Z); }. #[global] Instance t_From_106548803 : t_From ((t_I32)) ((t_I128)) := { From_f_from := fun (x : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x) : t_Z); }. #[global] Instance t_From_237552649 : t_From ((t_I64)) ((t_I128)) := { From_f_from := fun (x : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x) : t_Z); }. #[global] Instance t_PartialEq_488790252 : t_PartialEq ((t_I128)) ((t_I128)) := { PartialEq_f_eq := fun (self : t_I128) (rhs : t_I128)=> PartialEq_f_eq (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_I128) (rhs : t_I128)=> PartialEq_f_ne (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
#[global] Instance t_PartialOrd_387128921 : t_PartialOrd ((t_I128)) ((t_I128)) := { PartialOrd_f_partial_cmp := fun (self : t_I128) (rhs : t_I128)=> Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_I128) (rhs : t_I128)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_I128) (rhs : t_I128)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_I128) (rhs : t_I128)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_I128) (rhs : t_I128)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_692501606 : t_Abstraction ((t_I64)) := { Abstraction_f_AbstractType := t_Z; Abstraction_f_lift := fun (self : t_I64)=> I64_f_v self; }. #[global] Instance t_From_318313768 : t_From ((t_I8)) ((t_I64)) := { From_f_from := fun (x : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x)); }. #[global] Instance t_From_215423074 : t_From ((t_I16)) ((t_I64)) := { From_f_from := fun (x : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_221659723 : t_From ((t_I32)) ((t_I64)) := { From_f_from := fun (x : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x)); }. 
#[global] Instance t_From_927453474 : t_From ((t_I128)) ((t_I64)) := { From_f_from := fun (x : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x)); }. #[global] Instance t_PartialEq_474861724 : t_PartialEq ((t_I64)) ((t_I64)) := { PartialEq_f_eq := fun (self : t_I64) (rhs : t_I64)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_I64) (rhs : t_I64)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. #[global] Instance t_PartialOrd_552634265 : t_PartialOrd ((t_I64)) ((t_I64)) := { PartialOrd_f_partial_cmp := fun (self : t_I64) (rhs : t_I64)=> Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_I64) (rhs : t_I64)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_I64) (rhs : t_I64)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_I64) (rhs : t_I64)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_I64) (rhs : t_I64)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. 
#[global] Instance t_Abstraction_493183574 : t_Abstraction ((t_I32)) := { Abstraction_f_AbstractType := t_Z; Abstraction_f_lift := fun (self : t_I32)=> I32_f_v self; }. #[global] Instance t_From_573287156 : t_From ((t_I8)) ((t_I32)) := { From_f_from := fun (x : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x)); }. #[global] Instance t_From_278670998 : t_From ((t_I16)) ((t_I32)) := { From_f_from := fun (x : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_697572388 : t_From ((t_I64)) ((t_I32)) := { From_f_from := fun (x : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x)); }. #[global] Instance t_From_30146175 : t_From ((t_I128)) ((t_I32)) := { From_f_from := fun (x : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x)); }. #[global] Instance t_PartialEq_795859780 : t_PartialEq ((t_I32)) ((t_I32)) := { PartialEq_f_eq := fun (self : t_I32) (rhs : t_I32)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_I32) (rhs : t_I32)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
#[global] Instance t_PartialOrd_126468614 : t_PartialOrd ((t_I32)) ((t_I32)) := { PartialOrd_f_partial_cmp := fun (self : t_I32) (rhs : t_I32)=> Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_I32) (rhs : t_I32)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_I32) (rhs : t_I32)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_I32) (rhs : t_I32)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_I32) (rhs : t_I32)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_8671741 : t_Abstraction ((t_I16)) := { Abstraction_f_AbstractType := t_Z; Abstraction_f_lift := fun (self : t_I16)=> I16_f_v self; }. #[global] Instance t_From_767089390 : t_From ((t_I8)) ((t_I16)) := { From_f_from := fun (x : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x)); }. #[global] Instance t_From_339600325 : t_From ((t_I32)) ((t_I16)) := { From_f_from := fun (x : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x)); }. #[global] Instance t_From_929749154 : t_From ((t_I64)) ((t_I16)) := { From_f_from := fun (x : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x)); }. 
#[global] Instance t_From_366897745 : t_From ((t_I128)) ((t_I16)) := { From_f_from := fun (x : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x)); }. #[global] Instance t_PartialEq_359538097 : t_PartialEq ((t_I16)) ((t_I16)) := { PartialEq_f_eq := fun (self : t_I16) (rhs : t_I16)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_I16) (rhs : t_I16)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. #[global] Instance t_PartialOrd_524872806 : t_PartialOrd ((t_I16)) ((t_I16)) := { PartialOrd_f_partial_cmp := fun (self : t_I16) (rhs : t_I16)=> Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_I16) (rhs : t_I16)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_I16) (rhs : t_I16)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_I16) (rhs : t_I16)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_I16) (rhs : t_I16)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. 
#[global] Instance t_Abstraction_78490685 : t_Abstraction ((t_I8)) := { Abstraction_f_AbstractType := t_Z; Abstraction_f_lift := fun (self : t_I8)=> I8_f_v self; }. #[global] Instance t_From_995744130 : t_From ((t_I16)) ((t_I8)) := { From_f_from := fun (x : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_513826093 : t_From ((t_I32)) ((t_I8)) := { From_f_from := fun (x : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x)); }. #[global] Instance t_From_843443999 : t_From ((t_I64)) ((t_I8)) := { From_f_from := fun (x : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x)); }. #[global] Instance t_From_532428771 : t_From ((t_I128)) ((t_I8)) := { From_f_from := fun (x : t_I8)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x)); }. #[global] Instance t_PartialEq_594648758 : t_PartialEq ((t_I8)) ((t_I8)) := { PartialEq_f_eq := fun (self : t_I8) (rhs : t_I8)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_I8) (rhs : t_I8)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
#[global] Instance t_PartialOrd_221919414 : t_PartialOrd ((t_I8)) ((t_I8)) := { PartialOrd_f_partial_cmp := fun (self : t_I8) (rhs : t_I8)=> Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_I8) (rhs : t_I8)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_I8) (rhs : t_I8)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_I8) (rhs : t_I8)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_I8) (rhs : t_I8)=> match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_133243863 : t_Abstraction ((t_U128)) := { Abstraction_f_AbstractType := t_HaxInt; Abstraction_f_lift := fun (self : t_U128)=> U128_f_v self; }. #[global] Instance t_PartialEq_792968920 : t_PartialEq ((t_U128)) ((t_U128)) := { PartialEq_f_eq := fun (self : t_U128) (rhs : t_U128)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_U128) (rhs : t_U128)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
#[global] Instance t_PartialOrd_168269581 : t_PartialOrd ((t_U128)) ((t_U128)) := { PartialOrd_f_partial_cmp := fun (self : t_U128) (rhs : t_U128)=> Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_U128) (rhs : t_U128)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_U128) (rhs : t_U128)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_U128) (rhs : t_U128)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_U128) (rhs : t_U128)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_219241396 : t_Abstraction ((t_U64)) := { Abstraction_f_AbstractType := t_HaxInt; Abstraction_f_lift := fun (self : t_U64)=> U64_f_v self; }. #[global] Instance t_PartialEq_162514109 : t_PartialEq ((t_U64)) ((t_U64)) := { PartialEq_f_eq := fun (self : t_U64) (rhs : t_U64)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_U64) (rhs : t_U64)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
(* Comparison instances for the unsigned machine-integer models. Every
   comparison lifts both operands to t_HaxInt via Abstraction_f_lift (after a
   Clone_f_clone, mirroring the Rust source's by-reference semantics) and
   dispatches on the three-way result of haxint_cmp. *)
(* PartialOrd for t_U64: partial_cmp always returns Some; lt/le/gt/ge pattern-match
   the haxint_cmp Ordering. Followed by Abstraction (lift = U32_f_v projection)
   and PartialEq (equality iff haxint_cmp yields Ordering_Equal) for t_U32. *)
#[global] Instance t_PartialOrd_210240032 : t_PartialOrd ((t_U64)) ((t_U64)) := { PartialOrd_f_partial_cmp := fun (self : t_U64) (rhs : t_U64)=> Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_U64) (rhs : t_U64)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_U64) (rhs : t_U64)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_U64) (rhs : t_U64)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_U64) (rhs : t_U64)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_517050128 : t_Abstraction ((t_U32)) := { Abstraction_f_AbstractType := t_HaxInt; Abstraction_f_lift := fun (self : t_U32)=> U32_f_v self; }. #[global] Instance t_PartialEq_894496962 : t_PartialEq ((t_U32)) ((t_U32)) := { PartialEq_f_eq := fun (self : t_U32) (rhs : t_U32)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_U32) (rhs : t_U32)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
(* PartialOrd for t_U32 (same haxint_cmp scheme), then Abstraction (lift = U16_f_v)
   and PartialEq for t_U16. *)
#[global] Instance t_PartialOrd_534404445 : t_PartialOrd ((t_U32)) ((t_U32)) := { PartialOrd_f_partial_cmp := fun (self : t_U32) (rhs : t_U32)=> Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_U32) (rhs : t_U32)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_U32) (rhs : t_U32)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_U32) (rhs : t_U32)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_U32) (rhs : t_U32)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_994821584 : t_Abstraction ((t_U16)) := { Abstraction_f_AbstractType := t_HaxInt; Abstraction_f_lift := fun (self : t_U16)=> U16_f_v self; }. #[global] Instance t_PartialEq_603208302 : t_PartialEq ((t_U16)) ((t_U16)) := { PartialEq_f_eq := fun (self : t_U16) (rhs : t_U16)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_U16) (rhs : t_U16)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
(* PartialOrd for t_U16, then Abstraction (lift = U8_f_v) and PartialEq for t_U8. *)
#[global] Instance t_PartialOrd_595325431 : t_PartialOrd ((t_U16)) ((t_U16)) := { PartialOrd_f_partial_cmp := fun (self : t_U16) (rhs : t_U16)=> Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_U16) (rhs : t_U16)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_U16) (rhs : t_U16)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_U16) (rhs : t_U16)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_U16) (rhs : t_U16)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Abstraction_789996186 : t_Abstraction ((t_U8)) := { Abstraction_f_AbstractType := t_HaxInt; Abstraction_f_lift := fun (self : t_U8)=> U8_f_v self; }. #[global] Instance t_PartialEq_774173636 : t_PartialEq ((t_U8)) ((t_U8)) := { PartialEq_f_eq := fun (self : t_U8) (rhs : t_U8)=> PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); PartialEq_f_ne := fun (self : t_U8) (rhs : t_U8)=> PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal); }. 
(* PartialOrd for t_U8 (same haxint_cmp scheme as the wider unsigned types),
   followed by Neg and BitOr for the signed types. Signed operations lift to t_Z,
   apply z_neg / z_bitor, and concretize back into the machine type; the
   (t_Concretization := _ : …) annotation pins the instance Coq should use. *)
#[global] Instance t_PartialOrd_577399304 : t_PartialOrd ((t_U8)) ((t_U8)) := { PartialOrd_f_partial_cmp := fun (self : t_U8) (rhs : t_U8)=> Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))); PartialOrd_f_lt := fun (self : t_U8) (rhs : t_U8)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less => true | _ => false end; PartialOrd_f_le := fun (self : t_U8) (rhs : t_U8)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Less | Ordering_Equal => true | _ => false end; PartialOrd_f_gt := fun (self : t_U8) (rhs : t_U8)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater => true | _ => false end; PartialOrd_f_ge := fun (self : t_U8) (rhs : t_U8)=> match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with | Ordering_Greater | Ordering_Equal => true | _ => false end; }. #[global] Instance t_Neg_375517228 : t_Neg ((t_I128)) := { Neg_f_Output := t_I128; Neg_f_neg := fun (self : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_neg (Abstraction_f_lift (self))); }. #[global] Instance t_BitOr_938342430 : t_BitOr ((t_I128)) ((t_I128)) := { BitOr_f_Output := t_I128; BitOr_f_bitor := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_210530286 : t_Neg ((t_I64)) := { Neg_f_Output := t_I64; Neg_f_neg := fun (self : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_neg (Abstraction_f_lift (self))); }. 
(* BitOr I64, Neg/BitOr I32, Neg/BitOr I16, Neg/BitOr I8.
   NOTE(review): the t_I8 instances omit the explicit (t_Concretization := _ : …)
   annotation used by every other width — Coq is left to infer the instance from
   the field's result type. Presumably intentional generator output; the elaborated
   term should coincide — confirm against the hax Coq backend. *)
#[global] Instance t_BitOr_329754853 : t_BitOr ((t_I64)) ((t_I64)) := { BitOr_f_Output := t_I64; BitOr_f_bitor := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_104016941 : t_Neg ((t_I32)) := { Neg_f_Output := t_I32; Neg_f_neg := fun (self : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_neg (Abstraction_f_lift (self))); }. #[global] Instance t_BitOr_840483685 : t_BitOr ((t_I32)) ((t_I32)) := { BitOr_f_Output := t_I32; BitOr_f_bitor := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_1063990797 : t_Neg ((t_I16)) := { Neg_f_Output := t_I16; Neg_f_neg := fun (self : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_neg (Abstraction_f_lift (self))); }. #[global] Instance t_BitOr_450806124 : t_BitOr ((t_I16)) ((t_I16)) := { BitOr_f_Output := t_I16; BitOr_f_bitor := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_979719905 : t_Neg ((t_I8)) := { Neg_f_Output := t_I8; Neg_f_neg := fun (self : t_I8)=> Concretization_f_concretize (z_neg (Abstraction_f_lift (self))); }. #[global] Instance t_BitOr_828862178 : t_BitOr ((t_I8)) ((t_I8)) := { BitOr_f_Output := t_I8; BitOr_f_bitor := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Add and Sub for the signed types: lift both operands to t_Z, apply
   z_add / z_sub, concretize back. Add/Sub for I128, I64, I32. *)
#[global] Instance t_Add_749575336 : t_Add ((t_I128)) ((t_I128)) := { Add_f_Output := t_I128; Add_f_add := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_800692471 : t_Sub ((t_I128)) ((t_I128)) := { Sub_f_Output := t_I128; Sub_f_sub := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_87367909 : t_Add ((t_I64)) ((t_I64)) := { Add_f_Output := t_I64; Add_f_add := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_741383133 : t_Sub ((t_I64)) ((t_I64)) := { Sub_f_Output := t_I64; Sub_f_sub := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_574043038 : t_Add ((t_I32)) ((t_I32)) := { Add_f_Output := t_I32; Add_f_add := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_699874712 : t_Sub ((t_I32)) ((t_I32)) := { Sub_f_Output := t_I32; Sub_f_sub := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Add/Sub for I16 and I8 (the I8 pair again omits the explicit instance
   annotation). Then: Concretization for t_U128 — concretize truncates via
   haxint_rem by v_WORDSIZE_128_, i.e. reduction modulo 2^128 — and widening
   From conversions U8/U16/U32 -> U128 (lift to HaxInt, concretize into U128). *)
#[global] Instance t_Add_413164706 : t_Add ((t_I16)) ((t_I16)) := { Add_f_Output := t_I16; Add_f_add := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_544358249 : t_Sub ((t_I16)) ((t_I16)) := { Sub_f_Output := t_I16; Sub_f_sub := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_335735231 : t_Add ((t_I8)) ((t_I8)) := { Add_f_Output := t_I8; Add_f_add := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_257575332 : t_Sub ((t_I8)) ((t_I8)) := { Sub_f_Output := t_I8; Sub_f_sub := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Concretization_943450188 : t_Concretization ((t_HaxInt)) ((t_U128)) := { Concretization_f_concretize := fun (self : t_HaxInt)=> Build_t_U128 (haxint_rem (self) (v_WORDSIZE_128_)); }. #[global] Instance t_From_355161674 : t_From ((t_U128)) ((t_U8)) := { From_f_from := fun (x : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x)); }. #[global] Instance t_From_739905379 : t_From ((t_U128)) ((t_U16)) := { From_f_from := fun (x : t_U16)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x)); }. #[global] Instance t_From_487010006 : t_From ((t_U128)) ((t_U32)) := { From_f_from := fun (x : t_U32)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x)); }. 
(* Cross-width From conversions between the unsigned types. Each conversion is
   lift-then-concretize, so narrowing conversions truncate modulo the target's
   v_WORDSIZE_N_ (via the target's Concretization instance); each Concretization
   instance below builds the target record from haxint_rem self WORDSIZE. *)
(* From U64 -> U128; Concretization for t_U64 (mod 2^64); From U8/U16/U32/U128 -> U64;
   Concretization for t_U32 (mod 2^32); From U8/U16 -> U32. *)
#[global] Instance t_From_665417617 : t_From ((t_U128)) ((t_U64)) := { From_f_from := fun (x : t_U64)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x)); }. #[global] Instance t_Concretization_10977439 : t_Concretization ((t_HaxInt)) ((t_U64)) := { Concretization_f_concretize := fun (self : t_HaxInt)=> Build_t_U64 (haxint_rem (self) (v_WORDSIZE_64_)); }. #[global] Instance t_From_746191059 : t_From ((t_U64)) ((t_U8)) := { From_f_from := fun (x : t_U8)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x)); }. #[global] Instance t_From_598353876 : t_From ((t_U64)) ((t_U16)) := { From_f_from := fun (x : t_U16)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x)); }. #[global] Instance t_From_293255347 : t_From ((t_U64)) ((t_U32)) := { From_f_from := fun (x : t_U32)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x)); }. #[global] Instance t_From_478031507 : t_From ((t_U64)) ((t_U128)) := { From_f_from := fun (x : t_U128)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x)); }. #[global] Instance t_Concretization_264065114 : t_Concretization ((t_HaxInt)) ((t_U32)) := { Concretization_f_concretize := fun (self : t_HaxInt)=> Build_t_U32 (haxint_rem (self) (v_WORDSIZE_32_)); }. #[global] Instance t_From_675834555 : t_From ((t_U32)) ((t_U8)) := { From_f_from := fun (x : t_U8)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x)); }. #[global] Instance t_From_410569540 : t_From ((t_U32)) ((t_U16)) := { From_f_from := fun (x : t_U16)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x)); }. 
(* From U64/U128 -> U32; Concretization for t_U16 (mod 2^16);
   From U8/U32/U64/U128 -> U16; Concretization for t_U8 (mod 2^8); From U16 -> U8. *)
#[global] Instance t_From_616913228 : t_From ((t_U32)) ((t_U64)) := { From_f_from := fun (x : t_U64)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x)); }. #[global] Instance t_From_376625380 : t_From ((t_U32)) ((t_U128)) := { From_f_from := fun (x : t_U128)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x)); }. #[global] Instance t_Concretization_656994795 : t_Concretization ((t_HaxInt)) ((t_U16)) := { Concretization_f_concretize := fun (self : t_HaxInt)=> Build_t_U16 (haxint_rem (self) (v_WORDSIZE_16_)); }. #[global] Instance t_From_352276566 : t_From ((t_U16)) ((t_U8)) := { From_f_from := fun (x : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_699842532 : t_From ((t_U16)) ((t_U32)) := { From_f_from := fun (x : t_U32)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_326646767 : t_From ((t_U16)) ((t_U64)) := { From_f_from := fun (x : t_U64)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x)); }. #[global] Instance t_From_604186294 : t_From ((t_U16)) ((t_U128)) := { From_f_from := fun (x : t_U128)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x)); }. #[global] Instance t_Concretization_492312374 : t_Concretization ((t_HaxInt)) ((t_U8)) := { Concretization_f_concretize := fun (self : t_HaxInt)=> Build_t_U8 (haxint_rem (self) (v_WORDSIZE_8_)); }. #[global] Instance t_From_374313775 : t_From ((t_U8)) ((t_U16)) := { From_f_from := fun (x : t_U16)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x)); }. 
(* From U32/U64/U128 -> U8 (narrowing, truncates mod 2^8), then Mul for the
   signed types I128/I64/I32/I16: lift to t_Z, z_mul, concretize. *)
#[global] Instance t_From_42776580 : t_From ((t_U8)) ((t_U32)) := { From_f_from := fun (x : t_U32)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x)); }. #[global] Instance t_From_480314375 : t_From ((t_U8)) ((t_U64)) := { From_f_from := fun (x : t_U64)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x)); }. #[global] Instance t_From_135782329 : t_From ((t_U8)) ((t_U128)) := { From_f_from := fun (x : t_U128)=> Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x)); }. #[global] Instance t_Mul_180009375 : t_Mul ((t_I128)) ((t_I128)) := { Mul_f_Output := t_I128; Mul_f_mul := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Mul_1051209688 : t_Mul ((t_I64)) ((t_I64)) := { Mul_f_Output := t_I64; Mul_f_mul := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Mul_481497752 : t_Mul ((t_I32)) ((t_I32)) := { Mul_f_Output := t_I32; Mul_f_mul := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Mul_768005208 : t_Mul ((t_I16)) ((t_I16)) := { Mul_f_Output := t_I16; Mul_f_mul := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
#[global] Instance t_Mul_1057691929 : t_Mul ((t_I8)) ((t_I8)) := { Mul_f_Output := t_I8; Mul_f_mul := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_200638412 : t_Neg ((t_U128)) := { Neg_f_Output := t_U128; Neg_f_neg := fun (self : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_sub (v_WORDSIZE_128_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_128_))); }. #[global] Instance t_Mul_508073751 : t_Mul ((t_U128)) ((t_U128)) := { Mul_f_Output := t_U128; Mul_f_mul := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_184769952 : t_Rem ((t_U128)) ((t_U128)) := { Rem_f_Output := t_U128; Rem_f_rem := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_74062568 : t_Add ((t_U128)) ((t_U128)) := { Add_f_Output := t_U128; Add_f_add := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_697142148 : t_Div ((t_U128)) ((t_U128)) := { Div_f_Output := t_U128; Div_f_div := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shift and bitwise instances for t_U128, plus t_U64 arithmetic. Shifts accept
   any unsigned width as shift amount (one instance per RHS type); all follow
   the same lift / haxint_* / concretize pattern. *)
(* Shl of t_U128 by U8/U16/U32/U64/U128, and Shr by U8. *)
#[global] Instance t_Shl_912131656 : t_Shl ((t_U128)) ((t_U8)) := { Shl_f_Output := t_U128; Shl_f_shl := fun (self : t_U128) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_188720840 : t_Shl ((t_U128)) ((t_U16)) := { Shl_f_Output := t_U128; Shl_f_shl := fun (self : t_U128) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_674581806 : t_Shl ((t_U128)) ((t_U32)) := { Shl_f_Output := t_U128; Shl_f_shl := fun (self : t_U128) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_230523808 : t_Shl ((t_U128)) ((t_U64)) := { Shl_f_Output := t_U128; Shl_f_shl := fun (self : t_U128) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_304350501 : t_Shl ((t_U128)) ((t_U128)) := { Shl_f_Output := t_U128; Shl_f_shl := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_584068908 : t_Shr ((t_U128)) ((t_U8)) := { Shr_f_Output := t_U128; Shr_f_shr := fun (self : t_U128) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shr of t_U128 by U16/U32/U64/U128; BitXor and BitAnd on t_U128. *)
#[global] Instance t_Shr_73833277 : t_Shr ((t_U128)) ((t_U16)) := { Shr_f_Output := t_U128; Shr_f_shr := fun (self : t_U128) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_50912121 : t_Shr ((t_U128)) ((t_U32)) := { Shr_f_Output := t_U128; Shr_f_shr := fun (self : t_U128) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_282345299 : t_Shr ((t_U128)) ((t_U64)) := { Shr_f_Output := t_U128; Shr_f_shr := fun (self : t_U128) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_354892033 : t_Shr ((t_U128)) ((t_U128)) := { Shr_f_Output := t_U128; Shr_f_shr := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitXor_457452962 : t_BitXor ((t_U128)) ((t_U128)) := { BitXor_f_Output := t_U128; BitXor_f_bitxor := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitAnd_499214249 : t_BitAnd ((t_U128)) ((t_U128)) := { BitAnd_f_Output := t_U128; BitAnd_f_bitand := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* BitOr on t_U128; then t_U64 arithmetic (same scheme as t_U128, mod 2^64):
   Neg = WORDSIZE_64 - (self mod WORDSIZE_64), Mul, Rem, Add, Div. *)
#[global] Instance t_BitOr_579754702 : t_BitOr ((t_U128)) ((t_U128)) := { BitOr_f_Output := t_U128; BitOr_f_bitor := fun (self : t_U128) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_338880159 : t_Neg ((t_U64)) := { Neg_f_Output := t_U64; Neg_f_neg := fun (self : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_sub (v_WORDSIZE_64_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_64_))); }. #[global] Instance t_Mul_785129859 : t_Mul ((t_U64)) ((t_U64)) := { Mul_f_Output := t_U64; Mul_f_mul := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_450198244 : t_Rem ((t_U64)) ((t_U64)) := { Rem_f_Output := t_U64; Rem_f_rem := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_880469818 : t_Add ((t_U64)) ((t_U64)) := { Add_f_Output := t_U64; Add_f_add := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_1065913959 : t_Div ((t_U64)) ((t_U64)) := { Div_f_Output := t_U64; Div_f_div := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shift and bitwise instances for t_U64, plus t_U32 arithmetic — same
   lift / haxint_* / concretize pattern as the t_U128 family above. *)
(* Shl of t_U64 by U8/U16/U32/U64/U128, and Shr by U8. (The first instance has
   slightly irregular generator spacing around the annotation; semantically
   identical to its siblings.) *)
#[global] Instance t_Shl_307107617 : t_Shl ((t_U64)) ((t_U8)) := { Shl_f_Output := t_U64; Shl_f_shl := fun (self : t_U64) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64 )(haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_521831749 : t_Shl ((t_U64)) ((t_U16)) := { Shl_f_Output := t_U64; Shl_f_shl := fun (self : t_U64) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_243646433 : t_Shl ((t_U64)) ((t_U32)) := { Shl_f_Output := t_U64; Shl_f_shl := fun (self : t_U64) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_732371970 : t_Shl ((t_U64)) ((t_U64)) := { Shl_f_Output := t_U64; Shl_f_shl := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_362455113 : t_Shl ((t_U64)) ((t_U128)) := { Shl_f_Output := t_U64; Shl_f_shl := fun (self : t_U64) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_675607391 : t_Shr ((t_U64)) ((t_U8)) := { Shr_f_Output := t_U64; Shr_f_shr := fun (self : t_U64) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shr of t_U64 by U16/U32/U64/U128; BitXor and BitAnd on t_U64. *)
#[global] Instance t_Shr_163042579 : t_Shr ((t_U64)) ((t_U16)) := { Shr_f_Output := t_U64; Shr_f_shr := fun (self : t_U64) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_329072619 : t_Shr ((t_U64)) ((t_U32)) := { Shr_f_Output := t_U64; Shr_f_shr := fun (self : t_U64) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_1046321056 : t_Shr ((t_U64)) ((t_U64)) := { Shr_f_Output := t_U64; Shr_f_shr := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_1027159812 : t_Shr ((t_U64)) ((t_U128)) := { Shr_f_Output := t_U64; Shr_f_shr := fun (self : t_U64) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitXor_771705591 : t_BitXor ((t_U64)) ((t_U64)) := { BitXor_f_Output := t_U64; BitXor_f_bitxor := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitAnd_61309855 : t_BitAnd ((t_U64)) ((t_U64)) := { BitAnd_f_Output := t_U64; BitAnd_f_bitand := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* BitOr on t_U64; then t_U32 arithmetic (mod 2^32): Neg, Mul, Rem, Add, Div. *)
#[global] Instance t_BitOr_584478327 : t_BitOr ((t_U64)) ((t_U64)) := { BitOr_f_Output := t_U64; BitOr_f_bitor := fun (self : t_U64) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_660092460 : t_Neg ((t_U32)) := { Neg_f_Output := t_U32; Neg_f_neg := fun (self : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_sub (v_WORDSIZE_32_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_32_))); }. #[global] Instance t_Mul_907086750 : t_Mul ((t_U32)) ((t_U32)) := { Mul_f_Output := t_U32; Mul_f_mul := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_754047547 : t_Rem ((t_U32)) ((t_U32)) := { Rem_f_Output := t_U32; Rem_f_rem := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_62760194 : t_Add ((t_U32)) ((t_U32)) := { Add_f_Output := t_U32; Add_f_add := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_1036065219 : t_Div ((t_U32)) ((t_U32)) := { Div_f_Output := t_U32; Div_f_div := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shift and bitwise instances for t_U32, plus t_U16 arithmetic — same
   lift / haxint_* / concretize pattern as the wider types. *)
(* Shl of t_U32 by U8/U16/U32/U64/U128, and Shr by U8. *)
#[global] Instance t_Shl_940272829 : t_Shl ((t_U32)) ((t_U8)) := { Shl_f_Output := t_U32; Shl_f_shl := fun (self : t_U32) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_184065944 : t_Shl ((t_U32)) ((t_U16)) := { Shl_f_Output := t_U32; Shl_f_shl := fun (self : t_U32) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_512141775 : t_Shl ((t_U32)) ((t_U32)) := { Shl_f_Output := t_U32; Shl_f_shl := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_760382167 : t_Shl ((t_U32)) ((t_U64)) := { Shl_f_Output := t_U32; Shl_f_shl := fun (self : t_U32) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_938844716 : t_Shl ((t_U32)) ((t_U128)) := { Shl_f_Output := t_U32; Shl_f_shl := fun (self : t_U32) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_376401556 : t_Shr ((t_U32)) ((t_U8)) := { Shr_f_Output := t_U32; Shr_f_shr := fun (self : t_U32) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Shr of t_U32 by U16/U32/U64/U128; BitXor and BitAnd on t_U32. *)
#[global] Instance t_Shr_816225657 : t_Shr ((t_U32)) ((t_U16)) := { Shr_f_Output := t_U32; Shr_f_shr := fun (self : t_U32) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_131570199 : t_Shr ((t_U32)) ((t_U32)) := { Shr_f_Output := t_U32; Shr_f_shr := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_643141508 : t_Shr ((t_U32)) ((t_U64)) := { Shr_f_Output := t_U32; Shr_f_shr := fun (self : t_U32) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_472576920 : t_Shr ((t_U32)) ((t_U128)) := { Shr_f_Output := t_U32; Shr_f_shr := fun (self : t_U32) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitXor_568575701 : t_BitXor ((t_U32)) ((t_U32)) := { BitXor_f_Output := t_U32; BitXor_f_bitxor := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitAnd_188629984 : t_BitAnd ((t_U32)) ((t_U32)) := { BitAnd_f_Output := t_U32; BitAnd_f_bitand := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* BitOr on t_U32; then t_U16 arithmetic (mod 2^16): Neg, Mul, Rem, Add, Div. *)
#[global] Instance t_BitOr_727300711 : t_BitOr ((t_U32)) ((t_U32)) := { BitOr_f_Output := t_U32; BitOr_f_bitor := fun (self : t_U32) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_524209972 : t_Neg ((t_U16)) := { Neg_f_Output := t_U16; Neg_f_neg := fun (self : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_sub (v_WORDSIZE_16_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_16_))); }. #[global] Instance t_Mul_813798593 : t_Mul ((t_U16)) ((t_U16)) := { Mul_f_Output := t_U16; Mul_f_mul := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_1023129312 : t_Rem ((t_U16)) ((t_U16)) := { Rem_f_Output := t_U16; Rem_f_rem := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_669194837 : t_Add ((t_U16)) ((t_U16)) := { Add_f_Output := t_U16; Add_f_add := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_599727096 : t_Div ((t_U16)) ((t_U16)) := { Div_f_Output := t_U16; Div_f_div := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
#[global] Instance t_Shl_254354835 : t_Shl ((t_U16)) ((t_U8)) := { Shl_f_Output := t_U16; Shl_f_shl := fun (self : t_U16) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_786190756 : t_Shl ((t_U16)) ((t_U16)) := { Shl_f_Output := t_U16; Shl_f_shl := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_961613024 : t_Shl ((t_U16)) ((t_U32)) := { Shl_f_Output := t_U16; Shl_f_shl := fun (self : t_U16) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_699049796 : t_Shl ((t_U16)) ((t_U64)) := { Shl_f_Output := t_U16; Shl_f_shl := fun (self : t_U16) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_98667823 : t_Shl ((t_U16)) ((t_U128)) := { Shl_f_Output := t_U16; Shl_f_shl := fun (self : t_U16) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_116990915 : t_Shr ((t_U16)) ((t_U8)) := { Shr_f_Output := t_U16; Shr_f_shr := fun (self : t_U16) (rhs : t_U8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
#[global] Instance t_Shr_53270962 : t_Shr ((t_U16)) ((t_U16)) := { Shr_f_Output := t_U16; Shr_f_shr := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_622272332 : t_Shr ((t_U16)) ((t_U32)) := { Shr_f_Output := t_U16; Shr_f_shr := fun (self : t_U16) (rhs : t_U32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_1061476863 : t_Shr ((t_U16)) ((t_U64)) := { Shr_f_Output := t_U16; Shr_f_shr := fun (self : t_U16) (rhs : t_U64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_148349277 : t_Shr ((t_U16)) ((t_U128)) := { Shr_f_Output := t_U16; Shr_f_shr := fun (self : t_U16) (rhs : t_U128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitXor_39308972 : t_BitXor ((t_U16)) ((t_U16)) := { BitXor_f_Output := t_U16; BitXor_f_bitxor := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitAnd_100986953 : t_BitAnd ((t_U16)) ((t_U16)) := { BitAnd_f_Output := t_U16; BitAnd_f_bitand := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Machine-generated: t_U16 bitor, then the full t_U8 instance set, then
   signed Rem/Div instances over the t_Z abstraction.
   NOTE(review): unlike the t_U16/t_U32 instances above, the t_U8 instances
   omit the explicit `(t_Concretization := _ : …)` annotation — presumably
   instance inference is unambiguous for t_U8, but confirm this is
   intentional generator behaviour rather than an inconsistency. *)
#[global] Instance t_BitOr_321212552 : t_BitOr ((t_U16)) ((t_U16)) := { BitOr_f_Output := t_U16; BitOr_f_bitor := fun (self : t_U16) (rhs : t_U16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Neg_410091205 : t_Neg ((t_U8)) := { Neg_f_Output := t_U8; Neg_f_neg := fun (self : t_U8)=> Concretization_f_concretize (haxint_sub (v_WORDSIZE_8_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_8_))); }. #[global] Instance t_Mul_116494850 : t_Mul ((t_U8)) ((t_U8)) := { Mul_f_Output := t_U8; Mul_f_mul := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_674469245 : t_Rem ((t_U8)) ((t_U8)) := { Rem_f_Output := t_U8; Rem_f_rem := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Add_886374338 : t_Add ((t_U8)) ((t_U8)) := { Add_f_Output := t_U8; Add_f_add := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_7559770 : t_Div ((t_U8)) ((t_U8)) := { Div_f_Output := t_U8; Div_f_div := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_889664521 : t_Shl ((t_U8)) ((t_U8)) := { Shl_f_Output := t_U8; Shl_f_shl := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_268581730 : t_Shl ((t_U8)) ((t_U16)) := { Shl_f_Output := t_U8; Shl_f_shl := fun (self : t_U8) (rhs : t_U16)=> Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* t_U8 << {u32,u64,u128}; t_U8 >> {u8,u16,u32,u64,u128}. *)
#[global] Instance t_Shl_833473770 : t_Shl ((t_U8)) ((t_U32)) := { Shl_f_Output := t_U8; Shl_f_shl := fun (self : t_U8) (rhs : t_U32)=> Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_896563459 : t_Shl ((t_U8)) ((t_U64)) := { Shl_f_Output := t_U8; Shl_f_shl := fun (self : t_U8) (rhs : t_U64)=> Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shl_595294021 : t_Shl ((t_U8)) ((t_U128)) := { Shl_f_Output := t_U8; Shl_f_shl := fun (self : t_U8) (rhs : t_U128)=> Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_467626732 : t_Shr ((t_U8)) ((t_U8)) := { Shr_f_Output := t_U8; Shr_f_shr := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_985367369 : t_Shr ((t_U8)) ((t_U16)) := { Shr_f_Output := t_U8; Shr_f_shr := fun (self : t_U8) (rhs : t_U16)=> Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_868101800 : t_Shr ((t_U8)) ((t_U32)) := { Shr_f_Output := t_U8; Shr_f_shr := fun (self : t_U8) (rhs : t_U32)=> Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_300023283 : t_Shr ((t_U8)) ((t_U64)) := { Shr_f_Output := t_U8; Shr_f_shr := fun (self : t_U8) (rhs : t_U64)=> Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Shr_794091640 : t_Shr ((t_U8)) ((t_U128)) := { Shr_f_Output := t_U8; Shr_f_shr := fun (self : t_U8) (rhs : t_U128)=> Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* t_U8 xor/and/or; signed I128/I64 rem and div go through the t_Z (integer)
   abstraction and z_rem/z_div, with an explicit t_Concretization pin. *)
#[global] Instance t_BitXor_24738444 : t_BitXor ((t_U8)) ((t_U8)) := { BitXor_f_Output := t_U8; BitXor_f_bitxor := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitAnd_358790390 : t_BitAnd ((t_U8)) ((t_U8)) := { BitAnd_f_Output := t_U8; BitAnd_f_bitand := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_BitOr_349401480 : t_BitOr ((t_U8)) ((t_U8)) := { BitOr_f_Output := t_U8; BitOr_f_bitor := fun (self : t_U8) (rhs : t_U8)=> Concretization_f_concretize (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_998027599 : t_Rem ((t_I128)) ((t_I128)) := { Rem_f_Output := t_I128; Rem_f_rem := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_865866956 : t_Div ((t_I128)) ((t_I128)) := { Div_f_Output := t_I128; Div_f_div := fun (self : t_I128) (rhs : t_I128)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_957489424 : t_Rem ((t_I64)) ((t_I64)) := { Rem_f_Output := t_I64; Rem_f_rem := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_611785525 : t_Div ((t_I64)) ((t_I64)) := { Div_f_Output := t_I64; Div_f_div := fun (self : t_I64) (rhs : t_I64)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. 
(* Signed I32/I16/I8 rem/div (same z_rem/z_div scheme); finally t_U128 sub,
   defined as self + (-rhs) using the modular Neg instance. *)
#[global] Instance t_Rem_219303214 : t_Rem ((t_I32)) ((t_I32)) := { Rem_f_Output := t_I32; Rem_f_rem := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_1002924104 : t_Div ((t_I32)) ((t_I32)) := { Div_f_Output := t_I32; Div_f_div := fun (self : t_I32) (rhs : t_I32)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_948867246 : t_Rem ((t_I16)) ((t_I16)) := { Rem_f_Output := t_I16; Rem_f_rem := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_357493436 : t_Div ((t_I16)) ((t_I16)) := { Div_f_Output := t_I16; Div_f_div := fun (self : t_I16) (rhs : t_I16)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Rem_228000167 : t_Rem ((t_I8)) ((t_I8)) := { Rem_f_Output := t_I8; Rem_f_rem := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Div_470010025 : t_Div ((t_I8)) ((t_I8)) := { Div_f_Output := t_I8; Div_f_div := fun (self : t_I8) (rhs : t_I8)=> Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs))); }. #[global] Instance t_Sub_1018502693 : t_Sub ((t_U128)) ((t_U128)) := { Sub_f_Output := t_U128; Sub_f_sub := fun (self : t_U128) (rhs : t_U128)=> Add_f_add (t_Add := _ : t_Add t_U128 t_U128) (self) (Neg_f_neg (rhs)); }. 
(* Machine-generated: derived instances for the unsigned types.
   - t_Not: bitwise complement as xor with the all-ones constant
     Constants_f_MAX of the type.
   - t_Sub: wrapping subtraction as self + (-rhs), reusing the Add and
     (modular) Neg instances; the `t_Add := _` annotation pins the Add
     instance to resolve.
   The `==== FILE: … ====` marker below is a repository-dump separator, not
   Coq source: what follows is the start of the generated proofs file
   Core_Base_interface_Int_I128_proofs.v (imports and scope setup only). *)
#[global] Instance t_Not_758360759 : t_Not ((t_U128)) := { Not_f_Output := t_U128; Not_f_not := fun (self : t_U128)=> BitXor_f_bitxor (self) (Constants_f_MAX); }. #[global] Instance t_Sub_919216830 : t_Sub ((t_U64)) ((t_U64)) := { Sub_f_Output := t_U64; Sub_f_sub := fun (self : t_U64) (rhs : t_U64)=> Add_f_add (t_Add := _ : t_Add _ t_U64) (self) (Neg_f_neg (rhs)); }. #[global] Instance t_Not_693249901 : t_Not ((t_U64)) := { Not_f_Output := t_U64; Not_f_not := fun (self : t_U64)=> BitXor_f_bitxor (self) (Constants_f_MAX); }. #[global] Instance t_Sub_22623594 : t_Sub ((t_U32)) ((t_U32)) := { Sub_f_Output := t_U32; Sub_f_sub := fun (self : t_U32) (rhs : t_U32)=> Add_f_add (t_Add := _ : t_Add _ t_U32) (self) (Neg_f_neg (rhs)); }. #[global] Instance t_Not_183316157 : t_Not ((t_U32)) := { Not_f_Output := t_U32; Not_f_not := fun (self : t_U32)=> BitXor_f_bitxor (self) (Constants_f_MAX); }. #[global] Instance t_Sub_502320750 : t_Sub ((t_U16)) ((t_U16)) := { Sub_f_Output := t_U16; Sub_f_sub := fun (self : t_U16) (rhs : t_U16)=> Add_f_add (t_Add := _ : t_Add _ t_U16) (self) (Neg_f_neg (rhs)); }. #[global] Instance t_Not_669226601 : t_Not ((t_U16)) := { Not_f_Output := t_U16; Not_f_not := fun (self : t_U16)=> BitXor_f_bitxor (self) (Constants_f_MAX); }. #[global] Instance t_Sub_299023787 : t_Sub ((t_U8)) ((t_U8)) := { Sub_f_Output := t_U8; Sub_f_sub := fun (self : t_U8) (rhs : t_U8)=> Add_f_add (t_Add := _ : t_Add _ t_U8) (self) (Neg_f_neg (rhs)); }. #[global] Instance t_Not_761019181 : t_Not ((t_U8)) := { Not_f_Output := t_U8; Not_f_not := fun (self : t_U8)=> BitXor_f_bitxor (self) (Constants_f_MAX); }. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I128_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. 
(* Machine-generated proof stubs. Each *_proofs.v file below repeats the same
   "dummy lib": placeholder definitions (t_u8..t_isize as Z, t_Vec/t_Slice as
   list, no-op Clone, etc.) that stand in for the real Core library — the
   in-file TODO says to replace it.  The payload of each file is one or more
   Lemmas about the generated integer types, all closed with `Admitted`
   (stated but unproven obligations).
   NOTE(review): every Lemma statement here reads `… : -> PartialEq_f_eq …`;
   a bare `->` with no left operand is not standard Coq syntax, so this
   presumably relies on a notation exported by Core_Base_interface_Int (or is
   a generator artifact for an empty precondition list) — verify against the
   hax Coq backend before building on these files. *)
Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. 
(* I128 round-trip lemma: concretize (lift x) == x (admitted); then the
   I16_proofs.v file begins, with the same dummy-lib preamble. *)
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_I128) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I16_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. 
(* Rest of the I16 dummy lib, the I16 round-trip lemma (admitted), and the
   start of I32_proofs.v. *)
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_I16) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I32_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. 
*) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. 
(* Machine-generated continuation: I32 round-trip lemma, then the full
   I64_proofs.v and I8_proofs.v files (same dummy-lib preamble, same single
   admitted `abstract_concretize_cancel` lemma each), then the start of
   U128_proofs.v.  The `: ->` lemma syntax caveat noted in the I128 file
   applies to every lemma here as well. *)
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_I32) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I64_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). 
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_I64) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I8_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. 
Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_I8) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. 
Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U128_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. 
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. Lemma abstract_concretize_cancel (x : t_U128) : -> PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true. Proof. Admitted. Lemma mod_add (x : t_U128) (y : t_U128) (z : t_U128) : -> orb (haxint_le (v_WORDSIZE_128_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true. Proof. Admitted. Lemma mod_mul (x : t_U128) (y : t_U128) (z : t_U128) : -> orb (haxint_lt (v_WORDSIZE_128_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true. Proof. Admitted. Lemma mod_one (x : t_U128) : -> PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true. Proof. Admitted. 
Lemma mod_sub (x : t_U128) (y : t_U128) (z : t_U128) : -> orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true. Proof. Admitted. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U16_proofs.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. 
(* Remainder of the dummy prelude for the U16 proofs file. *)
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B :=
  { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B :=
  match x with
  | Some x => Some (f x)
  | None => None
  end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)

From Core Require Import Core_Base_interface_Int.
Export Core_Base_interface_Int.

(* Lift/concretize round-trip is the identity for U16.  Fix: removed the
   generator's spurious leading `->` after the colon (invalid Coq syntax),
   here and in mod_add below. *)
Lemma abstract_concretize_cancel (x : t_U16) :
  PartialEq_f_eq
    (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x)
  = true.
Proof. Admitted.

(* Modular addition distributes over reduction mod z for U16, unless the
   abstract sum reaches the 16-bit word size. *)
Lemma mod_add (x : t_U16) (y : t_U16) (z : t_U16) :
  orb
    (haxint_le (v_WORDSIZE_16_)
       (haxint_add (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.
(* Remaining U16 lemmas.  Fix: removed the generator's spurious leading
   `->` after the colon in each statement (invalid Coq syntax). *)
Lemma mod_mul (x : t_U16) (y : t_U16) (z : t_U16) :
  orb
    (haxint_lt (v_WORDSIZE_16_)
       (haxint_mul (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y))
             (Clone_f_clone (z))) (z)))
  = true.
Proof. Admitted.

(* Any U16 reduced modulo one is zero. *)
Lemma mod_one (x : t_U16) :
  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.
Proof. Admitted.

(* Modular subtraction distributes over reduction, guarded against
   underflow (x < y) and a modulus not exceeding x. *)
Lemma mod_sub (x : t_U16) (y : t_U16) (z : t_U16) :
  orb
    (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y)))
       (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x))))
    (PartialEq_f_eq
       (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U32_proofs.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
(* Remainder of the dummy prelude for the U32 proofs file. *)
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B :=
  { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B :=
  match x with
  | Some x => Some (f x)
  | None => None
  end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)

From Core Require Import Core_Base_interface_Int.
Export Core_Base_interface_Int.

(* Lift/concretize round-trip is the identity for U32.  Fix: removed the
   generator's spurious leading `->` after the colon (invalid Coq syntax). *)
Lemma abstract_concretize_cancel (x : t_U32) :
  PartialEq_f_eq
    (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x)
  = true.
Proof. Admitted.
(* U32 modular-arithmetic lemmas.  Fix: removed the generator's spurious
   leading `->` after the colon in each statement (invalid Coq syntax). *)
Lemma mod_add (x : t_U32) (y : t_U32) (z : t_U32) :
  orb
    (haxint_le (v_WORDSIZE_32_)
       (haxint_add (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.

Lemma mod_mul (x : t_U32) (y : t_U32) (z : t_U32) :
  orb
    (haxint_lt (v_WORDSIZE_32_)
       (haxint_mul (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y))
             (Clone_f_clone (z))) (z)))
  = true.
Proof. Admitted.

Lemma mod_one (x : t_U32) :
  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.
Proof. Admitted.

Lemma mod_sub (x : t_U32) (y : t_U32) (z : t_U32) :
  orb
    (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y)))
       (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x))))
    (PartialEq_f_eq
       (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U64_proofs.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
(* Dummy prelude for the U64 proofs file: all machine integers are modeled
   as unbounded Z, arrays/slices/vecs as plain lists, and the Rust traits
   (Sized, Clone, From/Into, Cast) as trivial type classes.  Identical copies
   of this prelude are pasted into each generated proofs file; see the
   "TODO: Replace this dummy lib" note above. *)
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
(* NOTE(review): impl_1__append returns the appended list paired with the
   second argument — presumably mirroring Rust's Vec::append leaving the
   drained source; confirm against the engine's calling convention. *)
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
(* NOTE(review): impl_1__push conses at the front, whereas Rust's Vec::push
   appends at the back — element order is reversed relative to Rust. *)
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
From Core Require Import Core_Base_interface_Int.
Export Core_Base_interface_Int.

(* U64 modular-arithmetic lemmas.  Fix: removed the generator's spurious
   leading `->` after the colon in each statement (invalid Coq syntax). *)
Lemma abstract_concretize_cancel (x : t_U64) :
  PartialEq_f_eq
    (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x)
  = true.
Proof. Admitted.

Lemma mod_add (x : t_U64) (y : t_U64) (z : t_U64) :
  orb
    (haxint_le (v_WORDSIZE_64_)
       (haxint_add (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.

Lemma mod_mul (x : t_U64) (y : t_U64) (z : t_U64) :
  orb
    (haxint_lt (v_WORDSIZE_64_)
       (haxint_mul (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y))
             (Clone_f_clone (z))) (z)))
  = true.
Proof. Admitted.

Lemma mod_one (x : t_U64) :
  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.
Proof. Admitted.

Lemma mod_sub (x : t_U64) (y : t_U64) (z : t_U64) :
  orb
    (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y)))
       (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x))))
    (PartialEq_f_eq
       (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U8_proofs.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* Dummy prelude for the U8 proofs file — same shape as in the sibling
   generated files: machine integers as Z, collections as lists, traits as
   trivial classes. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
(* assert discards its argument; the commented hypothesis shows the
   originally-intended proof obligation. *)
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)

From Core Require Import Core_Base_interface_Int.
Export Core_Base_interface_Int.

(* U8 modular-arithmetic lemmas.  Fix: removed the generator's spurious
   leading `->` after the colon in each statement (invalid Coq syntax). *)
Lemma abstract_concretize_cancel (x : t_U8) :
  PartialEq_f_eq
    (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x)
  = true.
Proof. Admitted.

Lemma mod_add (x : t_U8) (y : t_U8) (z : t_U8) :
  orb
    (haxint_le (v_WORDSIZE_8_)
       (haxint_add (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.

Lemma mod_mul (x : t_U8) (y : t_U8) (z : t_U8) :
  orb
    (haxint_lt (v_WORDSIZE_8_)
       (haxint_mul (Abstraction_f_lift (Clone_f_clone (x)))
          (Abstraction_f_lift (Clone_f_clone (y)))))
    (PartialEq_f_eq
       (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y))
             (Clone_f_clone (z))) (z)))
  = true.
Proof. Admitted.

Lemma mod_one (x : t_U8) :
  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.
Proof. Admitted.

Lemma mod_sub (x : t_U8) (y : t_U8) (z : t_U8) :
  orb
    (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y)))
       (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x))))
    (PartialEq_f_eq
       (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y)))
          (Clone_f_clone (z)))
       (Rem_f_rem
          (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z)))
             (Rem_f_rem (y) (Clone_f_clone (z)))) (z)))
  = true.
Proof. Admitted.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Clone.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* Rust's Clone trait as a type class; the global instance makes every type
   trivially clonable via the identity function. *)
Class t_Clone (v_Self : Type) : Type := { Clone_f_clone : v_Self -> v_Self; }.
Arguments t_Clone (_).
#[global] Instance t_Clone_any T : t_Clone T := { Clone_f_clone := id }.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Cmp.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Option (t_Option).
Export Core_Option (t_Option).
(* Integer representations of the Ordering discriminants; Less (= -1) is
   defined further below, after the inductive. *)
Definition discriminant_Ordering_Equal := 0.
Definition discriminant_Ordering_Greater := 1.
(* Rust's core::cmp::Ordering. *)
Inductive t_Ordering : Type :=
| Ordering_Less
| Ordering_Equal
| Ordering_Greater.
Arguments Ordering_Less.
Arguments Ordering_Equal.
Arguments Ordering_Greater.
(* Predicates mirroring Ordering::is_eq / is_gt / is_lt. *)
Definition impl__Ordering__is_eq (self : t_Ordering) : bool := match self with | Ordering_Equal => true | _ => false end.
Definition impl__Ordering__is_gt (self : t_Ordering) : bool := match self with | Ordering_Greater => true | _ => false end.
Definition impl__Ordering__is_lt (self : t_Ordering) : bool := match self with | Ordering_Less => true | _ => false end.
(* Ordering::reverse — swaps Less and Greater, fixes Equal. *)
Definition impl__Ordering__reverse (self : t_Ordering) : t_Ordering := match self with | Ordering_Less => Ordering_Greater | Ordering_Equal => Ordering_Equal | Ordering_Greater => Ordering_Less end.
Definition discriminant_Ordering_Less := -1.
(* Maps an Ordering to its integer discriminant (-1 / 0 / 1). *)
Definition t_Ordering_cast_to_repr (x : t_Ordering) := match x with | Ordering_Less => discriminant_Ordering_Less | Ordering_Equal => discriminant_Ordering_Equal | Ordering_Greater => discriminant_Ordering_Greater end.
(* Rust's PartialEq trait: both eq and ne are explicit fields (ne is not
   derived from eq here). *)
Class t_PartialEq (v_Self : Type) (v_Rhs : Type) : Type := { PartialEq_f_eq : v_Self -> v_Rhs -> bool; PartialEq_f_ne : v_Self -> v_Rhs -> bool; }.
Arguments t_PartialEq (_) (_).
(* is_ge / is_le / is_ne expressed as negations of the single excluded
   constructor. *)
Definition impl__Ordering__is_ge (self : t_Ordering) : bool := negb (match self with | Ordering_Less => true | _ => false end).
Definition impl__Ordering__is_le (self : t_Ordering) : bool := negb (match self with | Ordering_Greater => true | _ => false end).
Definition impl__Ordering__is_ne (self : t_Ordering) : bool := negb (match self with | Ordering_Equal => true | _ => false end).
(* Structural PartialEq instance for Ordering; ne is literally the negation
   of the same constructor-by-constructor comparison. *)
#[global] Instance t_PartialEq_603824491 : t_PartialEq ((t_Ordering)) ((t_Ordering)) :=
  { PartialEq_f_eq := fun (self : t_Ordering) (other : t_Ordering)=> match self with | Ordering_Less => match other with | Ordering_Less => true | _ => false end | Ordering_Equal => match other with | Ordering_Equal => true | _ => false end | Ordering_Greater => match other with | Ordering_Greater => true | _ => false end end; PartialEq_f_ne := fun (self : t_Ordering) (other : t_Ordering)=> negb (match self with | Ordering_Less => match other with | Ordering_Less => true | _ => false end | Ordering_Equal => match other with | Ordering_Equal => true | _ => false end | Ordering_Greater => match other with | Ordering_Greater => true | _ => false end end); }.
(* Rust's PartialOrd trait, parameterized over a PartialEq instance. *)
Class t_PartialOrd (v_Self : Type) (v_Rhs : Type) `{t_PartialEq (v_Self) (v_Rhs)} : Type := { PartialOrd_f_partial_cmp : v_Self -> v_Rhs -> t_Option ((t_Ordering)); PartialOrd_f_lt : v_Self -> v_Rhs -> bool; PartialOrd_f_le : v_Self -> v_Rhs -> bool; PartialOrd_f_gt : v_Self -> v_Rhs -> bool; PartialOrd_f_ge : v_Self -> v_Rhs -> bool; }.
Arguments t_PartialOrd (_) (_) {_}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Convert.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Marker.
Export Core_Marker.
(* Rust's From trait; the commented Sized constraints were dropped by the
   generator (every type is Sized in this model). *)
Class t_From (v_Self : Type) (v_T : Type) (* `{t_Sized (v_Self)} `{t_Sized (v_T)} *) : Type := { From_f_from : v_T -> v_Self; }.
Arguments t_From (_) (_) (* {_} {_} *).
(* Reflexive instance: From<T> for T is the identity. *)
#[global] Instance t_From_46353410 `{v_T : Type} (* `{t_Sized (v_T)} *) : t_From ((v_T)) ((v_T)) := { From_f_from := fun (t : v_T)=> t; }.
Class t_Into (v_Self : Type) (v_T : Type) (* `{t_Sized (v_Self)} `{t_Sized (v_T)} *) : Type := { Into_f_into : v_Self -> v_T; }.
Arguments t_Into (_) (_) (* {_} {_} *).
(* Blanket instance mirroring Rust: From<U, T> gives Into<T, U>. *)
#[global] Instance t_Into_730689925 `{v_T : Type} `{v_U : Type} (* `{t_Sized (v_T)} `{t_Sized (v_U)} *) `{t_From (v_U) (v_T)} : t_Into ((v_T)) ((v_U)) := { Into_f_into := fun (self : v_T)=> From_f_from (self); }.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Fmt.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* Dummy prelude for Core_Fmt — same copy-pasted model as in the other
   generated files. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
(* Continuation of the Core_Fmt dummy prelude. *)
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
(* NotImplementedYet *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Intrinsics.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Primitive.
Export Core_Primitive.
From Core Require Import Core_Base_interface.
Export Core_Base_interface.
From Core Require Import Core_Base_interface_Coerce.
Export Core_Base_interface_Coerce.
From Core Require Import Core_Base.
Export Core_Base.
From Core Require Import Core_Ops.
Export Core_Ops.
(* NotImplementedYet *)
(* NotImplementedYet *)
(* The notations below re-export the intrinsic names brought in by the Core
   imports above under identical names — presumably to mirror how the Rust
   intrinsics module re-exports them; verify against the engine's naming
   scheme. *)
Notation "'add_with_overflow_i128'" := (add_with_overflow_i128).
Notation "'add_with_overflow_i16'" := (add_with_overflow_i16).
Notation "'add_with_overflow_i32'" := (add_with_overflow_i32).
Notation "'add_with_overflow_i64'" := (add_with_overflow_i64).
Notation "'add_with_overflow_i8'" := (add_with_overflow_i8).
Notation "'add_with_overflow_isize'" := (add_with_overflow_isize).
Notation "'unchecked_add_i128'" := (unchecked_add_i128).
Notation "'unchecked_add_i16'" := (unchecked_add_i16).
Notation "'unchecked_add_i32'" := (unchecked_add_i32).
Notation "'unchecked_add_i64'" := (unchecked_add_i64).
Notation "'unchecked_add_i8'" := (unchecked_add_i8).
Notation "'unchecked_add_isize'" := (unchecked_add_isize).
Notation "'unchecked_add_u128'" := (unchecked_add_u128).
Notation "'unchecked_add_u16'" := (unchecked_add_u16).
Notation "'unchecked_add_u32'" := (unchecked_add_u32).
Notation "'unchecked_add_u64'" := (unchecked_add_u64).
Notation "'unchecked_add_u8'" := (unchecked_add_u8).
Notation "'unchecked_add_usize'" := (unchecked_add_usize).
(* Intrinsic re-export notations, continued: overflowing add (unsigned),
   unchecked division, and wrapping add/sub. *)
Notation "'add_with_overflow_u128'" := (add_with_overflow_u128).
Notation "'add_with_overflow_u16'" := (add_with_overflow_u16).
Notation "'add_with_overflow_u32'" := (add_with_overflow_u32).
Notation "'add_with_overflow_u64'" := (add_with_overflow_u64).
Notation "'add_with_overflow_u8'" := (add_with_overflow_u8).
Notation "'add_with_overflow_usize'" := (add_with_overflow_usize).
Notation "'unchecked_div_u128'" := (unchecked_div_u128).
Notation "'unchecked_div_u16'" := (unchecked_div_u16).
Notation "'unchecked_div_u32'" := (unchecked_div_u32).
Notation "'unchecked_div_u64'" := (unchecked_div_u64).
Notation "'unchecked_div_u8'" := (unchecked_div_u8).
Notation "'unchecked_div_usize'" := (unchecked_div_usize).
Notation "'wrapping_add_i128'" := (wrapping_add_i128).
Notation "'wrapping_add_i16'" := (wrapping_add_i16).
Notation "'wrapping_add_i32'" := (wrapping_add_i32).
Notation "'wrapping_add_i64'" := (wrapping_add_i64).
Notation "'wrapping_add_i8'" := (wrapping_add_i8).
Notation "'wrapping_add_isize'" := (wrapping_add_isize).
Notation "'wrapping_sub_i128'" := (wrapping_sub_i128).
Notation "'wrapping_sub_i16'" := (wrapping_sub_i16).
Notation "'wrapping_sub_i32'" := (wrapping_sub_i32).
Notation "'wrapping_sub_i64'" := (wrapping_sub_i64).
Notation "'wrapping_sub_i8'" := (wrapping_sub_i8).
Notation "'wrapping_sub_isize'" := (wrapping_sub_isize).
Notation "'unchecked_div_i128'" := (unchecked_div_i128).
Notation "'unchecked_div_i16'" := (unchecked_div_i16).
Notation "'unchecked_div_i32'" := (unchecked_div_i32).
Notation "'unchecked_div_i64'" := (unchecked_div_i64).
Notation "'unchecked_div_i8'" := (unchecked_div_i8).
Notation "'unchecked_div_isize'" := (unchecked_div_isize).
Notation "'wrapping_add_u128'" := (wrapping_add_u128).
Notation "'wrapping_add_u16'" := (wrapping_add_u16).
Notation "'wrapping_add_u32'" := (wrapping_add_u32).
Notation "'wrapping_add_u64'" := (wrapping_add_u64).
Notation "'wrapping_add_u8'" := (wrapping_add_u8).
(* Intrinsic re-export notations, continued: wrapping mul/sub and bit
   rotations.  The commented-out group at the bottom (bswap/ctlz/...) covers
   intrinsics with no model yet. *)
Notation "'wrapping_add_usize'" := (wrapping_add_usize).
Notation "'wrapping_mul_i128'" := (wrapping_mul_i128).
Notation "'wrapping_mul_i16'" := (wrapping_mul_i16).
Notation "'wrapping_mul_i32'" := (wrapping_mul_i32).
Notation "'wrapping_mul_i64'" := (wrapping_mul_i64).
Notation "'wrapping_mul_i8'" := (wrapping_mul_i8).
Notation "'wrapping_mul_isize'" := (wrapping_mul_isize).
Notation "'wrapping_mul_u128'" := (wrapping_mul_u128).
Notation "'wrapping_mul_u16'" := (wrapping_mul_u16).
Notation "'wrapping_mul_u32'" := (wrapping_mul_u32).
Notation "'wrapping_mul_u64'" := (wrapping_mul_u64).
Notation "'wrapping_mul_u8'" := (wrapping_mul_u8).
Notation "'wrapping_mul_usize'" := (wrapping_mul_usize).
Notation "'wrapping_sub_u128'" := (wrapping_sub_u128).
Notation "'wrapping_sub_u16'" := (wrapping_sub_u16).
Notation "'wrapping_sub_u32'" := (wrapping_sub_u32).
Notation "'wrapping_sub_u64'" := (wrapping_sub_u64).
Notation "'wrapping_sub_u8'" := (wrapping_sub_u8).
Notation "'wrapping_sub_usize'" := (wrapping_sub_usize).
Notation "'rotate_left_u128'" := (rotate_left_u128).
Notation "'rotate_left_u16'" := (rotate_left_u16).
Notation "'rotate_left_u32'" := (rotate_left_u32).
Notation "'rotate_left_u64'" := (rotate_left_u64).
Notation "'rotate_left_u8'" := (rotate_left_u8).
Notation "'rotate_left_usize'" := (rotate_left_usize).
Notation "'rotate_right_u128'" := (rotate_right_u128).
Notation "'rotate_right_u16'" := (rotate_right_u16).
Notation "'rotate_right_u32'" := (rotate_right_u32).
Notation "'rotate_right_u64'" := (rotate_right_u64).
Notation "'rotate_right_u8'" := (rotate_right_u8).
Notation "'rotate_right_usize'" := (rotate_right_usize).
(* Notation "'bswap_u128'" := (bswap_u128). *)
(* Notation "'bswap_u16'" := (bswap_u16). *)
(* Notation "'bswap_u32'" := (bswap_u32). *)
(* Notation "'bswap_u64'" := (bswap_u64). *)
(* Notation "'bswap_u8'" := (bswap_u8). *)
(* Notation "'bswap_usize'" := (bswap_usize). *)
(* Notation "'ctlz_u128'" := (ctlz_u128).
*)
(* Remaining commented-out intrinsics (count-leading-zeros, popcount,
   count-trailing-zeros) — not modeled yet. *)
(* Notation "'ctlz_u16'" := (ctlz_u16). *)
(* Notation "'ctlz_u32'" := (ctlz_u32). *)
(* Notation "'ctlz_u64'" := (ctlz_u64). *)
(* Notation "'ctlz_u8'" := (ctlz_u8). *)
(* Notation "'ctlz_usize'" := (ctlz_usize). *)
(* Notation "'ctpop_u128'" := (ctpop_u128). *)
(* Notation "'ctpop_u16'" := (ctpop_u16). *)
(* Notation "'ctpop_u32'" := (ctpop_u32). *)
(* Notation "'ctpop_u64'" := (ctpop_u64). *)
(* Notation "'ctpop_u8'" := (ctpop_u8). *)
(* Notation "'ctpop_usize'" := (ctpop_usize). *)
(* Notation "'cttz_u128'" := (cttz_u128). *)
(* Notation "'cttz_u16'" := (cttz_u16). *)
(* Notation "'cttz_u32'" := (cttz_u32). *)
(* Notation "'cttz_u64'" := (cttz_u64). *)
(* Notation "'cttz_u8'" := (cttz_u8). *)
(* Notation "'cttz_usize'" := (cttz_usize). *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* Dummy prelude for Core_Iter — same copy-pasted model as in the other
   generated files. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
(* Continuation of the Core_Iter dummy prelude. *)
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
(* NOTE(review): the "Self_Traits"/"Self_Range" module names look like
   unresolved self-referential paths emitted by the generator — confirm they
   resolve under the Core logical path. *)
From Core Require Import Self_Traits (t_Iterator).
Export Self_Traits (t_Iterator).
From Core Require Import Self_Range (t_Step).
Export Self_Range (t_Step).
From Core Require Import Self_Traits (t_TrustedStep).
Export Self_Traits (t_TrustedStep).
From Core Require Import Self_Traits (t_IntoIterator).
Export Self_Traits (t_IntoIterator).
(* NotImplementedYet *)
(* NotImplementedYet *)
(* NotImplementedYet *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Range.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
(* Same generated dummy prelude as the sibling files: machine ints are [Z],
   collections are lists, trait dictionaries are trivial. *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
From Core Require Import Core (t_num). Export Core (t_num).
From Core Require Import Core_Primitive (t_u8). Export Core_Primitive (t_u8).
From Core Require Import Core_Primitive (t_u16). Export Core_Primitive (t_u16).
From Core Require Import Core_Primitive (t_u32). Export Core_Primitive (t_u32).
From Core Require Import Core_Primitive (t_u64). Export Core_Primitive (t_u64).
From Core Require Import Core_Primitive (t_u128). Export Core_Primitive (t_u128).
From Core Require Import Core_Primitive (t_usize). Export Core_Primitive (t_usize).
(* NotImplementedYet *)
(* Counterpart of Rust's core::iter::Step: distance between two values and
   checked forward stepping. Superclasses mirror the Rust trait bounds.
   NOTE(review): t_PartialOrd is not defined in the dummy lib above —
   presumably exported by Core_Primitive; verify it resolves. *)
Class t_Step (v_Self : Type) `{t_Sized (v_Self)} `{t_Clone (v_Self)} `{t_PartialOrd (v_Self) (v_Self)} : Type := {
  Step_f_steps_between : v_Self -> v_Self -> t_Option ((t_usize));
  Step_f_forward_checked : v_Self -> t_usize -> t_Option ((v_Self));
}.
Arguments t_Step (_) {_} {_} {_}.
(* Internal helper trait for Range iteration; spec_next returns the advanced
   range paired with the optionally-produced item (state-passing style). *)
Class t_RangeIteratorImpl (v_Self : Type) : Type := {
  RangeIteratorImpl_f_Item : Type;
  _ :: `{t_Sized (RangeIteratorImpl_f_Item)};
  RangeIteratorImpl_f_spec_next : v_Self -> (v_Self*t_Option ((RangeIteratorImpl_f_Item)));
}.
Arguments t_RangeIteratorImpl (_).
(* Instance for Range A; the body is the generated translation of Rust's
   todo!("specification needed") — it panics rather than computing.
   NOTE(review): record fields here are the `_impl_`-suffixed variants of the
   class fields declared above (RangeIteratorImpl_f_*) — presumably a
   generator naming convention; confirm this type-checks. *)
Instance t_RangeIteratorImpl_158276838 `{v_A : Type} `{t_Sized (v_A)} `{t_Step (v_A)} : t_RangeIteratorImpl ((t_Range ((v_A)))) := {
  RangeIteratorImpl_impl_f_Item := v_A;
  RangeIteratorImpl_impl_f_spec_next := fun (self : t_Range ((v_A)))=>
    let hax_temp_output := never_to_any (panic_fmt (impl_1__new_v1 (["not yet implemented: specification needed"%string]) (impl_1__none (tt)))) in
    (self,hax_temp_output);
}.
(* Iterator instance for Range A. next is the same unimplemented panic as
   above; size_hint collapses both branches to (0, Some 0) — the computed
   [hint] in the then-branch is bound but unused. *)
Instance t_Iterator_416192239 `{v_A : Type} `{t_Sized (v_A)} `{t_Step (v_A)} : t_Iterator ((t_Range ((v_A)))) := {
  Iterator_impl_1_f_Item := v_A;
  Iterator_impl_1_f_next := fun (self : t_Range ((v_A)))=>
    let hax_temp_output := never_to_any (panic_fmt (impl_1__new_v1 (["not yet implemented: specification needed"%string]) (impl_1__none (tt)))) in
    (self,hax_temp_output);
  Iterator_impl_1_f_size_hint := fun (self : t_Range ((v_A)))=>
    if PartialOrd_f_lt (Range_f_start self) (Range_f_end self)
    then let hint := Step_f_steps_between (Range_f_start self) (Range_f_end self) in
         (0,Option_Some (0))
    else (0,Option_Some (0));
}.
(* Step for u8: steps_between = end - start when start <= end;
   forward_checked converts the usize count and uses checked_add. *)
Instance t_Step_890486371 : t_Step ((t_u8)) := {
  Step_impl_2_f_steps_between := fun (start : t_u8) (v_end : t_u8)=>
    if PartialOrd_f_le (start) (v_end)
    then Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_2_f_forward_checked := fun (start : t_u8) (n : t_usize)=>
    match TryFrom_f_try_from (n) with
    | Result_Ok (n) => impl_6__checked_add (start) (n)
    | Result_Err (_) => Option_None
    end;
}.
(* Step for u16 (impl_7__checked_add is the u16 checked_add alias). *)
Instance t_Step_800843805 : t_Step ((t_u16)) := {
  Step_impl_3_f_steps_between := fun (start : t_u16) (v_end : t_u16)=>
    if PartialOrd_f_le (start) (v_end)
    then Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_3_f_forward_checked := fun (start : t_u16) (n : t_usize)=>
    match TryFrom_f_try_from (n) with
    | Result_Ok (n) => impl_7__checked_add (start) (n)
    | Result_Err (_) => Option_None
    end;
}.
(* Step for u32. *)
Instance t_Step_230073379 : t_Step ((t_u32)) := {
  Step_impl_4_f_steps_between := fun (start : t_u32) (v_end : t_u32)=>
    if PartialOrd_f_le (start) (v_end)
    then Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_4_f_forward_checked := fun (start : t_u32) (n : t_usize)=>
    match TryFrom_f_try_from (n) with
    | Result_Ok (n) => impl_8__checked_add (start) (n)
    | Result_Err (_) => Option_None
    end;
}.
(* Step for u64. *)
Instance t_Step_851062726 : t_Step ((t_u64)) := {
  Step_impl_5_f_steps_between := fun (start : t_u64) (v_end : t_u64)=>
    if PartialOrd_f_le (start) (v_end)
    then Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_5_f_forward_checked := fun (start : t_u64) (n : t_usize)=>
    match TryFrom_f_try_from (n) with
    | Result_Ok (n) => impl_9__checked_add (start) (n)
    | Result_Err (_) => Option_None
    end;
}.
(* Step for u128: steps_between goes through a fallible TryFrom to usize
   (a u128 difference may not fit); forward_checked is stubbed to None. *)
Instance t_Step_679763039 : t_Step ((t_u128)) := {
  Step_impl_7_f_steps_between := fun (start : t_u128) (v_end : t_u128)=>
    if PartialOrd_f_le (start) (v_end)
    then impl__ok (TryFrom_f_try_from (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_7_f_forward_checked := fun (start : t_u128) (n : t_usize)=> Option_None;
}.
(* Step for usize. *)
Instance t_Step_999413546 : t_Step ((t_usize)) := {
  Step_impl_6_f_steps_between := fun (start : t_usize) (v_end : t_usize)=>
    if PartialOrd_f_le (start) (v_end)
    then Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))
    else Option_None;
  Step_impl_6_f_forward_checked := fun (start : t_usize) (n : t_usize)=>
    match TryFrom_f_try_from (n) with
    | Result_Ok (n) => impl_11__checked_add (start) (n)
    | Result_Err (_) => Option_None
    end;
}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
(* Continuation of the generated dummy prelude for Core_Iter_Traits.v. *)
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
(* Aggregating re-exports: this module is the facade for the iterator traits
   defined in the Self_* submodules. *)
From Core Require Import Self_Collect (t_IntoIterator). Export Self_Collect (t_IntoIterator).
From Core Require Import Self_Exact_size (t_ExactSizeIterator). Export Self_Exact_size (t_ExactSizeIterator).
From Core Require Import Self_Iterator (t_Iterator). Export Self_Iterator (t_Iterator).
From Core Require Import Self_Marker (t_FusedIterator). Export Self_Marker (t_FusedIterator).
From Core Require Import Self_Marker (t_TrustedLen). Export Self_Marker (t_TrustedLen).
From Core Require Import Self_Marker (t_TrustedStep). Export Self_Marker (t_TrustedStep).
(* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Collect.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
From Core Require Import Core_Iter_Traits (t_Iterator). Export Core_Iter_Traits (t_Iterator).
(* Counterpart of Rust's core::iter::IntoIterator. The `{_.(Iterator_f_Item)
   = IntoIterator_f_Item} premise ties the produced iterator's Item to this
   trait's Item (Rust's associated-type equality constraint). *)
Class t_IntoIterator (v_Self : Type) : Type := {
  IntoIterator_f_Item : Type;
  _ :: `{t_Sized (IntoIterator_f_Item)};
  IntoIterator_f_IntoIter `{_.(Iterator_f_Item) = IntoIterator_f_Item} : Type;
  _ :: `{t_Iterator (IntoIterator_f_IntoIter)};
  _ :: `{t_Sized (IntoIterator_f_IntoIter)};
  IntoIterator_f_into_iter : v_Self -> IntoIterator_f_IntoIter;
}.
Arguments t_IntoIterator (_).
(* Counterpart of Rust's core::iter::FromIterator: build Self from any
   IntoIterator whose Item is v_A. *)
Class t_FromIterator (v_Self : Type) (v_A : Type) `{t_Sized (v_Self)} `{t_Sized (v_A)} : Type := {
  FromIterator_f_from_iter v_T : Type `{t_Sized (v_T)} `{t_IntoIterator (v_T)} `{_.(IntoIterator_f_Item) = v_A} : v_T -> v_Self;
}.
Arguments t_FromIterator (_) (_) {_} {_}.
(* Blanket impl: every Iterator is trivially IntoIterator over itself
   (into_iter is the identity), mirroring Rust's blanket impl.
   NOTE(review): fields use the `_impl_`-suffixed names rather than the class
   field names IntoIterator_f_* — generator convention; verify it elaborates. *)
Instance t_IntoIterator_346955793 `{v_I : Type} `{t_Sized (v_I)} `{t_Iterator (v_I)} : t_IntoIterator ((v_I)) := {
  IntoIterator_impl_f_Item := Iterator_f_Item;
  IntoIterator_impl_f_IntoIter := v_I;
  IntoIterator_impl_f_into_iter := fun (self : v_I)=> self;
}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Exact_size.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
From Core Require Import Core_Iter (t_Iterator). Export Core_Iter (t_Iterator).
(* Counterpart of Rust's core::iter::ExactSizeIterator: exact length and
   emptiness test, parameterized over an Iterator superclass dictionary. *)
Class t_ExactSizeIterator (v_Self : Type) `{t_Iterator (v_Self)} : Type := {
  ExactSizeIterator_f_len : v_Self -> t_usize;
  ExactSizeIterator_f_is_empty : v_Self -> bool;
}.
Arguments t_ExactSizeIterator (_) {_}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Iterator.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* This file uses the real generated core modules instead of the dummy lib. *)
From Core Require Import Core_Marker. Export Core_Marker.
From Core Require Import Core_Option. Export Core_Option.
From Core Require Import Core_Primitive. Export Core_Primitive.
From Core Require Import Core_Ops_Function. Export Core_Ops_Function.
(* Counterpart of Rust's core::iter::Iterator. Iterators are pure here, so
   next/size_hint thread the iterator state explicitly: next returns the
   advanced iterator paired with the optional item. fold takes the iterator,
   an accumulator of type v_B, and an FnMut closure dictionary v_F whose
   output is constrained to v_B. *)
Class t_Iterator (v_Self : Type) : Type := {
  Iterator_f_Item : Type;
  _H_Sized :: `{t_Sized (Iterator_f_Item)};
  Iterator_f_next : v_Self -> (v_Self*t_Option ((Iterator_f_Item)));
  Iterator_f_size_hint : v_Self -> (t_usize*t_Option ((t_usize)));
  Iterator_f_fold (v_B : Type) (v_F : Type) `{t_Sized (v_B)} `{t_Sized (v_F)} `{t_Sized (v_Self)} `{t_FnMut (v_F) ((v_B*Iterator_f_Item))} `{_.(FnOnce_f_Output) = v_B} : v_Self -> v_B -> v_F -> v_B;
}.
Arguments t_Iterator (_).
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Marker.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.
(* / dummy lib *)
From Core Require Import Core_Iter (t_Step). Export Core_Iter (t_Step).
(* Empty marker traits mirroring Rust's unsafe marker traits for iterators;
   they carry no methods, only superclass constraints. *)
Class t_TrustedFused (v_Self : Type) : Type := { }.
Arguments t_TrustedFused (_).
Class t_TrustedStep (v_Self : Type) `{t_Step (v_Self)} `{t_Copy (v_Self)} : Type := { }.
Arguments t_TrustedStep (_) {_} {_}.
Class t_FusedIterator (v_Self : Type) `{t_Iterator (v_Self)} : Type := { }.
Arguments t_FusedIterator (_) {_}.
Class t_TrustedLen (v_Self : Type) `{t_Iterator (v_Self)} : Type := { }.
Arguments t_TrustedLen (_) {_}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Marker.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List. Import List.ListNotations.
Open Scope Z_scope. Open Scope bool_scope.
Require Import Ascii. Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Clone (t_Clone). Export Core_Clone (t_Clone). Class t_Copy (v_Self : Type) `{t_Clone (v_Self)} : Type := { }. Arguments t_Copy (_) {_}. Class t_Destruct (v_Self : Type) : Type := { }. Arguments t_Destruct (_). Class t_Sized (v_Self : Type) : Type := { }. Arguments t_Sized (_). Record t_PhantomData (v_T : Type) `{t_Sized (v_T)} : Type := { }. Arguments Build_t_PhantomData {_} {_}. #[export] Notation "'PhantomData'" := Build_t_PhantomData. Class t_Tuple (v_Self : Type) : Type := { }. Arguments t_Tuple (_). #[global] Instance t_Sized_any T : t_Sized T := {}. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Base_interface. Export Core_Base_interface. From Core Require Import Core_Primitive. Export Core_Primitive. From Core Require Import Core_Intrinsics. Export Core_Intrinsics. From Core Require Import Core_Ops_Index. Export Core_Ops_Index. (* NotImplementedYet *) (* NotImplementedYet *) Notation "'impl_10__from_le'" := (from_le715594649). Notation "'impl_10__to_le'" := (to_le902648378). Notation "'impl_7__from_le'" := (from_le793045973). Notation "'impl_7__to_le'" := (to_le1012469456). Notation "'impl_8__from_le'" := (from_le706338679). Notation "'impl_8__to_le'" := (to_le724624277). Notation "'impl_9__from_le'" := (from_le435089922). Notation "'impl_9__to_le'" := (to_le2703875). Notation "'impl_6__from_le'" := (from_le529489651). 
(* Remaining little-endian conversion aliases. *)
Notation "'impl_6__to_le'" := (to_le523556665).
Notation "'impl_11__from_le'" := (from_le418743864).
Notation "'impl_11__to_le'" := (to_le946822077).
(* Integer-width constants BITS/MAX/MIN; unprefixed impl__ is i8, then the
   remaining signed types, then impl_6..impl_11 for the unsigned impls. *)
Notation "'impl__BITS'" := (v_BITS80497669).
Notation "'impl__MAX'" := (v_MAX626626007).
Notation "'impl__MIN'" := (v_MIN19747349).
Notation "'impl__i16__BITS'" := (v_BITS421056295).
Notation "'impl__i16__MAX'" := (v_MAX474501300).
Notation "'impl__i16__MIN'" := (v_MIN776391606).
Notation "'impl__i32__BITS'" := (v_BITS465526498).
Notation "'impl__i32__MAX'" := (v_MAX106630818).
Notation "'impl__i32__MIN'" := (v_MIN682967538).
Notation "'impl__i64__BITS'" := (v_BITS419886578).
Notation "'impl__i64__MAX'" := (v_MAX527043787).
Notation "'impl__i64__MIN'" := (v_MIN654206259).
Notation "'impl__i128__BITS'" := (v_BITS992667165).
Notation "'impl__i128__MAX'" := (v_MAX375377319).
Notation "'impl__i128__MIN'" := (v_MIN79612531).
Notation "'impl__isize__BITS'" := (v_BITS211584016).
Notation "'impl__isize__MAX'" := (v_MAX937003029).
Notation "'impl__isize__MIN'" := (v_MIN1017039533).
Notation "'impl_6__BITS'" := (v_BITS690311813).
Notation "'impl_6__MAX'" := (v_MAX310118176).
Notation "'impl_6__MIN'" := (v_MIN41851434).
Notation "'impl_7__BITS'" := (v_BITS277333551).
Notation "'impl_7__MAX'" := (v_MAX487295910).
Notation "'impl_7__MIN'" := (v_MIN592300287).
Notation "'impl_8__BITS'" := (v_BITS473478051).
Notation "'impl_8__MAX'" := (v_MAX826434525).
Notation "'impl_8__MIN'" := (v_MIN932777089).
Notation "'impl_9__BITS'" := (v_BITS177666292).
Notation "'impl_9__MAX'" := (v_MAX815180633).
Notation "'impl_9__MIN'" := (v_MIN631333594).
Notation "'impl_10__BITS'" := (v_BITS136999051).
Notation "'impl_10__MAX'" := (v_MAX404543799).
Notation "'impl_10__MIN'" := (v_MIN668621698).
Notation "'impl_11__BITS'" := (v_BITS229952196).
Notation "'impl_11__MAX'" := (v_MAX750570916).
Notation "'impl_11__MIN'" := (v_MIN861571008).
(* Sign predicates and signum for the signed types. *)
Notation "'impl__is_negative'" := (is_negative350273175).
Notation "'impl__is_positive'" := (is_positive286955196).
Notation "'impl__signum'" := (signum721334203).
Notation "'impl__i16__is_negative'" := (is_negative477067241).
Notation "'impl__i16__is_positive'" := (is_positive821581438).
Notation "'impl__i16__signum'" := (signum243706004).
Notation "'impl__i32__is_negative'" := (is_negative1035644813).
Notation "'impl__i32__is_positive'" := (is_positive401652342).
Notation "'impl__i32__signum'" := (signum323641039).
Notation "'impl__i64__is_negative'" := (is_negative1066124578).
Notation "'impl__i64__is_positive'" := (is_positive16569358).
Notation "'impl__i64__signum'" := (signum582963664).
Notation "'impl__i128__is_negative'" := (is_negative221698470).
Notation "'impl__i128__is_positive'" := (is_positive883218309).
Notation "'impl__i128__signum'" := (signum408800799).
Notation "'impl__isize__is_negative'" := (is_negative693446369).
Notation "'impl__isize__is_positive'" := (is_positive169998680).
Notation "'impl__isize__signum'" := (signum91486536).
(* checked_add for the unsigned impls (used by the t_Step instances). *)
Notation "'impl_6__checked_add'" := (checked_add268751055).
Notation "'impl_7__checked_add'" := (checked_add132377399).
Notation "'impl_8__checked_add'" := (checked_add985437730).
Notation "'impl_9__checked_add'" := (checked_add586246465).
Notation "'impl_10__checked_add'" := (checked_add218978451).
Notation "'impl_11__checked_add'" := (checked_add984013567).
(* Wrapping arithmetic for the signed types. *)
Notation "'impl__wrapping_add'" := (wrapping_add634491935).
Notation "'impl__wrapping_sub'" := (wrapping_sub973428293).
Notation "'impl__wrapping_neg'" := (wrapping_neg400701205).
Notation "'impl__wrapping_abs'" := (wrapping_abs400396545).
Notation "'impl__i16__wrapping_add'" := (wrapping_add868559108).
Notation "'impl__i16__wrapping_sub'" := (wrapping_sub189469152).
Notation "'impl__i16__wrapping_neg'" := (wrapping_neg860505723).
Notation "'impl__i16__wrapping_abs'" := (wrapping_abs229076826).
Notation "'impl__i32__wrapping_add'" := (wrapping_add475006616).
Notation "'impl__i32__wrapping_sub'" := (wrapping_sub298337071).
Notation "'impl__i32__wrapping_neg'" := (wrapping_neg636433078).
Notation "'impl__i32__wrapping_abs'" := (wrapping_abs729536875).
Notation "'impl__i64__wrapping_add'" := (wrapping_add590074241).
Notation "'impl__i64__wrapping_sub'" := (wrapping_sub334584751).
Notation "'impl__i64__wrapping_neg'" := (wrapping_neg868282938).
Notation "'impl__i64__wrapping_abs'" := (wrapping_abs285829312).
Notation "'impl__i128__wrapping_add'" := (wrapping_add251385439).
Notation "'impl__i128__wrapping_sub'" := (wrapping_sub681598071).
Notation "'impl__i128__wrapping_neg'" := (wrapping_neg446546984).
Notation "'impl__i128__wrapping_abs'" := (wrapping_abs281925696).
Notation "'impl__isize__wrapping_add'" := (wrapping_add226040243).
Notation "'impl__isize__wrapping_sub'" := (wrapping_sub698035192).
Notation "'impl__isize__wrapping_neg'" := (wrapping_neg912291768).
Notation "'impl__isize__wrapping_abs'" := (wrapping_abs347300819).
(* checked_div and overflowing_add for the unsigned impls. *)
Notation "'impl_6__checked_div'" := (checked_div508301931).
Notation "'impl_6__overflowing_add'" := (overflowing_add708890057).
Notation "'impl_7__checked_div'" := (checked_div614920780).
Notation "'impl_7__overflowing_add'" := (overflowing_add1023344178).
Notation "'impl_8__checked_div'" := (checked_div979383477).
Notation "'impl_8__overflowing_add'" := (overflowing_add905744292).
Notation "'impl_9__checked_div'" := (checked_div988689127).
Notation "'impl_9__overflowing_add'" := (overflowing_add581983607).
Notation "'impl_10__checked_div'" := (checked_div344106746).
Notation "'impl_10__overflowing_add'" := (overflowing_add458293681).
Notation "'impl_11__checked_div'" := (checked_div80223906).
Notation "'impl_11__overflowing_add'" := (overflowing_add682280407).
(* abs for the signed types. *)
Notation "'impl__abs'" := (abs945505614).
Notation "'impl__i16__abs'" := (abs581170970).
Notation "'impl__i32__abs'" := (abs590464694).
Notation "'impl__i64__abs'" := (abs654781043).
Notation "'impl__i128__abs'" := (abs204417539).
Notation "'impl__isize__abs'" := (abs220926056).
(* Wrapping arithmetic for the unsigned impls. *)
Notation "'impl_6__wrapping_add'" := (wrapping_add480603777).
Notation "'impl_6__wrapping_mul'" := (wrapping_mul885216284).
Notation "'impl_7__wrapping_add'" := (wrapping_add124432709).
Notation "'impl_7__wrapping_mul'" := (wrapping_mul14465189).
Notation "'impl_8__wrapping_add'" := (wrapping_add1049665857).
Notation "'impl_8__wrapping_mul'" := (wrapping_mul203346768).
Notation "'impl_9__wrapping_add'" := (wrapping_add865565639).
Notation "'impl_9__wrapping_mul'" := (wrapping_mul742978873).
Notation "'impl_10__wrapping_add'" := (wrapping_add40844100).
Notation "'impl_10__wrapping_mul'" := (wrapping_mul294115024).
Notation "'impl_11__wrapping_add'" := (wrapping_add427637036).
Notation "'impl_11__wrapping_mul'" := (wrapping_mul680896953).
Notation "'impl_6__wrapping_sub'" := (wrapping_sub403906422).
Notation "'impl_6__wrapping_neg'" := (wrapping_neg123212788).
Notation "'impl_7__wrapping_sub'" := (wrapping_sub811251034).
Notation "'impl_7__wrapping_neg'" := (wrapping_neg128555595).
Notation "'impl_8__wrapping_sub'" := (wrapping_sub708953500).
Notation "'impl_8__wrapping_neg'" := (wrapping_neg328220773).
Notation "'impl_9__wrapping_sub'" := (wrapping_sub762520851).
Notation "'impl_9__wrapping_neg'" := (wrapping_neg617136337).
Notation "'impl_10__wrapping_sub'" := (wrapping_sub409310259).
Notation "'impl_10__wrapping_neg'" := (wrapping_neg729451428).
Notation "'impl_11__wrapping_sub'" := (wrapping_sub813101882).
Notation "'impl_11__wrapping_neg'" := (wrapping_neg342773446).
Notation "'impl_6__wrapping_div'" := (wrapping_div660080892).
Notation "'impl_6__wrapping_div_euclid'" := (wrapping_div_euclid481233436).
Notation "'impl_7__wrapping_div'" := (wrapping_div366977334).
Notation "'impl_7__wrapping_div_euclid'" := (wrapping_div_euclid22267888).
Notation "'impl_8__wrapping_div'" := (wrapping_div931150450).
Notation "'impl_8__wrapping_div_euclid'" := (wrapping_div_euclid606291997).
Notation "'impl_9__wrapping_div'" := (wrapping_div168427046).
Notation "'impl_9__wrapping_div_euclid'" := (wrapping_div_euclid321252086).
Notation "'impl_10__wrapping_div'" := (wrapping_div692427683).
Notation "'impl_10__wrapping_div_euclid'" := (wrapping_div_euclid926334515).
Notation "'impl_11__wrapping_div'" := (wrapping_div905768546).
Notation "'impl_11__wrapping_div_euclid'" := (wrapping_div_euclid90317722).
Notation "'impl_6__wrapping_rem'" := (wrapping_rem984569721).
Notation "'impl_6__wrapping_rem_euclid'" := (wrapping_rem_euclid946579345).
Notation "'impl_7__wrapping_rem'" := (wrapping_rem378598035).
Notation "'impl_7__wrapping_rem_euclid'" := (wrapping_rem_euclid602402638).
Notation "'impl_8__wrapping_rem'" := (wrapping_rem292009099).
Notation "'impl_8__wrapping_rem_euclid'" := (wrapping_rem_euclid1020271291).
Notation "'impl_9__wrapping_rem'" := (wrapping_rem390602260).
Notation "'impl_9__wrapping_rem_euclid'" := (wrapping_rem_euclid839264546).
Notation "'impl_10__wrapping_rem'" := (wrapping_rem332379920).
Notation "'impl_10__wrapping_rem_euclid'" := (wrapping_rem_euclid646122423).
Notation "'impl_11__wrapping_rem'" := (wrapping_rem333089373).
Notation "'impl_11__wrapping_rem_euclid'" := (wrapping_rem_euclid769656504).
(* Bit rotations for the unsigned impls. *)
Notation "'impl_6__rotate_left'" := (rotate_left792925914).
Notation "'impl_6__rotate_right'" := (rotate_right166090082).
Notation "'impl_7__rotate_left'" := (rotate_left297034175).
Notation "'impl_7__rotate_right'" := (rotate_right138522246).
Notation "'impl_8__rotate_left'" := (rotate_left823573251).
Notation "'impl_8__rotate_right'" := (rotate_right869195717).
Notation "'impl_9__rotate_left'" := (rotate_left618936072).
Notation "'impl_9__rotate_right'" := (rotate_right1041614027).
Notation "'impl_10__rotate_left'" := (rotate_left1065866885).
Notation "'impl_10__rotate_right'" := (rotate_right591112338).
Notation "'impl_11__rotate_left'" := (rotate_left996672710).
Notation "'impl_11__rotate_right'" := (rotate_right442734174).
(* The bit-counting / byte-order aliases below are disabled (commented out)
   in the generated output. *)
(* Notation "'impl_6__count_ones'" := (count_ones202509899). *)
(* Notation "'impl_6__leading_zeros'" := (leading_zeros75047366). *)
(* Notation "'impl_6__swap_bytes'" := (swap_bytes657156997). *)
(* Notation "'impl_6__from_be'" := (from_be746282521). *)
(* Notation "'impl_6__to_be'" := (to_be972448780). *)
(* Notation "'impl_6__trailing_zeros'" := (trailing_zeros572929871). *)
(* Notation "'impl_7__count_ones'" := (count_ones91875752). *)
(* Notation "'impl_7__leading_zeros'" := (leading_zeros462412478). *)
(* Notation "'impl_7__swap_bytes'" := (swap_bytes926722059). *)
(* Notation "'impl_7__from_be'" := (from_be510959665). *)
(* Notation "'impl_7__to_be'" := (to_be551590602). *)
(* Notation "'impl_7__trailing_zeros'" := (trailing_zeros421474733). *)
(* Notation "'impl_8__count_ones'" := (count_ones776185738). *)
(* Notation "'impl_8__leading_zeros'" := (leading_zeros698221972). *)
(* Notation "'impl_8__swap_bytes'" := (swap_bytes320480126). *)
(* Notation "'impl_8__from_be'" := (from_be664756649). *)
(* Notation "'impl_8__to_be'" := (to_be82825962). *)
(* Notation "'impl_8__trailing_zeros'" := (trailing_zeros1061560720). *)
(* Notation "'impl_9__count_ones'" := (count_ones235885653). *)
(* Notation "'impl_9__leading_zeros'" := (leading_zeros338302110). *)
(* Notation "'impl_9__swap_bytes'" := (swap_bytes722254271). *)
(* Notation "'impl_9__from_be'" := (from_be16013635). *)
(* Notation "'impl_9__to_be'" := (to_be376714729). *)
(* Notation "'impl_9__trailing_zeros'" := (trailing_zeros188346231). *)
(* Notation "'impl_10__count_ones'" := (count_ones926736261). *)
(* Notation "'impl_10__leading_zeros'" := (leading_zeros19644612). *)
(* Notation "'impl_10__swap_bytes'" := (swap_bytes420879368). *)
(* Notation "'impl_10__from_be'" := (from_be191085771). *)
(* Notation "'impl_10__to_be'" := (to_be555075987). *)
(* Notation "'impl_10__trailing_zeros'" := (trailing_zeros821715250). *)
(* Notation "'impl_11__count_ones'" := (count_ones441645762). *)
(* Notation "'impl_11__leading_zeros'" := (leading_zeros905233489). *)
(* Notation "'impl_11__swap_bytes'" := (swap_bytes268673424). *)
(* Notation "'impl_11__from_be'" := (from_be607978059). *)
(* Notation "'impl_11__to_be'" := (to_be561847134). *)
(* Notation "'impl_11__trailing_zeros'" := (trailing_zeros42066260). *)
(* Euclidean remainder for the signed types. *)
Notation "'impl__rem_euclid'" := (rem_euclid622298453).
Notation "'impl__i16__rem_euclid'" := (rem_euclid158017644).
Notation "'impl__i32__rem_euclid'" := (rem_euclid881249982).
Notation "'impl__i64__rem_euclid'" := (rem_euclid1057082210).
Notation "'impl__i128__rem_euclid'" := (rem_euclid254910751).
Notation "'impl__isize__rem_euclid'" := (rem_euclid828379367).
(* Further disabled bit-counting aliases. *)
(* Notation "'impl_6__count_zeros'" := (count_zeros558337492). *)
(* Notation "'impl_6__leading_ones'" := (leading_ones55148479). *)
(* Notation "'impl_6__trailing_ones'" := (trailing_ones359778731). *)
(* Notation "'impl_7__count_zeros'" := (count_zeros199825317). *)
(* Notation "'impl_7__leading_ones'" := (leading_ones164277656). *)
(* Notation "'impl_7__trailing_ones'" := (trailing_ones903944727). *)
(* Notation "'impl_8__count_zeros'" := (count_zeros942566041). *)
(* Notation "'impl_8__leading_ones'" := (leading_ones766486760). *)
(* Notation "'impl_8__trailing_ones'" := (trailing_ones223371510). *)
(* Notation "'impl_9__count_zeros'" := (count_zeros60346158). *)
(* Notation "'impl_9__leading_ones'" := (leading_ones404666910). *)
(* Notation "'impl_9__trailing_ones'" := (trailing_ones601201120). *)
(* Notation "'impl_10__count_zeros'" := (count_zeros824862815). *)
(* Notation "'impl_10__leading_ones'" := (leading_ones475503572). *)
(* Notation "'impl_10__trailing_ones'" := (trailing_ones705845381). *)
(* Notation "'impl_11__count_zeros'" := (count_zeros73479642). *)
(* Notation "'impl_11__leading_ones'" := (leading_ones667660708). *)
(* Notation "'impl_11__trailing_ones'" := (trailing_ones979548463).
*) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num_Int_macros.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. 
Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num_Uint_macros.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* From Core Require Import Core_Ops_Arith (t_Add). *) (* Export Core_Ops_Arith (t_Add). *) (* From Core Require Import Core_Ops_Arith (t_Div). *) (* Export Core_Ops_Arith (t_Div). *) (* From Core Require Import Core_Ops_Arith (t_Mul). *) (* Export Core_Ops_Arith (t_Mul). *) (* From Core Require Import Core_Ops_Arith (t_Neg). 
*) (* Export Core_Ops_Arith (t_Neg). *) (* From Core Require Import Core_Ops_Arith (t_Rem). *) (* Export Core_Ops_Arith (t_Rem). *) (* From Core Require Import Core_Ops_Arith (t_Sub). *) (* Export Core_Ops_Arith (t_Sub). *) From Core Require Import Core_Ops_Arith. Export Core_Ops_Arith. (* From Core Require Import Core_Ops_Bit (t_BitAnd). *) (* Export Core_Ops_Bit (t_BitAnd). *) (* From Core Require Import Core_Ops_Bit (t_BitOr). *) (* Export Core_Ops_Bit (t_BitOr). *) (* From Core Require Import Core_Ops_Bit (t_BitXor). *) (* Export Core_Ops_Bit (t_BitXor). *) (* From Core Require Import Core_Ops_Bit (t_Not). *) (* Export Core_Ops_Bit (t_Not). *) (* From Core Require Import Core_Ops_Bit (t_Shl). *) (* Export Core_Ops_Bit (t_Shl). *) (* From Core Require Import Core_Ops_Bit (t_Shr). *) (* Export Core_Ops_Bit (t_Shr). *) From Core Require Import Core_Ops_Bit. Export Core_Ops_Bit. From Core Require Import Core_Ops_Index. Export Core_Ops_Index. From Core Require Import Core_Ops_Range. Export Core_Ops_Range. (* From Core Require Import Core_Ops_Index_range (t_IndexRange). *) (* Export Core_Ops_Index_range (t_IndexRange). *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Arith.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker (t_Sized). Export Core_Marker (t_Sized). 
(* NotImplementedYet *) (* NotImplementedYet *)

(* Typeclass translations of Rust's arithmetic operator traits from
   `core::ops::arith`.  Each class packages the associated `Output` type,
   an embedded `t_Sized Output` instance (the anonymous `_ ::` field,
   declared as a substructure so instance search can use it), and the
   operator method itself.  The `Arguments` directive after each class
   makes `Self` and `Rhs` explicit while keeping the `t_Sized Rhs`
   evidence implicit, matching how generated call sites apply them. *)

(* Rust trait `core::ops::Add<Rhs>`. *)
Class t_Add (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Add_f_Output : Type;
  _ :: `{t_Sized (Add_f_Output)};
  Add_f_add : v_Self -> v_Rhs -> Add_f_Output;
}.
Arguments t_Add (_) (_) {_}.

(* Rust trait `core::ops::Div<Rhs>`. *)
Class t_Div (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Div_f_Output : Type;
  _ :: `{t_Sized (Div_f_Output)};
  Div_f_div : v_Self -> v_Rhs -> Div_f_Output;
}.
Arguments t_Div (_) (_) {_}.

(* Rust trait `core::ops::Mul<Rhs>`. *)
Class t_Mul (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Mul_f_Output : Type;
  _ :: `{t_Sized (Mul_f_Output)};
  Mul_f_mul : v_Self -> v_Rhs -> Mul_f_Output;
}.
Arguments t_Mul (_) (_) {_}.

(* Rust trait `core::ops::Neg` — unary, so no `Rhs` parameter. *)
Class t_Neg (v_Self : Type) : Type := {
  Neg_f_Output : Type;
  _ :: `{t_Sized (Neg_f_Output)};
  Neg_f_neg : v_Self -> Neg_f_Output;
}.
Arguments t_Neg (_).

(* Rust trait `core::ops::Rem<Rhs>`. *)
Class t_Rem (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Rem_f_Output : Type;
  _ :: `{t_Sized (Rem_f_Output)};
  Rem_f_rem : v_Self -> v_Rhs -> Rem_f_Output;
}.
Arguments t_Rem (_) (_) {_}.

(* Rust trait `core::ops::Sub<Rhs>`. *)
Class t_Sub (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Sub_f_Output : Type;
  _ :: `{t_Sized (Sub_f_Output)};
  Sub_f_sub : v_Self -> v_Rhs -> Sub_f_Output;
}.
Arguments t_Sub (_) (_) {_}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Arith_Impls_for_prims.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
(* Placeholder "dummy lib": every Rust machine-integer type is modeled as
   the unbounded mathematical integer Z, so width/wrapping behavior is
   NOT captured here.  NOTE(review): assumes downstream uses only rely on
   in-range arithmetic — confirm before depending on overflow semantics. *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Primitive (t_u8). Export Core_Primitive (t_u8). From Core Require Import Core_Primitive (t_u16). Export Core_Primitive (t_u16). 
From Core Require Import Core_Primitive (t_u32). Export Core_Primitive (t_u32). From Core Require Import Core_Primitive (t_u64). Export Core_Primitive (t_u64). From Core Require Import Core_Primitive (t_u128). Export Core_Primitive (t_u128). From Core Require Import Core_Primitive (t_usize). Export Core_Primitive (t_usize). From Core Require Import Core_Primitive (t_i8). Export Core_Primitive (t_i8). From Core Require Import Core_Primitive (t_i16). Export Core_Primitive (t_i16). From Core Require Import Core_Primitive (t_i32). Export Core_Primitive (t_i32). From Core Require Import Core_Primitive (t_i64). Export Core_Primitive (t_i64). From Core Require Import Core_Primitive (t_i128). Export Core_Primitive (t_i128). From Core Require Import Core_Primitive (t_isize). Export Core_Primitive (t_isize). (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) Notation "'impl'" := (impl). Notation "'impl_1'" := (impl_1). Notation "'impl_2'" := (impl_2). Notation "'impl_3'" := (impl_3). Notation "'impl_4'" := (impl_4). Notation "'impl_5'" := (impl_5). Notation "'impl_12'" := (impl_12). Notation "'impl_13'" := (impl_13). Notation "'impl_14'" := (impl_14). Notation "'impl_15'" := (impl_15). Notation "'impl_16'" := (impl_16). Notation "'impl_17'" := (impl_17). Notation "'impl_24'" := (impl_24). Notation "'impl_25'" := (impl_25). Notation "'impl_26'" := (impl_26). Notation "'impl_27'" := (impl_27). Notation "'impl_28'" := (impl_28). Notation "'impl_29'" := (impl_29). Notation "'impl_6'" := (impl_6). Notation "'impl_7'" := (impl_7). Notation "'impl_8'" := (impl_8). Notation "'impl_9'" := (impl_9). Notation "'impl_10'" := (impl_10). Notation "'impl_11'" := (impl_11). Notation "'impl_30'" := (impl_30). Notation "'impl_31'" := (impl_31). Notation "'impl_32'" := (impl_32). Notation "'impl_33'" := (impl_33). Notation "'impl_34'" := (impl_34). Notation "'impl_35'" := (impl_35). 
Notation "'impl_36'" := (impl_36). Notation "'impl_37'" := (impl_37). Notation "'impl_38'" := (impl_38). Notation "'impl_39'" := (impl_39). Notation "'impl_40'" := (impl_40). Notation "'impl_41'" := (impl_41). Notation "'impl_42'" := (impl_42). Notation "'impl_43'" := (impl_43). Notation "'impl_44'" := (impl_44). Notation "'impl_45'" := (impl_45). Notation "'impl_46'" := (impl_46). Notation "'impl_47'" := (impl_47). Notation "'impl_54'" := (impl_54). Notation "'impl_55'" := (impl_55). Notation "'impl_56'" := (impl_56). Notation "'impl_57'" := (impl_57). Notation "'impl_58'" := (impl_58). Notation "'impl_59'" := (impl_59). Notation "'impl_18'" := (impl_18). Notation "'impl_19'" := (impl_19). Notation "'impl_20'" := (impl_20). Notation "'impl_21'" := (impl_21). Notation "'impl_22'" := (impl_22). Notation "'impl_23'" := (impl_23). Notation "'impl_48'" := (impl_48). Notation "'impl_49'" := (impl_49). Notation "'impl_50'" := (impl_50). Notation "'impl_51'" := (impl_51). Notation "'impl_52'" := (impl_52). Notation "'impl_53'" := (impl_53). Notation "'impl_60'" := (impl_60). Notation "'impl_61'" := (impl_61). Notation "'impl_62'" := (impl_62). Notation "'impl_63'" := (impl_63). Notation "'impl_64'" := (impl_64). Notation "'impl_65'" := (impl_65). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Bit.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker (t_Sized). Export Core_Marker (t_Sized). 
(* NotImplementedYet *)

(* Typeclass translations of Rust's bitwise operator traits from
   `core::ops::bit`.  Same encoding as the arithmetic traits: an
   associated `Output` type, an embedded `t_Sized Output` instance
   (the anonymous `_ ::` substructure field), and the operator method.
   `Arguments` makes `Self`/`Rhs` explicit, Sized evidence implicit. *)

(* Rust trait `core::ops::BitAnd<Rhs>`. *)
Class t_BitAnd (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  BitAnd_f_Output : Type;
  _ :: `{t_Sized (BitAnd_f_Output)};
  BitAnd_f_bitand : v_Self -> v_Rhs -> BitAnd_f_Output;
}.
Arguments t_BitAnd (_) (_) {_}.

(* Rust trait `core::ops::BitOr<Rhs>`. *)
Class t_BitOr (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  BitOr_f_Output : Type;
  _ :: `{t_Sized (BitOr_f_Output)};
  BitOr_f_bitor : v_Self -> v_Rhs -> BitOr_f_Output;
}.
Arguments t_BitOr (_) (_) {_}.

(* Rust trait `core::ops::BitXor<Rhs>`. *)
Class t_BitXor (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  BitXor_f_Output : Type;
  _ :: `{t_Sized (BitXor_f_Output)};
  BitXor_f_bitxor : v_Self -> v_Rhs -> BitXor_f_Output;
}.
Arguments t_BitXor (_) (_) {_}.

(* Rust trait `core::ops::Not` — unary, so no `Rhs` parameter. *)
Class t_Not (v_Self : Type) : Type := {
  Not_f_Output : Type;
  _ :: `{t_Sized (Not_f_Output)};
  Not_f_not : v_Self -> Not_f_Output;
}.
Arguments t_Not (_).

(* Rust trait `core::ops::Shl<Rhs>`. *)
Class t_Shl (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Shl_f_Output : Type;
  _ :: `{t_Sized (Shl_f_Output)};
  Shl_f_shl : v_Self -> v_Rhs -> Shl_f_Output;
}.
Arguments t_Shl (_) (_) {_}.

(* Rust trait `core::ops::Shr<Rhs>`. *)
Class t_Shr (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type := {
  Shr_f_Output : Type;
  _ :: `{t_Sized (Shr_f_Output)};
  Shr_f_shr : v_Self -> v_Rhs -> Shr_f_Output;
}.
Arguments t_Shr (_) (_) {_}.
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Bit_Impls_for_prims.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
(* Placeholder "dummy lib": machine-integer types are modeled as the
   unbounded integer Z (no width / overflow information). *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Primitive (t_u8). Export Core_Primitive (t_u8). From Core Require Import Core_Primitive (t_u16). Export Core_Primitive (t_u16). 
From Core Require Import Core_Primitive (t_u32). Export Core_Primitive (t_u32). From Core Require Import Core_Primitive (t_u64). Export Core_Primitive (t_u64). From Core Require Import Core_Primitive (t_u128). Export Core_Primitive (t_u128). From Core Require Import Core_Primitive (t_usize). Export Core_Primitive (t_usize). From Core Require Import Core_Primitive (t_i8). Export Core_Primitive (t_i8). From Core Require Import Core_Primitive (t_i16). Export Core_Primitive (t_i16). From Core Require Import Core_Primitive (t_i32). Export Core_Primitive (t_i32). From Core Require Import Core_Primitive (t_i64). Export Core_Primitive (t_i64). From Core Require Import Core_Primitive (t_i128). Export Core_Primitive (t_i128). From Core Require Import Core_Primitive (t_isize). Export Core_Primitive (t_isize). (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) Notation "'impl_84'" := (impl_84). Notation "'impl_85'" := (impl_85). Notation "'impl_86'" := (impl_86). Notation "'impl_87'" := (impl_87). Notation "'impl_88'" := (impl_88). Notation "'impl_89'" := (impl_89). Notation "'impl_6'" := (impl_6). Notation "'impl_7'" := (impl_7). Notation "'impl_8'" := (impl_8). Notation "'impl_9'" := (impl_9). Notation "'impl_10'" := (impl_10). Notation "'impl_11'" := (impl_11). Notation "'impl_12'" := (impl_12). Notation "'impl_13'" := (impl_13). Notation "'impl_14'" := (impl_14). Notation "'impl_15'" := (impl_15). Notation "'impl_16'" := (impl_16). Notation "'impl_17'" := (impl_17). Notation "'impl_18'" := (impl_18). Notation "'impl_19'" := (impl_19). Notation "'impl_20'" := (impl_20). Notation "'impl_21'" := (impl_21). Notation "'impl_22'" := (impl_22). Notation "'impl_23'" := (impl_23). Notation "'impl_24'" := (impl_24). Notation "'impl_25'" := (impl_25). Notation "'impl_26'" := (impl_26). Notation "'impl_27'" := (impl_27). 
Notation "'impl_28'" := (impl_28). Notation "'impl_29'" := (impl_29). Notation "'impl_30'" := (impl_30). Notation "'impl_31'" := (impl_31). Notation "'impl_32'" := (impl_32). Notation "'impl_33'" := (impl_33). Notation "'impl_34'" := (impl_34). Notation "'impl_35'" := (impl_35). Notation "'impl_36'" := (impl_36). Notation "'impl_37'" := (impl_37). Notation "'impl_38'" := (impl_38). Notation "'impl_39'" := (impl_39). Notation "'impl_40'" := (impl_40). Notation "'impl_41'" := (impl_41). Notation "'impl_42'" := (impl_42). Notation "'impl_43'" := (impl_43). Notation "'impl_44'" := (impl_44). Notation "'impl_45'" := (impl_45). Notation "'impl_46'" := (impl_46). Notation "'impl_47'" := (impl_47). Notation "'impl_48'" := (impl_48). Notation "'impl_49'" := (impl_49). Notation "'impl_50'" := (impl_50). Notation "'impl_51'" := (impl_51). Notation "'impl_52'" := (impl_52). Notation "'impl_53'" := (impl_53). Notation "'impl_54'" := (impl_54). Notation "'impl_55'" := (impl_55). Notation "'impl_56'" := (impl_56). Notation "'impl_57'" := (impl_57). Notation "'impl_58'" := (impl_58). Notation "'impl_59'" := (impl_59). Notation "'impl_60'" := (impl_60). Notation "'impl_61'" := (impl_61). Notation "'impl_62'" := (impl_62). Notation "'impl_63'" := (impl_63). Notation "'impl_64'" := (impl_64). Notation "'impl_65'" := (impl_65). Notation "'impl_66'" := (impl_66). Notation "'impl_67'" := (impl_67). Notation "'impl_68'" := (impl_68). Notation "'impl_69'" := (impl_69). Notation "'impl_70'" := (impl_70). Notation "'impl_71'" := (impl_71). Notation "'impl_72'" := (impl_72). Notation "'impl_73'" := (impl_73). Notation "'impl_74'" := (impl_74). Notation "'impl_75'" := (impl_75). Notation "'impl_76'" := (impl_76). Notation "'impl_77'" := (impl_77). Notation "'impl_78'" := (impl_78). Notation "'impl_79'" := (impl_79). Notation "'impl_80'" := (impl_80). Notation "'impl_81'" := (impl_81). Notation "'impl_82'" := (impl_82). Notation "'impl_83'" := (impl_83). Notation "'impl_90'" := (impl_90). 
Notation "'impl_91'" := (impl_91). Notation "'impl_92'" := (impl_92). Notation "'impl_93'" := (impl_93). Notation "'impl_94'" := (impl_94). Notation "'impl_95'" := (impl_95). Notation "'impl_96'" := (impl_96). Notation "'impl_97'" := (impl_97). Notation "'impl_98'" := (impl_98). Notation "'impl_99'" := (impl_99). Notation "'impl_100'" := (impl_100). Notation "'impl_101'" := (impl_101). Notation "'impl'" := (impl). Notation "'impl_1'" := (impl_1). Notation "'impl_2'" := (impl_2). Notation "'impl_3'" := (impl_3). Notation "'impl_4'" := (impl_4). Notation "'impl_5'" := (impl_5). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Function.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker. Export Core_Marker. (* NotImplementedYet *) Class t_FnOnce (v_Self : Type) (v_Args : Type) (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type := { FnOnce_f_Output : Type; _ :: `{t_Sized (FnOnce_f_Output)}; FnOnce_f_call_once : v_Self -> v_Args -> FnOnce_f_Output; }. Arguments t_FnOnce (_) (_) (* {_} {_} *). Class t_FnMut (v_Self : Type) (v_Args : Type) `{t_FnOnce (v_Self) (v_Args)} (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type := { FnMut_f_call_mut : v_Self -> v_Args -> (v_Self*FnOnce_f_Output); }. Arguments t_FnMut (_) (_) {_} (* {_} {_} *). Class t_Fn (v_Self : Type) (v_Args : Type) `{t_FnMut (v_Self) (v_Args)} (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type := { Fn_f_call : v_Self -> v_Args -> FnOnce_f_Output; }. Arguments t_Fn (_) (_) {_} (* {_} {_} *). #[global] Instance t_FnOnceAny {A B} : t_FnOnce (A -> B) A. Proof. 
econstructor. easy. refine (fun f x => f x). Defined. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Index.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) Class t_Index (v_Self : Type) (v_Idx : Type) : Type := { Index_f_Output : Type; Index_f_index : v_Self -> v_Idx -> Index_f_Output; }. Arguments t_Index (_) (_). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Index_range.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Primitive. Export Core_Primitive. From Core Require Import Core_Iter_Traits_Iterator. Export Core_Iter_Traits_Iterator. Record t_IndexRange : Type := { IndexRange_f_start : t_usize; IndexRange_f_end : t_usize; }. Arguments Build_t_IndexRange. Arguments IndexRange_f_start. Arguments IndexRange_f_end. #[export] Instance settable_t_IndexRange : Settable _ := settable! (Build_t_IndexRange) . Definition impl__IndexRange__zero_to (v_end : t_usize) : t_IndexRange := Build_t_IndexRange (Build_t_usize (Build_t_U64 0%N)) (v_end). 
(* Rust's `IndexRange::next_unchecked`: return the current `start` and a
   copy of the range whose `start` has been advanced by one.  No bounds
   check is performed ("unchecked"): nothing here compares against
   `IndexRange_f_end`. *)
Definition impl__IndexRange__next_unchecked (self : t_IndexRange) : (t_IndexRange*t_usize) :=
  let value := IndexRange_f_start self in
  (* Functional record update via coq-record-update's <| ... |> notation. *)
  let self := self <|IndexRange_f_start := Add_f_add (value) (Build_t_usize (Build_t_U64 1%N) : t_usize) |> in
  let hax_temp_output := value in
  (self,hax_temp_output).

(* Length of the range, computed as `end - start`.  NOTE(review): nothing
   visible here guarantees start <= end, so behavior on an "inverted"
   range depends on `Sub_f_sub` at t_usize — confirm at call sites. *)
Definition impl__IndexRange__len (self : t_IndexRange) : t_usize :=
  Sub_f_sub (IndexRange_f_end self) (IndexRange_f_start self).

(* Iterator instance for t_IndexRange.
   - `next` is a placeholder: it always yields `Some start` and returns
     `self` unchanged (it neither advances the range nor checks `end`);
     the originally generated body — a panic — is kept commented out.
   - `size_hint` returns (len, Some len), i.e. an exact upper bound.
   - `fold` is unspecified and panics.
   The remaining class obligation is left `Admitted`. *)
Program Instance t_Iterator_538767852 : t_Iterator ((t_IndexRange)) :=
  {
    Iterator_f_Item := t_usize;
    Iterator_f_next := fun (self : t_IndexRange)=>
      (* let hax_temp_output := never_to_any (panic ("not yet implemented: specification needed"%string)) in *)
      (self,Option_Some (self.(IndexRange_f_start)));
    Iterator_f_size_hint := fun (self : t_IndexRange)=>
      let len := impl__IndexRange__len (self) in
      (len,Option_Some (len));
    Iterator_f_fold := fun {v_B : Type} {v_F : Type} `{t_Sized v_B} `{t_Sized v_F} `{t_Sized t_IndexRange} (_ : t_FnOnce v_F (v_B * t_usize)) (_ : t_FnMut v_F (v_B * t_usize)) `{_ : FnOnce_f_Output = v_B} (self : t_IndexRange) (init : v_B) (f : v_F)=>
      never_to_any (panic "not yet implemented: specification needed"%string);
  }.
Next Obligation. Admitted.

(* Instance t_ExactSizeIterator_661616782 : t_ExactSizeIterator ((t_IndexRange)) := *)
(* { *)
(* ExactSizeIterator_impl_2_f_len := fun (self : t_IndexRange)=> *)
(* impl__IndexRange__len (self); *)
(* }. *)
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Range.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Marker.
Export Core_Marker.
(* Coq model of Rust's `core::ops::Range<Idx>`: a start/end pair.  The
   `t_Sized Idx` evidence is an implicit parameter of the record type
   itself. *)
Record t_Range (v_Idx : Type) `{t_Sized (v_Idx)} : Type := {
  Range_f_start : v_Idx;
  Range_f_end : v_Idx;
}.
(* Constructor takes Idx explicitly and the Sized evidence implicitly;
   both projections take everything implicitly. *)
Arguments Build_t_Range (_) {_}.
Arguments Range_f_start {_} {_}.
Arguments Range_f_end {_} {_}.
(* coq-record-update support, enabling the <| field := v |> update syntax. *)
#[export] Instance settable_t_Range `{v_Idx : Type} `{t_Sized (v_Idx)} : Settable _ := settable! (Build_t_Range v_Idx) .
================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Option.v ================================================
(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Clone.
Export Core_Clone.
From Core Require Import Core_Marker (t_Sized).
Export Core_Marker (t_Sized).
From Core Require Import Core_Panicking (panic).
Export Core_Panicking (panic).
From Core Require Import Core_Ops_Function.
Export Core_Ops_Function.
(* Coq model of Rust's `core::option::Option<T>`; the payload type must
   carry `t_Sized` evidence. *)
Inductive t_Option (v_T : Type) `{t_Sized (v_T)} : Type :=
| Option_None
| Option_Some : v_T -> _.
Arguments Option_None {_} {_}.
Arguments Option_Some {_} {_}.
(* `Clone` for Option: clone the payload of `Some`, keep `None` as-is. *)
Instance t_Clone_390068633 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Option ((v_T)))) := {
  Clone_f_clone := fun (self : t_Option ((v_T)))=>
    match self with
    | Option_Some (x) => Option_Some (Clone_f_clone (x))
    | Option_None => Option_None
    end;
}.
(* Rust's `Option::is_some`: true exactly on the `Some` constructor. *)
Definition impl_1__is_some `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) : bool :=
  match self with
  | Option_Some (_) => true
  | _ => false
  end.
(* `Option::map`: apply the closure `f` under `Some`.  Defined with `Program`
   because applying `FnOnce_f_call_once` requires the `FnOnce_f_Output = v_U`
   equation to typecheck; the `_` hole is discharged in the obligation below. *)
Program Definition impl__map `{v_T : Type} `{v_U : Type} `{v_F : Type} `{t_Sized (v_T)} `{t_Sized (v_U)} `{t_Sized (v_F)} `{t_FnOnce (v_F) ((v_T))} `{_.(FnOnce_f_Output) = v_U} (self : t_Option ((v_T))) (f : v_F) : t_Option ((v_U)) :=
  match self with
  | Option_Some (x) => Option_Some _ (* (FnOnce_f_call_once (f) ((x))) *)
  | Option_None => Option_None
  end.
Next Obligation.
  (* Fill the hole with the actual call of `f` on the payload `x`. *)
  refine (FnOnce_f_call_once (f) ((x))).
Defined.
Fail Next Obligation.

(* Definition unwrap_failed '(_ : unit) : t_Never := *)
(* panic ("called `Option::unwrap()` on a `None` value"%string). *)
(* Definition impl_1__unwrap `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) `{impl_1__is_some (self___) = true} : v_T := *)
(* match self with *)
(* | Option_Some (val) => *)
(* val *)
(* | Option_None => *)
(* never_to_any (unwrap_failed (tt)) *)
(* end. *)
(* Definition expect_failed (msg : string) : t_Never := *)
(* panic (msg). *)
(* Definition impl_1__expect `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) (msg : string) : v_T := *)
(* match self with *)
(* | Option_Some (val) => *)
(* val *)
(* | Option_None => *)
(* never_to_any (expect_failed (msg)) *)
(* end. *)

================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Panicking.v
================================================

(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)

(* The uninhabited "never" type, modelling Rust's `!`. *)
Inductive t_Never : Type := .
Definition t_Never_cast_to_repr (x : t_Never) : t_Never := match x with end.
(* Ex falso: from an (impossible) inhabitant of t_Never, produce any type. *)
Definition never_to_any `{v_T : Type} (x : t_Never) : v_T := (match x with end).
(* NOTE(review): `panic` takes an implicit witness `HFalse : t_Never` — an
   inhabitant of the empty type — so it can only be invoked in dead code or
   under contradictory hypotheses; the message `expr` is ignored at the
   term level. *)
Definition panic (expr : string) {HFalse : t_Never} : t_Never := never_to_any HFalse.
Definition panic_explicit '(_ : unit) `{HFalse : t_Never} : t_Never := never_to_any HFalse. ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Ops. Export Core_Ops. From Core Require Import Core_Cmp. Export Core_Cmp. From Core Require Import Core_Base. Export Core_Base. (* From Core Require Import Core_Base_Number_conversion. *) (* Export Core_Base_Number_conversion. *) From Core Require Import Core_Base_interface_Int. Export Core_Base_interface_Int. From Core Require Import Core_Array_Rec_bundle_579704328. Export Core_Array_Rec_bundle_579704328. Notation "'t_Slice'" := (t_Slice). Notation "'Slice_f_v'" := (Slice_f_v). (* Notation "'impl_2'" := (impl_2). *) Notation "'t_Array'" := (t_Array). Notation "'Array_f_v'" := (Array_f_v). Notation "'impl_3__cast'" := (cast). Notation "'t_i128'" := (t_i128). Notation "'i128_0'" := (i128_0). (* Notation "'impl_25'" := (impl_25). *) Notation "'t_i16'" := (t_i16). Notation "'i16_0'" := (i16_0). (* Notation "'impl_19'" := (impl_19). *) Notation "'t_i32'" := (t_i32). Notation "'i32_0'" := (i32_0). (* Notation "'impl_21'" := (impl_21). *) Notation "'t_i64'" := (t_i64). Notation "'i64_0'" := (i64_0). (* Notation "'impl_23'" := (impl_23). *) Notation "'t_i8'" := (t_i8). Notation "'i8_0'" := (i8_0). (* Notation "'impl_17'" := (impl_17). *) Notation "'t_isize'" := (t_isize). Notation "'isize_0'" := (isize_0). (* Notation "'impl_27'" := (impl_27). *) (* NotImplementedYet *) (* NotImplementedYet *) Notation "'t_u128'" := (t_u128). 
Notation "'u128_0'" := (u128_0). Notation "'t_u16'" := (t_u16). Notation "'u16_0'" := (u16_0). Notation "'t_u32'" := (t_u32). Notation "'u32_0'" := (u32_0). Notation "'t_u64'" := (t_u64). Notation "'u64_0'" := (u64_0). Notation "'t_u8'" := (t_u8). Notation "'u8_0'" := (u8_0). Notation "'t_usize'" := (t_usize). Notation "'usize_0'" := (usize_0). (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) (* Notation "'impl_1'" := (impl_1). *) (* Notation "'impl_5'" := (impl_5). *) (* Notation "'impl_7'" := (impl_7). *) (* Notation "'impl_9'" := (impl_9). *) (* Notation "'impl_11'" := (impl_11). *) (* Notation "'impl_13'" := (impl_13). *) (* Notation "'impl_15'" := (impl_15). *) (* Notation "'impl'" := (impl). *) (* Notation "'impl_29'" := (impl_29). *) (* Notation "'impl_30'" := (impl_30). *) (* Notation "'impl_31'" := (impl_31). *) (* Notation "'impl_32'" := (impl_32). *) (* Notation "'impl_33'" := (impl_33). *) (* Notation "'impl_34'" := (impl_34). *) (* Notation "'impl_35'" := (impl_35). *) (* Notation "'impl_36'" := (impl_36). *) (* Notation "'impl_37'" := (impl_37). *) (* Notation "'impl_38'" := (impl_38). *) (* Notation "'impl_39'" := (impl_39). *) (* Notation "'impl_40'" := (impl_40). *) (* Notation "'impl_41'" := (impl_41). *) (* Notation "'impl_42'" := (impl_42). *) (* Notation "'impl_43'" := (impl_43). *) (* Notation "'impl_44'" := (impl_44). *) (* Notation "'impl_45'" := (impl_45). *) (* Notation "'impl_46'" := (impl_46). *) (* Notation "'impl_47'" := (impl_47). *) (* Notation "'impl_48'" := (impl_48). *) (* Notation "'impl_49'" := (impl_49). *) (* Notation "'impl_50'" := (impl_50). *) (* Notation "'impl_51'" := (impl_51). *) (* Notation "'impl_52'" := (impl_52). 
*) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive_Number_conversion.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Primitive. Export Core_Primitive. From Core Require Import Core_Cmp. Export Core_Cmp. From Core Require Import Core_Convert. Export Core_Convert. (* NotImplementedYet *) (* NotImplementedYet *) (* Notation "'impl_31'" := (impl_31). *) (* Notation "'impl_40'" := (impl_40). *) (* Notation "'impl'" := (impl). *) (* Notation "'impl_1'" := (impl_1). *) (* Notation "'impl_2'" := (impl_2). *) (* Notation "'impl_3'" := (impl_3). *) (* Notation "'impl_4'" := (impl_4). *) (* Notation "'impl_5'" := (impl_5). *) (* Notation "'impl_6'" := (impl_6). *) (* Notation "'impl_7'" := (impl_7). *) (* Notation "'impl_8'" := (impl_8). *) (* Notation "'impl_9'" := (impl_9). *) (* Notation "'impl_10'" := (impl_10). *) (* Notation "'impl_11'" := (impl_11). *) (* Notation "'impl_12'" := (impl_12). *) (* Notation "'impl_13'" := (impl_13). *) (* Notation "'impl_14'" := (impl_14). *) (* Notation "'impl_15'" := (impl_15). *) (* Notation "'impl_16'" := (impl_16). *) (* Notation "'impl_17'" := (impl_17). *) (* Notation "'impl_18'" := (impl_18). *) (* Notation "'impl_19'" := (impl_19). *) (* Notation "'impl_20'" := (impl_20). *) (* Notation "'impl_21'" := (impl_21). *) (* Notation "'impl_22'" := (impl_22). *) (* Notation "'impl_23'" := (impl_23). *) (* Notation "'impl_24'" := (impl_24). *) (* Notation "'impl_25'" := (impl_25). *) (* Notation "'impl_26'" := (impl_26). *) (* Notation "'impl_27'" := (impl_27). 
*) (* Notation "'impl_28'" := (impl_28). *) (* Notation "'impl_29'" := (impl_29). *) (* Notation "'impl_30'" := (impl_30). *) (* Notation "'impl_32'" := (impl_32). *) (* Notation "'impl_33'" := (impl_33). *) (* Notation "'impl_34'" := (impl_34). *) (* Notation "'impl_35'" := (impl_35). *) (* Notation "'impl_36'" := (impl_36). *) (* Notation "'impl_37'" := (impl_37). *) (* Notation "'impl_38'" := (impl_38). *) (* Notation "'impl_39'" := (impl_39). *) (* Notation "'impl_41'" := (impl_41). *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive_Number_conversion_i.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* Notation "'impl_31'" := (impl_31). *) (* Notation "'impl_40'" := (impl_40). *) From Core Require Import Core_Primitive. Export Core_Primitive. From Core Require Import Core_Cmp. Export Core_Cmp. From Core Require Import Core_Convert. Export Core_Convert. (* NotImplementedYet *) (* NotImplementedYet *) (* Notation "'impl'" := (impl). *) (* Notation "'impl_1'" := (impl_1). *) (* Notation "'impl_2'" := (impl_2). *) (* Notation "'impl_3'" := (impl_3). *) (* Notation "'impl_4'" := (impl_4). *) (* Notation "'impl_5'" := (impl_5). *) (* Notation "'impl_6'" := (impl_6). *) (* Notation "'impl_7'" := (impl_7). *) (* Notation "'impl_8'" := (impl_8). *) (* Notation "'impl_9'" := (impl_9). *) (* Notation "'impl_10'" := (impl_10). *) (* Notation "'impl_11'" := (impl_11). *) (* Notation "'impl_12'" := (impl_12). *) (* Notation "'impl_13'" := (impl_13). *) (* Notation "'impl_14'" := (impl_14). *) (* Notation "'impl_15'" := (impl_15). 
*) (* Notation "'impl_16'" := (impl_16). *) (* Notation "'impl_17'" := (impl_17). *) (* Notation "'impl_18'" := (impl_18). *) (* Notation "'impl_19'" := (impl_19). *) (* Notation "'impl_20'" := (impl_20). *) (* Notation "'impl_21'" := (impl_21). *) (* Notation "'impl_22'" := (impl_22). *) (* Notation "'impl_23'" := (impl_23). *) (* Notation "'impl_24'" := (impl_24). *) (* Notation "'impl_25'" := (impl_25). *) (* Notation "'impl_26'" := (impl_26). *) (* Notation "'impl_27'" := (impl_27). *) (* Notation "'impl_28'" := (impl_28). *) (* Notation "'impl_29'" := (impl_29). *) (* Notation "'impl_30'" := (impl_30). *) (* Notation "'impl_32'" := (impl_32). *) (* Notation "'impl_33'" := (impl_33). *) (* Notation "'impl_34'" := (impl_34). *) (* Notation "'impl_35'" := (impl_35). *) (* Notation "'impl_36'" := (impl_36). *) (* Notation "'impl_37'" := (impl_37). *) (* Notation "'impl_38'" := (impl_38). *) (* Notation "'impl_39'" := (impl_39). *) (* Notation "'impl_41'" := (impl_41). *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Result.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Option. Export Core_Option. Inductive t_Result (v_T : Type) (v_E : Type) `{t_Sized (v_T)} `{t_Sized (v_E)} : Type := | Result_Ok : v_T -> _ | Result_Err : v_E -> _. Arguments Result_Ok {_} {_} {_} {_}. Arguments Result_Err {_} {_} {_} {_}. 
(* `Result::ok`: discard the error, keeping only the success value as an
   `Option`. *)
Definition impl__ok `{v_T : Type} `{v_E : Type} `{t_Sized (v_T)} `{t_Sized (v_E)} (self : t_Result ((v_T)) ((v_E))) : t_Option ((v_T)) :=
  match self with
  | Result_Ok (x) => Option_Some (x)
  | Result_Err (_) => Option_None
  end.

================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice.v
================================================

(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
From Core Require Import Core_Primitive.
Export Core_Primitive.
From Core Require Import Core_Slice_Iter.
Export Core_Slice_Iter.
From Core Require Import Core_Convert.
Export Core_Convert.
(* NotImplementedYet *)
(* NotImplementedYet *)

(* `<[T]>::iter`: wrap the slice in a t_Iter value (clones the contents). *)
Definition impl__iter `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : t_Iter ((v_T)) :=
  impl__new (self).

(* `<[T]>::len`: length of the underlying sequence, converted into t_usize. *)
Definition impl__len `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : t_usize :=
  From_f_from (len (Clone_f_clone (Slice_f_v self))).

(* `<[T]>::is_empty`: the slice has length zero. *)
Definition impl__is_empty `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : bool :=
  PartialEq_f_eq (impl__len (self)) (Build_t_usize (Build_t_U64 0%N)).

================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Index.v
================================================

(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
(* Minimal stand-in definitions so this file typechecks without the full core
   library: all Rust machine integers are modelled as unbounded Z, and
   slices / arrays / vectors as plain Coq lists. *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
(* The length index is ignored: arrays are just lists. *)
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
(* Every type is trivially Sized / Clone in this dummy model. *)
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.
Definition t_Slice (T : Type) := list T.
Definition unsize {T : Type} : list T -> t_Slice T := id.
(* Z-specific boolean equality (`=?`); only usable at integer types. *)
Definition t_PartialEq_f_eq x y := x =? y.
Definition t_Rem_f_rem (x y : Z) := x mod y.
(* Assertions are erased: the boolean is ignored. *)
Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.
Inductive globality := | t_Global.
Definition t_Vec T (_ : globality) : Type := list T.
(* NOTE(review): Rust's `Vec::append` leaves the second vector empty, but here
   the second component is returned unchanged (`l2`) — confirm callers only
   use the first component. *)
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).
Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).
Definition impl__new {A} (_ : Datatypes.unit) : list A := nil.
(* Capacity hints are meaningless for lists; both constructors return nil. *)
Definition impl__with_capacity {A} (_ : Z) : list A := nil.
(* NOTE(review): conses at the FRONT, whereas Rust's `Vec::push` appends at
   the back — element order is reversed relative to Rust; verify downstream
   proofs do not depend on ordering. *)
Definition impl_1__push {A} l (x : A) := cons x l.
Class t_From (A B : Type) := { From_f_from : B -> A }.
Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.
Class t_Into (A B : Type) := { Into_f_into : A -> B }.
(* Blanket instance: any `From B A` yields `Into A B`, as in Rust. *)
Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.
(* `vec![x; l]`: a list of `l` copies of `x`. *)
Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).
Definition t_Option := option.
Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B :=
  match x with
  | Some x => Some (f x)
  | None => None
  end.
Definition t_Add_f_add x y := x + y.
Class Cast A B := { cast : A -> B }.
Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core (t_ops). Export Core (t_ops). From Core Require Import Core_Primitive (t_Slice). Export Core_Primitive (t_Slice). (* NotImplementedYet *) Notation "'v_SliceIndex'" := (v_SliceIndex). Notation "'impl'" := (impl). Notation "'impl_2'" := (impl_2). Notation "'impl_1'" := (impl_1). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Index_Private_slice_index.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) (* TODO: Replace this dummy lib with core lib *) Class t_Sized (T : Type) := { }. Definition t_u8 := Z. Definition t_u16 := Z. Definition t_u32 := Z. Definition t_u64 := Z. Definition t_u128 := Z. Definition t_usize := Z. Definition t_i8 := Z. Definition t_i16 := Z. Definition t_i32 := Z. Definition t_i64 := Z. Definition t_i128 := Z. Definition t_isize := Z. Definition t_Array T (x : t_usize) := list T. Definition t_String := string. Definition ToString_f_to_string (x : string) := x. Instance Sized_any : forall {t_A}, t_Sized t_A := {}. Class t_Clone (T : Type) := { Clone_f_clone : T -> T }. Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. 
Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. (* / dummy lib *) From Core Require Import Core_Slice_Index_Ops. Export Core_Slice_Index_Ops. Notation "'v_Sealed'" := (v_Sealed). Notation "'impl'" := (impl). Notation "'impl_1'" := (impl_1). Notation "'impl_2'" := (impl_2). Notation "'impl_3'" := (impl_3). Notation "'impl_4'" := (impl_4). Notation "'impl_5'" := (impl_5). Notation "'impl_6'" := (impl_6). Notation "'impl_7'" := (impl_7). Notation "'impl_8'" := (impl_8). ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Iter.v ================================================ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. (* From Core Require Import Core. *) From Core Require Import Core_Marker. Export Core_Marker. 
From Core Require Import Core_Primitive.
Export Core_Primitive.

(* Model of Rust's slice iterator `core::slice::Iter<'a, T>`: the slice data
   plus a phantom marker for the element type. *)
Record t_Iter (v_T : Type) `{t_Sized (v_T)} : Type :=
  { Iter_f_data : t_Slice ((v_T));
    Iter_f__marker : t_PhantomData ((v_T)); }.
Arguments Build_t_Iter {_} {_}.
Arguments Iter_f_data {_} {_}.
Arguments Iter_f__marker {_} {_}.
(* coq-record-update support for the `<| ... |>` update syntax. *)
#[export] Instance settable_t_Iter `{v_T : Type} `{t_Sized (v_T)} : Settable _ :=
  settable! (Build_t_Iter) .

(* Build an iterator from a slice by cloning its contents. *)
Definition impl__new `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (slice : t_Slice ((v_T))) : t_Iter ((v_T)) :=
  Build_t_Iter (Clone_f_clone (slice)) (Build_t_PhantomData).

(* Clone instance: clone the data; the phantom marker carries no state. *)
Instance t_Clone_313886898 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Iter ((v_T)))) :=
  { Clone_f_clone := fun (self : t_Iter ((v_T)))=>
      Build_t_Iter (Clone_f_clone (Iter_f_data self)) (Iter_f__marker self); }.

(* NotImplementedYet *)

================================================
FILE: hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Iter_Macros.v
================================================

(* File automatically generated by Hacspec *)
From Coq Require Import ZArith.
Require Import List.
Import List.ListNotations.
Open Scope Z_scope.
Open Scope bool_scope.
Require Import Ascii.
Require Import String.
Require Import Coq.Floats.Floats.
From RecordUpdate Require Import RecordSet.
Import RecordSetNotations.
(* From Core Require Import Core. *)
(* TODO: Replace this dummy lib with core lib *)
(* Duplicate of the minimal stand-in library used by the other Slice files:
   machine integers as Z, arrays/slices as lists. *)
Class t_Sized (T : Type) := { }.
Definition t_u8 := Z.
Definition t_u16 := Z.
Definition t_u32 := Z.
Definition t_u64 := Z.
Definition t_u128 := Z.
Definition t_usize := Z.
Definition t_i8 := Z.
Definition t_i16 := Z.
Definition t_i32 := Z.
Definition t_i64 := Z.
Definition t_i128 := Z.
Definition t_isize := Z.
Definition t_Array T (x : t_usize) := list T.
Definition t_String := string.
Definition ToString_f_to_string (x : string) := x.
Instance Sized_any : forall {t_A}, t_Sized t_A := {}.
Class t_Clone (T : Type) := { Clone_f_clone : T -> T }.
Instance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}. Definition t_Slice (T : Type) := list T. Definition unsize {T : Type} : list T -> t_Slice T := id. Definition t_PartialEq_f_eq x y := x =? y. Definition t_Rem_f_rem (x y : Z) := x mod y. Definition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt. Inductive globality := | t_Global. Definition t_Vec T (_ : globality) : Type := list T. Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. Definition impl__with_capacity {A} (_ : Z) : list A := nil. Definition impl_1__push {A} l (x : A) := cons x l. Class t_From (A B : Type) := { From_f_from : B -> A }. Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x. Class t_Into (A B : Type) := { Into_f_into : A -> B }. Instance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }. Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). Definition t_Option := option. Definition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end. Definition t_Add_f_add x y := x + y. Class Cast A B := { cast : A -> B }. Instance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}. 
(* / dummy lib *) (* NotImplementedYet *) (* NotImplementedYet *) (* NotImplementedYet *) ================================================ FILE: hax-lib/proof-libs/coq/coq/generated-core/src/_CoqProject ================================================ -R ./ TODO -arg -w -arg all Core_Slice_Iter_Macros.v Core_Slice_Iter.v Core_Slice_Index_Private_slice_index.v Core_Slice_Index.v Core_Slice.v Core_Result.v Core_Primitive_Number_conversion_i.v Core_Primitive_Number_conversion.v Core_Primitive.v Core_Panicking.v Core_Option.v Core_Ops_Range.v Core_Ops_Index_range.v Core_Ops_Index.v Core_Ops_Function.v Core_Ops_Bit_Impls_for_prims.v Core_Ops_Bit.v Core_Ops_Arith_Impls_for_prims.v Core_Ops_Arith.v Core_Ops.v Core_Num_Uint_macros.v Core_Num_Int_macros.v Core_Num.v Core_Marker.v Core_Iter_Traits_Marker.v Core_Iter_Traits_Iterator.v Core_Iter_Traits_Exact_size.v Core_Iter_Traits_Collect.v Core_Iter_Traits.v Core_Iter_Range.v Core_Iter.v Core_Intrinsics.v Core_Fmt.v Core_Convert.v Core_Cmp.v Core_Clone.v Core_Base_interface_Int_U8_proofs.v Core_Base_interface_Int_U64_proofs.v Core_Base_interface_Int_U32_proofs.v Core_Base_interface_Int_U16_proofs.v Core_Base_interface_Int_U128_proofs.v Core_Base_interface_Int_I8_proofs.v Core_Base_interface_Int_I64_proofs.v Core_Base_interface_Int_I32_proofs.v Core_Base_interface_Int_I16_proofs.v Core_Base_interface_Int_I128_proofs.v Core_Base_interface_Int.v Core_Base_interface_Coerce.v Core_Base_interface.v Core_Base_Z.v Core_Base_Spec_Z.v Core_Base_Spec_Unary.v Core_Base_Spec_Seq.v Core_Base_Spec_Haxint.v Core_Base_Spec_Constants.v Core_Base_Spec_Binary_Positive.v Core_Base_Spec_Binary_Pos.v Core_Base_Spec_Binary.v Core_Base_Spec.v Core_Base_Seq.v Core_Base_Pos.v Core_Base_Number_conversion.v Core_Base_Binary.v Core_Base.v Core_Array_Rec_bundle_579704328.v Core_Array_Iter.v Core_Array.v Core.v ================================================ FILE: hax-lib/proof-libs/coq/ssprove/.gitignore ================================================ 
*.vo*
*.aux
*.glob
*.cache
.Makefile.d
Makefile
Makefile.conf
src/_temp/

================================================
FILE: hax-lib/proof-libs/coq/ssprove/README.md
================================================

## Dependencies

The Coq libraries use `ssprove/jasmin` for machine signed and unsigned integer modulo arithmetic, and `coqword` for finite field arithmetic with a prime modulus (to support hacspec's `nat_mod p` type).

This requires adding the following opam repository:

```
opam repo add coq-released https://coq.inria.fr/opam/released --all-switches
```

Then one can install the dependencies through `opam` (assuming you have Coq installed through opam):

```
opam update
opam install conf-ppl.1 -y
opam install coq-mathcomp-word.2.0 -y
opam pin jasmin https://github.com/SSProve/ssprove.git#3d40bc89 -y
opam pin ssprove https://github.com/SSProve/ssprove.git#bead4e76acbb69b3ecf077cece56cd3fbde501e3 -y
opam upgrade -y
```

The development uses the Jasmin branch of SSProve, so one might need to install these from source.

## Docker

There is a Docker container with the dependencies installed (Coq / Rust) at `ghcr.io/cmester0/hacspec_ssprove:8.15.2`.

## Compiling the coq files

In folder `/coq_ssprove`, type `make`. This compiles the Coq libraries and the compiled examples, as defined in `_CoqProject`.

If you want to add a new example to `_CoqProject`, such that it is compiled through `make`, you should run `coq_makefile -f _CoqProject -o Makefile` in `/coq` to update the Makefile.
================================================ FILE: hax-lib/proof-libs/coq/ssprove/_CoqProject ================================================ -R src/ Hacspec -arg -w -arg all src/Hacspec_Lib_Comparable.v src/LocationUtility.v src/ChoiceEquality.v src/Hacspec_Lib_Pre.v src/Hacspec_Lib_Integers.v src/Hacspec_Lib_Loops.v src/Hacspec_Lib_Seq.v src/Hacspec_Lib_Natmod.v src/Hacspec_Lib_Coercions.v src/Hacspec_Lib_Eq.v src/Hacspec_Lib_Monad.v src/Hacspec_Lib_Ltac.v src/Hacspec_Lib_Controlflow.v src/Hacspec_Lib_Notation.v src/Hacspec_Lib_TODO.v src/ConCertLib.v src/Hacspec_Lib.v # src/Hacspec_Aes_Jazz.v # src/Hacspec_Xor.v # src/Hacspec_Aes.v # src/Hacspec_Bls12_381.v # src/Hacspec_Poly1305.v # src/Hacspec_Curve25519.v # src/Hacspec_Gf128.v # src/Hacspec_P256.v # src/Hacspec_Sha256.v ================================================ FILE: hax-lib/proof-libs/coq/ssprove/coq-hacspec-ssprove.opam.template ================================================ pin-depends: [ ["jasmin.dev" "git+https://github.com/proux01/jasmin.git#mathcomp2"] ["ssprove.dev" "git+https://github.com/ssprove/ssprove.git#jasmin-coq.8.18.0"] ["coq-concert.dev" "git+https://github.com/AU-COBRA/ConCert.git#master"] ["coq-rust-extraction.dev" "git+https://github.com/AU-COBRA/coq-rust-extraction.git#0053733e56008c917bf43d12e8bf0616d3b9a856"] ["coq-elm-extraction.dev" "git+https://github.com/AU-COBRA/coq-elm-extraction.git#903320120e3f36d7857161e5680fabeb6e743c6b"] ["coq-quickchick.dev" "git+https://github.com/4ever2/QuickChick.git#bc61d58045feeb754264df9494965c280e266e1c"] ] ================================================ FILE: hax-lib/proof-libs/coq/ssprove/docker_build/Dockerfile ================================================ FROM coqorg/coq:8.15.2-ocaml-4.14.0-flambda RUN curl https://sh.rustup.rs -sSf | sh -s -- -y ENV PATH $HOME/.cargo/bin:$PATH RUN rustup update RUN rustup toolchain install nightly-2022-07-04 RUN rustup component add --toolchain nightly-2022-07-04 rustc-dev llvm-tools-preview 
rust-analysis rust-src RUN rustc --version RUN cargo --version RUN sudo apt-get update RUN sudo apt-get install libppl-dev -y RUN sudo apt-get install libmpfr-dev -y RUN opam update RUN opam switch create 4.12.0 RUN eval $(opam env --switch=4.12.0) RUN opam config list; opam repo list; opam list RUN opam repo add coq-released https://coq.inria.fr/opam/released --all-switches RUN opam update RUN opam pin coq 8.15.2 -y RUN eval $(opam env) RUN git clone https://github.com/jasmin-lang/jasmin.git RUN git clone https://github.com/SSProve/ssprove.git RUN cd jasmin && git checkout 3d40bc89 && cd .. RUN opam install -y --verbose ./jasmin/. --working-dir RUN eval $(opam env) RUN cd ssprove && git checkout jasmin && cd .. RUN opam upgrade -y RUN (opam install -y --verbose ./ssprove/ssprove.opam --working-dir) || echo "failed" RUN cd ssprove && make -j7 && opam install -y --verbose ./ssprove.opam --working-dir --assume-built ================================================ FILE: hax-lib/proof-libs/coq/ssprove/src/ChoiceEquality.v ================================================ From Coq Require Import ZArith List. From Crypt Require Import choice_type Package. Import PackageNotation. From Crypt Require Import pkg_interpreter. From extructures Require Import ord fset fmap. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import LocationUtility. Require Import Coq.Logic.FunctionalExtensionality. Import RulesStateProb. Import RulesStateProb.RSemanticNotation. Open Scope rsemantic_scope. From Crypt Require Import choice_type Package Prelude. From Crypt Require Import Axioms. (* proof_irrelevance *) Import PackageNotation. From extructures Require Import ord fset fmap. Import choice.Choice.Exports. Import List.ListNotations. From mathcomp Require Import ssrbool. 
(*** Ltac *)

(* Normalize finite-set (fset) expressions in the goal: unfold definitions,
   rewrite cons/empty/singleton forms into unions, then reassociate unions to
   the right and collapse duplicated sub-unions via fsetUA/fsetUid. *)
Ltac normalize_fset :=
  hnf ;
  autounfold with * ;
  try rewrite !fset_cons ;
  try rewrite <- !fset0E ;
  try rewrite !fsetU0 ;
  try rewrite !fset0U ;
  try rewrite !fset1E ;
  repeat (match goal with
          | |- context [?a :|: ?b :|: ?c] =>
              replace (a :|: b :|: c) with (a :|: (b :|: c)) by apply fsetUA
          end
          || match goal with
             | |- context [?a :|: (?a :|: ?b)] =>
                 rewrite (fsetUA a a b) ;
                 rewrite (fsetUid a)
             end
          || match goal with
             | |- context [?a :|: (?b :|: (?a :|: (?b :|: ?c)))] =>
                 rewrite (fsetUA a b (a :|: (b :|: c))) ;
                 rewrite (fsetUA a b c) ;
                 rewrite (fsetUA (a :|: b) (a :|: b) c) ;
                 rewrite (fsetUid (a :|: b))
             end).

(* Close a goal of the shape `fsubset a (a :|: _)`, `fsubset a (_ :|: a)`,
   `fsubset fset0 _` or `fsubset a a` with the matching ssreflect lemma. *)
Ltac solve_match :=
  try set (fset _) ;
  (lazymatch goal with
   | |- context [ fsubset ?a (?a :|: _) ] => apply fsubsetUl
   | |- context [ fsubset ?a (_ :|: ?a) ] => apply fsubsetUr
   | |- context [ fsubset fset0 _ ] => apply fsub0set
   | |- context [ fsubset ?a ?a ] => apply fsubsetxx
   end).

(* Split a conjunction / union-on-the-left fsubset goal into atomic subgoals. *)
Ltac split_fsubset_lhs :=
  repeat (rewrite !is_true_split_and || rewrite !fsubUset) ;
  repeat (try rewrite !andb_true_intro ; split).

(* Solve one atomic fsubset goal, searching through the right-hand union
   (choose the left branch via solve_match, or descend right). *)
Ltac solve_single_fset_fsubset :=
  repeat (solve_match || apply fsubsetU ; rewrite is_true_split_or ; (left ; solve_match) || right).

(* Full pipeline: normalize, split, then discharge each atomic subgoal. *)
Ltac solve_is_true := now normalize_fset ; split_fsubset_lhs ; solve_single_fset_fsubset.

(* Reassociate all unions in the goal to the right (despite the name). *)
Ltac left_assoc :=
  repeat (match goal with
          | |- context [?a :|: ?b :|: ?c] =>
              replace (a :|: b :|: c) with (a :|: (b :|: c)) by apply fsetUA
          end).

(* Dispatch on the two ways an fsubset goal can be phrased (`is_true` coercion
   vs. explicit `= true`) and solve it. *)
Ltac solve_in_fset :=
  match goal with
  | [ |- context [ is_true (fsubset _ _) ] ] => solve_is_true
  | [ |- context [ fsubset _ _ = true ] ] => solve_is_true
  end.

(* Prove fset equality by antisymmetry: reduce to two fsubset goals. *)
Ltac solve_fset_eq :=
  apply (ssrbool.elimT eqtype.eqP) ;
  rewrite eqEfsubset ;
  rewrite is_true_split_and ;
  split ;
  solve_in_fset.
Ltac fset_equality := repeat match goal with | H : fsubset (?x :|: ?y) ?z = true |- _ => rewrite fsubUset in H ; apply andb_prop in H ; destruct H end ; match goal with | [ |- context [ @eq (fset_of _) _ _ ] ] => solve_fset_eq | [ |- context [ @eq Interface _ _ ] ] => solve_fset_eq | [ |- context [ @Logic.eq (fset_of _) _ _ ] ] => solve_fset_eq | [ |- context [ @Logic.eq Interface _ _ ] ] => solve_fset_eq end. Notation "prod_ce( a , b )" := ((a , b) : chProd _ _) : hacspec_scope. Notation "prod_ce( a , b , .. , c )" := ((.. ((a , b) : chProd _ _) .. , c) : chProd _ _) : hacspec_scope. Definition lift_to_code {ce L I} (x : choice.Choice.sort ce) : code L I ce := {code ret x}. Definition pre_to_post (P : precond) {A} : postcond A A := fun '(a, h₀) '(b, h₁) => a = b /\ P (h₀ , h₁). Definition pre_to_post_ret (P : precond) {A} v : postcond A A := fun '(a, h₀) '(b, h₁) => (a = b /\ b = v) /\ P (h₀ , h₁). Definition true_precond : precond := fun _ => True. Theorem forget_precond {B} (x y : raw_code B) P Q : ⊢ ⦃ true_precond ⦄ x ≈ y ⦃ Q ⦄ -> ⊢ ⦃ P ⦄ x ≈ y ⦃ Q ⦄. Proof. intros. now apply (rpre_weaken_rule _ _ _ H). Qed. Section Both. Context (A : choice_type). Class raw_both := { is_pure : choice.Choice.sort A ; is_state : raw_code A ; }. Arguments is_pure raw_both. Arguments is_state raw_both. Inductive valid_both : forall (b : raw_both), Prop := | both_valid_ret : forall x, valid_both {| is_pure := x ; is_state := ret x |}. Class ValidBoth (p : raw_both) := { is_valid_code : ValidCode fset0 fset0 (@is_state p) ; is_valid_both : @valid_both p ; }. Arguments is_valid_code {_} ValidBoth. Arguments is_valid_both {_} ValidBoth. Record both : Type := mk2prog { both_prog :> raw_both ; both_prog_valid : @ValidBoth both_prog ; p_eq : forall P, ⊢ ⦃ P ⦄ (@is_state both_prog) ≈ ret (@is_pure both_prog) ⦃ pre_to_post_ret P (@is_pure both_prog) ⦄ ; }. Arguments both_prog b. Arguments both_prog_valid b. Arguments p_eq b. End Both. Arguments is_pure {_} raw_both. 
(* Make the type argument implicit on all the Both-section projections. *)
Arguments is_state {_} raw_both. Arguments valid_both {_}. Arguments both_valid_ret {_}. Arguments ValidBoth {_} p. Arguments is_valid_code {_} {_} ValidBoth. Arguments is_valid_both {_} {_} ValidBoth. Arguments both_prog {_} b. Arguments both_prog_valid {_} b. Arguments p_eq {_} b.

Section Both_helper.
(* Eta-expanding a raw_both record preserves valid_both. *)
Lemma valid_both_eta : forall {A : choice_type} {x : raw_both A}, valid_both x -> valid_both {| is_pure := is_pure x ; is_state := is_state x |}.
Proof. now intros ? [] ?. Defined.
(* Eta-expanding a both value preserves ValidBoth. *)
Lemma ValidBoth_eta : forall {A : choice_type} {x : both A}, ValidBoth x -> ValidBoth {| is_pure := is_pure x ; is_state := is_state x |}.
Proof. now intros ? [[] ? ?] ?. Defined.

(* Monadic bind on raw pairs: pure parts compose by plain application,
   state parts by the bind of raw_code. *)
Definition bind_raw_both {A B} (c : raw_both A) (k : A -> raw_both B) : raw_both B := {| is_pure := let x := (is_pure c) in is_pure (k x) ; is_state := bind (is_state c) (fun x => is_state (k x)) |}.

(* bind_raw_both preserves the valid_both invariant. *)
Lemma valid_bind_both_ : forall A B c k, valid_both c -> (forall x, valid_both {| is_pure := is_pure (k x) ; is_state := is_state (k x) |}) -> valid_both (@bind_raw_both A B c k).
Proof. intros A B c k Hc Hk. induction Hc ; intros. apply Hk. Qed.

(* bind_raw_both preserves full validity (code validity + agreement). *)
Lemma valid_bind_both : forall A B c k, ValidBoth c -> (forall x, ValidBoth (k x)) -> ValidBoth (@bind_raw_both A B c k).
Proof. intros A B c k Hc Hk. constructor ; simpl. - apply valid_bind. apply (is_valid_code Hc). apply (fun x => is_valid_code (Hk x)). - eapply valid_bind_both_. apply (is_valid_both Hc). intros. apply valid_both_eta. apply (fun x => is_valid_both (Hk x)). Qed.

(* Monadic return on raw pairs. *)
Definition both_ret {A : choice_type} (x : A) : raw_both A := {| is_pure := x ; is_state := ret x |} .
(* both_ret is always valid. *)
Program Definition both_ret_valid {A : choice_type} (x : A) : ValidBoth (both_ret x) := {| is_valid_code := valid_ret _ _ _ ; is_valid_both := both_valid_ret _ |} . Fail Next Obligation.
End Both_helper.
(* Monadic return for the packaged `both` type, all proofs supplied inline. *)
Program Definition ret_both {A : choice_type} (x : A) : both A := {| both_prog := {| is_pure := x ; is_state := ret x |} ; both_prog_valid := {| is_valid_code := valid_ret fset0 fset0 x ; is_valid_both := both_valid_ret x ; |} ; p_eq := fun P => r_ret _ _ _ _ _ ; |}. Fail Next Obligation.

(* Name the `both` value (Hx), the bind continuation (Hf) and the RHS
   function (Hg) of a bind-shaped relational goal, abstracting the RHS over
   the pure value of Hx. *)
Ltac pattern_both Hx Hf Hg := (match goal with | [ |- context [ @is_state _ ?x : both _ _ _ ] ] => set (Hx := x) ; try change (@is_pure _ _) with (@is_pure _ Hx) ; match goal with | [ |- context [ ⊢ ⦃ _ ⦄ bind _ ?fb ≈ ?os ⦃ _ ⦄ ] ] => let H := fresh in set (H := os) ; pattern (@is_pure _ Hx) in H ; subst H ; set (Hf := fb) ; match goal with | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] => set (Hg := gb) end end end).
(* pattern_both with machine-generated fresh names. *)
Ltac pattern_both_fresh := let x := fresh in let y := fresh in let z := fresh in pattern_both x y z.

(* Sequencing rule: if x is equivalent to `ret y` (up to an intermediate
   precondition P_mid), a bind on x may be replaced by applying the
   continuation directly at y. *)
Theorem r_bind_trans : forall {B C : choice_type} (f : choice.Choice.sort B -> raw_code C) (g : choice.Choice.sort B -> raw_code C) (x : raw_code B) (y : choice.Choice.sort B), forall (P P_mid : precond) (Q : postcond (choice.Choice.sort C) (choice.Choice.sort C)), forall (H_x_is_y : ⊢ ⦃ P ⦄ x ≈ ret y ⦃ pre_to_post_ret P_mid (y) ⦄), (⊢ ⦃ P_mid ⦄ f (y) ≈ g y ⦃ Q ⦄) -> ⊢ ⦃ P ⦄ temp ← x ;; f temp ≈ g y ⦃ Q ⦄.
Proof. intros. replace (g y) with (temp ← ret y ;; g temp) by reflexivity. pose @r_bind. specialize r with (f₀ := f) (f₁ := fun x => g x). specialize r with (m₀ := x) (m₁ := (ret y)). specialize r with (pre := P) (mid := pre_to_post_ret P_mid y ) (post := Q). apply r ; clear r. - apply H_x_is_y. - intros. eapply rpre_hypothesis_rule. intros ? ? [[] ?]. subst. eapply rpre_weaken_rule. cbn in H2. subst. apply H. intros ? ? []. subst. apply H2. Qed.

(* Specialisation of r_bind_trans to a `both` value: binding on its state
   part may be replaced by its pure part, using the packaged p_eq proof. *)
Theorem r_bind_trans_both : forall {B C : choice_type} {f : choice.Choice.sort B -> raw_code C} {g : choice.Choice.sort B -> raw_code C} (b : both B), forall (P : precond) (Q : postcond _ _), (⊢ ⦃ true_precond ⦄ f ((is_pure b)) ≈ g (is_pure b) ⦃ Q ⦄) -> ⊢ ⦃ P ⦄ temp ← is_state b ;; f temp ≈ g (is_pure b) ⦃ Q ⦄.
Proof. intros. apply r_bind_trans with (P_mid := true_precond). eapply rpre_weaken_rule. apply p_eq. reflexivity. apply H. Qed.

(* Abstract the goal with pattern_both and apply r_bind_trans_both. *)
Ltac match_bind_trans_both := let Hx := fresh in let Hf := fresh in let Hg := fresh in pattern_both Hx Hf Hg ; apply (@r_bind_trans_both) with (b := Hx) (f := Hf) (g := Hg) ; intros ; subst Hf ; subst Hg ; subst Hx ; hnf.
(* Step through a bind whose head is the both value a, using its p_eq. *)
Ltac r_bind_both a := eapply r_bind ; [ apply (p_eq a) | ] ; intros ; apply rpre_hypothesis_rule ; intros ? ? [[] []] ; subst ; apply forget_precond.
(* Rewrite the RHS into a bind on ret, then step with r_bind_both. *)
Ltac r_subst_both a := let x := fresh in let y := fresh in let z := fresh in pattern_both x y z ; change (z _) with (temp ← ret (is_pure x) ;; z temp) ; r_bind_both a ; subst x y z ; hnf.

(* Monadic bind for the packaged `both` type; the p_eq obligation is
   discharged by composing the p_eq proofs of c and k. *)
Program Definition bind_both {A B} (c : both A) (k : A -> both B) : both B := {| both_prog := bind_raw_both (both_prog c) (fun x => both_prog (k x)) ; both_prog_valid := valid_bind_both A B c k (both_prog_valid c) (fun x => both_prog_valid (k x)) ; |}.
Next Obligation. intros. let x := fresh in let y := fresh in let z := fresh in pattern_both x y z ; change (z _) with (temp ← ret (is_pure x) ;; z temp). eapply r_bind ; [ apply (p_eq _) | ]. intros ; apply rpre_hypothesis_rule. intros ? ? [[]]. eapply rpre_weaken_rule. 2:{ simpl ; intros ? ? []. subst. apply H4. } subst a₀ a₁ ; hnf. apply (k (is_pure c)). Qed.

(* Two both values are equal whenever their raw programs are: the proof
   components are irrelevant (proof_irrelevance). *)
Lemma both_eq : forall {A : choice_type} (a b : both A), both_prog a = both_prog b -> a = b.
Proof. intros. destruct a , b. cbn in *. subst. f_equal ; apply proof_irrelevance. Qed.

(* Left identity law: bind over ret reduces to plain application. *)
Lemma bind_ret_both : forall {A B : choice_type} (f : A -> both B) (x : A), (bind_both (ret_both x) f) = f x.
Proof. intros. apply both_eq. simpl. unfold bind_raw_both. simpl. destruct (f x). simpl. destruct both_prog0. simpl. reflexivity. Qed.

(* Identity repackaging of a both value; used by the solve_lift notation. *)
Definition lift_both {A} (x : both A) : both A := {| both_prog := x ; both_prog_valid := (both_prog_valid x) ; p_eq := p_eq x |}.
Notation "'solve_lift' x" := (lift_both x) (at level 100).
(* Map a unary pure function over a both value. *)
Equations lift1_both {A B : choice_type} (f : A -> B) (x : both A) : both B := lift1_both f x := bind_both x (fun x' => solve_lift (ret_both (f x'))). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* Map a binary pure function over two both values. *)
Equations lift2_both {A B C : choice_type} (f : A -> B -> C) (x : both A) (y : both B) : both C := lift2_both f x y := bind_both x (fun x' => bind_both y (fun y' => solve_lift (ret_both (f x' y')))). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* Map a ternary pure function over three both values, via lift2_both. *)
Equations lift3_both {A B C D : choice_type} (f : A -> B -> C -> D) (x : both A) (y : both B) (z : both C) : both D := lift3_both f x y z := bind_both x (fun x' => lift_both (lift2_both (f x') y z)). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* Structural size measure on choice_type: base types count 1, the
   remaining constructors add to the sizes of their components. *)
Definition choice_type_size (ce : choice_type) : nat.
Proof. induction ce. 1, 2, 3, 4, 8, 9: exact 1. - refine (S (IHce1 + IHce2))%nat. - refine (S (S (S (IHce1 + IHce2))))%nat. - refine (S (IHce))%nat. - refine (S (IHce))%nat. - refine (S (IHce1 + IHce2))%nat. Defined.

(* Structurally convert an element of the ordType carrier of a choice_type
   back into the choice_type carrier; base cases are the identity, maps are
   rebuilt key/value-wise with mkfmap. *)
Fixpoint ce_to_chElement_ordType_ce (ce : choice_type) (X : chElement_ordType ce) : ce := match ce as A return chElement_ordType A -> A with | 'unit | 'nat | 'int | 'bool | chFin _ | 'word _ => id | Y × Z => fun '(y,z) => (ce_to_chElement_ordType_ce Y y, ce_to_chElement_ordType_ce Z z) | chMap Y Z => fun y => mkfmap (seq.zip (seq.unzip1 (FMap.fmval y)) (List.map (ce_to_chElement_ordType_ce Z) (seq.unzip2 (FMap.fmval y)))) | 'option Y => (fun y => match y with | None => None | Some z => Some (ce_to_chElement_ordType_ce Y z) end) | chList Y => List.map (ce_to_chElement_ordType_ce Y) | Y ∐ Z => (fun y => match y with | inl z => inl (ce_to_chElement_ordType_ce Y z) | inr z => inr (ce_to_chElement_ordType_ce Z z) end) end X.
(* Inverse direction of ce_to_chElement_ordType_ce: structurally embed a
   choice_type carrier element into its ordType carrier. *)
Fixpoint chElement_ordType_ce_to_ce (ce : choice_type) (X : ce) : chElement_ordType ce := match ce as A return A -> chElement_ordType A with | 'unit | 'nat | 'int | 'bool | chFin _ | 'word _ => id | Y × Z => fun '(y,z) => (chElement_ordType_ce_to_ce Y y, chElement_ordType_ce_to_ce Z z) | chMap Y Z => fun y => mkfmap (seq.zip (seq.unzip1 (FMap.fmval y)) (List.map (chElement_ordType_ce_to_ce Z) (seq.unzip2 (FMap.fmval y)))) | 'option Y => (fun y => match y with | None => None | Some z => Some (chElement_ordType_ce_to_ce Y z) end) | chList Y => List.map (chElement_ordType_ce_to_ce Y) | Y ∐ Z => (fun y => match y with | inl z => inl (chElement_ordType_ce_to_ce Y z) | inr z => inr (chElement_ordType_ce_to_ce Z z) end) end X.

(* Pair two both values into a both of their product type. *)
Equations prod_both {ceA ceB : choice_type} (a : both ceA) (b : both ceB) : both (ceA × ceB) := prod_both a b := bind_both a (fun a' => bind_both b (fun b' => solve_lift (ret_both ((a', b') : _ × _)))). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* Tuple notation for prod_both, left-nested. *)
Notation "'prod_b' ( a , b )" := (prod_both a b) : hacspec_scope.
Notation "'prod_b' ( a , b , .. , c )" := (prod_both .. (prod_both a b) .. c) : hacspec_scope.

(* Discharge validity side conditions of packaged programs/schemes. *)
Ltac ssprove_valid_program := try (apply prog_valid) ; try (apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid).

(* Fully destruct product-typed hypotheses (chProd, carrier products and
   plain prod) into their components. *)
Ltac destruct_choice_type_prod := try match goal with | H : choice.Choice.sort (chElement (loc_type ?p)) |- _ => unfold p in H ; unfold loc_type in H ; unfold projT1 in H end ; repeat match goal with | H : (chProd _ _) |- _ => destruct H end ; repeat match goal with | H : choice.Choice.sort (chElement (choice.Choice.sort (chProd _ _))) |- _ => destruct H end ; repeat match goal with | H : prod _ _ |- _ => destruct H end ; cbv zeta.
(* The lexicographic order on tagged locations follows from ordering of the
   tags together with ordering of the tagged values. *)
Theorem tag_leq_simplify : forall (a b : Location), is_true (ssrfun.tag a <= ssrfun.tag b)%ord -> is_true (ssrfun.tagged a <= ssrfun.tagged b)%ord -> is_true (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord) a b).
Proof. intros [] []. unfold tag_leq. unfold eqtype.tagged_as, ssrfun.tagged , ssrfun.tag , projT1 , projT2. intro. rewrite Ord.leq_eqVlt in H. rewrite is_true_split_or in H. destruct H. - apply Couplings.reflection_nonsense in H ; subst. rewrite Ord.ltxx. rewrite Bool.orb_false_l. rewrite eqtype.eq_refl. rewrite Bool.andb_true_l. destruct eqtype.eqP. + unfold eq_rect_r , eq_rect ; destruct eq_sym. trivial. + contradiction. - rewrite H ; clear H. reflexivity. Qed.

(* tag_leq a b is the complement of strict tag_leq b a, modulo equality:
   expresses totality/antisymmetry of the lexicographic location order. *)
Theorem tag_leq_inverse : forall a b, tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord) a b = (negb (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord) b a) || eqtype.eq_op (ssrfun.tag a) (ssrfun.tag b) && eqtype.eq_op (ssrfun.tagged a) (ssrfun.tagged b))%bool.
Proof. intros [a b] [c d]. unfold tag_leq. rewrite Bool.negb_orb. rewrite Bool.negb_andb. rewrite Bool.andb_orb_distrib_r. unfold eqtype.tagged_as. unfold ssrfun.tagged , ssrfun.tag , projT1 , projT2. rewrite <- Bool.orb_assoc. f_equal. - rewrite <- Bool.negb_orb. rewrite <- Bool.orb_comm. rewrite <- Ord.leq_eqVlt. rewrite <- Ord.ltNge. reflexivity. - destruct (eqtype.eq_op a c) eqn:a_eq_c. + apply Couplings.reflection_nonsense in a_eq_c. subst. do 2 rewrite Bool.andb_true_l. destruct eqtype.eqP. 2: contradiction. unfold eq_rect_r , eq_rect. destruct eq_sym. rewrite Ord.leq_eqVlt. rewrite Bool.orb_comm. f_equal. rewrite <- Ord.ltNge. rewrite Ord.ltxx. reflexivity. + do 2 rewrite Bool.andb_false_l. rewrite Bool.orb_false_r. symmetry. destruct eqtype.eqP. { subst. rewrite eqtype.eq_refl in a_eq_c. discriminate a_eq_c. } rewrite Ord.eq_leq by reflexivity. rewrite Bool.andb_false_r. reflexivity. Qed.

(* Discharge validity of a program or scheme. *)
Ltac valid_program := apply prog_valid || (apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid).

(* heap_ignore lifted to a postcondition with equal results. *)
Definition heap_ignore_post fset {A} : postcond A A := pre_to_post (heap_ignore fset).

(* heap_ignore is reflexive. *)
Theorem heap_ignore_refl : forall {fset} h, heap_ignore fset (h, h).
Proof. intros fset h ℓ ?. reflexivity. Qed.

(* heap_ignore_post is reflexive on identical result/heap pairs. *)
Theorem heap_ignore_post_refl : forall {fset A} (x : A * heap), heap_ignore_post fset x x.
Proof. intros fset A []. split. reflexivity. apply heap_ignore_refl. Qed.

(* Ignoring a larger location set is weaker: monotonicity of heap_ignore. *)
Lemma heap_ignore_weaken : forall fset fset', is_true (fsubset fset fset') -> forall x, heap_ignore fset x -> heap_ignore fset' x.
Proof. intros. destruct x as [h h0]. pose (INV'_heap_ignore fset fset' fset0). rewrite fsetU0 in i. unfold INV' in i. specialize (i H h h0). destruct i as [? _]. intros l ?. specialize (H1 H0 l H2 ltac:(easy)). rewrite H1. reflexivity. Qed.

(* Monotonicity of heap_ignore in the postcondition of a judgment. *)
Lemma rpost_heap_ignore_weaken : forall {A} fset fset', is_true (fsubset fset fset') -> forall (x y : raw_code A), ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄ x ≈ y ⦃ heap_ignore_post fset ⦄ -> ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄ x ≈ y ⦃ heap_ignore_post fset' ⦄.
Proof. intros. eapply rpost_weaken_rule. apply H0. intros [] [] []. subst. split. reflexivity. apply (heap_ignore_weaken fset) ; assumption. Qed.

(* Anti-monotonicity of heap_ignore in the precondition of a judgment. *)
Lemma rpre_heap_ignore_weaken : forall {A} fset fset', is_true (fsubset fset fset') -> forall (x y : raw_code A), ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset' (h0, h1)) ⦄ x ≈ y ⦃ heap_ignore_post fset ⦄ -> ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄ x ≈ y ⦃ heap_ignore_post fset ⦄.
Proof. intros. eapply rpre_weaken_rule. apply H0. intros. cbn. apply (heap_ignore_weaken fset fset') ; assumption. Qed.

(* Definitional left identity of bind (stated as an equation). *)
Theorem bind_rewrite : forall A B x f, @bind A B (ret x) f = f x.
Proof. intros. unfold bind. reflexivity. Qed.
(* Binding a pure return is definitionally application. *)
Theorem r_bind_eq : forall {B C : choice_type} (y : choice.Choice.sort B) (g : choice.Choice.sort B -> raw_code C), (temp ← ret y ;; g temp) = g y.
Proof. reflexivity. Qed.

(* Variant of r_bind_trans where the equivalence of x with ret y is given
   semantically (as a judgment on representations with the retW weakest
   precondition) instead of as a relational judgment. *)
Theorem r_bind_trans' : forall {B C : choice_type} (f : choice.Choice.sort B -> raw_code C) (g : choice.Choice.sort B -> raw_code C) (x : raw_code B) (y : choice.Choice.sort B), forall (P : precond) (Q : postcond (choice.Choice.sort C) (choice.Choice.sort C)), forall (H_x_is_y : ⊨ repr x ≈ repr (ret y) [{retW (y, y)}]), (⊢ ⦃ P ⦄ f ( y) ≈ g y ⦃ Q ⦄) -> ⊢ ⦃ P ⦄ temp ← x ;; f temp ≈ g y ⦃ Q ⦄.
Proof. intros. replace (g y) with (temp ← ret y ;; g temp) by reflexivity. pose @r_bind. specialize r with (f₀ := f) (f₁ := fun x => g x). specialize r with (m₀ := x) (m₁ := (ret y)). specialize r with (pre := P) (mid := fun s0 s1 => pre_to_post P s0 s1 /\ fst s1 = y) (post := Q). apply r ; clear r. - eapply from_sem_jdg. eapply (RulesStateProb.weaken_rule (retW (y , y))). + apply H_x_is_y. + unfold retW. intros [] X [? πa1a2] ; cbn in X. specialize (fun x => πa1a2 (x, s) (y, s0)). unfold proj1_sig. unfold RulesStateProb.WrelSt. unfold θ. unfold StateTransformingLaxMorph.rlmm_codomain ; simpl. apply πa1a2. split. cbn. split. reflexivity. 2: { reflexivity. } apply H0. - intros. eapply rpre_hypothesis_rule. intros ? ? [[] ?]. subst. eapply rpre_weaken_rule. 2: { intros ? ? []. subst. apply H1. } clear H1. apply H. Qed.

(* Turn a postcondition goal of pre_to_post shape into the precondition,
   possibly undoing one set_lhs layer. *)
Ltac solve_post_from_pre := let H := fresh in intros ? ? H ; split ; [reflexivity | ] ; ( assumption || (apply restore_set_lhs in H ; [ assumption | intros ? ? ] )).

(* Eta-expanding the precondition pair does not change provability. *)
Corollary better_r : forall {A B : choice.Choice.type} (r₀ : raw_code A) (r₁ : raw_code B) (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), ⊢ ⦃ fun '(s₀, s₁) => pre (s₀, s₁) ⦄ r₀ ≈ r₁ ⦃ post ⦄ <-> ⊢ ⦃ pre ⦄ r₀ ≈ r₁ ⦃ post ⦄.
Proof. split ; intros ; (eapply rpre_hypothesis_rule ; intros ; eapply rpre_weaken_rule ; [ apply H | intros ? ? [] ; subst ; easy ]). Qed.
(* r_put_lhs without the eta-expanded precondition: push a #put on the left
   program into a set_lhs precondition. *)
Corollary better_r_put_lhs : forall {A B : choice.Choice.type} (ℓ : Location) (v : choice.Choice.sort (Value (projT1 ℓ))) (r₀ : raw_code A) (r₁ : raw_code B) (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), ⊢ ⦃ set_lhs ℓ v pre ⦄ r₀ ≈ r₁ ⦃ post ⦄ -> ⊢ ⦃ pre ⦄ #put ℓ := v ;; r₀ ≈ r₁ ⦃ post ⦄.
Proof. intros ; now apply better_r, r_put_lhs, better_r. Qed.

(* Symmetric version for a #put on the right program (set_rhs). *)
Corollary better_r_put_rhs : forall {A B : choice.Choice.type} (ℓ : Location) (v : choice.Choice.sort (Value (projT1 ℓ))) (r₀ : raw_code A) (r₁ : raw_code B) (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), ⊢ ⦃ set_rhs ℓ v pre ⦄ r₀ ≈ r₁ ⦃ post ⦄ -> ⊢ ⦃ pre ⦄ r₀ ≈ #put ℓ := v ;; r₁ ⦃ post ⦄.
Proof. intros ; now apply better_r, r_put_rhs, better_r. Qed.

(* A get immediately after a put on the same location (left program) reads
   the written value, so it can be eliminated. *)
Corollary better_r_put_get_lhs : forall (A : choice.Choice.type) (B : choice.Choice.type) (ℓ : Location) (v : choice.Choice.sort ℓ) (r : choice.Choice.sort ℓ -> raw_code A) rhs (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), ⊢ ⦃ pre ⦄ #put ℓ := v ;; r v ≈ rhs ⦃ post ⦄ -> ⊢ ⦃ pre ⦄ #put ℓ := v ;; x ← get ℓ ;; r x ≈ rhs ⦃ post ⦄.
Proof. intros. apply (r_transL (#put ℓ := v ;; r v )). apply r_put_get. apply H. Qed.

(* Symmetric put-then-get elimination for the right program. *)
Corollary better_r_put_get_rhs : forall (A : choice.Choice.type) (B : choice.Choice.type) (ℓ : Location) (v : choice.Choice.sort ℓ) (r : choice.Choice.sort ℓ -> raw_code B) lhs (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), ⊢ ⦃ pre ⦄ lhs ≈ #put ℓ := v ;; r v ⦃ post ⦄ -> ⊢ ⦃ pre ⦄ lhs ≈ #put ℓ := v ;; x ← get ℓ ;; r x ⦃ post ⦄.
Proof. intros. apply (r_transR _ (#put ℓ := v ;; r v )). apply r_put_get. apply H. Qed.
(* r_get_remind_lhs without the eta-expanded precondition: a get on the left
   may be replaced by a value the precondition remembers for that location. *)
Corollary better_r_get_remind_lhs : forall {A B : choice.Choice.type} (ℓ : Location) (v : choice.Choice.sort (Value (projT1 ℓ))) (r₀ : choice.Choice.sort (Value (projT1 ℓ)) -> raw_code A) (r₁ : raw_code B) (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)), Remembers_lhs ℓ v pre -> ⊢ ⦃ pre ⦄ r₀ v ≈ r₁ ⦃ post ⦄ -> ⊢ ⦃ pre ⦄ x ← get ℓ ;; r₀ x ≈ r₁ ⦃ post ⦄.
Proof. intros. apply better_r. eapply r_get_remind_lhs. apply H. apply better_r. apply H0. Qed.

(* Under a set_lhs ℓ v precondition, a get of ℓ on the left reads v. *)
Lemma getr_set_lhs : forall {A B} ℓ v pre post (a : _ -> raw_code A) (b : raw_code B), ⊢ ⦃ set_lhs ℓ v pre ⦄ a v ≈ b ⦃ post ⦄ -> ⊢ ⦃ set_lhs ℓ v pre ⦄ x ← get ℓ ;; a x ≈ b ⦃ post ⦄.
Proof. clear. intros. eapply better_r_get_remind_lhs. unfold Remembers_lhs. intros ? ? [? []]. subst. unfold rem_lhs. rewrite get_set_heap_eq. reflexivity. apply H. Qed.

(* Project a both of a product into a pair of boths (note: both components
   re-run the underlying computation). *)
Equations prod_to_prod {A B} (x : both (A × B)) : (both A * both B) := prod_to_prod x := (bind_both x (fun x' => solve_lift (ret_both (fst x'))) , bind_both x (fun x' => solve_lift (ret_both (snd x')))). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* let-binding for both values; purely syntactic (plain application). *)
Equations let_both {A B} (x : both A) (f : both A -> both B) : both B := let_both x f := f x.
Notation "'letb' x ':=' y 'in' f" := (let_both y (fun x => f)) (at level 100, x pattern, right associativity).
Notation "'letb' ''' x ':=' y 'in' f" := (let_both y (fun x => f)) (at level 100, x pattern, right associativity).

(* Interpret a choice_type product as a nested Coq product of F-values. *)
Fixpoint split_type (F : choice_type -> Type) (A : choice_type) : Type := match A with | C × D => split_type F C * split_type F D | _ => F A end.

(* Recursively split a both of a (nested) product into a tree of boths. *)
Fixpoint split_both {A} (x : both A) : (split_type (both) A) := match A as c return (both c -> split_type (both) c) with | _ × _ => fun y => (split_both (fst (prod_to_prod y)) , split_both (snd (prod_to_prod y))) | _ => fun y : both _ => y end x.
(* Inverse of split_both: reassemble a tree of boths into a both of the
   (nested) product type via prod_both. *)
Fixpoint unsplit_both {A} (s : split_type (both) A) : both A := match A as c return (split_type (both) c -> both c) with | _ × _ => fun y => prod_both ( unsplit_both (fst y)) ((unsplit_both (snd y))) | _ => fun y => y end s.
Notation "'unsplit_both_all' ( a , b , .. , c )" := ((.. ((unsplit_both a , unsplit_both b)) .. , unsplit_both c)).

(* Handle products of size 2 - 4 for letb *)
(* Type of the n-fold left-nested projection of a product choice_type. *)
Fixpoint prod_to_prod_n_ty (n : nat) (F : choice_type -> Type) (A : choice_type) : Type := match n with | O => F A | S n' => match A with | B × C => (prod_to_prod_n_ty n' F B) * F C | _ => F A end end.
Eval simpl in prod_to_prod_n_ty 2 (both) ('nat × 'bool).

(* TODO: Currently duplicates code, due to prod_to_prod, should only evaluate and project the result ! *)
(* Split a both of an n-fold left-nested product into an n-tuple of boths. *)
Fixpoint prod_to_prod_n {A} (n : nat) (x : both A) : prod_to_prod_n_ty n (both) A := match n as m return prod_to_prod_n_ty m (both) A with | O => x | S n' => match A as B return both B -> prod_to_prod_n_ty (S n') (both) B with | B × C => fun y => (prod_to_prod_n n' (fst (prod_to_prod y)), snd (prod_to_prod y)) | _ => fun y => y end x end.

(* Bind a both value and hand its n-way projection to the continuation;
   backs the tuple-destructuring letb notations below. *)
Equations lift_n {A B} (n : nat) (z : both A) (f : prod_to_prod_n_ty n (both) A -> both B) : both B := lift_n n z f := (bind_both z (fun z' => f (prod_to_prod_n n (solve_lift (ret_both z'))))). Solve All Obligations with intros ; solve_in_fset. Fail Next Obligation.

(* letb with tuple patterns of arity 2, 3 and 4. *)
Notation "'letb' ' '(' a ',' b ')' ':=' z 'in' f" := (lift_n 1 z (fun '(a, b) => f)) (at level 100).
Notation "'letb' ' '(' a ',' b ',' c ')' ':=' z 'in' f" := (lift_n 2 z (fun '(a, b, c) => f)) (at level 100).
Notation "'letb' ' '(' a ',' b ',' c ',' d ')' ':=' z 'in' f" := (lift_n 3 z (fun '(a, b, c, d) => f)) (at level 100).
================================================ FILE: hax-lib/proof-libs/coq/ssprove/src/ConCertLib.v ================================================
Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.
From mathcomp Require Import fintype.
From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.
From extructures Require Import ord fset fmap.
From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.
From Coq Require Import ZArith List.
Import List.ListNotations.
Import choice.Choice.Exports.
Open Scope Z_scope.
From Hacspec Require Import Hacspec_Lib_Pre.
Open Scope hacspec_scope.
Import choice.Choice.Exports.
From ConCert.Execution Require Import Serializable.

(* Derive Serializable for B from a serializable A, given a section/
   retraction pair f_to/f_from between B and A. *)
Program Definition serialize_by_other {A B} (f_to : B -> A) (f_from : A -> B) `(forall m, f_from (f_to m) = m) `{Serializable A} : Serializable B := {| serialize m := serialize (f_to m); deserialize m := option_map f_from (deserialize m) ; |}.
Next Obligation. intros. hnf. rewrite deserialize_serialize. unfold option_map. now f_equal. Defined.

(* As serialize_by_other, but the conversions are partial (option-valued). *)
Program Definition serialize_by_other_option {A B} (f_to : B -> Datatypes.option A) (f_from : Datatypes.option A -> Datatypes.option B) `(forall m, f_from (f_to m) = Some m) `{Serializable A} : Serializable B := {| serialize m := serialize (f_to m); deserialize m := match (deserialize m) with | Some m => f_from m | None => None end; |}.
Next Obligation. intros. hnf. simpl. rewrite deserialize_serialize. now f_equal. Defined.

(* Machine integers serialize through their unsigned Z representation. *)
#[global] Instance hacspec_int_serializable {ws : wsize} : Serializable (int ws) := serialize_by_other (unsigned) (@repr ws) (@wrepr_unsigned ws).

(* Equality on ordinals coincides with equality of their nat projections. *)
Lemma eqtype_ord_ext : forall n, forall x y : fintype.ordinal n, (@eqtype.eq_op (fintype_ordinal__canonical__eqtype_Equality _ (* (@ord.Ord.clone _ *) (* (ord.ordinal_ordType n) *) (* _ *) (* id) *)) x y) = (@eqtype.eq_op ssrnat.Datatypes_nat__canonical__eqtype_Equality (nat_of_ord x) (nat_of_ord y)).
Proof. intros. destruct x. simpl. destruct y. simpl. reflexivity. Qed.
(* Lifting an nseq (finite map over ordinals) commutes with point update:
   setting key b to c before lifting equals lifting first and then setting
   the lifted ordinal. Proved by induction on the map's association list. *)
Theorem lift_set_commute : forall {A : choice_type} {len} (a : nseq_ A (S len)) (b : fintype.ordinal (S len)) (c : A), @lift_nseq A (S _) (fmap.setm a b c) = fmap.setm (@lift_nseq A (S _) a) (lift_ordinal _ b) c.
Proof. clear ; intros ; fold chElement in *. simpl in b. unfold lift_nseq. apply fmap.eq_fmap. intros x ; simpl in x. rewrite fmap.setmE. unfold fmap.getm. simpl fmap.FMap.fmval. destruct a ; induction fmval ; simpl lift_fval. - now rewrite (lift_fval_equation_2 _ (len) (b, c) nil). - { destruct x , b. rewrite (eqtype_ord_ext (S (S (len)))). simpl eqtype.eq_op. destruct eqtype.eq_op eqn:eq_o at 2. + apply (ssrbool.elimT eqtype.eqP) in eq_o. subst. destruct ord.Ord.lt. * simpl. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S ( len)))). simpl. rewrite eqtype.eq_refl. reflexivity. * rewrite (eqtype_ord_ext (S (len))). simpl. set (eqtype.eq_op _ _). destruct b eqn:eq_b_o ; subst b. -- apply (ssrbool.elimT eqtype.eqP) in eq_b_o. subst. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eqtype.eq_refl. reflexivity. -- rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. destruct (fst _). simpl in *. rewrite ssrnat.eqSS. rewrite eq_b_o. rewrite IHfmval. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eqtype.eq_refl. reflexivity. (* apply (path_sorted_tl _). *) { intros. destruct fmval. reflexivity. - cbn. cbn in i. destruct (seq.unzip1 fmval). + reflexivity. + cbn in i. now rewrite LocationUtility.is_true_split_and in i. } + destruct ord.Ord.lt. * simpl. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eq_o. reflexivity. * rewrite (eqtype_ord_ext (S (len))). simpl. set (eqtype.eq_op _ _). destruct b eqn:eq_b_o ; subst b. -- apply (ssrbool.elimT eqtype.eqP) in eq_b_o. subst. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eq_o. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. unfold lift_ordinal. destruct (fst _). simpl. simpl in eq_o. rewrite eq_o. reflexivity. -- rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. destruct a. destruct s. simpl in *. set (b := eqtype.eq_op _ _) ; destruct b eqn:eq_m_o ; subst b. ++ apply (ssrbool.elimT eqtype.eqP) in eq_m_o. subst. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. now rewrite eqtype.eq_refl. ++ rewrite IHfmval. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eq_o. rewrite (lift_fval_equation_2 _ (len)). simpl. rewrite (eqtype_ord_ext (S (S (len)))). simpl. rewrite eq_m_o. reflexivity. (* apply (path_sorted_tl _). *) { intros. destruct fmval. reflexivity. - cbn. cbn in i. destruct (seq.unzip1 fmval). + reflexivity. + cbn in i. now rewrite LocationUtility.is_true_split_and in i. } } Qed.

(* Splitting an nseq into its optional head and tail list and rebuilding it
   with array_from_option_list_helper is the identity. *)
Theorem array_from_list_helper_inverse : forall {A} len (m : nseq_ A (S len)), array_from_option_list_helper (nseq_hd_option m) (array_to_option_list (nseq_tl m)) len = m.
Proof. intros. induction len. - unfold nseq_tl. unfold nseq_hd_option. rewrite array_to_option_list_equation_1. destruct m, fmval. + now apply fmap.eq_fmap. + apply fmap.eq_fmap. intros x ; simpl in x. unfold fmap.getm at 2 ; simpl. destruct (fst _), m ; [ | discriminate ] ; simpl. rewrite array_from_option_list_helper_equation_1. unfold setm_option. rewrite fmap.setmE. now destruct x , m ; [ | discriminate ] ; simpl. - rewrite array_to_option_list_equation_2. assert (forall (T : ordType) (S : choice_type) (m : @fmap.FMap.fmap_of T S (ssreflect.Phant (Ord.Ord.sort T -> S))) (k : Ord.Ord.sort T) (v : chOption S) (k' : Ord.Ord.sort T), @fmap.getm T S (setm_option m k v) k' = match v with | Some v => @fmap.getm T S (fmap.setm m k v) k' | None => @fmap.getm T S m k' end) by now destruct v. rewrite array_from_option_list_helper_equation_3. rewrite (IHlen (nseq_tl m)). clear. apply fmap.eq_fmap. intros x ; simpl in x. destruct m ; induction fmval. + now unfold fmap.getm ; cbn ; rewrite lift_fval_equation_1. + { specialize (IHfmval (path_sorted_tl i)). unfold nseq_hd_option in *. simpl. destruct a. destruct s. unfold fmap.getm at 2. simpl. destruct m. { setoid_rewrite <- IHfmval ; clear. setoid_rewrite fmap.setmE. rewrite !(eqtype_ord_ext (S (S len))). simpl eqtype.eq_op. replace (_ - _)%nat with O by (set (temp := nseq_tl _) ; rewrite <- (array_to_length_option_list_is_len A len temp) at 1; now rewrite Nat.sub_diag). destruct x , m ; [ reflexivity | ]. rewrite tl_fmap_equation_2. unfold setm_option. destruct fmval ; [reflexivity | ]. simpl. destruct p, s. simpl. destruct m0 ; [ discriminate | ]. rewrite tl_fmap_equation_3. unfold fmap.getm. simpl. set (@fmap.getm_def _ _). set (lift_fval _). set (lift_fval _). assert (l = l0) ; [ subst l l0 | now rewrite H ]. f_equal. now apply lower_fval_ext_list. } { setoid_rewrite <- IHfmval ; clear. unfold setm_option. unfold fmap.getm. simpl. rewrite tl_fmap_equation_3. destruct (eqtype.eq_op _ _) eqn:eq_o. - apply (ssrbool.elimT eqtype.eqP) in eq_o. rewrite eq_o. subst. simpl. rewrite lower_fval_equation_2. rewrite lift_fval_equation_2. simpl. rewrite !(eqtype_ord_ext (S (S len))). simpl. rewrite eqtype.eq_refl. reflexivity. - unfold setm_option. destruct fmval. + (* discriminate. *) rewrite tl_fmap_equation_1. simpl. rewrite lower_fval_equation_2. rewrite lift_fval_equation_2. simpl. rewrite lower_fval_equation_1. simpl. rewrite !(eqtype_ord_ext (S (S len))). simpl. rewrite !(eqtype_ord_ext (S (S len))) in eq_o. simpl in eq_o. rewrite eq_o. simpl. reflexivity. + destruct p , s. destruct m0 ; [ discriminate | ]. simpl. rewrite lower_fval_equation_2. rewrite lift_fval_equation_2. simpl. rewrite lower_fval_equation_2. rewrite lift_fval_equation_2. simpl. rewrite tl_fmap_equation_3. simpl. rewrite lower_fval_equation_2. rewrite lift_fval_equation_2. simpl. rewrite !(eqtype_ord_ext (S (S len))). simpl. rewrite (eqtype_ord_ext (S (S len))) in eq_o. simpl in eq_o. rewrite eq_o. apply (ssrbool.elimF eqtype.eqP) in eq_o. destruct (eqtype.eq_op _ _) eqn:eq_o2 ; [ reflexivity | ]. simpl. set (@fmap.getm_def _ _). set (lift_fval _). set (lift_fval _). assert (l = l0) ; [ subst l l0 | now rewrite H ]. f_equal. apply lower_fval_ext_list. apply (path_sorted_tl (path_sorted_tl i)). apply (path_sorted_tl (path_sorted_tl i)). reflexivity. } } Qed.

(* Round-trip: converting an nseq to an option list and back is identity.
   This is the retraction needed for the Serializable instance below. *)
Theorem array_from_list_to_list_unit : forall {A} len (m : nseq_ A len), array_from_option_list' (array_to_option_list m) len = m.
Proof. intros. induction len. - now destruct m. (* unit element equality *) - simpl. pose (resize_to_length_idemp (array_to_option_list m)). rewrite (array_to_length_option_list_is_len A (S len) m) in e. rewrite <- e ; clear e. rewrite array_to_option_list_equation_2. specialize (IHlen (nseq_tl m)). apply array_from_list_helper_inverse. Qed.

(* An nseq is "defaulted" when no stored value equals the canonical default. *)
Definition defaulted_nseq {A len} (m : nseq_ A (S len)) := forall i, match fmap.getm m i with | Some x => x <> chCanonical A | None => True end.

(* Serialize nseq arrays through their option-list representation. *)
#[global] Instance nseq_serializable {A : choice_type} {len} `{Serializable A} : Serializable (nseq_ A len) := serialize_by_other (array_to_option_list) (fun x => array_from_option_list' x len) (array_from_list_to_list_unit len).

(* Discharge Serializable goals for product-shaped enum encodings. *)
Ltac serialize_enum := intros ; autounfold ; repeat apply @product_serializable ; fold chElement.

From ConCert.Execution Require Import Blockchain.

(* Minimal ChainBase: nat addresses, even numbers are contract addresses. *)
#[global] Instance BaseTypes : ConCert.Execution.Blockchain.ChainBase := {| Address := nat; address_eqb := Nat.eqb ; address_eqb_spec := Nat.eqb_spec; address_is_contract := Nat.even; |}.

From Hacspec Require Import ChoiceEquality.
(* From Hacspec Require Import Hacspec_Lib. *)

(* Equality of both values is exactly equality of their raw programs
   (both directions; cf. both_eq for the forward direction). *)
Theorem both_ext_prog : forall {A} (x y : both A), both_prog x = both_prog y <-> x = y.
Proof. intros A [both_x valid_x eq_x] [both_y valid_y eq_y] ; simpl. split. - intros ; subst. f_equal ; easy. - easy. Qed.
Print pkg_core_definition.typed_raw_function.
(* Instance serializable_code {L I} {A : choice_type} `{Serializable A} : Serializable (pkg_core_definition.code L I A). *) (* Proof. *) (* Admitted. *)
(* Instance serializable_both {A : choice_type} `{Serializable A} : Serializable (both A). *) (* Proof. *) (* Admitted. *)

(* Rebuilding a finite map from its underlying sorted association list is
   the identity. *)
Lemma fmap_ext : forall {T : ordType} {S : Type} (m : {fmap T -> S}), mkfmap (FMap.fmval m) = m.
Proof. intros. apply fmap.eq_fmap. intros ?. rewrite (@mkfmapE T S _ x). reflexivity. Qed.

(* Serializable instance for the ordType carrier of every choice_type,
   by structural induction on the type: maps go through their association
   lists, finite ordinals through their nat projection. *)
Instance serializable_choice_ordType {C : choice_type} : Serializable (chElement_ordType C).
Proof. induction C. - exact unit_serializable. - exact nat_serializable. - exact int_serializable. - exact bool_serializable. - now apply product_serializable. - refine (@serialize_by_other _ _ (fun x => FMap.fmval x) (mkfmap) _ list_serializable). apply fmap_ext. - now apply option_serializable. - destruct n as [[] ?] ; [discriminate | ]. eapply (serialize_by_other (fun x => nat_of_ord x) (fun x => Ordinal (n := S n) (m := x mod S n) (ssrbool.introT ssrnat.ltP (Nat.mod_upper_bound x (S n) (Nat.neq_succ_0 n))))). intros. destruct m. apply ord_ext. rewrite Nat.mod_small ; [ reflexivity | simpl ; easy ]. exact nat_serializable. - apply hacspec_int_serializable. - now apply list_serializable. - now apply sum_serializable. Defined.

(* Serializable instance for every choice_type carrier itself; same
   induction as serializable_choice_ordType. *)
Instance serializable_choice {C : choice_type} : Serializable.Serializable C.
Proof. induction C. - exact unit_serializable. - exact nat_serializable. - exact int_serializable. - exact bool_serializable. - now apply product_serializable. - refine (@serialize_by_other (list (chElement_ordType C1 * C2)) (chMap C1 C2) (fun x => FMap.fmval x) (mkfmap) _ list_serializable). apply fmap_ext. - now apply option_serializable. - destruct n as [[] ?] ; [discriminate | ]. eapply (serialize_by_other (fun x => nat_of_ord x) (fun x => Ordinal (n := S n) (m := x mod S n) (ssrbool.introT ssrnat.ltP (Nat.mod_upper_bound x (S n) (Nat.neq_succ_0 n))))). intros. destruct m. apply ord_ext. rewrite Nat.mod_small ; [ reflexivity | simpl ; easy ]. exact nat_serializable. - apply hacspec_int_serializable. - now apply list_serializable. - now apply sum_serializable. Defined.
================================================ FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib.v ================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".
(********************************************************)
(* Implementation of all Hacspec library functions *)
(* for Both types. *)
(********************************************************)
Declare Scope hacspec_scope.
From Hacspec Require Import ChoiceEquality. Export ChoiceEquality.
From Hacspec Require Import LocationUtility. Export LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable. Export Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre. Export Hacspec_Lib_Pre.
Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.
From Hacspec Require Import Hacspec_Lib_Integers. Export Hacspec_Lib_Integers.
From Hacspec Require Import Hacspec_Lib_Loops. Export Hacspec_Lib_Loops.
From Hacspec Require Import Hacspec_Lib_Seq. Export Hacspec_Lib_Seq.
From Hacspec Require Import Hacspec_Lib_Natmod. Export Hacspec_Lib_Natmod.
From Hacspec Require Import Hacspec_Lib_Coercions. Export Hacspec_Lib_Coercions.
From Hacspec Require Import Hacspec_Lib_Eq. Export Hacspec_Lib_Eq.
From Hacspec Require Import Hacspec_Lib_Monad. Export Hacspec_Lib_Monad.
From Hacspec Require Import Hacspec_Lib_Ltac. Export Hacspec_Lib_Ltac.
From Hacspec Require Import Hacspec_Lib_Controlflow. Export Hacspec_Lib_Controlflow.
From Hacspec Require Import Hacspec_Lib_Notation. Export Hacspec_Lib_Notation.
From Hacspec Require Import Hacspec_Lib_TODO.
Export Hacspec_Lib_TODO.

From Hacspec Require Import ConCertLib.
Export ConCertLib.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Coercions.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Implementation of all Hacspec library functions    *)
(*   for Both types.                                    *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

From Hacspec Require Import Hacspec_Lib_Natmod.

(**** Integers to arrays *)

(* [both]-lifted wrappers around the pure byte-(de)serialisation functions
   of the same names from Hacspec_Lib_Pre: each wraps the pure result in
   [ret_both].  The uintN_* / uN_* naming mirrors Hacspec_Lib_Pre. *)

Definition uint16_to_le_bytes (n : int16) : both ((nseq_ int8 2)) :=
  ret_both (uint16_to_le_bytes n).
Definition uint16_to_be_bytes (n : int16) : both ((nseq_ int8 2)) :=
  ret_both (uint16_to_be_bytes n).
Definition uint16_from_le_bytes (n : (nseq_ int8 2)) : both ((int16)) :=
  ret_both (uint16_from_le_bytes n).
Definition uint16_from_be_bytes (n : (nseq_ int8 2)) : both ((int16)) :=
  ret_both (uint16_from_be_bytes n).

Definition uint32_to_le_bytes (n : int32) : both ((nseq_ int8 4)) :=
  ret_both (uint32_to_le_bytes n).
Definition uint32_to_be_bytes (n : int32) : both ((nseq_ int8 4)) :=
  ret_both (uint32_to_be_bytes n).
Definition uint32_from_le_bytes (n : (nseq_ int8 4)) : both ((int32)) :=
  ret_both (uint32_from_le_bytes n).
Definition uint32_from_be_bytes (n : (nseq_ int8 4)) : both ((int32)) :=
  ret_both (uint32_from_be_bytes n).

Definition uint64_to_le_bytes (n : int64) : both ((nseq_ int8 8)) :=
  ret_both (uint64_to_le_bytes n).
Definition uint64_to_be_bytes (n : int64) : both ((nseq_ int8 8)) :=
  ret_both (uint64_to_be_bytes n).
Definition uint64_from_le_bytes (n : (nseq_ int8 8)) : both ((int64)) :=
  ret_both (uint64_from_le_bytes n).
Definition uint64_from_be_bytes (n : (nseq_ int8 8)) : both ((int64)) :=
  ret_both (uint64_from_be_bytes n).

Definition uint128_to_le_bytes (n : int128) : both ((nseq_ int8 16)) :=
  ret_both (uint128_to_le_bytes n).
Definition uint128_to_be_bytes (n : int128) : both ((nseq_ int8 16)) :=
  ret_both (uint128_to_be_bytes n).
Definition uint128_from_le_bytes (n : (nseq_ int8 16)) : both (int128) :=
  ret_both (uint128_from_le_bytes n).
Definition uint128_from_be_bytes (n : (nseq_ int8 16)) : both ((int128)) :=
  ret_both (uint128_from_be_bytes n).

Definition u32_to_le_bytes (n : int32) : both ((nseq_ int8 4)) :=
  ret_both (u32_to_le_bytes n).
Definition u32_to_be_bytes (n : int32) : both ((nseq_ int8 4)) :=
  ret_both (u32_to_be_bytes n).
Definition u32_from_le_bytes (n : (nseq_ int8 4)) : both ((int32)) :=
  ret_both (u32_from_le_bytes n).
Definition u32_from_be_bytes (n : (nseq_ int8 4)) : both ((int32)) :=
  ret_both (u32_from_be_bytes n).

(* NOTE(review): the u64 family only provides the little-endian pair, and
   u128 below lacks to-LE/BE symmetry with from-LE/BE on the next lines —
   presumably mirroring what Hacspec_Lib_Pre exports; confirm if BE
   variants are ever needed. *)
Definition u64_to_le_bytes (n : int64) : both ((nseq_ int8 8)) :=
  ret_both (u64_to_le_bytes n).
Definition u64_from_le_bytes (n : (nseq_ int8 8)) : both ((int64)) :=
  ret_both (u64_from_le_bytes n).

Definition u128_to_le_bytes (n : int128) : both ((nseq_ int8 16)) :=
  ret_both (u128_to_le_bytes n).
Definition u128_to_be_bytes (n : int128) : both ((nseq_ int8 16)) :=
  ret_both (u128_to_be_bytes n).
Definition u128_from_le_bytes (n : (nseq_ int8 16)) : both ((int128)) :=
  ret_both (u128_from_le_bytes n).
Definition u128_from_be_bytes (n : (nseq_ int8 16)) : both ((int128)) :=
  ret_both (u128_from_be_bytes n).

(*** Casting *)

Section TodoSection2.

  (* Width/usize casts.  Every cast is [repr _ (unsigned n)]: take the
     unsigned value of the source and re-represent it at the target width
     (zero-extending or truncating as appropriate). *)

  Definition uint128_from_usize (n : uint_size) : both int128 := ret_both (repr _ (unsigned n)).
  Definition uint64_from_usize (n : uint_size) : both int64 := ret_both (repr _ (unsigned n)).
  Definition uint32_from_usize (n : uint_size) : both int32 := ret_both (repr _ (unsigned n)).
  Definition uint16_from_usize (n : uint_size) : both int16 := ret_both (repr _ (unsigned n)).
  Definition uint8_from_usize (n : uint_size) : both int8 := ret_both (repr _ (unsigned n)).

  Definition uint128_from_uint8 (n : int8) : both int128 := ret_both (repr _ (unsigned n)).
  Definition uint64_from_uint8 (n : int8) : both int64 := ret_both (repr _ (unsigned n)).
  Definition uint32_from_uint8 (n : int8) : both int32 := ret_both (repr _ (unsigned n)).
  Definition uint16_from_uint8 (n : int8) : both int16 := ret_both (repr _ (unsigned n)).
  Definition usize_from_uint8 (n : int8) : both uint_size := ret_both (repr _ (unsigned n)).

  Definition uint128_from_uint16 (n : int16) : both int128 := ret_both (repr _ (unsigned n)).
  Definition uint64_from_uint16 (n : int16) : both int64 := ret_both (repr _ (unsigned n)).
  Definition uint32_from_uint16 (n : int16) : both int32 := ret_both (repr _ (unsigned n)).
  Definition uint8_from_uint16 (n : int16) : both int8 := ret_both (repr _ (unsigned n)).
  Definition usize_from_uint16 (n : int16) : both uint_size := ret_both (repr _ (unsigned n)).

  Definition uint128_from_uint32 (n : int32) : both int128 := ret_both (repr _ (unsigned n)).
  Definition uint64_from_uint32 (n : int32) : both int64 := ret_both (repr _ (unsigned n)).
  Definition uint16_from_uint32 (n : int32) : both int16 := ret_both (repr _ (unsigned n)).
  Definition uint8_from_uint32 (n : int32) : both int8 := ret_both (repr _ (unsigned n)).
  Definition usize_from_uint32 (n : int32) : both uint_size := ret_both (repr _ (unsigned n)).

  Definition uint128_from_uint64 (n : int64) : both int128 := ret_both (repr _ (unsigned n)).
  Definition uint32_from_uint64 (n : int64) : both int32 := ret_both (repr _ (unsigned n)).
  Definition uint16_from_uint64 (n : int64) : both int16 := ret_both (repr _ (unsigned n)).
  Definition uint8_from_uint64 (n : int64) : both int8 := ret_both (repr _ (unsigned n)).
  Definition usize_from_uint64 (n : int64) : both uint_size := ret_both (repr _ (unsigned n)).

  Definition uint64_from_uint128 (n : int128) : both int64 := ret_both (repr _ (unsigned n)).
  Definition uint32_from_uint128 (n : int128) : both int32 := ret_both (repr _ (unsigned n)).
  Definition uint16_from_uint128 (n : int128) : both int16 := ret_both (repr _ (unsigned n)).
  Definition uint8_from_uint128 (n : int128) : both int8 := ret_both (repr _ (unsigned n)).
  Definition usize_from_uint128 (n : int128) : both uint_size := ret_both (repr _ (unsigned n)).

End TodoSection2.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Comparable.v
================================================
From Coq Require Import ZArith List.
From Crypt Require Import Package.

(************************************************)
(*   Implementation of comparison functions     *)
(************************************************)

(* Typeclass handling of default elements, for use in sequences/arrays.
   We provide instances for the library integer types *)
Class Default (A : Type) := {
    default : A
  }.
Global Arguments default {_} {_}.

(* Decidable boolean equality together with its soundness/completeness
   proof w.r.t. Leibniz equality. *)
Class EqDec (A : Type) := {
    eqb : A -> A -> bool ;
    eqb_leibniz : forall x y, is_true (eqb x y) <-> x = y
  }.

Infix "=.?" := eqb (at level 40) : hacspec_scope.
Infix "!=.?" := (fun a b => negb (eqb a b)) (at level 40) : hacspec_scope.

(* Boolean order operations; no laws are required by the class itself. *)
Class Comparable (A : Type) := {
    ltb : A -> A -> bool;
    leb : A -> A -> bool;
    gtb : A -> A -> bool;
    geb : A -> A -> bool;
  }.
Infix "<.?" := ltb (at level 42) : hacspec_scope.
Infix "<=.?" := leb (at level 42) : hacspec_scope.
Infix ">.?" := gtb (at level 42) : hacspec_scope.
Infix ">=.?" := geb (at level 42) : hacspec_scope.

(* Derive a full [Comparable] instance from a strict less-than test,
   using [eqb] to recover the non-strict variants. *)
Instance eq_dec_lt_Comparable {A : Type} `{EqDec A} (ltb : A -> A -> bool) : Comparable A := {
    ltb := ltb;
    leb a b := if eqb a b then true else ltb a b ;
    gtb a b := ltb b a;
    geb a b := if eqb a b then true else ltb b a;
  }.

(* Derive a full [Comparable] instance from a non-strict less-or-equal
   test, using [eqb] to recover the strict variants. *)
Instance eq_dec_le_Comparable {A : Type} `{EqDec A} (leb : A -> A -> bool) : Comparable A := {
    ltb a b := if eqb a b then false else leb a b;
    leb := leb ;
    gtb a b := if eqb a b then false else leb b a;
    geb a b := leb b a;
  }.

(* [eqb] is reflexive — immediate from [eqb_leibniz]. *)
Theorem eqb_refl : forall {A} {H : EqDec A} (x : A), (@eqb A H x x) = true.
Proof. intros. now apply eqb_leibniz. Qed.

(* [eqb] reflects propositional equality (ssreflect [reflect] form). *)
Theorem eqbP : forall {A} {H : EqDec A} (x y : A), ssrbool.reflect (x = y) (@eqb A H x y).
Proof.
  intros. apply Bool.iff_reflect.
  rewrite <- eqb_leibniz. reflexivity.
Qed.

(* Negative form of [eqb_leibniz]: a false test means disequality. *)
Theorem neqb_leibniz : forall {A} {H : EqDec A} x y, eqb x y = false <-> x <> y .
Proof.
  intros.
  rewrite (ssrbool.rwP ssrbool.negPf).
  rewrite <- (ssrbool.rwP (@ssrbool.negP (eqb x y))).
  apply not_iff_compat.
  apply eqb_leibniz.
Qed.

Global Program Instance nat_eqdec : EqDec nat := {
    eqb := Nat.eqb;
    eqb_leibniz := Nat.eqb_eq ;
  }.

Global Instance nat_comparable : Comparable nat := {
    ltb := Nat.ltb;
    leb := Nat.leb;
    gtb a b := Nat.ltb b a;
    geb a b := Nat.leb b a;
  }.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Controlflow.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Implementation of all Hacspec library functions    *)
(*   for Both types.                                    *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

From Hacspec Require Import Hacspec_Lib_Integers.
From Hacspec Require Import Hacspec_Lib_Loops.
From Hacspec Require Import Hacspec_Lib_Monad.
From Hacspec Require Import Hacspec_Lib_Ltac.

(* Handle products of size 1 - 4 for foldi_both':
   [ssp (fun '(a, b, ...) => f)] turns a function over an n-tuple into a
   function over [both (_ × ... × _)] via [lift_n]. *)
Notation "'ssp' ( 'fun' a => f )" :=
  (((fun (a : both _) => f))) (at level 100, f at next level, a at next level).
Notation "'ssp' ( 'fun' ' ( a , b ) => f )" :=
  (fun (temp : both (_ × _)) => lift_n 1 temp (fun '(a, b) => f))
    (at level 100, f at next level, a at next level, b at next level).
Notation "'ssp' ( 'fun' ' ( a , b , c ) => f )" :=
  (fun (temp : both (_ × _ × _)) => lift_n 2 temp (fun '(a, b, c) => f))
    (at level 100, f at next level, a at next level, b at next level, c at next level).
Notation "'ssp' ( 'fun' ' ( a , b , c , d ) => f )" :=
  (fun (temp : both (_ × _ × _ × _)) => lift_n 3 temp (fun '(a, b, c, d) => f))
    (at level 100, f at next level, a at next level, b at next level, c at next level, d at next level).

(* eq_fset *)
(* finmap.finSet *)
(* https://coq.zulipchat.com/#narrow/stream/237977-Coq-users/topic/aac-tactics.2C.20fset.20automation.2C.20universes *)
(* Display map / exponential maps *)

(* Fold over an index range given as a pair (lo, hi); thin wrapper around
   [foldi] from Hacspec_Lib_Loops. *)
Equations foldi_both {acc: choice_type} (lo_hi: both uint_size * both uint_size) (f: both uint_size -> both acc -> both acc) (init: both acc) : both (acc) :=
  foldi_both lo_hi f init := foldi (fst lo_hi) (snd lo_hi) (@f) (init).
Solve All Obligations with intros ; solve_fsubset_trans.
Fail Next Obligation.

(* Fold over the elements of a [chList], left to right, threading the
   accumulator through [List.fold_left]. *)
Equations foldi_both_list {acc B: choice_type} (l : both (chList B)) (f: both B -> both acc -> both acc) (init: both acc) : both (acc) :=
  foldi_both_list l f init :=
    bind_both l (fun l' =>
      List.fold_left (fun x y => solve_lift @f (solve_lift ret_both y) (x) : both _) l' (solve_lift init)).
Solve All Obligations with intros ; solve_fsubset_trans.
Solve All Obligations with intros ; solve_ssprove_obligations.
Fail Next Obligation.

(* Monadic if-then-else on [both] booleans. *)
Program Definition if_both {A} (c : both 'bool) (e_then : both A) (e_else : both A) : both A :=
  bind_both c (fun b => if b then lift_both e_then else lift_both e_else).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

Notation "'ifb' b 'then' et 'else' ee" := (if_both b et ee) (at level 100).

(* Case analysis on a [both]-wrapped option. *)
Equations match_both_option {A B} (x : both (option A)) (fa : both A -> both B) (fb : both B) : both B :=
  match_both_option x fa fb :=
    bind_both x (fun y => match y with
                       | Some a => solve_lift (fa (solve_lift (ret_both a)))
                       | None => solve_lift fb
                       end).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

Notation "'matchb' x 'with' '|' 'Option_Some' a '=>' va '|' 'Option_None' '=>' vb 'end'" :=
  (match_both_option x (fun a => va) vb).
Notation "'matchb' x 'with' '|' 'Option_Some' a '=>' va '|' '_' '=>' vb 'end'" :=
  (match_both_option x (fun a => va) vb).
(* "0"-suffixed variants of the fold/if combinators.  NOTE(review): these
   bodies look identical to their unsuffixed counterparts in this file and
   in Hacspec_Lib_Loops — presumably kept for compatibility of generated
   code; confirm before deduplicating. *)

(* Fuel-driven fold starting at index [i]; delegates to [foldi_]. *)
Program Definition foldi_both0_ {acc : choice_type} (fuel : nat) (i : both uint_size) (f: both (uint_size) -> both acc -> both (acc)) (cur : both acc) : both (acc) :=
  foldi_ fuel i (@f) (lift_both cur).
Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).
Fail Next Obligation.

(* Fold over [lo, hi): fuel is [hi - lo] computed in Z; an empty or
   negative range returns [init] unchanged. *)
Equations foldi0 {acc: choice_type} (lo: both uint_size) (hi: both uint_size) (* {lo <= hi} *) (f: both (uint_size) -> both acc -> both (acc)) (* {i < hi} *) (init: both acc) : both (acc) :=
  foldi0 lo hi f init :=
    bind_both lo (fun lo =>
    bind_both hi (fun hi =>
    match Z.sub (unsigned hi) (unsigned lo) with
    | Z0 => lift_both init
    | Zneg p => lift_both init
    | Zpos p => foldi_both0_ (Pos.to_nat p) (solve_lift (ret_both lo)) (@f) init
    end)) .
Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).
Fail Next Obligation.

(* Pair-argument convenience wrapper for [foldi0]. *)
Definition foldi_both0 {acc: choice_type} (lo_hi: both uint_size * both uint_size) (f: both uint_size -> both acc -> both (acc)) (* {i < hi} *) (init: both acc) : both (acc) :=
  foldi0 (fst lo_hi) (snd lo_hi) f init.

(* List fold variant (cf. [foldi_both_list]). *)
Equations foldi_both0_list {acc B: choice_type} (l : both (chList B)) (f: both B -> both acc -> both (acc)) (* {i < hi} *) (init: both acc) : both (acc) :=
  foldi_both0_list l f init :=
    bind_both l (fun l' =>
      List.fold_left (fun x y => solve_lift @f (solve_lift ret_both y) (x) : both _) l' (solve_lift init : both _)).
Fail Next Obligation.

(* Argument-order adapter matching the generated [f_fold] call shape. *)
Notation "'f_fold'" := (fun lo_hi init f => foldi_both_list lo_hi f init).

(* Monadic if-then-else (same body as [if_both]). *)
Program Definition if_both0 {A} (c : both 'bool) (e_then : both A) (e_else : both A) : both A :=
  bind_both c (fun b => if b then lift_both e_then else lift_both e_else).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

Notation "'ifb0' b 'then' et 'else' ee" := (if_both0 b et ee) (at level 100).

(* Monadic let-binding notations parameterised by a BindCode instance. *)
Notation "'letm[' bind_code_mnd ']' x ':=' y 'in' z" :=
  (choice_typeMonad.monad_bind_both (BindCode := bind_code_mnd) y (fun x => z))
    (at level 100, x pattern).
Notation "'letm[' bind_code_mnd ']' ( x : t ) ':=' y 'in' z" :=
  (choice_typeMonad.monad_bind_both (BindCode := bind_code_mnd) y (fun x => z))
    (at level 100, x pattern).

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Eq.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Implementation of all Hacspec library functions    *)
(*   for Both types.                                    *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

From Hacspec Require Import Hacspec_Lib_Natmod.

(* Comparisons, boolean equality, and notation *)

(* Machine words: boolean equality is ssreflect's [eq_op]. *)
Global Instance int_eqdec `{WS : wsize}: EqDec (@int WS) := {
    eqb := eqtype.eq_op ;
    eqb_leibniz := int_eqb_eq ;
  }.

(* Ordering on words via unsigned comparison. *)
Global Instance int_comparable `{WS : wsize} : Comparable (@int WS) :=
  eq_dec_lt_Comparable (wlt Unsigned).

Definition uint8_equal (x y : int8) : both 'bool := ret_both (eqb x y : 'bool).
(* Equality test on [nat_mod] agrees with Leibniz equality. *)
Theorem nat_mod_eqb_spec : forall {p} (a b : nat_mod p),
    is_pure (nat_mod_equal a b) = true <-> a = b.
Proof. symmetry ; apply (ssrbool.rwP nat_mod_equal_reflect). Qed.

Global Instance nat_mod_eqdec {p} : EqDec (nat_mod p) := {
    eqb a b := is_pure (nat_mod_equal a b);
    eqb_leibniz := nat_mod_eqb_spec;
  }.

(* [both]-lifted remainder on [nat_mod]. *)
Definition nat_mod_rem {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) :=
  ret_both (nat_mod_rem a b).
Infix "rem" := nat_mod_rem (at level 33) : hacspec_scope.

Global Instance bool_eqdec : EqDec bool := {
    eqb := Bool.eqb;
    eqb_leibniz := Bool.eqb_true_iff;
  }.

Global Instance string_eqdec : EqDec String.string := {
    eqb := String.eqb;
    eqb_leibniz := String.eqb_eq ;
  }.

(* Structural, short-circuiting list equality over an element [EqDec]. *)
Fixpoint list_eqdec {A} `{EqDec A} (l1 l2 : list A) : bool :=
  match l1, l2 with
  | x::xs, y::ys => if eqb x y then list_eqdec xs ys else false
  | [], [] => true
  | _,_ => false
  end.

Lemma list_eqdec_refl : forall {A} `{EqDec A} (l1 : list A), list_eqdec l1 l1 = true.
Proof. intros ; induction l1 ; cbn ; try rewrite eqb_refl ; easy. Qed.

(* Soundness/completeness of [list_eqdec] w.r.t. Leibniz equality. *)
Lemma list_eqdec_sound : forall {A} `{EqDec A} (l1 l2 : list A),
    list_eqdec l1 l2 = true <-> l1 = l2.
Proof.
  intros A H l1.
  induction l1 ; induction l2 ; split ; intros ; simpl in * ; try easy ; try inversion H0.
  - (* inductive case *)
    apply Field_theory.if_true in H0; destruct H0.
    f_equal.
    (* show heads are equal *)
    + apply (proj1 (eqb_leibniz a a0) H0).
    (* show tails are equal using induction hypothesis *)
    + apply IHl1. assumption.
  - rewrite eqb_refl. apply list_eqdec_refl.
Qed.

Global Instance List_eqdec {A} `{EqDec A} : EqDec (list A) := {
    eqb := list_eqdec;
    eqb_leibniz := list_eqdec_sound;
  }.

(* Pointwise vector equality is sound (via the stdlib's [Vector.eqb_eq]). *)
Lemma vector_eqb_sound : forall {A : Type} {n : nat} `{EqDec A} (v1 v2 : VectorDef.t A n),
    Vector.eqb _ eqb v1 v2 = true <-> v1 = v2.
Proof. intros. apply Vector.eqb_eq. intros. apply eqb_leibniz. Qed.

Global Program Instance Vector_eqdec {A n} `{EqDec A}: EqDec (VectorDef.t A n) := {
    eqb := Vector.eqb _ eqb;
    eqb_leibniz := vector_eqb_sound;
  }.

(* Pair equality: componentwise [eqb]; the obligation proves soundness. *)
Global Program Instance Dec_eq_prod (A B : Type) `{EqDec A} `{EqDec B} : EqDec (A * B) := {
    eqb '(a0, b0) '(a1, b1) := andb (eqb a0 a1) (eqb b0 b1)
  }.
Next Obligation.
  split ; intros ; destruct x ; destruct y.
  - (* symmetry in H1. *)
    (* apply Bool.andb_true_eq in H1. destruct H1. *)
    rewrite is_true_split_and in H1. destruct H1.
    rewrite (eqb_leibniz) in H1.
    rewrite (eqb_leibniz) in H2.
    now subst.
  - inversion_clear H1.
    now do 2 rewrite eqb_refl.
Defined.

(* Elementwise comparison of fixed-size arrays, defined by recursion on
   [len] using tactics ([Defined] so it computes).
   NOTE(review): each step only compares the entry at ordinal [len] of the
   two maps and returns [false] when either side is unbound — it does not
   visibly recurse on smaller indices; confirm intended semantics. *)
Fixpoint array_eq_ {a: choice_type} {len: nat} (eq: ( (a)) -> ( (a)) -> bool) (s1: ( (nseq_ a len))) (s2 : ( (nseq_ a len))) {struct len} : bool.
Proof.
  destruct len ; cbn in *.
  - exact true.
  - destruct (getm s1 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s | ].
    + destruct (getm s2 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s0 | ].
      * exact (eq s s0).
      * exact false.
    + exact false.
Defined.

(* Pointwise array operations, all instances of [array_join_map]. *)
Infix "array_xor" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_xor)) (at level 33) : hacspec_scope.
Infix "array_add" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_add)) (at level 33) : hacspec_scope.
Infix "array_minus" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_sub)) (at level 33) : hacspec_scope.
Infix "array_mul" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_mul)) (at level 33) : hacspec_scope.
Infix "array_div" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_div)) (at level 33) : hacspec_scope.
Infix "array_or" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_or)) (at level 33) : hacspec_scope.
Infix "array_and" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_and)) (at level 33) : hacspec_scope.

Infix "array_eq" := (array_eq_ eq) (at level 33) : hacspec_scope.
Infix "array_neq" := (fun s1 s2 => negb (array_eq_ eq s1 s2)) (at level 33) : hacspec_scope.
================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Integers.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

(*** Integers *)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

Import choice.Choice.Exports.

(* [both]-lifted machine-integer arithmetic: each operation lifts the pure
   function of the same name from Hacspec_Lib_Pre with [lift1_both] /
   [lift2_both].  [Fail Next Obligation.] asserts every obligation was
   already discharged. *)

Equations int_add {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_add := lift2_both (Hacspec_Lib_Pre.int_add).
Fail Next Obligation.

Equations int_sub {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_sub := (lift2_both (Hacspec_Lib_Pre.int_sub)).
Fail Next Obligation.

Equations int_opp {WS} (x : both (int WS)) : both (int WS) :=
  int_opp := (lift1_both (Hacspec_Lib_Pre.int_opp)).
Fail Next Obligation.

Equations int_mul {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_mul := (lift2_both (Hacspec_Lib_Pre.int_mul)).
Fail Next Obligation.

Equations int_div {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_div := (lift2_both (Hacspec_Lib_Pre.int_div : int _ -> int _ -> int _)).
Fail Next Obligation.

Equations int_mod {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_mod := (lift2_both (Hacspec_Lib_Pre.int_mod : int _ -> int _ -> int _)).
Fail Next Obligation.

Equations int_xor {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_xor := (lift2_both (Hacspec_Lib_Pre.int_xor : int _ -> int _ -> int _)).
Fail Next Obligation.

Equations int_and {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_and := (lift2_both (Hacspec_Lib_Pre.int_and : int _ -> int _ -> int _)).
Fail Next Obligation.

Equations int_or {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=
  int_or := (lift2_both (Hacspec_Lib_Pre.int_or : int _ -> int _ -> int _)).
Fail Next Obligation.

Equations int_not {WS} (x : both (int WS)) : both (int WS) :=
  int_not := (lift1_both (Hacspec_Lib_Pre.int_not : int _ -> int _)).
Fail Next Obligation.

(* Width cast via the unsigned value (zero-extend / truncate). *)
Equations cast_int {WS1 WS2} (x : both (int WS1)) : both (int WS2) :=
  cast_int := (lift1_both (fun (n : int _) => repr _ (unsigned n))).
Fail Next Obligation.

(* End IntType. *)

Notation secret := (lift1_both secret).

(* Operator notations for the lifted arithmetic above. *)
Infix ".%%" := int_modi (at level 40, left associativity) : Z_scope.
Infix ".+" := int_add (at level 77) : hacspec_scope.
Infix ".-" := int_sub (at level 77) : hacspec_scope.
Notation "-" := int_opp (at level 77) : hacspec_scope.
Infix ".*" := int_mul (at level 77) : hacspec_scope.
Infix "./" := int_div (at level 77) : hacspec_scope.
Infix ".%" := int_mod (at level 77) : hacspec_scope.
Infix ".^" := int_xor (at level 77) : hacspec_scope.
Infix ".&" := int_and (at level 77) : hacspec_scope.
Infix ".|" := int_or (at level 77) : hacspec_scope.
Notation "'not'" := int_not (at level 77) : hacspec_scope.

(* Section Uint. *)
(* Lifted (de)classification between secret and public integers. *)
Notation uint8_declassify := (lift1_both uint8_declassify).
Notation int8_declassify := (lift1_both int8_declassify).
Notation uint16_declassify := (lift1_both uint16_declassify).
Notation int16_declassify := (lift1_both int16_declassify).
Notation uint32_declassify := (lift1_both uint32_declassify).
Notation int32_declassify := (lift1_both int32_declassify).
Notation uint64_declassify := (lift1_both uint64_declassify).
Notation int64_declassify := (lift1_both int64_declassify).
Notation uint128_declassify := (lift1_both uint128_declassify).
Notation int128_declassify := (lift1_both int128_declassify).

Notation uint8_classify := (lift1_both uint8_classify).
Notation int8_classify := (lift1_both int8_classify).
Notation uint16_classify := (lift1_both uint16_classify).
Notation int16_classify := (lift1_both int16_classify).
Notation uint32_classify := (lift1_both uint32_classify).
Notation int32_classify := (lift1_both int32_classify).
Notation uint64_classify := (lift1_both uint64_classify).
Notation int64_classify := (lift1_both int64_classify).
Notation uint128_classify := (lift1_both uint128_classify).
Notation int128_classify := (lift1_both int128_classify).

(* CompCert integers' signedness is only interpreted through 'signed' and
   'unsigned', and not in the representation. Therefore, uints are just
   names for their respective ints. *)

Notation declassify_usize_from_uint8 := (lift1_both declassify_usize_from_uint8).
Notation declassify_u32_from_uint32 := (lift1_both declassify_u32_from_uint32).

(* Lifted bit rotations. *)
Notation uint8_rotate_left := (lift2_both uint8_rotate_left).
Notation uint8_rotate_right := (lift2_both uint8_rotate_right).
Notation uint16_rotate_left := (lift2_both uint16_rotate_left).
Notation uint16_rotate_right := (lift2_both uint16_rotate_right).
Notation uint32_rotate_left := (lift2_both uint32_rotate_left).
Notation uint32_rotate_right := (lift2_both uint32_rotate_right).
Notation uint64_rotate_left := (lift2_both uint64_rotate_left).
Notation uint64_rotate_right := (lift2_both uint64_rotate_right).
Notation uint128_rotate_left := (lift2_both uint128_rotate_left).
Notation uint128_rotate_right := (lift2_both uint128_rotate_right).

Notation usize_shift_right_ := (lift2_both (fun u s => u usize_shift_right s)).
(* NOTE(review): unlike its right-shift sibling, this notation builds the
   [both] record by hand and still mentions the three-argument form
   [both (fset []) ([interface]) _], while [both] is used with a single
   type argument everywhere else in this chunk — looks stale; confirm it
   still typechecks before relying on it. *)
Notation usize_shift_left_ :=
  (fun (u: both (fset []) ([interface]) uint_size) (s: both (fset []) ([interface]) int32) =>
     {|
       is_pure := (is_pure u) usize_shift_left (is_pure s) ;
       is_state :=
         {code
            temp_u ← is_state u ;;
            temp_s ← is_state s ;;
            ret (temp_u usize_shift_left temp_s)
         }
     |}).

(**** Operations *)

Notation shift_left_ := (lift2_both shift_left_).
Notation shift_right_ := (lift2_both shift_right_).

(* End Uint. *)

Infix "usize_shift_right" := (usize_shift_right_) (at level 77) : hacspec_scope.
Infix "usize_shift_left" := (usize_shift_left_) (at level 77) : hacspec_scope.
Infix "shift_left" := (shift_left_) (at level 77) : hacspec_scope.
Infix "shift_right" := (shift_right_) (at level 77) : hacspec_scope.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Loops.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Implementation of all Hacspec library functions    *)
(*   for Both types.                                    *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope. Open Scope list_scope. From Hacspec Require Import Hacspec_Lib_Integers. (*** Loops *) Section Loops. Program Fixpoint foldi_ {acc : choice_type} (fuel : nat) (i : both uint_size) (f: both uint_size -> both acc -> both (acc)) (cur : both acc) {struct fuel} : both (acc) := match fuel with | 0 => lift_both cur | S n' => foldi_ n' (int_add i (ret_both one)) f (f i cur) end. Solve All Obligations with (intros ; (fset_equality || solve_in_fset)). Fail Next Obligation. Equations foldi_both_ {acc : choice_type} (fuel : nat) (i : both uint_size) (f: both (uint_size) -> both acc -> both (acc)) (cur : both acc) : both (acc) := foldi_both_ fuel i f cur := match fuel with | 0 => lift_both cur | S n' => solve_lift foldi_both_ n' (int_add i (ret_both one)) (fun x y => solve_lift f (solve_lift x) y) (f i (solve_lift cur)) end. Solve All Obligations with (intros ; (fset_equality || solve_in_fset)). Fail Next Obligation. Equations foldi {acc: choice_type} (lo: both uint_size) (hi: both uint_size) (* {lo <= hi} *) (f: both (uint_size) -> both acc -> both (acc)) (* {i < hi} *) (init: both acc) : both (acc) := foldi lo hi f init := bind_both lo (fun lo => bind_both hi (fun hi => match Z.sub (unsigned hi) (unsigned lo) with | Z0 => lift_both init | Zneg p => lift_both init | Zpos p => foldi_both_ (Pos.to_nat p) (solve_lift (ret_both lo)) (@f) init end)) . Solve All Obligations with (intros ; (fset_equality || solve_in_fset)). Fail Next Obligation. (* Fold done using natural numbers for bounds *) Fixpoint foldi_nat_ {acc : choice_type} (fuel : nat) (i : nat) (f : nat -> chElement acc -> raw_code (acc)) (cur : acc) : raw_code (acc) := match fuel with | O => ret (cur) | S n' => cur' ← f i cur ;; foldi_nat_ n' (S i) f (cur') end. 
(* Nat-indexed counterpart of [foldi]: folds [f] over [lo, hi) with the
   fuel [hi - lo] computed on naturals. *)
Definition foldi_nat
           {acc: choice_type}
           (lo: nat)
           (hi: nat) (* {lo <= hi} *)
           (f: nat -> acc -> raw_code (acc)) (* {i < hi} *)
           (init: acc) : raw_code (acc) :=
  match Nat.sub hi lo with
  | O => ret (init)
  | S n' => foldi_nat_ (S n') lo f init
  end.

(* Unfolding lemma: one loop iteration can be peeled off at the front. *)
Lemma foldi__nat_move_S :
  forall {acc: choice_type}
    (fuel : nat)
    (i : nat)
    (f : nat -> acc -> raw_code (acc))
    (cur : acc),
    (cur' ← f i cur ;; foldi_nat_ fuel (S i) f (cur')) = foldi_nat_ (S fuel) i f cur.
Proof. reflexivity. Qed.

(* Dual unfolding lemma: one loop iteration can be peeled off at the back. *)
Lemma foldi__nat_move_S_append :
  forall {acc: choice_type}
    (fuel : nat)
    (i : nat)
    (f : nat -> acc -> raw_code (acc))
    (cur : acc),
    (cur' ← foldi_nat_ fuel i f (cur) ;; f (i + fuel) (cur')) = foldi_nat_ (S fuel) i f cur.
Proof.
  induction fuel ; intros.
  - rewrite <- foldi__nat_move_S.
    unfold foldi_nat_.
    replace (fun cur' => @ret acc ((cur'))) with (fun cur' => @ret acc cur').
    2: { apply functional_extensionality. reflexivity. }
    rewrite bind_ret.
    unfold bind at 1.
    rewrite Nat.add_0_r.
    reflexivity.
  - rewrite <- foldi__nat_move_S.
    rewrite <- foldi__nat_move_S.
    rewrite bind_assoc.
    f_equal.
    apply functional_extensionality.
    intros.
    replace (i + S fuel) with (S i + fuel) by lia.
    rewrite IHfuel.
    reflexivity.
Qed.

(* The successor on the start index can be moved into the folded function. *)
Lemma foldi__nat_move_to_function :
  forall {acc: choice_type}
    (fuel : nat)
    (i : nat)
    (f : nat -> acc -> raw_code (acc))
    (cur : acc),
    foldi_nat_ fuel i (fun x => f (S x)) (cur) = foldi_nat_ fuel (S i) f cur.
Proof.
  induction fuel ; intros.
  - reflexivity.
  - cbn.
    f_equal.
    apply functional_extensionality.
    intros.
    rewrite IHfuel.
    reflexivity.
Qed.

(* Generalization of the previous lemma to an arbitrary offset [j]. *)
Lemma foldi__nat_move_to_function_add :
  forall {acc: choice_type}
    (fuel : nat)
    (i j : nat)
    (f : nat -> acc -> raw_code (acc))
    (cur : acc),
    foldi_nat_ fuel i (fun x => f (x + j)) (cur) = foldi_nat_ fuel (i + j) f cur.
Proof.
  intros acc fuel i j. generalize dependent i.
  induction j ; intros.
  - rewrite Nat.add_0_r.
    replace (fun x : nat => f (x + 0)) with f.
    reflexivity.
    apply functional_extensionality.
    intros.
    now rewrite Nat.add_0_r.
  - replace (i + S j) with (S i + j) by lia.
    rewrite <- IHj.
    rewrite <- foldi__nat_move_to_function.
    f_equal.
    apply functional_extensionality.
    intros.
    f_equal.
    lia.
Qed.

(* Left identity for [bind_raw_both]. *)
Lemma bind_raw_both_ret :
  forall {A B : choice_type} (x : A) (f : A -> both B),
    bind_raw_both (both_ret x) f = f x.
Proof.
  intros.
  unfold bind_raw_both.
  simpl.
  destruct (f x).
  destruct both_prog.
  simpl.
  reflexivity.
Qed.

(* Associativity for [bind_raw_both]. *)
Lemma bind_raw_both_assoc :
  forall {A B C : choice_type} (v : raw_both A) (k1 : A -> raw_both B) (k2 : B -> raw_both C),
    (bind_raw_both (bind_raw_both v k1) k2 = (bind_raw_both v (fun x => bind_raw_both (k1 x) k2))).
Proof.
  intros.
  unfold bind_raw_both.
  simpl.
  rewrite bind_assoc.
  reflexivity.
Qed.

(* [ValidCode] is monotone: validity is preserved when a location is
   appended at the back of the location list. *)
Lemma valid_remove_back :
  forall x (xs : {fset Location}) I {ct} c,
    ValidCode (fset xs) I c ->
    @ValidCode (fset (xs ++ [x])) I ct c.
Proof.
  intros.
  apply (valid_injectLocations) with (L1 := fset xs).
  - rewrite fset_cat.
    apply fsubsetUl.
  - apply H.
Qed.

(* A list with a visible cons constructor is not the empty list. *)
Lemma list_constructor :
  forall {A : Type} (x : A) (xs : list A) (l : list A) (H : (x :: xs) = l), (l <> []).
Proof.
  intros.
  subst.
  easy.
Qed.

(* Rebuilds a list as "init ++ [last]"; extensionally the identity (see
   [pop_back_is_id]), used to expose the last element to tactics. *)
Definition pop_back {A : Type} (l : list A) :=
  match (rev l) with
  | [] => []
  | (x :: xs) => rev xs ++ [x]
  end.

(* [pop_back] commutes with cons. *)
Theorem pop_back_ignore_front :
  forall {A} (a : A) (l : list A), pop_back (a :: l) = a :: pop_back l.
Proof.
  intros.
  induction l ; intros.
  - reflexivity.
  - unfold pop_back.
    destruct (rev (a :: a0 :: l)) eqn:orev.
    { apply f_equal with (f := @rev A) in orev.
      rewrite (rev_involutive) in orev.
      discriminate orev. }
    cbn in orev.
    destruct (rev (a0 :: l)) eqn:orev2.
    { apply f_equal with (f := @rev A) in orev2.
      rewrite (rev_involutive) in orev2.
      discriminate orev2. }
    cbn in orev2.
    rewrite orev2 in orev ; clear orev2.
    inversion_clear orev.
    rewrite rev_unit.
    reflexivity.
Qed.

(* [pop_back] does not change the list. *)
Theorem pop_back_is_id :
  forall {A} (l : list A), l = pop_back l.
Proof.
  intros.
  induction l.
  - reflexivity.
  - destruct l.
    + reflexivity.
    + rewrite pop_back_ignore_front.
      rewrite <- IHl.
      reflexivity.
Qed.
(* Tactic: rewrite the goal's location list into pop_back form so that
   [valid_remove_back] applies to drop the trailing location. *)
Ltac valid_remove_back' :=
  match goal with
  | _ : _ |- (ValidCode (fset (?l)) _ _) =>
      rewrite (@pop_back_is_id _ l)
  end ;
  apply valid_remove_back.

(* [ValidCode] is monotone: validity is preserved when a location is
   added at the front of the location list. *)
Lemma valid_remove_front :
  forall x xs I {ct} c,
    ValidCode (fset xs) I c ->
    @ValidCode (fset (x :: xs)) I ct c.
Proof.
  intros.
  apply (@valid_injectLocations) with (L1 := fset xs).
  - replace (x :: xs) with (seq.cat [x] xs) by reflexivity.
    rewrite fset_cat.
    apply fsubsetUr.
  - apply H.
Qed.

(* Peel the first iteration off SSProve's [for_loop]. *)
Theorem for_loop_unfold :
  forall c n,
    for_loop (fun m : nat => c m) (S n) =
      (c 0 ;; for_loop (fun m : nat => c (S m)) (n) ).
  cbn.
  induction n ; intros.
  - reflexivity.
  - unfold for_loop ; fold for_loop.
    cbn.
    rewrite IHn.
    rewrite bind_assoc.
    reflexivity.
Qed.

End Loops.

(*** For loop again *)

(* SSProve for loop is inclusive upperbound, while hacspec is exclusive upperbound *)
Definition for_loop_range
           (lo: nat)
           (hi: nat)
           (f : nat -> raw_code 'unit) : raw_code 'unit :=
  match hi - lo with
  | O => @ret 'unit tt
  | S i => for_loop (fun n => f (n + lo)) i
  end.

(* Right-nested product of a list of choice types, accumulated in [init]. *)
Fixpoint list_types_ (l : list choice_type) (init : choice_type) : choice_type :=
  match l with
  | (t :: ts) => list_types_ ts t × init
  | [] => init
  end.

(* Product type collecting every type in [l]; ['unit] for the empty list. *)
Definition list_types (l : list choice_type) : choice_type :=
  match l with
  | [] => 'unit
  | (t :: ts) => list_types_ ts t
  end.

(* Packs a list of dependent (type, value) pairs into the corresponding
   [list_types] tuple; the remaining holes are filled by Program. *)
Program Fixpoint vars_to_tuple (vars : list (∑ (t : choice_type), t)) {measure (length vars)} : list_types (seq.map (fun '(x ; y) => x) vars) :=
  match vars with
  | [] => tt
  | [x] => _
  | (x :: s :: xs) => (vars_to_tuple (s :: xs) , _)
  end.

(* Reads every location in [ℓ], accumulating the values already read in
   [vars], and returns all of them as one tuple.  Proof-mode definition,
   kept transparent with [Defined] so it computes. *)
Fixpoint for_loop_return_ (ℓ : list Location) (vars : list (∑ (t : choice_type), t)) : raw_code (list_types (seq.cat (seq.map (fun '(x ; y) => x) vars) (seq.map (fun '(x ; y) => x) ℓ) )).
  destruct ℓ as [ | l ls ].
  - rewrite seq.cats0.
    pose (ret (vars_to_tuple vars)).
    replace (fun pat : ∑ t : choice_type, t => _)
      with (fun pat : @sigT choice_type (fun t : choice_type => t) =>
              match pat return choice_type with
              | @existT _ _ x _ => x
              end) in r
        by (apply functional_extensionality ; now intros []).
    apply r.
  - apply (getr (l)).
    intros x.
    destruct l.
    cbn in x.
    pose (for_loop_return_ ls (vars ++ [(x0 ; x)])).
    rewrite seq.map_cat in r.
    cbn in r.
    rewrite <- seq.catA in r.
    cbn in r.
    apply r.
Defined.

(* Read all locations in [ℓ] and return their values as one tuple. *)
Definition for_loop_return (ℓ : list Location) : raw_code (list_types (seq.map (fun '(x ; y) => x) ℓ)) := for_loop_return_ ℓ [].

(* Run the loop body over [lo, hi) and then return the final values of
   the locations in [ℓ]. *)
Definition for_loop_locations
           (lo: nat)
           (hi: nat)
           (ℓ : list Location)
           (f : nat -> raw_code 'unit) :=
  match hi - lo with
  | O => @ret 'unit tt
  | S i => for_loop (fun n => f (n + lo)) i
  end ;; for_loop_return ℓ.

(* A leading #put on the left-hand side can be discarded under the
   trivial precondition. *)
Theorem empty_put {B} ℓ v (k h : raw_code B) :
  ⊢ ⦃ true_precond ⦄ k ≈ h ⦃ pre_to_post true_precond ⦄ ->
  ⊢ ⦃ true_precond ⦄ #put ℓ := v ;; k ≈ h ⦃ pre_to_post true_precond ⦄.
Proof.
  intros.
  apply better_r_put_lhs.
  eapply rpre_weaken_rule.
  apply H.
  intros.
  reflexivity.
Qed.

(* [path.merge_sort_pop] preserves the total number of elements. *)
Theorem length_merge_sort_pop : forall {A} leb (l1 : list (list A)) (l2 : list A),
    length (path.merge_sort_pop leb l2 l1) =
      length (seq.cat (seq.flatten l1) l2).
Proof.
  intros.
  generalize dependent l2.
  induction l1 ; intros.
  - cbn.
    reflexivity.
  - cbn.
    rewrite IHl1.
    rewrite seq.size_cat.
    rewrite seq.size_cat.
    rewrite seq.size_cat.
    rewrite path.size_merge.
    rewrite seq.size_cat.
    rewrite ssrnat.addnA.
    f_equal.
    rewrite ssrnat.addnC.
    reflexivity.
Qed.

(* Two-element step of [path.merge_sort_rec], holds by computation. *)
Theorem length_sort_even : forall {A} leb a x (l1 : list (list A)) (l2 : list A),
    length (path.merge_sort_rec leb l1 (a :: x :: l2)) =
      length
        (path.merge_sort_rec leb
           (path.merge_sort_push leb (if leb a x then [a; x] else [x; a]) l1)
           l2).
Proof. reflexivity. Qed.

(* Sorting an exhausted input returns all buffered elements. *)
Theorem length_sort_is_length' : forall {A} leb (l1 : list (list A)),
    length (path.merge_sort_rec leb l1 []) = length (seq.flatten l1).
Proof.
  destruct l1.
  + cbn.
    reflexivity.
  + cbn.
    rewrite length_merge_sort_pop.
    rewrite seq.size_cat.
    rewrite seq.size_cat.
    rewrite path.size_merge.
    rewrite seq.cats0.
    rewrite ssrnat.addnC.
    reflexivity.
Qed.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Ltac.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Tactic support for proofs about Hacspec programs   *)
(*   translated to SSProve code.                        *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.
(* Finds a [foldi] (either under [bind] or under [prog]) on the LHS of a
   relational judgement, abstracts the corresponding pure
   [Hacspec_Lib_Pre.foldi] on the RHS as [Hx], and names the two loop
   bodies: [Hf] (stateful) and [Hg] (pure continuation). *)
Ltac pattern_foldi_both Hx Hf Hg :=
  match goal with
  | [ |- context [ ⊢ ⦃ _ ⦄ bind _ (foldi _ _ _ ?fb) ≈ ?os ⦃ _ ⦄ ] ] =>
      let H := fresh in
      set (H := os)
      ; set (Hx := Hacspec_Lib_Pre.foldi _ _ _ _) in H
      ; pattern Hx in H
      ; subst H
      ; set (Hf := fb)
      ; match goal with
        | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] =>
            set (Hg := gb)
        end
  | [ |- context [ ⊢ ⦃ _ ⦄ prog (foldi _ _ _ ?fb) ≈ ?os ⦃ _ ⦄ ] ] =>
      let H := fresh in
      set (H := os)
      ; set (Hx := Hacspec_Lib_Pre.foldi _ _ _ _) in H
      ; pattern Hx in H
      ; subst H
      ; set (Hf := fb)
      ; match goal with
        | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] =>
            set (Hg := gb)
        end
  end.

(* Variant of [pattern_foldi_both] with machine-generated names. *)
Ltac pattern_foldi_both_fresh :=
  let Hx := fresh in
  let Hf := fresh in
  let Hg := fresh in
  pattern_foldi_both Hx Hf Hg.

(* Replace a stateful computation by its pure counterpart on the left of
   a bind, given that the two agree under the trivial precondition. *)
Theorem r_bind_trans_as_both : forall {B C : choice_type} {L I} (f : choice.Choice.sort B -> raw_code C) (g : B -> raw_code C) (state : code L I (B)) (pure : B),
  forall (P : precond) (Q : postcond _ _),
    (⊢ ⦃ true_precond ⦄ state ≈ lift_to_code (L := L) (I := I) (pure) ⦃ pre_to_post_ret true_precond (pure) ⦄) ->
    (⊢ ⦃ true_precond ⦄ f (pure) ≈ g pure ⦃ Q ⦄) ->
    (⊢ ⦃ P ⦄ temp ← state ;; f temp ≈ g (pure) ⦃ Q ⦄).
Proof.
  intros.
  eapply r_bind_trans with (P_mid := true_precond).
  eapply rpre_weaken_rule.
  apply H.
  reflexivity.
  intros.
  apply H0.
Qed.
(* One simplification step on a relational goal: handle foldi loops,
   bind transitivity, put/get interactions on the same location, and
   administrative bind rewrites.  The [with] clauses make the four
   tactics mutually recursive. *)
Ltac progress_step_code :=
  match_foldi_both
  || (match_bind_trans_both)
  || match goal with
    | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; (getr ?l ?a)) ≈ _ ⦃ _ ⦄ ]] =>
        apply better_r_put_get_lhs
    end
  || match goal with
    | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; (putr ?l ?y ?a)) ≈ _ ⦃ _ ⦄ ]] =>
        apply (r_transL (#put l := y ;; a ))
        ; [ apply contract_put | ]
    end
  || match goal with
    | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; ?a) ≈ ?b ⦃ _ ⦄ ]] =>
        apply (better_r_put_lhs l x a b)
    end
  || (unfold lift_to_code ; apply r_ret)
  || (rewrite bind_assoc)
      with match_foldi_both :=
  (* Abstract a foldi pair and reduce it with [r_bind_trans_as_both]. *)
  let Hx := fresh in
  let Hf := fresh in
  let Hg := fresh in
  pattern_foldi_both Hx Hf Hg
  ; try (apply (@r_bind_trans_as_both) with (f := Hf) (g := Hg))
  ; intros ; subst Hf ; subst Hg ; subst Hx ; hnf
  (* ; [apply foldi_as_both ; [ try (cbn ; Lia.lia) | intros ; unfold lift_to_code ; unfold prog ] | step_code] *)
      with step_code :=
  (* Drive the goal to completion by iterating the step tactics. *)
  repeat (clear_bind || progress_step_code) ; try easy
      with clear_bind :=
  (* Remove administrative binds around [ret]. *)
  (unfold lift_to_code ; match goal with
   | [ |- context [ bind ?y (fun x => ret (_)) ] ] =>
       let H := fresh in
       set (H := y)
       ; rewrite bind_ret
       ; subst H
   | [ |- context [ bind ?y (fun x => ret _) ] ] =>
       let H := fresh in
       set (H := y)
       ; rewrite bind_ret
       ; subst H
   end)
  ||
    (repeat (rewrite bind_assoc)
     ; match goal with
       | [ |- context [ bind (ret (?y)) (fun x => _) ] ] =>
           let H := fresh in
           set (H := y)
           ; rewrite bind_rewrite
           ; subst H
       | [ |- context [ bind (ret ?y) (fun x => _) ] ] =>
           let H := fresh in
           set (H := y)
           ; rewrite bind_rewrite
           ; subst H
       end).

(* Unfold a both program's state code [b_state] and pure part [b_pure]
   to set up an equivalence proof. *)
Ltac init_both_proof b_state b_pure :=
  intros ;
  unfold lift_to_code ;
  cbv delta [b_state] ;
  cbn beta ;
  let H := fresh in
  match goal with
  | [ |- context [(prog {code ?x})] ] =>
      set (H := x)
  end ;
  unfold prog ;
  cbv delta [b_pure] ;
  subst H ;
  cbn beta.

(* Repeatedly apply [f_equal] followed by functional extensionality. *)
Ltac f_equal_fun_ext :=
  repeat (apply f_equal
          ; try (apply Coq.Logic.FunctionalExtensionality.functional_extensionality
                 ; intros)).
(* One step towards discharging a [ValidCode] / valid-package goal:
   simplify binds, case-split on scrutinees, and apply the matching
   validity constructor. *)
Ltac ssprove_valid_step :=
  (progress
     (
       cbv zeta
       || unfold prog
       || (match goal with
          | [ |- context[ @bind ?A ?B (ret ?x) ?f ]] =>
              rewrite bind_rewrite
          end)
       || match goal with
         | [ |- context[match ?x with | true => _ | false => _ end] ] =>
             destruct x
         end
       || match goal with
         | [ |- context[match ?x with | tt => _ end] ] =>
             destruct x
         end
       || match goal with
         | [ |- context[match ?x with | inl _ => _ | inr _ => _ end] ] =>
             destruct x
         end
       || (match goal with
          | [ |- context[bind (bind ?v ?k1) ?k2] ] =>
              rewrite bind_assoc
          end)
       || (apply valid_bind ; [apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid | intros])
       || (apply valid_bind ; [valid_program | intros])
       || (apply valid_bind ; [repeat ssprove_valid_step | intros])
       || (apply valid_opr ; [ (* ssprove_valid_opsig *) | intros ] )
       || match goal with
         | [ |- context [ putr _ _ _ ] ] =>
             (apply valid_putr ; [ (* ssprove_valid_location *) | ])
         end
       || match goal with
         | [ |- context [ getr _ _ ] ] =>
             (apply valid_getr ; [ (* ssprove_valid_location *) | intros])
         end
       || (match goal with
          | [ |- context [ValidCode (fset ?ys) _ (@prog _ _ _ (@foldi _ ?lo ?hi (fset ?xs) _ ?f ?v))] ] =>
              simpl (* !! TODO !! *)
              (* eapply (valid_subset_fset xs ys) ; [ | apply foldi ] *)
              (* ; loc_incl_compute *)
          end)
       || apply valid_ret
       || (hnf in * ; destruct_choice_type_prod)
  )).

(* Fully discharge a validity goal by iterating the step tactic. *)
Ltac ssprove_valid'_2 :=
  repeat ssprove_valid_step
  ; ssprove_valid_program
  (* ; try ssprove_valid_location *).

(* Decompose a package-validity goal into one goal per exported entry. *)
Ltac ssprove_valid_package :=
  (repeat apply valid_package_cons ;
   [ apply valid_empty_package | .. | try (rewrite <- fset0E ; setoid_rewrite @imfset0 ; rewrite in_fset0 ; reflexivity) ] ; intros ; progress unfold prog).

(* Solve goals of the shape [0 <= toword x] by case analysis on [x]
   (zero or successor of a nat). *)
Ltac solve_zero :=
  match goal with
  | [ |- context [ (_ <= _)%Z ] ] =>
      cbn ;
      match goal with
      | [ |- context [ (0 <= toword ?x)%Z ] ] =>
          let H := fresh in
          let H_zero := fresh in
          let H_succ := fresh in
          set (H := x)
          ; destruct_uint_size_as_nat_named H H_zero H_succ
          ; [ reflexivity |
              cbn in H_succ ;
              cbn ;
              try rewrite H_succ ;
              Lia.lia ]
      end
  end.
(* Membership in a normalized union of fsets: split the union into
   disjuncts and pick the one that holds. *)
Ltac solve_in_mem :=
  normalize_fset
  ; repeat (rewrite (@in_fsetU loc_ordType) ; rewrite (is_true_split_or_))
  ; try rewrite <- !fset1E
  ; try rewrite (ssrbool.introT (fset1P _ _) eq_refl)
  ; repeat (reflexivity || (left ; reflexivity) || right).

(* Catch-all solver for the side conditions produced by the hacspec
   translation: fset membership, fset equalities, and validity goals. *)
Ltac solve_ssprove_obligations :=
  repeat (intros ; autounfold ; normalize_fset ;
          (now solve_in_mem) (* TODO: add match goal *)
          || (now fset_equality) (* TODO: add match goal *)
          || (now solve_in_fset) (* TODO: add match goal *)
          || (ssprove_valid'_2)
          || ((now (* try *) (Tactics.program_simpl; fail)))).

(* Solve subset goals, possibly through a known intermediate set. *)
Ltac solve_fsubset_trans :=
  now (solve_match
       || (refine (fsubset_trans _ _) ; [ | eassumption ] ; solve_ssprove_obligations)).

(* Subset goals arising from foldi loops: split the unions and solve
   each component with [solve_fsubset_trans]. *)
Ltac solve_foldi_fsubset_trans :=
  normalize_fset
  ; repeat (rewrite is_true_split_and || rewrite fsubUset)
  ; repeat (try rewrite andb_true_intro ; split)
  ; repeat (solve_fsubset_trans
            || apply fsubsetU ; rewrite is_true_split_or ; ((left ; solve_fsubset_trans) || right)).

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Monad.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Monadic (option/result) bind lifted to [both].     *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

(*** Monad / Bind *)

(* Lift the pure [result_unwrap] to a [both] computation. *)
Definition result_unwrap {a b} (x : result b a) : both (a) :=
  ret_both (result_unwrap x).

(* Unwrap a [result] that is statically known to be on the Ok (inl)
   branch, as witnessed by the implicit proof argument. *)
Definition result_unwrap_safe {a b} (x : result b a) `{match x with inl _ => True | inr _ => False end} : both (a) :=
  ret_both (result_unwrap_safe x (H := H)).

Module choice_typeMonad.

  (* A choice_type monad ([CEMonad]) equipped with a bind that operates
     on [both] computations. *)
  Class BindCode :=
    { mnd :> choice_typeMonad.CEMonad ;
      monad_bind_both {A B : choice_type} (x : both (choice_typeMonad.M (CEMonad := mnd) A)) (f : both A -> both (choice_typeMonad.M (CEMonad := mnd) B)) : both (choice_typeMonad.M (CEMonad := mnd) B) ;
    }.

  (* [BindCode] instance for [result]: propagate [Err], continue on Ok. *)
  #[global] Program Instance result_bind_code C : BindCode :=
    {| mnd := (@choice_typeMonad.result_monad C) ;
      monad_bind_both _ _ x f :=
      bind_both x (fun x =>
                     match x with
                     | inl s => f (solve_lift ret_both s)
                     | inr s => solve_lift ret_both (Err s)
                     end) |}.
  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).
  Fail Next Obligation.

  (* [BindCode] instance for [option]: propagate [None], continue on
     [Some]. *)
  #[global] Program Instance option_bind_code : BindCode :=
    {| mnd := choice_typeMonad.option_monad;
      monad_bind_both A B x f :=
      bind_both x (fun t_x =>
                     match t_x with
                     | Some s => f (solve_lift ret_both s)
                     | None => solve_lift ret_both (@None B : option B)
                     end) |}.
  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).
  Fail Next Obligation.

End choice_typeMonad.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Natmod.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.
From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Pure nat_mod operations lifted to [both] via       *)
(*   [ret_both].                                        *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

(*** Nats *)

Section Todosection.

(* Decidable equality on nat_mod, returned as a both-boolean. *)
Definition nat_mod_equal {p} (a b : nat_mod p) : both 'bool :=
  ret_both (@eqtype.eq_op (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b : 'bool).

(* Reflection lemma relating [nat_mod_equal] to Leibniz equality. *)
Definition nat_mod_equal_reflect {p} {a b} : Bool.reflect (a = b) (is_pure (@nat_mod_equal p a b)) :=
  @eqtype.eqP (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.

(* Constants 0, 1 and 2 in nat_mod p. *)
Definition nat_mod_zero {p} : both ((nat_mod p)) := ret_both (nat_mod_zero).
Definition nat_mod_one {p} : both ((nat_mod p)) := ret_both (nat_mod_one).
Definition nat_mod_two {p} : both ((nat_mod p)) := ret_both (nat_mod_two).

(* Modular arithmetic, lifted pointwise from Hacspec_Lib_Pre. *)
Definition nat_mod_add {n : Z} (a : nat_mod n) (b : nat_mod n) : both (nat_mod n) := ret_both (nat_mod_add a b).

Definition nat_mod_mul {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_mul a b).

Definition nat_mod_sub {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_sub a b).

Definition nat_mod_div {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_div a b).

Definition nat_mod_neg {n : Z} (a:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_neg a).

Definition nat_mod_inv {n : Z} (a:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_inv a).
(* Modular exponentiation with a nat exponent, lifted to [both]. *)
Definition nat_mod_exp_def {p : Z} (a:nat_mod p) (n : nat) : both (nat_mod p) :=
  ret_both (nat_mod_exp_def a n).

(* Exponentiation with a word exponent (read as unsigned). *)
Definition nat_mod_exp {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).
Definition nat_mod_pow {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).

(* Exponentiation with a nat_mod exponent (via its ordinal value). *)
Definition nat_mod_pow_felem {p} (a n : nat_mod p) := @nat_mod_exp_def p a (Z.to_nat (nat_of_ord n)).
Definition nat_mod_pow_self {p} (a n : nat_mod p) := nat_mod_pow_felem a n.

Close Scope nat_scope.

(* Conversions between machine integers / byte sequences and nat_mod,
   lifted pointwise. *)
Definition nat_mod_from_secret_literal {m : Z} (x:int128) : both (nat_mod m) :=
  ret_both (@nat_mod_from_secret_literal m x).

Definition nat_mod_from_literal (m : Z) (x:int128) : both ((nat_mod m)) := nat_mod_from_secret_literal x.

Definition nat_mod_to_byte_seq_le {n : Z} (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_byte_seq_le m).
Definition nat_mod_to_byte_seq_be {n : Z} (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_byte_seq_be m).
Definition nat_mod_to_public_byte_seq_le (n : Z) (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_public_byte_seq_le n m).
Definition nat_mod_to_public_byte_seq_be (n : Z) (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_public_byte_seq_be n m).

(* Bit access on nat_mod values. *)
Definition nat_mod_bit {n : Z} (a : nat_mod n) (i : uint_size) : both 'bool :=
  ret_both (nat_mod_bit a i).

(* Alias for nat_mod_bit *)
Definition nat_get_mod_bit {p} (a : nat_mod p) (i : uint_size) : both 'bool := ret_both (nat_get_mod_bit a i).
Definition nat_mod_get_bit {p} (a : nat_mod p) n : both (nat_mod p) :=
  ret_both (nat_mod_get_bit a n).

(* Array comparisons and word-sequence views, lifted pointwise. *)
Definition array_declassify_eq {A l} (x : nseq_ A l) (y : nseq_ A l) : both 'bool := ret_both (array_declassify_eq x y).
Definition array_to_le_uint32s {A l} (x : nseq_ A l) : both (seq uint32) := ret_both (array_to_le_uint32s x).
Definition array_to_be_uint32s {l} (x : nseq_ uint8 l) : both (seq uint32) := ret_both (array_to_be_uint32s x).
(* Remaining word-sequence views of byte arrays, lifted pointwise. *)
Definition array_to_le_uint64s {A l} (x : nseq_ A l) : both (seq uint64) := ret_both (array_to_le_uint64s x).
Definition array_to_be_uint64s {l} (x : nseq_ uint8 l) : both (seq uint64) := ret_both (array_to_be_uint64s x).
Definition array_to_le_uint128s {A l} (x : nseq_ A l) : both (seq uint128) := ret_both (array_to_le_uint128s x).
Definition array_to_be_uint128s {l} (x : nseq_ uint8 l) : both (seq uint128) := ret_both (array_to_be_uint128s x).
Definition array_to_le_bytes {A l} (x : nseq_ A l) : both (seq uint8) := ret_both (array_to_le_bytes x).
Definition array_to_be_bytes {A l} (x : nseq_ A l) : both (seq uint8) := ret_both (array_to_be_bytes x).

(* Decode a little-endian byte sequence into nat_mod n. *)
Definition nat_mod_from_byte_seq_le {A n} (x : seq A) : both (nat_mod n) := ret_both (nat_mod_from_byte_seq_le x).

(* Index of the most significant set bit, lifted pointwise. *)
Definition most_significant_bit {m} (x : nat_mod m) (n : uint_size) : both (uint_size) :=
  ret_both (most_significant_bit x n).

(* We assume 2^x < m *)
Definition nat_mod_pow2 (m : Z) {WS} (x : (@int WS)) : both ((nat_mod m)) :=
  ret_both (nat_mod_pow2 m (Z.to_nat (unsigned x))).

End Todosection.

(* Arithmetic notations for the lifted nat_mod operations. *)
Infix "+%" := nat_mod_add (at level 33) : hacspec_scope.
Infix "*%" := nat_mod_mul (at level 33) : hacspec_scope.
Infix "-%" := nat_mod_sub (at level 33) : hacspec_scope.
Infix "/%" := nat_mod_div (at level 33) : hacspec_scope.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Notation.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(*   Comparison operators and notations for [both].     *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

(*** Notation *)

Section TodoSection3.

(* Decode a big-endian byte sequence into nat_mod n, lifted to [both]. *)
Definition nat_mod_from_byte_seq_be {A n} (x : seq A) : both (nat_mod n) :=
  ret_both (nat_mod_from_byte_seq_be x).

End TodoSection3.

(* Pointwise lifting of the EqDec/Comparable operators to [both]. *)
Definition neqb {A : choice_type} `{EqDec A} : both A -> both A -> both 'bool := lift2_both (fun x y => negb (eqb x y) : 'bool).
Definition eqb {A : choice_type} `{EqDec A} : both A -> both A -> both 'bool := lift2_both (fun x y => eqb x y : 'bool).

Definition ltb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => ltb x y : 'bool).
Definition leb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => leb x y : 'bool).
Definition gtb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => gtb x y : 'bool).
Definition geb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => geb x y : 'bool).

(* hacspec comparison notations. *)
Infix "=.?" := eqb (at level 40) : hacspec_scope.
Infix "!=.?" := neqb (at level 40) : hacspec_scope.
Infix "<.?" := ltb (at level 42) : hacspec_scope.
Infix "<=.?" := leb (at level 42) : hacspec_scope.
Infix ">.?" := gtb (at level 42) : hacspec_scope.
Infix ">=.?" := geb (at level 42) : hacspec_scope.

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Pre.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.

From extructures Require Import ord fset fmap.

Require Import ChoiceEquality.

From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.

From Coq Require Import ZArith List.
Import ListNotations.

(*****************************************************)
(*   Implementation of all Hacspec library functions *)
(*   for choice_type types.                          *)
(*****************************************************)

(*** Integers *)

Declare Scope hacspec_scope.

Open Scope list_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.

Require Import Hacspec_Lib_Comparable.

Import choice.Choice.Exports.

(* Section IntType. *)

(* Machine integers are jasmin/mathcomp words; [unsigned]/[signed]
   interpret them as Z, and [repr] injects a Z into a word. *)
Notation int := chWord.
Notation unsigned := wunsigned.
Notation signed := wsigned.
Notation repr := (fun WS x => wrepr WS x : int WS).
Notation rol := (fun u s => wrol u (unsigned s)).
Notation ror := (fun u s => wror u (unsigned s)).

(* Fixed-width integer aliases. *)
Notation int8 := (@int U8).
Notation int16 := (@int U16).
Notation int32 := (@int U32).
Notation int64 := (@int U64).
Notation int128 := (@int U128).

Notation int_modi := wmodi.

(* Word arithmetic, wrapped as definitions so they can be unfolded and
   rewritten by name in proofs. *)
Definition int_add {WS} : @int WS -> @int WS -> @int WS := @add_word WS.
Definition int_sub {WS} : @int WS -> @int WS -> @int WS := @sub_word WS.
Definition int_opp {WS} : @int WS -> @int WS := @opp_word WS.
Definition int_mul {WS} : @int WS -> @int WS -> @int WS := @mul_word WS.
Notation int_div := wdiv.
Notation int_mod := wmod.
Notation int_xor := wxor.
Notation int_and := wand.
Notation int_or := wor.

Definition int_not {WS : wsize} : (@int WS) -> (@int WS) := wnot.
(* Word constants 0 and 1. *)
Definition zero {WS : wsize} : ((@int WS)) := @word0 WS.
Definition one {WS : wsize} : ((@int WS)) := @word1 (pred WS).

(* Zero is a left identity for word addition. *)
Lemma add_zero_l : forall {WS : wsize} n, int_add (@zero WS) n = n.
Proof.
  intros.
  apply add0w.
Defined.

(* Adding one corresponds to the successor on the Z representation. *)
Lemma add_one_l : forall {WS : wsize} n, int_add one (repr WS n) = repr _ (Z.succ n).
Proof.
  intros.
  setoid_rewrite wrepr_add.
  rewrite urepr_word.
  replace (urepr (@one WS)) with 1%Z by reflexivity.
  replace toword with urepr by reflexivity.
  setoid_rewrite ureprK.
  rewrite ssralg.GRing.addrC.
  now setoid_rewrite mkword1E.
Defined.

(* [repr 0] is the zero word. *)
Lemma repr0_is_zero : forall {WS : wsize}, repr WS 0%Z = zero.
Proof.
  intros.
  now rewrite wrepr0.
Qed.

(* [repr] is additive. *)
Lemma add_repr : forall {WS : wsize} (n m : Z), int_add (repr WS n) (repr WS m) = (repr WS (n + m)%Z).
Proof. intros ; now rewrite wrepr_add. Qed.

(* End IntType. *)

(* Abstract classification of a public integer as a secret one. *)
Axiom secret : forall {WS : wsize}, ((@int WS)) -> ((@int WS)).

(* Arithmetic and bitwise notations for machine integers. *)
Infix ".%%" := int_modi (at level 40, left associativity) : Z_scope.
Infix ".+" := int_add (at level 77) : hacspec_scope.
Infix ".-" := int_sub (at level 77) : hacspec_scope.
Notation "-" := int_opp (at level 77) : hacspec_scope.
Infix ".*" := int_mul (at level 77) : hacspec_scope.
Infix "./" := int_div (at level 77) : hacspec_scope.
Infix ".%" := int_mod (at level 77) : hacspec_scope.
Infix ".^" := int_xor (at level 77) : hacspec_scope.
Infix ".&" := int_and (at level 77) : hacspec_scope.
Infix ".|" := int_or (at level 77) : hacspec_scope.
Notation "'not'" := int_not (at level 77) : hacspec_scope.

(* Comparisons, boolean equality, and notation *)

(* Decidable equality and ordering instances for nat. *)
Global Program Instance nat_eqdec : EqDec nat := {
    eqb := Nat.eqb;
    eqb_leibniz := Nat.eqb_eq ;
  }.

Global Instance nat_comparable : Comparable nat := {
    ltb := Nat.ltb;
    leb := Nat.leb;
    gtb a b := Nat.ltb b a;
    geb a b := Nat.leb b a;
  }.

(* Decidable equality and ordering instances for N. *)
Global Instance N_eqdec : EqDec N := {
    eqb := N.eqb;
    eqb_leibniz := N.eqb_eq ;
  }.

Global Instance N_comparable : Comparable N := {
    ltb := N.ltb;
    leb := N.leb;
    gtb a b := N.ltb b a;
    geb a b := N.leb b a;
  }.
(* Decidable equality and ordering instances for Z. *)
Global Instance Z_eqdec : EqDec Z := {
    eqb := Z.eqb;
    eqb_leibniz := Z.eqb_eq ;
  }.

Global Instance Z_comparable : Comparable Z := {
    ltb := Z.ltb;
    leb := Z.leb;
    gtb a b := Z.ltb b a;
    geb a b := Z.leb b a;
  }.

(* ssreflect boolean equality on words agrees with Leibniz equality. *)
Lemma int_eqb_eq : forall {WS : wsize} (a b : (@int WS)), eqtype.eq_op a b = true <-> a = b.
Proof.
  symmetry ;
  exact (ssrbool.rwP (@eqtype.eqP _ a b)).
Qed.

Global Instance int_eqdec `{WS : wsize}: EqDec ((@int WS)) := {
    eqb := eqtype.eq_op;
    eqb_leibniz := int_eqb_eq ;
  }.

(* Comparisons on words use the unsigned order. *)
Global Instance int_comparable `{WS : wsize} : Comparable ((@int WS)) :=
  eq_dec_lt_Comparable (wlt Unsigned).

(* Abstract declassify/classify conversions between secret and public
   integers (uninterpreted in this model). *)
Axiom uint8_declassify : int8 -> int8.
Axiom int8_declassify : int8 -> int8.
Axiom uint16_declassify : int16 -> int16.
Axiom int16_declassify : int16 -> int16.
Axiom uint32_declassify : int32 -> int32.
Axiom int32_declassify : int32 -> int32.
Axiom uint64_declassify : int64 -> int64.
Axiom int64_declassify : int64 -> int64.
Axiom uint128_declassify : int128 -> int128.
Axiom int128_declassify : int128 -> int128.

Axiom uint8_classify : int8 -> int8.
Axiom int8_classify : int8 -> int8.
Axiom uint16_classify : int16 -> int16.
Axiom int16_classify : int16 -> int16.
Axiom uint32_classify : int32 -> int32.
Axiom int32_classify : int32 -> int32.
Axiom uint64_classify : int64 -> int64.
Axiom int64_classify : int64 -> int64.
Axiom uint128_classify : int128 -> int128.
Axiom int128_classify : int128 -> int128.

(* CompCert integers' signedness is only interpreted through 'signed' and
   'unsigned', and not in the representation. Therefore, uints are just
   names for their respective ints. *)
Notation uint8 := int8.
Notation uint32 := int32.
Notation uint64 := int64.
Notation uint128 := int128.

(* usize/isize are modelled as 32-bit words. *)
Definition uint_size : choice_type := int32.
Definition int_size : choice_type := int32.

Axiom declassify_usize_from_uint8 : uint8 -> uint_size.
Axiom declassify_u32_from_uint32 : uint32 -> uint32.
(* Represents any type that can be converted to uint_size and back *)
Class UInt_sizeable (A : Type) := {
  usize : A -> uint_size;
  from_uint_size :> uint_size -> A;
}.
Arguments usize {_} {_}.
Arguments from_uint_size {_} {_}.

(* Identity function exposing a [uint_size] as a 32-bit word, registered
   as a coercion between the two [choice.Choice.sort] views. *)
Definition from_uint_size_int (x : uint_size) : @int U32 := x.
Coercion from_uint_size_int : choice.Choice.sort >-> choice.Choice.sort.

(* Unsigned round-trip instances for [nat], [N] and [Z]. *)
Global Instance nat_uint_sizeable : UInt_sizeable nat := {
  usize n := repr _ (Z.of_nat n);
  from_uint_size n := Z.to_nat (unsigned n);
}.

Global Instance N_uint_sizeable : UInt_sizeable N := {
  usize n := repr _ (Z.of_N n);
  from_uint_size n := Z.to_N (unsigned n);
}.

Global Instance Z_uint_sizeable : UInt_sizeable Z := {
  usize n := repr _ n;
  from_uint_size n := unsigned n;
}.

(* Same, but for int_size *)
Class Int_sizeable (A : Type) := {
  isize : A -> int_size;
  from_int_size : int_size -> A;
}.
Arguments isize {_} {_}.
Arguments from_int_size {_} {_}.

(* Signed round-trip instances (note: [signed] instead of [unsigned]). *)
Global Instance nat_Int_sizeable : Int_sizeable nat := {
  isize n := repr _ (Z.of_nat n);
  from_int_size n := Z.to_nat (signed n);
}.

Global Instance N_Int_sizeable : Int_sizeable N := {
  isize n := repr _ (Z.of_N n);
  from_int_size n := Z.to_N (signed n);
}.

Global Instance Z_Int_sizeable : Int_sizeable Z := {
  isize n := repr _ n;
  from_int_size n := signed n;
}.

(**** Public integers *)

(* Conversions from [uint_size] into each word size via [unsigned]/[repr]. *)
Definition pub_u8 (n : uint_size) : int8 := repr _ (unsigned n).
Definition pub_i8 (n : uint_size) : int8 := repr _ (unsigned n).
Definition pub_u16 (n : uint_size) : int16 := repr _ (unsigned n).
Definition pub_i16 (n : uint_size) : int16 := repr _ (unsigned n).
Definition pub_u32 (n : uint_size) : int32 := repr _ (unsigned n).
Definition pub_i32 (n : uint_size) : int32 := repr _ (unsigned n).
Definition pub_u64 (n : uint_size) : int64 := repr _ (unsigned n).
Definition pub_i64 (n : uint_size) : int64 := repr _ (unsigned n).
Definition pub_u128 (n : uint_size) : int128 := repr _ (unsigned n).
Definition pub_i128 (n : uint_size) : int128 := repr _ (unsigned n).
(**** Operations *)

(* Rotations, delegating to the word library's [rol] / [ror]. *)
Definition uint8_rotate_left (u: int8) (s: int8) : int8 := rol u s.
Definition uint8_rotate_right (u: int8) (s: int8) : int8 := ror u s.
Definition uint16_rotate_left (u: int16) (s: int16) : int16 := rol u s.
Definition uint16_rotate_right (u: int16) (s: int16) : int16 := ror u s.
Definition uint32_rotate_left (u: int32) (s: int32) : int32 := rol u s.
Definition uint32_rotate_right (u: int32) (s: int32) : int32 := ror u s.
Definition uint64_rotate_left (u: int64) (s: int64) : int64 := rol u s.
Definition uint64_rotate_right (u: int64) (s: int64) : int64 := ror u s.
Definition uint128_rotate_left (u: int128) (s: int128) : int128 := rol u s.
Definition uint128_rotate_right (u: int128) (s: int128) : int128 := ror u s.

(* Right shift of a [uint_size] via [wshr]. *)
Definition usize_shift_right (u: uint_size) (s: int32) : uint_size :=
  wshr u (unsigned (@repr U32 (from_uint_size s))).
Infix "usize_shift_right" := (usize_shift_right) (at level 77) : hacspec_scope.

(* NOTE(review): unlike [usize_shift_right] (a shift, [wshr]), this is
   implemented with [rol] (a rotation) — confirm the asymmetry is intended. *)
Definition usize_shift_left (u: uint_size) (s: int32) : uint_size :=
  (rol u s).
Infix "usize_shift_left" := (usize_shift_left) (at level 77) : hacspec_scope.

(* Word addition is already wrapping, so this is plain [.+]. *)
Definition pub_uint128_wrapping_add (x y: int128) : int128 := x .+ y.

(* Generic shifts: the [uint_size] shift amount [j] is re-represented at
   width [WS] before being interpreted as an unsigned count. *)
Definition shift_left_ `{WS : wsize} (i : (@int WS)) (j : uint_size) : (@int WS) :=
  wshl i (unsigned (@repr WS (from_uint_size j))).

Definition shift_right_ `{WS : wsize} (i : (@int WS)) (j : uint_size) : (@int WS):=
  wshr i (unsigned (@repr WS (from_uint_size j))) .

Infix "shift_left" := (shift_left_) (at level 77) : hacspec_scope.
Infix "shift_right" := (shift_right_) (at level 77) : hacspec_scope.

(*** Positive util *)

Section Util.

(* Unary-to-positive conversion; both [O] and [S O] map to [1]. *)
Fixpoint binary_representation_pre (n : nat) {struct n}: positive :=
  match n with
  | O => 1
  | S O => 1
  | S n => Pos.succ (binary_representation_pre n)
  end%positive.

(* Wrapper carrying a (computationally unused) proof that [n] is nonzero. *)
Definition binary_representation (n : nat) `(n <> O) := binary_representation_pre n.
(* [binary_representation] commutes with successor (for nonzero inputs). *)
Theorem positive_is_succs : forall n, forall (H : n <> O) (K : S n <> O),
    @binary_representation (S n) K = Pos.succ (@binary_representation n H).
Proof. induction n ; [contradiction | reflexivity]. Qed.

(* Conversion of positive to binary representation *)
Theorem positive_to_positive_succs : forall p,
    binary_representation (Pos.to_nat p) (Nat.neq_sym _ _ (Nat.lt_neq _ _ (Pos2Nat.is_pos p))) = p.
Proof.
  intros p.
  generalize dependent (Nat.neq_sym 0 (Pos.to_nat p) (Nat.lt_neq 0 (Pos.to_nat p) (Pos2Nat.is_pos p))).
  destruct Pos.to_nat eqn:ptno.
  - contradiction.
  - generalize dependent p.
    induction n ; intros.
    + cbn.
      apply Pos2Nat.inj.
      symmetry.
      apply ptno.
    + rewrite positive_is_succs with (H := Nat.neq_succ_0 n).
      rewrite IHn with (p := Pos.of_nat (S n)).
      * rewrite <- Nat2Pos.inj_succ by apply Nat.neq_succ_0.
        rewrite <- ptno.
        apply Pos2Nat.id.
      * apply Nat2Pos.id.
        apply Nat.neq_succ_0.
Qed.

(*** Uint size util *)

(* If a natural number is in bound then a smaller natural number is still in bound *)
Lemma range_of_nat_succ : forall {WS : wsize},
    forall i, (Z.pred 0 < Z.of_nat (S i) < modulus WS)%Z -> (Z.pred 0 < Z.of_nat i < modulus WS)%Z.
Proof. lia. Qed.

(* Conversion to equivalent bound *)
Lemma modulus_range_helper : forall {WS : wsize},
    forall i, (Z.pred 0 < i < modulus WS)%Z -> (0 <= i <= wmax_unsigned WS)%Z.
Proof.
  intros.
  unfold wmax_unsigned.
  unfold wbase.
  unfold nat_of_wsize in H.
  lia.
Qed.

(* [unsigned] is a left inverse of [repr] for in-range values. *)
Definition unsigned_repr_alt {WS : wsize} (a : Z) `((Z.pred 0 < a < modulus WS)%Z) :
    unsigned (@repr WS a) = a.
Proof.
  apply wunsigned_repr_small.
  intros.
  unfold wbase.
  unfold nat_of_wsize in H.
  lia.
Qed.

(* Zero is within bounds for every word size. *)
Theorem zero_always_modulus {WS : wsize} : (Z.pred 0 < 0 < modulus WS)%Z.
Proof. easy. Qed.
(* any uint_size can be represented as a natural number and a bound *)
(* this is easier for proofs, however less efficient for computation *)
(* as Z uses a binary representation *)
Theorem uint_size_as_nat : forall (us: uint_size),
    { n : nat | us = repr _ (Z.of_nat n) /\ (Z.pred 0 < Z.of_nat n < @modulus U32)%Z}.
Proof.
  intros.
  exists (Z.to_nat (unsigned us)).
  rewrite Z2Nat.id by apply (ssrbool.elimT (word_ssrZ.leZP _ _) (urepr_ge0 us)).
  split.
  - rewrite wrepr_unsigned.
    reflexivity.
  - pose (wunsigned_range us).
    unfold wbase in a.
    unfold nat_of_wsize.
    cbn in *.
    lia.
Qed.

(* destruct uint_size as you would a natural number *)
Definition destruct_uint_size_as_nat (a : uint_size) : forall (P : uint_size -> Prop),
    forall (zero_case : P (repr _ 0%Z)),
    forall (succ_case : forall (n : nat), (Z.pred 0 < Z.of_nat n < @modulus U32)%Z -> P (repr _ (Z.of_nat n))),
    P a.
Proof.
  intros.
  destruct (uint_size_as_nat a) as [ n y ] ; destruct y as [ya yb] ; subst.
  destruct n.
  - apply zero_case.
  - apply succ_case.
    apply yb.
Qed.

(* induction for uint_size as you would do for a natural number *)
Definition induction_uint_size_as_nat : forall (P : uint_size -> Prop),
    (P (repr _ 0%Z)) ->
    (forall n, (Z.pred 0 < Z.succ (Z.of_nat n) < @modulus U32)%Z -> P (repr _ (Z.of_nat n)) -> P (repr _ (Z.succ (Z.of_nat n)))) ->
    forall (a : uint_size), P a.
Proof.
  intros P H_zero H_ind a.
  destruct (uint_size_as_nat a) as [ n y ] ; destruct y as [ya yb] ; subst.
  induction n.
  - apply H_zero.
  - rewrite Nat2Z.inj_succ.
    apply H_ind.
    + rewrite <- Nat2Z.inj_succ.
      apply yb.
    + apply IHn.
      lia.
Qed.

(* conversion of usize to positive or zero and the respective bound *)
Theorem uint_size_as_positive : forall (us: uint_size),
    { pu : unit + positive |
      match pu with
      | inl u => us = repr _ Z0
      | inr p => us = repr _ (Z.pos p) /\ (Z.pred 0 < Z.pos p < @modulus U32)%Z
      end }.
Proof.
  intros.
  destruct us as [val H_].
  pose proof (H := H_).
  apply Bool.andb_true_iff in H as [lt gt].
  apply (ssrbool.elimT (word_ssrZ.leZP _ _)) in lt.
  apply (ssrbool.elimT (word_ssrZ.ltZP _ _)) in gt.
  destruct val.
  - exists (inl tt).
    apply word_ext.
    reflexivity.
  - exists (inr p).
    split.
    + apply word_ext.
      rewrite Zmod_small by (unfold nat_of_wsize in gt ; lia).
      reflexivity.
    + cbn in gt.
      unfold nat_of_wsize.
      simpl.
      lia.
  - contradiction.
Defined.

(* destruction of uint_size as positive *)
Definition destruct_uint_size_as_positive (a : uint_size) : forall (P : uint_size -> Prop),
    (P (repr _ 0%Z)) ->
    (forall b, (Z.pred 0 < Z.pos b < @modulus U32)%Z -> P (repr _ (Z.pos b))) ->
    P a.
Proof.
  intros P H_zero H_succ.
  destruct (uint_size_as_positive a) as [ [ _ | b ] y ] ; [ subst | destruct y as [ya yb] ; subst ].
  - apply H_zero.
  - apply H_succ.
    apply yb.
Qed.

(* induction of uint_size as positive *)
Definition induction_uint_size_as_positive : forall (P : uint_size -> Prop),
    (P (repr _ 0%Z)) ->
    (P (repr _ 1%Z)) ->
    (forall b, (Z.pred 0 < Z.succ (Z.pos b) < @modulus U32)%Z -> P (repr _ (Z.pos b)) -> P (repr _ (Z.succ (Z.pos b)))) ->
    forall (a : uint_size), P a.
Proof.
  intros P H_zero H_one H_ind a.
  destruct (uint_size_as_positive a) as [ [ _ | b ] y ] ; [ subst | destruct y as [ya yb] ; subst ].
  - apply H_zero.
  - pose proof (pos_succ_b := positive_to_positive_succs b)
    ; symmetry in pos_succ_b
    ; rewrite pos_succ_b in *
    ; clear pos_succ_b.
    generalize dependent (Nat.neq_sym 0 (Pos.to_nat b) (Nat.lt_neq 0 (Pos.to_nat b) (Pos2Nat.is_pos b))).
    induction (Pos.to_nat b).
    + contradiction.
    + intros n_neq yb.
      destruct n.
      * apply H_one.
      * rewrite (positive_is_succs _ (Nat.neq_succ_0 n) n_neq) in *.
        rewrite Pos2Z.inj_succ in *.
        apply H_ind.
        -- apply yb.
        -- apply IHn.
           lia.
Qed.

End Util.
(* Tactic: case-split a [uint_size] into zero / successor-of-nat cases,
   binding the produced [unsigned (repr _ _) = _] facts to given names. *)
Global Ltac destruct_uint_size_as_nat_named a H_zero H_succ :=
  generalize dependent a ;
  intros a ;
  apply (destruct_uint_size_as_nat a) ; [
      pose proof (H_zero := @unsigned_repr_alt U32 0 zero_always_modulus)
    | let n := fresh in
      let H := fresh in
      intros n H ;
      pose proof (H_succ := @unsigned_repr_alt U32 _ H)] ;
  intros.

(* Same as above with fresh hypothesis names. *)
Global Ltac destruct_uint_size_as_nat a :=
  let H_zero := fresh in
  let H_succ := fresh in
  destruct_uint_size_as_nat_named a H_zero H_succ.

(* Tactic: perform induction on a [uint_size] viewed as a natural number. *)
Global Ltac induction_uint_size_as_nat var :=
  generalize dependent var ;
  intros var ;
  apply induction_uint_size_as_nat with (a := var) ; [
      pose proof (@unsigned_repr_alt U32 0 zero_always_modulus)
    | let n := fresh in
      let IH := fresh in
      intros n IH ;
      pose proof (@unsigned_repr_alt U32 _ IH)] ;
  intros.

(*** Loops *)

Open Scope nat_scope.

(* Fuelled fold: applies [f] at [i], [i .+ one], ... for [fuel] steps. *)
Fixpoint foldi_ {acc : Type} (fuel : nat) (i : uint_size) (f : uint_size -> acc -> acc) (cur : acc) : acc :=
  match fuel with
  | 0 => cur
  | S n' => foldi_ n' (i .+ one) f (f i cur)
  end.
Close Scope nat_scope.

(* Fold over the range [lo, hi): the fuel is [hi - lo] (empty when <= 0). *)
Definition foldi {acc: Type} (lo: uint_size) (hi: uint_size) (* {lo <= hi} *) (f: (uint_size) -> acc -> acc) (* {i < hi} *) (init: acc) : acc :=
  match Z.sub (unsigned hi) (unsigned lo) with
  | Z0 => init
  | Zneg p => init
  | Zpos p => foldi_ (Pos.to_nat p) lo f init
  end.

(* Fold done using natural numbers for bounds *)
Fixpoint foldi_nat_ {acc : Type} (fuel : nat) (i : nat) (f : nat -> acc -> acc) (cur : acc) : acc :=
  match fuel with
  | O => cur
  | S n' => foldi_nat_ n' (S i) f (f i cur)
  end.

(* Fuelled loop applying [f] with the highest index in the outermost call. *)
Fixpoint for_loop_ {acc : Type} (fuel : nat) (f : nat -> acc -> acc) (cur : acc) : acc :=
  match fuel with
  | O => cur
  | S n' => f n' (for_loop_ n' f cur)
  end.

(* Natural-number analogue of [foldi] over the range [lo, hi). *)
Definition foldi_nat {acc: Type} (lo: nat) (hi: nat) (* {lo <= hi} *) (f: nat -> acc -> acc) (* {i < hi} *) (init: acc) : acc :=
  match Nat.sub hi lo with
  | O => init
  | S n' => foldi_nat_ (S n') lo f init
  end.
(* Loop over the nat range [lo, hi) expressed through [for_loop_]. *)
Definition for_loop_range {acc: Type} (lo: nat) (hi: nat) (* {lo <= hi} *) (f: nat -> acc -> acc) (* {i < hi} *) (init: acc) : acc :=
  match Nat.sub hi lo with
  | O => init
  | S n' => for_loop_ (S n') (fun x => f (x + lo)%nat) init
  end.

(* Loop with [uint_size] bounds, converting through [UInt_sizeable]. *)
Definition for_loop_usize {acc : Type} (lo hi : uint_size) (f : uint_size -> acc -> acc) init : acc :=
  for_loop_range (from_uint_size lo) (from_uint_size hi) (fun x => f (usize x)) init.

(* One fuel step of [foldi_] unfolds to one application of [f]. *)
Lemma foldi__move_S : forall {acc: Type} (fuel : nat) (i : uint_size) (f : uint_size -> acc -> acc) (cur : acc),
    foldi_ fuel (i .+ one) f (f i cur) = foldi_ (S fuel) i f cur.
Proof. reflexivity. Qed.

(* Same unfolding law for the nat-indexed fold. *)
Lemma foldi__nat_move_S : forall {acc: Type} (fuel : nat) (i : nat) (f : nat -> acc -> acc) (cur : acc),
    foldi_nat_ fuel (S i) f (f i cur) = foldi_nat_ (S fuel) i f cur.
Proof. reflexivity. Qed.

(* The last application of [f] (at index [i + fuel]) can be peeled off. *)
Lemma foldi__nat_move_S_append : forall {acc: Type} (fuel : nat) (i : nat) (f : nat -> acc -> acc) (cur : acc),
    f (i + fuel)%nat (foldi_nat_ fuel i f cur) = foldi_nat_ (S fuel) i f cur.
Proof.
  induction fuel ; intros.
  - rewrite <- foldi__nat_move_S.
    unfold foldi_nat_.
    rewrite Nat.add_0_r.
    reflexivity.
  - rewrite <- foldi__nat_move_S.
    rewrite <- foldi__nat_move_S.
    replace (i + S fuel)%nat with (S i + fuel)%nat by lia.
    rewrite IHfuel.
    reflexivity.
Qed.

(* Starting from index 0, [foldi_nat_] and [for_loop_] agree. *)
Theorem foldi_for_loop_eq : forall {acc} fuel f (cur : acc),
    foldi_nat_ fuel 0 f cur = for_loop_ fuel f cur.
Proof.
  induction fuel ; intros.
  - reflexivity.
  - unfold for_loop_ ; fold (@for_loop_ acc).
    rewrite <- foldi__nat_move_S_append.
    rewrite <- IHfuel.
    reflexivity.
Qed.

(* The start offset can be moved into the iteration function. *)
Lemma foldi__nat_move_to_function : forall {acc: choice_type} (fuel : nat) (i : nat) (f : nat -> acc -> acc) (cur : acc),
    foldi_nat_ fuel i (fun x => f (S x)) (cur) = foldi_nat_ fuel (S i) f cur.
Proof.
  induction fuel ; intros.
  - reflexivity.
  - cbn.
    rewrite IHfuel.
    reflexivity.
Qed.
(* Additive generalization of [foldi__nat_move_to_function]: shifting the
   start index by [j] is the same as offsetting the iteration function. *)
Lemma foldi__nat_move_to_function_add : forall {acc: choice_type} (fuel : nat) (i j : nat) (f : nat -> acc -> acc) (cur : acc),
    foldi_nat_ fuel i (fun x => f (x + j)%nat) (cur) = foldi_nat_ fuel (i + j) f cur.
Proof.
  intros acc fuel i j.
  generalize dependent i.
  induction j ; intros.
  - rewrite Nat.add_0_r.
    replace (fun x : nat => f (x + 0)%nat) with f.
    reflexivity.
    apply functional_extensionality.
    intros.
    now rewrite Nat.add_0_r.
  - replace (i + S j)%nat with (S i + j)%nat by lia.
    rewrite <- IHj.
    rewrite <- foldi__nat_move_to_function.
    f_equal.
    apply functional_extensionality.
    intros.
    f_equal.
    lia.
Qed.

(* [foldi_nat] and [for_loop_range] compute the same result. *)
Theorem foldi_for_loop_range_eq : forall {acc : choice_type} lo hi f (cur : acc),
    foldi_nat lo hi f cur = for_loop_range lo hi f cur.
Proof.
  unfold foldi_nat.
  unfold for_loop_range.
  intros.
  destruct (hi - lo)%nat.
  - reflexivity.
  - rewrite <- foldi_for_loop_eq.
    induction lo.
    + f_equal.
      apply functional_extensionality.
      intros.
      now rewrite Nat.add_0_r.
    + replace (fun x : nat => f (x + S lo)%nat) with (fun x : nat => f (S (x + lo))%nat).
      2:{ apply functional_extensionality. intros. f_equal. lia. }
      rewrite (foldi__nat_move_to_function (S n) 0 (fun x => f (x + lo)%nat)).
      rewrite foldi__nat_move_to_function_add.
      reflexivity.
Qed.

(* You can do one iteration of the fold by burning a unit of fuel *)
Lemma foldi__move_S_fuel : forall {acc: Type} (fuel : nat) (i : uint_size) (f : uint_size -> acc -> acc) (cur : acc),
    (0 <= Z.of_nat fuel <= wmax_unsigned U32)%Z ->
    f ((repr _ (Z.of_nat fuel)) .+ i) (foldi_ (fuel) i f cur) = foldi_ (S (fuel)) i f cur.
Proof.
  intros acc fuel.
  induction fuel ; intros.
  - cbn.
    replace (repr _ 0%Z) with (@zero U32) by (rewrite wrepr0 ; reflexivity).
    rewrite add_zero_l.
    reflexivity.
  - do 2 rewrite <- foldi__move_S.
    replace (int_add (repr _ (Z.of_nat (S fuel))) i)
      with (int_add (repr _ (Z.of_nat fuel)) (int_add i one)).
    2 : {
      unfold int_add.
      setoid_rewrite addwA.
      rewrite addwC.
      rewrite addwA.
      f_equal.
      rewrite Nat2Z.inj_succ.
      (* unfold repr. *)
      unfold add_word.
      unfold wrepr.
      f_equal.
      rewrite urepr_word.
      replace (@toword (nat_of_wsize U32) (@one U32))%Z with 1%Z by reflexivity.
      (* unfold urepr. *)
      (* unfold eqtype.val. *)
      (* (* unfold word_subType. *) *)
      (* unfold toword. *)
      (* unfold mkword. *)
      rewrite Z.add_1_l.
      f_equal.
      rewrite mkwordK.
      rewrite Zmod_small.
      reflexivity.
      clear -H.
      unfold modulus.
      unfold two_power_nat.
      cbn in *.
      lia.
    }
    rewrite IHfuel.
    reflexivity.
    lia.
Qed.

(* You can do one iteration of the fold by burning a unit of fuel *)
Lemma foldi__nat_move_S_fuel : forall {acc: Type} (fuel : nat) (i : nat) (f : nat -> acc -> acc) (cur : acc),
    (0 <= Z.of_nat fuel <= @wmax_unsigned U32)%Z ->
    f (fuel + i)%nat (foldi_nat_ fuel i f cur) = foldi_nat_ (S fuel) i f cur.
Proof.
  induction fuel ; intros.
  - reflexivity.
  - do 2 rewrite <- foldi__nat_move_S.
    replace (S fuel + i)%nat with (fuel + (S i))%nat by (symmetry ; apply plus_Snm_nSm).
    rewrite IHfuel.
    + reflexivity.
    + lia.
Qed.

(* folds and natural number folds compute the same thing *)
Lemma foldi_to_foldi_nat :
  forall {acc: Type}
    (lo: uint_size) (hi: uint_size) (* {lo <= hi} *)
    (f: (uint_size) -> acc -> acc) (* {i < hi} *)
    (init: acc),
    (unsigned lo <= unsigned hi)%Z ->
    foldi lo hi f init = foldi_nat (Z.to_nat (unsigned lo)) (Z.to_nat (unsigned hi)) (fun x => f (repr _ (Z.of_nat x))) init.
Proof.
  intros.
  unfold foldi.
  unfold foldi_nat.
  destruct (uint_size_as_nat hi) as [ hi_n [ hi_eq hi_H ] ] ; subst.
  rewrite (@unsigned_repr_alt U32 _ hi_H) in *.
  rewrite Nat2Z.id.
  destruct (uint_size_as_nat lo) as [ lo_n [ lo_eq lo_H ] ] ; subst.
  rewrite (@unsigned_repr_alt U32 _ lo_H) in *.
  rewrite Nat2Z.id.
  remember (hi_n - lo_n)%nat as n.
  apply f_equal with (f := Z.of_nat) in Heqn.
  rewrite (Nat2Z.inj_sub) in Heqn by (apply Nat2Z.inj_le ; apply H).
  rewrite <- Heqn.
  assert (H_bound : (Z.pred 0 < Z.of_nat n < @modulus U32)%Z) by lia.
  clear Heqn.
  induction n.
  - reflexivity.
  - pose proof (H_max_bound := modulus_range_helper _ (range_of_nat_succ _ H_bound)).
    rewrite <- foldi__nat_move_S_fuel by apply H_max_bound.
    cbn.
    rewrite SuccNat2Pos.id_succ.
    rewrite <- foldi__move_S_fuel by apply H_max_bound.
    destruct n.
    + cbn.
      replace (repr _ 0%Z) with (@zero U32) by (rewrite wrepr0 ; reflexivity).
      rewrite add_zero_l.
      reflexivity.
    + cbn in *.
      assert (H_bound_pred: (Z.pred 0 < Z.pos (Pos.of_succ_nat n) < @modulus U32)%Z) by lia.
      rewrite <- (IHn H_bound_pred) ; clear IHn.
      f_equal.
      * rewrite add_repr.
        do 2 rewrite Zpos_P_of_succ_nat.
        rewrite Z.add_succ_l.
        rewrite Nat2Z.inj_add.
        reflexivity.
      * rewrite SuccNat2Pos.id_succ.
        rewrite foldi__move_S.
        reflexivity.
Qed.

(* folds can be computed by doing one iteration and incrementing the lower bound *)
Lemma foldi_nat_split_S :
  forall {acc: Type}
    (lo: nat) (hi: nat) (* {lo <= hi} *)
    (f: nat -> acc -> acc) (* {i < hi} *)
    (init: acc),
    (lo < hi)%nat ->
    foldi_nat lo hi f init = foldi_nat (S lo) hi f (foldi_nat lo (S lo) f init).
Proof.
  unfold foldi_nat.
  intros.
  assert (succ_sub_diag : forall n, (S n - n = 1)%nat) by lia.
  rewrite (succ_sub_diag lo).
  induction hi ; [ lia | ].
  destruct (S hi =? S lo)%nat eqn:hi_eq_lo.
  - apply Nat.eqb_eq in hi_eq_lo ; rewrite hi_eq_lo in *.
    rewrite (succ_sub_diag lo).
    rewrite Nat.sub_diag.
    reflexivity.
  - apply Nat.eqb_neq in hi_eq_lo.
    apply Nat.lt_gt_cases in hi_eq_lo.
    destruct hi_eq_lo.
    + lia.
    + rewrite (Nat.sub_succ_l (S lo)) by apply (Nat.lt_le_pred _ _ H0).
      rewrite Nat.sub_succ_l by apply (Nat.lt_le_pred _ _ H).
      replace ((S (hi - S lo))) with (hi - lo)%nat by lia.
      reflexivity.
Qed.

(* folds can be split at some valid offset from lower bound *)
Lemma foldi_nat_split_add :
  forall (k : nat),
  forall {acc: Type}
    (lo: nat) (hi: nat) (* {lo <= hi} *)
    (f: nat -> acc -> acc) (* {i < hi} *)
    (init: acc),
  forall {guarantee: (lo + k <= hi)%nat},
    foldi_nat lo hi f init = foldi_nat (k + lo) hi f (foldi_nat lo (k + lo) f init).
Proof.
  induction k ; intros.
  - cbn.
    unfold foldi_nat.
    rewrite Nat.sub_diag.
    reflexivity.
  - rewrite foldi_nat_split_S by lia.
    replace (S k + lo)%nat with (k + S lo)%nat by lia.
    specialize (IHk acc (S lo) hi f (foldi_nat lo (S lo) f init)).
    rewrite IHk by lia.
    f_equal.
    rewrite <- foldi_nat_split_S by lia.
    reflexivity.
Qed.

(* folds can be split at some midpoint *)
Lemma foldi_nat_split :
  forall (mid : nat), (* {lo <= mid <= hi} *)
  forall {acc: Type}
    (lo: nat) (hi: nat) (* {lo <= hi} *)
    (f: nat -> acc -> acc) (* {i < hi} *)
    (init: acc),
  forall {guarantee: (lo <= mid <= hi)%nat},
    foldi_nat lo hi f init = foldi_nat mid hi f (foldi_nat lo mid f init).
Proof.
  intros.
  assert (mid_is_low_plus_constant : {k : nat | (mid = lo + k)%nat}) by (exists (mid - lo)%nat ; lia).
  destruct mid_is_low_plus_constant ; subst.
  rewrite Nat.add_comm.
  apply foldi_nat_split_add.
  apply guarantee.
Qed.

(* folds can be split at some midpoint *)
Lemma foldi_split :
  forall (mid : uint_size), (* {lo <= mid <= hi} *)
  forall {acc: Type}
    (lo: uint_size) (hi: uint_size) (* {lo <= hi} *)
    (f: uint_size -> acc -> acc) (* {i < hi} *)
    (init: acc),
  forall {guarantee: (unsigned lo <= unsigned mid <= unsigned hi)%Z},
    foldi lo hi f init = foldi mid hi f (foldi lo mid f init).
Proof.
  intros.
  do 3 rewrite foldi_to_foldi_nat by lia.
  apply foldi_nat_split ; lia.
Qed.

(*** Path / Sorted util *)

(* The tail of a sorted key list is still sorted. *)
Lemma path_sorted_tl : forall {T : ordType} {A} {e} {fmval : list (T * A)},
    is_true (path.sorted e (seq.unzip1 fmval)) ->
    is_true (path.sorted e (seq.unzip1 (tl fmval))).
Proof.
  intros.
  destruct fmval.
  - easy.
  - cbn.
    cbn in H.
    destruct (seq.unzip1 fmval).
    + reflexivity.
    + cbn in H.
      now rewrite LocationUtility.is_true_split_and in H.
Qed.

(* A path from [x] over the keys implies the keys themselves are sorted. *)
Corollary path_path_tl : forall {T : ordType} {A} {e} {x : T} {fmval : list (T * A)},
    is_true (path.path e x (seq.unzip1 fmval)) ->
    is_true (path.sorted e (seq.unzip1 (fmval))).
Proof.
  intros.
  destruct fmval.
  reflexivity.
  apply (path_sorted_tl (fmval := (x, snd p) :: p :: fmval)).
  apply H.
Qed.
(* Removing the second element of a sorted association list keeps it
   sorted, provided the order is transitive. *)
Lemma path_sorted_remove : forall {A : ordType} {B} {e} (x y : A * B) (l : list (A * B)),
    ssrbool.transitive e ->
    is_true (path.sorted e (seq.unzip1 (x :: y :: l))) ->
    is_true (path.sorted e (seq.unzip1 (x :: l))).
Proof.
  intros.
  cbn.
  induction l.
  reflexivity.
  cbn.
  cbn in *.
  rewrite !LocationUtility.is_true_split_and in H0.
  destruct H0 as [? []].
  rewrite H0 in IHl.
  rewrite !LocationUtility.is_true_split_and.
  split.
  - eapply H.
    apply H0.
    apply H1.
  - apply H2.
Qed.

(* Same removal property stated for [path.path] from a start point [x]. *)
Corollary path_path_remove : forall {A : ordType} {B} {e} (x : A) (y : A * B) (l : list (A * B)),
    ssrbool.transitive (T:=A) e ->
    is_true (path.path e (x) (seq.unzip1 (y :: l))) ->
    is_true (path.path e (x) (seq.unzip1 l)).
Proof.
  intros.
  apply (path_sorted_remove (x, snd y) y l H).
  apply H0.
Qed.

(* Dropping the last element (head of the reversal) keeps sortedness. *)
Lemma path_sorted_rev_last : forall {A : ordType} {B} {e} (a0 : A * B) (l : list (A * B)),
    is_true (path.sorted e (seq.unzip1 (seq.rev (a0 :: l)))) ->
    is_true (path.sorted e (seq.unzip1 (seq.rev l))).
Proof.
  intros.
  unfold seq.unzip1 ; rewrite seq.map_rev ; fold (seq.unzip1 l).
  rewrite path.rev_sorted.
  apply (path_sorted_tl (fmval := (a0 :: l))).
  rewrite <- path.rev_sorted.
  unfold seq.unzip1 ; rewrite <- seq.map_rev ; fold (seq.unzip1 (seq.rev (a0 :: l))).
  assumption.
Qed.

(*** Seq *)

(* Fixed-length sequences: [chUnit] when empty, otherwise a finite map
   from ['fin (S n)] to [A]. *)
Definition nseq_ (A: choice_type) (len : nat) : choice_type :=
  match len with
  | O => chUnit
  | S n => chMap ('fin (S n)) (A)
  end.

Notation "'nseq'" := (fun (A: choice_type) (len : choice.Choice.sort uint_size) => nseq_ A (from_uint_size (UInt_sizeable := nat_uint_sizeable) len)).

(* Definition nseq_type (A: choice_type) (len : nat) : Type := *)
(* match len with *)
(* | 0%nat => unit *)
(* | S n => { fmap ('I_len) -> A } *)
(* end. *)

(* Dynamic sequences: finite maps from ['nat] to [A]. *)
Definition seq (A : choice_type) : choice_type := chMap 'nat (A).

(* Definition seq_type (A : choice_type) : Type := FMap.fmap_type nat_ordType (A). *)

Definition public_byte_seq := seq int8.
Definition byte_seq := seq int8.
Definition list_len := length.
(* Lookup with default: returns the canonical inhabitant of [A] when the
   index is not bound in the map. *)
Definition seq_index_nat {A: choice_type} (s: (seq A)) (i : nat) : A :=
  match getm s i with
  | Some a => a
  | None => chCanonical A
  end.

Definition seq_index {A: choice_type} (s: (seq A)) (i : uint_size) : A :=
  seq_index_nat s (from_uint_size i).

(* Length = successor of the last (largest) key of the underlying sorted
   association list; 0 for the empty map. *)
Definition seq_len_nat {A: choice_type} (s: (seq A)) : nat :=
  match (FMap.fmval s) with
  | [] => 0
  | (x :: xs) => S (fst (seq.last x xs))
  end.

Definition seq_len {A: choice_type} (s: (seq A)) : (uint_size) :=
  usize (seq_len_nat s).

(* Enumerate indices [0, seq_len_nat s) and look each one up. *)
Definition seq_to_list (A: choice_type) (s : (seq A)) : list (A) :=
  seq.map (fun n => seq_index_nat s n) (seq.iota 0 (seq_len_nat s)).

Definition seq_from_list (A : choice_type) (l : list (A)) : (seq A) :=
  fmap_of_seq l.

(* Appending one element corresponds to [setm] at index [size l]. *)
Lemma seq_from_list_cat : forall A l a,
    seq_from_list A (l ++ [a]) = setm (seq_from_list A l) (seq.size l) a.
Proof.
  clear ; intros.
  unfold seq_from_list.
  apply eq_fmap.
  intros i.
  rewrite fmap_of_seqE.
  rewrite setmE.
  destruct eqtype.eq_op eqn:i_size_l.
  - (* [i] is exactly the appended index. *)
    apply (ssrbool.elimT eqtype.eqP) in i_size_l.
    subst.
    rewrite (seq.nth_map a).
    2:{ rewrite seq.size_cat. now rewrite ssrnat.addn1. }
    rewrite seq.nth_cat.
    rewrite ssrnat.ltnn.
    rewrite ssrnat.subnn.
    reflexivity.
  - (* [i] differs from [size l]: either out of range or within [l]. *)
    rewrite fmap_of_seqE.
    destruct (ssrnat.leq (seq.size (l ++ [a])) i) eqn:i_in_l.
    + rewrite seq.nth_default.
      2:{ rewrite seq.size_map. apply i_in_l. }
      rewrite seq.nth_default.
      2:{
        rewrite seq.size_map.
        eapply ssrnat.leq_trans.
        apply ssrnat.leqnSn.
        rewrite seq.size_cat in i_in_l.
        rewrite ssrnat.addn1 in i_in_l.
        apply i_in_l.
      }
      reflexivity.
    + assert (is_true (ssrnat.leq (S i) (seq.size l))).
      {
        rewrite ssrnat.leqNgt.
        rewrite ssrnat.ltnS.
        rewrite ssrnat.leq_eqVlt.
        rewrite Bool.negb_orb.
        rewrite eqtype.eq_sym.
        setoid_rewrite i_size_l.
        rewrite seq.size_cat in i_in_l.
        rewrite ssrnat.addn1 in i_in_l.
        rewrite i_in_l.
        reflexivity.
      }
      rewrite <- (@seq.nth_take (seq.size l) (option (A)) None i H (seq.map (fun x : A => Some x) (l ++ [a]))).
      rewrite <- seq.map_take.
      rewrite seq.take_size_cat ; [ | reflexivity ].
      reflexivity.
Qed.
(* In a strictly sorted association list, the head key is bounded by the
   last key. *)
Lemma sorted_last_leq : forall {A : ordType }{B} (a0 : A * B) (l : list (A * B)),
    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->
    is_true (fst a0 <= (fst (seq.last a0 l)))%ord.
Proof.
  intros ? ? a0 fmval i.
  generalize dependent a0.
  induction fmval ; intros.
  - apply Ord.leqxx.
  - simpl.
    specialize (IHfmval a0 (path_sorted_remove (e := Ord.lt) _ _ _ (@Ord.lt_trans _) i)).
    erewrite Ord.leq_trans.
    reflexivity.
    apply IHfmval.
    destruct fmval.
    + simpl.
      simpl in i.
      rewrite Bool.andb_true_r in i.
      unfold Ord.lt in i.
      rewrite LocationUtility.is_true_split_and in i.
      apply i.
    + simpl.
      apply Ord.leqxx.
Qed.

(* Nat-key specialization: head key < S (last key). *)
Corollary sorted_last_nat_lt : forall {B} (a0 : nat * B) (l : list (nat * B)),
    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->
    is_true (fst a0 < S (fst (seq.last a0 l)))%ord.
Proof.
  intros.
  pose (sorted_last_leq a0 l H).
  rewrite Ord.lt_neqAle.
  rewrite (Ord.leq_trans _ _ _ i) ; [ | easy ].
  destruct (eqtype.eq_op _ _) eqn:p_eq_last.
  - apply (ssrbool.elimT eqtype.eqP) in p_eq_last.
    setoid_rewrite p_eq_last in i.
    cbn in i.
    rewrite <- ssrnat.subnE in i.
    rewrite ssrnat.subSnn in i.
    discriminate.
  - easy.
Qed.

(* Strict [a < b] makes both [b < a] and [b == a] false. *)
Theorem ord_lt_nleq_and_neq : forall {A : ordType} {a b : A},
    is_true (a < b)%ord ->
    (b < a)%ord = false /\ (eqtype.eq_op b a) = false.
Proof.
  intros.
  rewrite Ord.ltNge in H.
  rewrite Ord.leq_eqVlt in H.
  rewrite Bool.negb_orb in H.
  rewrite LocationUtility.is_true_split_and in H.
  destruct H.
  apply ssrbool.negbTE in H.
  apply ssrbool.negbTE in H0.
  rewrite H , H0 ; clear H H0.
  easy.
Qed.

(* Combination of the two lemmas above for the S-of-last-key element. *)
Corollary sorted_last_is_last : forall {B} (a0 : nat * B) (l : list (nat * B)),
    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->
    (S (fst (seq.last a0 l)) < fst a0)%ord = false /\ (@eqtype.eq_op (nat : ordType) (S (fst (seq.last a0 l))) (fst a0) = false).
Proof.
  intros.
  pose (i0 := sorted_last_nat_lt a0 l H).
  destruct (ord_lt_nleq_and_neq i0).
  easy.
Qed.

(* Mixed transitivity: [<=] then [<] gives [<]. *)
Theorem ord_leq_lt_trans : forall {A : ordType} {a b c : A},
    is_true (a <= b)%ord -> is_true (b < c)%ord -> is_true (a < c)%ord.
Proof.
  intros.
  pose proof (Ord.leq_trans _ _ _ H (Ord.ltW H0)).
  rewrite Ord.leq_eqVlt in H1.
  rewrite LocationUtility.is_true_split_or in H1.
  destruct H1.
  - apply (ssrbool.elimT eqtype.eqP) in H1.
    subst.
    rewrite Ord.leq_eqVlt in H.
    rewrite LocationUtility.is_true_split_or in H.
    destruct H.
    + apply (ssrbool.elimT eqtype.eqP) in H.
      subst.
      now rewrite Ord.ltxx in H0.
    + pose proof (Ord.lt_trans H H0).
      now rewrite Ord.ltxx in H1.
  - apply H1.
Qed.

(* Mixed transitivity: [<] then [<=] gives [<]. *)
Theorem ord_lt_leq_trans : forall {A : ordType} {a b c : A},
    is_true (a < b)%ord -> is_true (b <= c)%ord -> is_true (a < c)%ord.
Proof.
  intros.
  pose proof (Ord.leq_trans _ _ _ (Ord.ltW H) H0).
  rewrite Ord.leq_eqVlt in H1.
  rewrite LocationUtility.is_true_split_or in H1.
  destruct H1.
  - apply (ssrbool.elimT eqtype.eqP) in H1.
    subst.
    rewrite Ord.leq_eqVlt in H0.
    rewrite LocationUtility.is_true_split_or in H0.
    destruct H0.
    + apply (ssrbool.elimT eqtype.eqP) in H0.
      subst.
      now rewrite Ord.ltxx in H.
    + pose proof (Ord.lt_trans H H0).
      now rewrite Ord.ltxx in H1.
  - apply H1.
Qed.

(* On nat, the Ord strict order [a < b] gives [S a <= b]. *)
Theorem ord_lt_is_leq : forall {a b : nat},
    is_true (a < b)%ord ->
    is_true (S a <= b)%ord.
Proof.
  intros.
  generalize dependent a.
  induction b ; intros.
  - destruct a ; easy.
  - destruct a ; [ easy | ].
    cbn.
    cbn in IHb.
    apply IHb.
    apply H.
Qed.

(* Setting a key at or beyond the current length makes it the new maximal
   key, so the length becomes [S x]. *)
Theorem seq_len_nat_setm : forall {A} (l : (seq A)) a, forall x,
    is_true (seq_len_nat l <= x)%ord ->
    seq_len_nat (setm l x a) = S x.
Proof.
  intros.
  destruct l.
  destruct fmval.
  - reflexivity.
  - unfold seq_len_nat.
    simpl.
    destruct (ord_lt_nleq_and_neq (ord_lt_leq_trans (sorted_last_nat_lt p fmval i) H)).
    setoid_rewrite H0.
    setoid_rewrite H1.
    clear H0 H1.
    generalize dependent p.
    induction fmval ; intros.
    * reflexivity.
    * simpl.
      destruct (ord_lt_nleq_and_neq (ord_lt_leq_trans (sorted_last_nat_lt a0 fmval (path_sorted_tl i)) H)).
      setoid_rewrite H0.
      setoid_rewrite H1.
      clear H0 H1.
      simpl.
      rewrite (IHfmval a0 (path_sorted_tl i)).
      reflexivity.
      apply H.
Qed.
(* Setting at exactly the current length increments the length. *)
Corollary seq_len_nat_setm_len : forall {A} (l : (seq A)) a,
    seq_len_nat (setm l (seq_len_nat l) a) = S (seq_len_nat l).
Proof.
  intros.
  apply seq_len_nat_setm.
  easy.
Qed.

(* The list size agrees with the map-based length after conversion. *)
Theorem seq_from_list_size : forall A l, seq.size l = seq_len_nat (seq_from_list A l).
Proof.
  intros.
  rewrite <- (rev_involutive l).
  induction (rev l).
  - reflexivity.
  - simpl.
    rewrite seq_from_list_cat.
    rewrite seq.size_cat.
    rewrite IHl0 ; clear IHl0.
    rewrite ssrnat.addn1.
    now erewrite (seq_len_nat_setm (seq_from_list A (rev l0))).
Qed.

(* A finite map whose underlying list ends with [a0] (head of its
   reversal) equals the map without it plus a [setm] of [a0]. *)
Lemma destruct_fmap_last : forall {A : ordType} {B} (a0 : A * B) (l : list (A * B)) i,
    (FMap.FMap (fmval:=seq.rev (a0 :: l)) i = setm (FMap.FMap (fmval:=seq.rev l) (path_sorted_rev_last a0 l i)) (fst a0) (snd a0)).
Proof.
  intros.
  apply eq_fmap.
  intros v.
  rewrite setmE.
  destruct (eqtype.eq_op v (fst a0)) eqn:v_eq_a0.
  - apply (ssrbool.elimT eqtype.eqP) in v_eq_a0.
    subst.
    generalize dependent l.
    intros l.
    rewrite seq.rev_cons.
    intros.
    unfold getm ; simpl.
    induction (seq.rev l).
    + simpl.
      now rewrite eqtype.eq_refl.
    + simpl.
      rewrite IHl0 ; clear IHl0.
      * simpl in i.
        unfold seq.unzip1 in i.
        rewrite seq.map_rcons in i.
        rewrite path.rcons_path in i.
        rewrite LocationUtility.is_true_split_and in i.
        destruct i.
        pose (H1 := sorted_last_leq a l0 H).
        rewrite seq.last_map in H0.
        epose (ord_leq_lt_trans H1 H0).
        rewrite Ord.lt_neqAle in i.
        rewrite LocationUtility.is_true_split_and in i.
        destruct i.
        apply ssrbool.negbTE in H2.
        rewrite eqtype.eq_sym.
        rewrite H2.
        reflexivity.
        destruct l0.
        -- reflexivity.
        -- simpl.
           simpl in i.
           rewrite LocationUtility.is_true_split_and in i.
           apply i.
      * unfold getm.
        simpl.
        unfold seq.rev at 1.
        simpl.
        rewrite seq.catrevE.
        induction (seq.rev l) ; simpl.
        -- now rewrite v_eq_a0.
        -- now rewrite IHl0.
Qed.

(* [setm] at the current length corresponds to appending to the list view. *)
Lemma seq_to_list_setm : forall {A : choice_type} (l : (seq A)) a,
    seq_to_list A (setm l (seq_len_nat l) a) = seq_to_list A l ++ [a].
Proof.
  intros.
  unfold seq_to_list.
  rewrite seq_len_nat_setm.
  rewrite <- ssrnat.addn1.
  rewrite seq.iotaD.
  rewrite ssrnat.add0n.
  simpl.
  rewrite seq.map_cat.
  simpl.
  unfold seq_index_nat.
  rewrite setmE.
  rewrite eqtype.eq_refl.
  set (seq.map _ _).
  set (seq.map _ _).
  assert (l0 = l1) ; subst l0 l1.
  {
    set (seq_len_nat l) at 1.
    assert (seq_len_nat l <= n)%nat by reflexivity.
    generalize dependent n.
    induction (seq_len_nat l) ; intros.
    - reflexivity.
    - rewrite <- ssrnat.addn1.
      rewrite seq.iotaD.
      rewrite <- ssrnat.addn1.
      rewrite seq.iotaD.
      rewrite !ssrnat.add0n.
      rewrite !ssrnat.addn0.
      simpl.
      rewrite seq.map_cat.
      rewrite seq.map_cat.
      f_equal.
      { setoid_rewrite IHn. reflexivity. lia. }
      {
        simpl.
        rewrite setmE.
        replace (eqtype.eq_op _ _) with false.
        2:{
          clear -H.
          cbn.
          generalize dependent n0.
          induction n ; intros.
          - destruct n0 ; easy.
          - destruct n0 ; [ easy | ].
            simpl.
            specialize (IHn n0).
            rewrite IHn.
            reflexivity.
            lia.
        }
        reflexivity.
      }
  }
  now rewrite H.
  easy.
Qed.

(* Round-trip: list -> map -> list is the identity. *)
Definition seq_from_list_id : forall {A : choice_type} (t : list (A)),
    seq_to_list A (seq_from_list A t) = t.
Proof.
  intros.
  rewrite <- (seq.revK t).
  induction (seq.rev t).
  - reflexivity.
  - simpl.
    rewrite seq.rev_cons.
    set (h := seq.rev l) at 1 ; rewrite <- IHl ; subst h.
    clear IHl.
    rewrite <- !seq.cats1.
    rewrite seq_from_list_cat.
    rewrite seq_from_list_size.
    rewrite seq_to_list_setm.
    reflexivity.
Qed.

(* The list view has exactly [seq_len_nat] elements. *)
Definition seq_to_list_size : forall {A : choice_type} (t : (seq A)),
    seq.size (seq_to_list A t) = seq_len_nat t.
Proof.
  intros.
  destruct t.
  generalize dependent fmval.
  intros fmval.
  rewrite <- (seq.revK fmval).
  intros.
  induction (seq.rev fmval).
  - reflexivity.
  - rewrite destruct_fmap_last.
    intros.
    unfold seq_to_list in *.
    rewrite seq_len_nat_setm.
    rewrite <- ssrnat.addn1.
    rewrite seq.iotaD.
    rewrite ssrnat.add0n.
    simpl.
    rewrite seq.map_cat.
    simpl.
    rewrite ssrnat.addn1.
    unfold seq_index_nat.
    rewrite setmE.
    rewrite eqtype.eq_refl.
    rewrite seq.size_cat.
    rewrite seq.size_map.
    rewrite seq.size_iota.
    simpl.
    rewrite ssrnat.addn1.
    reflexivity.
    unfold seq_len_nat.
    simpl.
    clear -i.
    rewrite seq.rev_cons in i.
    rewrite <- seq.cats1 in i.
    (* set seq.rev in i ; unfold Ord.sort, nat_ordType in l0 ; subst l0. *)
    destruct (seq.rev _).
    + easy.
    + generalize dependent p.
      induction l0 ; intros.
      * simpl.
        simpl in i.
        rewrite Bool.andb_true_r in i.
        now apply ord_lt_is_leq.
      * simpl.
        apply IHl0.
        apply (path_sorted_tl i).
Qed.

(* Sequence of [len] copies of [init]. *)
Definition seq_new_ {A: choice_type} (init : A) (len: uint_size) : (seq A) :=
  fmap_of_seq (repeat init (Z.to_nat (unsigned len))).

(* Sequence of [len] canonical (default) elements. *)
Definition seq_new {A: choice_type} (len: uint_size) : (seq A) :=
  seq_new_ (chCanonical A) len.

Definition seq_create {A: choice_type} (len: uint_size) : (seq A) :=
  seq_new len.

(* Representation of the successor equals representation plus [one]. *)
Definition repr_Z_succ : forall WS z, @repr WS (Z.succ z) = (repr _ z .+ one).
Proof.
  intros.
  replace one with (@repr WS 1%Z) by (unfold one ; now rewrite word1_zmodE).
  now rewrite add_repr.
Qed.

(* Arithmetic helper used to build in-bound ordinals below. *)
Lemma lt_succ_diag_r_sub : forall x k, (x - k < S x)%nat.
Proof.
  intros.
  generalize dependent x.
  induction k ; intros.
  - rewrite Nat.sub_0_r.
    apply Nat.lt_succ_diag_r.
  - destruct x.
    + apply Nat.lt_succ_diag_r.
    + cbn.
      apply Nat.lt_lt_succ_r.
      apply (IHk x).
Qed.

(* [setm] that skips bindings of the canonical (default) value. *)
Definition setm_leave_default {T : ordType} {S : choice_type} (m : {fmap T -> S}) (i : T) (e : S) : {fmap T -> S} :=
  if eqtype.eq_op e (chCanonical S)
  then m
  else setm m i e.

(* Build an [nseq_] of size [S k] from a nonempty list, placing the head
   at index [k - length xs] and subsequent elements at decreasing offsets. *)
Equations array_from_list_helper {A: choice_type} (x : A) (xs: list (A)) (k : nat) : (nseq_ A (S k)) :=
  array_from_list_helper x [] k := setm emptym (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O))) x ;
  array_from_list_helper x (y :: ys) k := setm (array_from_list_helper y ys k) (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k (length (y :: ys))))) x.

(* Array whose size is the length of the given list. *)
Definition array_from_list {A: choice_type} (l: list (A)) : (nseq_ A (length l)) :=
  match l with
    nil => tt
  | (x :: xs) => array_from_list_helper x xs (length xs)
  end.

(* Resize a list to exactly [k] elements: truncate from the front of the
   reversal when too long, pad with canonical elements when too short. *)
Definition resize_to_k {A : choice_type} (l : list A) k :=
  List.rev (seq.drop (length l - k) (List.rev l)) ++ (List.repeat (chCanonical A) (k - length l)).
(* [resize_to_k] really produces a list of length [k].  [Defined] (not
   [Qed]) so the equality can compute in later definitions. *)
Theorem length_resize_to_k : forall {A : choice_type} (l : list A) k, List.length (resize_to_k l k) = k.
Proof. intros. unfold resize_to_k. rewrite List.app_length. rewrite List.rev_length. rewrite seq.size_drop. rewrite List.repeat_length. rewrite List.rev_length. Lia.lia. Defined.

(* Resizing to a list's own length is the identity. *)
Theorem resize_to_length_idemp : forall {A : choice_type} (l : list A), l = resize_to_k l (length l).
Proof. intros. induction l. - reflexivity. - unfold resize_to_k. rewrite (Nat.sub_diag). rewrite seq.drop0. rewrite List.rev_involutive. now rewrite List.app_nil_r. Qed.

(* Array from a list at an explicitly given size [k]: the list is first
   normalised with [resize_to_k], so truncation/padding is implicit. *)
Definition array_from_list' {A: choice_type} (l: list (A)) (k : nat) : (nseq_ A k) :=
  match k with
  | O => (tt : (nseq_ A O))
  | S k' => match resize_to_k l (S k') with nil => fmap.emptym | (x :: xs) => array_from_list_helper x xs k' end
  end.

(* Shift an ordinal: [m < n] is sent to [S m < S n].  NOTE(review): this
   is a *successor* embedding (index + 1), not a plain widening — it is
   what makes room at index 0 in [lift_nseq]. *)
Definition lift_ordinal n (x : 'I_n) : 'I_(S n).
Proof. destruct x. apply (Ordinal (m := S m)). apply i. Defined.

(* Apply [lift_ordinal] to every key of an fmap's association list. *)
Equations lift_fval {A : choice_type} {n} (a : list ('I_(S n) * (A))) : list ('I_(S(S n)) * (A)) :=
  lift_fval [] := [] ;
  lift_fval (x :: xs) := (lift_ordinal (S n) (fst x) , snd x) :: lift_fval xs.

(* Shifting all keys by one preserves strict sortedness, so the result
   of [lift_fval] is again a valid fmap value list. *)
Lemma lift_is_sorted : forall {A : choice_type} {n} (a : {fmap 'I_(S n) -> (A)}), is_true (path.sorted Ord.lt (seq.unzip1 (lift_fval a))).
Proof. intros. destruct a. simpl. induction fmval. - reflexivity. - destruct a. simpl. intros. rewrite lift_fval_equation_2 ; simpl. destruct fmval. + reflexivity. + pose proof i. rewrite lift_fval_equation_2 ; simpl. simpl in H. rewrite LocationUtility.is_true_split_and in H. destruct H. rewrite LocationUtility.is_true_split_and. split ; [ | ]. 2:{ apply IHfmval. apply H0. } unfold lift_ordinal. destruct s. destruct (fst _). apply H. Qed.

(* Embed an [nseq_ A len] into [nseq_ A (S len)] by shifting every
   index up by one (index 0 becomes free). *)
Definition lift_nseq {A: choice_type} {len : nat} (x: nseq_ A len) : (nseq_ A (S len)) :=
  match len as k return nseq_ A k -> nseq_ A (S k) with
  | O => fun _ => emptym
  | S n => fun x => @FMap.FMap _ _ (lift_fval (FMap.fmval x)) (lift_is_sorted x)
  end x.
(* Insert only when the value is [Some]; [None] leaves the map unchanged. *)
Definition setm_option {T : ordType} {S : choice_type} (m : {fmap T -> S}) (i : T) (e : chOption S) : {fmap T -> S} :=
  match e with
  | Some x => setm m i x
  | None => m
  end.

(* Like [array_from_list_helper] but over optional elements: [None]
   entries are simply not stored.  Uses [lift_nseq] to shift the tail
   before inserting the head at its index. *)
Equations array_from_option_list_helper {A: choice_type} (x : chOption A) (xs: list (chOption A)) (k : nat) : (nseq_ A (S k)) :=
  array_from_option_list_helper x (y :: ys) O := emptym ;
  array_from_option_list_helper x [] k := setm_option emptym (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O))) x ;
  array_from_option_list_helper x (y :: ys) (S k) := setm_option (lift_nseq (array_from_option_list_helper y ys k)) (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub (S k) (length (y :: ys))))) x.
Fail Next Obligation.

(* Array of size [k] from a list of optional values (resized first). *)
Definition array_from_option_list' {A: choice_type} (l: list (chOption A)) (k : nat) : (nseq_ A k) :=
  match k with
  | O => (tt : (nseq_ A O))
  | S k' => match resize_to_k l (S k') with nil => fmap.emptym | (x :: xs) => array_from_option_list_helper x xs k' end
  end.

(* [List.rev] and ssreflect's [seq.rev] agree. *)
Theorem list_rev_is_seq_rev : forall T (x : list T), List.rev x = seq.rev x.
Proof. intros. induction x. - reflexivity. - simpl. rewrite IHx. replace (a :: nil) with (seq.rev (a :: nil)) by reflexivity. now rewrite <- seq.rev_cat. Qed.

(* At the list's own length, [array_from_list'] coincides with
   [array_from_list].  [Defined] so it computes. *)
Theorem simple0_array_from_list : forall (A : choice_type) (x : list A), array_from_list' x (List.length x) = array_from_list x.
Proof. intros. subst. simpl. induction x. - reflexivity. - simpl. unfold resize_to_k. simpl. rewrite (Nat.sub_diag (length x)). setoid_rewrite seq.drop0. change (List.rev _ ++ _ :: nil) with (List.rev (a :: x)). rewrite List.rev_involutive. now rewrite List.app_nil_r. Defined.

(* Generalisation of the previous theorem modulo a transport along
   [length x = len]. *)
Theorem simple_array_from_list : forall (A : choice_type) (x : list A) len (H : List.length x = len), array_from_list' x len = (eq_rect (length x) (fun n : nat => nseq_ A n) (array_from_list x) len H).
Proof. intros. subst. apply simple0_array_from_list. Defined.
(**** Array manipulation *)

(* Array of [len] copies of [init]. *)
Definition array_new_ {A: choice_type} (init:A) (len: nat) : (nseq_ A len) :=
  match len with O => (tt : (nseq_ A 0)) | (S n') => array_from_list_helper init (repeat init n') n' end.

(* Read index [i] (a machine word) out of an array; out-of-bounds or
   missing entries give the canonical default of [A]. *)
Equations array_index {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) : A :=
  array_index (len := 0) s i := (chCanonical A) ;
  array_index (len := (S n)) s i with le_lt_dec (S n) (Z.to_nat (unsigned i)) := {
    | right a with (@getm _ _ s (fintype.Ordinal (n := S n) (m := Z.to_nat (unsigned i)) ((ssrbool.introT ssrnat.ltP a)))) => {
      | Some f => f
      | None => (chCanonical A) }
    | left b => (chCanonical A) }.

(* Update index [i]; out-of-bounds writes are dropped (the array is
   returned unchanged).  The [len = O] in-bounds case is refuted by a
   small ltac proof. *)
Equations array_upd {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) (new_v: A) : (nseq_ A len) :=
  array_upd s i new_v with len := {
    array_upd s i new_v n with lt_dec (Z.to_nat (unsigned i)) n := {
      array_upd s i new_v O (left l) => ltac:(apply Nat.ltb_lt in l ; discriminate) ;
      array_upd s i new_v (S n) (left l) => (setm s (fintype.Ordinal (n := S n) (m := Z.to_nat (unsigned i)) (ssrbool.introT ssrnat.ltP l)) new_v) ;
      array_upd s i new_v n (right _) => s } }.

(* NOTE(review): the span below appears garbled in this copy of the
   file — the text jumps from the start of [array_upd2]'s proof into
   the middle of what looks like an [update_sub] fixpoint (compare the
   uses of [update_sub] further down).  Left byte-identical; verify
   against the upstream source. *)
Definition array_upd2 {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) (new_v: A) : (nseq_ A len). Proof. destruct (Z.to_nat (unsigned i) acc | S x => rec x (array_upd acc (usize (i+x)%nat) (array_index sub (usize x))) end in rec (n - i + 1)%nat v.

(* Array of size [out_len] initialised from a sequence via
   [update_sub] over the list contents. *)
Definition array_from_seq {A: choice_type} (out_len:nat) (input: (seq A)) : (nseq_ A out_len) :=
  let out := array_new_ (chCanonical A) out_len in
  update_sub out 0 (out_len - 1) (@array_from_list A (@seq_to_list A input)).

(* List slice [i, j] (inclusive of both bounds: takes [j-i+1] elements
   after skipping [i]); empty when [j <= i]. *)
Definition slice {A} (l : list A) (i j : nat) : list A :=
  if (j <=? i)%nat then [] else firstn (j-i+1) (skipn i l).

(* Inverse of [lift_ordinal]: a strictly positive ordinal below [S n]
   maps to its predecessor below [n]. *)
Definition lower_ordinal n (x : 'I_(S n)) (H: is_true (ord0 < x)%ord) : 'I_n.
Proof. destruct x. apply (Ordinal (m := Nat.pred m)). apply ssrnat.ltnSE. rewrite (Nat.lt_succ_pred 0). - apply i. - destruct m. + discriminate. + lia. Defined.
(* Apply [lower_ordinal] to every key of an association list, given a
   proof [H] that all keys are strictly positive. *)
Equations lower_fval {A : choice_type} {n} (a : list ('I_(S(S n)) * (A))) (H : forall x, In x a -> is_true (ord0 < fst x)%ord ) : list ('I_(S n) * (A)) :=
  lower_fval [] H := [] ;
  lower_fval (x :: xs) H := (lower_ordinal (S n) (fst x) (H x (or_introl eq_refl)) , snd x) :: lower_fval xs (fun y H0 => H y (in_cons x y xs H0)).

(* Lowering keys does not touch the stored values (the [snd]
   projections coincide). *)
Lemma lower_keeps_value : forall {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> (A)}) H, (seq.map snd a = seq.map snd (lower_fval a H)).
Proof. intros. destruct a. simpl in *. induction fmval. - cbn. reflexivity. - destruct a. rewrite seq.map_cons. erewrite IHfmval. rewrite lower_fval_equation_2 ; simpl. f_equal. apply (path_sorted_tl i). Qed.

(* Lowering all (positive) keys by one preserves strict sortedness. *)
Lemma lower_is_sorted : forall {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> (A)}) H, is_true (path.sorted Ord.lt (seq.unzip1 (lower_fval a H))).
Proof. intros. destruct a. simpl. induction fmval. - reflexivity. - destruct a. simpl. intros. rewrite lower_fval_equation_2 ; simpl. destruct fmval. + reflexivity. + pose proof i. rewrite lower_fval_equation_2 ; simpl. simpl in H0 |- *. rewrite LocationUtility.is_true_split_and in H0 |- *. destruct H0. split ; [ | ]. destruct p. simpl. destruct s, s1. pose proof (H (Ordinal (n:=S (S n)) (m:=m) i0, s0) (or_introl eq_refl)). pose proof (H (Ordinal (n:=S (S n)) (m:=m0) i1, s2) (in_cons (Ordinal (n:=S (S n)) (m:=m) i0, s0) (Ordinal (n:=S (S n)) (m:=m0) i1, s2) ((Ordinal (n:=S (S n)) (m:=m0) i1, s2) :: fmval) (or_introl eq_refl))). unfold Ord.lt in H0 |- *. unfold Ord.leq in H0 |- *. cbn. clear -H0 H2 H3. rewrite LocationUtility.is_true_split_and in H0 |- *. destruct H0. cbn in H , H0. destruct m, m0 ; easy. specialize (IHfmval (path_sorted_tl i) ( fun x H2 => H x (in_cons _ _ _ H2))). rewrite lower_fval_equation_2 in IHfmval ; simpl in IHfmval. simpl. apply IHfmval. Qed.
(* [lower_is_sorted] restated for a raw sorted association list (the
   list is packaged into an fmap on the fly). *)
Corollary lower_list_is_sorted : forall {A : choice_type} {n} (a : list ('I_(S(S n)) * (A))) H, is_true (path.sorted Ord.lt (seq.unzip1 a)) -> is_true (path.sorted Ord.lt (seq.unzip1 (lower_fval a H))).
Proof. intros. refine (lower_is_sorted (@FMap.FMap _ _ a _) _). apply H0. Qed.

(* Ordinal extensionality: two ordinals of the same type are equal iff
   their underlying naturals are (proof components are irrelevant). *)
Lemma ord_ext : forall {n} m0 m1 {H1 H2}, m0 = m1 <-> Ordinal (n := S n) (m := m0) H1 = Ordinal (n := S n) (m := m1) H2.
Proof. intros. rewrite <- (inord_val (Ordinal H1)). rewrite <- (inord_val (Ordinal H2)). split. intros. subst. reflexivity. intros. cbn in H. unfold inord in H. unfold eqtype.insubd in H. unfold eqtype.insub in H. destruct ssrbool.idP in H. destruct ssrbool.idP in H. cbn in H. inversion H. reflexivity. contradiction. contradiction. Qed.

(* [lower_fval] is injective on fmaps (and trivially proof-irrelevant
   in [H1]/[H2]): equal inputs give equal outputs and conversely. *)
Lemma lower_fval_ext : forall {A : choice_type} {n} (a b : {fmap 'I_(S(S n)) -> A}) H1 H2, a = b <-> lower_fval a H1 = lower_fval b H2.
Proof. intros. split. - intros. subst. destruct b. simpl. induction fmval. + reflexivity. + simpl. destruct a, s. rewrite !lower_fval_equation_2. f_equal. * f_equal. apply ord_ext. reflexivity. * apply IHfmval. apply (path_sorted_tl i). - intros. apply eq_fmap. intros i. destruct a. destruct b. cbn in H. cbn. f_equal. generalize dependent fmval0. induction fmval as [ | p ] ; intros ; destruct fmval0 as [ | p0 ] ; try rewrite !lower_fval_equation_2 in H ; try rewrite !lower_fval_equation_1 in H ; try easy. inversion H. epose (H1 p (or_introl eq_refl)). epose (H2 p0 (or_introl eq_refl)). destruct p. destruct p0. cbn in H4. subst. destruct s. destruct s1. apply ord_ext in H3. f_equal. { f_equal. apply ord_ext. destruct m, m0 ; try discriminate. cbn in H3. now rewrite H3. } { eapply IHfmval. apply H5. Unshelve. apply (path_sorted_tl i0). apply (path_sorted_tl i1). } Qed.
(* [lower_fval_ext] restated for raw sorted association lists. *)
Lemma lower_fval_ext_list : forall {A : choice_type} {n} (a b : list ('I_(S(S n)) * (A))) (Ha : is_true (path.sorted Ord.lt (seq.unzip1 a))) (Hb : is_true (path.sorted Ord.lt (seq.unzip1 b))) H1 H2, a = b <-> lower_fval a H1 = lower_fval b H2.
Proof. intros. epose (lower_fval_ext (@FMap.FMap _ _ a Ha) (@FMap.FMap _ _ b Hb) H1 H2). simpl in i. rewrite <- i. split. intros. apply fmap.eq_fmap. intros x. subst. reflexivity. intros. now inversion H. Qed.

(* In a strictly sorted list, the head key is below every key in the
   tail. *)
Lemma gt_smallest_sorted : forall {A} {n} {p : 'I_n * A} {fmval}, is_true (path.sorted Ord.lt (seq.unzip1 (p :: fmval))) -> (forall x, In x fmval -> is_true (fst p < fst x)%ord).
  intros. induction fmval. - contradiction. - cbn in H. rewrite LocationUtility.is_true_split_and in H. destruct H. destruct H0. + subst. apply H. + apply IHfmval. cbn. eapply path.path_le. apply (@Ord.lt_trans _). apply H. apply H1. apply H0. Qed.

(* Consequence: every tail key of a sorted nonempty list is strictly
   above [ord0]. *)
Corollary tl_gt_0_sorted : forall {A} {n} {p : 'I_(S n) * A} {fmval}, is_true (path.sorted Ord.lt (seq.unzip1 (p :: fmval))) -> (forall x, In x fmval -> is_true (ord0 < fst x)%ord).
  intros. induction fmval ; [ easy | ]. pose proof H. simpl in H1. rewrite LocationUtility.is_true_split_and in H1. destruct H1. destruct H0. - subst. destruct p, o. destruct m. + apply H1. + eapply Ord.lt_trans. 2: apply (gt_smallest_sorted H) ; now left. easy. - refine (IHfmval _ H0). cbn. eapply path.path_le. apply Ord.lt_trans. apply H1. apply H2. Qed.

(* When the head key is a successor [S m'], every element of the whole
   list (head included) has a strictly positive key — the positivity
   witness needed by [lower_fval] in [tl_fmap]. *)
Lemma in_nseq_tl_gt_zero {A} {n} {m'} {i3} {k} fmval (i : is_true (path.path Ord.lt (fst (@Ordinal _ (S m') i3, k)) (seq.unzip1 fmval))) : (forall x : 'I_(S (S n)) * A, In x ((@Ordinal _ (S m') i3, k) :: fmval) -> is_true (ord0 < fst x)%ord).
Proof. intros. destruct H. - subst. reflexivity. - eapply tl_gt_0_sorted. apply i. apply H. Qed.
(* "Tail" of an array-as-fmap: drop a binding at key 0 (if any) and
   shift all remaining keys down by one via [lower_fval]. *)
Equations tl_fmap {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> A}) : {fmap 'I_(S n) -> A} :=
  tl_fmap (@FMap.FMap _ _ [] i) := emptym ;
  tl_fmap (@FMap.FMap _ _ ((@Ordinal _ 0 i3, k) :: fmval) i) := @FMap.FMap _ _ (lower_fval fmval (gt_smallest_sorted i)) (lower_list_is_sorted _ _ (path_path_tl i)) ;
  tl_fmap (@FMap.FMap _ _ ((@Ordinal _ (S m') i3, k) :: fmval) i) := @FMap.FMap _ _ (lower_fval ((Ordinal (n:=S (S n)) (m:=S m') i3, k) :: fmval) (in_nseq_tl_gt_zero fmval i)) (lower_list_is_sorted _ _ i).
Fail Next Obligation.

(* Head element of a nonempty array: the value at key 0 if present,
   otherwise the canonical default. *)
Definition nseq_hd {A : choice_type} {n} (a : (nseq_ A (S n))) : A :=
  match a with
  | @FMap.FMap _ _ [] _ => (chCanonical A)
  | @FMap.FMap _ _ (p :: _) _ => match nat_of_ord (fst p) with | O => snd p | S _ => (chCanonical A) end
  end.

(* Like [nseq_hd] but distinguishes "no binding at 0" as [None]. *)
Definition nseq_hd_option {A : choice_type} {n} (a : (nseq_ A (S n))) : chOption A :=
  match a with
  | @FMap.FMap _ _ [] _ => None
  | @FMap.FMap _ _ (p :: _) _ => match nat_of_ord (fst p) with | O => Some (snd p) | S _ => None end
  end.

(* Tail of an array; for length 1 the tail is the unit array. *)
Definition nseq_tl {A : choice_type} {n} (a : (nseq_ A (S n))) : (nseq_ A n).
Proof. destruct n ; [exact tt | apply (tl_fmap a) ]. Defined.

(* Head/tail decomposition in one step. *)
Definition split_nseq_ {A : choice_type} {n} (a : (nseq_ A (S n))) : A * (nseq_ A n) := (nseq_hd a, nseq_tl a).

(* Taking the tail removes at most one binding:
   [length a <= S (length (tl_fmap a))]. *)
Lemma lower_fval_smaller_length {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> A}) : (length (FMap.fmval a) <= S (length (FMap.fmval (tl_fmap a))))%nat.
Proof. destruct a. induction fmval. - cbn ; lia. - simpl. simpl in IHfmval. destruct a, s. destruct m. + apply Nat.eq_le_incl. f_equal. rewrite tl_fmap_equation_2. (* rewrite mkfmapK ; [ | apply (lower_is_sorted (@FMap.FMap _ _ fmval (path_sorted_tl i)))]. *) epose (lower_keeps_value (FMap.FMap (T:=fintype_ordinal__canonical__Ord_Ord (S (S n))) (fmval:=fmval) (path_sorted_tl i))). simpl in e. rewrite <- (map_length snd). rewrite <- (map_length snd). assert (forall {A B} (f : A -> B) (l : list A), seq.map f l = map f l). { clear ; intros. induction l. - reflexivity. - cbn. f_equal.
} setoid_rewrite <- H. erewrite e. reflexivity. + rewrite tl_fmap_equation_3. apply le_n_S. eapply le_trans ; [ apply (IHfmval (path_sorted_tl i)) | ]. apply Nat.eq_le_incl. (* rewrite mkfmapK ; [ | apply (lower_is_sorted (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m) i0, s0) :: fmval) i)) ]. *) simpl. f_equal. f_equal. clear. induction fmval. * reflexivity. * destruct a, s. destruct m0 ; [ discriminate | ]. rewrite tl_fmap_equation_3. simpl. erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m0) i1, s1) :: fmval) (path_sorted_tl i)) _ _ _) eq_refl). reflexivity. Qed.

(* Trichotomy helper: not below and not equal implies strictly above. *)
Lemma ord_gt : (forall {A : ordType} {x y : A}, ((x < y)%ord = false) -> eqtype.eq_op x y = false -> is_true (y < x)%ord).
Proof. clear ; intros. rewrite Ord.ltNge in H. apply ssrbool.negbFE in H. rewrite Ord.leq_eqVlt in H. rewrite LocationUtility.is_true_split_or in H. rewrite eqtype.eq_sym in H0. cbn in H. cbn in H0. rewrite H0 in H. destruct H ; [ discriminate | ]. apply H. Qed.

(* Inserting at a key strictly above the head [y] keeps [y] a valid
   lower bound: sortedness is preserved through [setm_def]. *)
Lemma path_path_setm_move_lowerbound : forall {A : ordType} B v (y z : A * B) (l : list (A * B)), is_true (fst y < fst z)%ord -> is_true (path.sorted Ord.lt (seq.unzip1 (y :: l))) -> is_true (path.sorted Ord.lt (seq.unzip1 ((setm_def l (fst z) v)))) -> is_true (path.sorted Ord.lt (seq.unzip1 (y :: (setm_def l (fst z) v)))).
Proof. intros. generalize dependent y. destruct l ; intros. - cbn. now rewrite H. - cbn. cbn in H1. pose proof (path_sorted_tl H1). cbn in H1. set (fst z < fst p)%ord in *. destruct b eqn:b_lt ; subst b ; cbn in H1. + cbn. rewrite H. rewrite b_lt. cbn. rewrite H2. reflexivity. + destruct eqtype.eq_op eqn:b_eq ; cbn in H1. * cbn. rewrite H. cbn. rewrite H1. reflexivity. * pose proof (ord_gt b_lt b_eq). clear b_lt b_eq. cbn. rewrite H1. cbn in H0. rewrite LocationUtility.is_true_split_and in H0. destruct H0. rewrite H0. reflexivity. Qed.
(* Unfolding lemma for [setm_def] on a cons cell, expressed as a single
   conditional shape covering the three orderings of [k] vs [fst a]. *)
Lemma setm_def_cons : forall (A : ordType) B (a : A * B) s (k : A) v, setm_def (a :: s) k v = ((if (fst a < k)%ord then a else (k, v) ) :: if (k < fst a)%ord then a :: s else if eqtype.eq_op k (fst a) then s else setm_def (T:=A) s k v).
Proof. intros. cbn. destruct (k < fst a)%ord eqn:k_lt_a. - unfold Ord.lt in k_lt_a. apply (ssrbool.rwP ssrbool.andP) in k_lt_a. destruct k_lt_a. rewrite Ord.leqNgt in H. apply ssrbool.negbTE in H. rewrite H. reflexivity. - destruct eqtype.eq_op eqn:k_eq_a. + unfold Ord.lt. rewrite eqtype.eq_sym in k_eq_a. rewrite k_eq_a. cbn. rewrite Bool.andb_false_r. reflexivity. + rewrite Ord.ltNge in k_lt_a. apply ssrbool.negbFE in k_lt_a. unfold Ord.lt. rewrite k_lt_a. rewrite eqtype.eq_sym in k_eq_a. rewrite k_eq_a. reflexivity. Qed.

(* Inserting into a cons-headed fmap equals re-inserting the head into
   the tail first (extensional equality via [setmE]). *)
Lemma setm_cons : forall (A : ordType) B (a : A * B) s (k : A) v H, setm (FMap.FMap (fmval:=(a :: s)) H) k v = setm (setm (FMap.FMap (fmval:=s) (path_sorted_tl H)) (fst a) (snd a)) k v.
Proof. intros. apply eq_fmap. intros t. rewrite !setmE. reflexivity. Qed.

(* An [nseq_ A (S n)] stores at most [S n] bindings: keys are ordinals
   below [S n], so the sorted value list cannot be longer. *)
Lemma array_is_max_length {A : choice_type} {n} (a : (nseq_ A (S n))) : (length (FMap.fmval a) <= S n)%nat.
Proof. induction n. - destruct a. cbn. destruct fmval. + cbn. lia. + destruct fmval. * cbn. lia. * cbn in i. destruct p , p0. destruct s , s1. cbn in i. destruct m , m0 ; discriminate. - cbn in *. specialize (IHn (tl_fmap a)). apply le_n_S in IHn. refine (le_trans (length (FMap.fmval a)) _ (S (S n)) _ IHn). apply lower_fval_smaller_length. Qed.

(* [i]-th element (with a bound proof) by peeling [i] tails. *)
Definition nth_nseq_ {A : choice_type} {n} (a : (nseq_ A (S n))) (i : nat) (H : (i <= n)%nat) : A.
Proof. generalize dependent i. induction n ; intros. - apply (nseq_hd a). - destruct i. + apply (nseq_hd a). + apply (IHn (nseq_tl a) i). apply le_S_n. apply H. Defined.

(* Array to list by repeated head/tail; missing entries become the
   canonical default, so the result always has length [n]. *)
Equations array_to_list {A : choice_type} {n} (f : (nseq_ A n)) : list (A) :=
  array_to_list (n:=O%nat) f := [] ;
  array_to_list (n:=S _%nat) f := nseq_hd f :: array_to_list (nseq_tl f).
Fail Next Obligation.
(* [array_to_list] always produces exactly [len] elements. *)
Theorem array_to_length_list_is_len : forall (A : choice_type) len (x : nseq_ A len), List.length (array_to_list x) = len.
Proof. intros. induction len. - reflexivity. - rewrite array_to_list_equation_2. simpl. rewrite IHlen. reflexivity. Defined.

(* Variant of [array_to_list] that keeps absent entries as [None]. *)
Equations array_to_option_list {A : choice_type} {n} (f : (nseq_ A n)) : list (chOption A) :=
  array_to_option_list (n:=O%nat) f := [] ;
  array_to_option_list (n:=S _%nat) f := nseq_hd_option f :: array_to_option_list (nseq_tl f).
Fail Next Obligation.

(* [array_to_option_list] also has length [len]. *)
Theorem array_to_length_option_list_is_len : forall (A : choice_type) len (x : nseq_ A len), List.length (array_to_option_list x) = len.
Proof. intros. induction len. - reflexivity. - rewrite array_to_option_list_equation_2. simpl. rewrite IHlen. reflexivity. Defined.

(* Writing at key 0 makes the written value the head. *)
Lemma nseq_hd_ord0 : forall A n (a : (nseq_ A (S n))) (x : A), @nseq_hd A (n) (setm a ord0 x) = x.
Proof. intros. cbn. destruct a. destruct fmval. + reflexivity. + cbn. destruct negb eqn:O_p. * reflexivity. * apply ssrbool.negbFE in O_p. rewrite O_p. reflexivity. Qed.

(* Writing at key 0 does not change the tail. *)
Lemma nseq_tl_ord0 : forall A n (a : (nseq_ A (S n))) (x : A), @nseq_tl A n (setm a ord0 x) = nseq_tl a.
Proof. intros. destruct n. + reflexivity. + destruct a. induction fmval as [ | p ]. * apply eq_fmap. intros ?. reflexivity. * destruct p, s. unfold setm. unfold fmap. unfold ord0. cbn. destruct m. -- cbn. rewrite !tl_fmap_equation_2. apply eq_fmap. intros ?. cbn. f_equal. now erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ fmval (path_sorted_tl i)) _ _ _) eq_refl). -- cbn. rewrite tl_fmap_equation_2. rewrite tl_fmap_equation_3. apply eq_fmap. intros ?. cbn. f_equal. now erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m) i0, s0) :: fmval) i) _ _ _) eq_refl). Qed.

(* Combined: listing after a write at 0 is a cons of the new value. *)
Lemma array_to_list_ord0 : forall A n (a : (nseq_ A (S n))) (x : A), @array_to_list A (S n) (setm a ord0 x) = x :: array_to_list (nseq_tl a).
Proof. intros. rewrite array_to_list_equation_2. f_equal. - apply nseq_hd_ord0. - f_equal. apply nseq_tl_ord0. Qed.
(* Head/tail decomposition agrees with [array_to_list] (definitional). *)
Lemma split_nseq_correct {A : choice_type} {n} (a : (nseq_ A (S n))) : nseq_hd a :: array_to_list (nseq_tl a) = array_to_list a.
Proof. reflexivity. Qed.

(* Array to sequence: list it, then rebuild a position-keyed fmap. *)
Definition array_to_seq {A : choice_type} {n} (f : (nseq_ A n)) : (seq A) :=
  seq_from_list _ (array_to_list f).

(* A slice with [i < j] (and enough room) has positive length. *)
Definition positive_slice {A : choice_type} {n} `{H: Positive n} (l : (nseq_ A n)) (i j : nat) `{H1: (i < j)%nat} `{(j - i < length (array_to_list l) - i)%nat} : Positive (length (slice (array_to_list l) i j)).
Proof. unfold slice. rewrite (proj2 (Nat.leb_gt j i) H1). rewrite firstn_length_le. - unfold Positive. apply (ssrbool.introT ssrnat.ltP). lia. - rewrite skipn_length. apply lt_n_Sm_le. lia. Defined.

(* Length of a [slice], pushed under the [j <=? i] test. *)
Theorem slice_length : forall A (l : list A) (i j : nat), length (slice l i j) = if (j <=? i)%nat then @length A ([]) else length (firstn (j - i + 1) (skipn i l)).
Proof. intros. unfold slice. destruct (j <=? i)%nat. - reflexivity. - reflexivity. Qed.

(* Slice of an array, as an array sized by the resulting list. *)
Definition lseq_slice {A : choice_type} {n} (l : (nseq_ A n)) (i j : nat) : (@nseq_ A (length (slice (array_to_list l) (i) (j)))) :=
  array_from_list (slice (array_to_list l) (i) (j)).

(* [n]-element sub-sequence of [s] starting at [start]. *)
Definition seq_sub {A : choice_type} (s : seq A) (start n : nat) :=
  lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) start (start + n)%nat.

(* Overwrite [len] elements of [out] (from [start_out]) with elements
   of [input] (from [start_in]). *)
Definition array_update_slice {A : choice_type} {l : nat} (out: ((nseq_ A l))) (start_out: uint_size) (input: seq A) (start_in: uint_size) (len: nat) : nseq_ A l :=
  update_sub out (from_uint_size start_out) (len) (seq_sub input (from_uint_size start_in) len).

(* NOTE(review): despite its name/signature, this ignores
   [default_value] (the [out] binding is unused), [start] and
   [slice_len], and just converts the whole [input] — confirm this
   simplification is intended upstream. *)
Definition array_from_slice {A: choice_type} (default_value: A) (out_len: nat) (input: (seq A)) (start: nat) (slice_len: nat) : (nseq_ A out_len) :=
  let out := array_new_ default_value out_len in
  array_from_seq out_len input.

(* NOTE(review): passes [slice_len] in the [start] position of
   [array_from_slice]; harmless given the current [array_from_slice]
   ignores [start], but worth confirming. *)
Definition array_slice {A: choice_type} (input: (seq A)) (start: nat) (slice_len: nat) : (nseq_ A slice_len) :=
  array_from_slice (chCanonical A) (slice_len) input (slice_len) (slice_len).
(* Array of size [out_len] filled from [input] restricted to the range
   [start, fin) given as machine integers. *)
Definition array_from_slice_range {a: choice_type} (default_value: a) (out_len: nat) (input: (seq a)) (start_fin: (uint_size * uint_size)) : (nseq_ a out_len).
Proof. pose (out := array_new_ default_value (out_len)). destruct start_fin as [start fin]. refine (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) _). apply (@lseq_slice a ((from_uint_size fin) - (from_uint_size start)) (array_from_seq ((from_uint_size fin) - (from_uint_size start)) input) (from_uint_size start) (from_uint_size fin)). Defined.

(* Slice of an array over a [start, fin] range, returned as a seq. *)
Definition array_slice_range {a: choice_type} {len : nat} (input: (nseq_ a len)) (start_fin:(uint_size * uint_size)) : (seq a) :=
  array_to_seq (lseq_slice input (from_uint_size (fst start_fin)) (from_uint_size (snd start_fin))).

(* Overwrite part of array [s] starting at [start] with the whole of
   sequence [start_s]. *)
Definition array_update {a: choice_type} {len: nat} (s: (nseq_ a len)) (start : uint_size) (start_s: (seq a)) : (nseq_ a len) :=
  update_sub s (from_uint_size start) (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) (start_s)).

(* [array_update] specialised to [start = 0]. *)
Definition array_update_start {a: choice_type} {len: nat} (s: (nseq_ a len)) (start_s: (seq a)) : (nseq_ a len) :=
  update_sub s 0 (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) start_s).

(* Static array length, as a machine integer. *)
Definition array_len {a: choice_type} {len: nat} (s: (nseq_ a len)) : uint_size := usize len.
(* May also come up as 'length' instead of 'len' *)
Definition array_length {a: choice_type} {len: nat} (s: (nseq_ a len)) : uint_size := usize len.

(**** Seq manipulation *)

(* Sub-sequence of [len] elements of [s] starting at [start]. *)
Definition seq_slice {a: choice_type} (s: ((seq a))) (start: (uint_size)) (len: (uint_size)) : (seq a) :=
  array_to_seq (lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) (from_uint_size start) ((from_uint_size start) + (from_uint_size len))).

(* [seq_slice] taking the (start, length) pair as one argument. *)
Definition seq_slice_range {a: choice_type} (input: ((seq a))) (start_fin:(((uint_size)) * ((uint_size)))) : ((seq a)) :=
  seq_slice input (fst start_fin) (snd start_fin).
(* Copy [n] elements of [sub] into [v] at offset [i]: position [i+k]
   receives [sub k] (default when absent), for [k] from [n-1] down. *)
Equations seq_update_sub {A : choice_type} (v : (seq A)) (i : nat) (n : nat) (sub : (seq A)) : (seq A) :=
  seq_update_sub v i 0 sub := v ;
  seq_update_sub v i (S n) sub := seq_update_sub (setm v (i+n)%nat match getm sub n with | Some y => y | None => (chCanonical A) end) i n sub.

(* updating a subsequence in a sequence *)
Definition seq_update {a: choice_type} (s: ((seq a))) (start: uint_size) (input: ((seq a))) : ((seq a)) :=
  seq_update_sub s (from_uint_size start) (from_uint_size (seq_len input)) input.

(* Older formulation of [seq_update] that round-trips through arrays. *)
Definition old_seq_update {a: choice_type} (s: ((seq a))) (start: uint_size) (input: ((seq a))) : ((seq a)) :=
  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) (from_uint_size start) (from_uint_size (seq_len input)) (array_from_seq (from_uint_size (seq_len input)) input)).

(* updating only a single value in a sequence*)
Definition seq_upd {a: choice_type} (s: ((seq a))) (start: uint_size) (v: ((a))) : ((seq a)) :=
  seq_update s start (setm emptym 0%nat v).

(* Overwrite the prefix of [s] with [start_s] (via the array model). *)
Definition seq_update_start {a: choice_type} (s: ((seq a))) (start_s: ((seq a))) : ((seq a)) :=
  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) 0 (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) start_s)).

(* Overwrite [len] elements of [out] at [start_out] with elements of
   [input] taken from [start_in]. *)
Definition seq_update_slice {A : choice_type} (out: seq A) (start_out: nat) (input: seq A) (start_in: nat) (len: nat) : ((seq A)) :=
  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len out)) out) start_out len (seq_sub input start_in len)).

(* Concatenation, via the list representation. *)
Definition seq_concat {A : choice_type} (s1 :seq A) (s2: seq A) : ((seq A)) :=
  seq_from_list _ (seq_to_list _ s1 ++ seq_to_list _ s2).

(* Owned variant is the same operation (ownership is a Rust notion). *)
Definition seq_concat_owned {A : choice_type} (s1 :seq A) (s2: seq A) : ((seq A)) := seq_concat s1 s2.

(* Append one element: store it at the next free index [seq_len_nat s1]. *)
Definition seq_push {A : choice_type} (s1 :seq A) (s2: ((A))) : ((seq A)) :=
  setm s1 (seq_len_nat s1) s2.
(* [seq_push] corresponds to appending a singleton on the list side. *)
Theorem seq_push_list_app : forall {A : choice_type} (t : (seq A)) (s : A), (seq_to_list A (Hacspec_Lib_Pre.seq_push t s) = seq_to_list A t ++ [s]).
Proof. intros. unfold seq_push. rewrite seq_to_list_setm. reflexivity. Qed.

(* Owned variant is the same operation. *)
Definition seq_push_owned {a : choice_type} (s1 :((seq a))) (s2: ((a))) : ((seq a)) := seq_push s1 s2.

(* New sequence holding [input]'s [start, fin) range, placed at offset
   0 of a default-initialised buffer of [input]'s length. *)
Definition seq_from_slice {A: choice_type} (input: ((seq A))) (start_fin: (((uint_size)) * ((uint_size)))) : ((seq A)) :=
  let out := array_new_ ((chCanonical A)) (from_uint_size (seq_len input)) in
  let (start, fin) := start_fin in
  array_to_seq (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) ((lseq_slice (array_from_seq (from_uint_size (seq_len input)) input) (from_uint_size start) (from_uint_size fin)))).

(* Same definition as [seq_from_slice] (kept for the hacspec API). *)
Definition seq_from_slice_range {A: choice_type} (input: ((seq A))) (start_fin: (((uint_size)) * ((uint_size)))) : ((seq A)) :=
  let out := array_new_ (chCanonical A) (from_uint_size (seq_len input)) in
  let (start, fin) := start_fin in
  array_to_seq (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) ((lseq_slice (array_from_seq (from_uint_size (seq_len input)) input) (from_uint_size start) (from_uint_size fin)))).

(* Copying a sequence is the identity in this model. *)
Definition seq_from_seq {A} (l : (seq A)) : (seq A) := l.

(**** Chunking *)

(* Number of chunks of size [chunk_len]:
   ceiling division ((len + chunk_len - 1) / chunk_len). *)
Definition seq_num_chunks {a: choice_type} (s: ((seq a))) (chunk_len: uint_size) : uint_size :=
  ((seq_len s .+ chunk_len .- one) ./ chunk_len)%nat.

(* Length of chunk [chunk_num]: [chunk_len], except the final partial
   chunk which only reaches the end of [s]. *)
Definition seq_chunk_len {a: choice_type} (s: ((seq a))) (chunk_len: nat) (chunk_num: nat) : 'nat :=
  let idx_start := (chunk_len * chunk_num)%nat in
  if ((from_uint_size (seq_len s)) <.? (idx_start + chunk_len))%nat
  then ((from_uint_size (seq_len s)) - idx_start)%nat
  else chunk_len.
(* Chunk [chunk_num] of [s], returned with its (possibly shorter)
   actual length. *)
Definition seq_get_chunk {a: choice_type} (s: ((seq a))) (chunk_len: uint_size) (chunk_num: uint_size) : (((uint_size × seq a))) :=
  let idx_start := (from_uint_size chunk_len * from_uint_size chunk_num)%nat in
  let out_len := seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num) in
  (usize out_len, array_to_seq (lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) idx_start (idx_start + seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num)))).

(* Replace chunk [chunk_num] of [s] with [chunk] (clipped to the
   chunk's actual length). *)
Definition seq_set_chunk {a: choice_type} (s: ((seq a))) (chunk_len: uint_size) (chunk_num: uint_size) (chunk: ((seq a)) ) : ((seq a)) :=
  let idx_start := (from_uint_size chunk_len * from_uint_size chunk_num)%nat in
  let out_len := seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num) in
  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) idx_start out_len (array_from_seq (from_uint_size (seq_len chunk)) chunk)).

(* Number of *full* chunks: floor division of the length. *)
Definition seq_num_exact_chunks {a} (l : ((seq a))) (chunk_size : ((uint_size))) : ((uint_size)) :=
  (repr _ (Z.of_nat (length l))) ./ chunk_size.

(* Chunk [chunk_num] only if it is a full chunk; otherwise empty.
   NOTE(review): the test is [len == chunk_size] yet the *empty* map is
   returned when it holds — verify the intended polarity upstream. *)
Definition seq_get_exact_chunk {a : choice_type} (l : ((seq a))) (chunk_size chunk_num: ((uint_size))) : ((seq a)) :=
  let '(len, chunk) := seq_get_chunk l chunk_size chunk_num in
  if eqtype.eq_op len chunk_size then emptym else chunk.

(* Setting an exact chunk is ordinary chunk update. *)
Definition seq_set_exact_chunk {A : choice_type} := @seq_set_chunk A.

(* The trailing partial chunk (empty when the last chunk is full). *)
Definition seq_get_remainder_chunk {a : choice_type} (l : (seq a)) (chunk_size : uint_size) : (seq a) :=
  let chunks := seq_num_chunks l chunk_size in
  let last_chunk := if (zero <.? chunks) then (chunks .- one)%nat else zero in
  let (len, chunk) := seq_get_chunk l chunk_size last_chunk in
  if eqtype.eq_op len chunk_size then emptym else chunk.

(* Pointwise XOR of two word lists; the longer tail is kept as-is. *)
Fixpoint list_xor_ {WS} (x y : list ((@int WS))) : list ((@int WS)) :=
  match x, y with
  | (x :: xs), (y :: ys) => (int_xor x y) :: (list_xor_ xs ys)
  | [] , _ => y
  | _, [] => x
  end.
(* Pointwise XOR of two sequences via their list representations. *)
Definition seq_xor_ {WS} (x y : (seq (@int WS))) : (seq (@int WS)) :=
  seq_from_list _ (list_xor_ (seq_to_list _ x) (seq_to_list _ y)).
Infix "seq_xor" := seq_xor_ (at level 33) : hacspec_scope.

(* First [n] elements of a list (shorter lists are returned whole). *)
Fixpoint list_truncate {a} (x : list a) (n : nat) : list a :=
  match x, n with
  | _, O => []
  | [], _ => []
  | (x :: xs), S n' => x :: (list_truncate xs n')
  end.

(* Truncate a sequence to [n] elements. *)
Definition seq_truncate {a : choice_type} (x : (seq a)) (n : nat) : (seq a) :=
  seq_from_list _ (list_truncate (seq_to_list _ x) n).

(**** Numeric operations *)

(* takes two nseq's and joins them using a function op : a -> a -> a *)
Definition array_join_map {a: choice_type} {len: nat} (op: ((a)) -> ((a)) -> ((a))) (s1: ((nseq_ a len))) (s2 : ((nseq_ a len))) :=
  let out := s1 in
  foldi (usize 0%nat) (usize len) (fun i out => array_upd out i (op (array_index s1 i) (array_index s2 i)) ) out.

(* Pointwise integer operations on arrays, as infix notations. *)
Infix "array_xor" := (array_join_map (a := int _) int_xor) (at level 33) : hacspec_scope.
Infix "array_add" := (array_join_map (a := int _) int_add) (at level 33) : hacspec_scope.
Infix "array_minus" := (array_join_map (a := int _) int_sub) (at level 33) : hacspec_scope.
Infix "array_mul" := (array_join_map (a := int _) int_mul) (at level 33) : hacspec_scope.
Infix "array_div" := (array_join_map (a := int _) int_div) (at level 33) : hacspec_scope.
Infix "array_or" := (array_join_map (a := int _) int_or) (at level 33) : hacspec_scope.
Infix "array_and" := (array_join_map (a := int _) int_and) (at level 33) : hacspec_scope.

(* Array equality test.  NOTE(review): as written this only compares
   the entries at index [len] (the last position of each recursive
   step's view) and never recurses on [len] — confirm against the
   upstream definition whether a full pointwise comparison was
   intended. *)
Fixpoint array_eq_ {a: choice_type} {len: nat} (eq: ((a)) -> ((a)) -> bool) (s1: ((nseq_ a len))) (s2 : ((nseq_ a len))) {struct len} : bool.
Proof. destruct len ; cbn in *. - exact true. - destruct (getm s1 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s | ]. + destruct (getm s2 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s0 | ]. * exact (eq s s0). * exact false. + exact false. Defined.
Infix "array_eq" := (array_eq_ eq) (at level 33) : hacspec_scope.
Infix "array_neq" := (fun s1 s2 => negb (array_eq_ eq s1 s2)) (at level 33) : hacspec_scope.

(*** Nats *)

(* Naturals modulo [p], encoded as the finite choice type with
   [S (pred (Z.to_nat p))] elements (so [p <= 0] degenerates to one element). *)
Definition nat_mod (p : Z) : choice_type := 'fin (S (Init.Nat.pred (Z.to_nat p))).
(* Definition nat_mod_type {p : Z} : Type := 'I_(S (Init.Nat.pred (Z.to_nat p))). *)

(* Inject an integer into [nat_mod p] (reduces modulo p via [inZp]). *)
Definition mk_natmod {p} (z : Z) : (nat_mod p) := @zmodp.inZp (Init.Nat.pred (Z.to_nat p)) (Z.to_nat z).

(* Decidable equality on [nat_mod p], with its reflection lemma. *)
Definition nat_mod_equal {p} (a b : (nat_mod p)) : bool :=
  @eqtype.eq_op (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.
Definition nat_mod_equal_reflect {p} {a b} : Bool.reflect (a = b) (@nat_mod_equal p a b) :=
  @eqtype.eqP (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.

(* Small constants and the ring/field operations, delegated to mathcomp's zmodp. *)
Definition nat_mod_zero {p} : (nat_mod p) := zmodp.Zp0.
Definition nat_mod_one {p} : (nat_mod p) := zmodp.Zp1.
Definition nat_mod_two {p} : (nat_mod p) := zmodp.inZp 2.
Definition nat_mod_add {n : Z} (a : (nat_mod n)) (b : (nat_mod n)) : (nat_mod n) := zmodp.Zp_add a b.
Infix "+%" := nat_mod_add (at level 33) : hacspec_scope.
Definition nat_mod_mul {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_mul a b.
Infix "*%" := nat_mod_mul (at level 33) : hacspec_scope.
(* Subtraction as addition of the additive inverse. *)
Definition nat_mod_sub {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_add a (zmodp.Zp_opp b).
Infix "-%" := nat_mod_sub (at level 33) : hacspec_scope.
(* Division as multiplication by the (Zp) inverse; behaviour for non-invertible
   [b] follows [zmodp.Zp_inv]. *)
Definition nat_mod_div {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_mul a (zmodp.Zp_inv b).
Infix "/%" := nat_mod_div (at level 33) : hacspec_scope.
Definition nat_mod_neg {n : Z} (a:(nat_mod n)) : (nat_mod n) := zmodp.Zp_opp a.
Definition nat_mod_inv {n : Z} (a:(nat_mod n)) : (nat_mod n) := zmodp.Zp_inv a.

(* a^n by naive repeated multiplication.
   NOTE(review): the local argument [e] is never used — the recursion always
   multiplies by the outer [a] — so the result is a^n regardless of [e]. *)
Definition nat_mod_exp_def {p : Z} (a:(nat_mod p)) (n : nat) : (nat_mod p) :=
  let fix exp_ (e : (nat_mod p)) (n : nat) :=
    match n with
    | 0%nat => nat_mod_one
    | S n => nat_mod_mul a (exp_ a n)
    end in
  exp_ a n.
(* Exponentiation with a machine-integer exponent (interpreted unsigned). *)
Definition nat_mod_exp {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).
Definition nat_mod_pow {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).
(* Exponentiation with a [uint_size] exponent. *)
Definition nat_mod_pow_felem {p} a n := @nat_mod_exp_def p a (Z.to_nat (from_uint_size n)).
Definition nat_mod_pow_self {p} a n := @nat_mod_pow_felem p a n.
Close Scope nat_scope.

(* Build a field element from a (secret) int128 literal, reducing mod m. *)
Definition nat_mod_from_secret_literal {m : Z} (x:int128) : (nat_mod m) :=
  @zmodp.inZp (Init.Nat.pred (Z.to_nat m)) (Z.to_nat (unsigned x)).
(* Public variant; identical computation. *)
Definition nat_mod_from_literal (m : Z) (x:int128) : (nat_mod m) := nat_mod_from_secret_literal x.

(* Byte-serialisation of field elements is axiomatised (not computed here). *)
Axiom nat_mod_to_byte_seq_le : forall {n : Z}, (nat_mod n) -> (seq int8).
Axiom nat_mod_to_byte_seq_be : forall {n : Z}, (nat_mod n) -> (seq int8).
Axiom nat_mod_to_public_byte_seq_le : forall (n : Z), (nat_mod n) -> (seq int8).
Axiom nat_mod_to_public_byte_seq_be : forall (n : Z), (nat_mod n) -> (seq int8).

(* Underlying integer value of a field element. *)
Definition nat_mod_val (p : Z) (a : (nat_mod p)) : Z := Z.of_nat (nat_of_ord a).

(* Bit [i] of the underlying value. *)
Definition nat_mod_bit {n : Z} (a : (nat_mod n)) (i : uint_size) : 'bool :=
  Z.testbit (nat_mod_val _ a) (from_uint_size i).
(* Alias for nat_mod_bit *)
Definition nat_get_mod_bit {p} (a : (nat_mod p)) := nat_mod_bit a.
(* Bit [n] of [a], returned as a field element (one or zero). *)
Definition nat_mod_get_bit {p} (a : (nat_mod p)) n :=
  if (nat_mod_bit a n)
  then @nat_mod_one p
  else @nat_mod_zero p.

(* Array (de)serialisation primitives, axiomatised. *)
Axiom array_declassify_eq : forall  {A l}, (nseq_ A l) -> (nseq_ A l) -> 'bool.
Axiom array_to_le_uint32s : forall {A l}, (nseq_ A l) -> (seq uint32).
Axiom array_to_be_uint32s : forall {l}, (nseq_ uint8 l) -> (seq uint32).
Axiom array_to_le_uint64s : forall {A l}, (nseq_ A l) -> (seq uint64).
Axiom array_to_be_uint64s : forall {l}, (nseq_ uint8 l) -> (seq uint64).
Axiom array_to_le_uint128s : forall {A l}, (nseq_ A l) -> (seq uint128).
Axiom array_to_be_uint128s : forall {l}, (nseq_ uint8 l) -> (seq uint128).
Axiom array_to_le_bytes : forall {A l}, (nseq_ A l) -> (seq uint8).
Axiom array_to_be_bytes : forall {A l}, (nseq_ A l) -> (seq uint8).
Axiom nat_mod_from_byte_seq_le : forall {A n}, (seq A) -> (nat_mod n).
Axiom most_significant_bit : forall {m}, (nat_mod m) -> uint_size -> uint_size.

(* We assume 2^x < m *)
Definition nat_mod_pow2 (m : Z) (x : N) : (nat_mod m) := mk_natmod (Z.pow 2 (Z.of_N x)).

Section Casting.
  (* Type casts, as defined in Section 4.5 in https://arxiv.org/pdf/1106.3448.pdf *)
  Class Cast A B := cast : A -> B.
  Arguments cast {_} _ {_}.
  Notation "' x" := (cast _ x) (at level 20) : hacspec_scope.

  (* Casting to self is always possible *)
  Global Instance cast_self {A} : Cast A A := { cast a := a }.

  (* Casts compose transitively. *)
  Global Instance cast_transitive {A B C} `{Hab: Cast A B} `{Hbc: Cast B C} : Cast A C :=
    { cast a := Hbc (Hab a) }.

  (* Casts lift componentwise through products and options. *)
  Global Instance cast_prod {A B C D} `{Cast A B} `{Cast C D} : Cast (A * C) (B * D) :=
    { cast '(a, c) := (cast _ a, cast _ c) }.
  Global Instance cast_option {A B} `{Cast A B} : Cast (option A) (option B) :=
    { cast a := match a with Some a => Some (cast _ a) | None => None end }.
  Global Instance cast_option_b {A B} `{Cast A B} : Cast A (option B) :=
    { cast a := Some (cast _ a) }.

  (* Global Instances for common types *)
  Global Instance cast_nat_to_N : Cast nat N := { cast := N.of_nat }.
  Global Instance cast_N_to_Z : Cast N Z := { cast := Z.of_N }.
  Global Instance cast_Z_to_int {WORDSIZE} : Cast Z ((@int WORDSIZE)) := { cast n := repr _ n }.
  Global Instance cast_natmod_to_Z {p} : Cast ((nat_mod p)) Z := { cast n := nat_mod_val _ n }.

  (* Note: should be aware of typeclass resolution with int/uint since they are just aliases of each other currently *)
  Global Instance cast_int8_to_uint32 : Cast (int8) (uint32) := { cast n := repr _ (unsigned n) }.
  (* Signed widening (sign-extends), unlike the unsigned casts around it. *)
  Global Instance cast_int8_to_int32 : Cast (int8) (int32) := { cast n := repr _ (signed n) }.
  Global Instance cast_uint8_to_uint32 : Cast (uint8) (uint32) := { cast n := repr _ (unsigned n) }.
  Global Instance cast_int_to_nat `{WS : wsize} : Cast (int _) nat := { cast n := Z.to_nat (@signed WS n) }.

  Close Scope hacspec_scope.
End Casting.
Global Arguments pair {_ _} & _ _.

Section Coercions.
  (* First, in order to have automatic coercions for tuples, we add bidirectionality hints: *)
  Global Coercion N.to_nat : N >-> nat.
  Global Coercion Z.of_N : N >-> Z.

  (* Z into machine integers / sizes (via two's-complement [repr]). *)
  Definition Z_to_int `{WS : wsize} (n : Z) : (int WS) := repr _ n.
  Global Coercion Z_to_int : Z >-> choice.Choice.sort.
  Definition Z_to_uint_size (n : Z) : uint_size := repr _ n.
  Global Coercion Z_to_uint_size : Z >-> choice.Choice.sort.
  Definition Z_to_int_size (n : Z) : int_size := repr _ n.
  Global Coercion Z_to_int_size : Z >-> choice.Choice.sort.

  (* N / nat into machine integers. *)
  Definition N_to_int `{WS : wsize} (n : N) : (@int WS) := repr _ (Z.of_N n).
  Global Coercion N.of_nat : nat >-> N.
  Global Coercion N_to_int : N >-> choice.Choice.sort.
  (* NOTE(review): despite the name, this takes a [Z] argument and is
     registered as a Z coercion — confirm the name/intent. *)
  Definition N_to_uint_size (n : Z) : uint_size := repr _ n.
  Global Coercion N_to_uint_size : Z >-> choice.Choice.sort.
  Definition nat_to_int `{WS : wsize} (n : nat) : (@int WS) := repr _ (Z.of_nat n).
  Global Coercion nat_to_int : nat >-> choice.Choice.sort.

  (* Machine integers back into nat / Z (unsigned interpretation). *)
  Definition uint_size_to_nat (n : uint_size) : nat := from_uint_size n.
  Global Coercion uint_size_to_nat : choice.Choice.sort >-> nat.
  Definition uint_size_to_Z (n : uint_size) : Z := from_uint_size n.
  Global Coercion uint_size_to_Z : choice.Choice.sort >-> Z.
  Definition uint32_to_nat (n : uint32) : nat := Z.to_nat (unsigned n).
  Global Coercion uint32_to_nat : choice.Choice.sort >-> nat.
  Definition int8_to_nat (n : int8) : nat := Z.to_nat (unsigned n).
  Global Coercion int8_to_nat : choice.Choice.sort >-> nat.
  Definition int16_to_nat (n : int16) : nat := Z.to_nat (unsigned n).
  Global Coercion int16_to_nat : choice.Choice.sort >-> nat.
  Definition int32_to_nat (n : int32) : nat := Z.to_nat (unsigned n).
  Global Coercion int32_to_nat : choice.Choice.sort >-> nat.
  Definition int64_to_nat (n : int64) : nat := Z.to_nat (unsigned n).
  Global Coercion int64_to_nat : choice.Choice.sort >-> nat.
  Definition int128_to_nat (n : int128) : nat := Z.to_nat (unsigned n).
  Global Coercion int128_to_nat : choice.Choice.sort >-> nat.

  (* Unsigned widening conversions between machine-integer sizes, registered
     as coercions so literals flow between widths automatically. *)
  Definition int8_to_int16 (n : int8) : int16 := (repr _ (unsigned n)).
  Global Coercion int8_to_int16 : choice.Choice.sort >-> choice.Choice.sort.
  Definition int8_to_int32 (n : int8) : int32 := repr _ (unsigned n).
  Global Coercion int8_to_int32 : choice.Choice.sort >-> choice.Choice.sort.
  Definition int16_to_int32 (n : int16) : int32 := repr _ (unsigned n).
  Global Coercion int16_to_int32 : choice.Choice.sort >-> choice.Choice.sort.
  Definition int32_to_int64 (n : int32) : int64 := repr _ (unsigned n).
  Global Coercion int32_to_int64 : choice.Choice.sort >-> choice.Choice.sort.
  Definition int64_to_int128 (n : int64) : int128 := repr _ (unsigned n).
  Global Coercion int64_to_int128 : choice.Choice.sort >-> choice.Choice.sort.
  Definition int32_to_int128 (n : int32) : int128 := repr _ (unsigned n).
  Global Coercion int32_to_int128 : choice.Choice.sort >-> choice.Choice.sort.
  Definition uint_size_to_int64 (n : uint_size) : int64 := repr _ (unsigned n).
  Global Coercion uint_size_to_int64 : choice.Choice.sort >-> choice.Choice.sort.

  (* Conversions between machine integers / Z and field elements. *)
  Definition Z_in_nat_mod {m : Z} (x:Z) : (@nat_mod m) := @mk_natmod m x.
  Definition int_in_nat_mod {m : Z} `{WS : wsize} (x:(@int WS)) : (@nat_mod m) := mk_natmod (unsigned x).
  Global Coercion int_in_nat_mod : choice.Choice.sort >-> choice.Choice.sort.
  Definition nat_mod_in_int {m : Z} `{WS : wsize} (x:(@nat_mod m)) : (@int WS) := (repr _ (nat_mod_val _ x)).
  Global Coercion nat_mod_in_int : choice.Choice.sort >-> choice.Choice.sort.
  Definition nat_mod_in_Z {m : Z} `{WS : wsize} (x:(@nat_mod m)) : Z := (nat_mod_val _ x).
  Global Coercion nat_mod_in_Z : choice.Choice.sort >-> Z.
  (* NOTE(review): the modulus 16 here looks arbitrary — confirm against callers. *)
  Definition uint_size_in_nat_mod (n : uint_size) : (@nat_mod 16) := (int_in_nat_mod n).
  Global Coercion uint_size_in_nat_mod : choice.Choice.sort >-> choice.Choice.sort.
End Coercions.

(*** Casting *)

(* Explicit (non-coercion) unsigned conversions, mirroring hacspec's cast names. *)
Definition uint128_from_usize (n : uint_size) : int128 := repr _ (unsigned n).
(* Unsigned conversions from usize and between integer widths.  Narrowing
   conversions truncate modulo the target width (behaviour of [repr]). *)
Definition uint64_from_usize (n : uint_size) : int64 := repr _ (unsigned n).
Definition uint32_from_usize (n : uint_size) : int32 := repr _ (unsigned n).
Definition uint16_from_usize (n : uint_size) : int16 := repr _ (unsigned n).
Definition uint8_from_usize (n : uint_size) : int8 := repr _ (unsigned n).

Definition uint128_from_uint8 (n : int8) : int128 := repr _ (unsigned n).
Definition uint64_from_uint8 (n : int8) : int64 := repr _ (unsigned n).
Definition uint32_from_uint8 (n : int8) : int32 := repr _ (unsigned n).
Definition uint16_from_uint8 (n : int8) : int16 := repr _ (unsigned n).
Definition usize_from_uint8 (n : int8) : uint_size := repr _ (unsigned n).

Definition uint128_from_uint16 (n : int16) : int128 := repr _ (unsigned n).
Definition uint64_from_uint16 (n : int16) : int64 := repr _ (unsigned n).
Definition uint32_from_uint16 (n : int16) : int32 := repr _ (unsigned n).
Definition uint8_from_uint16 (n : int16) : int8 := repr _ (unsigned n).
Definition usize_from_uint16 (n : int16) : uint_size := repr _ (unsigned n).

Definition uint128_from_uint32 (n : int32) : int128 := repr _ (unsigned n).
Definition uint64_from_uint32 (n : int32) : int64 := repr _ (unsigned n).
Definition uint16_from_uint32 (n : int32) : int16 := repr _ (unsigned n).
Definition uint8_from_uint32 (n : int32) : int8 := repr _ (unsigned n).
Definition usize_from_uint32 (n : int32) : uint_size := repr _ (unsigned n).

Definition uint128_from_uint64 (n : int64) : int128 := repr _ (unsigned n).
Definition uint32_from_uint64 (n : int64) : int32 := repr _ (unsigned n).
Definition uint16_from_uint64 (n : int64) : int16 := repr _ (unsigned n).
Definition uint8_from_uint64 (n : int64) : int8 := repr _ (unsigned n).
Definition usize_from_uint64 (n : int64) : uint_size := repr _ (unsigned n).

Definition uint64_from_uint128 (n : int128) : int64 := repr _ (unsigned n).
Definition uint32_from_uint128 (n : int128) : int32 := repr _ (unsigned n).
Definition uint16_from_uint128 (n : int128) : int16 := repr _ (unsigned n).
Definition uint8_from_uint128 (n : int128) : int8 := repr _ (unsigned n).
Definition usize_from_uint128 (n : int128) : uint_size := repr _ (unsigned n).

Definition uint8_equal : int8 -> int8 -> bool := eqb.

(* [nat_mod_equal] decides Leibniz equality (from the reflection lemma). *)
Theorem nat_mod_eqb_spec : forall {p} (a b : (nat_mod p)), nat_mod_equal a b = true <-> a = b.
Proof. symmetry ; exact (ssrbool.rwP nat_mod_equal_reflect). Qed.

Global Instance nat_mod_eqdec {p} : EqDec ((nat_mod p)) :=
  { eqb := nat_mod_equal ;
    eqb_leibniz := nat_mod_eqb_spec; }.

(* Order on field elements by comparing their underlying integer values. *)
Global Instance nat_mod_comparable `{p : Z} : Comparable ((nat_mod p)) :=
  { ltb a b := Z.ltb (nat_mod_val p a) (nat_mod_val p b);
    leb a b := if Zeq_bool (nat_mod_val p a) (nat_mod_val p b) then true else Z.ltb (nat_mod_val p a) (nat_mod_val p b) ;
    gtb a b := Z.ltb (nat_mod_val p b) (nat_mod_val p a);
    geb a b := if Zeq_bool (nat_mod_val p b) (nat_mod_val p a) then true else Z.ltb (nat_mod_val p b) (nat_mod_val p a) ;
  }.

(* Remainder by repeated subtraction, with fuel [f] to guarantee termination. *)
Fixpoint nat_mod_rem_aux {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) (f : nat) {struct f} : (nat_mod n) :=
  match f with
  | O => a
  | S f' => if geb a b
            then nat_mod_rem_aux (nat_mod_sub a b) b f'
            else a
  end.

(* a mod b; fuel is a/b + 1.
   NOTE(review): returns [nat_mod_one] when b = 0 — confirm this sentinel is intended. *)
Definition nat_mod_rem {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) :=
  if nat_mod_equal b nat_mod_zero
  then nat_mod_one
  else nat_mod_rem_aux a b (S (Z.to_nat (nat_mod_val n (nat_mod_div a b)))).
Infix "rem" := nat_mod_rem (at level 33) : hacspec_scope.

Global Instance bool_eqdec : EqDec bool :=
  { eqb := Bool.eqb;
    eqb_leibniz := Bool.eqb_true_iff; }.

Global Instance string_eqdec : EqDec String.string :=
  { eqb := String.eqb;
    eqb_leibniz := String.eqb_eq ; }.

(* Structural decidable equality on lists over an EqDec element type. *)
Fixpoint list_eqdec {A} `{EqDec A} (l1 l2 : list A) : bool :=
  match l1, l2 with
  | x::xs, y::ys => if eqb x y then list_eqdec xs ys else false
  | [], [] => true
  | _,_ => false
  end.

Lemma list_eqdec_refl : forall {A} `{EqDec A} (l1 : list A), list_eqdec l1 l1 = true.
Proof. intros ; induction l1 ; cbn ; try rewrite eqb_refl ; easy. Qed.
(* [list_eqdec] decides Leibniz equality of lists. *)
Lemma list_eqdec_sound : forall {A} `{EqDec A} (l1 l2 : list A), list_eqdec l1 l2 = true <-> l1 = l2.
Proof.
  intros A H l1.
  induction l1 ; induction l2 ; split ; intros ; simpl in * ; try easy ; try inversion H0.
  - (* inductive case *)
    apply Field_theory.if_true in H0; destruct H0.
    f_equal.
    (* show heads are equal *)
    + apply (proj1 (eqb_leibniz a a0) H0).
    (* show tails are equal using induction hypothesis *)
    + apply IHl1. assumption.
  - rewrite eqb_refl. apply list_eqdec_refl.
Qed.

Global Instance List_eqdec {A} `{EqDec A} : EqDec (list A) :=
  { eqb := list_eqdec;
    eqb_leibniz := list_eqdec_sound; }.

(* Componentwise decidable equality on pairs. *)
Global Program Instance Dec_eq_prod (A B : Type) `{EqDec A} `{EqDec B} : EqDec (A * B) :=
  { eqb '(a0, b0) '(a1, b1) := andb (eqb a0 a1) (eqb b0 b1) }.
Next Obligation.
  split ; intros ; destruct x ; destruct y.
  - rewrite LocationUtility.is_true_split_and in H1. destruct H1.
    rewrite (eqb_leibniz) in H1.
    rewrite (eqb_leibniz) in H2.
    rewrite H1. rewrite H2. reflexivity.
  - inversion_clear H1. now do 2 rewrite eqb_refl.
Defined.

(*** Be Bytes *)

(* Bits [n .. n+k-1] of [z], most significant first. *)
Fixpoint nat_be_range_at_position (k : nat) (z : Z) (n : Z) : list bool :=
  match k with
  | O => []
  | S k' => Z.testbit z (n + k') :: nat_be_range_at_position k' z n
  end.

(* Fold a big-endian bit list back into an integer, accumulating in [val]. *)
Fixpoint nat_be_range_to_position_ (z : list bool) (val : Z) : Z :=
  match z with
  | [] => val
  | x :: xs => nat_be_range_to_position_ xs ((if x then 2 ^ List.length xs else 0) + val)
  end.

Definition nat_be_range_to_position (k : nat) (z : list bool) (n : Z) : Z :=
  (nat_be_range_to_position_ z 0 * 2^(k * n)).

(* [n]-th [k]-bit digit of [z]; bit-list and arithmetic formulations. *)
Definition nat_be_range' (k : nat) (z : Z) (n : nat) : Z :=
  nat_be_range_to_position_ (nat_be_range_at_position k z (n * k)) 0.
Definition nat_be_range (k : nat) (z : Z) (n : nat) :=
  ((z / 2 ^ (n * k)%Z) mod 2 ^ k)%Z.

(* Byte decomposition of a Z as a list (least-significant digit at index 0). *)
Definition to_be_bytes' {WS} : Z -> list Z :=
  (fun (k : Z) => (map (fun i : nat => nat_be_range 8 k i) (seq.iota 0 (nat_of_wsize WS / 8)))).
(* Same as [to_be_bytes'] but using the bit-list digit extractor. *)
Definition to_be_bytes'' {WS} : Z -> list Z :=
  (fun (k : Z) => (map (fun i : nat => nat_be_range' 8 k i) (seq.iota 0 (nat_of_wsize WS / 8)))).

(* Big-endian byte decomposition of a machine integer as a (WS/8)-element
   array.  The [eq_rect]s only transport length equalities (map_length,
   size_iota) into the [nseq_] index — no data is changed. *)
Definition to_be_bytes {WS} : (@int WS) -> (nseq_ int8 (WS / 8)) :=
  (fun (k : int _) =>
     eq_rect (seq.size (seq.iota 0 (nat_of_wsize WS / 8))) (fun n : nat => (nseq_ uint8 n))
       (eq_rect _ (fun n : nat => (nseq_ uint8 n))
          (array_from_list (map (fun i : nat => repr _ (nat_be_range 8 (toword k) i) : int _) (seq.iota 0 (nat_of_wsize WS / 8))))
          (length (seq.iota 0 (nat_of_wsize WS / 8)))
          (map_length (fun i : nat => repr _ (nat_be_range 8 (toword k) i)) (seq.iota 0 (nat_of_wsize WS / 8))))
       (nat_of_wsize WS / 8)%nat
       (seq.size_iota 0 (nat_of_wsize WS / 8))).

(* Fold step for big-endian recomposition: current byte contributes with
   weight 2^(8n), and the position counter increases. *)
Definition from_be_bytes_fold_fun {WS} (i : int8) (s : ('nat × @int WS)) : ('nat × @int WS) :=
  let (n,v) := s in
  (S n, v .+ (repr WS (int8_to_nat i * (2 ^ (8 * Z.of_nat n)))%Z)).

(* Recompose a big-endian byte array into a WS-bit integer ([fold_right]
   visits the last — least significant — byte first, at position 0). *)
Definition from_be_bytes {WS : wsize} : (nseq_ int8 (WS / 8)) -> (@int WS) :=
  (fun v => snd (List.fold_right from_be_bytes_fold_fun (0%nat, @repr WS 0%Z) (array_to_list v))).

(* Little-endian byte lists: the big-endian digit order, reversed. *)
Definition to_le_bytes' {WS} : Z -> list Z :=
  (fun (k : Z) => (map (fun i : nat => nat_be_range 8 k i) (rev (seq.iota 0 (nat_of_wsize WS / 8))))).
Definition to_le_bytes'' {WS} : Z -> list Z :=
  (fun (k : Z) => (map (fun i : nat => nat_be_range' 8 k i) (rev (seq.iota 0 (nat_of_wsize WS / 8))))).
(* Little-endian byte decomposition of a machine integer: the big-endian
   digits over a reversed index list.  The nested [eq_rect]s only transport
   list-length equalities (map_length, rev_length, size_iota) into the
   [nseq_] index — no data is changed. *)
Definition to_le_bytes {WS} : (@int WS) -> (nseq_ int8 (WS / 8)) :=
  fun (k : int _) =>
    eq_rect (seq.size (seq.iota 0 (nat_of_wsize WS / 8))) (fun n : nat => (nseq_ uint8 n))
      (eq_rect (length (rev (seq.iota 0 (nat_of_wsize WS / 8)))) (fun n : nat => (nseq_ uint8 n))
         (eq_rect (length (map (fun i : nat => repr _ (nat_be_range 8 (toword k) i)) (rev (seq.iota 0 (nat_of_wsize WS / 8))))) (fun n : nat => (nseq_ uint8 n))
            (array_from_list (map (fun i : nat => repr _ (nat_be_range 8 (toword k) i)) (rev (seq.iota 0 (nat_of_wsize WS / 8)))))
            (length (rev (seq.iota 0 (nat_of_wsize WS / 8))))
            (map_length (fun i : nat => repr _ (nat_be_range 8 (toword k) i)) (rev (seq.iota 0 (nat_of_wsize WS / 8)))))
         (length (seq.iota 0 (nat_of_wsize WS / 8)))
         (rev_length (seq.iota 0 (nat_of_wsize WS / 8))))
      (nat_of_wsize WS / 8)%nat
      (seq.size_iota 0 (nat_of_wsize WS / 8)).

(* Fold step for little-endian recomposition: the current byte contributes
   with weight 2^(8n) and the position counter *decreases* towards the low
   byte (mirror image of [from_be_bytes_fold_fun]). *)
Definition from_le_bytes_fold_fun {WS} (i : int8) (s : ('nat × @int WS)) : ('nat × @int WS) :=
  let (n,v) := s in
  (Nat.pred n, v .+ (@repr WS ((int8_to_nat i) * 2 ^ (8 * Z.of_nat n))%Z)).

(* Recompose a little-endian byte array into a WS-bit integer.  [fold_right]
   visits the last — most significant — byte first, at position WS/8 - 1.
   BUG FIX: this previously folded with [from_be_bytes_fold_fun], which
   increments the counter; starting from WS/8 - 1 every byte after the first
   received weight 2^WS or more and wrapped to zero, so only one byte of the
   input survived.  Use the decrementing little-endian step defined above
   (which was previously dead code). *)
Definition from_le_bytes {WS : wsize} : (nseq_ int8 (WS / 8)) -> (@int WS) :=
  (fun v => snd (List.fold_right from_le_bytes_fold_fun (((WS / 8) - 1)%nat, @repr WS 0%Z) (array_to_list v))).

(**** Integers to arrays *)
Definition uint16_to_le_bytes : int16 -> (nseq_ int8 2) := @to_le_bytes U16.
Definition uint16_to_be_bytes : int16 -> (nseq_ int8 2) := @to_be_bytes U16.
Definition uint16_from_le_bytes : (nseq_ int8 2) -> int16 := @from_le_bytes U16.
Definition uint16_from_be_bytes : (nseq_ int8 2) -> int16 := @from_be_bytes U16.
Definition uint32_to_le_bytes : int32 -> (nseq_ int8 4) := @to_le_bytes U32.
Definition uint32_to_be_bytes : int32 -> (nseq_ int8 4) := @to_be_bytes U32.
Definition uint32_from_le_bytes : (nseq_ int8 4) -> int32 := @from_le_bytes U32.
Definition uint32_from_be_bytes : (nseq_ int8 4) -> int32 := @from_be_bytes U32.
Definition uint64_to_le_bytes : int64 -> (nseq_ int8 8) := @to_le_bytes U64.
(* Width-specialised byte (de)serialisation aliases; the [u*] names mirror
   the public hacspec API, the [uint*] names the secret-integer API. *)
Definition uint64_to_be_bytes : int64 -> (nseq_ int8 8) := @to_be_bytes U64.
Definition uint64_from_le_bytes : (nseq_ int8 8) -> int64 := @from_le_bytes U64.
Definition uint64_from_be_bytes : (nseq_ int8 8) -> int64 := @from_be_bytes U64.
Definition uint128_to_le_bytes : int128 -> (nseq_ int8 16) := @to_le_bytes U128.
Definition uint128_to_be_bytes : int128 -> (nseq_ int8 16) := @to_be_bytes U128.
Definition uint128_from_le_bytes : (nseq_ int8 16) -> int128 := @from_le_bytes U128.
Definition uint128_from_be_bytes : (nseq_ int8 16) -> int128 := @from_be_bytes U128.
Definition u16_to_be_bytes : int16 -> (nseq_ int8 2) := @to_be_bytes U16.
Definition u16_from_be_bytes : (nseq_ int8 2) -> int16 := @from_be_bytes U16.
Definition u16_to_le_bytes : int16 -> (nseq_ int8 2) := @to_le_bytes U16.
Definition u16_from_le_bytes : (nseq_ int8 2) -> int16 := @from_le_bytes U16.
Definition u32_to_be_bytes : int32 -> (nseq_ int8 4) := @to_be_bytes U32.
Definition u32_from_be_bytes : (nseq_ int8 4) -> int32 := @from_be_bytes U32.
Definition u32_to_le_bytes : int32 -> (nseq_ int8 4) := @to_le_bytes U32.
Definition u32_from_le_bytes : (nseq_ int8 4) -> int32 := @from_le_bytes U32.
Definition u64_to_be_bytes : int64 -> (nseq_ int8 8) := @to_be_bytes U64.
Definition u64_from_be_bytes : (nseq_ int8 8) -> int64 := @from_be_bytes U64.
Definition u64_to_le_bytes : int64 -> (nseq_ int8 8) := @to_le_bytes U64.
Definition u64_from_le_bytes : (nseq_ int8 8) -> int64 := @from_le_bytes U64.
Definition u128_to_be_bytes : int128 -> (nseq_ int8 16) := @to_be_bytes U128.
Definition u128_from_be_bytes : (nseq_ int8 16) -> int128 := @from_be_bytes U128.
Definition u128_to_le_bytes : int128 -> (nseq_ int8 16) := @to_le_bytes U128.
Definition u128_from_le_bytes : (nseq_ int8 16) -> int128 := @from_le_bytes U128.

(*** Result *)

(* Rust-style result type.  Note the argument flip: the ERROR type [b] comes
   first, and Ok/Err map to inl/inr of [chSum a b]. *)
Definition result (b a : choice_type) := chSum a b.
(* #[global] #[refine] Instance result (b a : choice_type) : choice_type := *)
(* {| ct := chSum a b ; := (a + b)%type |}. 
*)
(* Proof. *)
(* intros. *)
(* cbn. *)
(* do 2 rewrite ChoiceEq. *)
(* reflexivity. *)
(* Defined. *)

(* Constructors for [result]: Ok is the left injection, Err the right. *)
Definition Ok {a b : choice_type} : a -> (result b a) := @inl (a) (b).
Definition Err {a b : choice_type} : b -> (result b a) := @inr (a) (b).
Arguments Ok {_ _}.
Arguments Err {_ _}.

(* Unwrap that is only callable with a proof the value is Ok. *)
Definition result_unwrap_safe {a b} (x : (result b a)) `{match x with inl _ => True | inr _ => False end} : a.
  destruct x.
  apply s.
  contradiction.
Defined.

(* Admitted inconsistency used to discharge the Ok-ness obligation below;
   [result_unwrap] is therefore only meaningful on Ok values. *)
Axiom falso : False.
Ltac admit_falso := destruct falso.
Definition result_unwrap {a b} (x : (result b a)) : a := result_unwrap_safe x (H := ltac:(admit_falso)).

Definition option := chOption.
(* Program Definition option_choice_type (a : choice_type) := *)
(* {| ct := chOption a ; := option a ; |}. *)
(* Next Obligation. *)
(* intros. *)
(* rewrite ChoiceEq. *)
(* reflexivity. *)
(* Qed. *)

(*** Monad / Bind *)

Module choice_typeMonad.

  (* A monad over choice_types, bundled with the three monad laws. *)
  Class CEMonad : Type :=
    { M :> choice_type -> choice_type ;
      bind {A B : choice_type} (x : (M A)) (f : A -> (M B)) : (M B) ;
      ret {A : choice_type} (x : A) : (M A) ;
      monad_law1 : forall {A B : choice_type} a (f : A -> M B), bind (ret a) f = f a ;
      monad_law2 : forall {A : choice_type} c, bind c (@ret A) = c ;
      monad_law3 : forall {A B C : choice_type} c (f : A -> M B) (g : B -> M C),
        bind (bind c f) g = bind c (fun a => bind (f a) g) }.

  (* Class CEMonad2 (M : choice_type -> choice_type) : Type := *)
  (* { *)
  (* unit {A : choice_type} (x : A) : (M A) ; *)
  (* fmap {A B : choice_type} (f : A -> B) (x : (M A)) : (M B) ; *)
  (* join {A : choice_type} (x : (M (M A))) : (M A) ; *)
  (* }. *)
  (* #[global] Instance CEMonadToCEMonad2 `{CEMonad} : CEMonad2 M := *)
  (* {| *)
  (* unit A := @ret M _ A ; *)
  (* fmap A B f x := bind x (fun y => ret (f y)) ; *)
  (* join A x := bind x id *)
  (* |}. *)
  (* #[global] Instance CEMonad2ToCEMonad `{CEMonad2} : CEMonad M := *)
  (* {| *)
  (* ret A := @unit M _ A ; *)
  (* bind A B x f := join (fmap f x) *)
  (* |}. 
*)
  (* Class CEMonad_prod (M M0 : choice_type -> choice_type) := *)
  (* { prod : forall A, (M0 (M (M0 A))) -> (M (M0 A)) }. *)
  (* #[global] Program Instance ComposeProd2 `{CEMonad2} `{CEMonad2} `{@CEMonad_prod M M0} : CEMonad2 (fun x => M (M0 x)) := *)
  (* {| *)
  (* unit A x := unit (A := M0 A) (unit x) ; *)
  (* fmap A B f x := fmap (A := M0 A) (B := M0 B) (fmap f) x ; *)
  (* join A x := join (A := M0 A) (fmap (@prod M M0 _ A) x) *)
  (* |}. *)
  (* #[global] Instance ComposeProd `{CEMonad} `{CEMonad} `(@CEMonad_prod M M0) : CEMonad (fun x => M (M0 x)) := (@CEMonad2ToCEMonad _ ComposeProd2). *)
  (* Definition bind_prod `{CEMonad} `{CEMonad} `{@CEMonad_prod M M0} *)
  (* {A B} (x : (M (M0 A))) (f : A -> (M (M0 B))) *)
  (* : (M (M0 B)) := *)
  (* (@bind (fun x => M (M0 x)) (ComposeProd _) A B x f). *)
  (* Class CEMonad_swap (M M0 : choice_type -> choice_type) := *)
  (* { swap : forall A, (M0 (M A)) -> (M (M0 A)) }. *)
  (* #[global] Program Instance ComposeSwap2 `{CEMonad2 } `{CEMonad2} `{@CEMonad_swap M M0} : CEMonad2 (fun x => M (M0 x)) := *)
  (* {| *)
  (* unit A x := unit (A := M0 A) (unit x) ; *)
  (* fmap A B f x := fmap (A := M0 A) (B := M0 B) (fmap f) x ; *)
  (* join A x := fmap (join (M := M0)) (join (fmap (@swap M M0 _ (M0 A)) x)) *)
  (* |}. *)
  (* #[global] Instance ComposeSwap `{CEMonad} `{CEMonad} `(@CEMonad_swap M M0) : CEMonad (fun x => M (M0 x)) := (@CEMonad2ToCEMonad _ ComposeSwap2). *)
  (* Definition bind_swap `{CEMonad} `{CEMonad} `{@CEMonad_swap M M0} *)
  (* A B (x : (M (M0 A))) (f : A -> (M (M0 B))) : (M (M0 B)) := *)
  (* (@bind _ (@ComposeSwap M _ M0 _ _) A B x f). *)

  Section ResultMonad.

    (* Standard error monad on [result C]: Ok values flow through [f],
       Err values short-circuit. *)
    Definition result_bind {C A B} (r : (result C A)) (f : A -> (result C B)) : (result C B) :=
      match r with
      | inl a => f a
      | inr e => (@Err B C e)
      end.

    Definition result_ret {C A : choice_type} (a : A) : (result C A) := Ok a.

    (* The monad laws are discharged by case analysis (see the following
       [Solve All Obligations]). *)
    Global Program Instance result_monad {C : choice_type} : CEMonad :=
      {| M := result C ;
         bind := @result_bind C ;
         ret := @result_ret C ; |}.
    Solve All Obligations with now destruct c.
    Arguments result_monad {_} &.
  End ResultMonad.

  (* Standard option monad: None short-circuits. *)
  Definition option_bind {A B} (r : (option A)) (f : A -> (option B)) : (option B) :=
    match r with
      Some (a) => f a
    | None => None
    end.

  Definition option_ret {A : choice_type} (a : A) : (option A) := Some a.

  Global Program Instance option_monad : CEMonad :=
    Build_CEMonad option (@option_bind) (@option_ret) _ _ _.
  Solve All Obligations with now destruct c.

  Definition option_is_none {A} (x : (option A)) : bool :=
    match x with
    | None => true
    | _ => false
    end.

End choice_typeMonad.

(* #[global] Notation "x 'm(' v ')' ⇠ c1 ;; c2" := *)
(* (choice_typeMonad.bind (M := v) c1 (fun x => c2)) *)
(* (at level 100, c1 at next level, right associativity, *)
(* format "x 'm(' v ')' ⇠ c1 ;; '//' c2") *)
(* : hacspec_scope. *)
(* #[global] Notation " ' x 'm(' v ')' ⇠ c1 ;; c2" := *)
(* (choice_typeMonad.bind (M := v) c1 (fun x => c2)) *)
(* (at level 100, c1 at next level, x pattern, right associativity, *)
(* format " ' x 'm(' v ')' ⇠ c1 ;; '//' c2") *)
(* : hacspec_scope. *)

(* [foldi] where the accumulator lives in a monad: each step binds the
   previous monadic value before applying [f]. *)
Definition foldi_bind {A : choice_type} `{mnd : choice_typeMonad.CEMonad} (a : uint_size) (b : uint_size) (f : uint_size -> A -> (choice_typeMonad.M A)) (init : (choice_typeMonad.M A)) : (choice_typeMonad.M A) :=
  @foldi ((choice_typeMonad.M A)) a b (fun x y => choice_typeMonad.bind y (f x)) init.

(*** Notation *)

(* Monadic if-then-else with a continuation [f]; the bnd-suffixed positions
   are bound through the monad instead of applied directly. *)
Notation "'ifbnd' b 'then' x 'else' y '>>' f" := (if b then f x else f y) (at level 200) : hacspec_scope.
Notation "'ifbnd' b 'thenbnd' x 'else' y '>>' f" := (if b then (choice_typeMonad.bind x) f else f y) (at level 200) : hacspec_scope.
Notation "'ifbnd' b 'then' x 'elsebnd' y '>>' f" := (if b then f x else (choice_typeMonad.bind y) f) (at level 200) : hacspec_scope.
Notation "'ifbnd' b 'thenbnd' x 'elsebnd' y '>>' f" := (if b then choice_typeMonad.bind x f else choice_typeMonad.bind y f) (at level 200).
(* Monadic fold over an index range: the accumulator is threaded through the
   monad [v] by binding at each step. *)
Notation "'foldibnd' s 'to' e 'M(' v ')' 'for' z '>>' f" :=
  (Hacspec_Lib_Pre.foldi s e (choice_typeMonad.ret z) (fun x y => choice_typeMonad.bind y (f x))) (at level 50) : hacspec_scope.

Axiom nat_mod_from_byte_seq_be : forall {A n}, (seq A) -> (nat_mod n).


================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Seq.v
================================================
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.

From mathcomp Require Import fintype.

From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.
From extructures Require Import ord fset fmap.

From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.

From Coq Require Import ZArith List.
Import List.ListNotations.

Import choice.Choice.Exports.

(********************************************************)
(* Implementation of all Hacspec library functions *)
(* for Both types. *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

(*** Seq *)

(* Section Seqs. *)

(**** Unsafe functions *)

(* The pure Hacspec_Lib_Pre operations, lifted pointwise to [both] values. *)
Notation seq_new_ := (lift2_both seq_new_).
Notation seq_new := (lift1_both seq_new).

(* Length of a [both] sequence, as a [both] uint_size. *)
Equations seq_len {A : choice_type} (x : both (seq A)) : both (uint_size) :=
  seq_len := (lift1_both Hacspec_Lib_Pre.seq_len).
Fail Next Obligation.

Notation seq_index := (lift2_both seq_index).
(**** Seq manipulation *)

(* Notation seq_slice := (lift3_both seq_slice). *)
Notation seq_slice_range := (lift2_both seq_slice_range).

(* The wrappers below take PURE arguments and re-inject the pure library
   result with [ret_both] (no lifting of the inputs). *)

(* updating a subsequence in a sequence *)
Definition seq_update {a: choice_type} (s: ((seq a))) (start: uint_size) (input: ((seq a))) : both ((seq a)) :=
  ret_both (seq_update s start input).

(* updating only a single value in a sequence*)
Definition seq_upd {a: choice_type} (s: ((seq a))) (start: uint_size) (v: ((a))) : both ((seq a)) :=
  ret_both (seq_upd s start v).

Definition seq_update_start {a: choice_type} (s: ( (seq a))) (start_s: ( (seq a))) : both ((seq a)) :=
  ret_both (seq_update_start s start_s).

Definition seq_update_slice {A : choice_type} (out: ( (seq A))) (start_out: nat) (input: ( (seq A))) (start_in: nat) (len: nat) : both ((seq A)) :=
  ret_both (seq_update_slice out start_out input start_in len).

Definition seq_concat {a : choice_type} (s1 :( (seq a))) (s2: ( (seq a))) : both ((seq a)) :=
  ret_both (seq_concat s1 s2).

Notation seq_push := (lift2_both seq_push).

Definition seq_from_slice {a: choice_type} (input: ( (seq a))) (start_fin: uint_size × uint_size) : both ((seq a)) :=
  ret_both (seq_from_slice input start_fin).

Definition seq_from_slice_range {a: choice_type} (input: ( (seq a))) (start_fin: uint_size × uint_size) : both ((seq a)) :=
  ret_both (seq_from_slice_range input start_fin).

Definition seq_from_seq {A} (l : (seq A)) : both (seq A) :=
  ret_both (seq_from_seq l).

(**** Chunking *)

Definition seq_num_chunks {a: choice_type} (s: ( (seq a))) (chunk_len: uint_size) : both (uint_size) :=
  ret_both (seq_num_chunks s chunk_len).

Definition seq_chunk_len {a: choice_type} (s: ( (seq a))) (chunk_len: nat) (chunk_num: nat) : both (('nat)) :=
  ret_both (seq_chunk_len s chunk_len chunk_num).

Definition seq_get_chunk {a: choice_type} (s: ( (seq a))) (chunk_len: uint_size) (chunk_num: uint_size) : both (((uint_size × seq a))) :=
  ret_both (seq_get_chunk s chunk_len chunk_num).
(* More [ret_both] wrappers around the pure chunking API. *)
Definition seq_set_chunk {a: choice_type} (s: ( (seq a))) (chunk_len: uint_size) (chunk_num: uint_size) (chunk: ( (seq a)) ) : both ((seq a)) :=
  ret_both (seq_set_chunk s chunk_len chunk_num chunk).

Definition seq_num_exact_chunks {a} (l : ( (seq a))) (chunk_size : ( (uint_size))) : (both uint_size) :=
  ret_both (seq_num_exact_chunks l chunk_size).

Definition seq_get_exact_chunk {a : choice_type} (l : ( (seq a))) (chunk_size chunk_num: ( (uint_size))) : both ((seq a)) :=
  ret_both (seq_get_exact_chunk l chunk_size chunk_num).

Definition seq_set_exact_chunk {a : choice_type} := @seq_set_chunk a.

Definition seq_get_remainder_chunk {a : choice_type} (l : (seq a)) (chunk_size : (uint_size)) : both ((seq a)) :=
  ret_both (seq_get_remainder_chunk l chunk_size).

(* Element-wise XOR and truncation, wrapped for [both]. *)
Definition seq_xor_ {WS} (x y : seq (@int WS)) : both (seq (@int WS)) :=
  ret_both (seq_xor_ x y).
Definition seq_truncate {a : choice_type} (x : seq a) (n : nat) : both (seq a) :=
  ret_both (seq_truncate x n).

(* End Seqs. *)
Infix "seq_xor" := seq_xor_ (at level 33) : hacspec_scope.

(* Section Arrays. *)
(**** types *)

(***** prelude.rs *)
(* Fixed-size byte-array types backing the uintN word views. *)
Definition uint128_word_t : choice_type := nseq_ uint8 16.
Definition uint64_word_t : choice_type := nseq_ uint8 8.
Definition uint32_word_t : choice_type := nseq_ uint8 4.
Definition uint16_word_t : choice_type := nseq_ uint8 2.

(**** Array manipulation *)

(* Fresh array of length [len] filled with [init] (a [both] value). *)
Equations array_new_ {A: choice_type} (init: both A) `(len: uint_size) : both (nseq A len) :=
  array_new_ init len :=
    lift1_both (fun x => Hacspec_Lib_Pre.array_new_ x (from_uint_size len)) init.

(* Indexing lifted over both the array and the (machine-integer) index. *)
Equations array_index {A: choice_type} {len : nat} (x : both (nseq_ A len)) {WS} (y : both (int WS)) : both A :=
  array_index x (WS := WS) y := lift2_both (fun x y => Hacspec_Lib_Pre.array_index x y) x y.
Fail Next Obligation.
(* Functional update of cell [i] of an array: all three arguments are [both]
   values, combined with [lift3_both] over the pure [array_upd]. *)
Equations array_upd {A : choice_type} {len} (s: both (nseq_ A len)) (i: both (@int U32)) (new_v: both A) : both (nseq_ A len) :=
  array_upd s i new_v :=
    (lift3_both (fun (s : nseq_ A len) i new_v => Hacspec_Lib_Pre.array_upd s i new_v) s i new_v).

(* substitutes a sequence (seq) into an array (nseq), given index interval *)
Definition update_sub {A : choice_type} {len slen} (v : (nseq_ A len)) (i : nat) (n : nat) (sub : (nseq_ A slen)) : both ((nseq_ A len)) :=
  ret_both (update_sub v i n sub).

(* Build an array of length S k from head [x] and tail [xs].  Arrays are
   finite maps (fmap from ordinals), so each recursive step inserts the
   current head at the ordinal computed from the remaining length; the
   base case inserts into the empty map.  NOTE(review): the index
   arithmetic relies on [lt_succ_diag_r_sub]; confirm against its statement
   before touching the ordinals. *)
Equations array_from_list_helper {A: choice_type} (x : both A) (xs: list (both A)) (k : nat) : both (nseq_ A (S k)) :=
  array_from_list_helper x [] k :=
    lift1_both (fun x => setm emptym (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O))) x : nseq_ A (S k)) x ;
  array_from_list_helper x (y :: ys) k :=
    bind_both x (fun temp_x =>
    bind_both (array_from_list_helper y ys k) (fun temp_y =>
    lift_both (ret_both (setm (temp_y : nseq_ A (S k)) (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k (length (y :: ys))))) temp_x : nseq_ A (S k))))).
Fail Next Obligation.

(* An empty list yields the unit inhabitant of [nseq_ A 0]; otherwise defer
   to the helper above.  The [match l as k return ...] annotation is needed
   so the result type tracks the list's length. *)
Equations array_from_list {A: choice_type} (l: list (both A)) : both (nseq_ A (length l)) :=
  array_from_list l :=
    match l as k return both (nseq_ A (length k)) with
      [] => solve_lift (ret_both (tt : nseq_ A 0))
    | (x :: xs) => array_from_list_helper x xs (length xs)
    end.
Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).
Fail Next Obligation.

(* Convert a dynamic sequence into a fixed-size array of length [out_len]. *)
Program Definition array_from_seq {A: choice_type} (out_len: nat) (input: both (seq A)) : both (nseq_ A out_len) :=
  lift1_both (* (H_loc_incl_x := fsubsetxx _) (H_opsig_incl_x := fsubsetxx _) *) (array_from_seq out_len) input.

(* Forget the static length: view an array as a sequence. *)
Equations array_to_seq {A : choice_type} {n} (f : both (nseq_ A n)) (* `{H_loc_incl_x : is_true (fsubset L1 L2)} `{H_opsig_incl_x : is_true (fsubset I1 I2)} *) : both (seq A) :=
  array_to_seq := (lift1_both Hacspec_Lib_Pre.array_to_seq).
Fail Next Obligation.
(* Build a length-[out_len] array from a slice of [input]; cells not covered
   by the slice are filled with [default_value].  [from_uint_size] converts
   machine-word indices to [nat] for the pure function. *)
Definition array_from_slice {a: choice_type} (default_value: ( a)) (out_len: nat) (input: (seq a)) (start: uint_size) (slice_len: uint_size) : both ((nseq_ a out_len)) :=
  ret_both (array_from_slice default_value out_len input (from_uint_size start) (from_uint_size slice_len)).

(* Take [slice_len] elements of [input] starting at [start], as an array. *)
Definition array_slice {a: choice_type} (input: (seq a)) (start: nat) (slice_len: nat) : both ((nseq_ a slice_len)) :=
  ret_both (array_slice input start slice_len).

(* Same as [array_from_slice] but the slice is given as a (start, end) pair. *)
Definition array_from_slice_range {a: choice_type} (default_value: a) (out_len: nat) (input: (seq a)) (start_fin: (uint_size × uint_size)) : both ((nseq_ a out_len)) :=
  ret_both (array_from_slice_range default_value out_len input start_fin).

(* Slice an array into a dynamic sequence, given a (start, end) pair. *)
Definition array_slice_range {a: choice_type} {len : nat} (input: (nseq_ a len)) (start_fin:(uint_size × uint_size)) : both ((seq a)) :=
  ret_both (array_slice_range input start_fin).

(* Overwrite a portion of array [s] (from index [start]) with sequence
   [start_s]; the array length is unchanged. *)
Definition array_update {a: choice_type} {len: nat} (s: (nseq_ a len)) (start : uint_size) (start_s: (seq a)) : both ((nseq_ a len)) :=
  ret_both (array_update s start start_s).

Definition array_update_start {a: choice_type} {len: nat} (s: (nseq_ a len)) (start_s: (seq a)) : both ((nseq_ a len)) :=
  ret_both (array_update_start s start_s).

(* Static length of the array, as a machine word. *)
Definition array_len {a: choice_type} {len: nat} (s: (nseq_ a len)) : both (uint_size) := ret_both (array_len s).
(* May also come up as 'length' instead of 'len' *)
Definition array_length {a: choice_type} {len: nat} (s: (nseq_ a len)) : both (uint_size) := ret_both (array_length s).

(* Copy [len] elements of sequence [input] (from [start_in]) into array [out]
   at [start_out]; output length is inferred from the pure function. *)
Definition array_update_slice {a : choice_type} {l : nat} (out: ( (nseq_ a l))) (start_out: uint_size) (input: ( (seq a))) (start_in: uint_size) (len: uint_size) : both ((nseq_ a _)) :=
  ret_both (array_update_slice (l := l) out start_out input start_in (from_uint_size len)).

(**** Numeric operations *)

(* End Arrays.
*)

================================================
FILE: hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_TODO.v
================================================

(* Silence warnings that are unavoidable with the heavy coercion/notation
   machinery used throughout this file. *)
Global Set Warnings "-ambiguous-paths".
Global Set Warnings "-uniform-inheritance".
Global Set Warnings "-auto-template".
Global Set Warnings "-disj-pattern-notation".
Global Set Warnings "-notation-overridden,-ambiguous-paths".

Require Import Lia.
Require Import Coq.Logic.FunctionalExtensionality.
Require Import Sumbool.
From mathcomp Require Import fintype.
From Crypt Require Import choice_type Package Prelude.
Import PackageNotation.
From extructures Require Import ord fset fmap.
From mathcomp Require Import ssrZ word.
(* From Jasmin Require Import word. *)
From Crypt Require Import jasmin_word.
From Coq Require Import ZArith List.
Import List.ListNotations.
Import choice.Choice.Exports.

(********************************************************)
(*   Implementation of all Hacspec library functions    *)
(* for Both types.                                      *)
(********************************************************)

Declare Scope hacspec_scope.

From Hacspec Require Import ChoiceEquality.
From Hacspec Require Import LocationUtility.
From Hacspec Require Import Hacspec_Lib_Comparable.
From Hacspec Require Import Hacspec_Lib_Pre.

Open Scope bool_scope.
Open Scope hacspec_scope.
Open Scope nat_scope.
Open Scope list_scope.

From Hacspec Require Import Hacspec_Lib_Integers.
From Hacspec Require Import Hacspec_Lib_Seq.
From Hacspec Require Import Hacspec_Lib_Natmod.
From Hacspec Require Import Hacspec_Lib_Monad.
From Hacspec Require Import Hacspec_Lib_Ltac.

(*** Result *)

(* Result constructors lifted into [both].  Note the argument order of the
   [result] type: [result b a] holds Ok-values of type [a]. *)
Definition Ok {a b : choice_type} : both a -> both (result b a) := lift1_both Ok.
Definition Err {a b : choice_type} : both b -> both (result b a) := lift1_both Err.

Infix "&&" := andb : bool_scope.
Infix "||" := orb : bool_scope.

(* 4- and 16-byte word array types. *)
Definition u32_word_t := nseq_ uint8 4.
Definition u128_word_t := nseq_ uint8 16.

(*** Hacspec-v2 specific fixes *)

Import choice.Choice.Exports.
(* All obligations in this file are discharged by the hacspec solver. *)
Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.

(** Should be moved to Hacspec_Lib.v **)

(* Binary-digit builders used by the [Number Notation] below:
   int_xI a = 2*a + 1 and int_xO a = 2*a, mirroring positive's xI/xO. *)
Program Definition int_xI {WS : wsize} (a : (@int WS)) : (@int WS) :=
  Hacspec_Lib_Pre.int_add (Hacspec_Lib_Pre.int_mul a (@repr WS 2)) (@one WS).
Program Definition int_xO {WS : wsize} (a : int WS) : int WS :=
  Hacspec_Lib_Pre.int_mul a (@repr WS 2).
Definition both_int_one {WS : wsize} : both (@int WS) := ret_both (one).

Open Scope hacspec_scope.

(* Parse positive number literals directly as machine integers by mapping
   positive's constructors to the builders above. *)
Definition int_num {WS : wsize} := int WS.
Number Notation int_num Pos.of_num_int Pos.to_num_int (via positive mapping [[int_xI] => xI, [int_xO] => xO , [one] => xH]) : hacspec_scope.
Notation "0" := (repr _ 0%Z) : hacspec_scope.

(** Ops *)

(* Overloadable arithmetic on [both] values; one instance per operation for
   machine integers of any word size. *)
Class Addition (A : choice_type) := add : both A -> both A -> both A.
Notation "a .+ b" := (add a b).
Instance int_add_inst {ws : wsize} : Addition (@int ws) := { add a b := int_add a b }.

Class Subtraction (A : choice_type):= sub : both A -> both A -> both A.
Notation "a .- b" := (sub a b (Subtraction := _)).
Instance int_sub_inst {ws : wsize} : Subtraction (@int ws) := { sub a b := int_sub a b }.

Class Multiplication A := mul : both A -> both A -> both A.
Notation "a .* b" := (mul a b).
Program Instance int_mul_inst {ws : wsize} : Multiplication (@int ws) := { mul a b := int_mul a b }.
Fail Next Obligation.

Class Xor A := xor : both A -> both A -> both A.
Notation "a .^ b" := (xor a b).
Program Instance int_xor_inst {ws : wsize} : Xor (@int ws) := { xor a b := int_xor a b }.
Fail Next Obligation.

(** Iter *)

(* A value viewable simultaneously as fixed-size array, sequence and list;
   the three coercion fields let one term be used in any of those roles. *)
Structure array_or_seq A (len : nat) :=
  { as_nseq :> both (nseq_ A len) ;
    as_seq :> both (seq A) ;
    as_list :> both (chList A) }.
Arguments as_seq {_} {_}. (* array_or_seq. *)
Arguments as_nseq {_} {_}. (* array_or_seq. *)
Arguments as_list {_} {_}. (* array_or_seq. *)

(* Conversions into the [chList] view. *)
Definition array_to_list {A n} := lift1_both (fun x => (@array_to_list A n x) : chList _).
Definition seq_to_list {A} := lift1_both (fun x => (@seq_to_list A x) : chList _).
(* Conversions out of the [chList] view. *)
Definition seq_from_list {A} := lift1_both (fun (x : chList _) => seq_from_list A (x : list _)).
Definition array_from_list' {A} {n : nat} := lift1_both (fun (x : chList A) => @array_from_list' A x n : nseq_ _ _).

(* Canonical packaging of an array as an [array_or_seq] record. *)
Equations nseq_array_or_seq {A len} (val : both (nseq_ A len)) : array_or_seq A len :=
  nseq_array_or_seq val := {| as_seq := array_to_seq val ; as_nseq := val ; as_list := array_to_list val |}.
Solve All Obligations with intros ; exact fset0.
Fail Next Obligation.
Arguments nseq_array_or_seq {A} {len}.
Coercion nseq_array_or_seq : both >-> array_or_seq.
Canonical Structure nseq_array_or_seq.

(* Dispatch on the *representation type* [B] of a [both] value to build an
   [array_or_seq]: arrays (chMap over ordinals), sequences (chMap over 'nat),
   lists (chList) and the degenerate unit (length-0 array) are accepted; all
   other types are ruled out by the [contra] evidence, which also pins the
   element type to [A].  The length is recomputed from [B] in the [let len]
   binder.  NOTE(review): the 3/4 returned for the 'nat and chList branches
   are dummy lengths — the seq/list views carry no static length; confirm
   no caller relies on [len] in those cases.  Defined with tactics (ending
   in [Defined]) because each branch needs a type-level rewrite of [B]. *)
Definition n_seq_array_or_seq {A} {B} (x : both B)
  `(contra : match B with
             | chUnit => True
             | chMap (chFin (@mkpos (S n) _)) (C) => C = A
             | chMap 'nat (C) => C = A
             | chList C => C = A
             | _ => False
             end)
  : let len := (match B as K return match K with
                                    | chUnit => True
                                    | chMap (chFin (@mkpos (S n) _)) (C) => C = A
                                    | chMap 'nat (C) => C = A
                                    | chList C => C = A
                                    | _ => False
                                    end -> nat with
                | chUnit => fun _ => 0%nat
                | chMap (chFin (@mkpos p _)) C =>
                    fun m_contra =>
                      match p as p_ return match p_ with
                                           | O => False
                                           | _ => C = A
                                           end -> nat with
                      | O => fun m_contra => False_rect nat m_contra
                      | S n => fun _ => S n
                      end m_contra
                | chMap 'nat C => fun m_contra => 3%nat
                | chList C => fun m_contra => 4%nat
                | _ => fun m_contra => False_rect nat m_contra
                end contra) in
    array_or_seq A len.
Proof.
  intros.
  destruct B ; try contradiction contra.
  - (* chUnit: the empty array. *)
    change 'unit with (nseq_ A len) in x.
    exact {| as_seq := array_to_seq x ; as_nseq := x; as_list := array_to_list x |}.
  - destruct B1 ; try contradiction contra ; simpl in *.
    + (* chMap 'nat _ : a sequence. *)
      subst.
      change (chMap 'nat A) with (seq A) in x.
      exact ({| as_seq := x ; as_nseq := array_from_seq _ x ; as_list := seq_to_list x |}).
    + (* chMap (chFin _) _ : a fixed-size array; rewrite B to nseq_ A len. *)
      destruct n. destruct pos.
      * contradiction.
      * subst.
        replace (chMap (chFin _) A) with (nseq_ A len) in x.
        2:{
          simpl.
          f_equal.
          f_equal.
          apply (ssrbool.elimT (positive_eqP _ _)).
          unfold positive_eq.
          apply eqtype.eq_refl.
        }
        exact {| as_seq := array_to_seq x ; as_nseq := x; as_list := array_to_list x |}.
  - (* chList _ : a list. *)
    subst.
    exact {| as_seq := seq_from_list x ; as_nseq := array_from_list' x ; as_list := x |}.
Defined.

(* Array indexing/update sugar over any array-or-seq-like value. *)
Notation " x '.a[' a ']'" := (array_index (n_seq_array_or_seq x _) a) (at level 40).
Fail Next Obligation.
Notation " x '.a[' i ']<-' a" := (array_upd x i a) (at level 40).
Notation update_at := array_upd.
Notation update_at_usize := array_upd.

(* Aliases mapping hacspec/Rust-style names to the library definitions. *)
Notation t_Seq := seq.
Notation num_exact_chunks := seq_num_exact_chunks.
Notation get_exact_chunk := seq_get_exact_chunk.
Notation get_remainder_chunk := seq_get_remainder_chunk.
Notation "a <> b" := (negb (eqb a b)).
Notation from_secret_literal := nat_mod_from_secret_literal.
Notation zero := nat_mod_zero.
Notation to_byte_seq_le := nat_mod_to_byte_seq_le.
Notation U128_to_le_bytes := u128_to_le_bytes.
Notation U64_to_le_bytes := u64_to_le_bytes.
Notation from_byte_seq_le := nat_mod_from_byte_seq_le.
Definition from_literal {m} := nat_mod_from_literal m.
Notation inv := nat_mod_inv.
Notation update_start := array_update_start.
Notation pow := nat_mod_pow_self.
Notation bit := nat_mod_bit.
Notation Build_secret := secret.
Notation "a -× b" := (prod a b) (at level 80, right associativity) : hacspec_scope.
Notation Result_t := result.
Axiom TODO_name : Type.
Notation ONE := nat_mod_one.
Notation exp := nat_mod_exp.
Notation TWO := nat_mod_two.
Notation ne := (fun x y => negb (eqb x y)).
Notation eq := (eqb).
Notation rotate_right := (ror).
Notation to_be_U32s := array_to_be_uint32s.
Notation get_chunk := seq_get_chunk.
Notation num_chunks := seq_num_chunks.
Notation U64_to_be_bytes := uint64_to_be_bytes.
Notation to_be_bytes := array_to_be_bytes.
Notation U8_from_usize := uint8_from_usize.
Notation concat := seq_concat.
Notation declassify := id.
Notation U128_from_be_bytes := uint128_from_be_bytes.
Notation U128_to_be_bytes := uint128_to_be_bytes.
Notation slice_range := array_slice_range.
Notation truncate := seq_truncate.
(* More hacspec/Rust-style aliases. *)
Notation to_be_U64s := array_to_be_uint64s.
Notation classify := id.
Notation U64_from_U8 := uint64_from_uint8.

(* A Rust-style half-open range, modelled as a pair of bounds. *)
Definition Build_t_Range {WS} {f_start : both (int WS)} {f_end : both (int WS)} := prod_b (f_start,f_end).
Notation Build_Range := Build_t_Range.

Notation declassify_eq := eq.
Notation String_t := String.string.

(* Literal notations for signed machine integers of each width. *)
Notation "'i8(' v ')'" := (ret_both (v : int8) : both _).
Notation "'i16(' v ')'" := (ret_both (v : int16) : both _).
Notation "'i32(' v ')'" := (ret_both (v : int32) : both _).
Notation "'i64(' v ')'" := (ret_both (v : int64) : both _).
Notation "'i128(' v ')'" := (ret_both (v : int128) : both _).

(* List length as a machine word of width [ws]. *)
Definition len {A ws} := lift1_both (fun (x : chList A) => repr ws (List.length x)).

(* Boolean connectives on [both 'bool], delegating to Datatypes' orb/andb/negb. *)
Definition orb (x : both 'bool) (y : both 'bool) : both 'bool :=
  lift2_both (fun (x y : 'bool) => Datatypes.orb x y : 'bool) x y.
Definition andb (x : both 'bool) (y : both 'bool) : both 'bool :=
  lift2_both (fun (x y : 'bool) => Datatypes.andb x y : 'bool) x y.
Definition negb (x : both 'bool) : both 'bool :=
  lift1_both (fun (x : 'bool) => Datatypes.negb x : 'bool) x.
Notation "a <> b" := (negb (eqb a b)).
Notation "'not'" := (negb).

(* Type-ascription helpers. *)
Notation "x ':of:' y" := (x : both _ _ y) (at level 100).
Notation "x ':of0:' y" := (x : both y) (at level 100).

(** Trait impls *)

(* Empty marker classes standing in for Rust traits that carry no data. *)
Class t_Serialize (Self : choice_type).
Class t_Deserial (Self : choice_type).
Class t_Serial (Self : choice_type).
Notation "'t_Eq'" := (EqDec).

(** end of: Should be moved to Hacspec_Lib.v **)

(* Rust's Result<A, B>: note the flipped argument order of [result]. *)
Definition t_Result A B := result B A.

(** Should be part of core.V **)
(* Identity-method stand-ins for Rust marker/conversion traits. *)
Class t_Sized (A : choice_type) := Sized : A -> A.
Class t_TryFrom (A : choice_type) := TryFrom : A -> A.
Class t_Into (A : choice_type) := Into : A -> A.
Class t_PartialEq (A : choice_type) (B : choice_type) := PartialEq : A -> B -> bool.
Class t_Copy (A : choice_type) := Copy : A -> A.
Class t_Clone (A : choice_type) := Clone : A -> A.

Definition t_Option : choice_type -> choice_type := chOption.

(* Allocator parameter of Vec; only the global allocator exists. *)
Inductive vec_typ :=
| t_Global.
(* Rust's Vec<A, alloc> is modelled as a plain list; the allocator is ignored. *)
Definition t_Vec : choice_type -> vec_typ -> choice_type := fun A _ => chList A.

Notation t_Default := Default.

(* Trivial trait instances for bool. *)
#[global] Instance bool_copy : t_Copy 'bool := {Copy x := x}.
#[global] Instance bool_clone : t_Clone 'bool := {Clone x := x}.
#[global] Instance bool_sized : t_Sized 'bool := {Sized x := x}.

(* Placeholder: returns its argument unchanged, not the integer log. *)
Definition ilog2 {WS} (x : both (int WS)) : both (int WS) := x. (* TODO *)

(* Iterator collect is the identity under the list model. *)
Definition collect {A} (x : both (chList A)) : both (t_Vec A t_Global) := x.

(* Turn a list of [both] values into a [both] list by folding bind_both.
   NOTE(review): the fold conses each element onto the accumulator, so the
   output order relative to the input follows from fold_left's traversal —
   verify against callers before relying on a specific order. *)
Equations swap_both_list {A} (x : list (both A)) : both (chList A) :=
  swap_both_list x :=
    (List.fold_left
       (fun (x : both (chList A)) y =>
          bind_both x (fun x' =>
          bind_both y (fun y' =>
          solve_lift (ret_both ((y' :: x') : chList A)))))
       x
       (solve_lift (ret_both ([] : chList A)))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* Run a pure list-consumer [f] under a [both] list. *)
Equations match_list {A B : choice_type} (x : both (chList A)) (f : list A -> B) : both B :=
  match_list x f := bind_both x (fun x' => solve_lift (ret_both (f x'))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* Map a [both]-valued function over a [both] list, re-sequencing the results. *)
Equations map {A B} (x : both (chList A)) (f : both A -> both B) : both (chList B) :=
  map x f := bind_both x (fun x' => swap_both_list (List.map (fun y => f (solve_lift (ret_both y))) x')).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* cloned is the identity: values are already owned in this model. *)
Definition cloned {A} (x : both (chList A)) : both (chList A) := x.

(* View a sequence as an iterator (list). *)
Equations iter {A} (x : both (seq A)) : both (chList A) :=
  iter x := bind_both x (fun x' => solve_lift (ret_both (Hacspec_Lib_Pre.seq_to_list _ x' : chList A))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* Placeholder: does not actually deduplicate. *)
Definition dedup {A} (x : both (t_Vec A t_Global)) : both (t_Vec A t_Global) := x.

Definition t_String := Coq.Strings.String.string.

(* Vec::new — the empty list. *)
Equations new {A} : both (t_Vec A t_Global) :=
  new := solve_lift (ret_both ([] : chList A)).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.
(* Placeholder: does not pair elements with indices. *)
Definition enumerate {A} (x : both (t_Vec A t_Global)) : both (t_Vec A t_Global) := x.

(*** More functions *)

Definition t_Drain : choice_type -> vec_typ -> choice_type := t_Vec.

Inductive t_Range :=
| RangeFull.

(* Vec::drain(..): returns (drained elements, emptied vector).  Only the
   full range is supported. *)
Equations drain : forall {A}, both (t_Vec A t_Global) -> t_Range -> both (t_Drain A t_Global × t_Vec A t_Global) :=
  drain x _ :=
    bind_both x (fun x' =>
    solve_lift (ret_both ((x', []) : (t_Drain A t_Global × t_Vec A t_Global)))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

Notation t_Rev := id.

(* List reversal lifted into [both]. *)
Equations rev {A} (x : both (chList A)) : both (chList A) :=
  rev x := bind_both x (fun x => solve_lift (ret_both (List.rev x : chList _))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* Vec::pop modelled on the list head: returns (optional head, tail).
   NOTE(review): Rust's pop removes the *last* element; here it is the front
   of the list — consistent only because [push] below also prepends. *)
Definition pop {A} : both (chList A) -> both (chOption A × t_Vec A (t_Global)) :=
  lift1_both (fun (x : chList A) => (List.hd_error x , List.tl x) : (chOption A × t_Vec A (t_Global))).

(* Vec::push modelled as cons (prepend) — see note on [pop]. *)
Definition push {A} : both (t_Vec A t_Global) -> both A -> both (t_Vec A (t_Global)) :=
  lift2_both (fun (x : chList A) y => y :: x : chList A).

Notation Option_Some := Some.

(* Vec::append: result is (l ++ x, []) — the second list is drained. *)
Definition append {A : choice_type} (l : both (chList A)) (x : both (chList A)) : both (chList A × chList A) :=
  lift2_both (fun (x : chList A) (y : chList A) => (app y x, []) : chList A × chList A) x l.

Notation f_clone := id.

(* Unzip a list of pairs into a pair of lists (via seq.unzip1/unzip2). *)
Definition seq_unzip {A B} (s : chList (A × B)) : chList A × chList B := (seq.unzip1 s, seq.unzip2 s).
Definition unzip {A B} : both (chList (A × B)) -> both (chList A × chList B) := lift1_both seq_unzip.

(* Deref a Vec back into a hacspec sequence. *)
Equations deref {A} : both (t_Vec A t_Global) -> both (seq A) :=
  deref X := bind_both X (fun x : t_Vec A t_Global => solve_lift (ret_both (Hacspec_Lib_Pre.seq_from_list A x))).
Solve All Obligations with solve_ssprove_obligations.
Fail Next Obligation.

(* Rust's never type (!) approximated by unit; [abort] cannot actually
   diverge in this model. *)
Definition t_Never : choice_type := 'unit.
Definition abort : both t_Never := ret_both (tt : 'unit).

Notation Result_Err := Err.
Notation Result_Ok := Ok.
Notation "'ret_both' 'tt'" := (ret_both (tt : 'unit)).

(** Should be part of concordium.v **)
(* Minimal Concordium smart-contract trait surface needed by extracted code. *)
Class HasInitContext (Self : choice_type).
Class t_HasInitContext (Self : choice_type) (something : choice_type).
Class t_HasActions (Self : choice_type) := {f_accept : both Self}.
Class HasReceiveContext (Self : choice_type).

Definition t_ParamType := 'unit.
Definition t_ParseError := 'unit.

Class t_HasReceiveContext (Self : choice_type) (something : choice_type) :=
  { f_get : forall (Ctx : Self), both (t_ParamType × t_Result Self something) }.
(* Arguments f_get {Self} {something} (t_HasReceiveContext) {Ctx}. *)

(* Projects the pure value out of a [both] context. *)
Definition f_parameter_cursor {T : _} (ctx : both (T)) : T := is_pure ctx.

Notation ControlFlow_Continue := Result_Ok.
Notation v_Break := Result_Err.
Notation never_to_any := id.

(* Collapse the result monad: both the break (inl) and continue (inr) sides
   carry an A, so either way the payload is returned. *)
Equations run {A} (x : both (choice_typeMonad.M (CEMonad := (@choice_typeMonad.mnd (choice_typeMonad.result_bind_code A))) A)) : both A :=
  run x := bind_both x (fun y => match y with
                                 | inl r | inr r => solve_lift ret_both r
                                 end).
Fail Next Obligation.

(* [matchb] — monadic pattern match on a [both] value.  Coq notations cannot
   be variadic, so one notation is declared per number of match arms; the
   ladder continues below up to 27 arms. *)
Notation "'matchb' x 'with' '|' a '=>' b 'end'" :=
  (bind_both x (fun y => match y with | a => b end)) (at level 100, a pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d end)) (at level 100, a pattern, c pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f end)) (at level 100, a pattern, c pattern, e pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h end)) (at level 100, a pattern, c pattern, e pattern, g pattern).
(* [matchb] ladder, continued: 5 to 13 match arms.  From 12 arms onward the
   scrutinee is named [x_val] (and the binder [y_val] from 13 arms) to avoid
   clashing with the pattern variables w/x and y/z. *)
Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern).

Notation "'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v 'end'" :=
  (bind_both x (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x 'end'" :=
  (bind_both x_val (fun y => match y with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern).
(* [matchb] ladder, continued: 14 and 15 match arms.
   Fix: this line previously re-declared the 13-arm notation verbatim — an
   exact duplicate of the declaration immediately above it.  Because this
   file disables the "notation-overridden" warning globally, the duplicate
   silently overrode its twin with an identical meaning; it is dead code and
   has been removed.  Only the genuinely new 14- and 15-arm cases remain. *)
Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern).
(* [matchb] ladder, continued: 16 to 27 match arms.  All follow the same
   scheme: scrutinee [x_val], binder [y_val], and one "pattern" flag per
   left-hand-side variable in the level annotations. *)
Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 | u1 => v1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 | u1 => v1 | w1 => x1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 | u1 => v1 | w1 => x1 | y1 => z1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern).

Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 '|' a2 '=>' b2 'end'" :=
  (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 | u1 => v1 | w1 => x1 | y1 => z1 | a2 => b2 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern, a2 pattern).
Notation "'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 '|' a2 '=>' b2 '|' c2 '=>' d2 'end'" := (bind_both x_val (fun y_val => match y_val with | a => b | c => d | e => f | g => h | i => j | k => l | m => n | o => p | q => r | s => t | u => v | w => x | y => z | a1 => b1 | c1 => d1 | e1 => f1 | g1 => h1 | i1 => j1 | k1 => l1 | m1 => n1 | o1 => p1 | q1 => r1 | s1 => t1 | u1 => v1 | w1 => x1 | y1 => z1 | a2 => b2 | c2 => d2 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern, a2 pattern, c2 pattern). Notation f_branch := id. Notation ControlFlow_Break_case := inr. Notation ControlFlow_Continue_case := inl. Notation f_from_residual := Result_Err. Ltac remove_duplicate_pair := normalize_fset ; repeat match goal with | |- context G [(?a :|: (?a :|: ?c))] => replace (a :|: (a :|: c)) with (a :|: a :|: c) by (now rewrite <- fsetUA) ; rewrite fsetUid end. Axiom t_Reject : choice_type. Equations repeat {A} (e : both A) (n : both uint_size) : both (nseq A (is_pure n)) := repeat e n := (eq_rect (Datatypes.length (List.repeat (solve_lift e) (Z.to_nat (unsigned (is_pure n))))) (fun n0 : nat => both (nseq_ A n0)) (bind_both e (fun _ : A => array_from_list (List.repeat (solve_lift e) (Z.to_nat (unsigned (is_pure n))))) ) (Z.to_nat (unsigned (is_pure n))) (List.repeat_length (solve_lift e) (Z.to_nat (unsigned (is_pure n))))). Fail Next Obligation. 
Class iterable (A B : choice_type) := {f_into_iter : both A -> both (chList B)}. Instance nseq_iterable_seq {A n} : iterable (nseq A n) A := {| f_into_iter := array_to_list |}. Program Instance range_iterable {WS} : iterable ((int WS) × (int WS)) (int WS) := {| f_into_iter := fun x => bind_both x (fun '((a, b) : int WS × int WS) => solve_lift (ret_both (List.map (fun x => repr WS (Z.of_nat x)) (List.seq (Z.to_nat (unsigned a)) (Z.to_nat (unsigned (b))-Z.to_nat (unsigned a))) : chList (int WS) ))) |}. Fail Next Obligation. Notation t_IntoIter := (chList _). Instance nseq_iterable_vec {A n} : iterable (t_Vec A n) A := {| f_into_iter := fun x => x |}. Definition t_Amount := int64. Definition impl_20__contains_key := int64. Definition f_micro_ccd := int64. Equations Build_t_Amount {f_micro_ccd : both int64} : both (t_Amount) := Build_t_Amount := bind_both f_micro_ccd (fun f_micro_ccd => solve_lift (ret_both ((f_micro_ccd) : (t_Amount)))) : both (t_Amount). Fail Next Obligation. Definition t_Timestamp := int32. Definition t_BTreeMap (A B : Type) (C : vec_typ) := int32. Definition f_slot_time := int64. Definition f_metadata := int64. Definition t_AccountAddress : choice_type := int64 ∐ int64. Definition Address_Contract_case (addr : int64) : t_AccountAddress := inl addr. Definition Address_Account_case (addr : int64) : t_AccountAddress := inr addr. Definition f_sender : t_AccountAddress := Address_Account_case 0. Notation f_into_iter_loc := fset0. Notation f_end_loc := fset0. Notation f_start_loc := fset0. Notation f_eq_loc := fset0. Equations impl__into_vec {A n} : both (nseq_ A n) -> both (t_Vec A t_Global) := impl__into_vec X := bind_both X (fun x : nseq_ A n => solve_lift (ret_both (Hacspec_Lib_Pre.array_to_list x : chList _))). Fail Next Obligation. Definition unsize {A} := @id A. Definition box_new {A} := @id A. Notation f_get_loc := fset0. Notation f_clone_loc := fset0. Notation f_accept_loc := fset0. Notation f_parameter_cursor_loc := fset0. 
Notation Result_Ok_case := inl. Notation Result_Err_case := inr. Definition impl__map_err {A B C : choice_type} (r : both (t_Result A B)) (f : B -> C) : both (t_Result A C) := matchb r with | inl a => ret_both (inl a : t_Result A C) | inr b => ret_both (inr (f b) : t_Result A C) end. Definition f_from {A B : choice_type} : A -> (Result_t A B) := inr. From mathcomp Require Import eqtype. From mathcomp Require Import ssrbool. Instance copy {C : _} : t_Copy C := fun x => x. Instance partial_eq {C : _} : t_PartialEq C C := fun x y => x == y. Instance serialize {C : _} : t_Serialize C. Defined. Instance clone {C : _} : t_Clone C := fun x => x. Instance is_eq {C : choice_type} : t_Eq C := {| Hacspec_Lib_Comparable.eqb := _ ; eqb_leibniz := fun x y : C => RelationClasses.symmetry (rwP eqP) |}. ================================================ FILE: hax-lib/proof-libs/coq/ssprove/src/LocationUtility.v ================================================ From mathcomp Require Import all_ssreflect all_algebra. From Coq Require Import ZArith List. From Crypt Require Import choice_type Package. Import PackageNotation. From Crypt Require Import pkg_interpreter. From extructures Require Import ord fset fmap. Require Import Hacspec_Lib_Comparable. Require Import Coq.Logic.FunctionalExtensionality. Import List.ListNotations. (*****************************************************) (* This file defines a utility functions to reason *) (* about equivalence of Locations and Signatures *) (*****************************************************) (*** Location *) From HB Require Import structures. HB.instance Definition _ := hasDecEq.Build Location (fun x y => @tag_eqP _ _ x y). (* Variables (I : eqType) (T_ : I -> eqType). *) (* Implicit Types u v : {i : I & T_ i}. *) (* HB.instance Definition _ := hasDecEq.Build {x : _ & _} (fun x y => @tag_eqP _ _ x y). *) Definition loc_eqType := pkg_core_definition_Location__canonical__eqtype_Equality. 
Definition location_eqb (ℓ ℓ' : Location) := andb (@eqtype.eq_op Datatypes_nat__canonical__eqtype_Equality (projT2 ℓ) (projT2 ℓ')) (@eqtype.eq_op _ (projT1 ℓ) (projT1 ℓ')). Definition location_eqbP : forall (l1 l2 : Location), @location_eqb (l1) (l2) = (@eqtype.eq_op _ (* (@eqtype.tag_eqType choice_type_eqType *) (* (fun _ : choice_type => ssrnat.nat_eqType)) *) l1 l2). Proof. intros. unfold location_eqb. unfold eqtype.eq_op. cbn. rewrite ssrnat.eqnE. unfold eqtype.tag_eq. unfold eqtype.tagged_as. unfold ssrfun.tag. unfold ssrfun.tagged. rewrite Bool.andb_comm. unfold eq_rect_r, eq_rect. set (eqtype.eq_op _ _) at 2. replace (choice_type_eq _ _) with b by reflexivity. destruct b eqn:b_eq ; subst b. - f_equal. case eqtype.eqP ; intros. + rewrite e in b_eq. rewrite <- e. simpl. reflexivity. + exfalso. apply (ssrbool.elimT eqtype.eqP) in b_eq. apply n. eapply b_eq. - reflexivity. Qed. Theorem is_true_split_or : forall a b, is_true (a || b)%bool = (is_true a \/ is_true b). Proof. intros. rewrite boolp.propeqE. symmetry. apply (ssrbool.rwP ssrbool.orP). Qed. Theorem is_true_split_and : forall a b, is_true (a && b)%bool = (is_true a /\ is_true b). Proof. intros. rewrite boolp.propeqE. symmetry. apply (ssrbool.rwP ssrbool.andP). Qed. Theorem is_true_split_or_ : forall a b, ((a || b)%bool = true) = (a = true \/ b = true). Proof. intros. rewrite boolp.propeqE. symmetry. apply (ssrbool.rwP ssrbool.orP). Qed. Theorem is_true_split_and_ : forall a b, ((a && b)%bool = true) = (a = true /\ b = true). Proof. intros. rewrite boolp.propeqE. symmetry. apply (ssrbool.rwP ssrbool.andP). Qed. (* Theorem LocsSubset : (forall {A} (L1 L2 : list A) (a : A), *) (* List.incl L1 L2 -> *) (* List.In a L1 -> *) (* List.In a L2). *) (* intros. *) (* induction L1 as [ | a0 L ] ; cbn in *. *) (* - contradiction. *) (* - destruct (List.incl_cons_inv H). *) (* destruct H0. *) (* + subst. *) (* assumption. *) (* + apply IHL ; assumption. *) (* Qed. 
*)
(* Soundness: the boolean [location_eqb] reflects Leibniz equality on Locations. *)
Lemma location_eqb_sound : forall ℓ ℓ' : Location, is_true (location_eqb ℓ ℓ') <-> ℓ = ℓ'. Proof. intros. rewrite location_eqbP. pose (@eqtype.eqP loc_eqType). (* unfold eqtype.Equality.axiom in a. *) pose (ssrbool.elimT). pose (@eqtype.tag_eqP ). split. apply (Couplings.reflection_nonsense _ ℓ ℓ'). intros. subst. apply eqtype.eq_refl. Qed.
(* Decidable-equality instance for Locations, packaging the two results above. *)
Global Program Instance location_eqdec: EqDec (Location) := { eqb := location_eqb; eqb_leibniz := location_eqb_sound; }.
(* Strict order on Locations: lexicographic on the tagged pair via [tag_leq];
   [location_ltb_simple] compares only the nat index. *)
Definition location_ltb : Location -> Location -> bool := (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord)). Definition location_ltb_simple : Location -> Location -> bool := fun x y => ltb (projT2 x) (projT2 y). Global Instance location_comparable : Comparable (Location) := eq_dec_lt_Comparable location_ltb.
(* On unequal naturals, the boolean strict-less test coincides with the ordType [<=]. *)
Definition le_is_ord_leq : forall s s0 : Datatypes_nat__canonical__Ord_Ord, eqtype.eq_op s s0 = false -> ltb s s0 = (s <= s0)%ord. Proof. intros s s0. unfold ltb , nat_comparable , Nat.ltb. intros e. generalize dependent s. induction s0 ; intros. * destruct s ; easy. * destruct s. reflexivity. cbn. cbn in IHs0. rewrite IHs0. reflexivity. assumption. Qed.
(* Boolean equality on opsig = (id, (argument type, result type)): compare all three components. *)
Definition opsig_eqb (ℓ ℓ' : opsig) : bool := andb (@eqtype.eq_op Datatypes_nat__canonical__eqtype_Equality (fst ℓ) (fst ℓ')) (andb (@eqtype.eq_op _ (fst (snd ℓ)) (fst (snd ℓ'))) (@eqtype.eq_op _ (snd (snd ℓ)) (snd (snd ℓ')))).
(* Soundness of [opsig_eqb]: reduce to three component-wise reflections, then recombine. *)
Lemma opsig_eqb_sound : forall ℓ ℓ' : opsig, is_true (opsig_eqb ℓ ℓ') <-> ℓ = ℓ'. Proof. intros. destruct ℓ as [? []] , ℓ' as [? []]. setoid_rewrite is_true_split_and. rewrite is_true_split_and. unfold fst, snd in *. transitivity (i = i0 /\ c = c1 /\ c0 = c2). { apply ZifyClasses.and_morph. symmetry. apply (ssrbool.rwP (@eqtype.eqP Datatypes_nat__canonical__eqtype_Equality i i0)). apply ZifyClasses.and_morph. symmetry. apply (ssrbool.rwP (@eqtype.eqP _ c c1)). symmetry. apply (ssrbool.rwP (@eqtype.eqP _ c0 c2)). } split ; [ intros [?
[]] | intros H ; inversion H ] ; subst ; easy. Qed.
(* Decidable equality for opsig. *)
Global Program Instance opsig_eqdec: EqDec (opsig) := { eqb := opsig_eqb; eqb_leibniz := opsig_eqb_sound; }. (* Theorem fset_compute : forall {T : ordType}, forall l : T, forall n : list T, List.In l n <-> is_true (ssrbool.in_mem l (@ssrbool.mem _ (seq.seq_predType (Ord.eqType T)) n)). *) (* intros. *) (* apply (ssrbool.rwP (xseq.InP _ _)). *) (* Qed. *)
(* Canonical ordType views of opsig and Location, used by the fset machinery below. *)
Definition opsig_ordType := (Datatypes_prod__canonical__Ord_Ord Datatypes_nat__canonical__Ord_Ord (Datatypes_prod__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord)). Definition loc_ordType : ordType := @Specif_sigT__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord (fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord).
(* Propositional unfolding of [List.incl]: membership of each element of [l1] in [l2], as a conjunction. *)
Fixpoint incl_expand A `{EqDec A} (l1 l2 : list A) : Prop := match l1 with | nil => True | (x :: xs) => In x l2 /\ incl_expand A xs l2 end. (* Theorem in_remove_fset : forall {T : ordType} a (l : list T), List.In a l <-> List.In a (fset l). *) (* Proof. *) (* intros. *) (* do 2 rewrite fset_compute. *) (* now rewrite <- in_fset. *) (* Qed. *) (* Theorem in_split_cat : forall a (l0 l1 : list Location), List.In a (seq.cat l0 l1) <-> List.In a l0 \/ List.In a l1. *) (* Proof. *) (* split ; intros. *) (* - induction l0. *) (* + right. apply H. *) (* + destruct H. *) (* * left. left. assumption. *) (* * destruct (IHl0 H). *) (* -- left. right. assumption. *) (* -- right. assumption. *) (* - destruct H. *) (* + induction l0. *) (* * contradiction. *) (* * destruct H. *) (* -- left. assumption. *) (* -- right. *) (* apply IHl0. *) (* assumption. *) (* + induction l0. *) (* * assumption. *) (* * right. *) (* assumption. *) (* Qed. *) (* Theorem in_split_fset_cat : forall a (l0 l1 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}), List.In a (l0 :|: l1) <-> List.In a l0 \/ List.In a l1. *) (* Proof. *) (* intros. 
*) (* transitivity (In a (seq.cat (eqtype.val l0) (eqtype.val l1))). *) (* symmetry. *) (* apply in_remove_fset. *) (* apply in_split_cat. *) (* Qed. *) (* Theorem loc_list_incl_fsubset : forall (l0 l1 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}), is_true (fsubset l0 l1) <-> List.incl l0 l1. *) (* Proof. *) (* intros. *) (* rewrite <- (ssrbool.rwP (@fsubsetP _ l0 l1)). *) (* unfold ssrbool.sub_mem. *) (* unfold incl. *) (* assert (forall {A} (P Q : A -> Prop), (forall x, P x <-> Q x) -> (forall x, P x) <-> (forall x, Q x)). *) (* { split ; intros ; apply H ; apply H0. } *) (* apply H. clear H. *) (* intros x. cbn in *. *) (* rewrite fset_compute. *) (* rewrite fset_compute. *) (* reflexivity. *) (* Qed. *) (* Theorem opsig_list_incl_fsubset : forall (l0 l1 : _), is_true (fsubset (T:=opsig_ordType) l0 l1) <-> List.incl l0 l1. *) (* Proof. *) (* intros. *) (* rewrite <- (ssrbool.rwP (@fsubsetP _ l0 l1)). *) (* unfold ssrbool.sub_mem. *) (* unfold incl. *) (* assert (forall {A} (P Q : A -> Prop), (forall x, P x <-> Q x) -> (forall x, P x) <-> (forall x, Q x)). *) (* { split ; intros ; apply H ; apply H0. } *) (* apply H. clear H. *) (* intros x. cbn in *. *) (* rewrite fset_compute. *) (* rewrite fset_compute. *) (* reflexivity. *) (* Qed. *) (* Lemma valid_injectLocations_b : *) (* forall (import : Interface) (A : choice.Choice.type) *) (* (L1 L2 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}) *) (* (v : raw_code A), *) (* List.incl L1 L2 -> ValidCode L1 import v -> ValidCode L2 import v. *) (* Proof. *) (* intros I A L1 L2 v incl. *) (* apply valid_injectLocations. *) (* apply loc_list_incl_fsubset. *) (* apply incl. *) (* Qed. 
*) (* Lemma valid_injectOpsig_b : *) (* forall (I1 I2 : Interface) (A : choice.Choice.type) *) (* (L : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}) *) (* (v : raw_code A), *) (* List.incl I1 I2 -> ValidCode L I1 v -> ValidCode L I2 v. *) (* Proof. *) (* intros I1 I2 A L v incl. *) (* apply valid_injectMap. *) (* apply opsig_list_incl_fsubset. *) (* apply incl. *) (* Qed. *) (* Theorem loc_list_incl_remove_fset {A} `{EqDec A} : forall (l1 l2 : list Location), List.incl l1 l2 <-> List.incl (fset l1) (fset l2). *) (* Proof. *) (* intros. *) (* cbn in *. *) (* induction l1. *) (* - rewrite <- fset0E. easy. *) (* - cbn. *) (* unfold incl. *) (* cbn. *) (* split. *) (* + intros. *) (* rewrite <- in_remove_fset. *) (* rewrite <- in_remove_fset in H1. *) (* apply H0. *) (* apply H1. *) (* + intros. *) (* pose (@in_remove_fset). *) (* rewrite -> (in_remove_fset (T:=loc_ordType)). *) (* apply H0. *) (* rewrite <- (in_remove_fset (T:=loc_ordType)). *) (* apply H1. *) (* Qed. *) (* Theorem opsig_list_incl_remove_fset {A} `{EqDec A} : forall (l1 l2 : list opsig), List.incl l1 l2 <-> List.incl (fset l1) (fset l2). *) (* Proof. *) (* intros. *) (* cbn in *. *) (* induction l1. *) (* - rewrite <- fset0E. easy. *) (* - cbn. *) (* unfold incl. *) (* cbn. *) (* split. *) (* + intros. *) (* rewrite <- in_remove_fset in H1 |- *. *) (* apply H0. *) (* apply H1. *) (* + intros. *) (* rewrite -> (in_remove_fset (T:=opsig_ordType)). *) (* apply H0. *) (* rewrite <- (in_remove_fset (T:=opsig_ordType)). *) (* apply H1. *) (* Qed. *) (* Theorem list_incl_cons_iff : (forall A (a : A) l1 l2, List.incl (a :: l1) l2 <-> (List.In a l2 /\ List.incl l1 l2)). *) (* Proof. *) (* split. *) (* - pose List.incl_cons_inv. *) (* apply List.incl_cons_inv. *) (* - intros []. *) (* apply List.incl_cons ; assumption. *) (* Qed. *) (* Theorem loc_list_incl_expand {A} `{EqDec A} : forall (l1 l2 : list Location), *) (* List.incl l1 l2 <-> incl_expand _ l1 l2. *) (* Proof. 
*) (* induction l1. *) (* - split ; intros. *) (* reflexivity. *) (* apply incl_nil_l. *) (* - intros. *) (* rewrite list_incl_cons_iff. *) (* cbn. *) (* apply and_iff_compat_l. *) (* apply IHl1. *) (* Qed. *) (* Theorem opsig_list_incl_expand {A} `{EqDec A} : forall (l1 l2 : list opsig), *) (* List.incl l1 l2 <-> incl_expand _ l1 l2. *) (* Proof. *) (* induction l1. *) (* - split ; intros. *) (* reflexivity. *) (* apply incl_nil_l. *) (* - intros. *) (* rewrite list_incl_cons_iff. *) (* cbn. *) (* apply and_iff_compat_l. *) (* apply IHl1. *) (* Qed. *) Definition location_lebP : (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__eqtype_Equality)) = leb. Proof. intros. do 2 (apply (@functional_extensionality Location) ; intros []). cbn. unfold tag_leq. unfold eqtype.tag_eq. unfold location_ltb. unfold tag_leq. unfold location_eqb. unfold ssrfun.tag , ssrfun.tagged , projT1 , projT2 in *. rewrite (Bool.andb_comm _ (eqtype.eq_op _ _)). destruct (eqtype.eq_op x _) eqn:x_eq_x0. 2: reflexivity. apply Couplings.reflection_nonsense in x_eq_x0. subst. rewrite eqtype.eq_refl. rewrite Bool.andb_true_l. rewrite Bool.andb_true_l. rewrite Ord.ltxx. rewrite Bool.orb_false_l. destruct (eqtype.eq_op _ _) eqn:n_eq_n0. 2: reflexivity. unfold eqtype.tagged_as in *. unfold ssrfun.tagged , projT2 in *. unfold eq_rect_r , eq_rect in *. destruct eqtype.eqP in *. 2: contradiction. cbn in n_eq_n0. rewrite <- e. rewrite ssrnat.eqnE in n_eq_n0. apply Couplings.reflection_nonsense in n_eq_n0. apply Ord.eq_leq. assumption. Qed. Lemma iff_extensionality : forall {A} (P Q : A -> Prop), (forall a, P a <-> Q a) -> ((forall a, P a) <-> (forall a, Q a)). Proof. intros. split ; intuition. Qed. Lemma iff_eq_sym : forall {A} (x y : A), (x = y) <-> (y = x). Proof. intros. split ; intuition. Qed. Definition loc_seq_has (a : Location) := seq.has (ssrbool.fun_of_rel (@eqtype.eq_op loc_eqType) a). 
Theorem loc_seq_has_remove_sort {A} `{EqDec A} : forall (l : list Location) (a : Location) leb, is_true (loc_seq_has a l) <-> is_true (loc_seq_has a (path.sort leb l)). Proof. intros. rewrite <- (Bool.negb_involutive (loc_seq_has a (path.sort leb l))). unfold loc_seq_has. rewrite <- seq.all_predC. rewrite path.all_sort. rewrite seq.all_predC. rewrite Bool.negb_involutive. reflexivity. Qed. (* Theorem list_in_iff_seq_has {A} `{EqDec A} : forall (l : list Location) (a : Location), *) (* is_true (loc_seq_has a l) <-> List.In a l. *) (* Proof. *) (* induction l ; intros. *) (* - split ; intros ; easy. *) (* - cbn. *) (* rewrite is_true_split_or. *) (* apply ZifyClasses.or_morph. *) (* + rewrite <- (ssrbool.rwP (@eqtype.eqP loc_eqType a0 a)). *) (* apply iff_eq_sym. *) (* + apply IHl. *) (* Qed. *) (* Theorem list_in_iff_list_in_sort {A} `{EqDec A} : forall (l : list Location) (a : Location) leb, *) (* List.In a l <-> List.In a (path.sort leb l). *) (* Proof. *) (* intros. *) (* rewrite <- (list_in_iff_seq_has (path.sort leb l)). *) (* rewrite <- loc_seq_has_remove_sort. *) (* rewrite list_in_iff_seq_has. *) (* reflexivity. *) (* Qed. *) (* Theorem list_in_sort_order_ignorant_compute {A} `{EqDec A} : forall (l : list Location) leb1 leb2 a, *) (* (List.In a (path.sort leb1 l)) <-> List.In a (path.sort leb2 l). *) (* Proof. *) (* intros. *) (* rewrite <- list_in_iff_list_in_sort. *) (* rewrite <- list_in_iff_list_in_sort. *) (* reflexivity. *) (* Qed. *) (* Theorem list_incl_sort_order_ignorant_compute {A} `{EqDec A} : forall (l1 l2 : list Location) leb1 leb2, *) (* List.incl (path.sort leb1 l1) (path.sort leb1 l2) <-> List.incl (path.sort leb2 l1) (path.sort leb2 l2). *) (* Proof. *) (* intros. *) (* apply iff_extensionality. *) (* intros a. *) (* rewrite list_in_sort_order_ignorant_compute with (leb1 := leb1) (leb2 := leb2). *) (* rewrite list_in_sort_order_ignorant_compute with (leb1 := leb1) (leb2 := leb2). *) (* reflexivity. *) (* Qed. 
*) (* Theorem list_incl_sort {A} `{EqDec A} : forall (l1 l2 : list Location) leb, *) (* List.incl l1 l2 <-> List.incl (path.sort leb l1) (path.sort leb l2). *) (* Proof. *) (* intros. *) (* apply iff_extensionality. *) (* intros a. *) (* rewrite <- list_in_iff_list_in_sort. *) (* rewrite <- list_in_iff_list_in_sort. *) (* reflexivity. *) (* Qed. *) Theorem choice_type_test_refl : forall x , is_true (choice_type_test x x). Proof. intros. replace (choice_type_test _ _) with (eqtype.eq_op x x) by reflexivity. apply eqtype.eq_refl. Qed. (* Theorem fset_eqEincl: forall a b : list Location, fset a = fset b <-> List.incl a b /\ List.incl b a. *) (* Proof. *) (* intros. *) (* rewrite (ssrbool.rwP (@eqtype.eqP _ (fset a) (fset b))). *) (* rewrite (@eqEfsubset _ (fset a) (fset b)). *) (* rewrite is_true_split_and. *) (* apply ZifyClasses.and_morph ; rewrite loc_list_incl_fsubset ; rewrite <- loc_list_incl_remove_fset ; reflexivity. *) (* Qed. *) Lemma path_sorted_tl : forall {T : ordType} {A} {e} {fmval : list A}, is_true (path.sorted e fmval) -> is_true (path.sorted e (tl fmval)). Proof. intros. destruct fmval. - easy. - cbn. cbn in H. destruct (fmval). + reflexivity. + cbn in H. now rewrite LocationUtility.is_true_split_and in H. Qed. Fixpoint eqb_fset_helper {T : ordType} `{EqDec T} (x : list T) (i : is_true (path.sorted Ord.lt x)) (y : list T) (j : is_true (path.sorted Ord.lt y)) : bool := match x, y return is_true (path.sorted Ord.lt x) -> is_true (path.sorted Ord.lt y) -> bool with | [], [] => fun _ _ => true | a :: xs , b :: ys => fun i j => andb (eqb a b) (eqb_fset_helper xs (path_sorted_tl (T := T) i) ys (path_sorted_tl (T := T) j)) | _, _ => fun _ _ => false end i j. Transparent eqb_fset_helper. Definition eqb_fset {T : ordType} `{EqDec T} (x y : {fset T}) : bool := match x , y with | @FSet.FSet _ fsval i, @FSet.FSet _ fsval0 i0 => eqb_fset_helper fsval i fsval0 i0 end. Transparent eqb_fset. 
Theorem eqb_leibniz_fset {T : ordType} `{EqDec T} : forall (x y : {fset T}), is_true (eqb_fset x y) <-> x = y. Proof. intros. split. - intros. destruct x , y. unfold eqb_fset in H0. apply pkg_composition.fsval_eq. simpl. generalize dependent fsval0. induction fsval ; intros. + destruct fsval0. * reflexivity. * discriminate H0. + destruct fsval0. * discriminate H0. * cbn in H0. rewrite is_true_split_and in H0 ; destruct H0. apply (eqb_leibniz a s) in H0. subst. f_equal. eapply IHfsval. apply H1. - intros. subst. destruct y. simpl. induction fsval. + reflexivity. + simpl. rewrite IHfsval. now rewrite eqb_refl. Qed. Instance fset_EqDec {T : ordType} `{EqDec T} : EqDec {fset T} := {| eqb := eqb_fset ; eqb_leibniz := eqb_leibniz_fset |}. ================================================ FILE: hax-lib/proof-libs/coq/ssprove/src/dune ================================================ (coq.theory (name Hacspec) ; -R flag (package coq-hacspec-ssprove) (flags -w all) (theories mathcomp elpi HB deriving ; Mathcomp extructures Equations ConCert stdpp MetaCoq Ltac2 ; ConCert ; Jasmin Crypt Mon Relational ; SSProve ) ; (libraries ) ) ; (include_subdirs qualified) ================================================ FILE: hax-lib/proof-libs/fstar/.envrc ================================================ use flake .#examples ================================================ FILE: hax-lib/proof-libs/fstar/Makefile.copy ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. 
# Self-contained F* verification Makefile: discovers *.fst roots, computes a
# dependency graph with `fstar --dep`, and caches checked modules under
# $(CACHE_DIR) with hint files under $(HINT_DIR). The %.fst-in/%.fsti-in
# targets feed flags to the F* emacs mode (see the setup snippet below).
# # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verifies ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." # (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache HINT_DIR ?= $(HAX_LIBS_HOME)/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. Here, we # *extend* the set of relevant files with the tests. 
ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* ================================================ FILE: hax-lib/proof-libs/fstar/README.md ================================================ ## Libraries for Hax The goal of this directory is to serve as a snapshot of the current F* supporting libraries for Hax. The dependency chain is: `rust_primitives` <- `core` <- `hax_lib` # Rust Primitives The `/rust_primitives` directory contains hand-written models for Rust built-in features like machine integers and arrays. In particular, the code in this directory reconciles any type or semantic differences between Rust and F*. A number of files in this directory use the [HACL Library](https://github.com/hacl-star/hacl-star/tree/main/lib). # Core & Alloc The `/core` directory contains hand-written models for some parts of the Core and Alloc libraries of Rust. 
As a first goal, we would like to typecheck the code in this directory against interfaces generated from Rust Core and Alloc. As a second goal, we would like to generate the code in this directory from an annotated version of Rust Core and Alloc. # Hax Library The `/hax_lib` directory contains hand-written and generated code for the Hax library which adds new features and functionality to Rust to help programmers. For example, this library includes bounded indexes for arrays, unbounded integers etc. ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Alloc.fst ================================================ module Alloc.Alloc #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Global = | Global : t_Global ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Borrow.fst ================================================ module Alloc.Borrow #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Cow (v_T: Type0) = | Cow : v_T -> t_Cow v_T class t_ToOwned (v_Self: Type0) = { f_to_owned_pre:v_Self -> Type0; f_to_owned_post:v_Self -> v_Self -> Type0; f_to_owned:x0: v_Self -> Prims.Pure v_Self (f_to_owned_pre x0) (fun result -> f_to_owned_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T: Type0) : t_ToOwned v_T = { f_to_owned_pre = (fun (self: v_T) -> true); f_to_owned_post = (fun (self: v_T) (out: v_T) -> true); f_to_owned = fun (self: v_T) -> self } ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Boxed.fst ================================================ module Alloc.Boxed #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Box (v_T: Type0) = | Box : v_T -> t_Box v_T let impl__new (#v_T: Type0) (v: v_T) : v_T = v ================================================ FILE: 
hax-lib/proof-libs/fstar/core/Alloc.Collections.Binary_heap.fst
================================================
(* Model of Rust's alloc::collections::BinaryHeap.
   The heap is represented as a plain vector; pop/peek locate a maximal
   element by linear scan instead of maintaining heap shape.  This keeps
   the observable contract (pop/peek yield a greatest element) while
   remaining simple to reason about. *)
module Alloc.Collections.Binary_heap
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives
open Rust_primitives.Notations

(* A heap over elements `v_T` with (phantom) allocator `v_A` is just a vector. *)
type t_BinaryHeap (v_T: Type0) (v_A: Type0) =
  | BinaryHeap : Alloc.Vec.t_Vec v_T v_A -> t_BinaryHeap v_T v_A

(* `BinaryHeap::new`: the empty heap (empty underlying sequence).
   The `t_Ord` instance is carried to mirror the Rust API; it is unused here. *)
let impl_10__new
      (#v_T: Type0)
      (#v_A: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)
      (_: Prims.unit)
    : t_BinaryHeap v_T v_A =
  BinaryHeap
    (Alloc.Vec.Vec (Rust_primitives.Sequence.seq_empty #v_T ())
        (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData v_A)
      <: Alloc.Vec.t_Vec v_T v_A)
  <: t_BinaryHeap v_T v_A

(* `BinaryHeap::len`: length of the backing vector. *)
let impl_11__len
      (#v_T #v_A: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)
      (self: t_BinaryHeap v_T v_A)
    : usize = Alloc.Vec.impl_1__len #v_T #v_A self._0

(* `BinaryHeap::push`: append to the backing vector.
   Precondition `len < usize::MAX` rules out length overflow. *)
let impl_10__push
      (#v_T #v_A: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)
      (self: t_BinaryHeap v_T v_A)
      (v: v_T)
    : Prims.Pure (t_BinaryHeap v_T v_A)
      (requires (impl_11__len #v_T #v_A self <: usize) <. Core_models.Num.impl_usize__MAX)
      (fun _ -> Prims.l_True) =
  let self:t_BinaryHeap v_T v_A =
    { self with _0 = Alloc.Vec.impl_1__push #v_T #v_A self._0 v } <: t_BinaryHeap v_T v_A
  in
  self

(* `BinaryHeap::pop`: remove and return a greatest element.
   Postcondition: the result is `Some` exactly when the input heap was
   non-empty.  The implementation scans the vector with `fold_range`,
   tracking the index of the current maximum (strict `f_gt` keeps the
   first maximal element on ties), then removes that index. *)
let impl_10__pop
      (#v_T #v_A: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)
      (self: t_BinaryHeap v_T v_A)
    : Prims.Pure (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)
      Prims.l_True
      (ensures
        fun temp_0_ ->
          let (self_e_future: t_BinaryHeap v_T v_A), (res: Core_models.Option.t_Option v_T) =
            temp_0_
          in
          ((impl_11__len #v_T #v_A self <: usize) >. mk_usize 0 <: bool) =.
          (Core_models.Option.impl__is_some #v_T res <: bool)) =
  let (max: Core_models.Option.t_Option v_T):Core_models.Option.t_Option v_T =
    Core_models.Option.Option_None <: Core_models.Option.t_Option v_T
  in
  let index:usize = mk_usize 0 in
  (* Loop invariant: `max` is `Some` iff at least one element has been seen. *)
  let (index: usize), (max: Core_models.Option.t_Option v_T) =
    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)
      (impl_11__len #v_T #v_A self <: usize)
      (fun temp_0_ i ->
          let (index: usize), (max: Core_models.Option.t_Option v_T) = temp_0_ in
          let i:usize = i in
          (i >. mk_usize 0 <: bool) =.
          (Core_models.Option.impl__is_some #v_T max <: bool)
          <: bool)
      (index, max <: (usize & Core_models.Option.t_Option v_T))
      (fun temp_0_ i ->
          let (index: usize), (max: Core_models.Option.t_Option v_T) = temp_0_ in
          let i:usize = i in
          (* Take element i as the new maximum when there is no maximum yet,
             or when it is strictly greater than the current one. *)
          if
            Core_models.Option.impl__is_none_or #v_T
              #(v_T -> bool)
              max
              (fun max ->
                  let max:v_T = max in
                  Core_models.Cmp.f_gt #v_T
                    #v_T
                    #FStar.Tactics.Typeclasses.solve
                    (self._0.[ i ] <: v_T)
                    max
                  <: bool)
            <: bool
          then
            let max:Core_models.Option.t_Option v_T =
              Core_models.Option.Option_Some self._0.[ i ] <: Core_models.Option.t_Option v_T
            in
            let index:usize = i in
            index, max <: (usize & Core_models.Option.t_Option v_T)
          else index, max <: (usize & Core_models.Option.t_Option v_T))
  in
  let (self: t_BinaryHeap v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) =
    if Core_models.Option.impl__is_some #v_T max
    then
      let (tmp0: Alloc.Vec.t_Vec v_T v_A), (out: v_T) =
        Alloc.Vec.impl_1__remove #v_T #v_A self._0 index
      in
      let self:t_BinaryHeap v_T v_A = { self with _0 = tmp0 } <: t_BinaryHeap v_T v_A in
      self, (Core_models.Option.Option_Some out <: Core_models.Option.t_Option v_T)
      <: (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)
    else
      self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)
      <: (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)
  in
  self, hax_temp_output <: (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)

(* `BinaryHeap::peek`: same scan as `pop`, but only computes the maximal
   element and leaves the heap unchanged.  Same Some-iff-non-empty
   postcondition. *)
let impl_11__peek
      (#v_T #v_A: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)
      (self: t_BinaryHeap v_T v_A)
    : Prims.Pure (Core_models.Option.t_Option v_T)
      Prims.l_True
      (ensures
        fun res ->
          let res:Core_models.Option.t_Option v_T = res in
          ((impl_11__len #v_T #v_A self <: usize) >. mk_usize 0 <: bool) =.
          (Core_models.Option.impl__is_some #v_T res <: bool)) =
  let (max: Core_models.Option.t_Option v_T):Core_models.Option.t_Option v_T =
    Core_models.Option.Option_None <: Core_models.Option.t_Option v_T
  in
  let max:Core_models.Option.t_Option v_T =
    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)
      (impl_11__len #v_T #v_A self <: usize)
      (fun max i ->
          let max:Core_models.Option.t_Option v_T = max in
          let i:usize = i in
          (i >. mk_usize 0 <: bool) =.
          (Core_models.Option.impl__is_some #v_T max <: bool)
          <: bool)
      max
      (fun max i ->
          let max:Core_models.Option.t_Option v_T = max in
          let i:usize = i in
          if
            Core_models.Option.impl__is_none_or #v_T
              #(v_T -> bool)
              max
              (fun max ->
                  let max:v_T = max in
                  Core_models.Cmp.f_gt #v_T
                    #v_T
                    #FStar.Tactics.Typeclasses.solve
                    (self._0.[ i ] <: v_T)
                    max
                  <: bool)
            <: bool
          then
            let max:Core_models.Option.t_Option v_T =
              Core_models.Option.Option_Some self._0.[ i ] <: Core_models.Option.t_Option v_T
            in
            max
          else max)
  in
  max

(* Axiom linking `peek` to the value component of `pop`, with an SMT
   pattern on `peek` so the solver applies it automatically.
   NOTE(review): assumed rather than proved — both functions run the same
   scan, so this looks provable; confirm the axiom is intentional. *)
assume
val lemma_peek_pop:
    #t:Type ->
    (#a: Type) ->
    (#i: Core_models.Cmp.t_Ord t) ->
    h: t_BinaryHeap t a
  -> Lemma (impl_11__peek h == snd (impl_10__pop h)) [SMTPat (impl_11__peek #t #a h)]

================================================
FILE: hax-lib/proof-libs/fstar/core/Alloc.Collections.Btree.Set.fsti
================================================
(* Abstract interface for alloc::collections::BTreeSet: the representation
   is opaque, revealed only to support decidable equality (`eqtype`). *)
module Alloc.Collections.Btree.Set
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

val t_BTreeSet (v_T v_U: Type0) : eqtype

(* `BTreeSet::new`: abstract constructor, no specification beyond totality. *)
val impl_11__new: #v_T: Type0 -> #v_U: Type0 -> Prims.unit
  -> Prims.Pure (t_BTreeSet v_T v_U) Prims.l_True (fun _ -> Prims.l_True)

================================================
FILE: hax-lib/proof-libs/fstar/core/Alloc.Collections.Vec_deque.fsti
================================================
(* Model of alloc::collections::VecDeque (interface file): a deque is a
   sequence of elements plus a phantom allocator parameter. *)
module Alloc.Collections.Vec_deque
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

type t_VecDeque (v_T: Type0) (v_A: Type0) =
  | VecDeque :
      Rust_primitives.Sequence.t_Seq v_T ->
      Core_models.Marker.t_PhantomData v_A
    -> t_VecDeque v_T v_A

(* `VecDeque::push_back` — declared abstract; no specification beyond totality. *)
val impl_5__push_back (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A) (x: v_T)
    : Prims.Pure (t_VecDeque v_T v_A) Prims.l_True (fun _ -> Prims.l_True)

(* `VecDeque::len`. *)
val impl_5__len (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A)
    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)

(* `VecDeque::pop_front` — returns the updated deque and the popped element, if any. *)
val impl_5__pop_front (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A)
    : Prims.Pure (t_VecDeque v_T v_A & Core_models.Option.t_Option v_T)
      Prims.l_True
      (fun _ -> Prims.l_True)

(* `Index<usize>` instance: indexes the underlying sequence.
   NOTE(review): `f_index_pre` is `true`; any bounds obligation must come
   from `seq_index` itself — confirm against Rust_primitives.Sequence. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_6 (#v_T #v_A: Type0) : Core_models.Ops.Index.t_Index (t_VecDeque v_T v_A) usize =
  {
    f_Output = v_T;
    f_index_pre = (fun (self: t_VecDeque v_T v_A) (i: usize) -> true);
    f_index_post = (fun (self: t_VecDeque v_T v_A) (i: usize) (out: v_T) -> true);
    f_index
    =
    fun (self: t_VecDeque v_T v_A) (i: usize) ->
      Rust_primitives.Sequence.seq_index #v_T self._0 i
  }

================================================
FILE: hax-lib/proof-libs/fstar/core/Alloc.Fmt.fst
================================================
(* Model of alloc::fmt: formatting is left uninterpreted — `format'` is an
   assumed function from pre-rendered arguments to a string. *)
module Alloc.Fmt
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

assume
val format': args: Core_models.Fmt.t_Arguments -> Alloc.String.t_String

(* `unfold` alias so uses reduce to the assumed symbol. *)
unfold let format = format'

================================================
FILE: hax-lib/proof-libs/fstar/core/Alloc.Slice.fst
================================================
(* Models for slice-to-vector conversions and slice sorting. *)
module Alloc.Slice
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* `<[T]>::to_vec`: copy the slice contents into a fresh `Vec` with the
   Global allocator. *)
let impl__to_vec (#v_T: Type0) (s: t_Slice v_T) : Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global =
  Alloc.Vec.Vec (Rust_primitives.Sequence.seq_from_slice #v_T s)
    (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData
Alloc.Alloc.t_Global) <: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global let impl__into_vec (#v_T #v_A: Type0) (s: t_Slice v_T) : Alloc.Vec.t_Vec v_T v_A = Alloc.Vec.Vec (Rust_primitives.Sequence.seq_from_slice #v_T s) (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData v_A) <: Alloc.Vec.t_Vec v_T v_A assume val impl__sort_by': #v_T: Type0 -> #v_F: Type0 -> {| i0: Core_models.Ops.Function.t_Fn v_F (v_T & v_T) |} -> s: t_Slice v_T -> compare: v_F -> t_Slice v_T unfold let impl__sort_by (#v_T #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F (v_T & v_T)) = impl__sort_by' #v_T #v_F #i0 ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.String.fst ================================================ module Alloc.String #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_String = | String : string -> t_String let impl_String__new (_: Prims.unit) : t_String = String "" <: t_String let impl_String__push_str (self: t_String) (other: string) : t_String = let self:t_String = String (Rust_primitives.String.str_concat self._0 other) <: t_String in self let impl_String__push (self: t_String) (c: FStar.Char.char) : t_String = let self:t_String = String (Rust_primitives.String.str_concat self._0 (Rust_primitives.String.str_of_char c <: string)) <: t_String in self let impl_String__pop (self: t_String) : (t_String & Core_models.Option.t_Option FStar.Char.char) = let l:usize = Core_models.Str.impl_str__len self._0 in let (self: t_String), (hax_temp_output: Core_models.Option.t_Option FStar.Char.char) = if l >. mk_usize 0 then let self:t_String = String (Rust_primitives.String.str_sub self._0 (mk_usize 0) (l -! mk_usize 1 <: usize)) <: t_String in self, (Core_models.Option.Option_Some (Rust_primitives.String.str_index self._0 (l -! 
mk_usize 1 <: usize)) <: Core_models.Option.t_Option FStar.Char.char) <: (t_String & Core_models.Option.t_Option FStar.Char.char) else self, (Core_models.Option.Option_None <: Core_models.Option.t_Option FStar.Char.char) <: (t_String & Core_models.Option.t_Option FStar.Char.char) in self, hax_temp_output <: (t_String & Core_models.Option.t_Option FStar.Char.char) ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Vec.Drain.fst ================================================ module Alloc.Vec.Drain #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Drain (v_T: Type0) (v_A: Type0) = | Drain : Rust_primitives.Sequence.t_Seq v_T -> Core_models.Marker.t_PhantomData v_A -> t_Drain v_T v_A [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T #v_A: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Drain v_T v_A) = { f_Item = v_T; f_next_pre = (fun (self: t_Drain v_T v_A) -> true); f_next_post = (fun (self: t_Drain v_T v_A) (out: (t_Drain v_T v_A & Core_models.Option.t_Option v_T)) -> true); f_next = fun (self: t_Drain v_T v_A) -> let (self: t_Drain v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) = if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. 
mk_usize 0 then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T) <: (t_Drain v_T v_A & Core_models.Option.t_Option v_T) else let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in let self:t_Drain v_T v_A = { self with _0 = Rust_primitives.Sequence.seq_slice #v_T self._0 (mk_usize 1) (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) } <: t_Drain v_T v_A in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T) <: (t_Drain v_T v_A & Core_models.Option.t_Option v_T) in self, hax_temp_output <: (t_Drain v_T v_A & Core_models.Option.t_Option v_T) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Vec.Into_iter.fsti ================================================ module Alloc.Vec.Into_iter val t_IntoIter (t: Type0) (_: unit): Type0 [@@ FStar.Tactics.Typeclasses.tcinstance] val into_iter_into_iterator (t: Type0): Core_models.Iter.Traits.Collect.t_IntoIterator (t_IntoIter t Alloc.Alloc.t_Global) ================================================ FILE: hax-lib/proof-libs/fstar/core/Alloc.Vec.fst ================================================ module Alloc.Vec #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Vec (v_T: Type0) (v_A: Type0) = | Vec : Rust_primitives.Sequence.t_Seq v_T -> Core_models.Marker.t_PhantomData v_A -> t_Vec v_T v_A let from_elem (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T) (item: v_T) (len: usize) : t_Vec v_T Alloc.Alloc.t_Global = Vec (Rust_primitives.Sequence.seq_create #v_T item len) (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData Alloc.Alloc.t_Global) <: t_Vec v_T Alloc.Alloc.t_Global let impl__new (#v_T: Type0) (_: Prims.unit) : t_Vec v_T Alloc.Alloc.t_Global = Vec (Rust_primitives.Sequence.seq_empty #v_T ()) (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData Alloc.Alloc.t_Global) <: t_Vec v_T Alloc.Alloc.t_Global let 
impl__with_capacity (#v_T: Type0) (e_c: usize) : t_Vec v_T Alloc.Alloc.t_Global =
  (* Capacity is meaningless in this model (sequences have no capacity),
     so `with_capacity` is just `new`; the requested capacity `e_c` is ignored. *)
  impl__new #v_T ()

(* `Vec::len`: length of the underlying sequence. *)
let impl_1__len (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : usize =
  Rust_primitives.Sequence.seq_len #v_T self._0

(* `Vec::pop`: if non-empty, drop the last element and return it;
   otherwise return the vector unchanged together with `None`. *)
let impl_1__pop (#v_T #v_A: Type0) (self: t_Vec v_T v_A)
    : (t_Vec v_T v_A & Core_models.Option.t_Option v_T) =
  let (self: t_Vec v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) =
    if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) >. mk_usize 0
    then
      let last:v_T = Rust_primitives.Sequence.seq_last #v_T self._0 in
      (* Keep everything but the final element. *)
      let self:t_Vec v_T v_A =
        {
          self with
          _0
          =
          Rust_primitives.Sequence.seq_slice #v_T
            self._0
            (mk_usize 0)
            ((Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) -! mk_usize 1 <: usize)
        }
        <: t_Vec v_T v_A
      in
      self, (Core_models.Option.Option_Some last <: Core_models.Option.t_Option v_T)
      <: (t_Vec v_T v_A & Core_models.Option.t_Option v_T)
    else
      self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)
      <: (t_Vec v_T v_A & Core_models.Option.t_Option v_T)
  in
  self, hax_temp_output <: (t_Vec v_T v_A & Core_models.Option.t_Option v_T)

(* `Vec::is_empty`: length comparison against zero. *)
let impl_1__is_empty (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : bool =
  (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =.
  mk_usize 0

(* `Vec::as_slice`: expose the underlying sequence as a slice. *)
let impl_1__as_slice (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : t_Slice v_T =
  Rust_primitives.Sequence.seq_to_slice #v_T self._0

(* The following operations are left abstract (assumed), each paired with
   an `unfold` wrapper so call sites reduce to the primed symbol. *)

(* `Vec::truncate`. *)
assume
val impl_1__truncate': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> n: usize
  -> t_Vec v_T v_A

unfold let impl_1__truncate (#v_T #v_A: Type0) = impl_1__truncate' #v_T #v_A

(* `Vec::swap_remove`: returns the updated vector and the removed element. *)
assume
val impl_1__swap_remove': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> n: usize
  -> (t_Vec v_T v_A & v_T)

unfold let impl_1__swap_remove (#v_T #v_A: Type0) = impl_1__swap_remove' #v_T #v_A

(* `Vec::remove`: returns the updated vector and the removed element. *)
assume
val impl_1__remove': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> index: usize
  -> (t_Vec v_T v_A & v_T)

unfold let impl_1__remove (#v_T #v_A: Type0) = impl_1__remove' #v_T #v_A

(* `Vec::clear`. *)
assume
val impl_1__clear': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> t_Vec v_T v_A

unfold let impl_1__clear (#v_T #v_A: Type0) = impl_1__clear' #v_T #v_A

(* `Vec::drain`: returns the drained vector and a `Drain` iterator.
   NOTE(review): the range argument `e_range` is unconstrained here —
   confirm callers only pass full ranges. *)
assume
val impl_1__drain':
    #v_T: Type0 ->
    #v_A: Type0 ->
    #v_R: Type0 ->
    self: t_Vec v_T v_A ->
    e_range: v_R
  -> (t_Vec v_T v_A & Alloc.Vec.Drain.t_Drain v_T v_A)

unfold let impl_1__drain (#v_T #v_A #v_R: Type0) = impl_1__drain' #v_T #v_A #v_R

(* `Vec::push`: append one element by concatenating a singleton sequence.
   Precondition `len < usize::MAX` rules out length overflow. *)
let impl_1__push (#v_T #v_A: Type0) (self: t_Vec v_T v_A) (x: v_T)
    : Prims.Pure (t_Vec v_T v_A)
      (requires
        (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) <.
        Core_models.Num.impl_usize__MAX)
      (fun _ -> Prims.l_True) =
  let self:t_Vec v_T v_A =
    {
      self with
      _0
      =
      Rust_primitives.Sequence.seq_concat #v_T
        self._0
        (Rust_primitives.Sequence.seq_one #v_T x <: Rust_primitives.Sequence.t_Seq v_T)
    }
    <: t_Vec v_T v_A
  in
  self

(* `Vec::insert`: insert `element` at `index`, shifting the tail right.
   Requires `index <= len` and `len < usize::MAX` (the bound continues on
   the following source chunk). *)
let impl_1__insert (#v_T #v_A: Type0) (self: t_Vec v_T v_A) (index: usize) (element: v_T)
    : Prims.Pure (t_Vec v_T v_A)
      (requires
        index <=. (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) &&
        (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) <.
Core_models.Num.impl_usize__MAX) (fun _ -> Prims.l_True) = let left:Rust_primitives.Sequence.t_Seq v_T = Rust_primitives.Sequence.seq_slice #v_T self._0 (mk_usize 0) index in let right:Rust_primitives.Sequence.t_Seq v_T = Rust_primitives.Sequence.seq_slice #v_T self._0 index (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) in let left:Rust_primitives.Sequence.t_Seq v_T = Rust_primitives.Sequence.seq_concat #v_T left (Rust_primitives.Sequence.seq_one #v_T element <: Rust_primitives.Sequence.t_Seq v_T) in let left:Rust_primitives.Sequence.t_Seq v_T = Rust_primitives.Sequence.seq_concat #v_T left right in let self:t_Vec v_T v_A = { self with _0 = left } <: t_Vec v_T v_A in self assume val impl_1__resize': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> new_size: usize -> value: v_T -> Prims.Pure (t_Vec v_T v_A) Prims.l_True (ensures fun self_e_future -> let self_e_future:t_Vec v_T v_A = self_e_future in (impl_1__len #v_T #v_A self_e_future <: usize) =. new_size) unfold let impl_1__resize (#v_T #v_A: Type0) = impl_1__resize' #v_T #v_A let impl_1__append (#v_T #v_A: Type0) (self other: t_Vec v_T v_A) : Prims.Pure (t_Vec v_T v_A & t_Vec v_T v_A) (requires ((Rust_primitives.Hax.Int.from_machine (impl_1__len #v_T #v_A self <: usize) <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine (impl_1__len #v_T #v_A other <: usize) <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine Core_models.Num.impl_usize__MAX <: Hax_lib.Int.t_Int)) (fun _ -> Prims.l_True) = let self:t_Vec v_T v_A = { self with _0 = Rust_primitives.Sequence.seq_concat #v_T self._0 other._0 } <: t_Vec v_T v_A in let other:t_Vec v_T v_A = { other with _0 = Rust_primitives.Sequence.seq_empty #v_T () } <: t_Vec v_T v_A in self, other <: (t_Vec v_T v_A & t_Vec v_T v_A) let impl_2__extend_from_slice (#v_T #v_A: Type0) (s: t_Vec v_T v_A) (other: t_Slice v_T) : Prims.Pure (t_Vec v_T v_A) (requires ((Rust_primitives.Hax.Int.from_machine 
(Rust_primitives.Sequence.seq_len #v_T s._0 <: usize) <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine (Core_models.Slice.impl__len #v_T other <: usize) <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine Core_models.Num.impl_usize__MAX <: Hax_lib.Int.t_Int)) (fun _ -> Prims.l_True) = let s:t_Vec v_T v_A = { s with _0 = Rust_primitives.Sequence.seq_concat #v_T s._0 (Rust_primitives.Sequence.seq_from_slice #v_T other <: Rust_primitives.Sequence.t_Seq v_T) } <: t_Vec v_T v_A in s [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T #v_A: Type0) : Core_models.Ops.Index.t_Index (t_Vec v_T v_A) usize = { f_Output = v_T; f_index_pre = (fun (self_: t_Vec v_T v_A) (i: usize) -> i <. (impl_1__len #v_T #v_A self_ <: usize)); f_index_post = (fun (self: t_Vec v_T v_A) (i: usize) (out: v_T) -> true); f_index = fun (self: t_Vec v_T v_A) (i: usize) -> Rust_primitives.Sequence.seq_index #v_T self._0 i } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T #v_A: Type0) : Core_models.Ops.Deref.t_Deref (t_Vec v_T v_A) = { f_Target = t_Slice v_T; f_deref_pre = (fun (self: t_Vec v_T v_A) -> true); f_deref_post = (fun (self: t_Vec v_T v_A) (out: t_Slice v_T) -> true); f_deref = fun (self: t_Vec v_T v_A) -> impl_1__as_slice #v_T #v_A self } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_5': #v_T: Type0 -> Core_models.Iter.Traits.Collect.t_FromIterator (t_Vec v_T Alloc.Alloc.t_Global) v_T unfold let impl_5 (#v_T: Type0) = impl_5' #v_T ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Array.Iter.fst ================================================ module Core_models.Array.Iter #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_IntoIter (v_T: Type0) (v_N: usize) = | IntoIter : Rust_primitives.Sequence.t_Seq v_T -> t_IntoIter v_T v_N [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T: Type0) (v_N: usize) : 
Core_models.Iter.Traits.Iterator.t_Iterator (t_IntoIter v_T v_N) = { f_Item = v_T; f_next_pre = (fun (self: t_IntoIter v_T v_N) -> true); f_next_post = (fun (self: t_IntoIter v_T v_N) (out: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T)) -> true); f_next = fun (self: t_IntoIter v_T v_N) -> let (self: t_IntoIter v_T v_N), (hax_temp_output: Core_models.Option.t_Option v_T) = if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. mk_usize 0 then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T) <: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T) else let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in let self:t_IntoIter v_T v_N = { self with _0 = Rust_primitives.Sequence.seq_slice #v_T self._0 (mk_usize 1) (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) } <: t_IntoIter v_T v_N in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T) <: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T) in self, hax_temp_output <: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Array.fst ================================================ module Core_models.Array #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_TryFromSliceError = | TryFromSliceError : t_TryFromSliceError let impl_23__map (#v_T: Type0) (v_N: usize) (#v_F #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (s: t_Array v_T v_N) (f: (v_T -> v_U)) : t_Array v_U v_N = Rust_primitives.Slice.array_map #v_T #v_U v_N #(v_T -> v_U) s f let impl_23__as_slice (#v_T: Type0) (v_N: usize) (s: t_Array v_T v_N) : t_Slice v_T = Rust_primitives.Slice.array_as_slice #v_T v_N s let from_fn (#v_T: Type0) (v_N: usize) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: 
Core_models.Ops.Function.t_FnOnce v_F usize) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_T}) (f: (usize -> v_T)) : t_Array v_T v_N = Rust_primitives.Slice.array_from_fn #v_T v_N #(usize -> v_T) f [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_24 (#v_T: Type0) (v_N: usize) : Core_models.Iter.Traits.Collect.t_IntoIterator (t_Array v_T v_N) = { f_IntoIter = Core_models.Array.Iter.t_IntoIter v_T v_N; f_into_iter_pre = (fun (self: t_Array v_T v_N) -> true); f_into_iter_post = (fun (self: t_Array v_T v_N) (out: Core_models.Array.Iter.t_IntoIter v_T v_N) -> true); f_into_iter = fun (self: t_Array v_T v_N) -> Core_models.Array.Iter.IntoIter (Rust_primitives.Sequence.seq_from_array #v_T v_N self) <: Core_models.Array.Iter.t_IntoIter v_T v_N } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_25 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) usize = { f_Output = v_T; f_index_pre = (fun (self_: t_Array v_T v_N) (i: usize) -> i <. (Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize)); f_index_post = (fun (self: t_Array v_T v_N) (i: usize) (out: v_T) -> true); f_index = fun (self: t_Array v_T v_N) (i: usize) -> Rust_primitives.Slice.array_index #v_T v_N self i } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_26 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_Range usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) -> i.Core_models.Ops.Range.f_start <=. i.Core_models.Ops.Range.f_end && i.Core_models.Ops.Range.f_end <=. 
(Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize)); f_index_post = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) (out: t_Slice v_T) -> true ); f_index = fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) -> Rust_primitives.Slice.array_slice #v_T v_N self i.Core_models.Ops.Range.f_start i.Core_models.Ops.Range.f_end } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_27 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_RangeTo usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) -> i.Core_models.Ops.Range.f_end <=. (Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize)); f_index_post = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) (out: t_Slice v_T) -> true); f_index = fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) -> Rust_primitives.Slice.array_slice #v_T v_N self (mk_usize 0) i.Core_models.Ops.Range.f_end } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_28 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_RangeFrom usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) -> i.Core_models.Ops.Range.f_start <=. 
(Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize)); f_index_post = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) (out: t_Slice v_T) -> true); f_index = fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) -> Rust_primitives.Slice.array_slice #v_T v_N self i.Core_models.Ops.Range.f_start v_N } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_29 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) Core_models.Ops.Range.t_RangeFull = { f_Output = t_Slice v_T; f_index_pre = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) -> true); f_index_post = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) (out: t_Slice v_T) -> true); f_index = fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) -> Rust_primitives.Slice.array_slice #v_T v_N self (mk_usize 0) v_N } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Borrow.fsti ================================================ module Core_models.Borrow #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Borrow (v_Self: Type0) (v_Borrowed: Type0) = { f_borrow_pre:v_Self -> Type0; f_borrow_post:v_Self -> v_Borrowed -> Type0; f_borrow:x0: v_Self -> Prims.Pure v_Borrowed (f_borrow_pre x0) (fun result -> f_borrow_post x0 result) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Bundle.fst ================================================ module Core_models.Bundle #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Option (v_T: Type0) = | Option_Some : v_T -> t_Option v_T | Option_None : t_Option v_T let impl__is_some_and (#v_T #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == bool}) (self: t_Option v_T) (f: v_F) : bool = match 
self <: t_Option v_T with | Option_None -> false | Option_Some x -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x let impl__is_none_or (#v_T #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == bool}) (self: t_Option v_T) (f: v_F) : bool = match self <: t_Option v_T with | Option_None -> true | Option_Some x -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x let impl__as_ref (#v_T: Type0) (self: t_Option v_T) : t_Option v_T = match self <: t_Option v_T with | Option_Some x -> Option_Some x <: t_Option v_T | Option_None -> Option_None <: t_Option v_T let impl__unwrap_or (#v_T: Type0) (self: t_Option v_T) (v_default: v_T) : v_T = match self <: t_Option v_T with | Option_Some x -> x | Option_None -> v_default let impl__unwrap_or_else (#v_T #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F Prims.unit) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_T}) (self: t_Option v_T) (f: v_F) : v_T = match self <: t_Option v_T with | Option_Some x -> x | Option_None -> Core_models.Ops.Function.f_call_once #v_F #Prims.unit #FStar.Tactics.Typeclasses.solve f (() <: Prims.unit) let impl__unwrap_or_default (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Default.t_Default v_T) (self: t_Option v_T) : v_T = match self <: t_Option v_T with | Option_Some x -> x | Option_None -> Core_models.Default.f_default #v_T #FStar.Tactics.Typeclasses.solve () let impl__map (#v_T #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (self: t_Option v_T) (f: v_F) : t_Option v_U = match self <: t_Option v_T with | Option_Some x -> Option_Some (Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x) <: t_Option v_U | 
Option_None -> Option_None <: t_Option v_U let impl__map_or (#v_T #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (self: t_Option v_T) (v_default: v_U) (f: v_F) : v_U = match self <: t_Option v_T with | Option_Some t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t | Option_None -> v_default let impl__map_or_else (#v_T #v_U #v_D #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_D Prims.unit) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (#_: unit{i1.Core_models.Ops.Function.f_Output == v_U}) (self: t_Option v_T) (v_default: v_D) (f: v_F) : v_U = match self <: t_Option v_T with | Option_Some t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t | Option_None -> Core_models.Ops.Function.f_call_once #v_D #Prims.unit #FStar.Tactics.Typeclasses.solve v_default (() <: Prims.unit) let impl__map_or_default (#v_T #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Default.t_Default v_U) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (self: t_Option v_T) (f: v_F) : v_U = match self <: t_Option v_T with | Option_Some t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t | Option_None -> Core_models.Default.f_default #v_U #FStar.Tactics.Typeclasses.solve () let impl__and_then (#v_T #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == t_Option v_U}) (self: t_Option v_T) (f: v_F) : t_Option v_U = match self <: t_Option v_T with | Option_Some x -> Core_models.Ops.Function.f_call_once #v_F 
// (continuation of impl__and_then: apply `f` to the Some payload; None propagates)
#v_T #FStar.Tactics.Typeclasses.solve f x | Option_None -> Option_None <: t_Option v_U
// Model of Option::take: returns the pair (new self = None, moved-out old value) since
// this model is purely functional and `&mut self` becomes an explicit in/out pair.
let impl__take (#v_T: Type0) (self: t_Option v_T) : (t_Option v_T & t_Option v_T) = (Option_None <: t_Option v_T), self <: (t_Option v_T & t_Option v_T)
// Model of Option::is_some, with a postcondition: a true result implies `self` matches Option_Some.
let impl__is_some (#v_T: Type0) (self: t_Option v_T) : Prims.Pure bool Prims.l_True (ensures fun res -> let res:bool = res in b2t res ==> Option_Some? self) = match self <: t_Option v_T with | Option_Some _ -> true | _ -> false
// Model of Option::is_none: negation of is_some.
let impl__is_none (#v_T: Type0) (self: t_Option v_T) : bool = (impl__is_some #v_T self <: bool) =. false
// Model of Option::expect: the precondition `impl__is_some self` makes the panic arm unreachable
// in verified code; `e_msg` is the (unused here) panic message.
let impl__expect (#v_T: Type0) (self: t_Option v_T) (e_msg: string) : Prims.Pure v_T (requires impl__is_some #v_T self) (fun _ -> Prims.l_True) = match self <: t_Option v_T with | Option_Some v_val -> v_val | Option_None -> Core_models.Panicking.Internal.panic #v_T ()
// Model of Option::unwrap: same precondition discipline as impl__expect.
let impl__unwrap (#v_T: Type0) (self: t_Option v_T) : Prims.Pure v_T (requires impl__is_some #v_T self) (fun _ -> Prims.l_True) = match self <: t_Option v_T with | Option_Some v_val -> v_val | Option_None -> Core_models.Panicking.Internal.panic #v_T ()
// Model of Rust's core::result::Result as a two-constructor inductive.
type t_Result (v_T: Type0) (v_E: Type0) = | Result_Ok : v_T -> t_Result v_T v_E | Result_Err : v_E -> t_Result v_T v_E
// Model of Option::ok_or: Some -> Ok, None -> Err with the eager error value.
let impl__ok_or (#v_T #v_E: Type0) (self: t_Option v_T) (err: v_E) : t_Result v_T v_E = match self <: t_Option v_T with | Option_Some v -> Result_Ok v <: t_Result v_T v_E | Option_None -> Result_Err err <: t_Result v_T v_E
// Model of Option::ok_or_else: the error value is computed lazily by the nullary FnOnce `err`.
let impl__ok_or_else (#v_T #v_E #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F Prims.unit) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_E}) (self: t_Option v_T) (err: v_F) : t_Result v_T v_E = match self <: t_Option v_T with | Option_Some v -> Result_Ok v <: t_Result v_T v_E | Option_None -> Result_Err (Core_models.Ops.Function.f_call_once #v_F #Prims.unit #FStar.Tactics.Typeclasses.solve err (() <: Prims.unit)) <: t_Result v_T v_E
// Model of Result::unwrap_or (name disambiguated from Option's); continues on the next line.
let impl__unwrap_or__from__result (#v_T #v_E: Type0) (self: t_Result
// (continuation of impl__unwrap_or__from__result: Ok payload, or the eager default on Err)
v_T v_E) (v_default: v_T) : v_T = match self <: t_Result v_T v_E with | Result_Ok t -> t | Result_Err _ -> v_default
// Model of Result::map: applies `op` under Ok; Err passes through untouched.
let impl__map__from__result (#v_T #v_E #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (self: t_Result v_T v_E) (op: v_F) : t_Result v_U v_E = match self <: t_Result v_T v_E with | Result_Ok t -> Result_Ok (Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve op t) <: t_Result v_U v_E | Result_Err e -> Result_Err e <: t_Result v_U v_E
// Model of Result::map_or: `f` on the Ok payload, or the eager default on Err.
let impl__map_or__from__result (#v_T #v_E #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (self: t_Result v_T v_E) (v_default: v_U) (f: v_F) : v_U = match self <: t_Result v_T v_E with | Result_Ok t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t | Result_Err e_e -> v_default
// Model of Result::map_or_else: `f` on Ok, or the error-consuming FnOnce `v_default` on Err.
let impl__map_or_else__from__result (#v_T #v_E #v_U #v_D #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_D v_E) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U}) (#_: unit{i1.Core_models.Ops.Function.f_Output == v_U}) (self: t_Result v_T v_E) (v_default: v_D) (f: v_F) : v_U = match self <: t_Result v_T v_E with | Result_Ok t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t | Result_Err e -> Core_models.Ops.Function.f_call_once #v_D #v_E #FStar.Tactics.Typeclasses.solve v_default e
// Model of Result::map_err: `op` maps the error channel v_E -> v_F; continues on the next line.
let impl__map_err (#v_T #v_E #v_F #v_O: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_O v_E) (#_: unit{i0.Core_models.Ops.Function.f_Output == v_F}) (self: t_Result v_T v_E) (op: v_O) : t_Result v_T v_F = match self <: t_Result v_T v_E with | Result_Ok t ->
// (continuation of impl__map_err: Ok passes through; Err is rewrapped through `op`)
Result_Ok t <: t_Result v_T v_F | Result_Err e -> Result_Err (Core_models.Ops.Function.f_call_once #v_O #v_E #FStar.Tactics.Typeclasses.solve op e) <: t_Result v_T v_F
// Model of Result::is_ok.
let impl__is_ok (#v_T #v_E: Type0) (self: t_Result v_T v_E) : bool = match self <: t_Result v_T v_E with | Result_Ok _ -> true | _ -> false
// Model of Result::and_then (monadic bind over the Ok channel).
let impl__and_then__from__result (#v_T #v_E #v_U #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T) (#_: unit{i0.Core_models.Ops.Function.f_Output == t_Result v_U v_E}) (self: t_Result v_T v_E) (op: v_F) : t_Result v_U v_E = match self <: t_Result v_T v_E with | Result_Ok t -> Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve op t | Result_Err e -> Result_Err e <: t_Result v_U v_E
// Model of Result::ok: Ok -> Some, Err -> None (error value dropped).
let impl__ok (#v_T #v_E: Type0) (self: t_Result v_T v_E) : t_Option v_T = match self <: t_Result v_T v_E with | Result_Ok x -> Option_Some x <: t_Option v_T | Result_Err _ -> Option_None <: t_Option v_T
// Model of Result::unwrap: precondition `impl__is_ok self` makes the panic arm unreachable.
let impl__unwrap__from__result (#v_T #v_E: Type0) (self: t_Result v_T v_E) : Prims.Pure v_T (requires impl__is_ok #v_T #v_E self) (fun _ -> Prims.l_True) = match self <: t_Result v_T v_E with | Result_Ok t -> t | Result_Err _ -> Core_models.Panicking.Internal.panic #v_T ()
// Model of Result::expect: same precondition discipline; `e_msg` is the unused panic message.
let impl__expect__from__result (#v_T #v_E: Type0) (self: t_Result v_T v_E) (e_msg: string) : Prims.Pure v_T (requires impl__is_ok #v_T #v_E self) (fun _ -> Prims.l_True) = match self <: t_Result v_T v_E with | Result_Ok t -> t | Result_Err _ -> Core_models.Panicking.Internal.panic #v_T ()
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Clone.fst ================================================ module Core_models.Clone #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
// Model of core::clone::Clone. The refinement on f_clone (`r:self {x == r}`) bakes in that
// a clone is propositionally equal to its source in this pure model.
class t_Clone self = { f_clone_pre: self -> Type0; f_clone_post: self -> self -> Type0; f_clone: x:self -> r:self {x == r} }
// Blanket Clone instance: in this model every type is "cloned" by the identity; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T: Type0) :
// (continuation of the blanket Clone instance: trivial pre/post, clone = identity)
t_Clone v_T = { f_clone_pre = (fun (self: v_T) -> true); f_clone_post = (fun (self: v_T) (out: v_T) -> true); f_clone = fun (self: v_T) -> self }
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Cmp.fst ================================================ module Core_models.Cmp #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
// Model of core::cmp::PartialEq. Each method carries _pre/_post predicates; the refinement
// `Type0{true ==> pred}` forces the precondition to be trivially satisfiable.
class t_PartialEq (v_Self: Type0) (v_Rhs: Type0) = { f_eq_pre:self_: v_Self -> other: v_Rhs -> pred: Type0{true ==> pred}; f_eq_post:v_Self -> v_Rhs -> bool -> Type0; f_eq:x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_eq_pre x0 x1) (fun result -> f_eq_post x0 x1 result) }
// Model of core::cmp::Eq: a marker class carrying only the PartialEq superclass witness.
class t_Eq (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_PartialEq v_Self v_Self }
// Superclass projector registered as an instance so Eq dictionaries yield PartialEq.
[@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Eq v_Self|} -> i._super_i0
// Model of core::cmp::Ordering.
type t_Ordering = | Ordering_Less : t_Ordering | Ordering_Equal : t_Ordering | Ordering_Greater : t_Ordering
// Discriminant values of the Ordering variants (Less = -1, Equal = 0, Greater = 1).
let anon_const_Ordering_Less__anon_const_0: isize = mk_isize (-1)
let anon_const_Ordering_Equal__anon_const_0: isize = mk_isize 0
let anon_const_Ordering_Greater__anon_const_0: isize = mk_isize 1
// Cast of an Ordering to its isize representation (the `as` cast in Rust).
let t_Ordering_cast_to_repr (x: t_Ordering) : isize = match x <: t_Ordering with | Ordering_Less -> anon_const_Ordering_Less__anon_const_0 | Ordering_Equal -> anon_const_Ordering_Equal__anon_const_0 | Ordering_Greater -> anon_const_Ordering_Greater__anon_const_0
// Model of core::cmp::PartialOrd: PartialEq superclass plus partial_cmp returning Option Ordering.
class t_PartialOrd (v_Self: Type0) (v_Rhs: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_PartialEq v_Self v_Rhs; f_partial_cmp_pre:self_: v_Self -> other: v_Rhs -> pred: Type0{true ==> pred}; f_partial_cmp_post:v_Self -> v_Rhs -> Core_models.Option.t_Option t_Ordering -> Type0; f_partial_cmp:x0: v_Self -> x1: v_Rhs -> Prims.Pure (Core_models.Option.t_Option t_Ordering) (f_partial_cmp_pre x0 x1) (fun result -> f_partial_cmp_post x0 x1 result) }
// Superclass projector for PartialOrd -> PartialEq; continues on the next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0)
// (continuation of the PartialOrd -> PartialEq superclass projector)
(v_Rhs:Type0) {|i: t_PartialOrd v_Self v_Rhs|} -> i._super_i0
// Helper class modeling the `!=` operator (Rust has no standalone Neq trait; hax introduces one).
class t_Neq (v_Self: Type0) (v_Rhs: Type0) = { f_neq_pre:self_: v_Self -> y: v_Rhs -> pred: Type0{true ==> pred}; f_neq_post:v_Self -> v_Rhs -> bool -> Type0; f_neq:x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_neq_pre x0 x1) (fun result -> f_neq_post x0 x1 result) }
// Blanket Neq instance derived from PartialEq: neq is eq compared against false.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialEq v_T v_T) : t_Neq v_T v_T = { f_neq_pre = (fun (self: v_T) (y: v_T) -> true); f_neq_post = (fun (self: v_T) (y: v_T) (out: bool) -> true); f_neq = fun (self: v_T) (y: v_T) -> (f_eq #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: bool) =. false }
// Class bundling PartialOrd's provided methods (lt/le/gt/ge); each takes the PartialOrd
// dictionary `i1` as an instance argument. The f_ge field is cut off and continues next line.
class t_PartialOrdDefaults (v_Self: Type0) (v_Rhs: Type0) = { f_lt_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs -> pred: Type0{true ==> pred}; f_lt_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0; f_lt:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_lt_pre #i1 x0 x1) (fun result -> f_lt_post #i1 x0 x1 result); f_le_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs -> pred: Type0{true ==> pred}; f_le_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0; f_le:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_le_pre #i1 x0 x1) (fun result -> f_le_post #i1 x0 x1 result); f_gt_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs -> pred: Type0{true ==> pred}; f_gt_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0; f_gt:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_gt_pre #i1 x0 x1) (fun result -> f_gt_post #i1 x0 x1 result); f_ge_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs -> pred: Type0{true ==> pred}; f_ge_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0; f_ge:{| i1: t_PartialOrd
// (continuation of t_PartialOrdDefaults: the f_ge signature, closing the class record)
v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs -> Prims.Pure bool (f_ge_pre #i1 x0 x1) (fun result -> f_ge_post #i1 x0 x1 result) }
// Blanket instance providing PartialOrd's default comparison operators: each of lt/le/gt/ge
// is defined by pattern-matching on partial_cmp (e.g. le accepts Less or Equal).
// The record is cut at `f_ge_pre = (fun` and completes on the next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialOrd v_T v_T) : t_PartialOrdDefaults v_T v_T = { f_lt_pre = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> true); f_lt_post = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) (out: bool) -> true); f_lt = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> match f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> true | _ -> false); f_le_pre = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> true); f_le_post = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) (out: bool) -> true); f_le = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> match f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) | Core_models.Option.Option_Some (Ordering_Equal ) -> true | _ -> false); f_gt_pre = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> true); f_gt_post = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) (out: bool) -> true); f_gt = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> match f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Greater ) -> true | _ -> false); f_ge_pre = (fun
// (continuation of impl_1: the f_ge_pre/f_ge_post/f_ge fields — ge accepts Greater or Equal)
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> true); f_ge_post = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) (out: bool) -> true); f_ge = fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) -> match f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Greater ) | Core_models.Option.Option_Some (Ordering_Equal ) -> true | _ -> false }
// Model of core::cmp::Ord: Eq and PartialOrd superclasses plus total cmp returning an Ordering.
class t_Ord (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Eq v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_i1:t_PartialOrd v_Self v_Self; f_cmp_pre:self_: v_Self -> other: v_Self -> pred: Type0{true ==> pred}; f_cmp_post:v_Self -> v_Self -> t_Ordering -> Type0; f_cmp:x0: v_Self -> x1: v_Self -> Prims.Pure t_Ordering (f_cmp_pre x0 x1) (fun result -> f_cmp_post x0 x1 result) }
// Superclass projectors: Ord -> Eq and Ord -> PartialOrd.
[@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Ord v_Self|} -> i._super_i0
[@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Ord v_Self|} -> i._super_i1
// Model of core::cmp::max: v1 when cmp says Greater, otherwise v2 (so max is right-biased on Equal).
let max (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T) (v1 v2: v_T) : v_T = match f_cmp #v_T #FStar.Tactics.Typeclasses.solve v1 v2 <: t_Ordering with | Ordering_Greater -> v1 | _ -> v2
// Model of core::cmp::min: mirror of max.
let min (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T) (v1 v2: v_T) : v_T = match f_cmp #v_T #FStar.Tactics.Typeclasses.solve v1 v2 <: t_Ordering with | Ordering_Greater -> v2 | _ -> v1
// Model of core::cmp::Reverse, the order-inverting newtype wrapper.
type t_Reverse (v_T: Type0) = | Reverse : v_T -> t_Reverse v_T
// PartialEq for Reverse: delegates to the inner PartialEq with arguments swapped; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialEq v_T v_T) : t_PartialEq (t_Reverse v_T) (t_Reverse v_T) = { f_eq_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true); f_eq_post = (fun (self: t_Reverse v_T) (other: t_Reverse
// (continuation of impl_3: f_eq compares the wrapped values with operands swapped, `other._0 self._0`)
v_T) (out: bool) -> true); f_eq = fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> f_eq #v_T #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0 }
// PartialOrd for Reverse: partial_cmp with swapped operands, which inverts the ordering.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialOrd v_T v_T) : t_PartialOrd (t_Reverse v_T) (t_Reverse v_T) = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true); f_partial_cmp_post = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) (out: Core_models.Option.t_Option t_Ordering) -> true); f_partial_cmp = fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0 }
// Eq for Reverse: marker instance, superclass resolved by the tactic.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Eq v_T) : t_Eq (t_Reverse v_T) = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for Reverse: cmp with swapped operands.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_5 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T) : t_Ord (t_Reverse v_T) = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true); f_cmp_post = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) (out: t_Ordering) -> true); f_cmp = fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> f_cmp #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0 }
// PartialEq for u8: machine-integer equality via the `=.` operator; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6: t_PartialEq u8 u8 = { f_eq_pre = (fun (self: u8) (other: u8) -> true); f_eq_post = (fun (self: u8) (other: u8) (out: bool) -> true); f_eq = fun (self: u8) (other: u8) -> self =.
// (continuation of impl_6, PartialEq for u8)
other }
// PartialOrd for u8. The postcondition ties each Ordering result to the matching `<.`/`=.`/`>.`
// machine-integer comparison; partial_cmp always returns Some since u8 is totally ordered.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_30: t_PartialOrd u8 u8 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: u8) (other: u8) -> true); f_partial_cmp_post = (fun (self_: u8) (other: u8) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: u8) (other: u8) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for u8 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_u8: t_Eq u8 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for u8: total cmp with a postcondition pinning the Ordering to the comparison operators.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_u8: t_Ord u8 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: u8) (other: u8) -> true); f_cmp_post = (fun (self_: u8) (other: u8) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: u8) (other: u8) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for i8 (same shape as u8); continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_8: t_PartialEq i8 i8 = { f_eq_pre = (fun (self: i8) (other: i8) -> true); f_eq_post = (fun (self: i8) (other: i8) (out: bool) -> true); f_eq = fun (self: i8) (other: i8) -> self =.
// (continuation of impl_8, PartialEq for i8)
other }
// PartialOrd for i8: same template as the u8 instance (postcondition ties Ordering to <./=./>.).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_32: t_PartialOrd i8 i8 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: i8) (other: i8) -> true); f_partial_cmp_post = (fun (self_: i8) (other: i8) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: i8) (other: i8) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for i8 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_i8: t_Eq i8 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for i8.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_i8: t_Ord i8 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: i8) (other: i8) -> true); f_cmp_post = (fun (self_: i8) (other: i8) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: i8) (other: i8) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for u16; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_10: t_PartialEq u16 u16 = { f_eq_pre = (fun (self: u16) (other: u16) -> true); f_eq_post = (fun (self: u16) (other: u16) (out: bool) -> true); f_eq = fun (self: u16) (other: u16) -> self =.
// (continuation of impl_10, PartialEq for u16)
other }
// PartialOrd for u16: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_34: t_PartialOrd u16 u16 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: u16) (other: u16) -> true); f_partial_cmp_post = (fun (self_: u16) (other: u16) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: u16) (other: u16) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for u16 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_u16: t_Eq u16 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for u16.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_u16: t_Ord u16 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: u16) (other: u16) -> true); f_cmp_post = (fun (self_: u16) (other: u16) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: u16) (other: u16) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for i16; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_12: t_PartialEq i16 i16 = { f_eq_pre = (fun (self: i16) (other: i16) -> true); f_eq_post = (fun (self: i16) (other: i16) (out: bool) -> true); f_eq = fun (self: i16) (other: i16) -> self =.
// (continuation of impl_12, PartialEq for i16)
other }
// PartialOrd for i16: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_36: t_PartialOrd i16 i16 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: i16) (other: i16) -> true); f_partial_cmp_post = (fun (self_: i16) (other: i16) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: i16) (other: i16) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for i16 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_i16: t_Eq i16 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for i16.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_i16: t_Ord i16 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: i16) (other: i16) -> true); f_cmp_post = (fun (self_: i16) (other: i16) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: i16) (other: i16) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for u32; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_14: t_PartialEq u32 u32 = { f_eq_pre = (fun (self: u32) (other: u32) -> true); f_eq_post = (fun (self: u32) (other: u32) (out: bool) -> true); f_eq = fun (self: u32) (other: u32) -> self =.
// (continuation of impl_14, PartialEq for u32)
other }
// PartialOrd for u32: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_38: t_PartialOrd u32 u32 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: u32) (other: u32) -> true); f_partial_cmp_post = (fun (self_: u32) (other: u32) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: u32) (other: u32) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for u32 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_u32: t_Eq u32 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for u32.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_u32: t_Ord u32 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: u32) (other: u32) -> true); f_cmp_post = (fun (self_: u32) (other: u32) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: u32) (other: u32) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for i32; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_16: t_PartialEq i32 i32 = { f_eq_pre = (fun (self: i32) (other: i32) -> true); f_eq_post = (fun (self: i32) (other: i32) (out: bool) -> true); f_eq = fun (self: i32) (other: i32) -> self =.
// (continuation of impl_16, PartialEq for i32)
other }
// PartialOrd for i32: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_40: t_PartialOrd i32 i32 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: i32) (other: i32) -> true); f_partial_cmp_post = (fun (self_: i32) (other: i32) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: i32) (other: i32) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for i32 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_i32: t_Eq i32 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for i32.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_i32: t_Ord i32 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: i32) (other: i32) -> true); f_cmp_post = (fun (self_: i32) (other: i32) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: i32) (other: i32) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for u64; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_18: t_PartialEq u64 u64 = { f_eq_pre = (fun (self: u64) (other: u64) -> true); f_eq_post = (fun (self: u64) (other: u64) (out: bool) -> true); f_eq = fun (self: u64) (other: u64) -> self =.
// (continuation of impl_18, PartialEq for u64)
other }
// PartialOrd for u64: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_42: t_PartialOrd u64 u64 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: u64) (other: u64) -> true); f_partial_cmp_post = (fun (self_: u64) (other: u64) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: u64) (other: u64) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for u64 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_u64: t_Eq u64 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for u64.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_u64: t_Ord u64 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: u64) (other: u64) -> true); f_cmp_post = (fun (self_: u64) (other: u64) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: u64) (other: u64) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for i64; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_20: t_PartialEq i64 i64 = { f_eq_pre = (fun (self: i64) (other: i64) -> true); f_eq_post = (fun (self: i64) (other: i64) (out: bool) -> true); f_eq = fun (self: i64) (other: i64) -> self =.
// (continuation of impl_20, PartialEq for i64)
other }
// PartialOrd for i64: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_44: t_PartialOrd i64 i64 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: i64) (other: i64) -> true); f_partial_cmp_post = (fun (self_: i64) (other: i64) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: i64) (other: i64) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for i64 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_i64: t_Eq i64 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for i64.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_i64: t_Ord i64 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: i64) (other: i64) -> true); f_cmp_post = (fun (self_: i64) (other: i64) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: i64) (other: i64) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering }
// PartialEq for u128; continues next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_22: t_PartialEq u128 u128 = { f_eq_pre = (fun (self: u128) (other: u128) -> true); f_eq_post = (fun (self: u128) (other: u128) (out: bool) -> true); f_eq = fun (self: u128) (other: u128) -> self =.
// (continuation of impl_22, PartialEq for u128)
other }
// PartialOrd for u128: same template as the u8 instance.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_46: t_PartialOrd u128 u128 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: u128) (other: u128) -> true); f_partial_cmp_post = (fun (self_: u128) (other: u128) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: u128) (other: u128) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering }
// Eq for u128 (marker).
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_u128: t_Eq u128 = { _super_i0 = FStar.Tactics.Typeclasses.solve }
// Ord for u128: f_cmp is cut mid-conditional and completes on the next line.
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_u128: t_Ord u128 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: u128) (other: u128) -> true); f_cmp_post = (fun (self_: u128) (other: u128) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: u128) (other: u128) -> if self <. other then Ordering_Less <: t_Ordering else if self >.
other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_24: t_PartialEq i128 i128 = { f_eq_pre = (fun (self: i128) (other: i128) -> true); f_eq_post = (fun (self: i128) (other: i128) (out: bool) -> true); f_eq = fun (self: i128) (other: i128) -> self =. other } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_48: t_PartialOrd i128 i128 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: i128) (other: i128) -> true); f_partial_cmp_post = (fun (self_: i128) (other: i128) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: i128) (other: i128) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_i128: t_Eq i128 = { _super_i0 = FStar.Tactics.Typeclasses.solve } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_i128: t_Ord i128 = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: i128) (other: i128) -> true); f_cmp_post = (fun (self_: i128) (other: i128) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: i128) (other: i128) -> if self <. 
other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_26: t_PartialEq usize usize = { f_eq_pre = (fun (self: usize) (other: usize) -> true); f_eq_post = (fun (self: usize) (other: usize) (out: bool) -> true); f_eq = fun (self: usize) (other: usize) -> self =. other } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_50: t_PartialOrd usize usize = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: usize) (other: usize) -> true); f_partial_cmp_post = (fun (self_: usize) (other: usize) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: usize) (other: usize) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_usize: t_Eq usize = { _super_i0 = FStar.Tactics.Typeclasses.solve } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_usize: t_Ord usize = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: usize) (other: usize) -> true); f_cmp_post = (fun (self_: usize) (other: usize) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. 
other); f_cmp = fun (self: usize) (other: usize) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_28: t_PartialEq isize isize = { f_eq_pre = (fun (self: isize) (other: isize) -> true); f_eq_post = (fun (self: isize) (other: isize) (out: bool) -> true); f_eq = fun (self: isize) (other: isize) -> self =. other } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_52: t_PartialOrd isize isize = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_partial_cmp_pre = (fun (self: isize) (other: isize) -> true); f_partial_cmp_post = (fun (self_: isize) (other: isize) (res: Core_models.Option.t_Option t_Ordering) -> match res <: Core_models.Option.t_Option t_Ordering with | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other | Core_models.Option.Option_None -> false); f_partial_cmp = fun (self: isize) (other: isize) -> if self <. other then Core_models.Option.Option_Some (Ordering_Less <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else if self >. other then Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering) <: Core_models.Option.t_Option t_Ordering else Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering) <: Core_models.Option.t_Option t_Ordering } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Eq_for_isize: t_Eq isize = { _super_i0 = FStar.Tactics.Typeclasses.solve } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Ord_for_isize: t_Ord isize = { _super_i0 = FStar.Tactics.Typeclasses.solve; _super_i1 = FStar.Tactics.Typeclasses.solve; f_cmp_pre = (fun (self: isize) (other: isize) -> true); f_cmp_post = (fun (self_: isize) (other: isize) (res: t_Ordering) -> match res <: t_Ordering with | Ordering_Less -> self_ <. 
other | Ordering_Equal -> self_ =. other | Ordering_Greater -> self_ >. other); f_cmp = fun (self: isize) (other: isize) -> if self <. other then Ordering_Less <: t_Ordering else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Convert.fst ================================================ module Core_models.Convert #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Into (v_Self: Type0) (v_T: Type0) = { f_into_pre:self_: v_Self -> pred: Type0{true ==> pred}; f_into_post:v_Self -> v_T -> Type0; f_into:x0: v_Self -> Prims.Pure v_T (f_into_pre x0) (fun result -> f_into_post x0 result) } class t_From (v_Self: Type0) (v_T: Type0) = { f_from_pre:x: v_T -> pred: Type0{true ==> pred}; f_from_post:v_T -> v_Self -> Type0; f_from:x0: v_T -> Prims.Pure v_Self (f_from_pre x0) (fun result -> f_from_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_From v_U v_T) : t_Into v_T v_U = { f_into_pre = (fun (self: v_T) -> true); f_into_post = (fun (self: v_T) (out: v_U) -> true); f_into = fun (self: v_T) -> f_from #v_U #v_T #FStar.Tactics.Typeclasses.solve self } type t_Infallible = | Infallible : t_Infallible [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T: Type0) : t_From v_T v_T = { f_from_pre = (fun (x: v_T) -> true); f_from_post = (fun (x: v_T) (out: v_T) -> true); f_from = fun (x: v_T) -> x } class t_AsRef (v_Self: Type0) (v_T: Type0) = { f_as_ref_pre:self_: v_Self -> pred: Type0{true ==> pred}; f_as_ref_post:v_Self -> v_T -> Type0; f_as_ref:x0: v_Self -> Prims.Pure v_T (f_as_ref_pre x0) (fun result -> f_as_ref_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_5 (#v_T: Type0) : t_AsRef v_T v_T = { f_as_ref_pre = (fun (self: v_T) -> true); f_as_ref_post = (fun (self: v_T) (out: v_T) -> true); 
f_as_ref = fun (self: v_T) -> self } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6: t_From u16 u8 = { f_from_pre = (fun (x: u8) -> true); f_from_post = (fun (x: u8) (out: u16) -> true); f_from = fun (x: u8) -> cast (x <: u8) <: u16 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_7: t_From u32 u8 = { f_from_pre = (fun (x: u8) -> true); f_from_post = (fun (x: u8) (out: u32) -> true); f_from = fun (x: u8) -> cast (x <: u8) <: u32 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_8: t_From u32 u16 = { f_from_pre = (fun (x: u16) -> true); f_from_post = (fun (x: u16) (out: u32) -> true); f_from = fun (x: u16) -> cast (x <: u16) <: u32 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_9: t_From u64 u8 = { f_from_pre = (fun (x: u8) -> true); f_from_post = (fun (x: u8) (out: u64) -> true); f_from = fun (x: u8) -> cast (x <: u8) <: u64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_10: t_From u64 u16 = { f_from_pre = (fun (x: u16) -> true); f_from_post = (fun (x: u16) (out: u64) -> true); f_from = fun (x: u16) -> cast (x <: u16) <: u64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_11: t_From u64 u32 = { f_from_pre = (fun (x: u32) -> true); f_from_post = (fun (x: u32) (out: u64) -> true); f_from = fun (x: u32) -> cast (x <: u32) <: u64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_12: t_From u128 u8 = { f_from_pre = (fun (x: u8) -> true); f_from_post = (fun (x: u8) (out: u128) -> true); f_from = fun (x: u8) -> cast (x <: u8) <: u128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_13: t_From u128 u16 = { f_from_pre = (fun (x: u16) -> true); f_from_post = (fun (x: u16) (out: u128) -> true); f_from = fun (x: u16) -> cast (x <: u16) <: u128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_14: t_From u128 u32 = { f_from_pre = (fun (x: u32) -> true); f_from_post = (fun (x: u32) (out: u128) -> true); f_from = fun (x: u32) -> cast (x <: u32) <: u128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_15: t_From u128 u64 = { f_from_pre = 
(fun (x: u64) -> true); f_from_post = (fun (x: u64) (out: u128) -> true); f_from = fun (x: u64) -> cast (x <: u64) <: u128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_16: t_From u128 usize = { f_from_pre = (fun (x: usize) -> true); f_from_post = (fun (x: usize) (out: u128) -> true); f_from = fun (x: usize) -> cast (x <: usize) <: u128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_17: t_From usize u8 = { f_from_pre = (fun (x: u8) -> true); f_from_post = (fun (x: u8) (out: usize) -> true); f_from = fun (x: u8) -> cast (x <: u8) <: usize } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_18: t_From usize u16 = { f_from_pre = (fun (x: u16) -> true); f_from_post = (fun (x: u16) (out: usize) -> true); f_from = fun (x: u16) -> cast (x <: u16) <: usize } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_19: t_From i16 i8 = { f_from_pre = (fun (x: i8) -> true); f_from_post = (fun (x: i8) (out: i16) -> true); f_from = fun (x: i8) -> cast (x <: i8) <: i16 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_20: t_From i32 i8 = { f_from_pre = (fun (x: i8) -> true); f_from_post = (fun (x: i8) (out: i32) -> true); f_from = fun (x: i8) -> cast (x <: i8) <: i32 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_21: t_From i32 i16 = { f_from_pre = (fun (x: i16) -> true); f_from_post = (fun (x: i16) (out: i32) -> true); f_from = fun (x: i16) -> cast (x <: i16) <: i32 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_22: t_From i64 i8 = { f_from_pre = (fun (x: i8) -> true); f_from_post = (fun (x: i8) (out: i64) -> true); f_from = fun (x: i8) -> cast (x <: i8) <: i64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_23: t_From i64 i16 = { f_from_pre = (fun (x: i16) -> true); f_from_post = (fun (x: i16) (out: i64) -> true); f_from = fun (x: i16) -> cast (x <: i16) <: i64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_24: t_From i64 i32 = { f_from_pre = (fun (x: i32) -> true); f_from_post = (fun (x: i32) (out: i64) -> true); f_from = fun (x: i32) -> cast 
(x <: i32) <: i64 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_25: t_From i128 i8 = { f_from_pre = (fun (x: i8) -> true); f_from_post = (fun (x: i8) (out: i128) -> true); f_from = fun (x: i8) -> cast (x <: i8) <: i128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_26: t_From i128 i16 = { f_from_pre = (fun (x: i16) -> true); f_from_post = (fun (x: i16) (out: i128) -> true); f_from = fun (x: i16) -> cast (x <: i16) <: i128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_27: t_From i128 i32 = { f_from_pre = (fun (x: i32) -> true); f_from_post = (fun (x: i32) (out: i128) -> true); f_from = fun (x: i32) -> cast (x <: i32) <: i128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_28: t_From i128 i64 = { f_from_pre = (fun (x: i64) -> true); f_from_post = (fun (x: i64) (out: i128) -> true); f_from = fun (x: i64) -> cast (x <: i64) <: i128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_29: t_From i128 isize = { f_from_pre = (fun (x: isize) -> true); f_from_post = (fun (x: isize) (out: i128) -> true); f_from = fun (x: isize) -> cast (x <: isize) <: i128 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_30: t_From isize i8 = { f_from_pre = (fun (x: i8) -> true); f_from_post = (fun (x: i8) (out: isize) -> true); f_from = fun (x: i8) -> cast (x <: i8) <: isize } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_31: t_From isize i16 = { f_from_pre = (fun (x: i16) -> true); f_from_post = (fun (x: i16) (out: isize) -> true); f_from = fun (x: i16) -> cast (x <: i16) <: isize } class t_TryInto (v_Self: Type0) (v_T: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Error:Type0; f_try_into_pre:self_: v_Self -> pred: Type0{true ==> pred}; f_try_into_post:v_Self -> Core_models.Result.t_Result v_T f_Error -> Type0; f_try_into:x0: v_Self -> Prims.Pure (Core_models.Result.t_Result v_T f_Error) (f_try_into_pre x0) (fun result -> f_try_into_post x0 result) } class t_TryFrom (v_Self: Type0) (v_T: Type0) = { [@@@ 
FStar.Tactics.Typeclasses.no_method]f_Error:Type0; f_try_from_pre:x: v_T -> pred: Type0{true ==> pred}; f_try_from_post:v_T -> Core_models.Result.t_Result v_Self f_Error -> Type0; f_try_from:x0: v_T -> Prims.Pure (Core_models.Result.t_Result v_Self f_Error) (f_try_from_pre x0) (fun result -> f_try_from_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_From v_U v_T) : t_TryFrom v_U v_T = { f_Error = t_Infallible; f_try_from_pre = (fun (x: v_T) -> true); f_try_from_post = (fun (x: v_T) (out: Core_models.Result.t_Result v_U t_Infallible) -> true); f_try_from = fun (x: v_T) -> Core_models.Result.Result_Ok (f_from #v_U #v_T #FStar.Tactics.Typeclasses.solve x) <: Core_models.Result.t_Result v_U t_Infallible } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2 (#v_T: Type0) (v_N: usize) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) : t_TryFrom (t_Array v_T v_N) (t_Slice v_T) = { f_Error = Core_models.Array.t_TryFromSliceError; f_try_from_pre = (fun (x: t_Slice v_T) -> true); f_try_from_post = (fun (x: t_Slice v_T) (out: Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError) -> true); f_try_from = fun (x: t_Slice v_T) -> if (Core_models.Slice.impl__len #v_T x <: usize) =. 
v_N then Core_models.Result.Result_Ok (Rust_primitives.Slice.array_from_fn #v_T v_N #(usize -> v_T) (fun i -> let i:usize = i in Rust_primitives.Slice.slice_index #v_T x i <: v_T)) <: Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError else Core_models.Result.Result_Err (Core_models.Array.TryFromSliceError <: Core_models.Array.t_TryFromSliceError) <: Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_TryFrom v_U v_T) : t_TryInto v_T v_U = { f_Error = i0.f_Error; f_try_into_pre = (fun (self: v_T) -> true); f_try_into_post = (fun (self: v_T) (out: Core_models.Result.t_Result v_U i0.f_Error) -> true); f_try_into = fun (self: v_T) -> f_try_from #v_U #v_T #FStar.Tactics.Typeclasses.solve self } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_32: t_TryFrom u8 u16 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u16) -> true); f_try_from_post = (fun (x: u16) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u16) -> if x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u16) || x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: u16) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u16) <: u8) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_33: t_TryFrom u8 u32 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u32) -> true); f_try_from_post = (fun (x: u32) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u32) -> if x >. 
(cast (Core_models.Num.impl_u8__MAX <: u8) <: u32) || x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: u32) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u32) <: u8) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_34: t_TryFrom u16 u32 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u32) -> true); f_try_from_post = (fun (x: u32) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u32) -> if x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u32) || x <. (cast (Core_models.Num.impl_u16__MIN <: u16) <: u32) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u32) <: u16) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_35: t_TryFrom usize u32 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u32) -> true); f_try_from_post = (fun (x: u32) (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u32) -> if x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u32) || x <. 
(cast (Core_models.Num.impl_usize__MIN <: usize) <: u32) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u32) <: usize) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_36: t_TryFrom u8 u64 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u64) -> true); f_try_from_post = (fun (x: u64) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u64) -> if x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u64) || x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: u64) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u64) <: u8) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_37: t_TryFrom u16 u64 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u64) -> true); f_try_from_post = (fun (x: u64) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u64) -> if x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u64) || x <. 
(cast (Core_models.Num.impl_u16__MIN <: u16) <: u64) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u64) <: u16) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_38: t_TryFrom u32 u64 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u64) -> true); f_try_from_post = (fun (x: u64) (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u64) -> if x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: u64) || x <. (cast (Core_models.Num.impl_u32__MIN <: u32) <: u64) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u64) <: u32) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_39: t_TryFrom usize u64 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u64) -> true); f_try_from_post = (fun (x: u64) (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u64) -> if x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u64) || x <. 
(cast (Core_models.Num.impl_usize__MIN <: usize) <: u64) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u64) <: usize) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_40: t_TryFrom u8 u128 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u128) -> true); f_try_from_post = (fun (x: u128) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u128) -> if x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u128) || x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: u128) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u128) <: u8) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_41: t_TryFrom u16 u128 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u128) -> true); f_try_from_post = (fun (x: u128) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u128) -> if x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u128) || x <. 
(cast (Core_models.Num.impl_u16__MIN <: u16) <: u128) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u128) <: u16) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_42: t_TryFrom u32 u128 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u128) -> true); f_try_from_post = (fun (x: u128) (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u128) -> if x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: u128) || x <. (cast (Core_models.Num.impl_u32__MIN <: u32) <: u128) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u128) <: u32) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_43: t_TryFrom u64 u128 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u128) -> true); f_try_from_post = (fun (x: u128) (out: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u128) -> if x >. (cast (Core_models.Num.impl_u64__MAX <: u64) <: u128) || x <. 
(cast (Core_models.Num.impl_u64__MIN <: u64) <: u128) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u128) <: u64) <: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_44: t_TryFrom usize u128 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: u128) -> true); f_try_from_post = (fun (x: u128) (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: u128) -> if x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u128) || x <. (cast (Core_models.Num.impl_usize__MIN <: usize) <: u128) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: u128) <: usize) <: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_45: t_TryFrom u8 usize = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: usize) -> true); f_try_from_post = (fun (x: usize) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: usize) -> if x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: usize) || x <. 
(cast (Core_models.Num.impl_u8__MIN <: u8) <: usize) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: usize) <: u8) <: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_46: t_TryFrom u16 usize = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: usize) -> true); f_try_from_post = (fun (x: usize) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: usize) -> if x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: usize) || x <. (cast (Core_models.Num.impl_u16__MIN <: u16) <: usize) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: usize) <: u16) <: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_47: t_TryFrom u32 usize = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: usize) -> true); f_try_from_post = (fun (x: usize) (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: usize) -> if x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: usize) || x <. 
(cast (Core_models.Num.impl_u32__MIN <: u32) <: usize) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: usize) <: u32) <: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_48: t_TryFrom u64 usize = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: usize) -> true); f_try_from_post = (fun (x: usize) (out: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: usize) -> if x >. (cast (Core_models.Num.impl_u64__MAX <: u64) <: usize) || x <. (cast (Core_models.Num.impl_u64__MIN <: u64) <: usize) then Core_models.Result.Result_Err (Core_models.Num.Error.TryFromIntError (() <: Prims.unit) <: Core_models.Num.Error.t_TryFromIntError) <: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError else Core_models.Result.Result_Ok (cast (x <: usize) <: u64) <: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_49: t_TryFrom i8 i16 = { f_Error = Core_models.Num.Error.t_TryFromIntError; f_try_from_pre = (fun (x: i16) -> true); f_try_from_post = (fun (x: i16) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) -> true); f_try_from = fun (x: i16) -> if x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i16) || x <. 
(* Continuation of impl_49 (TryFrom i8 of i16), begun earlier in the file. *)
(cast (Core_models.Num.impl_i8__MIN <: i8) <: i16)
then
  Core_models.Result.Result_Err
  (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
    <: Core_models.Num.Error.t_TryFromIntError)
  <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
else
  Core_models.Result.Result_Ok (cast (x <: i16) <: i8)
  <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
}

(* Checked narrowing conversions between signed integer types, mirroring Rust's
   `TryFrom`: the conversion succeeds iff the source value lies within the
   target type's [MIN, MAX] range, otherwise it yields `TryFromIntError`. *)

(* i32 -> i8 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_50: t_TryFrom i8 i32 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i32) -> true);
  f_try_from_post = (fun (x: i32) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i32) ->
    if
      x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i32) ||
      x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i32)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i32) <: i8)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
}

(* i32 -> i16 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_51: t_TryFrom i16 i32 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i32) -> true);
  f_try_from_post = (fun (x: i32) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i32) ->
    if
      x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: i32) ||
      x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i32)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i32) <: i16)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
}

(* i32 -> isize *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_52: t_TryFrom isize i32 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i32) -> true);
  f_try_from_post = (fun (x: i32) (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i32) ->
    if
      x >. (cast (Core_models.Num.impl_isize__MAX <: isize) <: i32) ||
      x <. (cast (Core_models.Num.impl_isize__MIN <: isize) <: i32)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i32) <: isize)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
}

(* i64 -> i8 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_53: t_TryFrom i8 i64 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i64) -> true);
  f_try_from_post = (fun (x: i64) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i64) ->
    if
      x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i64) ||
      x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i64)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i64) <: i8)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
}

(* i64 -> i16 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_54: t_TryFrom i16 i64 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i64) -> true);
  f_try_from_post = (fun (x: i64) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i64) ->
    if
      x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: i64) ||
      x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i64)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i64) <: i16)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
}

(* i64 -> i32 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_55: t_TryFrom i32 i64 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i64) -> true);
  f_try_from_post = (fun (x: i64) (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i64) ->
    if
      x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: i64) ||
      x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: i64)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i64) <: i32)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
}

(* i64 -> isize *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_56: t_TryFrom isize i64 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i64) -> true);
  f_try_from_post = (fun (x: i64) (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i64) ->
    if
      x >. (cast (Core_models.Num.impl_isize__MAX <: isize) <: i64) ||
      x <. (cast (Core_models.Num.impl_isize__MIN <: isize) <: i64)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i64) <: isize)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
}

(* i128 -> i8 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_57: t_TryFrom i8 i128 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i128) -> true);
  f_try_from_post = (fun (x: i128) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i128) ->
    if
      x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i128) ||
      x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i128)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i128) <: i8)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
}

(* i128 -> i16 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_58: t_TryFrom i16 i128 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i128) -> true);
  f_try_from_post = (fun (x: i128) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i128) ->
    if
      x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: i128) ||
      x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i128)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i128) <: i16)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
}

(* i128 -> i32 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_59: t_TryFrom i32 i128 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i128) -> true);
  f_try_from_post = (fun (x: i128) (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i128) ->
    if
      x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: i128) ||
      x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: i128)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i128) <: i32)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
}

(* i128 -> i64 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_60: t_TryFrom i64 i128 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i128) -> true);
  f_try_from_post = (fun (x: i128) (out: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i128) ->
    if
      x >. (cast (Core_models.Num.impl_i64__MAX <: i64) <: i128) ||
      x <. (cast (Core_models.Num.impl_i64__MIN <: i64) <: i128)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i128) <: i64)
      <: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError
}

(* i128 -> isize *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_61: t_TryFrom isize i128 = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: i128) -> true);
  f_try_from_post = (fun (x: i128) (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: i128) ->
    if
      x >. (cast (Core_models.Num.impl_isize__MAX <: isize) <: i128) ||
      x <. (cast (Core_models.Num.impl_isize__MIN <: isize) <: i128)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: i128) <: isize)
      <: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError
}

(* isize -> i8 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_62: t_TryFrom i8 isize = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: isize) -> true);
  f_try_from_post = (fun (x: isize) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: isize) ->
    if
      x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: isize) ||
      x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: isize)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: isize) <: i8)
      <: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError
}

(* isize -> i16 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_63: t_TryFrom i16 isize = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: isize) -> true);
  f_try_from_post = (fun (x: isize) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: isize) ->
    if
      x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: isize) ||
      x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: isize)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: isize) <: i16)
      <: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError
}

(* isize -> i32 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_64: t_TryFrom i32 isize = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: isize) -> true);
  f_try_from_post = (fun (x: isize) (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: isize) ->
    if
      x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: isize) ||
      x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: isize)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: isize) <: i32)
      <: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError
}

(* isize -> i64 *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_65: t_TryFrom i64 isize = {
  f_Error = Core_models.Num.Error.t_TryFromIntError;
  f_try_from_pre = (fun (x: isize) -> true);
  f_try_from_post = (fun (x: isize) (out: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError) -> true);
  f_try_from = fun (x: isize) ->
    if
      x >. (cast (Core_models.Num.impl_i64__MAX <: i64) <: isize) ||
      x <. (cast (Core_models.Num.impl_i64__MIN <: i64) <: isize)
    then
      Core_models.Result.Result_Err
      (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)
        <: Core_models.Num.Error.t_TryFromIntError)
      <: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError
    else
      Core_models.Result.Result_Ok (cast (x <: isize) <: i64)
      <: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError
}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.Arm_shared.Neon.fsti
================================================
module Core_models.Core_arch.Arm_shared.Neon

(* Abstract vector register types for the ARM NEON model. *)
val t_int8x8_t:Type0
val t_int8x16_t:Type0
val t_int16x4_t:Type0
val t_int16x8_t:Type0
val t_int32x2_t:Type0
val t_int32x4_t:Type0
val t_int64x1_t:Type0
val t_int64x2_t:Type0
val t_uint8x8_t:Type0
val t_uint8x16_t:Type0
val t_uint16x4_t:Type0
val t_uint16x8_t:Type0
val t_uint32x2_t:Type0
val t_uint32x4_t:Type0
val t_uint64x1_t:Type0
val t_uint64x2_t:Type0
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.Pclmulqdq.fsti
================================================
module Core_models.Core_arch.X86.Pclmulqdq

(* Carry-less multiply intrinsic model (declared abstract). *)
val e_mm_clmulepi64_si128 : Rust_primitives.Integers.i32 -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.Sse2.fsti
================================================
module Core_models.Core_arch.X86.Sse2

(* SSE2 intrinsic models (declared abstract). *)
val e_mm_set_epi64x: Rust_primitives.Integers.i64 -> Rust_primitives.Integers.i64 -> Core_models.Core_arch.X86.t_e_ee_m128i

val e_mm_cvtsi128_si32: Core_models.Core_arch.X86.t_e_ee_m128i -> Rust_primitives.Integers.i32

val e_mm_srli_si128: Rust_primitives.Integers.i32 -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.fsti ================================================ module Core_models.Core_arch.X86 val t_e_ee_m128i:Type0 val t_e_ee_m256i:Type0 val t_e_ee_m256:Type0 ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86_64_.Sse2.fsti ================================================ module Core_models.Core_arch.X86_64_.Sse2 val e_mm_cvtsi128_si64: Core_models.Core_arch.X86.t_e_ee_m128i -> Rust_primitives.Integers.i64 ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Core_arch.fsti ================================================ module Core_models.Core_arch ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Default.fsti ================================================ module Core_models.Default #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Default (v_Self: Type0) = { f_default_pre:x: Prims.unit -> pred: Type0 { (let _:Prims.unit = x in true) ==> pred }; f_default_post:Prims.unit -> v_Self -> Type0; f_default:x0: Prims.unit -> Prims.Pure v_Self (f_default_pre x0) (fun result -> f_default_post x0 result) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Error.fsti ================================================ module Core_models.Error #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Error (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Fmt.t_Display v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]_super_i1:Core_models.Fmt.t_Debug v_Self } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Error v_Self|} -> i._super_i0 [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Error v_Self|} -> i._super_i1 ================================================ 
FILE: hax-lib/proof-libs/fstar/core/Core_models.F32.fst
================================================
module Core_models.F32
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Absolute value on floats, left abstract. *)
assume val impl_f32__abs': x: float -> float

unfold let impl_f32__abs = impl_f32__abs'
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Fmt.Rt.fsti
================================================
module Core_models.Fmt.Rt
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Runtime support for `core::fmt` formatting machinery, modeled abstractly. *)
val t_ArgumentType:eqtype

type t_Argument = { f_ty:t_ArgumentType }

(* Argument constructors, one per formatting trait. *)
val impl__new_display (#v_T: Type0) (x: v_T) : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)

val impl__new_debug (#v_T: Type0) (x: v_T) : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)

val impl__new_lower_hex (#v_T: Type0) (x: v_T) : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)

val impl_1__new_binary (#v_T: Type0) (x: v_T) : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)

(* Builders for pre-assembled argument packs. *)
val impl_1__new_const (#v_T #v_U: Type0) (x: v_T) (y: v_U) : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)

val impl_1__new_v1 (#v_T #v_U #v_V #v_W: Type0) (x: v_T) (y: v_U) (z: v_V) (t: v_W) : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)

val impl_1__none: Prims.unit -> Prims.Pure (t_Array t_Argument (mk_usize 0)) Prims.l_True (fun _ -> Prims.l_True)

val impl_1__new_v1_formatted (#v_T #v_U #v_V: Type0) (x: v_T) (y: v_U) (z: v_V) : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)

type t_Count =
  | Count_Is : u16 -> t_Count
  | Count_Param : u16 -> t_Count
  | Count_Implied : t_Count

type t_Placeholder = { f_position:usize; f_flags:u32; f_precision:t_Count; f_width:t_Count }

type t_UnsafeArg = | UnsafeArg : t_UnsafeArg
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Fmt.fsti
================================================
module Core_models.Fmt
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

type t_Error = | Error : t_Error

type t_Formatter = | Formatter : t_Formatter

(* Model of `core::fmt::Display`; `f_fmt` threads the formatter through. *)
class t_Display (v_Self: Type0) = {
  f_fmt_pre:v_Self -> t_Formatter -> Type0;
  f_fmt_post:v_Self -> t_Formatter -> (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error) -> Type0;
  f_fmt:x0: v_Self -> x1: t_Formatter
    -> Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)
        (f_fmt_pre x0 x1)
        (fun result -> f_fmt_post x0 x1 result)
}

(* Model of `core::fmt::Debug`. *)
class t_Debug (v_Self: Type0) = {
  f_dbg_fmt_pre:v_Self -> t_Formatter -> Type0;
  f_dbg_fmt_post: v_Self -> t_Formatter -> (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error) -> Type0;
  f_dbg_fmt:x0: v_Self -> x1: t_Formatter
    -> Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)
        (f_dbg_fmt_pre x0 x1)
        (fun result -> f_dbg_fmt_post x0 x1 result)
}

type t_Arguments = | Arguments : Prims.unit -> t_Arguments

(* Blanket Debug instance: every type is Debug in this model. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl (#v_T: Type0) : t_Debug v_T

val impl_11__write_fmt (f: t_Formatter) (args: t_Arguments) : Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error) Prims.l_True (fun _ -> Prims.l_True)
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Hash.fsti
================================================
module Core_models.Hash
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Marker class for `core::hash::Hasher`. *)
class t_Hasher (v_Self: Type0) = { __marker_trait_t_Hasher:Prims.unit }

(* Model of `core::hash::Hash`; `f_hash` threads the hasher state through. *)
class t_Hash (v_Self: Type0) = {
  f_hash_pre:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> self_: v_Self -> h: v_H -> pred: Type0{true ==> pred};
  f_hash_post:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> v_Self -> v_H -> v_H -> Type0;
  f_hash:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> x0: v_Self -> x1: v_H
    -> Prims.Pure v_H
        (f_hash_pre #v_H #i1 x0 x1)
        (fun result -> f_hash_post #v_H #i1 x0 x1 result)
}

(* Blanket Hash instance: every type is Hash in this model. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl (#v_T: Type0) : t_Hash v_T
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Hint.fsti
================================================
module Core_models.Hint
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Optimization hints are identities in the model; the ensures pins this. *)
val black_box (#v_T: Type0) (dummy: v_T)
    : Prims.Pure v_T Prims.l_True (ensures fun res -> let res:v_T = res in res == dummy)

val must_use (#v_T: Type0) (value: v_T)
    : Prims.Pure v_T Prims.l_True (ensures fun res -> let res:v_T = res in res == value)
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Enumerate.fst
================================================
module Core_models.Iter.Adapters.Enumerate
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim: the real definitions live in Core_models.Iter.Bundle. *)
include Core_models.Iter.Bundle {t_Enumerate as t_Enumerate}
include Core_models.Iter.Bundle {impl__new as impl__new}
include Core_models.Iter.Bundle {impl_1 as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Flat_map.fst
================================================
module Core_models.Iter.Adapters.Flat_map
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim for the FlatMap adapter. *)
include Core_models.Iter.Bundle {t_FlatMap as t_FlatMap}
include Core_models.Iter.Bundle {impl__new__from__flat_map as impl__new}
include Core_models.Iter.Bundle {impl_1__from__flat_map as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Flatten.fst
================================================
module Core_models.Iter.Adapters.Flatten
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim for the Flatten adapter. *)
include Core_models.Iter.Bundle {t_Flatten as t_Flatten}
include Core_models.Iter.Bundle {impl__new__from__flatten as impl__new}
include Core_models.Iter.Bundle {impl_1__from__flatten as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Map.fst
================================================
module Core_models.Iter.Adapters.Map
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim for the Map adapter. *)
include Core_models.Iter.Bundle {t_Map as t_Map}
include Core_models.Iter.Bundle {impl__new__from__map as impl__new}
include Core_models.Iter.Bundle {impl_1__from__map as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Rev.fsti
================================================
module Core_models.Iter.Adapters.Rev

(* Abstract Rev adapter type. *)
type t_Rev (t:Type0)
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Step_by.fst
================================================
module Core_models.Iter.Adapters.Step_by
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim for the StepBy adapter. *)
include Core_models.Iter.Bundle {t_StepBy as t_StepBy}
include Core_models.Iter.Bundle {impl__new__from__step_by as impl__new}
include Core_models.Iter.Bundle {impl_1__from__step_by as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Take.fst
================================================
module Core_models.Iter.Adapters.Take
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Re-export shim for the Take adapter. *)
include Core_models.Iter.Bundle {t_Take as t_Take}
include Core_models.Iter.Bundle {impl__new__from__take as impl__new}
include Core_models.Iter.Bundle {impl_1__from__take as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Zip.fst
================================================
module Core_models.Iter.Adapters.Zip
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open
Rust_primitives

(* Re-export shim for the Zip adapter. *)
include Core_models.Iter.Bundle {t_Zip as t_Zip}
include Core_models.Iter.Bundle {impl__new__from__zip as impl__new}
include Core_models.Iter.Bundle {impl_1__from__zip as impl_1}
================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Bundle.fst
================================================
module Core_models.Iter.Bundle
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* Iterator-adapter state records, mirroring Rust's core::iter adapters. *)

type t_Enumerate (v_I: Type0) = { f_iter:v_I; f_count:usize }

let impl__new (#v_I: Type0) (iter: v_I) : t_Enumerate v_I =
  { f_iter = iter; f_count = mk_usize 0 } <: t_Enumerate v_I

type t_FlatMap (v_I: Type0) (v_U: Type0) (v_F: Type0) = {
  f_it:v_I;
  f_f:v_F;
  f_current:Core_models.Option.t_Option v_U
}

type t_Map (v_I: Type0) (v_F: Type0) = { f_iter:v_I; f_f:v_F }

let impl__new__from__map (#v_I #v_F: Type0) (iter: v_I) (f: v_F) : t_Map v_I v_F =
  { f_iter = iter; f_f = f } <: t_Map v_I v_F

type t_StepBy (v_I: Type0) = { f_iter:v_I; f_step:usize }

let impl__new__from__step_by (#v_I: Type0) (iter: v_I) (step: usize) : t_StepBy v_I =
  { f_iter = iter; f_step = step } <: t_StepBy v_I

type t_Take (v_I: Type0) = { f_iter:v_I; f_n:usize }

let impl__new__from__take (#v_I: Type0) (iter: v_I) (n: usize) : t_Take v_I =
  { f_iter = iter; f_n = n } <: t_Take v_I

type t_Zip (v_I1: Type0) (v_I2: Type0) = { f_it1:v_I1; f_it2:v_I2 }

(* Model of `core::iter::Iterator`: `f_next` returns the advanced iterator
   paired with an optional item (state is threaded explicitly). *)
class t_Iterator (v_Self: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Item:Type0;
  f_next_pre:self_: v_Self -> pred: Type0{true ==> pred};
  f_next_post:v_Self -> (v_Self & Core_models.Option.t_Option f_Item) -> Type0;
  f_next:x0: v_Self
    -> Prims.Pure (v_Self & Core_models.Option.t_Option f_Item)
        (f_next_pre x0)
        (fun result -> f_next_post x0 result)
}

(* Iterator instance for Enumerate: pairs each item with a running index.
   NOTE(review): the counter increment is guarded by a `v_assume` that
   f_count < usize::MAX — an assumption inherited from the extraction. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_1 (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    : t_Iterator (t_Enumerate v_I) = {
  f_Item = (usize & i0.f_Item);
  f_next_pre = (fun (self: t_Enumerate v_I) -> true);
  f_next_post
  =
  (fun
      (self: t_Enumerate v_I)
      (out1: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item)))
      ->
      true);
  f_next
  =
  fun (self: t_Enumerate v_I) ->
    let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =
      f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter
    in
    let self:t_Enumerate v_I = { self with f_iter = tmp0 } <: t_Enumerate v_I in
    let (self: t_Enumerate v_I), (hax_temp_output: Core_models.Option.t_Option (usize & i0.f_Item)) =
      match out <: Core_models.Option.t_Option i0.f_Item with
      | Core_models.Option.Option_Some a ->
        let i:usize = self.f_count in
        let _:Prims.unit =
          Hax_lib.v_assume (b2t (self.f_count <. Core_models.Num.impl_usize__MAX <: bool))
        in
        let self:t_Enumerate v_I =
          { self with f_count = self.f_count +! mk_usize 1 } <: t_Enumerate v_I
        in
        self,
        (Core_models.Option.Option_Some (i, a <: (usize & i0.f_Item))
          <: Core_models.Option.t_Option (usize & i0.f_Item))
        <: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))
      | Core_models.Option.Option_None ->
        self,
        (Core_models.Option.Option_None <: Core_models.Option.t_Option (usize & i0.f_Item))
        <: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))
    in
    self, hax_temp_output <: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))
}

let impl__new__from__flat_map
    (#v_I #v_U #v_F: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)
    (#_: unit{i2.Core_models.Ops.Function.f_Output == v_U})
    (it: v_I)
    (f: v_F)
    : t_FlatMap v_I v_U v_F =
  { f_it = it; f_f = f; f_current = Core_models.Option.Option_None <: Core_models.Option.t_Option v_U }
  <: t_FlatMap v_I v_U v_F

(* Iterator instance for FlatMap is left abstract (assumed). *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_1__from__flat_map':
    #v_I: Type0 ->
    #v_U: Type0 ->
    #v_F: Type0 ->
    {| i0: t_Iterator v_I |} ->
    {| i1: t_Iterator v_U |} ->
    {| i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item |} ->
    #_: unit{i2.Core_models.Ops.Function.f_Output == v_U}
  -> t_Iterator (t_FlatMap v_I v_U v_F)

unfold let impl_1__from__flat_map
    (#v_I #v_U #v_F: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)
    (#_: unit{i2.Core_models.Ops.Function.f_Output == v_U})
  = impl_1__from__flat_map' #v_I #v_U #v_F #i0 #i1 #i2 #_

noeq type t_Flatten (v_I: Type0) {| i0: t_Iterator v_I |} {| i1: t_Iterator i0.f_Item |} = {
  f_it:v_I;
  f_current:Core_models.Option.t_Option i0.f_Item
}

let impl__new__from__flatten
    (#v_I: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item)
    (it: v_I)
    : t_Flatten v_I =
  { f_it = it; f_current = Core_models.Option.Option_None <: Core_models.Option.t_Option i0.f_Item }
  <: t_Flatten v_I

(* Iterator instance for Flatten is left abstract (assumed). *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_1__from__flatten':
    #v_I: Type0 ->
    {| i0: t_Iterator v_I |} ->
    {| i1: t_Iterator i0.f_Item |}
  -> t_Iterator (t_Flatten v_I)

unfold let impl_1__from__flatten
    (#v_I: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item)
  = impl_1__from__flatten' #v_I #i0 #i1

(* Iterator instance for Map: applies the stored closure to each item. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_1__from__map
    (#v_I #v_O #v_F: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)
    (#_: unit{i1.Core_models.Ops.Function.f_Output == v_O})
    : t_Iterator (t_Map v_I v_F) = {
  f_Item = v_O;
  f_next_pre = (fun (self: t_Map v_I v_F) -> true);
  f_next_post = (fun (self: t_Map v_I v_F) (out1: (t_Map v_I v_F & Core_models.Option.t_Option v_O)) -> true);
  f_next
  =
  fun (self: t_Map v_I v_F) ->
    let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =
      f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter
    in
    let self:t_Map v_I v_F = { self with f_iter = tmp0 } <: t_Map v_I v_F in
    let hax_temp_output:Core_models.Option.t_Option v_O =
      match out <: Core_models.Option.t_Option i0.f_Item with
      | Core_models.Option.Option_Some v ->
        Core_models.Option.Option_Some
        (Core_models.Ops.Function.f_call_once #v_F
            #i0.f_Item
            #FStar.Tactics.Typeclasses.solve
            self.f_f
            v)
        <: Core_models.Option.t_Option v_O
      | Core_models.Option.Option_None ->
        Core_models.Option.Option_None <: Core_models.Option.t_Option v_O
    in
    self, hax_temp_output <: (t_Map v_I v_F & Core_models.Option.t_Option v_O)
}

(* Iterator instance for StepBy is left abstract (assumed). *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_1__from__step_by': #v_I: Type0 -> {| i0: t_Iterator v_I |} -> t_Iterator (t_StepBy v_I)

unfold let impl_1__from__step_by
    (#v_I: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
  = impl_1__from__step_by' #v_I #i0

(* Iterator instance for Take: yields from the inner iterator while the
   remaining-count f_n is nonzero, decrementing it on each step. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_1__from__take
    (#v_I: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)
    : t_Iterator (t_Take v_I) = {
  f_Item = i0.f_Item;
  f_next_pre = (fun (self: t_Take v_I) -> true);
  f_next_post = (fun (self: t_Take v_I) (out1: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)) -> true);
  f_next
  =
  fun (self: t_Take v_I) ->
    let (self: t_Take v_I), (hax_temp_output: Core_models.Option.t_Option i0.f_Item) =
      if self.f_n <>. mk_usize 0
      then
        let self:t_Take v_I = { self with f_n = self.f_n -! mk_usize 1 } <: t_Take v_I in
        let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =
          f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter
        in
        let self:t_Take v_I = { self with f_iter = tmp0 } <: t_Take v_I in
        self, out <: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)
      else
        self,
        (Core_models.Option.Option_None <: Core_models.Option.t_Option i0.f_Item)
        <: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)
    in
    self, hax_temp_output <: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)
}

let impl__new__from__zip
    (#v_I1 #v_I2: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I1)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)
    (it1: v_I1)
    (it2: v_I2)
    : t_Zip v_I1 v_I2 =
  { f_it1 = it1; f_it2 = it2 } <: t_Zip v_I1 v_I2

(* Iterator instance for Zip is left abstract (assumed). *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_1__from__zip':
    #v_I1: Type0 ->
    #v_I2: Type0 ->
    {| i0: t_Iterator v_I1 |} ->
    {| i1: t_Iterator v_I2 |}
  -> t_Iterator (t_Zip v_I1 v_I2)

unfold let impl_1__from__zip
    (#v_I1 #v_I2: Type0)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I1)
    (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)
  = impl_1__from__zip' #v_I1 #v_I2 #i0 #i1

(* Convenience methods over any Iterator (fold, enumerate, step_by, map, all,
   take, flat_map, flatten, zip), modeled as a subclass of t_Iterator.
   NOTE: this declaration continues past the end of this region. *)
class t_IteratorMethods (v_Self: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Iterator v_Self;
  f_fold_pre: #v_B: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} -> v_Self -> v_B -> v_F -> Type0;
  f_fold_post: #v_B: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} -> v_Self -> v_B -> v_F -> v_B -> Type0;
  f_fold: #v_B: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} -> x0: v_Self -> x1: v_B -> x2: v_F -> Prims.Pure v_B (f_fold_pre #v_B #v_F #i1 #_ x0 x1 x2) (fun result -> f_fold_post #v_B #v_F #i1 #_ x0 x1 x2 result);
  f_enumerate_pre:v_Self -> Type0;
  f_enumerate_post:v_Self -> t_Enumerate v_Self -> Type0;
  f_enumerate:x0: v_Self -> Prims.Pure (t_Enumerate v_Self) (f_enumerate_pre x0) (fun result -> f_enumerate_post x0 result);
  f_step_by_pre:v_Self -> usize -> Type0;
  f_step_by_post:v_Self -> usize -> t_StepBy v_Self -> Type0;
  f_step_by:x0: v_Self -> x1: usize -> Prims.Pure (t_StepBy v_Self) (f_step_by_pre x0 x1) (fun result -> f_step_by_post x0 x1 result);
  f_map_pre: #v_O: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} -> v_Self -> v_F -> Type0;
  f_map_post: #v_O: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} -> v_Self -> v_F -> t_Map v_Self v_F -> Type0;
  f_map: #v_O: Type0 -> #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} -> x0: v_Self -> x1: v_F -> Prims.Pure (t_Map v_Self v_F) (f_map_pre #v_O #v_F #i1 #_ x0 x1) (fun result -> f_map_post #v_O #v_F #i1 #_ x0 x1 result);
  f_all_pre: #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == bool} -> v_Self -> v_F -> Type0;
  f_all_post: #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == bool} -> v_Self -> v_F -> bool -> Type0;
  f_all: #v_F: Type0 -> {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i1.Core_models.Ops.Function.f_Output == bool} -> x0: v_Self -> x1: v_F -> Prims.Pure bool (f_all_pre #v_F #i1 #_ x0 x1) (fun result -> f_all_post #v_F #i1 #_ x0 x1 result);
  f_take_pre:v_Self -> usize -> Type0;
  f_take_post:v_Self -> usize -> t_Take v_Self -> Type0;
  f_take:x0: v_Self -> x1: usize -> Prims.Pure
(t_Take v_Self) (f_take_pre x0 x1) (fun result -> f_take_post x0 x1 result); f_flat_map_pre: #v_U: Type0 -> #v_F: Type0 -> {| i1: t_Iterator v_U |} -> {| i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} -> v_Self -> v_F -> Type0; f_flat_map_post: #v_U: Type0 -> #v_F: Type0 -> {| i1: t_Iterator v_U |} -> {| i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} -> v_Self -> v_F -> t_FlatMap v_Self v_U v_F -> Type0; f_flat_map: #v_U: Type0 -> #v_F: Type0 -> {| i1: t_Iterator v_U |} -> {| i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} -> #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} -> x0: v_Self -> x1: v_F -> Prims.Pure (t_FlatMap v_Self v_U v_F) (f_flat_map_pre #v_U #v_F #i1 #i2 #_ x0 x1) (fun result -> f_flat_map_post #v_U #v_F #i1 #i2 #_ x0 x1 result); f_flatten_pre:{| i1: t_Iterator (_super_i0).f_Item |} -> v_Self -> Type0; f_flatten_post:{| i1: t_Iterator (_super_i0).f_Item |} -> v_Self -> t_Flatten v_Self -> Type0; f_flatten:{| i1: t_Iterator (_super_i0).f_Item |} -> x0: v_Self -> Prims.Pure (t_Flatten v_Self) (f_flatten_pre #i1 x0) (fun result -> f_flatten_post #i1 x0 result); f_zip_pre:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> v_Self -> v_I2 -> Type0; f_zip_post:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> v_Self -> v_I2 -> t_Zip v_Self v_I2 -> Type0; f_zip:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> x0: v_Self -> x1: v_I2 -> Prims.Pure (t_Zip v_Self v_I2) (f_zip_pre #v_I2 #i1 x0 x1) (fun result -> f_zip_post #v_I2 #i1 x0 x1 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_IteratorMethods v_Self|} -> i._super_i0 [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I) : t_IteratorMethods v_I = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_fold_pre = (fun (#v_B: Type0) (#v_F: Type0) 
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item)) (self: v_I) (init: v_B) (f: v_F) -> true); f_fold_post = (fun (#v_B: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item)) (self: v_I) (init: v_B) (f: v_F) (out: v_B) -> true); f_fold = (fun (#v_B: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item)) (self: v_I) (init: v_B) (f: v_F) -> init); f_enumerate_pre = (fun (self: v_I) -> true); f_enumerate_post = (fun (self: v_I) (out: t_Enumerate v_I) -> true); f_enumerate = (fun (self: v_I) -> impl__new #v_I self); f_step_by_pre = (fun (self: v_I) (step: usize) -> true); f_step_by_post = (fun (self: v_I) (step: usize) (out: t_StepBy v_I) -> true); f_step_by = (fun (self: v_I) (step: usize) -> impl__new__from__step_by #v_I self step); f_map_pre = (fun (#v_O: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> true); f_map_post = (fun (#v_O: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) (out: t_Map v_I v_F) -> true); f_map = (fun (#v_O: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> impl__new__from__map #v_I #v_F self f); f_all_pre = (fun (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> true); f_all_post = (fun (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) (out: bool) -> true); f_all = (fun (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> true); f_take_pre = 
(fun (self: v_I) (n: usize) -> true); f_take_post = (fun (self: v_I) (n: usize) (out: t_Take v_I) -> true); f_take = (fun (self: v_I) (n: usize) -> impl__new__from__take #v_I self n); f_flat_map_pre = (fun (#v_U: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U) (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> true); f_flat_map_post = (fun (#v_U: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U) (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) (out: t_FlatMap v_I v_U v_F) -> true); f_flat_map = (fun (#v_U: Type0) (#v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U) (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item) (self: v_I) (f: v_F) -> impl__new__from__flat_map #v_I #v_U #v_F self f); f_flatten_pre = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item) (self: v_I) -> true); f_flatten_post = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item) (self: v_I) (out: t_Flatten v_I) -> true); f_flatten = (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item) (self: v_I) -> impl__new__from__flatten #v_I self); f_zip_pre = (fun (#v_I2: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2) (self: v_I) (it2: v_I2) -> true); f_zip_post = (fun (#v_I2: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2) (self: v_I) (it2: v_I2) (out: t_Zip v_I v_I2) -> true); f_zip = fun (#v_I2: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2) (self: v_I) (it2: v_I2) -> impl__new__from__zip #v_I #v_I2 self it2 } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1__from__iterator (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I) : Core_models.Iter.Traits.Collect.t_IntoIterator v_I 
= { f_IntoIter = v_I; f_into_iter_pre = (fun (self: v_I) -> true); f_into_iter_post = (fun (self: v_I) (out: v_I) -> true); f_into_iter = fun (self: v_I) -> self } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Sources.Repeat_with.fsti ================================================ module Core_models.Iter.Sources.Repeat_with val t_RepeatWith: Type0 -> Type0 val repeat_with #t (y: Prims.unit -> t): t_RepeatWith (Prims.unit -> t) ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.Collect.fst ================================================ module Core_models.Iter.Traits.Collect #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_IntoIterator (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_IntoIter:Type0; f_into_iter_pre:v_Self -> Type0; f_into_iter_post:v_Self -> f_IntoIter -> Type0; f_into_iter:x0: v_Self -> Prims.Pure f_IntoIter (f_into_iter_pre x0) (fun result -> f_into_iter_post x0 result) } class t_FromIterator (v_Self: Type0) (v_A: Type0) = { f_from_iter_pre:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> iter: v_T -> pred: Type0{true ==> pred}; f_from_iter_post:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> v_T -> v_Self -> Type0; f_from_iter:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> x0: v_T -> Prims.Pure v_Self (f_from_iter_pre #v_T #i1 x0) (fun result -> f_from_iter_post #v_T #i1 x0 result) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.Iterator.fst ================================================ module Core_models.Iter.Traits.Iterator #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives include Core_models.Iter.Bundle {t_Iterator as t_Iterator} include Core_models.Iter.Bundle {f_Item as f_Item} include Core_models.Iter.Bundle {f_next_pre as f_next_pre} include Core_models.Iter.Bundle 
{f_next_post as f_next_post} include Core_models.Iter.Bundle {f_next as f_next} include Core_models.Iter.Bundle {t_IteratorMethods as t_IteratorMethods} include Core_models.Iter.Bundle {f_fold_pre as f_fold_pre} include Core_models.Iter.Bundle {f_fold_post as f_fold_post} include Core_models.Iter.Bundle {f_fold as f_fold} include Core_models.Iter.Bundle {f_enumerate_pre as f_enumerate_pre} include Core_models.Iter.Bundle {f_enumerate_post as f_enumerate_post} include Core_models.Iter.Bundle {f_enumerate as f_enumerate} include Core_models.Iter.Bundle {f_step_by_pre as f_step_by_pre} include Core_models.Iter.Bundle {f_step_by_post as f_step_by_post} include Core_models.Iter.Bundle {f_step_by as f_step_by} include Core_models.Iter.Bundle {f_map_pre as f_map_pre} include Core_models.Iter.Bundle {f_map_post as f_map_post} include Core_models.Iter.Bundle {f_map as f_map} include Core_models.Iter.Bundle {f_all_pre as f_all_pre} include Core_models.Iter.Bundle {f_all_post as f_all_post} include Core_models.Iter.Bundle {f_all as f_all} include Core_models.Iter.Bundle {f_take_pre as f_take_pre} include Core_models.Iter.Bundle {f_take_post as f_take_post} include Core_models.Iter.Bundle {f_take as f_take} include Core_models.Iter.Bundle {f_flat_map_pre as f_flat_map_pre} include Core_models.Iter.Bundle {f_flat_map_post as f_flat_map_post} include Core_models.Iter.Bundle {f_flat_map as f_flat_map} include Core_models.Iter.Bundle {f_flatten_pre as f_flatten_pre} include Core_models.Iter.Bundle {f_flatten_post as f_flatten_post} include Core_models.Iter.Bundle {f_flatten as f_flatten} include Core_models.Iter.Bundle {f_zip_pre as f_zip_pre} include Core_models.Iter.Bundle {f_zip_post as f_zip_post} include Core_models.Iter.Bundle {f_zip as f_zip} include Core_models.Iter.Bundle {impl as impl} include Core_models.Iter.Bundle {impl_1__from__iterator as impl_1} ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.fst 
================================================ module Core_models.Iter.Traits #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Iterator (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Item:Type0; f_next_pre:v_Self -> Type0; f_next_post:v_Self -> (v_Self & Core_models.Option.t_Option f_Item) -> Type0; f_next:x0: v_Self -> Prims.Pure (v_Self & Core_models.Option.t_Option f_Item) (f_next_pre x0) (fun result -> f_next_post x0 result) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Marker.fst ================================================ module Core_models.Marker #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Copy (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_Self } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Copy v_Self|} -> i._super_i0 class t_Send (v_Self: Type0) = { __marker_trait_t_Send:Prims.unit } class t_Sync (v_Self: Type0) = { __marker_trait_t_Sync:Prims.unit } class t_Sized (v_Self: Type0) = { __marker_trait_t_Sized:Prims.unit } class t_StructuralPartialEq (v_Self: Type0) = { __marker_trait_t_StructuralPartialEq:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_T: Type0) : t_Send v_T = { __marker_trait_t_Send = () } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) : t_Sync v_T = { __marker_trait_t_Sync = () } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2 (#v_T: Type0) : t_Sized v_T = { __marker_trait_t_Sized = () } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T) : t_Copy v_T = { _super_i0 = FStar.Tactics.Typeclasses.solve } type t_PhantomData (v_T: Type0) = | PhantomData : t_PhantomData v_T ================================================ FILE: 
hax-lib/proof-libs/fstar/core/Core_models.Mem.Manually_drop.fsti ================================================ module Core_models.Mem.Manually_drop #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_ManuallyDrop (v_T: Type0) = { f_value:v_T } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Mem.Maybe_uninit.fsti ================================================ module Core_models.Mem.Maybe_uninit #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core_models open FStar.Mul [@@ FStar.Tactics.Typeclasses.tcinstance] val impl (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |} : Core_models.Clone.t_Clone (t_MaybeUninit v_T) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_9 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |} : Core_models.Marker.t_Copy (t_MaybeUninit v_T) val f_clone__impl__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_1 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} : Core_models.Fmt.t_Debug (t_MaybeUninit v_T) val f_fmt__impl_1__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__new (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (v_val: v_T) : Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__uninit: #v_T: Type0 -> {| i0: Core_models.Marker.t_Sized v_T |} -> Prims.unit -> Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__zeroed: #v_T: Type0 -> {| i0: Core_models.Marker.t_Sized v_T |} -> Prims.unit -> Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__write (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) (v_val: v_T) : Prims.Pure (t_MaybeUninit v_T & 
Rust_primitives.Hax.t_MutRef v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__as_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True) val impl_2__as_mut_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.failure) Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init_read (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init_drop (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init_ref (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.t_MutRef v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__array_assume_init (#v_T: Type0) (v_N: usize) {| i0: Core_models.Marker.t_Sized v_T |} (array: t_Array (t_MaybeUninit v_T) v_N) : Prims.Pure (t_Array v_T v_N) Prims.l_True (fun _ -> Prims.l_True) val impl_2__as_bytes (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__as_bytes_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T) : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8))) Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_assume_init_ref (#v_T: 
Type0) {| i0: Core_models.Marker.t_Sized v_T |} (slice: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_assume_init_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (slice: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (this: t_Slice (t_MaybeUninit v_T)) : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_mut_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (this: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.failure) Prims.l_True (fun _ -> Prims.l_True) val impl_2__copy_from_slice (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |} (this: t_Slice (t_MaybeUninit v_T)) (src: t_Slice v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__clone_from_slice (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i2: Core_models.Clone.t_Clone v_T |} (this: t_Slice (t_MaybeUninit v_T)) (src: t_Slice v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i2: Core_models.Clone.t_Clone v_T |} (this: t_Slice (t_MaybeUninit v_T)) (value: v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill_with (#v_T #v_F: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i3: Core_models.Marker.t_Sized v_F |} {| i4: Core_models.Ops.Function.t_FnMut v_F Prims.unit |} (this: t_Slice (t_MaybeUninit v_T)) (f: v_F) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & 
Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill_from (#v_T #v_I: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i5: Core_models.Marker.t_Sized v_I |} {| i6: Core_models.Iter.Traits.Collect.t_IntoIterator v_I |} (this: t_Slice (t_MaybeUninit v_T)) (it: v_I) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & (Rust_primitives.Hax.t_MutRef (t_Slice v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T)))) Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_bytes (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (this: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_bytes_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (this: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8))) Prims.l_True (fun _ -> Prims.l_True) val impl_2__assume_init_drop__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__copy_from_slice__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__clone_from_slice__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill_with__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__fill_from__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_bytes__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_2__slice_as_bytes_mut__panic_cold_explicit: Prims.unit -> Prims.Pure 
Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_3__write_copy_of_slice (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |} (self: t_Slice (t_MaybeUninit v_T)) (src: t_Slice v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__write_clone_of_slice (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i2: Core_models.Clone.t_Clone v_T |} (self: t_Slice (t_MaybeUninit v_T)) (src: t_Slice v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__write_filled (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i2: Core_models.Clone.t_Clone v_T |} (self: t_Slice (t_MaybeUninit v_T)) (value: v_T) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__write_with (#v_T #v_F: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i3: Core_models.Marker.t_Sized v_F |} {| i4: Core_models.Ops.Function.t_FnMut v_F usize |} (self: t_Slice (t_MaybeUninit v_T)) (f: v_F) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__write_iter (#v_T #v_I: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i5: Core_models.Marker.t_Sized v_I |} {| i6: Core_models.Iter.Traits.Collect.t_IntoIterator v_I |} (self: t_Slice (t_MaybeUninit v_T)) (it: v_I) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & (Rust_primitives.Hax.t_MutRef (t_Slice v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T)))) Prims.l_True (fun _ -> Prims.l_True) val impl_3__as_bytes (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__as_bytes_mut (#v_T: Type0) {| i0: 
Core_models.Marker.t_Sized v_T |} (self: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8))) Prims.l_True (fun _ -> Prims.l_True) val impl_3__assume_init_drop (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_3__assume_init_ref (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice v_T) Prims.l_True (fun _ -> Prims.l_True) val impl_3__assume_init_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_Slice (t_MaybeUninit v_T)) : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T)) Prims.l_True (fun _ -> Prims.l_True) val impl_4__transpose (#v_T: Type0) (v_N: usize) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit (t_Array v_T v_N)) : Prims.Pure (t_Array (t_MaybeUninit v_T) v_N) Prims.l_True (fun _ -> Prims.l_True) val impl_5__transpose (#v_T: Type0) (v_N: usize) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_Array (t_MaybeUninit v_T) v_N) : Prims.Pure (t_MaybeUninit (t_Array v_T v_N)) Prims.l_True (fun _ -> Prims.l_True) type t_Guard (v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} = { f_slice:Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T)); f_initialized:usize } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_6 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} : Core_models.Ops.Drop.t_Drop (t_Guard v_T) val f_drop__impl_6__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) class t_SpecFill (v_Self: Type0) (v_T: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_15671470021555116719:Core_models.Marker.t_Sized v_T; f_spec_fill_pre:v_Self -> v_T -> Type0; f_spec_fill_post:v_Self -> v_T -> v_Self -> Type0; f_spec_fill:x0: v_Self -> x1: v_T -> Prims.Pure 
v_Self (f_spec_fill_pre x0 x1) (fun result -> f_spec_fill_post x0 x1 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_7 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Clone.t_Clone v_T |} : t_SpecFill (t_Slice (t_MaybeUninit v_T)) v_T [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_8 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |} : t_SpecFill (t_Slice (t_MaybeUninit v_T)) v_T ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Mem.Transmutability.fsti ================================================ module Core_models.Mem.Transmutability #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core_models open FStar.Mul val f_transmute__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) type t_Assume = { f_alignment:bool; f_lifetimes:bool; f_safety:bool; f_validity:bool } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_5:Core_models.Marker.t_StructuralPartialEq t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_6:Core_models.Cmp.t_PartialEq t_Assume t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_7:Core_models.Cmp.t_Eq t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_8:Core_models.Clone.t_Clone t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_9:Core_models.Marker.t_Copy t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_10:Core_models.Fmt.t_Debug t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_1:Core_models.Marker.t_UnsizedConstParamTy t_Assume [@@ FStar.Tactics.Typeclasses.tcinstance] val impl:Core_models.Marker.t_ConstParamTy_ t_Assume let impl_Assume__NOTHING: t_Assume = () <: t_Assume let impl_Assume__ALIGNMENT: t_Assume = () <: t_Assume let impl_Assume__LIFETIMES: t_Assume = () <: t_Assume let impl_Assume__SAFETY: t_Assume = () <: t_Assume let impl_Assume__VALIDITY: t_Assume = () <: t_Assume val 
impl_Assume__and (self other_assumptions: t_Assume) : Prims.Pure t_Assume Prims.l_True (fun _ -> Prims.l_True) val impl_Assume__but_not (self other_assumptions: t_Assume) : Prims.Pure t_Assume Prims.l_True (fun _ -> Prims.l_True) val f_add__impl_3__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_sub__impl_4__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3: Core_models.Ops.Arith.t_Add t_Assume t_Assume = { f_Output = t_Assume; f_Output_11695847888444666345 = FStar.Tactics.Typeclasses.solve; f_add_pre = (fun (self: t_Assume) (other_assumptions: t_Assume) -> true); f_add_post = (fun (self: t_Assume) (other_assumptions: t_Assume) (out: t_Assume) -> true); f_add = fun (self: t_Assume) (other_assumptions: t_Assume) -> () <: t_Assume } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4: Core_models.Ops.Arith.t_Sub t_Assume t_Assume = { f_Output = t_Assume; f_Output_9381071510542709353 = FStar.Tactics.Typeclasses.solve; f_sub_pre = (fun (self: t_Assume) (other_assumptions: t_Assume) -> true); f_sub_post = (fun (self: t_Assume) (other_assumptions: t_Assume) (out: t_Assume) -> true); f_sub = fun (self: t_Assume) (other_assumptions: t_Assume) -> () <: t_Assume } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Mem.fsti ================================================ module Core_models.Mem #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives val forget (#v_T: Type0) (t: v_T) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) val forget_unsized (#v_T: Type0) (t: v_T) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) val size_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val size_of_val (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> 
Prims.l_True) val min_align_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val min_align_of_val (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val align_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val align_of_val (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val align_of_val_raw (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val needs_drop: #v_T: Type0 -> Prims.unit -> Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) val uninitialized: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val swap (#v_T: Type0) (x y: v_T) : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True) val replace (#v_T: Type0) (dest src: v_T) : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True) val drop (#v_T: Type0) (e_x: v_T) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) val copy (#v_T: Type0) {| i0: Core_models.Marker.t_Copy v_T |} (x: v_T) : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val take (#v_T: Type0) (x: v_T) : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True) val transmute_copy (#v_Src #v_Dst: Type0) (src: v_Src) : Prims.Pure v_Dst Prims.l_True (fun _ -> Prims.l_True) val variant_count: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) val zeroed: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True) val transmute (#v_Src #v_Dst: Type0) (src: v_Src) : Prims.Pure v_Dst Prims.l_True (fun _ -> Prims.l_True) ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Num.Error.fsti ================================================ module Core_models.Num.Error #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_TryFromIntError = | TryFromIntError : Prims.unit -> t_TryFromIntError 
(* NOTE(review): the extraction collapses `IntErrorKind`'s variants into a
   single nullary constructor declared at type `t_IntErrorKind`. *)
type t_IntErrorKind = | IntErrorKind : t_IntErrorKind

type t_ParseIntError = { f_kind:t_IntErrorKind }

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Num.Niche_types.fsti
================================================

(* Interface for `core::num`'s niche-value wrapper types (`Nanoseconds`, the
   `NonZero*Inner` family, `UsizeNoHighBit`, and the `*NotAllOnes` family).
   Every wrapper follows the same generated pattern: a single-constructor
   type, `Clone`/`Copy` instances, `new` (returns `Option`), `new_unchecked`,
   `as_inner`, and comparison/hash/debug instances. All specs are trivial. *)
module Core_models.Num.Niche_types
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core_models
open FStar.Mul

(* --- Nanoseconds: a u32 wrapper --- *)
type t_Nanoseconds = | Nanoseconds : u32 -> t_Nanoseconds

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_13:Core_models.Clone.t_Clone t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_14:Core_models.Marker.t_Copy t_Nanoseconds

val e_: Prims.unit

val impl_Nanoseconds__new (v_val: u32)
    : Prims.Pure (Core_models.Option.t_Option t_Nanoseconds) Prims.l_True (fun _ -> Prims.l_True)
val impl_Nanoseconds__new_unchecked (v_val: u32)
    : Prims.Pure t_Nanoseconds Prims.l_True (fun _ -> Prims.l_True)
val impl_Nanoseconds__as_inner (self: t_Nanoseconds)
    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_7:Core_models.Marker.t_StructuralPartialEq t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_8:Core_models.Cmp.t_PartialEq t_Nanoseconds t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_15:Core_models.Cmp.t_Eq t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_10:Core_models.Cmp.t_PartialOrd t_Nanoseconds t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_9:Core_models.Cmp.t_Ord t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_11:Core_models.Hash.t_Hash t_Nanoseconds
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_12:Core_models.Fmt.t_Debug t_Nanoseconds

val impl_Nanoseconds__ZERO: t_Nanoseconds

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_1:Core_models.Default.t_Default t_Nanoseconds

val f_default__impl_1__panic_cold_explicit: Prims.unit
  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)

(* --- NonZeroU8Inner: a u8 wrapper (declaration continues on the next line) --- *)
type t_NonZeroU8Inner = |
(* Completion of `t_NonZeroU8Inner` and its generated companions, then the
   start of the `t_NonZeroU16Inner` group (same pattern). *)
NonZeroU8Inner : u8 -> t_NonZeroU8Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_86:Core_models.Clone.t_Clone t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_87:Core_models.Marker.t_Copy t_NonZeroU8Inner

val e_ee_1: Prims.unit

val impl_NonZeroU8Inner__new (v_val: u8)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU8Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU8Inner__new_unchecked (v_val: u8)
    : Prims.Pure t_NonZeroU8Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU8Inner__as_inner (self: t_NonZeroU8Inner)
    : Prims.Pure u8 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_17:Core_models.Marker.t_StructuralPartialEq t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_18:Core_models.Cmp.t_PartialEq t_NonZeroU8Inner t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_88:Core_models.Cmp.t_Eq t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_20:Core_models.Cmp.t_PartialOrd t_NonZeroU8Inner t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_19:Core_models.Cmp.t_Ord t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_21:Core_models.Hash.t_Hash t_NonZeroU8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_22:Core_models.Fmt.t_Debug t_NonZeroU8Inner

(* --- NonZeroU16Inner: a u16 wrapper --- *)
type t_NonZeroU16Inner = | NonZeroU16Inner : u16 -> t_NonZeroU16Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_89:Core_models.Clone.t_Clone t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_90:Core_models.Marker.t_Copy t_NonZeroU16Inner

val e_ee_2: Prims.unit

val impl_NonZeroU16Inner__new (v_val: u16)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU16Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU16Inner__new_unchecked (v_val: u16)
    : Prims.Pure t_NonZeroU16Inner Prims.l_True (fun _ -> Prims.l_True)
(* Declaration continues on the next line of the dump. *)
val impl_NonZeroU16Inner__as_inner (self: t_NonZeroU16Inner) : Prims.Pure u16 Prims.l_True (fun _ ->
(* Completion of `impl_NonZeroU16Inner__as_inner`, the remaining U16
   instances, and the start of the `t_NonZeroU32Inner` group. *)
Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_24:Core_models.Marker.t_StructuralPartialEq t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_25:Core_models.Cmp.t_PartialEq t_NonZeroU16Inner t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_91:Core_models.Cmp.t_Eq t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_27:Core_models.Cmp.t_PartialOrd t_NonZeroU16Inner t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_26:Core_models.Cmp.t_Ord t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_28:Core_models.Hash.t_Hash t_NonZeroU16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_29:Core_models.Fmt.t_Debug t_NonZeroU16Inner

(* --- NonZeroU32Inner: a u32 wrapper --- *)
type t_NonZeroU32Inner = | NonZeroU32Inner : u32 -> t_NonZeroU32Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_92:Core_models.Clone.t_Clone t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_93:Core_models.Marker.t_Copy t_NonZeroU32Inner

val e_ee_3: Prims.unit

val impl_NonZeroU32Inner__new (v_val: u32)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU32Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU32Inner__new_unchecked (v_val: u32)
    : Prims.Pure t_NonZeroU32Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU32Inner__as_inner (self: t_NonZeroU32Inner)
    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_31:Core_models.Marker.t_StructuralPartialEq t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_32:Core_models.Cmp.t_PartialEq t_NonZeroU32Inner t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_94:Core_models.Cmp.t_Eq t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_34:Core_models.Cmp.t_PartialOrd t_NonZeroU32Inner t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_33:Core_models.Cmp.t_Ord t_NonZeroU32Inner
(* Declaration continues on the next line of the dump. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] val
(* Remaining U32 instances, the full `t_NonZeroU64Inner` group, and the start
   of the `t_NonZeroU128Inner` group. *)
impl_35:Core_models.Hash.t_Hash t_NonZeroU32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_36:Core_models.Fmt.t_Debug t_NonZeroU32Inner

(* --- NonZeroU64Inner: a u64 wrapper --- *)
type t_NonZeroU64Inner = | NonZeroU64Inner : u64 -> t_NonZeroU64Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_95:Core_models.Clone.t_Clone t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_96:Core_models.Marker.t_Copy t_NonZeroU64Inner

val e_ee_4: Prims.unit

val impl_NonZeroU64Inner__new (v_val: u64)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU64Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU64Inner__new_unchecked (v_val: u64)
    : Prims.Pure t_NonZeroU64Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU64Inner__as_inner (self: t_NonZeroU64Inner)
    : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_38:Core_models.Marker.t_StructuralPartialEq t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_39:Core_models.Cmp.t_PartialEq t_NonZeroU64Inner t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_97:Core_models.Cmp.t_Eq t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_41:Core_models.Cmp.t_PartialOrd t_NonZeroU64Inner t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_40:Core_models.Cmp.t_Ord t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_42:Core_models.Hash.t_Hash t_NonZeroU64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_43:Core_models.Fmt.t_Debug t_NonZeroU64Inner

(* --- NonZeroU128Inner: a u128 wrapper --- *)
type t_NonZeroU128Inner = | NonZeroU128Inner : u128 -> t_NonZeroU128Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_98:Core_models.Clone.t_Clone t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_99:Core_models.Marker.t_Copy t_NonZeroU128Inner

val e_ee_5: Prims.unit

val impl_NonZeroU128Inner__new (v_val: u128)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU128Inner) Prims.l_True (fun _ -> Prims.l_True)
(* Declaration continues on the next line of the dump. *)
val
(* Remaining U128 declarations and instances, then the start of the signed
   `t_NonZeroI8Inner` group. *)
impl_NonZeroU128Inner__new_unchecked (v_val: u128)
    : Prims.Pure t_NonZeroU128Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroU128Inner__as_inner (self: t_NonZeroU128Inner)
    : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_45:Core_models.Marker.t_StructuralPartialEq t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_46:Core_models.Cmp.t_PartialEq t_NonZeroU128Inner t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_100:Core_models.Cmp.t_Eq t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_48:Core_models.Cmp.t_PartialOrd t_NonZeroU128Inner t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_47:Core_models.Cmp.t_Ord t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_49:Core_models.Hash.t_Hash t_NonZeroU128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_50:Core_models.Fmt.t_Debug t_NonZeroU128Inner

(* --- NonZeroI8Inner: an i8 wrapper --- *)
type t_NonZeroI8Inner = | NonZeroI8Inner : i8 -> t_NonZeroI8Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_101:Core_models.Clone.t_Clone t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_102:Core_models.Marker.t_Copy t_NonZeroI8Inner

val e_ee_6: Prims.unit

val impl_NonZeroI8Inner__new (v_val: i8)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI8Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI8Inner__new_unchecked (v_val: i8)
    : Prims.Pure t_NonZeroI8Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI8Inner__as_inner (self: t_NonZeroI8Inner)
    : Prims.Pure i8 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_52:Core_models.Marker.t_StructuralPartialEq t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_53:Core_models.Cmp.t_PartialEq t_NonZeroI8Inner t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_103:Core_models.Cmp.t_Eq t_NonZeroI8Inner
(* Declaration continues on the next line of the dump. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] val
(* Remaining I8 instances, the full `t_NonZeroI16Inner` group, and the start
   of the `t_NonZeroI32Inner` group. *)
impl_55:Core_models.Cmp.t_PartialOrd t_NonZeroI8Inner t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_54:Core_models.Cmp.t_Ord t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_56:Core_models.Hash.t_Hash t_NonZeroI8Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_57:Core_models.Fmt.t_Debug t_NonZeroI8Inner

(* --- NonZeroI16Inner: an i16 wrapper --- *)
type t_NonZeroI16Inner = | NonZeroI16Inner : i16 -> t_NonZeroI16Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_104:Core_models.Clone.t_Clone t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_105:Core_models.Marker.t_Copy t_NonZeroI16Inner

val e_ee_7: Prims.unit

val impl_NonZeroI16Inner__new (v_val: i16)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI16Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI16Inner__new_unchecked (v_val: i16)
    : Prims.Pure t_NonZeroI16Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI16Inner__as_inner (self: t_NonZeroI16Inner)
    : Prims.Pure i16 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_59:Core_models.Marker.t_StructuralPartialEq t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_60:Core_models.Cmp.t_PartialEq t_NonZeroI16Inner t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_106:Core_models.Cmp.t_Eq t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_62:Core_models.Cmp.t_PartialOrd t_NonZeroI16Inner t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_61:Core_models.Cmp.t_Ord t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_63:Core_models.Hash.t_Hash t_NonZeroI16Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_64:Core_models.Fmt.t_Debug t_NonZeroI16Inner

(* --- NonZeroI32Inner: an i32 wrapper --- *)
type t_NonZeroI32Inner = | NonZeroI32Inner : i32 -> t_NonZeroI32Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_107:Core_models.Clone.t_Clone t_NonZeroI32Inner
(* Declaration continues on the next line of the dump. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] val
(* Remaining I32 declarations and instances, then the start of the
   `t_NonZeroI64Inner` group. *)
impl_108:Core_models.Marker.t_Copy t_NonZeroI32Inner

val e_ee_8: Prims.unit

val impl_NonZeroI32Inner__new (v_val: i32)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI32Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI32Inner__new_unchecked (v_val: i32)
    : Prims.Pure t_NonZeroI32Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI32Inner__as_inner (self: t_NonZeroI32Inner)
    : Prims.Pure i32 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_66:Core_models.Marker.t_StructuralPartialEq t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_67:Core_models.Cmp.t_PartialEq t_NonZeroI32Inner t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_109:Core_models.Cmp.t_Eq t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_69:Core_models.Cmp.t_PartialOrd t_NonZeroI32Inner t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_68:Core_models.Cmp.t_Ord t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_70:Core_models.Hash.t_Hash t_NonZeroI32Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_71:Core_models.Fmt.t_Debug t_NonZeroI32Inner

(* --- NonZeroI64Inner: an i64 wrapper --- *)
type t_NonZeroI64Inner = | NonZeroI64Inner : i64 -> t_NonZeroI64Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_110:Core_models.Clone.t_Clone t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_111:Core_models.Marker.t_Copy t_NonZeroI64Inner

val e_ee_9: Prims.unit

val impl_NonZeroI64Inner__new (v_val: i64)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI64Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI64Inner__new_unchecked (v_val: i64)
    : Prims.Pure t_NonZeroI64Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI64Inner__as_inner (self: t_NonZeroI64Inner)
    : Prims.Pure i64 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_73:Core_models.Marker.t_StructuralPartialEq t_NonZeroI64Inner
(* Attribute continues on the next line of the dump. *)
[@@
(* Remaining I64 instances and the `t_NonZeroI128Inner` group. *)
FStar.Tactics.Typeclasses.tcinstance] val impl_74:Core_models.Cmp.t_PartialEq t_NonZeroI64Inner t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_112:Core_models.Cmp.t_Eq t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_76:Core_models.Cmp.t_PartialOrd t_NonZeroI64Inner t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_75:Core_models.Cmp.t_Ord t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_77:Core_models.Hash.t_Hash t_NonZeroI64Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_78:Core_models.Fmt.t_Debug t_NonZeroI64Inner

(* --- NonZeroI128Inner: an i128 wrapper --- *)
type t_NonZeroI128Inner = | NonZeroI128Inner : i128 -> t_NonZeroI128Inner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_113:Core_models.Clone.t_Clone t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_114:Core_models.Marker.t_Copy t_NonZeroI128Inner

val e_ee_10: Prims.unit

val impl_NonZeroI128Inner__new (v_val: i128)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI128Inner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI128Inner__new_unchecked (v_val: i128)
    : Prims.Pure t_NonZeroI128Inner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroI128Inner__as_inner (self: t_NonZeroI128Inner)
    : Prims.Pure i128 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_80:Core_models.Marker.t_StructuralPartialEq t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_81:Core_models.Cmp.t_PartialEq t_NonZeroI128Inner t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_115:Core_models.Cmp.t_Eq t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_83:Core_models.Cmp.t_PartialOrd t_NonZeroI128Inner t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_82:Core_models.Cmp.t_Ord t_NonZeroI128Inner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_84:Core_models.Hash.t_Hash t_NonZeroI128Inner
(* Declaration continues on the next line of the dump. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] val
(* Last I128 instance, the `t_UsizeNoHighBit` group, and the start of the
   `t_NonZeroUsizeInner` group. *)
impl_85:Core_models.Fmt.t_Debug t_NonZeroI128Inner

(* --- UsizeNoHighBit: a usize wrapper --- *)
type t_UsizeNoHighBit = | UsizeNoHighBit : usize -> t_UsizeNoHighBit

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_137:Core_models.Clone.t_Clone t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_138:Core_models.Marker.t_Copy t_UsizeNoHighBit

val e_ee_11: Prims.unit

val impl_UsizeNoHighBit__new (v_val: usize)
    : Prims.Pure (Core_models.Option.t_Option t_UsizeNoHighBit) Prims.l_True (fun _ -> Prims.l_True)
val impl_UsizeNoHighBit__new_unchecked (v_val: usize)
    : Prims.Pure t_UsizeNoHighBit Prims.l_True (fun _ -> Prims.l_True)
val impl_UsizeNoHighBit__as_inner (self: t_UsizeNoHighBit)
    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_117:Core_models.Marker.t_StructuralPartialEq t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_118:Core_models.Cmp.t_PartialEq t_UsizeNoHighBit t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_139:Core_models.Cmp.t_Eq t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_120:Core_models.Cmp.t_PartialOrd t_UsizeNoHighBit t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_119:Core_models.Cmp.t_Ord t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_121:Core_models.Hash.t_Hash t_UsizeNoHighBit
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_122:Core_models.Fmt.t_Debug t_UsizeNoHighBit

(* --- NonZeroUsizeInner: a usize wrapper --- *)
type t_NonZeroUsizeInner = | NonZeroUsizeInner : usize -> t_NonZeroUsizeInner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_140:Core_models.Clone.t_Clone t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_141:Core_models.Marker.t_Copy t_NonZeroUsizeInner

val e_ee_12: Prims.unit

val impl_NonZeroUsizeInner__new (v_val: usize)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroUsizeInner) Prims.l_True (fun _ -> Prims.l_True)
(* Declaration continues on the next line of the dump. *)
val impl_NonZeroUsizeInner__new_unchecked (v_val: usize) : Prims.Pure t_NonZeroUsizeInner Prims.l_True
(* Completion of `impl_NonZeroUsizeInner__new_unchecked`, the remaining
   usize instances, and the start of the `t_NonZeroIsizeInner` group. *)
(fun _ -> Prims.l_True)

val impl_NonZeroUsizeInner__as_inner (self: t_NonZeroUsizeInner)
    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_124:Core_models.Marker.t_StructuralPartialEq t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_125:Core_models.Cmp.t_PartialEq t_NonZeroUsizeInner t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_142:Core_models.Cmp.t_Eq t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_127:Core_models.Cmp.t_PartialOrd t_NonZeroUsizeInner t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_126:Core_models.Cmp.t_Ord t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_128:Core_models.Hash.t_Hash t_NonZeroUsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_129:Core_models.Fmt.t_Debug t_NonZeroUsizeInner

(* --- NonZeroIsizeInner: an isize wrapper --- *)
type t_NonZeroIsizeInner = | NonZeroIsizeInner : isize -> t_NonZeroIsizeInner

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_143:Core_models.Clone.t_Clone t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_144:Core_models.Marker.t_Copy t_NonZeroIsizeInner

val e_ee_13: Prims.unit

val impl_NonZeroIsizeInner__new (v_val: isize)
    : Prims.Pure (Core_models.Option.t_Option t_NonZeroIsizeInner) Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroIsizeInner__new_unchecked (v_val: isize)
    : Prims.Pure t_NonZeroIsizeInner Prims.l_True (fun _ -> Prims.l_True)
val impl_NonZeroIsizeInner__as_inner (self: t_NonZeroIsizeInner)
    : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_131:Core_models.Marker.t_StructuralPartialEq t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_132:Core_models.Cmp.t_PartialEq t_NonZeroIsizeInner t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_145:Core_models.Cmp.t_Eq t_NonZeroIsizeInner
(* Declaration continues on the next line of the dump. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] val
(* Remaining isize instances, the `t_U32NotAllOnes` group, and the start of
   the `t_I32NotAllOnes` group. *)
impl_134:Core_models.Cmp.t_PartialOrd t_NonZeroIsizeInner t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_133:Core_models.Cmp.t_Ord t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_135:Core_models.Hash.t_Hash t_NonZeroIsizeInner
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_136:Core_models.Fmt.t_Debug t_NonZeroIsizeInner

(* --- U32NotAllOnes: a u32 wrapper --- *)
type t_U32NotAllOnes = | U32NotAllOnes : u32 -> t_U32NotAllOnes

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_174:Core_models.Clone.t_Clone t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_175:Core_models.Marker.t_Copy t_U32NotAllOnes

val e_ee_14: Prims.unit

val impl_U32NotAllOnes__new (v_val: u32)
    : Prims.Pure (Core_models.Option.t_Option t_U32NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)
val impl_U32NotAllOnes__new_unchecked (v_val: u32)
    : Prims.Pure t_U32NotAllOnes Prims.l_True (fun _ -> Prims.l_True)
val impl_U32NotAllOnes__as_inner (self: t_U32NotAllOnes)
    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_147:Core_models.Marker.t_StructuralPartialEq t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_148:Core_models.Cmp.t_PartialEq t_U32NotAllOnes t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_176:Core_models.Cmp.t_Eq t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_150:Core_models.Cmp.t_PartialOrd t_U32NotAllOnes t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_149:Core_models.Cmp.t_Ord t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_151:Core_models.Hash.t_Hash t_U32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_152:Core_models.Fmt.t_Debug t_U32NotAllOnes

(* --- I32NotAllOnes: an i32 wrapper --- *)
type t_I32NotAllOnes = | I32NotAllOnes : i32 -> t_I32NotAllOnes

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_177:Core_models.Clone.t_Clone t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_178:Core_models.Marker.t_Copy t_I32NotAllOnes
(* Declaration continues on the next line of the dump. *)
val
(* Remaining I32NotAllOnes declarations and instances, then the start of the
   `t_U64NotAllOnes` group. *)
e_ee_15: Prims.unit

val impl_I32NotAllOnes__new (v_val: i32)
    : Prims.Pure (Core_models.Option.t_Option t_I32NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)
val impl_I32NotAllOnes__new_unchecked (v_val: i32)
    : Prims.Pure t_I32NotAllOnes Prims.l_True (fun _ -> Prims.l_True)
val impl_I32NotAllOnes__as_inner (self: t_I32NotAllOnes)
    : Prims.Pure i32 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_154:Core_models.Marker.t_StructuralPartialEq t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_155:Core_models.Cmp.t_PartialEq t_I32NotAllOnes t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_179:Core_models.Cmp.t_Eq t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_157:Core_models.Cmp.t_PartialOrd t_I32NotAllOnes t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_156:Core_models.Cmp.t_Ord t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_158:Core_models.Hash.t_Hash t_I32NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_159:Core_models.Fmt.t_Debug t_I32NotAllOnes

(* --- U64NotAllOnes: a u64 wrapper --- *)
type t_U64NotAllOnes = | U64NotAllOnes : u64 -> t_U64NotAllOnes

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_180:Core_models.Clone.t_Clone t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_181:Core_models.Marker.t_Copy t_U64NotAllOnes

val e_ee_16: Prims.unit

val impl_U64NotAllOnes__new (v_val: u64)
    : Prims.Pure (Core_models.Option.t_Option t_U64NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)
val impl_U64NotAllOnes__new_unchecked (v_val: u64)
    : Prims.Pure t_U64NotAllOnes Prims.l_True (fun _ -> Prims.l_True)
val impl_U64NotAllOnes__as_inner (self: t_U64NotAllOnes)
    : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_161:Core_models.Marker.t_StructuralPartialEq t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_162:Core_models.Cmp.t_PartialEq t_U64NotAllOnes t_U64NotAllOnes
(* Attribute continues on the next line of the dump. *)
[@@
(* Remaining U64NotAllOnes instances, the `t_I64NotAllOnes` group, and the
   `t_NotAllOnesHelper` class mapping a machine-integer type to its
   "not all ones" wrapper. *)
FStar.Tactics.Typeclasses.tcinstance] val impl_182:Core_models.Cmp.t_Eq t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_164:Core_models.Cmp.t_PartialOrd t_U64NotAllOnes t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_163:Core_models.Cmp.t_Ord t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_165:Core_models.Hash.t_Hash t_U64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_166:Core_models.Fmt.t_Debug t_U64NotAllOnes

(* --- I64NotAllOnes: an i64 wrapper --- *)
type t_I64NotAllOnes = | I64NotAllOnes : i64 -> t_I64NotAllOnes

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_183:Core_models.Clone.t_Clone t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_184:Core_models.Marker.t_Copy t_I64NotAllOnes

val e_ee_17: Prims.unit

val impl_I64NotAllOnes__new (v_val: i64)
    : Prims.Pure (Core_models.Option.t_Option t_I64NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)
val impl_I64NotAllOnes__new_unchecked (v_val: i64)
    : Prims.Pure t_I64NotAllOnes Prims.l_True (fun _ -> Prims.l_True)
val impl_I64NotAllOnes__as_inner (self: t_I64NotAllOnes)
    : Prims.Pure i64 Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_168:Core_models.Marker.t_StructuralPartialEq t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_169:Core_models.Cmp.t_PartialEq t_I64NotAllOnes t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_185:Core_models.Cmp.t_Eq t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_171:Core_models.Cmp.t_PartialOrd t_I64NotAllOnes t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_170:Core_models.Cmp.t_Ord t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_172:Core_models.Hash.t_Hash t_I64NotAllOnes
[@@ FStar.Tactics.Typeclasses.tcinstance] val impl_173:Core_models.Fmt.t_Debug t_I64NotAllOnes

(* Type-level function realised as a typeclass: `f_Type` names the wrapper
   type associated with `v_Self`; instances follow on the next line. *)
class t_NotAllOnesHelper (v_Self: Type0) = { f_Type:Type0; f_Type_659097508213326199:Core_models.Marker.t_Sized f_Type }

(* Attribute continues on the next line of the dump. *)
[@@
(* The four `NotAllOnesHelper` instances: u32/i32/u64/i64 map to their
   corresponding `*NotAllOnes` wrapper type. *)
FStar.Tactics.Typeclasses.tcinstance]
let impl_NotAllOnesHelper_for_u32: t_NotAllOnesHelper u32 =
  { f_Type = t_U32NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_NotAllOnesHelper_for_i32: t_NotAllOnesHelper i32 =
  { f_Type = t_I32NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_NotAllOnesHelper_for_u64: t_NotAllOnesHelper u64 =
  { f_Type = t_U64NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_NotAllOnesHelper_for_i64: t_NotAllOnesHelper i64 =
  { f_Type = t_I64NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Num.fst
================================================

(* Implementation of `core::num`'s per-width integer methods, delegating to
   `Rust_primitives.Arithmetic`. The `checked_*` operations compare against
   MIN/MAX over mathematical integers before performing machine arithmetic. *)
module Core_models.Num
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open FStar.Mul
open Rust_primitives

(* --- u8 --- *)
let impl_u8__MIN: u8 = mk_u8 0
let impl_u8__MAX: u8 = mk_u8 255
let impl_u8__BITS: u32 = mk_u32 8

let impl_u8__wrapping_add (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_add_u8 x y
let impl_u8__saturating_add (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_add_u8 x y
let impl_u8__overflowing_add (x y: u8) : (u8 & bool) = Rust_primitives.Arithmetic.overflowing_add_u8 x y

(* Checked addition: lifts operands to `Hax_lib.Int` mathematical integers,
   tests `MIN <= x + y <= MAX`, and only then adds with `+!`.
   (Expression continues on the next line of the dump.) *)
let impl_u8__checked_add (x y: u8) : Core_models.Option.t_Option u8 =
  if
    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) &&
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) <=
    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)
  then Core_models.Option.Option_Some (x +!
(* Completion of `impl_u8__checked_add`, followed by the u8 sub/mul families. *)
y) <: Core_models.Option.t_Option u8
  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8

let impl_u8__wrapping_sub (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_sub_u8 x y
let impl_u8__saturating_sub (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_sub_u8 x y
let impl_u8__overflowing_sub (x y: u8) : (u8 & bool) = Rust_primitives.Arithmetic.overflowing_sub_u8 x y

(* Checked subtraction: same mathematical-integer range test, with `-`/`-!`. *)
let impl_u8__checked_sub (x y: u8) : Core_models.Option.t_Option u8 =
  if
    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) &&
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) <=
    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)
  then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option u8
  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8

let impl_u8__wrapping_mul (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_mul_u8 x y
let impl_u8__saturating_mul (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_mul_u8 x y
let impl_u8__overflowing_mul (x y: u8) : (u8 & bool) = Rust_primitives.Arithmetic.overflowing_mul_u8 x y

(* Checked multiplication: same range test, with `*`/`*!`.
   (Expression continues on the next line of the dump.) *)
let impl_u8__checked_mul (x y: u8) : Core_models.Option.t_Option u8 =
  if
    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) &&
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) <=
    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)
  then Core_models.Option.Option_Some (x *!
(* Completion of `impl_u8__checked_mul`, then the remaining u8 operations:
   `pow`, `count_ones`, and a set of assumed (axiomatised) primitives exposed
   through `unfold` aliases. *)
y) <: Core_models.Option.t_Option u8
  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8

let impl_u8__pow (x: u8) (exp: u32) : u8 = Rust_primitives.Arithmetic.pow_u8 x exp

let impl_u8__count_ones (x: u8) : u32 = Rust_primitives.Arithmetic.count_ones_u8 x

(* Bit-manipulation and conversion helpers are assumed rather than defined;
   each `unfold let` exposes the primed axiom under its public name. *)
assume val impl_u8__rotate_right': x: u8 -> n: u32 -> u8
unfold let impl_u8__rotate_right = impl_u8__rotate_right'

assume val impl_u8__rotate_left': x: u8 -> n: u32 -> u8
unfold let impl_u8__rotate_left = impl_u8__rotate_left'

assume val impl_u8__leading_zeros': x: u8 -> u32
unfold let impl_u8__leading_zeros = impl_u8__leading_zeros'

assume val impl_u8__ilog2': x: u8 -> u32
unfold let impl_u8__ilog2 = impl_u8__ilog2'

assume val impl_u8__from_str_radix': src: string -> radix: u32
  -> Core_models.Result.t_Result u8 Core_models.Num.Error.t_ParseIntError
unfold let impl_u8__from_str_radix = impl_u8__from_str_radix'

assume val impl_u8__from_be_bytes': bytes: t_Array u8 (mk_usize 1) -> u8
unfold let impl_u8__from_be_bytes = impl_u8__from_be_bytes'

assume val impl_u8__from_le_bytes': bytes: t_Array u8 (mk_usize 1) -> u8
unfold let impl_u8__from_le_bytes = impl_u8__from_le_bytes'

assume val impl_u8__to_be_bytes': bytes: u8 -> t_Array u8 (mk_usize 1)
unfold let impl_u8__to_be_bytes = impl_u8__to_be_bytes'

assume val impl_u8__to_le_bytes': bytes: u8 -> t_Array u8 (mk_usize 1)
unfold let impl_u8__to_le_bytes = impl_u8__to_le_bytes'

(* Euclidean remainder: the only u8 operation with a non-trivial precondition
   (divisor non-zero). Declaration continues on the next line of the dump. *)
let impl_u8__rem_euclid (x y: u8) : Prims.Pure u8 (requires y <>.
(* Completion of `impl_u8__rem_euclid` (precondition `y <> 0`), then the u16
   constants and add family — the same generated pattern as u8. *)
mk_u8 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_u8 x y

(* --- u16 --- *)
let impl_u16__MIN: u16 = mk_u16 0
let impl_u16__MAX: u16 = mk_u16 65535
let impl_u16__BITS: u32 = mk_u32 16

let impl_u16__wrapping_add (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_add_u16 x y
let impl_u16__saturating_add (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_add_u16 x y
let impl_u16__overflowing_add (x y: u16) : (u16 & bool) = Rust_primitives.Arithmetic.overflowing_add_u16 x y

(* Checked addition over mathematical integers; see the u8 variant.
   (Expression continues on the next line of the dump.) *)
let impl_u16__checked_add (x y: u16) : Core_models.Option.t_Option u16 =
  if
    (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <=
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) &&
    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +
      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)
      <: Hax_lib.Int.t_Int) <=
    (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int)
  then Core_models.Option.Option_Some (x +!
y) <: Core_models.Option.t_Option u16 else Core_models.Option.Option_None <: Core_models.Option.t_Option u16 let impl_u16__wrapping_sub (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_sub_u16 x y let impl_u16__saturating_sub (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_sub_u16 x y let impl_u16__overflowing_sub (x y: u16) : (u16 & bool) = Rust_primitives.Arithmetic.overflowing_sub_u16 x y let impl_u16__checked_sub (x y: u16) : Core_models.Option.t_Option u16 = if (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option u16 else Core_models.Option.Option_None <: Core_models.Option.t_Option u16 let impl_u16__wrapping_mul (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_mul_u16 x y let impl_u16__saturating_mul (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_mul_u16 x y let impl_u16__overflowing_mul (x y: u16) : (u16 & bool) = Rust_primitives.Arithmetic.overflowing_mul_u16 x y let impl_u16__checked_mul (x y: u16) : Core_models.Option.t_Option u16 = if (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u16 else Core_models.Option.Option_None <: Core_models.Option.t_Option u16 let impl_u16__pow (x: u16) (exp: u32) : u16 = Rust_primitives.Arithmetic.pow_u16 x exp let impl_u16__count_ones (x: u16) : u32 = Rust_primitives.Arithmetic.count_ones_u16 x assume val impl_u16__rotate_right': x: u16 -> n: u32 -> u16 unfold let impl_u16__rotate_right = impl_u16__rotate_right' assume val impl_u16__rotate_left': x: u16 -> n: u32 -> u16 unfold let impl_u16__rotate_left = impl_u16__rotate_left' assume val impl_u16__leading_zeros': x: u16 -> u32 unfold let impl_u16__leading_zeros = impl_u16__leading_zeros' assume val impl_u16__ilog2': x: u16 -> u32 unfold let impl_u16__ilog2 = impl_u16__ilog2' assume val impl_u16__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result u16 Core_models.Num.Error.t_ParseIntError unfold let impl_u16__from_str_radix = impl_u16__from_str_radix' assume val impl_u16__from_be_bytes': bytes: t_Array u8 (mk_usize 2) -> u16 unfold let impl_u16__from_be_bytes = impl_u16__from_be_bytes' assume val impl_u16__from_le_bytes': bytes: t_Array u8 (mk_usize 2) -> u16 unfold let impl_u16__from_le_bytes = impl_u16__from_le_bytes' assume val impl_u16__to_be_bytes': bytes: u16 -> t_Array u8 (mk_usize 2) unfold let impl_u16__to_be_bytes = impl_u16__to_be_bytes' assume val impl_u16__to_le_bytes': bytes: u16 -> t_Array u8 (mk_usize 2) unfold let impl_u16__to_le_bytes = impl_u16__to_le_bytes' let impl_u16__rem_euclid (x y: u16) : Prims.Pure u16 (requires y <>. 
mk_u16 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_u16 x y
// --- Models of Rust's `u32` inherent methods (machine-generated by hax). ---
// Range constants: u32 spans [0, 4294967295] and is 32 bits wide.
let impl_u32__MIN: u32 = mk_u32 0
let impl_u32__MAX: u32 = mk_u32 4294967295
let impl_u32__BITS: u32 = mk_u32 32
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_u32__wrapping_add (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_add_u32 x y
let impl_u32__saturating_add (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_add_u32 x y
let impl_u32__overflowing_add (x y: u32) : (u32 & bool) = Rust_primitives.Arithmetic.overflowing_add_u32 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_u32__checked_add (x y: u32) : Core_models.Option.t_Option u32 = if (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option u32 else Core_models.Option.Option_None <: Core_models.Option.t_Option u32
let impl_u32__wrapping_sub (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_sub_u32 x y
let impl_u32__saturating_sub (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_sub_u32 x y
let impl_u32__overflowing_sub (x y: u32) : (u32 & bool) = Rust_primitives.Arithmetic.overflowing_sub_u32 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_u32__checked_sub (x y: u32) : Core_models.Option.t_Option u32 = if (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option u32 else Core_models.Option.Option_None <: Core_models.Option.t_Option u32
let impl_u32__wrapping_mul (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_mul_u32 x y
let impl_u32__saturating_mul (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_mul_u32 x y
let impl_u32__overflowing_mul (x y: u32) : (u32 & bool) = Rust_primitives.Arithmetic.overflowing_mul_u32 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_u32__checked_mul (x y: u32) : Core_models.Option.t_Option u32 = if (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option u32 else Core_models.Option.Option_None <: Core_models.Option.t_Option u32
// Here base and exponent share the type u32, hence the single binder group.
let impl_u32__pow (x exp: u32) : u32 = Rust_primitives.Arithmetic.pow_u32 x exp
let impl_u32__count_ones (x: u32) : u32 = Rust_primitives.Arithmetic.count_ones_u32 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_u32__rotate_right': x: u32 -> n: u32 -> u32
unfold let impl_u32__rotate_right = impl_u32__rotate_right'
assume val impl_u32__rotate_left': x: u32 -> n: u32 -> u32
unfold let impl_u32__rotate_left = impl_u32__rotate_left'
assume val impl_u32__leading_zeros': x: u32 -> u32
unfold let impl_u32__leading_zeros = impl_u32__leading_zeros'
assume val impl_u32__ilog2': x: u32 -> u32
unfold let impl_u32__ilog2 = impl_u32__ilog2'
assume val impl_u32__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result u32 Core_models.Num.Error.t_ParseIntError
unfold let impl_u32__from_str_radix = impl_u32__from_str_radix'
// A `u32` converts to/from a 4-byte array in either endianness.
assume val impl_u32__from_be_bytes': bytes: t_Array u8 (mk_usize 4) -> u32
unfold let impl_u32__from_be_bytes = impl_u32__from_be_bytes'
assume val impl_u32__from_le_bytes': bytes: t_Array u8 (mk_usize 4) -> u32
unfold let impl_u32__from_le_bytes = impl_u32__from_le_bytes'
assume val impl_u32__to_be_bytes': bytes: u32 -> t_Array u8 (mk_usize 4)
unfold let impl_u32__to_be_bytes = impl_u32__to_be_bytes'
assume val impl_u32__to_le_bytes': bytes: u32 -> t_Array u8 (mk_usize 4)
unfold let impl_u32__to_le_bytes = impl_u32__to_le_bytes'
// Euclidean remainder; requires a nonzero divisor (continues on next line).
let impl_u32__rem_euclid (x y: u32) : Prims.Pure u32 (requires y <>.
mk_u32 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_u32 x y
// --- Models of Rust's `u64` inherent methods (machine-generated by hax). ---
// Range constants: u64 spans [0, 2^64 - 1] and is 64 bits wide.
let impl_u64__MIN: u64 = mk_u64 0
let impl_u64__MAX: u64 = mk_u64 18446744073709551615
let impl_u64__BITS: u32 = mk_u32 64
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_u64__wrapping_add (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_add_u64 x y
let impl_u64__saturating_add (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_add_u64 x y
let impl_u64__overflowing_add (x y: u64) : (u64 & bool) = Rust_primitives.Arithmetic.overflowing_add_u64 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_u64__checked_add (x y: u64) : Core_models.Option.t_Option u64 = if (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option u64 else Core_models.Option.Option_None <: Core_models.Option.t_Option u64
let impl_u64__wrapping_sub (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_sub_u64 x y
let impl_u64__saturating_sub (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_sub_u64 x y
let impl_u64__overflowing_sub (x y: u64) : (u64 & bool) = Rust_primitives.Arithmetic.overflowing_sub_u64 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_u64__checked_sub (x y: u64) : Core_models.Option.t_Option u64 = if (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option u64 else Core_models.Option.Option_None <: Core_models.Option.t_Option u64
let impl_u64__wrapping_mul (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_mul_u64 x y
let impl_u64__saturating_mul (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_mul_u64 x y
let impl_u64__overflowing_mul (x y: u64) : (u64 & bool) = Rust_primitives.Arithmetic.overflowing_mul_u64 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_u64__checked_mul (x y: u64) : Core_models.Option.t_Option u64 = if (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option u64 else Core_models.Option.Option_None <: Core_models.Option.t_Option u64
let impl_u64__pow (x: u64) (exp: u32) : u64 = Rust_primitives.Arithmetic.pow_u64 x exp
let impl_u64__count_ones (x: u64) : u32 = Rust_primitives.Arithmetic.count_ones_u64 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_u64__rotate_right': x: u64 -> n: u32 -> u64
unfold let impl_u64__rotate_right = impl_u64__rotate_right'
assume val impl_u64__rotate_left': x: u64 -> n: u32 -> u64
unfold let impl_u64__rotate_left = impl_u64__rotate_left'
assume val impl_u64__leading_zeros': x: u64 -> u32
unfold let impl_u64__leading_zeros = impl_u64__leading_zeros'
assume val impl_u64__ilog2': x: u64 -> u32
unfold let impl_u64__ilog2 = impl_u64__ilog2'
assume val impl_u64__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result u64 Core_models.Num.Error.t_ParseIntError
unfold let impl_u64__from_str_radix = impl_u64__from_str_radix'
// A `u64` converts to/from an 8-byte array in either endianness.
assume val impl_u64__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> u64
unfold let impl_u64__from_be_bytes = impl_u64__from_be_bytes'
assume val impl_u64__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> u64
unfold let impl_u64__from_le_bytes = impl_u64__from_le_bytes'
assume val impl_u64__to_be_bytes': bytes: u64 -> t_Array u8 (mk_usize 8)
unfold let impl_u64__to_be_bytes = impl_u64__to_be_bytes'
assume val impl_u64__to_le_bytes': bytes: u64 -> t_Array u8 (mk_usize 8)
unfold let impl_u64__to_le_bytes = impl_u64__to_le_bytes'
// Euclidean remainder; requires a nonzero divisor (continues on next line).
let impl_u64__rem_euclid (x y: u64) : Prims.Pure u64 (requires y <>.
mk_u64 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_u64 x y
// --- Models of Rust's `u128` inherent methods (machine-generated by hax). ---
// Range constants: u128 spans [0, 2^128 - 1] and is 128 bits wide.
let impl_u128__MIN: u128 = mk_u128 0
let impl_u128__MAX: u128 = mk_u128 340282366920938463463374607431768211455
let impl_u128__BITS: u32 = mk_u32 128
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_u128__wrapping_add (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_add_u128 x y
let impl_u128__saturating_add (x y: u128) : u128 = Rust_primitives.Arithmetic.saturating_add_u128 x y
let impl_u128__overflowing_add (x y: u128) : (u128 & bool) = Rust_primitives.Arithmetic.overflowing_add_u128 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_u128__checked_add (x y: u128) : Core_models.Option.t_Option u128 = if (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option u128 else Core_models.Option.Option_None <: Core_models.Option.t_Option u128
let impl_u128__wrapping_sub (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_sub_u128 x y
let impl_u128__saturating_sub (x y: u128) : u128 = Rust_primitives.Arithmetic.saturating_sub_u128 x y
let impl_u128__overflowing_sub (x y: u128) : (u128 & bool) = Rust_primitives.Arithmetic.overflowing_sub_u128 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_u128__checked_sub (x y: u128) : Core_models.Option.t_Option u128 = if (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option u128 else Core_models.Option.Option_None <: Core_models.Option.t_Option u128
let impl_u128__wrapping_mul (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_mul_u128 x y
let impl_u128__saturating_mul (x y: u128) : u128 = Rust_primitives.Arithmetic.saturating_mul_u128 x y
let impl_u128__overflowing_mul (x y: u128) : (u128 & bool) = Rust_primitives.Arithmetic.overflowing_mul_u128 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_u128__checked_mul (x y: u128) : Core_models.Option.t_Option u128 = if (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option u128 else Core_models.Option.Option_None <: Core_models.Option.t_Option u128
let impl_u128__pow (x: u128) (exp: u32) : u128 = Rust_primitives.Arithmetic.pow_u128 x exp
let impl_u128__count_ones (x: u128) : u32 = Rust_primitives.Arithmetic.count_ones_u128 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_u128__rotate_right': x: u128 -> n: u32 -> u128
unfold let impl_u128__rotate_right = impl_u128__rotate_right'
assume val impl_u128__rotate_left': x: u128 -> n: u32 -> u128
unfold let impl_u128__rotate_left = impl_u128__rotate_left'
assume val impl_u128__leading_zeros': x: u128 -> u32
unfold let impl_u128__leading_zeros = impl_u128__leading_zeros'
assume val impl_u128__ilog2': x: u128 -> u32
unfold let impl_u128__ilog2 = impl_u128__ilog2'
assume val impl_u128__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result u128 Core_models.Num.Error.t_ParseIntError
unfold let impl_u128__from_str_radix = impl_u128__from_str_radix'
// A `u128` converts to/from a 16-byte array in either endianness.
assume val impl_u128__from_be_bytes': bytes: t_Array u8 (mk_usize 16) -> u128
unfold let impl_u128__from_be_bytes = impl_u128__from_be_bytes'
assume val impl_u128__from_le_bytes': bytes: t_Array u8 (mk_usize 16) -> u128
unfold let impl_u128__from_le_bytes = impl_u128__from_le_bytes'
assume val impl_u128__to_be_bytes': bytes: u128 -> t_Array u8 (mk_usize 16)
unfold let impl_u128__to_be_bytes = impl_u128__to_be_bytes'
assume val impl_u128__to_le_bytes': bytes: u128 -> t_Array u8 (mk_usize 16)
unfold let impl_u128__to_le_bytes = impl_u128__to_le_bytes'
// Euclidean remainder; requires a nonzero divisor (continues on next line).
let impl_u128__rem_euclid (x y: u128) : Prims.Pure u128 (requires y <>.
mk_u128 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_u128 x y
// --- Models of Rust's `usize` inherent methods (machine-generated by hax). ---
// usize is platform-sized: MAX and BITS come from the primitives
// (v_USIZE_MAX / v_SIZE_BITS) rather than being literal constants.
let impl_usize__MIN: usize = mk_usize 0
let impl_usize__MAX: usize = Rust_primitives.Arithmetic.v_USIZE_MAX
let impl_usize__BITS: u32 = Rust_primitives.Arithmetic.v_SIZE_BITS
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_usize__wrapping_add (x y: usize) : usize = Rust_primitives.Arithmetic.wrapping_add_usize x y
let impl_usize__saturating_add (x y: usize) : usize = Rust_primitives.Arithmetic.saturating_add_usize x y
let impl_usize__overflowing_add (x y: usize) : (usize & bool) = Rust_primitives.Arithmetic.overflowing_add_usize x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_usize__checked_add (x y: usize) : Core_models.Option.t_Option usize = if (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option usize else Core_models.Option.Option_None <: Core_models.Option.t_Option usize
let impl_usize__wrapping_sub (x y: usize) : usize = Rust_primitives.Arithmetic.wrapping_sub_usize x y
let impl_usize__saturating_sub (x y: usize) : usize = Rust_primitives.Arithmetic.saturating_sub_usize x y
let impl_usize__overflowing_sub (x y: usize) : (usize & bool) = Rust_primitives.Arithmetic.overflowing_sub_usize x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_usize__checked_sub (x y: usize) : Core_models.Option.t_Option usize = if (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option usize else Core_models.Option.Option_None <: Core_models.Option.t_Option usize
let impl_usize__wrapping_mul (x y: usize) : usize = Rust_primitives.Arithmetic.wrapping_mul_usize x y
let impl_usize__saturating_mul (x y: usize) : usize = Rust_primitives.Arithmetic.saturating_mul_usize x y
let impl_usize__overflowing_mul (x y: usize) : (usize & bool) = Rust_primitives.Arithmetic.overflowing_mul_usize x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_usize__checked_mul (x y: usize) : Core_models.Option.t_Option usize = if (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option usize else Core_models.Option.Option_None <: Core_models.Option.t_Option usize
let impl_usize__pow (x: usize) (exp: u32) : usize = Rust_primitives.Arithmetic.pow_usize x exp
let impl_usize__count_ones (x: usize) : u32 = Rust_primitives.Arithmetic.count_ones_usize x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_usize__rotate_right': x: usize -> n: u32 -> usize
unfold let impl_usize__rotate_right = impl_usize__rotate_right'
assume val impl_usize__rotate_left': x: usize -> n: u32 -> usize
unfold let impl_usize__rotate_left = impl_usize__rotate_left'
assume val impl_usize__leading_zeros': x: usize -> u32
unfold let impl_usize__leading_zeros = impl_usize__leading_zeros'
assume val impl_usize__ilog2': x: usize -> u32
unfold let impl_usize__ilog2 = impl_usize__ilog2'
assume val impl_usize__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result usize Core_models.Num.Error.t_ParseIntError
unfold let impl_usize__from_str_radix = impl_usize__from_str_radix'
// NOTE(review): byte conversions here fix an 8-byte (64-bit) representation
// for usize, even though MAX/BITS above are kept abstract — confirm intended.
assume val impl_usize__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> usize
unfold let impl_usize__from_be_bytes = impl_usize__from_be_bytes'
assume val impl_usize__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> usize
unfold let impl_usize__from_le_bytes = impl_usize__from_le_bytes'
assume val impl_usize__to_be_bytes': bytes: usize -> t_Array u8 (mk_usize 8)
unfold let impl_usize__to_be_bytes = impl_usize__to_be_bytes'
assume val impl_usize__to_le_bytes': bytes: usize -> t_Array u8 (mk_usize 8)
unfold let impl_usize__to_le_bytes = impl_usize__to_le_bytes'
// Euclidean remainder; requires a nonzero divisor (continues on next line).
let impl_usize__rem_euclid (x y: usize) : Prims.Pure usize (requires y <>.
mk_usize 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_usize x y
// --- Models of Rust's `i8` inherent methods (machine-generated by hax). ---
// Range constants: i8 spans [-128, 127] and is 8 bits wide.
let impl_i8__MIN: i8 = mk_i8 (-128)
let impl_i8__MAX: i8 = mk_i8 127
let impl_i8__BITS: u32 = mk_u32 8
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_i8__wrapping_add (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_add_i8 x y
let impl_i8__saturating_add (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_add_i8 x y
let impl_i8__overflowing_add (x y: i8) : (i8 & bool) = Rust_primitives.Arithmetic.overflowing_add_i8 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_i8__checked_add (x y: i8) : Core_models.Option.t_Option i8 = if (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option i8 else Core_models.Option.Option_None <: Core_models.Option.t_Option i8
let impl_i8__wrapping_sub (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_sub_i8 x y
let impl_i8__saturating_sub (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_sub_i8 x y
let impl_i8__overflowing_sub (x y: i8) : (i8 & bool) = Rust_primitives.Arithmetic.overflowing_sub_i8 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_i8__checked_sub (x y: i8) : Core_models.Option.t_Option i8 = if (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option i8 else Core_models.Option.Option_None <: Core_models.Option.t_Option i8
let impl_i8__wrapping_mul (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_mul_i8 x y
let impl_i8__saturating_mul (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_mul_i8 x y
let impl_i8__overflowing_mul (x y: i8) : (i8 & bool) = Rust_primitives.Arithmetic.overflowing_mul_i8 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_i8__checked_mul (x y: i8) : Core_models.Option.t_Option i8 = if (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option i8 else Core_models.Option.Option_None <: Core_models.Option.t_Option i8
let impl_i8__pow (x: i8) (exp: u32) : i8 = Rust_primitives.Arithmetic.pow_i8 x exp
let impl_i8__count_ones (x: i8) : u32 = Rust_primitives.Arithmetic.count_ones_i8 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_i8__rotate_right': x: i8 -> n: u32 -> i8
unfold let impl_i8__rotate_right = impl_i8__rotate_right'
assume val impl_i8__rotate_left': x: i8 -> n: u32 -> i8
unfold let impl_i8__rotate_left = impl_i8__rotate_left'
assume val impl_i8__leading_zeros': x: i8 -> u32
unfold let impl_i8__leading_zeros = impl_i8__leading_zeros'
assume val impl_i8__ilog2': x: i8 -> u32
unfold let impl_i8__ilog2 = impl_i8__ilog2'
assume val impl_i8__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result i8 Core_models.Num.Error.t_ParseIntError
unfold let impl_i8__from_str_radix = impl_i8__from_str_radix'
// An `i8` converts to/from a 1-byte array in either endianness.
assume val impl_i8__from_be_bytes': bytes: t_Array u8 (mk_usize 1) -> i8
unfold let impl_i8__from_be_bytes = impl_i8__from_be_bytes'
assume val impl_i8__from_le_bytes': bytes: t_Array u8 (mk_usize 1) -> i8
unfold let impl_i8__from_le_bytes = impl_i8__from_le_bytes'
assume val impl_i8__to_be_bytes': bytes: i8 -> t_Array u8 (mk_usize 1)
unfold let impl_i8__to_be_bytes = impl_i8__to_be_bytes'
assume val impl_i8__to_le_bytes': bytes: i8 -> t_Array u8 (mk_usize 1)
unfold let impl_i8__to_le_bytes = impl_i8__to_le_bytes'
// Euclidean remainder; the precondition requires a nonzero divisor.
let impl_i8__rem_euclid (x y: i8) : Prims.Pure i8 (requires y <>. mk_i8 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_i8 x y
// Absolute value; the precondition x > MIN excludes x = -128, whose absolute
// value 128 exceeds MAX = 127 (continues on the following source line).
let impl_i8__abs (x: i8) : Prims.Pure i8 (requires x >.
impl_i8__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_i8 x
// --- Models of Rust's `i16` inherent methods (machine-generated by hax). ---
// Range constants: i16 spans [-32768, 32767] and is 16 bits wide.
let impl_i16__MIN: i16 = mk_i16 (-32768)
let impl_i16__MAX: i16 = mk_i16 32767
let impl_i16__BITS: u32 = mk_u32 16
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_i16__wrapping_add (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_add_i16 x y
let impl_i16__saturating_add (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_add_i16 x y
let impl_i16__overflowing_add (x y: i16) : (i16 & bool) = Rust_primitives.Arithmetic.overflowing_add_i16 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_i16__checked_add (x y: i16) : Core_models.Option.t_Option i16 = if (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option i16 else Core_models.Option.Option_None <: Core_models.Option.t_Option i16
let impl_i16__wrapping_sub (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_sub_i16 x y
let impl_i16__saturating_sub (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_sub_i16 x y
let impl_i16__overflowing_sub (x y: i16) : (i16 & bool) = Rust_primitives.Arithmetic.overflowing_sub_i16 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_i16__checked_sub (x y: i16) : Core_models.Option.t_Option i16 = if (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option i16 else Core_models.Option.Option_None <: Core_models.Option.t_Option i16
let impl_i16__wrapping_mul (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_mul_i16 x y
let impl_i16__saturating_mul (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_mul_i16 x y
let impl_i16__overflowing_mul (x y: i16) : (i16 & bool) = Rust_primitives.Arithmetic.overflowing_mul_i16 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_i16__checked_mul (x y: i16) : Core_models.Option.t_Option i16 = if (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option i16 else Core_models.Option.Option_None <: Core_models.Option.t_Option i16
let impl_i16__pow (x: i16) (exp: u32) : i16 = Rust_primitives.Arithmetic.pow_i16 x exp
let impl_i16__count_ones (x: i16) : u32 = Rust_primitives.Arithmetic.count_ones_i16 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_i16__rotate_right': x: i16 -> n: u32 -> i16
unfold let impl_i16__rotate_right = impl_i16__rotate_right'
assume val impl_i16__rotate_left': x: i16 -> n: u32 -> i16
unfold let impl_i16__rotate_left = impl_i16__rotate_left'
assume val impl_i16__leading_zeros': x: i16 -> u32
unfold let impl_i16__leading_zeros = impl_i16__leading_zeros'
assume val impl_i16__ilog2': x: i16 -> u32
unfold let impl_i16__ilog2 = impl_i16__ilog2'
assume val impl_i16__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result i16 Core_models.Num.Error.t_ParseIntError
unfold let impl_i16__from_str_radix = impl_i16__from_str_radix'
// An `i16` converts to/from a 2-byte array in either endianness.
assume val impl_i16__from_be_bytes': bytes: t_Array u8 (mk_usize 2) -> i16
unfold let impl_i16__from_be_bytes = impl_i16__from_be_bytes'
assume val impl_i16__from_le_bytes': bytes: t_Array u8 (mk_usize 2) -> i16
unfold let impl_i16__from_le_bytes = impl_i16__from_le_bytes'
assume val impl_i16__to_be_bytes': bytes: i16 -> t_Array u8 (mk_usize 2)
unfold let impl_i16__to_be_bytes = impl_i16__to_be_bytes'
assume val impl_i16__to_le_bytes': bytes: i16 -> t_Array u8 (mk_usize 2)
unfold let impl_i16__to_le_bytes = impl_i16__to_le_bytes'
// Euclidean remainder; the precondition requires a nonzero divisor.
let impl_i16__rem_euclid (x y: i16) : Prims.Pure i16 (requires y <>. mk_i16 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_i16 x y
// Absolute value; the precondition x > MIN excludes x = -32768, whose absolute
// value 32768 exceeds MAX = 32767 (continues on the following source line).
let impl_i16__abs (x: i16) : Prims.Pure i16 (requires x >.
impl_i16__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_i16 x
// --- Models of Rust's `i32` inherent methods (machine-generated by hax). ---
// Range constants: i32 spans [-2147483648, 2147483647] and is 32 bits wide.
let impl_i32__MIN: i32 = mk_i32 (-2147483648)
let impl_i32__MAX: i32 = mk_i32 2147483647
let impl_i32__BITS: u32 = mk_u32 32
// wrapping/saturating/overflowing arithmetic defers to the primitives.
let impl_i32__wrapping_add (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_add_i32 x y
let impl_i32__saturating_add (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_add_i32 x y
let impl_i32__overflowing_add (x y: i32) : (i32 & bool) = Rust_primitives.Arithmetic.overflowing_add_i32 x y
// checked_add returns Some (x +! y) exactly when the mathematical sum, taken
// over unbounded integers (Hax_lib.Int), lies within [MIN, MAX]; else None.
let impl_i32__checked_add (x y: i32) : Core_models.Option.t_Option i32 = if (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! y) <: Core_models.Option.t_Option i32 else Core_models.Option.Option_None <: Core_models.Option.t_Option i32
let impl_i32__wrapping_sub (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_sub_i32 x y
let impl_i32__saturating_sub (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_sub_i32 x y
let impl_i32__overflowing_sub (x y: i32) : (i32 & bool) = Rust_primitives.Arithmetic.overflowing_sub_i32 x y
// checked_sub: same unbounded-integer range check as checked_add.
let impl_i32__checked_sub (x y: i32) : Core_models.Option.t_Option i32 = if (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option i32 else Core_models.Option.Option_None <: Core_models.Option.t_Option i32
let impl_i32__wrapping_mul (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_mul_i32 x y
let impl_i32__saturating_mul (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_mul_i32 x y
let impl_i32__overflowing_mul (x y: i32) : (i32 & bool) = Rust_primitives.Arithmetic.overflowing_mul_i32 x y
// checked_mul: same unbounded-integer range check as checked_add.
let impl_i32__checked_mul (x y: i32) : Core_models.Option.t_Option i32 = if (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! y) <: Core_models.Option.t_Option i32 else Core_models.Option.Option_None <: Core_models.Option.t_Option i32
let impl_i32__pow (x: i32) (exp: u32) : i32 = Rust_primitives.Arithmetic.pow_i32 x exp
let impl_i32__count_ones (x: i32) : u32 = Rust_primitives.Arithmetic.count_ones_i32 x
// Bit-manipulation and conversion operations are abstract (`assume val`),
// aliased to the public names via `unfold let`.
assume val impl_i32__rotate_right': x: i32 -> n: u32 -> i32
unfold let impl_i32__rotate_right = impl_i32__rotate_right'
assume val impl_i32__rotate_left': x: i32 -> n: u32 -> i32
unfold let impl_i32__rotate_left = impl_i32__rotate_left'
assume val impl_i32__leading_zeros': x: i32 -> u32
unfold let impl_i32__leading_zeros = impl_i32__leading_zeros'
assume val impl_i32__ilog2': x: i32 -> u32
unfold let impl_i32__ilog2 = impl_i32__ilog2'
assume val impl_i32__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result i32 Core_models.Num.Error.t_ParseIntError
unfold let impl_i32__from_str_radix = impl_i32__from_str_radix'
// An `i32` converts to/from a 4-byte array in either endianness.
assume val impl_i32__from_be_bytes': bytes: t_Array u8 (mk_usize 4) -> i32
unfold let impl_i32__from_be_bytes = impl_i32__from_be_bytes'
assume val impl_i32__from_le_bytes': bytes: t_Array u8 (mk_usize 4) -> i32
unfold let impl_i32__from_le_bytes = impl_i32__from_le_bytes'
assume val impl_i32__to_be_bytes': bytes: i32 -> t_Array u8 (mk_usize 4)
unfold let impl_i32__to_be_bytes = impl_i32__to_be_bytes'
assume val impl_i32__to_le_bytes': bytes: i32 -> t_Array u8 (mk_usize 4)
unfold let impl_i32__to_le_bytes = impl_i32__to_le_bytes'
// Euclidean remainder; the precondition requires a nonzero divisor.
let impl_i32__rem_euclid (x y: i32) : Prims.Pure i32 (requires y <>. mk_i32 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_i32 x y
// Absolute value; the precondition x > MIN excludes x = -2147483648, whose
// absolute value exceeds MAX (continues on the following source line).
let impl_i32__abs (x: i32) : Prims.Pure i32 (requires x >.
impl_i32__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_i32 x let impl_i64__MIN: i64 = mk_i64 (-9223372036854775808) let impl_i64__MAX: i64 = mk_i64 9223372036854775807 let impl_i64__BITS: u32 = mk_u32 64 let impl_i64__wrapping_add (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_add_i64 x y let impl_i64__saturating_add (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_add_i64 x y let impl_i64__overflowing_add (x y: i64) : (i64 & bool) = Rust_primitives.Arithmetic.overflowing_add_i64 x y let impl_i64__checked_add (x y: i64) : Core_models.Option.t_Option i64 = if (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i64 else Core_models.Option.Option_None <: Core_models.Option.t_Option i64 let impl_i64__wrapping_sub (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_sub_i64 x y let impl_i64__saturating_sub (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_sub_i64 x y let impl_i64__overflowing_sub (x y: i64) : (i64 & bool) = Rust_primitives.Arithmetic.overflowing_sub_i64 x y let impl_i64__checked_sub (x y: i64) : Core_models.Option.t_Option i64 = if (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! y) <: Core_models.Option.t_Option i64 else Core_models.Option.Option_None <: Core_models.Option.t_Option i64 let impl_i64__wrapping_mul (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_mul_i64 x y let impl_i64__saturating_mul (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_mul_i64 x y let impl_i64__overflowing_mul (x y: i64) : (i64 & bool) = Rust_primitives.Arithmetic.overflowing_mul_i64 x y let impl_i64__checked_mul (x y: i64) : Core_models.Option.t_Option i64 = if (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i64 else Core_models.Option.Option_None <: Core_models.Option.t_Option i64 let impl_i64__pow (x: i64) (exp: u32) : i64 = Rust_primitives.Arithmetic.pow_i64 x exp let impl_i64__count_ones (x: i64) : u32 = Rust_primitives.Arithmetic.count_ones_i64 x assume val impl_i64__rotate_right': x: i64 -> n: u32 -> i64 unfold let impl_i64__rotate_right = impl_i64__rotate_right' assume val impl_i64__rotate_left': x: i64 -> n: u32 -> i64 unfold let impl_i64__rotate_left = impl_i64__rotate_left' assume val impl_i64__leading_zeros': x: i64 -> u32 unfold let impl_i64__leading_zeros = impl_i64__leading_zeros' assume val impl_i64__ilog2': x: i64 -> u32 unfold let impl_i64__ilog2 = impl_i64__ilog2' assume val impl_i64__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result i64 Core_models.Num.Error.t_ParseIntError unfold let impl_i64__from_str_radix = impl_i64__from_str_radix' assume val impl_i64__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> i64 unfold let impl_i64__from_be_bytes = impl_i64__from_be_bytes' assume val impl_i64__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> i64 unfold let impl_i64__from_le_bytes = impl_i64__from_le_bytes' assume val impl_i64__to_be_bytes': bytes: i64 -> t_Array u8 (mk_usize 8) unfold let impl_i64__to_be_bytes = impl_i64__to_be_bytes' assume val impl_i64__to_le_bytes': bytes: i64 -> t_Array u8 (mk_usize 8) unfold let impl_i64__to_le_bytes = impl_i64__to_le_bytes' let impl_i64__rem_euclid (x y: i64) : Prims.Pure i64 (requires y <>. mk_i64 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_i64 x y let impl_i64__abs (x: i64) : Prims.Pure i64 (requires x >. 
impl_i64__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_i64 x let impl_i128__MIN: i128 = mk_i128 (-170141183460469231731687303715884105728) let impl_i128__MAX: i128 = mk_i128 170141183460469231731687303715884105727 let impl_i128__BITS: u32 = mk_u32 128 let impl_i128__wrapping_add (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_add_i128 x y let impl_i128__saturating_add (x y: i128) : i128 = Rust_primitives.Arithmetic.saturating_add_i128 x y let impl_i128__overflowing_add (x y: i128) : (i128 & bool) = Rust_primitives.Arithmetic.overflowing_add_i128 x y let impl_i128__checked_add (x y: i128) : Core_models.Option.t_Option i128 = if (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i128 else Core_models.Option.Option_None <: Core_models.Option.t_Option i128 let impl_i128__wrapping_sub (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_sub_i128 x y let impl_i128__saturating_sub (x y: i128) : i128 = Rust_primitives.Arithmetic.saturating_sub_i128 x y let impl_i128__overflowing_sub (x y: i128) : (i128 & bool) = Rust_primitives.Arithmetic.overflowing_sub_i128 x y let impl_i128__checked_sub (x y: i128) : Core_models.Option.t_Option i128 = if (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i128 else Core_models.Option.Option_None <: Core_models.Option.t_Option i128 let impl_i128__wrapping_mul (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_mul_i128 x y let impl_i128__saturating_mul (x y: i128) : i128 = Rust_primitives.Arithmetic.saturating_mul_i128 x y let impl_i128__overflowing_mul (x y: i128) : (i128 & bool) = Rust_primitives.Arithmetic.overflowing_mul_i128 x y let impl_i128__checked_mul (x y: i128) : Core_models.Option.t_Option i128 = if (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i128 else Core_models.Option.Option_None <: Core_models.Option.t_Option i128 let impl_i128__pow (x: i128) (exp: u32) : i128 = Rust_primitives.Arithmetic.pow_i128 x exp let impl_i128__count_ones (x: i128) : u32 = Rust_primitives.Arithmetic.count_ones_i128 x assume val impl_i128__rotate_right': x: i128 -> n: u32 -> i128 unfold let impl_i128__rotate_right = impl_i128__rotate_right' assume val impl_i128__rotate_left': x: i128 -> n: u32 -> i128 unfold let impl_i128__rotate_left = impl_i128__rotate_left' assume val impl_i128__leading_zeros': x: i128 -> u32 unfold let impl_i128__leading_zeros = impl_i128__leading_zeros' assume val impl_i128__ilog2': x: i128 -> u32 unfold let impl_i128__ilog2 = impl_i128__ilog2' assume val impl_i128__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result i128 Core_models.Num.Error.t_ParseIntError unfold let impl_i128__from_str_radix = impl_i128__from_str_radix' assume val impl_i128__from_be_bytes': bytes: t_Array u8 (mk_usize 16) -> i128 unfold let impl_i128__from_be_bytes = impl_i128__from_be_bytes' assume val impl_i128__from_le_bytes': bytes: t_Array u8 (mk_usize 16) -> i128 unfold let impl_i128__from_le_bytes = impl_i128__from_le_bytes' assume val impl_i128__to_be_bytes': bytes: i128 -> t_Array u8 (mk_usize 16) unfold let impl_i128__to_be_bytes = impl_i128__to_be_bytes' assume val impl_i128__to_le_bytes': bytes: i128 -> t_Array u8 (mk_usize 16) unfold let impl_i128__to_le_bytes = impl_i128__to_le_bytes' let impl_i128__rem_euclid (x y: i128) : Prims.Pure i128 (requires y <>. mk_i128 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_i128 x y let impl_i128__abs (x: i128) : Prims.Pure i128 (requires x >. 
impl_i128__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_i128 x let impl_isize__MIN: isize = Rust_primitives.Arithmetic.v_ISIZE_MIN let impl_isize__MAX: isize = Rust_primitives.Arithmetic.v_ISIZE_MAX let impl_isize__BITS: u32 = Rust_primitives.Arithmetic.v_SIZE_BITS let impl_isize__wrapping_add (x y: isize) : isize = Rust_primitives.Arithmetic.wrapping_add_isize x y let impl_isize__saturating_add (x y: isize) : isize = Rust_primitives.Arithmetic.saturating_add_isize x y let impl_isize__overflowing_add (x y: isize) : (isize & bool) = Rust_primitives.Arithmetic.overflowing_add_isize x y let impl_isize__checked_add (x y: isize) : Core_models.Option.t_Option isize = if (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option isize else Core_models.Option.Option_None <: Core_models.Option.t_Option isize let impl_isize__wrapping_sub (x y: isize) : isize = Rust_primitives.Arithmetic.wrapping_sub_isize x y let impl_isize__saturating_sub (x y: isize) : isize = Rust_primitives.Arithmetic.saturating_sub_isize x y let impl_isize__overflowing_sub (x y: isize) : (isize & bool) = Rust_primitives.Arithmetic.overflowing_sub_isize x y let impl_isize__checked_sub (x y: isize) : Core_models.Option.t_Option isize = if (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) - (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option isize else Core_models.Option.Option_None <: Core_models.Option.t_Option isize let impl_isize__wrapping_mul (x y: isize) : isize = Rust_primitives.Arithmetic.wrapping_mul_isize x y let impl_isize__saturating_mul (x y: isize) : isize = Rust_primitives.Arithmetic.saturating_mul_isize x y let impl_isize__overflowing_mul (x y: isize) : (isize & bool) = Rust_primitives.Arithmetic.overflowing_mul_isize x y let impl_isize__checked_mul (x y: isize) : Core_models.Option.t_Option isize = if (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <= ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int) then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option isize else Core_models.Option.Option_None <: Core_models.Option.t_Option isize let impl_isize__pow (x: isize) (exp: u32) : isize = Rust_primitives.Arithmetic.pow_isize x exp let impl_isize__count_ones (x: isize) : u32 = Rust_primitives.Arithmetic.count_ones_isize x assume val impl_isize__rotate_right': x: isize -> n: u32 -> isize unfold let impl_isize__rotate_right = impl_isize__rotate_right' assume val impl_isize__rotate_left': x: isize -> n: u32 -> isize unfold let impl_isize__rotate_left = impl_isize__rotate_left' assume val impl_isize__leading_zeros': x: isize -> u32 unfold let impl_isize__leading_zeros = impl_isize__leading_zeros' assume val impl_isize__ilog2': x: isize -> u32 unfold let impl_isize__ilog2 = impl_isize__ilog2' assume val impl_isize__from_str_radix': src: string -> radix: u32 -> Core_models.Result.t_Result isize Core_models.Num.Error.t_ParseIntError unfold let impl_isize__from_str_radix = impl_isize__from_str_radix' assume val impl_isize__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> isize unfold let impl_isize__from_be_bytes = impl_isize__from_be_bytes' assume val impl_isize__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> isize unfold let impl_isize__from_le_bytes = impl_isize__from_le_bytes' assume val impl_isize__to_be_bytes': bytes: isize -> t_Array u8 (mk_usize 8) unfold let impl_isize__to_be_bytes = impl_isize__to_be_bytes' assume val impl_isize__to_le_bytes': bytes: isize -> t_Array u8 (mk_usize 8) unfold let impl_isize__to_le_bytes = impl_isize__to_le_bytes' let impl_isize__rem_euclid (x y: isize) : Prims.Pure isize (requires y <>. mk_isize 0) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.rem_euclid_isize x y let impl_isize__abs (x: isize) : Prims.Pure isize (requires x >. 
impl_isize__MIN) (fun _ -> Prims.l_True) = Rust_primitives.Arithmetic.abs_isize x

(* `Core_models.Default.t_Default` instances for every machine-integer type:
   `default ()` is the zero of each type, with trivially-true pre/post-conditions. *)

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_18: Core_models.Default.t_Default u8 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: u8) -> true);
    f_default = fun (_: Prims.unit) -> mk_u8 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_19: Core_models.Default.t_Default u16 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: u16) -> true);
    f_default = fun (_: Prims.unit) -> mk_u16 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_20: Core_models.Default.t_Default u32 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: u32) -> true);
    f_default = fun (_: Prims.unit) -> mk_u32 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_21: Core_models.Default.t_Default u64 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: u64) -> true);
    f_default = fun (_: Prims.unit) -> mk_u64 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_22: Core_models.Default.t_Default u128 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: u128) -> true);
    f_default = fun (_: Prims.unit) -> mk_u128 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_23: Core_models.Default.t_Default usize =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: usize) -> true);
    f_default = fun (_: Prims.unit) -> mk_usize 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_24: Core_models.Default.t_Default i8 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: i8) -> true);
    f_default = fun (_: Prims.unit) -> mk_i8 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_25: Core_models.Default.t_Default i16 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: i16) -> true);
    f_default = fun (_: Prims.unit) -> mk_i16 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_26: Core_models.Default.t_Default i32 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: i32) -> true);
    f_default = fun (_: Prims.unit) -> mk_i32 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_27: Core_models.Default.t_Default i64 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: i64) -> true);
    f_default = fun (_: Prims.unit) -> mk_i64 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_28: Core_models.Default.t_Default i128 =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: i128) -> true);
    f_default = fun (_: Prims.unit) -> mk_i128 0
  }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_29: Core_models.Default.t_Default isize =
  {
    f_default_pre = (fun (_: Prims.unit) -> true);
    f_default_post = (fun (_: Prims.unit) (out: isize) -> true);
    f_default = fun (_: Prims.unit) -> mk_isize 0
  }

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Arith.fsti
================================================
module Core_models.Ops.Arith
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::AddAssign` (`+=`): since this is a pure model,
   `f_add_assign` returns the updated receiver instead of mutating in place. *)
class t_AddAssign (v_Self: Type0) (v_Rhs: Type0) = {
  f_add_assign_pre:v_Self -> v_Rhs -> Type0;
  f_add_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;
  f_add_assign:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure v_Self (f_add_assign_pre x0 x1) (fun result -> f_add_assign_post x0 x1 result)
}

(* Model of `core::ops::SubAssign` (`-=`). *)
class t_SubAssign (v_Self: Type0) (v_Rhs: Type0) = {
  f_sub_assign_pre:v_Self -> v_Rhs -> Type0;
  f_sub_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;
  f_sub_assign:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure v_Self (f_sub_assign_pre x0 x1) (fun result -> f_sub_assign_post x0 x1 result)
}

(* Model of `core::ops::MulAssign` (`*=`). *)
class t_MulAssign (v_Self: Type0) (v_Rhs: Type0) = {
  f_mul_assign_pre:v_Self -> v_Rhs -> Type0;
  f_mul_assign_post:v_Self ->
v_Rhs -> v_Self -> Type0;
  f_mul_assign:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure v_Self (f_mul_assign_pre x0 x1) (fun result -> f_mul_assign_post x0 x1 result)
}

(* Model of `core::ops::DivAssign` (`/=`). *)
class t_DivAssign (v_Self: Type0) (v_Rhs: Type0) = {
  f_div_assign_pre:v_Self -> v_Rhs -> Type0;
  f_div_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;
  f_div_assign:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure v_Self (f_div_assign_pre x0 x1) (fun result -> f_div_assign_post x0 x1 result)
}

(* Model of `core::ops::RemAssign` (`%=`). *)
class t_RemAssign (v_Self: Type0) (v_Rhs: Type0) = {
  f_rem_assign_pre:v_Self -> v_Rhs -> Type0;
  f_rem_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;
  f_rem_assign:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure v_Self (f_rem_assign_pre x0 x1) (fun result -> f_rem_assign_post x0 x1 result)
}

(* `AddAssign`/`SubAssign` instances for u8..u64, declared here in the
   interface (`.fsti`) and registered for typeclass resolution. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl:t_AddAssign u8 u8

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1:t_SubAssign u8 u8

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_2:t_AddAssign u16 u16

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3:t_SubAssign u16 u16

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4:t_AddAssign u32 u32

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_5:t_SubAssign u32 u32

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_6:t_AddAssign u64 u64

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_7:t_SubAssign u64 u64

(* Model of `core::ops::Add`; `f_Output` plays the role of the associated
   `Output` type and is excluded from method resolution via `no_method`. *)
class t_Add (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_add_pre:v_Self -> v_Rhs -> Type0;
  f_add_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_add:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_add_pre x0 x1) (fun result -> f_add_post x0 x1 result)
}

(* Model of `core::ops::Sub`. *)
class t_Sub (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_sub_pre:v_Self -> v_Rhs -> Type0;
  f_sub_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_sub:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_sub_pre x0 x1) (fun result -> f_sub_post x0 x1 result)
}

(* Model of `core::ops::Mul`. *)
class t_Mul (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_mul_pre:v_Self -> v_Rhs -> Type0;
  f_mul_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_mul:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_mul_pre x0 x1) (fun result -> f_mul_post x0 x1 result)
}

(* Model of `core::ops::Div`. *)
class t_Div (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_div_pre:v_Self -> v_Rhs -> Type0;
  f_div_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_div:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_div_pre x0 x1) (fun result -> f_div_post x0 x1 result)
}

(* Model of `core::ops::Neg` (unary minus); the only unary class here. *)
class t_Neg (v_Self: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_neg_pre:v_Self -> Type0;
  f_neg_post:v_Self -> f_Output -> Type0;
  f_neg:x0: v_Self
    -> Prims.Pure f_Output (f_neg_pre x0) (fun result -> f_neg_post x0 result)
}

(* Model of `core::ops::Rem` (`%`). *)
class t_Rem (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_rem_pre:v_Self -> v_Rhs -> Type0;
  f_rem_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_rem:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_rem_pre x0 x1) (fun result -> f_rem_post x0 x1 result)
}

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Bit.fsti
================================================
module Core_models.Ops.Bit
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::Shr` (`>>`). *)
class t_Shr (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_shr_pre:v_Self -> v_Rhs -> Type0;
  f_shr_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_shr:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_shr_pre x0 x1) (fun result -> f_shr_post x0 x1 result)
}

(* Model of `core::ops::Shl` (`<<`). *)
class t_Shl (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_shl_pre:v_Self -> v_Rhs -> Type0;
  f_shl_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_shl:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_shl_pre x0 x1) (fun result -> f_shl_post x0 x1 result)
}

(* Model of `core::ops::BitXor` (`^`). *)
class t_BitXor
(v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_bitxor_pre:v_Self -> v_Rhs -> Type0;
  f_bitxor_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_bitxor:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_bitxor_pre x0 x1) (fun result -> f_bitxor_post x0 x1 result)
}

(* Model of `core::ops::BitAnd` (`&`). *)
class t_BitAnd (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_bitand_pre:v_Self -> v_Rhs -> Type0;
  f_bitand_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_bitand:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_bitand_pre x0 x1) (fun result -> f_bitand_post x0 x1 result)
}

(* Model of `core::ops::BitOr` (`|`). *)
class t_BitOr (v_Self: Type0) (v_Rhs: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_bitor_pre:v_Self -> v_Rhs -> Type0;
  f_bitor_post:v_Self -> v_Rhs -> f_Output -> Type0;
  f_bitor:x0: v_Self -> x1: v_Rhs
    -> Prims.Pure f_Output (f_bitor_pre x0 x1) (fun result -> f_bitor_post x0 x1 result)
}

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Control_flow.fst
================================================
module Core_models.Ops.Control_flow
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::ControlFlow<B, C>`: `Continue` carries a `v_C`,
   `Break` carries a `v_B`. *)
type t_ControlFlow (v_B: Type0) (v_C: Type0) =
  | ControlFlow_Continue : v_C -> t_ControlFlow v_B v_C
  | ControlFlow_Break : v_B -> t_ControlFlow v_B v_C

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Deref.fst
================================================
module Core_models.Ops.Deref
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::Deref`; `f_Target` is the associated target type. *)
class t_Deref (v_Self: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Target:Type0;
  f_deref_pre:v_Self -> Type0;
  f_deref_post:v_Self -> f_Target -> Type0;
  f_deref:x0: v_Self
    -> Prims.Pure f_Target (f_deref_pre x0) (fun result -> f_deref_post x0 result)
}

(* Blanket identity instance: in this model every type dereferences to itself. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl (#v_T: Type0) : t_Deref v_T = {
  f_Target = v_T;
  f_deref_pre = (fun (self: v_T) -> true);
  f_deref_post = (fun (self: v_T) (out: v_T) -> true);
  f_deref = fun (self: v_T) -> self
}

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Drop.fst
================================================
module Core_models.Ops.Drop
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::Drop`: `f_drop` consumes the value and returns a
   `v_Self` (the pure-model encoding of the by-mutable-reference drop). *)
class t_Drop (v_Self: Type0) = {
  f_drop_pre:v_Self -> Type0;
  f_drop_post:v_Self -> v_Self -> Type0;
  f_drop:x0: v_Self
    -> Prims.Pure v_Self (f_drop_pre x0) (fun result -> f_drop_post x0 result)
}

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Function.fst
================================================
module Core_models.Ops.Function
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Rust_primitives

(* Model of `core::ops::FnOnce`. The refinement `Type0{true ==> pred}` forces
   any declared precondition to be provable, so calls are unconditional. *)
class t_FnOnce (v_Self: Type0) (v_Args: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;
  f_call_once_pre:self_: v_Self -> args: v_Args -> pred: Type0{true ==> pred};
  f_call_once_post:v_Self -> v_Args -> f_Output -> Type0;
  f_call_once:x0: v_Self -> x1: v_Args
    -> Prims.Pure f_Output (f_call_once_pre x0 x1) (fun result -> f_call_once_post x0 x1 result)
}

(* Model of `core::ops::Fn`, with its `t_FnOnce` superclass stored in the
   `_super_i0` field; `f_call`'s result type projects the superclass Output. *)
class t_Fn (v_Self: Type0) (v_Args: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_FnOnce v_Self v_Args;
  f_call_pre:self_: v_Self -> args: v_Args -> pred: Type0{true ==> pred};
  f_call_post:v_Self -> v_Args -> (_super_i0).f_Output -> Type0;
  f_call:x0: v_Self -> x1: v_Args
    -> Prims.Pure (_super_i0).f_Output (f_call_pre x0 x1) (fun result -> f_call_post x0 x1 result)
}

(* Superclass projection registered as an instance, so a `t_Fn` dictionary
   also resolves `t_FnOnce` constraints. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let _ = fun (v_Self:Type0) (v_Args:Type0) {|i: t_Fn v_Self v_Args|} -> i._super_i0

(* Unary F* arrows implement `FnOnce` by direct application. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_2 (#v_Arg #v_Out: Type0) : t_FnOnce (v_Arg -> v_Out) v_Arg = {
  f_Output = v_Out;
  f_call_once_pre = (fun (self: (v_Arg -> v_Out)) (arg: v_Arg) -> true);
  f_call_once_post =
(fun (self: (v_Arg -> v_Out)) (arg: v_Arg) (out: v_Out) -> true);
  f_call_once = fun (self: (v_Arg -> v_Out)) (arg: v_Arg) -> self arg
}

(* `FnOnce` instance for dependent arrows `_:t -> u`, marked `unfold`; unlike
   the instance above, its postcondition pins the result to `x0 x1`. *)
unfold instance fnonce_arrow_binder t u : t_FnOnce (_:t -> u) t = {
  f_Output = u;
  f_call_once_pre = (fun _ _ -> true);
  f_call_once_post = (fun (x0: (_:t -> u)) (x1: t) (res: u) -> res == x0 x1);
  f_call_once = (fun (x0: (_:t -> u)) (x1: t) -> x0 x1);
}

(* Binary curried arrows implement `FnOnce` on the uncurried pair argument. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl (#v_Arg1 #v_Arg2 #v_Out: Type0)
    : t_FnOnce (v_Arg1 -> v_Arg2 -> v_Out) (v_Arg1 & v_Arg2) = {
  f_Output = v_Out;
  f_call_once_pre = (fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) -> true);
  f_call_once_post =
    (fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) (out: v_Out) -> true);
  f_call_once = fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) -> self arg._1 arg._2
}

(* Ternary curried arrows implement `FnOnce` on the uncurried triple argument. *)
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_1 (#v_Arg1 #v_Arg2 #v_Arg3 #v_Out: Type0)
    : t_FnOnce (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out) (v_Arg1 & v_Arg2 & v_Arg3) = {
  f_Output = v_Out;
  f_call_once_pre =
    (fun (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out)) (arg: (v_Arg1 & v_Arg2 & v_Arg3)) -> true);
  f_call_once_post =
    (fun (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out)) (arg: (v_Arg1 & v_Arg2 & v_Arg3)) (out: v_Out) -> true);
  f_call_once = fun (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out)) (arg: (v_Arg1 & v_Arg2 & v_Arg3)) -> self arg._1 arg._2 arg._3
}

================================================
FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.IndexMut.fst
================================================
module Core_models.Ops.Index.IndexMut

(* Hand-written model of `core::ops::IndexMut`: `f_index_mut` takes the new
   element (`v:f_Input`) and returns the updated container, with the index
   guarded by the `in_range` refinement. *)
class t_IndexMut t_Self t_Idx = {
  f_Input: Type0;
  in_range: t_Self -> t_Idx -> Type0;
  f_index_mut: s:t_Self -> i:t_Idx{in_range s i} -> v:f_Input -> t_Self;
}

open Rust_primitives

(* Arrays indexed by machine integers: bounds are checked through `v` (the
   integer's mathematical value) and the update is `Seq.upd`. *)
instance impl__index_mut t l n: t_IndexMut (t_Array t l) (int_t n) = {
  f_Input = t;
  in_range = (fun (s: t_Array t l) (i: int_t n) -> v i >= 0 && v i < v l);
  f_index_mut = (fun s i x -> Seq.upd s (v
i) x); } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.Index_mut.fst ================================================ module Core_models.Ops.Index.Index_mut #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives (* item error backend: The mutation of this &mut is not allowed here. This is discussed in issue https://github.com/hacspec/hax/issues/420. Please upvote or comment this issue if you see this error message. Note: the error was labeled with context `DirectAndMut`.  Last available AST for this item: #[allow(dead_code)] #[feature(register_tool)] #[register_tool(_hax)] trait t_IndexMut where _: core_models::ops::index::t_Index, { #[allow(dead_code)] #[feature(register_tool)] #[register_tool(_hax)] fn f_index_mut( _: Self, _: Idx, ) -> tuple2; } Last AST: /** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id = { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "core_models"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "core_models"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "core_models"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "core_models"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "core_models"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "ops"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "ops"); disambiguator = 0 }; { Types.data = (Types.TypeNs "index"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "ops"); disambiguator = 0 }; { Types.data = (Types.TypeNs 
"index"); disambiguator = 0 }; { Types.data = (Types.TypeNs "index_mut"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "ops"); disambiguator = 0 }; { Types.data = (Types.TypeNs "index"); disambiguator = 0 }; { Types.data = (Types.TypeNs "index_mut"); disambiguator = 0 }; { Types.data = (Types.TypeNs "IndexMut"); disambiguator = 0 }] } }; moved = None; suffix = None }) */ const _: () = (); *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.fst ================================================ module Core_models.Ops.Index #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Index (v_Self: Type0) (v_Idx: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0; f_index_pre:v_Self -> v_Idx -> Type0; f_index_post:v_Self -> v_Idx -> f_Output -> Type0; f_index:x0: v_Self -> x1: v_Idx -> Prims.Pure f_Output (f_index_pre x0 x1) (fun result -> f_index_post x0 x1 result) } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Range.fst ================================================ module Core_models.Ops.Range #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_RangeTo (v_T: Type0) = { f_end:v_T } type t_RangeFrom (v_T: Type0) = { f_start:v_T } type t_Range (v_T: Type0) = { f_start:v_T; f_end:v_T } type t_RangeFull = | RangeFull : t_RangeFull [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u8) = { f_Item = u8; f_next_pre = (fun (self: t_Range u8) -> true); f_next_post = (fun (self: t_Range u8) (out: (t_Range u8 & Core_models.Option.t_Option u8)) -> true); f_next = fun (self: t_Range u8) -> let (self: t_Range u8), (hax_temp_output: Core_models.Option.t_Option u8) = if self.f_start >=. 
self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u8) <: (t_Range u8 & Core_models.Option.t_Option u8) else let res:u8 = self.f_start in let self:t_Range u8 = { self with f_start = self.f_start +! mk_u8 1 } <: t_Range u8 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u8) <: (t_Range u8 & Core_models.Option.t_Option u8) in self, hax_temp_output <: (t_Range u8 & Core_models.Option.t_Option u8) }
(* Same iterator scheme, for u16 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u16) = { f_Item = u16; f_next_pre = (fun (self: t_Range u16) -> true); f_next_post = (fun (self: t_Range u16) (out: (t_Range u16 & Core_models.Option.t_Option u16)) -> true); f_next = fun (self: t_Range u16) -> let (self: t_Range u16), (hax_temp_output: Core_models.Option.t_Option u16) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u16) <: (t_Range u16 & Core_models.Option.t_Option u16) else let res:u16 = self.f_start in let self:t_Range u16 = { self with f_start = self.f_start +! mk_u16 1 } <: t_Range u16 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u16) <: (t_Range u16 & Core_models.Option.t_Option u16) in self, hax_temp_output <: (t_Range u16 & Core_models.Option.t_Option u16) }
(* Same iterator scheme, for u32 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u32) = { f_Item = u32; f_next_pre = (fun (self: t_Range u32) -> true); f_next_post = (fun (self: t_Range u32) (out: (t_Range u32 & Core_models.Option.t_Option u32)) -> true); f_next = fun (self: t_Range u32) -> let (self: t_Range u32), (hax_temp_output: Core_models.Option.t_Option u32) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u32) <: (t_Range u32 & Core_models.Option.t_Option u32) else let res:u32 = self.f_start in let self:t_Range u32 = { self with f_start = self.f_start +! mk_u32 1 } <: t_Range u32 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u32) <: (t_Range u32 & Core_models.Option.t_Option u32) in self, hax_temp_output <: (t_Range u32 & Core_models.Option.t_Option u32) }
(* Same iterator scheme, for u64 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u64) = { f_Item = u64; f_next_pre = (fun (self: t_Range u64) -> true); f_next_post = (fun (self: t_Range u64) (out: (t_Range u64 & Core_models.Option.t_Option u64)) -> true); f_next = fun (self: t_Range u64) -> let (self: t_Range u64), (hax_temp_output: Core_models.Option.t_Option u64) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u64) <: (t_Range u64 & Core_models.Option.t_Option u64) else let res:u64 = self.f_start in let self:t_Range u64 = { self with f_start = self.f_start +! mk_u64 1 } <: t_Range u64 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u64) <: (t_Range u64 & Core_models.Option.t_Option u64) in self, hax_temp_output <: (t_Range u64 & Core_models.Option.t_Option u64) }
(* Same iterator scheme, for u128 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u128) = { f_Item = u128; f_next_pre = (fun (self: t_Range u128) -> true); f_next_post = (fun (self: t_Range u128) (out: (t_Range u128 & Core_models.Option.t_Option u128)) -> true); f_next = fun (self: t_Range u128) -> let (self: t_Range u128), (hax_temp_output: Core_models.Option.t_Option u128) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u128) <: (t_Range u128 & Core_models.Option.t_Option u128) else let res:u128 = self.f_start in let self:t_Range u128 = { self with f_start = self.f_start +! mk_u128 1 } <: t_Range u128 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u128) <: (t_Range u128 & Core_models.Option.t_Option u128) in self, hax_temp_output <: (t_Range u128 & Core_models.Option.t_Option u128) }
(* Same iterator scheme, for usize ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_5: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range usize) = { f_Item = usize; f_next_pre = (fun (self: t_Range usize) -> true); f_next_post = (fun (self: t_Range usize) (out: (t_Range usize & Core_models.Option.t_Option usize)) -> true); f_next = fun (self: t_Range usize) -> let (self: t_Range usize), (hax_temp_output: Core_models.Option.t_Option usize) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option usize) <: (t_Range usize & Core_models.Option.t_Option usize) else let res:usize = self.f_start in let self:t_Range usize = { self with f_start = self.f_start +! mk_usize 1 } <: t_Range usize in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option usize) <: (t_Range usize & Core_models.Option.t_Option usize) in self, hax_temp_output <: (t_Range usize & Core_models.Option.t_Option usize) }
(* Same iterator scheme, for i8 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i8) = { f_Item = i8; f_next_pre = (fun (self: t_Range i8) -> true); f_next_post = (fun (self: t_Range i8) (out: (t_Range i8 & Core_models.Option.t_Option i8)) -> true); f_next = fun (self: t_Range i8) -> let (self: t_Range i8), (hax_temp_output: Core_models.Option.t_Option i8) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i8) <: (t_Range i8 & Core_models.Option.t_Option i8) else let res:i8 = self.f_start in let self:t_Range i8 = { self with f_start = self.f_start +! mk_i8 1 } <: t_Range i8 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i8) <: (t_Range i8 & Core_models.Option.t_Option i8) in self, hax_temp_output <: (t_Range i8 & Core_models.Option.t_Option i8) }
(* Same iterator scheme, for i16 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_7: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i16) = { f_Item = i16; f_next_pre = (fun (self: t_Range i16) -> true); f_next_post = (fun (self: t_Range i16) (out: (t_Range i16 & Core_models.Option.t_Option i16)) -> true); f_next = fun (self: t_Range i16) -> let (self: t_Range i16), (hax_temp_output: Core_models.Option.t_Option i16) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i16) <: (t_Range i16 & Core_models.Option.t_Option i16) else let res:i16 = self.f_start in let self:t_Range i16 = { self with f_start = self.f_start +! mk_i16 1 } <: t_Range i16 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i16) <: (t_Range i16 & Core_models.Option.t_Option i16) in self, hax_temp_output <: (t_Range i16 & Core_models.Option.t_Option i16) }
(* Same iterator scheme, for i32 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_8: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i32) = { f_Item = i32; f_next_pre = (fun (self: t_Range i32) -> true); f_next_post = (fun (self: t_Range i32) (out: (t_Range i32 & Core_models.Option.t_Option i32)) -> true); f_next = fun (self: t_Range i32) -> let (self: t_Range i32), (hax_temp_output: Core_models.Option.t_Option i32) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: (t_Range i32 & Core_models.Option.t_Option i32) else let res:i32 = self.f_start in let self:t_Range i32 = { self with f_start = self.f_start +! mk_i32 1 } <: t_Range i32 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i32) <: (t_Range i32 & Core_models.Option.t_Option i32) in self, hax_temp_output <: (t_Range i32 & Core_models.Option.t_Option i32) }
(* Same iterator scheme, for i64 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_9: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i64) = { f_Item = i64; f_next_pre = (fun (self: t_Range i64) -> true); f_next_post = (fun (self: t_Range i64) (out: (t_Range i64 & Core_models.Option.t_Option i64)) -> true); f_next = fun (self: t_Range i64) -> let (self: t_Range i64), (hax_temp_output: Core_models.Option.t_Option i64) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i64) <: (t_Range i64 & Core_models.Option.t_Option i64) else let res:i64 = self.f_start in let self:t_Range i64 = { self with f_start = self.f_start +! mk_i64 1 } <: t_Range i64 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i64) <: (t_Range i64 & Core_models.Option.t_Option i64) in self, hax_temp_output <: (t_Range i64 & Core_models.Option.t_Option i64) }
(* Same iterator scheme, for i128 ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_10: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i128) = { f_Item = i128; f_next_pre = (fun (self: t_Range i128) -> true); f_next_post = (fun (self: t_Range i128) (out: (t_Range i128 & Core_models.Option.t_Option i128)) -> true); f_next = fun (self: t_Range i128) -> let (self: t_Range i128), (hax_temp_output: Core_models.Option.t_Option i128) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i128) <: (t_Range i128 & Core_models.Option.t_Option i128) else let res:i128 = self.f_start in let self:t_Range i128 = { self with f_start = self.f_start +! mk_i128 1 } <: t_Range i128 in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i128) <: (t_Range i128 & Core_models.Option.t_Option i128) in self, hax_temp_output <: (t_Range i128 & Core_models.Option.t_Option i128) }
(* Same iterator scheme, for isize ranges. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_11: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range isize) = { f_Item = isize; f_next_pre = (fun (self: t_Range isize) -> true); f_next_post = (fun (self: t_Range isize) (out: (t_Range isize & Core_models.Option.t_Option isize)) -> true); f_next = fun (self: t_Range isize) -> let (self: t_Range isize), (hax_temp_output: Core_models.Option.t_Option isize) = if self.f_start >=. self.f_end then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option isize) <: (t_Range isize & Core_models.Option.t_Option isize) else let res:isize = self.f_start in let self:t_Range isize = { self with f_start = self.f_start +! mk_isize 1 } <: t_Range isize in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option isize) <: (t_Range isize & Core_models.Option.t_Option isize) in self, hax_temp_output <: (t_Range isize & Core_models.Option.t_Option isize) }
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Ops.Try_trait.fst ================================================
module Core_models.Ops.Try_trait #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* Model of core::ops::FromResidual: builds a Self from a residual value, guarded by instance-supplied pre/postconditions. *)
class t_FromResidual (v_Self: Type0) (v_R: Type0) = { f_from_residual_pre:v_R -> Type0; f_from_residual_post:v_R -> v_Self -> Type0; f_from_residual:x0: v_R -> Prims.Pure v_Self (f_from_residual_pre x0) (fun result -> f_from_residual_post x0 result) }
(* Model of core::ops::Try (the ? operator): [f_branch] splits a value into ControlFlow of residual vs. output; [f_from_output] re-wraps an output. *)
class t_Try (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0; [@@@ FStar.Tactics.Typeclasses.no_method]f_Residual:Type0; f_from_output_pre:f_Output -> Type0; f_from_output_post:f_Output -> v_Self -> Type0; f_from_output:x0: f_Output -> Prims.Pure v_Self (f_from_output_pre x0) (fun result -> f_from_output_post x0 result); f_branch_pre:v_Self -> Type0; f_branch_post:v_Self -> Core_models.Ops.Control_flow.t_ControlFlow f_Residual f_Output -> Type0; f_branch:x0: v_Self -> Prims.Pure (Core_models.Ops.Control_flow.t_ControlFlow f_Residual f_Output) (f_branch_pre x0) (fun result -> f_branch_post x0 result) }
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Option.fst ================================================
module Core_models.Option #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* This module only re-exports the Option type and its combinators from the bundled definitions in Core_models.Bundle. *)
include Core_models.Bundle {t_Option as t_Option} include Core_models.Bundle {Option_Some as Option_Some} include Core_models.Bundle {Option_None as Option_None} include Core_models.Bundle {impl__is_some as impl__is_some} include Core_models.Bundle {impl__is_some_and as impl__is_some_and} include
Core_models.Bundle {impl__is_none as impl__is_none} include Core_models.Bundle {impl__is_none_or as impl__is_none_or} include Core_models.Bundle {impl__as_ref as impl__as_ref} include Core_models.Bundle {impl__expect as impl__expect} include Core_models.Bundle {impl__unwrap as impl__unwrap} include Core_models.Bundle {impl__unwrap_or as impl__unwrap_or} include Core_models.Bundle {impl__unwrap_or_else as impl__unwrap_or_else} include Core_models.Bundle {impl__unwrap_or_default as impl__unwrap_or_default} include Core_models.Bundle {impl__map as impl__map} include Core_models.Bundle {impl__map_or as impl__map_or} include Core_models.Bundle {impl__map_or_else as impl__map_or_else} include Core_models.Bundle {impl__map_or_default as impl__map_or_default} include Core_models.Bundle {impl__ok_or as impl__ok_or} include Core_models.Bundle {impl__ok_or_else as impl__ok_or_else} include Core_models.Bundle {impl__and_then as impl__and_then} include Core_models.Bundle {impl__take as impl__take}
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Panicking.Internal.fsti ================================================
module Core_models.Panicking.Internal #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* Panic is unreachable by construction: the precondition is [false], so verified callers can never actually invoke it. *)
val panic: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T (requires false) (fun _ -> Prims.l_True)
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Panicking.fst ================================================
module Core_models.Panicking #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* All panic entry points are assumed with precondition [false] and return the empty type t_Never; the unfold aliases give them their Rust-facing names. *)
assume val panic_explicit': Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True) unfold let panic_explicit = panic_explicit' assume val panic': e_msg: string -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True) unfold let panic = panic' assume val panic_fmt': e_fmt: Core_models.Fmt.t_Arguments -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True) unfold let panic_fmt = panic_fmt'
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Result.fst ================================================
module Core_models.Result #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* This module only re-exports the Result type and its combinators from Core_models.Bundle, renaming the result-specific bundle names to the usual impl__ names. *)
include Core_models.Bundle {t_Result as t_Result} include Core_models.Bundle {Result_Ok as Result_Ok} include Core_models.Bundle {Result_Err as Result_Err} include Core_models.Bundle {impl__unwrap__from__result as impl__unwrap} include Core_models.Bundle {impl__unwrap_or__from__result as impl__unwrap_or} include Core_models.Bundle {impl__expect__from__result as impl__expect} include Core_models.Bundle {impl__map__from__result as impl__map} include Core_models.Bundle {impl__map_or__from__result as impl__map_or} include Core_models.Bundle {impl__map_or_else__from__result as impl__map_or_else} include Core_models.Bundle {impl__map_err as impl__map_err} include Core_models.Bundle {impl__is_ok as impl__is_ok} include Core_models.Bundle {impl__and_then__from__result as impl__and_then} include Core_models.Bundle {impl__ok as impl__ok}
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Slice.Iter.fst ================================================
module Core_models.Slice.Iter #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* Chunk iterators keep the chunk size and the not-yet-consumed elements; impl__new / impl_1__new are their constructors. *)
type t_Chunks (v_T: Type0) = { f_cs:usize; f_elements:t_Slice v_T } let impl__new (#v_T: Type0) (cs: usize) (elements: t_Slice v_T) : t_Chunks v_T = { f_cs = cs; f_elements = elements } <: t_Chunks v_T type t_ChunksExact (v_T: Type0) = { f_cs:usize; f_elements:t_Slice v_T } let impl_1__new (#v_T: Type0) (cs: usize) (elements: t_Slice v_T) : t_ChunksExact v_T = { f_cs = cs; f_elements = elements } <: t_ChunksExact v_T
(* Element iterator over a slice, represented as a sequence of the remaining elements. *)
type t_Iter (v_T: Type0) = | Iter :
Rust_primitives.Sequence.t_Seq v_T -> t_Iter v_T
(* Iterator over t_Iter: None when the underlying sequence is empty, otherwise Some of its first element with the state advanced to the tail (seq_slice from index 1). *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Iter v_T) = { f_Item = v_T; f_next_pre = (fun (self: t_Iter v_T) -> true); f_next_post = (fun (self: t_Iter v_T) (out: (t_Iter v_T & Core_models.Option.t_Option v_T)) -> true); f_next = fun (self: t_Iter v_T) -> let (self: t_Iter v_T), (hax_temp_output: Core_models.Option.t_Option v_T) = if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. mk_usize 0 then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T) <: (t_Iter v_T & Core_models.Option.t_Option v_T) else let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in let self:t_Iter v_T = { self with _0 = Rust_primitives.Sequence.seq_slice #v_T self._0 (mk_usize 1) (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) } <: t_Iter v_T in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T) <: (t_Iter v_T & Core_models.Option.t_Option v_T) in self, hax_temp_output <: (t_Iter v_T & Core_models.Option.t_Option v_T) }
(* Iterator over t_Chunks, three cases: no elements left -> None; fewer elements than f_cs -> yield the whole remainder (the final short chunk) and empty the state; otherwise split off the first f_cs elements. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Chunks v_T) = { f_Item = t_Slice v_T; f_next_pre = (fun (self: t_Chunks v_T) -> true); f_next_post = (fun (self: t_Chunks v_T) (out: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))) -> true); f_next = fun (self: t_Chunks v_T) -> let (self: t_Chunks v_T), (hax_temp_output: Core_models.Option.t_Option (t_Slice v_T)) = if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) =. mk_usize 0 then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T)) <: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T)) else if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) <. self.f_cs then let res:t_Slice v_T = self.f_elements in let self:t_Chunks v_T = { self with f_elements = Rust_primitives.Slice.slice_slice #v_T self.f_elements (mk_usize 0) (mk_usize 0) } <: t_Chunks v_T in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T)) <: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T)) else let (res: t_Slice v_T), (new_elements: t_Slice v_T) = Rust_primitives.Slice.slice_split_at #v_T self.f_elements self.f_cs in let self:t_Chunks v_T = { self with f_elements = new_elements } <: t_Chunks v_T in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T)) <: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T)) in self, hax_temp_output <: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T)) }
(* Iterator over t_ChunksExact: unlike Chunks, a remainder shorter than f_cs is dropped (None) rather than yielded. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_ChunksExact v_T) = { f_Item = t_Slice v_T; f_next_pre = (fun (self: t_ChunksExact v_T) -> true); f_next_post = (fun (self: t_ChunksExact v_T) (out: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T))) -> true); f_next = fun (self: t_ChunksExact v_T) -> let (self: t_ChunksExact v_T), (hax_temp_output: Core_models.Option.t_Option (t_Slice v_T)) = if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) <. self.f_cs then self, (Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T)) <: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T)) else let (res: t_Slice v_T), (new_elements: t_Slice v_T) = Rust_primitives.Slice.slice_split_at #v_T self.f_elements self.f_cs in let self:t_ChunksExact v_T = { self with f_elements = new_elements } <: t_ChunksExact v_T in self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T)) <: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T)) in self, hax_temp_output <: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T)) }
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Slice.fst ================================================
module Core_models.Slice #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* Thin wrappers delegating slice operations to Rust_primitives and the iterator constructors above. *)
let impl__len (#v_T: Type0) (s: t_Slice v_T) : usize = Rust_primitives.Slice.slice_length #v_T s let impl__chunks (#v_T: Type0) (s: t_Slice v_T) (cs: usize) : Core_models.Slice.Iter.t_Chunks v_T = Core_models.Slice.Iter.impl__new #v_T cs s let impl__iter (#v_T: Type0) (s: t_Slice v_T) : Core_models.Slice.Iter.t_Iter v_T = Core_models.Slice.Iter.Iter (Rust_primitives.Sequence.seq_from_slice #v_T s) <: Core_models.Slice.Iter.t_Iter v_T let impl__chunks_exact (#v_T: Type0) (s: t_Slice v_T) (cs: usize) : Core_models.Slice.Iter.t_ChunksExact v_T = Core_models.Slice.Iter.impl_1__new #v_T cs s let impl__is_empty (#v_T: Type0) (s: t_Slice v_T) : bool = (impl__len #v_T s <: usize) =.
mk_usize 0
(* Underspecified slice operations: contains / copy_within / binary_search are assumed vals (no model is given); the unfold aliases expose them under their Rust names. *)
assume val impl__contains': #v_T: Type0 -> s: t_Slice v_T -> v: v_T -> bool unfold let impl__contains (#v_T: Type0) = impl__contains' #v_T assume val impl__copy_within': #v_T: Type0 -> #v_R: Type0 -> {| i0: Core_models.Marker.t_Copy v_T |} -> s: t_Slice v_T -> src: v_R -> dest: usize -> t_Slice v_T unfold let impl__copy_within (#v_T #v_R: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) = impl__copy_within' #v_T #v_R #i0 assume val impl__binary_search': #v_T: Type0 -> s: t_Slice v_T -> x: v_T -> Core_models.Result.t_Result usize usize unfold let impl__binary_search (#v_T: Type0) = impl__binary_search' #v_T
(* copy_from_slice: requires equal lengths; implemented as a Mem.replace of s by src, returning the replaced slot. NOTE(review): relies on Rust_primitives.Mem.replace returning (new contents, old value) — confirm against that primitive's definition. *)
let impl__copy_from_slice (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (s src: t_Slice v_T) : Prims.Pure (t_Slice v_T) (requires (impl__len #v_T s <: usize) =. (impl__len #v_T src <: usize)) (fun _ -> Prims.l_True) = let (tmp0: t_Slice v_T), (out: t_Slice v_T) = Rust_primitives.Mem.replace #(t_Slice v_T) s src in let s:t_Slice v_T = tmp0 in let _:t_Slice v_T = out in s
(* clone_from_slice: identical body to copy_from_slice, but only demands t_Clone rather than t_Copy. *)
let impl__clone_from_slice (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T) (s src: t_Slice v_T) : Prims.Pure (t_Slice v_T) (requires (impl__len #v_T s <: usize) =. (impl__len #v_T src <: usize)) (fun _ -> Prims.l_True) = let (tmp0: t_Slice v_T), (out: t_Slice v_T) = Rust_primitives.Mem.replace #(t_Slice v_T) s src in let s:t_Slice v_T = tmp0 in let _:t_Slice v_T = out in s
(* split_at: precondition keeps mid in bounds; split_at_checked turns the same bound into an Option instead of a precondition. *)
let impl__split_at (#v_T: Type0) (s: t_Slice v_T) (mid: usize) : Prims.Pure (t_Slice v_T & t_Slice v_T) (requires mid <=. (impl__len #v_T s <: usize)) (fun _ -> Prims.l_True) = Rust_primitives.Slice.slice_split_at #v_T s mid let impl__split_at_checked (#v_T: Type0) (s: t_Slice v_T) (mid: usize) : Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T) = if mid <=. (impl__len #v_T s <: usize) then Core_models.Option.Option_Some (impl__split_at #v_T s mid) <: Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T) else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T)
(* IntoIterator for slices: just wraps impl__iter. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) : Core_models.Iter.Traits.Collect.t_IntoIterator (t_Slice v_T) = { f_IntoIter = Core_models.Slice.Iter.t_Iter v_T; f_into_iter_pre = (fun (self: t_Slice v_T) -> true); f_into_iter_post = (fun (self: t_Slice v_T) (out: Core_models.Slice.Iter.t_Iter v_T) -> true); f_into_iter = fun (self: t_Slice v_T) -> impl__iter #v_T self }
(* Index instance for start..end ranges: precondition enforces start <= end <= len; indexing is a subslice. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_7 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_Range usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) -> i.Core_models.Ops.Range.f_start <=. i.Core_models.Ops.Range.f_end && i.Core_models.Ops.Range.f_end <=. (impl__len #v_T self_ <: usize)); f_index_post = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) (out: t_Slice v_T) -> true); f_index = fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) -> Rust_primitives.Slice.slice_slice #v_T self i.Core_models.Ops.Range.f_start i.Core_models.Ops.Range.f_end }
(* Index instance for ..end ranges: subslice from 0 to f_end. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_8 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_RangeTo usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) -> i.Core_models.Ops.Range.f_end <=. (impl__len #v_T self_ <: usize)); f_index_post = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) (out: t_Slice v_T) -> true); f_index = fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) -> Rust_primitives.Slice.slice_slice #v_T self (mk_usize 0) i.Core_models.Ops.Range.f_end }
(* Index instance for start.. ranges: subslice from f_start to the slice length. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_9 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_RangeFrom usize) = { f_Output = t_Slice v_T; f_index_pre = (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) -> i.Core_models.Ops.Range.f_start <=. (impl__len #v_T self_ <: usize)); f_index_post = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) (out: t_Slice v_T) -> true ); f_index = fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) -> Rust_primitives.Slice.slice_slice #v_T self i.Core_models.Ops.Range.f_start (Rust_primitives.Slice.slice_length #v_T self <: usize) }
(* Index instance for the full range (..): identity subslice. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_10 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) Core_models.Ops.Range.t_RangeFull = { f_Output = t_Slice v_T; f_index_pre = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) -> true); f_index_post = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) (out: t_Slice v_T) -> true); f_index = fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) -> Rust_primitives.Slice.slice_slice #v_T self (mk_usize 0) (Rust_primitives.Slice.slice_length #v_T self <: usize) }
(* Index instance for a single usize index: precondition i < len, element lookup via slice_index. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_11 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) usize = { f_Output = v_T; f_index_pre = (fun (self_: t_Slice v_T) (i: usize) -> i <. (impl__len #v_T self_ <: usize)); f_index_post = (fun (self: t_Slice v_T) (i: usize) (out: v_T) -> true); f_index = fun (self: t_Slice v_T) (i: usize) -> Rust_primitives.Slice.slice_index #v_T self i }
(* Model of core::slice::SliceIndex: fallible lookup returning an Option. The refinement on f_get_pre ([true ==> pred]) forces instances to supply a precondition that always holds. *)
class t_SliceIndex (v_Self: Type0) (v_T: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0; f_get_pre:self_: v_Self -> slice: v_T -> pred: Type0{true ==> pred}; f_get_post:v_Self -> v_T -> Core_models.Option.t_Option f_Output -> Type0; f_get:x0: v_Self -> x1: v_T -> Prims.Pure (Core_models.Option.t_Option f_Output) (f_get_pre x0 x1) (fun result -> f_get_post x0 x1 result) }
(* slice.get(index): dispatches to the t_SliceIndex instance (note the argument order: the index is the instance's self). *)
let impl__get (#v_T #v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_SliceIndex v_I (t_Slice v_T)) (s: t_Slice v_T) (index: v_I) : Core_models.Option.t_Option i0.f_Output = f_get #v_I #(t_Slice v_T) #FStar.Tactics.Typeclasses.solve index s
(* SliceIndex by usize: Some element when in bounds, None otherwise. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2 (#v_T: Type0) : t_SliceIndex usize (t_Slice v_T) = { f_Output = v_T; f_get_pre = (fun (self: usize) (slice: t_Slice v_T) -> true); f_get_post = (fun (self: usize) (slice: t_Slice v_T) (out: Core_models.Option.t_Option v_T) -> true); f_get = fun (self: usize) (slice: t_Slice v_T) -> if self <. (impl__len #v_T slice <: usize) then Core_models.Option.Option_Some (Rust_primitives.Slice.slice_index #v_T slice self) <: Core_models.Option.t_Option v_T else Core_models.Option.Option_None <: Core_models.Option.t_Option v_T }
(* SliceIndex by the full range: always Some of the whole slice. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3 (#v_T: Type0) : t_SliceIndex Core_models.Ops.Range.t_RangeFull (t_Slice v_T) = { f_Output = t_Slice v_T; f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeFull) (slice: t_Slice v_T) -> true); f_get_post = (fun (self: Core_models.Ops.Range.t_RangeFull) (slice: t_Slice v_T) (out: Core_models.Option.t_Option (t_Slice v_T)) -> true); f_get = fun (self: Core_models.Ops.Range.t_RangeFull) (slice: t_Slice v_T) -> Core_models.Option.Option_Some slice <: Core_models.Option.t_Option (t_Slice v_T) }
(* SliceIndex by start..: Some tail subslice when f_start < len, else None. NOTE(review): uses strict <, so start == len yields None where Rust's get would return Some empty slice — confirm intended. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_RangeFrom usize) (t_Slice v_T) = { f_Output = t_Slice v_T; f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeFrom usize) (slice: t_Slice v_T) -> true); f_get_post = (fun (self: Core_models.Ops.Range.t_RangeFrom usize) (slice: t_Slice v_T) (out: Core_models.Option.t_Option (t_Slice v_T)) -> true); f_get = fun (self: Core_models.Ops.Range.t_RangeFrom usize) (slice: t_Slice v_T) -> if self.Core_models.Ops.Range.f_start <. (impl__len #v_T slice <: usize) then Core_models.Option.Option_Some (Rust_primitives.Slice.slice_slice #v_T slice self.Core_models.Ops.Range.f_start (impl__len #v_T slice <: usize)) <: Core_models.Option.t_Option (t_Slice v_T) else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T) }
(* SliceIndex by ..end: Some prefix when f_end <= len, else None. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_5 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_RangeTo usize) (t_Slice v_T) = { f_Output = t_Slice v_T; f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeTo usize) (slice: t_Slice v_T) -> true); f_get_post = (fun (self: Core_models.Ops.Range.t_RangeTo usize) (slice: t_Slice v_T) (out: Core_models.Option.t_Option (t_Slice v_T)) -> true); f_get = fun (self: Core_models.Ops.Range.t_RangeTo usize) (slice: t_Slice v_T) -> if self.Core_models.Ops.Range.f_end <=. (impl__len #v_T slice <: usize) then Core_models.Option.Option_Some (Rust_primitives.Slice.slice_slice #v_T slice (mk_usize 0) self.Core_models.Ops.Range.f_end) <: Core_models.Option.t_Option (t_Slice v_T) else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T) }
(* SliceIndex by start..end. NOTE(review): the guard is f_start <. f_end (strict), so an empty range returns None — unlike the impl_7 Index precondition, which accepts start == end; confirm this divergence is intended. *)
[@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_Range usize) (t_Slice v_T) = { f_Output = t_Slice v_T; f_get_pre = (fun (self: Core_models.Ops.Range.t_Range usize) (slice: t_Slice v_T) -> true); f_get_post = (fun (self: Core_models.Ops.Range.t_Range usize) (slice: t_Slice v_T) (out: Core_models.Option.t_Option (t_Slice v_T)) -> true); f_get = fun (self: Core_models.Ops.Range.t_Range usize) (slice: t_Slice v_T) -> if self.Core_models.Ops.Range.f_start <. self.Core_models.Ops.Range.f_end && self.Core_models.Ops.Range.f_end <=. (impl__len #v_T slice <: usize) then Core_models.Option.Option_Some (Rust_primitives.Slice.slice_slice #v_T slice self.Core_models.Ops.Range.f_start self.Core_models.Ops.Range.f_end) <: Core_models.Option.t_Option (t_Slice v_T) else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T) }
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Str.Converts.fsti ================================================
module Core_models.Str.Converts #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* UTF-8 validation interface: result is a Result of string or Utf8Error; no model is given here. *)
val from_utf8 (s: t_Slice u8) : Prims.Pure (Core_models.Result.t_Result string Core_models.Str.Error.t_Utf8Error) Prims.l_True (fun _ -> Prims.l_True)
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Str.Error.fsti ================================================
module Core_models.Str.Error #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives
(* Opaque, information-free error token for UTF-8 decoding failures. *)
type t_Utf8Error = | Utf8Error : t_Utf8Error
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Str.Iter.fsti ================================================
module Core_models.Str.Iter #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Split (v_T: Type0) = | Split : v_T -> t_Split v_T
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Str.Traits.fsti ================================================
module Core_models.Str.Traits #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_FromStr (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Err:Type0; f_from_str_pre:string -> Type0; f_from_str_post:string -> Core_models.Result.t_Result v_Self f_Err -> Type0; f_from_str:x0: string -> Prims.Pure (Core_models.Result.t_Result v_Self f_Err) (f_from_str_pre x0) (fun result -> f_from_str_post x0
result) } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl:t_FromStr u64 ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Str.fsti ================================================ module Core_models.Str open Rust_primitives val impl_str__len: string -> usize val impl_str__as_bytes: string -> t_Slice u8 ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.Time.fsti ================================================ module Core_models.Time #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core_models open FStar.Mul val v_NANOS_PER_SEC: u32 val v_NANOS_PER_MILLI: u32 val v_NANOS_PER_MICRO: u32 val v_MILLIS_PER_SEC: u64 val v_MICROS_PER_SEC: u64 val v_SECS_PER_MINUTE: u64 val v_MINS_PER_HOUR: u64 val v_HOURS_PER_DAY: u64 val v_DAYS_PER_WEEK: u64 type t_Duration = { f_secs:u64; f_nanos:Core_models.Num.Niche_types.t_Nanoseconds } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_16:Core_models.Clone.t_Clone t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_17:Core_models.Marker.t_Copy t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_18:Core_models.Marker.t_StructuralPartialEq t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_19:Core_models.Cmp.t_PartialEq t_Duration t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_20:Core_models.Cmp.t_Eq t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_21:Core_models.Cmp.t_PartialOrd t_Duration t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_22:Core_models.Cmp.t_Ord t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_23:Core_models.Hash.t_Hash t_Duration [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_24:Core_models.Default.t_Default t_Duration val impl_Duration__SECOND: t_Duration val impl_Duration__MILLISECOND: t_Duration val impl_Duration__MICROSECOND: t_Duration val impl_Duration__NANOSECOND: t_Duration val impl_Duration__ZERO: t_Duration val 
impl_Duration__MAX: t_Duration val impl_Duration__new (secs: u64) (nanos: u32) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_secs (secs: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_millis (millis: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_micros (micros: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_nanos (nanos: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_weeks (weeks: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_days (days: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_hours (hours: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_mins (mins: u64) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__is_zero (self: t_Duration) : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_secs (self: t_Duration) : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__subsec_millis (self: t_Duration) : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__subsec_micros (self: t_Duration) : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__subsec_nanos (self: t_Duration) : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_millis (self: t_Duration) : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_micros (self: t_Duration) : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_nanos (self: t_Duration) : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__abs_diff (self other: t_Duration) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__checked_add (self rhs: t_Duration) : Prims.Pure 
(Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__saturating_add (self rhs: t_Duration) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__checked_sub (self rhs: t_Duration) : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__saturating_sub (self rhs: t_Duration) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__checked_mul (self: t_Duration) (rhs: u32) : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__saturating_mul (self: t_Duration) (rhs: u32) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__checked_div (self: t_Duration) (rhs: u32) : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_secs_f64 (self: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_secs_f32 (self: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_millis_f64 (self: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__as_millis_f32 (self: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_secs_f64 (secs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_secs_f32 (secs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__mul_f64 (self: t_Duration) (rhs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__mul_f32 (self: t_Duration) (rhs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__div_f64 (self: t_Duration) (rhs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__div_f32 (self: t_Duration) (rhs: float) : Prims.Pure t_Duration Prims.l_True (fun _ -> 
Prims.l_True) val impl_Duration__div_duration_f64 (self rhs: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__div_duration_f32 (self rhs: t_Duration) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_nanos__v_NANOS_PER_SEC: u64 val impl_Duration__from_secs_f64__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__from_secs_f32__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__mul_f64__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__mul_f32__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__div_f64__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__div_f32__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_add__impl_1__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_2:Core_models.Ops.Arith.t_AddAssign t_Duration t_Duration val f_add_assign__impl_2__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_sub__impl_3__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_4:Core_models.Ops.Arith.t_SubAssign t_Duration t_Duration val f_sub_assign__impl_4__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_mul__impl_5__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True 
(fun _ -> Prims.l_True) val f_mul__impl_6__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_7:Core_models.Ops.Arith.t_MulAssign t_Duration u32 val f_mul_assign__impl_7__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_div__impl_8__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_9:Core_models.Ops.Arith.t_DivAssign t_Duration u32 val f_div_assign__impl_9__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) (* [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_10:Core_models.Iter.Traits.Accum.t_Sum t_Duration t_Duration *) val f_sum__impl_10__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) (* [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_11:Core_models.Iter.Traits.Accum.t_Sum t_Duration t_Duration *) val f_sum__impl_11__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_12:Core_models.Fmt.t_Debug t_Duration val f_fmt__impl_12__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val f_fmt__impl_14__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) type t_TryFromFloatSecsErrorKind = | TryFromFloatSecsErrorKind_Negative : t_TryFromFloatSecsErrorKind | TryFromFloatSecsErrorKind_OverflowOrNan : t_TryFromFloatSecsErrorKind type t_TryFromFloatSecsError = { f_kind:t_TryFromFloatSecsErrorKind } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_25:Core_models.Fmt.t_Debug t_TryFromFloatSecsError [@@ FStar.Tactics.Typeclasses.tcinstance] 
val impl_26:Core_models.Clone.t_Clone t_TryFromFloatSecsError [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_27:Core_models.Marker.t_StructuralPartialEq t_TryFromFloatSecsError [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_28:Core_models.Cmp.t_PartialEq t_TryFromFloatSecsError t_TryFromFloatSecsError [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_29:Core_models.Cmp.t_Eq t_TryFromFloatSecsError val impl_TryFromFloatSecsError__description (self: t_TryFromFloatSecsError) : Prims.Pure string Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_14:Core_models.Fmt.t_Display t_TryFromFloatSecsError val t_TryFromFloatSecsErrorKind_cast_to_repr (x: t_TryFromFloatSecsErrorKind) : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_30:Core_models.Fmt.t_Debug t_TryFromFloatSecsErrorKind [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_31:Core_models.Clone.t_Clone t_TryFromFloatSecsErrorKind [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_32:Core_models.Marker.t_StructuralPartialEq t_TryFromFloatSecsErrorKind [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_33:Core_models.Cmp.t_PartialEq t_TryFromFloatSecsErrorKind t_TryFromFloatSecsErrorKind [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_34:Core_models.Cmp.t_Eq t_TryFromFloatSecsErrorKind val impl_Duration__try_from_secs_f32 (secs: float) : Prims.Pure (Core_models.Result.t_Result t_Duration t_TryFromFloatSecsError) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__try_from_secs_f64 (secs: float) : Prims.Pure (Core_models.Result.t_Result t_Duration t_TryFromFloatSecsError) Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__try_from_secs_f32__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) val impl_Duration__try_from_secs_f64__panic_cold_explicit: Prims.unit -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True) (* [@@ 
FStar.Tactics.Typeclasses.tcinstance] let impl_1: Core_models.Ops.Arith.t_Add t_Duration t_Duration = { f_Output = t_Duration; f_Output_11695847888444666345 = FStar.Tactics.Typeclasses.solve; f_add_pre = (fun (self: t_Duration) (rhs: t_Duration) -> true); f_add_post = (fun (self: t_Duration) (rhs: t_Duration) (out: t_Duration) -> true); f_add = fun (self: t_Duration) (rhs: t_Duration) -> () <: t_Duration } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3: Core_models.Ops.Arith.t_Sub t_Duration t_Duration = { f_Output = t_Duration; f_Output_9381071510542709353 = FStar.Tactics.Typeclasses.solve; f_sub_pre = (fun (self: t_Duration) (rhs: t_Duration) -> true); f_sub_post = (fun (self: t_Duration) (rhs: t_Duration) (out: t_Duration) -> true); f_sub = fun (self: t_Duration) (rhs: t_Duration) -> () <: t_Duration } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_5: Core_models.Ops.Arith.t_Mul t_Duration u32 = { f_Output = t_Duration; f_Output_11167888388700478202 = FStar.Tactics.Typeclasses.solve; f_mul_pre = (fun (self: t_Duration) (rhs: u32) -> true); f_mul_post = (fun (self: t_Duration) (rhs: u32) (out: t_Duration) -> true); f_mul = fun (self: t_Duration) (rhs: u32) -> () <: t_Duration } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6: Core_models.Ops.Arith.t_Mul u32 t_Duration = { f_Output = t_Duration; f_Output_11167888388700478202 = FStar.Tactics.Typeclasses.solve; f_mul_pre = (fun (self: u32) (rhs: t_Duration) -> true); f_mul_post = (fun (self: u32) (rhs: t_Duration) (out: t_Duration) -> true); f_mul = fun (self: u32) (rhs: t_Duration) -> () <: t_Duration } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_8: Core_models.Ops.Arith.t_Div t_Duration u32 = { f_Output = t_Duration; f_Output_10117503193521621741 = FStar.Tactics.Typeclasses.solve; f_div_pre = (fun (self: t_Duration) (rhs: u32) -> true); f_div_post = (fun (self: t_Duration) (rhs: u32) (out: t_Duration) -> true); f_div = fun (self: t_Duration) (rhs: u32) -> () <: t_Duration } *) 
================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.TypeClassPlaceHolder.fst ================================================ module Core_models.TypeClassPlaceHolder (* This module defines a dummy type-class that acts as a placeholder for resolution, when an argument is useless. See Core_models.Alloc.Borrow for example. *) class t_Placeholder = { content : unit } instance placeholder : t_Placeholder = { content = () } ================================================ FILE: hax-lib/proof-libs/fstar/core/Core_models.fst ================================================ module Core_models include Rust_primitives include Core_models.Num include Rust_primitives.Notations include Rust_primitives.Hax ================================================ FILE: hax-lib/proof-libs/fstar/core/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HAX_PROOF_LIBS_HOME to be set to the folder containing core, rust_primitives etc. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_HOME ?= $(shell git rev-parse --show-toplevel) FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") HAX_PROOF_LIBS_HOME ?= $(HAX_HOME)/proof-libs/fstar HAX_LIBS_HOME ?= $(HAX_HOME)/hax-lib CACHE_DIR ?= ../core/.cache HINT_DIR ?= ../core/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. Here, we # *extend* the set of relevant files with the tests. ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HAX_PROOF_LIBS_HOME)/rust_primitives $(HAX_PROOF_LIBS_HOME)/core $(HAX_LIBS_HOME)/proofs/fstar/extraction/ FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* 
================================================ FILE: hax-lib/proof-libs/fstar/core/README.md ================================================ # Core (and alloc) library This directory contains a model for the [Core Rust library](https://doc.rust-lang.org/core/): the minimal Rust foundation behind the [standard library of Rust](https://doc.rust-lang.org/std/index.html). This also includes a model for some part of the [`alloc` Rust library](https://doc.rust-lang.org/stable/alloc/). Core is self-contained, and is dependency-free: it links to no upstream or system libraries. Thus, even if it is minimal, it is not small: it is around **75k LoC**, comments excluded. In this directory, you will find the first stage of our approach to `core` in F\*: a hand-written model. Note that this model tries to follow as much as possible the structure and naming found in the Rust core library. The second stage of our approach to `core` is automatic generation with specifications and models. Our plan is to annotate the Rust `core` library with specifications and models written directly as Rust annotations. This will enable automatic generation of `core` models with consistent semantics in all of hax backends (for now F\* and Coq). Note that we already started experimenting with this second approach: hax is already able to digest and generate signature-only F\* for more than 80% of core definitions. 
================================================ FILE: hax-lib/proof-libs/fstar/core/Rand.Distr.Distribution.fsti ================================================ module Rand.Distr.Distribution (* Empty placeholder interface: no declarations yet. *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand.Distr.Integer.fsti ================================================ module Rand.Distr.Integer (* Empty placeholder interface: no declarations yet. *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand.Distributions.Distribution.fsti ================================================ module Rand.Distributions.Distribution (* Empty placeholder interface: no declarations yet. *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand.Distributions.Integer.fsti ================================================ module Rand.Distributions.Integer (* Empty placeholder interface: no declarations yet. *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand.Rng.fsti ================================================ module Rand.Rng (* Empty placeholder interface: no declarations yet. *) ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand_core.Os.fsti ================================================ module Rand_core.Os #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Rand_core in () type t_OsRng = | OsRng : t_OsRng [@@ FStar.Tactics.Typeclasses.tcinstance] val impl:Rand_core.t_RngCore t_OsRng [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_1:Rand_core.t_CryptoRng t_OsRng ================================================ FILE: hax-lib/proof-libs/fstar/core/Rand_core.fsti ================================================ module Rand_core #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_RngCore (v_Self: Type0) = { f_next_u32_pre:v_Self -> Type0; f_next_u32_post:v_Self -> (v_Self & u32) -> Type0; f_next_u32:x0: v_Self -> Prims.Pure (v_Self & u32) (f_next_u32_pre x0) (fun result -> f_next_u32_post x0 result); f_next_u64_pre:v_Self -> Type0; f_next_u64_post:v_Self -> (v_Self & u64) -> Type0; f_next_u64:x0: v_Self -> Prims.Pure (v_Self & u64) (f_next_u64_pre x0) (fun result -> f_next_u64_post x0 result); f_fill_bytes_pre:v_Self -> t_Slice u8 -> Type0; f_fill_bytes_post:v_Self -> t_Slice u8 -> (v_Self & t_Slice u8) -> Type0; f_fill_bytes:x0: v_Self -> x1: t_Slice u8 -> Prims.Pure (v_Self & t_Slice u8) (f_fill_bytes_pre x0 x1) (fun result -> f_fill_bytes_post x0 x1 result) } class t_CryptoRng (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_RngCore v_Self } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_CryptoRng v_Self|} -> i._super_i0 ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti ================================================ module Std.Collections.Hash.Map #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives val t_HashMap (v_K v_V v_S: Type0) : eqtype val impl__new: #v_K: Type0 -> #v_V: Type0 -> Prims.unit -> Prims.Pure (t_HashMap v_K v_V Std.Hash.Random.t_RandomState) Prims.l_True (fun _ -> Prims.l_True) val impl_2__get (#v_K #v_V #v_S #v_Y: Type0) (m: t_HashMap v_K v_V v_S) (k: v_K) : Prims.Pure (Core_models.Option.t_Option 
v_V) Prims.l_True (fun _ -> Prims.l_True) val impl_2__insert (#v_K #v_V #v_S: Type0) (m: t_HashMap v_K v_V v_S) (k: v_K) (v: v_V) : Prims.Pure (t_HashMap v_K v_V v_S & Core_models.Option.t_Option v_V) Prims.l_True (fun _ -> Prims.l_True) ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.F64.fsti ================================================ module Std.F64 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives val impl_f64__powf (x y: float) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True) ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Hash.Random.fsti ================================================ module Std.Hash.Random #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_RandomState = | RandomState : t_RandomState ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Io.Error.fsti ================================================ module Std.Io.Error #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives type t_Error = | Error : t_Error type t_ErrorKind = | ErrorKind_NotFound : t_ErrorKind | ErrorKind_PermissionDenied : t_ErrorKind | ErrorKind_ConnectionRefused : t_ErrorKind | ErrorKind_ConnectionReset : t_ErrorKind | ErrorKind_HostUnreachable : t_ErrorKind | ErrorKind_NetworkUnreachable : t_ErrorKind | ErrorKind_ConnectionAborted : t_ErrorKind | ErrorKind_NotConnected : t_ErrorKind | ErrorKind_AddrInUse : t_ErrorKind | ErrorKind_AddrNotAvailable : t_ErrorKind | ErrorKind_NetworkDown : t_ErrorKind | ErrorKind_BrokenPipe : t_ErrorKind | ErrorKind_AlreadyExists : t_ErrorKind | ErrorKind_WouldBlock : t_ErrorKind | ErrorKind_NotADirectory : t_ErrorKind | ErrorKind_IsADirectory : t_ErrorKind | ErrorKind_DirectoryNotEmpty : t_ErrorKind | ErrorKind_ReadOnlyFilesystem : t_ErrorKind | ErrorKind_FilesystemLoop : t_ErrorKind | 
ErrorKind_StaleNetworkFileHandle : t_ErrorKind | ErrorKind_InvalidInput : t_ErrorKind | ErrorKind_InvalidData : t_ErrorKind | ErrorKind_TimedOut : t_ErrorKind | ErrorKind_WriteZero : t_ErrorKind | ErrorKind_StorageFull : t_ErrorKind | ErrorKind_NotSeekable : t_ErrorKind | ErrorKind_QuotaExceeded : t_ErrorKind | ErrorKind_FileTooLarge : t_ErrorKind | ErrorKind_ResourceBusy : t_ErrorKind | ErrorKind_ExecutableFileBusy : t_ErrorKind | ErrorKind_Deadlock : t_ErrorKind | ErrorKind_CrossesDevices : t_ErrorKind | ErrorKind_TooManyLinks : t_ErrorKind | ErrorKind_InvalidFilename : t_ErrorKind | ErrorKind_ArgumentListTooLong : t_ErrorKind | ErrorKind_Interrupted : t_ErrorKind | ErrorKind_Unsupported : t_ErrorKind | ErrorKind_UnexpectedEof : t_ErrorKind | ErrorKind_OutOfMemory : t_ErrorKind | ErrorKind_InProgress : t_ErrorKind | ErrorKind_Other : t_ErrorKind val t_ErrorKind_cast_to_repr (x: t_ErrorKind) : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True) val impl_Error__kind (self: t_Error) : Prims.Pure t_ErrorKind Prims.l_True (fun _ -> Prims.l_True) ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Io.Impls.fsti ================================================ module Std.Io.Impls #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives [@@ FStar.Tactics.Typeclasses.tcinstance] val impl:Std.Io.t_Read (t_Slice u8) [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_1:Std.Io.t_Write (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Io.Stdio.fsti ================================================ module Std.Io.Stdio #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives val e_print (args: Core_models.Fmt.t_Arguments) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) ================================================ FILE: hax-lib/proof-libs/fstar/core/Std.Io.fsti 
================================================ module Std.Io #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Rust_primitives class t_Read (v_Self: Type0) = { f_read_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred}; f_read_post: self_: v_Self -> buf: t_Slice u8 -> x: (v_Self & t_Slice u8 & Core_models.Result.t_Result usize Std.Io.Error.t_Error) -> pred: Type0 { pred ==> (let (self_e_future: v_Self), (buf_future: t_Slice u8), (_: Core_models.Result.t_Result usize Std.Io.Error.t_Error) = x in (Core_models.Slice.impl__len #u8 buf_future <: usize) =. (Core_models.Slice.impl__len #u8 buf <: usize)) }; f_read:x0: v_Self -> x1: t_Slice u8 -> Prims.Pure (v_Self & t_Slice u8 & Core_models.Result.t_Result usize Std.Io.Error.t_Error) (f_read_pre x0 x1) (fun result -> f_read_post x0 x1 result); f_read_exact_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred}; f_read_exact_post: self_: v_Self -> buf: t_Slice u8 -> x: (v_Self & t_Slice u8 & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) -> pred: Type0 { pred ==> (let (self_e_future: v_Self), (buf_future: t_Slice u8), (_: Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) = x in (Core_models.Slice.impl__len #u8 buf_future <: usize) =. 
(Core_models.Slice.impl__len #u8 buf <: usize)) }; f_read_exact:x0: v_Self -> x1: t_Slice u8 -> Prims.Pure (v_Self & t_Slice u8 & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) (f_read_exact_pre x0 x1) (fun result -> f_read_exact_post x0 x1 result) } class t_Write (v_Self: Type0) = { f_write_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred}; f_write_post: v_Self -> t_Slice u8 -> (v_Self & Core_models.Result.t_Result usize Std.Io.Error.t_Error) -> Type0; f_write:x0: v_Self -> x1: t_Slice u8 -> Prims.Pure (v_Self & Core_models.Result.t_Result usize Std.Io.Error.t_Error) (f_write_pre x0 x1) (fun result -> f_write_post x0 x1 result); f_flush_pre:self_: v_Self -> pred: Type0{true ==> pred}; f_flush_post:v_Self -> (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) -> Type0; f_flush:x0: v_Self -> Prims.Pure (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) (f_flush_pre x0) (fun result -> f_flush_post x0 result); f_write_all_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred}; f_write_all_post: v_Self -> t_Slice u8 -> (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) -> Type0; f_write_all:x0: v_Self -> x1: t_Slice u8 -> Prims.Pure (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) (f_write_all_pre x0 x1) (fun result -> f_write_all_post x0 x1 result) } ================================================ FILE: hax-lib/proof-libs/fstar/hax_lib/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HACL_HOME to be set to your HACL* repo location # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. 
#
# ROOTS contains all the top-level F* files you wish to verify
# The default target `verify` verifies ROOTS and its dependencies
# To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line
#
#
# To make F* emacs mode use the settings in this file, you need to
# add the following lines to your .emacs
#
# (setq-default fstar-executable "/bin/fstar.exe")
# (setq-default fstar-smt-executable "/bin/z3")
#
# (defun my-fstar-compute-prover-args-using-make ()
#   "Construct arguments to pass to F* by calling make."
#   (with-demoted-errors "Error when constructing arg string: %S"
#     (let* ((fname (file-name-nondirectory buffer-file-name))
#            (target (concat fname "-in"))
#            (argstr (car (process-lines "make" "--quiet" target))))
#       (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

# Locations of the hax proof libraries, F* and HACL*; all overridable from
# the environment.
HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar
FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar
HACL_HOME ?= $(HAX_LIBS_HOME)/../../../hacl-star
# Prefer an `fstar.exe` found on PATH; fall back to the FSTAR_HOME checkout.
FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe")
CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache
HINT_DIR ?= $(HAX_LIBS_HOME)/.hints

.PHONY: all verify clean

# Recompute the dependency graph from scratch, then verify everything.
all:
	rm -f .depend && $(MAKE) .depend
	$(MAKE) verify

# By default, we process all the files in the current directory. Here, we
# *extend* the set of relevant files with the tests.
ROOTS = $(wildcard *.fst)

FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib

# --warn_error -331: silence the deprecated-options warning;
# --already_cached: trust checked files for the listed namespaces.
FSTAR_FLAGS = --cmi \
  --warn_error -331 \
  --cache_checked_modules --cache_dir $(CACHE_DIR) \
  --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))

FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)

# Full dependency analysis of all roots, written to .depend.
.depend: $(HINT_DIR) $(CACHE_DIR)
	$(info $(ROOTS))
	$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@

include .depend

$(HINT_DIR):
	mkdir -p $@

$(CACHE_DIR):
	mkdir -p $@

# Verify one module; order-only prerequisites keep directory timestamps
# from forcing re-verification.
$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)
	$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints

verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))

# Targets for interactive mode
%.fst-in:
	$(info $(FSTAR_FLAGS) \
	$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)

%.fsti-in:
	$(info $(FSTAR_FLAGS) \
	$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)

# Clean targets
SHELL=/usr/bin/env bash

clean:
	rm -rf $(CACHE_DIR)/*
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Makefile
================================================
# This is a generically useful Makefile for F* that is self-contained
#
# It is tempting to factor this out into multiple Makefiles but that
# makes it less portable, so resist temptation, or move to a more
# sophisticated build system.
#
# We expect FSTAR_HOME to be set to your FSTAR repo/install directory
# We expect HAX_PROOF_LIBS_HOME to be set to the folder containing core, rust_primitives etc.
# We expect HAX_LIBS_HOME to be set to the hax-lib folder
#
# ROOTS contains all the top-level F* files you wish to verify
# The default target `verify` verifies ROOTS and its dependencies
# To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line
#
#
# To make F* emacs mode use the settings in this file, you need to
# add the following lines to your .emacs
#
# (setq-default fstar-executable "/bin/fstar.exe")
# (setq-default fstar-smt-executable "/bin/z3")
#
# (defun my-fstar-compute-prover-args-using-make ()
#   "Construct arguments to pass to F* by calling make."
#   (with-demoted-errors "Error when constructing arg string: %S"
#     (let* ((fname (file-name-nondirectory buffer-file-name))
#            (target (concat fname "-in"))
#            (argstr (car (process-lines "make" "--quiet" target))))
#       (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

HAX_HOME ?= $(shell git rev-parse --show-toplevel)
# NOTE: FSTAR_HOME refers to HAX_LIBS_HOME, which is assigned below; this is
# fine because make expands recursive (`?=`) variables lazily, at use time.
FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar
FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe")
HAX_PROOF_LIBS_HOME ?= $(HAX_HOME)/proof-libs/fstar
HAX_LIBS_HOME ?= $(HAX_HOME)/hax-lib
CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache
HINT_DIR ?= $(HAX_LIBS_HOME)/.hints

.PHONY: all verify clean

# Recompute the dependency graph from scratch, then verify everything.
all:
	rm -f .depend && $(MAKE) .depend
	$(MAKE) verify

# By default, we process all the files in the current directory. Here, we
# *extend* the set of relevant files with the tests.
ROOTS = $(wildcard *.fst)

FSTAR_INCLUDE_DIRS = $(HAX_PROOF_LIBS_HOME)/rust_primitives $(HAX_PROOF_LIBS_HOME)/core $(HAX_LIBS_HOME)/proofs/fstar/extraction/

FSTAR_FLAGS = --cmi \
  --warn_error -331 \
  --cache_checked_modules --cache_dir $(CACHE_DIR) \
  --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))

FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)

# Full dependency analysis of all roots, written to .depend.
.depend: $(HINT_DIR) $(CACHE_DIR)
	$(info $(ROOTS))
	$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@

include .depend

$(HINT_DIR):
	mkdir -p $@

$(CACHE_DIR):
	mkdir -p $@

$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)
	$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints

verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))

# Targets for interactive mode
%.fst-in:
	$(info $(FSTAR_FLAGS) \
	$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)

%.fsti-in:
	$(info $(FSTAR_FLAGS) \
	$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)

# Clean targets
SHELL=/usr/bin/env bash

clean:
	rm -rf $(CACHE_DIR)/*
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Arithmetic.fsti
================================================
module Rust_primitives.Arithmetic

open FStar.Mul
open Rust_primitives.Integers

/// Models of the Rust integer arithmetic methods (`wrapping_*`,
/// `saturating_*`, `overflowing_*`, `rem_euclid`, `pow`, `count_ones`,
/// `abs`), one monomorphic copy per integer type. `let`s are implemented on
/// top of Rust_primitives.Integers; `val`s are left abstract here.

// --- u8 ---
let wrapping_add_u8 : u8 -> u8 -> u8 = add_mod
let saturating_add_u8 : u8 -> u8 -> u8 = add_sat
val overflowing_add_u8 : u8 -> u8 -> u8 & bool
let wrapping_sub_u8 : u8 -> u8 -> u8 = sub_mod
let saturating_sub_u8 : u8 -> u8 -> u8 = sub_sat
// Wrapping subtraction paired with the borrow flag: on underflow the result
// is brought back in range by adding 2^8.
let overflowing_sub_u8 (x y: u8): u8 & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 8 + sub else sub in
  (mk_u8 out, borrow)
let wrapping_mul_u8 : u8 -> u8 -> u8 = mul_mod
val saturating_mul_u8 : u8 -> u8 -> u8
let overflowing_mul_u8 : u8 -> u8 -> u8 & bool = mul_overflow
// Euclidean remainder; for unsigned types this coincides with `%!`.
let rem_euclid_u8 (x: u8) (y: u8 {v y <> 0}): u8 = x %!
y
val pow_u8 : u8 -> u32 -> u8
val count_ones_u8 : u8 -> r:u32{v r <= 8}

// --- u16 ---
let wrapping_add_u16 : u16 -> u16 -> u16 = add_mod
let saturating_add_u16 : u16 -> u16 -> u16 = add_sat
val overflowing_add_u16 : u16 -> u16 -> u16 & bool
let wrapping_sub_u16 : u16 -> u16 -> u16 = sub_mod
let saturating_sub_u16 : u16 -> u16 -> u16 = sub_sat
let overflowing_sub_u16 (x y: u16): u16 & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 16 + sub else sub in
  (mk_u16 out, borrow)
let wrapping_mul_u16 : u16 -> u16 -> u16 = mul_mod
val saturating_mul_u16 : u16 -> u16 -> u16
let overflowing_mul_u16 : u16 -> u16 -> u16 & bool = mul_overflow
let rem_euclid_u16 (x: u16) (y: u16 {v y <> 0}): u16 = x %! y
// The refinement pins down `pow` only for base 2 and in-range exponents.
val pow_u16 : x:u16 -> y:u32 -> result : u16 {v x == 2 /\ v y < 16 ==> result == mk_u16 (pow2 (v y))}
val count_ones_u16 : u16 -> r:u32{v r <= 16}

// --- u32 ---
let wrapping_add_u32 : u32 -> u32 -> u32 = add_mod
let saturating_add_u32 : u32 -> u32 -> u32 = add_sat
val overflowing_add_u32 : u32 -> u32 -> u32 & bool
let wrapping_sub_u32 : u32 -> u32 -> u32 = sub_mod
let saturating_sub_u32 : u32 -> u32 -> u32 = sub_sat
let overflowing_sub_u32 (x y: u32): u32 & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 32 + sub else sub in
  (mk_u32 out, borrow)
let wrapping_mul_u32 : u32 -> u32 -> u32 = mul_mod
val saturating_mul_u32 : u32 -> u32 -> u32
let overflowing_mul_u32 : u32 -> u32 -> u32 & bool = mul_overflow
let rem_euclid_u32 (x: u32) (y: u32 {v y <> 0}): u32 = x %! y
// NOTE(review): the base-2 spec here only covers exponents `<= 16` (the
// u16 variant above covers `< 16`) — presumably all the exponents used by
// downstream proofs; confirm before relying on larger exponents.
val pow_u32 : x:u32 -> y:u32 -> result : u32 {v x == 2 /\ v y <= 16 ==> result == mk_u32 (pow2 (v y))}
val count_ones_u32 : u32 -> r:u32{v r <= 32}

// --- u64 ---
let wrapping_add_u64 : u64 -> u64 -> u64 = add_mod
let saturating_add_u64 : u64 -> u64 -> u64 = add_sat
val overflowing_add_u64 : u64 -> u64 -> u64 & bool
let wrapping_sub_u64 : u64 -> u64 -> u64 = sub_mod
let saturating_sub_u64 : u64 -> u64 -> u64 = sub_sat
let overflowing_sub_u64 (x y: u64): u64 & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 64 + sub else sub in
  (mk_u64 out, borrow)
let wrapping_mul_u64 : u64 -> u64 -> u64 = mul_mod
val saturating_mul_u64 : u64 -> u64 -> u64
let overflowing_mul_u64 : u64 -> u64 -> u64 & bool = mul_overflow
let rem_euclid_u64 (x: u64) (y: u64 {v y <> 0}): u64 = x %! y
val pow_u64 : u64 -> u32 -> u64
val count_ones_u64 : u64 -> r:u32{v r <= 64}

// --- u128 ---
let wrapping_add_u128 : u128 -> u128 -> u128 = add_mod
let saturating_add_u128 : u128 -> u128 -> u128 = add_sat
val overflowing_add_u128 : u128 -> u128 -> u128 & bool
let wrapping_sub_u128 : u128 -> u128 -> u128 = sub_mod
let saturating_sub_u128 : u128 -> u128 -> u128 = sub_sat
let overflowing_sub_u128 (x y: u128): u128 & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 128 + sub else sub in
  (mk_u128 out, borrow)
let wrapping_mul_u128 : u128 -> u128 -> u128 = mul_mod
val saturating_mul_u128 : u128 -> u128 -> u128
let overflowing_mul_u128 : u128 -> u128 -> u128 & bool = mul_overflow
let rem_euclid_u128 (x: u128) (y: u128 {v y <> 0}): u128 = x %! y
val pow_u128 : u128 -> u32 -> u128
val count_ones_u128 : u128 -> r:u32{v r <= 128}

// --- usize (width is the abstract `size_bits`) ---
let wrapping_add_usize : usize -> usize -> usize = add_mod
let saturating_add_usize : usize -> usize -> usize = add_sat
val overflowing_add_usize : usize -> usize -> usize & bool
let wrapping_sub_usize : usize -> usize -> usize = sub_mod
let saturating_sub_usize : usize -> usize -> usize = sub_sat
let overflowing_sub_usize (x y: usize): usize & bool =
  let sub = v x - v y in
  let borrow = sub < 0 in
  let out = if borrow then pow2 size_bits + sub else sub in
  (mk_usize out, borrow)
let wrapping_mul_usize : usize -> usize -> usize = mul_mod
val saturating_mul_usize : usize -> usize -> usize
let overflowing_mul_usize : usize -> usize -> usize & bool = mul_overflow
let rem_euclid_usize (x: usize) (y: usize {v y <> 0}): usize = x %! y
val pow_usize : usize -> u32 -> usize
val count_ones_usize : usize -> r:u32{v r <= size_bits}

// --- i8 (signed: `overflowing_sub` is abstract, `abs` is provided) ---
let wrapping_add_i8 : i8 -> i8 -> i8 = add_mod
let saturating_add_i8 : i8 -> i8 -> i8 = add_sat
val overflowing_add_i8 : i8 -> i8 -> i8 & bool
let wrapping_sub_i8 : i8 -> i8 -> i8 = sub_mod
let saturating_sub_i8 : i8 -> i8 -> i8 = sub_sat
val overflowing_sub_i8 (x y: i8): i8 & bool
let wrapping_mul_i8 : i8 -> i8 -> i8 = mul_mod
val saturating_mul_i8 : i8 -> i8 -> i8
let overflowing_mul_i8 : i8 -> i8 -> i8 & bool = mul_overflow
let rem_euclid_i8 (x: i8) (y: i8 {v y <> 0}): i8 = x %!
y
val pow_i8 : i8 -> u32 -> i8
val count_ones_i8 : i8 -> r:u32{v r <= 8}
val abs_i8 : i8 -> i8

// --- i16 ---
let wrapping_add_i16 : i16 -> i16 -> i16 = add_mod
let saturating_add_i16 : i16 -> i16 -> i16 = add_sat
val overflowing_add_i16 : i16 -> i16 -> i16 & bool
let wrapping_sub_i16 : i16 -> i16 -> i16 = sub_mod
let saturating_sub_i16 : i16 -> i16 -> i16 = sub_sat
val overflowing_sub_i16 (x y: i16): i16 & bool
let wrapping_mul_i16 : i16 -> i16 -> i16 = mul_mod
val saturating_mul_i16 : i16 -> i16 -> i16
let overflowing_mul_i16 : i16 -> i16 -> i16 & bool = mul_overflow
let rem_euclid_i16 (x: i16) (y: i16 {v y <> 0}): i16 = x %! y
// Base-2 spec; the `pow2_lt_compat` call discharges the in-range obligation
// on `mk_i16 (pow2 (v y))` inside the refinement itself.
val pow_i16 : x: i16 -> y:u32 -> result: i16 {v x == 2 /\ v y < 15 ==> (Math.Lemmas.pow2_lt_compat 15 (v y); result == mk_i16 (pow2 (v y)))}
val count_ones_i16 : i16 -> r:u32{v r <= 16}
val abs_i16 : i16 -> i16

// --- i32 ---
let wrapping_add_i32 : i32 -> i32 -> i32 = add_mod
let saturating_add_i32 : i32 -> i32 -> i32 = add_sat
val overflowing_add_i32 : i32 -> i32 -> i32 & bool
let wrapping_sub_i32 : i32 -> i32 -> i32 = sub_mod
let saturating_sub_i32 : i32 -> i32 -> i32 = sub_sat
val overflowing_sub_i32 (x y: i32): i32 & bool
let wrapping_mul_i32 : i32 -> i32 -> i32 = mul_mod
val saturating_mul_i32 : i32 -> i32 -> i32
let overflowing_mul_i32 : i32 -> i32 -> i32 & bool = mul_overflow
let rem_euclid_i32 (x: i32) (y: i32 {v y <> 0}): i32 = x %! y
val pow_i32 : x : i32 -> y:u32 -> result: i32 {v x == 2 /\ v y <= 16 ==> result == mk_i32 (pow2 (v y))}
val count_ones_i32 : i32 -> r:u32{v r <= 32}
val abs_i32 : i32 -> i32

// --- i64 ---
let wrapping_add_i64 : i64 -> i64 -> i64 = add_mod
let saturating_add_i64 : i64 -> i64 -> i64 = add_sat
val overflowing_add_i64 : i64 -> i64 -> i64 & bool
let wrapping_sub_i64 : i64 -> i64 -> i64 = sub_mod
let saturating_sub_i64 : i64 -> i64 -> i64 = sub_sat
val overflowing_sub_i64 (x y: i64): i64 & bool
let wrapping_mul_i64 : i64 -> i64 -> i64 = mul_mod
val saturating_mul_i64 : i64 -> i64 -> i64
let overflowing_mul_i64 : i64 -> i64 -> i64 & bool = mul_overflow
let rem_euclid_i64 (x: i64) (y: i64 {v y <> 0}): i64 = x %! y
val pow_i64 : i64 -> u32 -> i64
val count_ones_i64 : i64 -> r:u32{v r <= 64}
val abs_i64 : i64 -> i64

// --- i128 ---
let wrapping_add_i128 : i128 -> i128 -> i128 = add_mod
let saturating_add_i128 : i128 -> i128 -> i128 = add_sat
val overflowing_add_i128 : i128 -> i128 -> i128 & bool
let wrapping_sub_i128 : i128 -> i128 -> i128 = sub_mod
let saturating_sub_i128 : i128 -> i128 -> i128 = sub_sat
val overflowing_sub_i128 (x y: i128): i128 & bool
let wrapping_mul_i128 : i128 -> i128 -> i128 = mul_mod
val saturating_mul_i128 : i128 -> i128 -> i128
let overflowing_mul_i128 : i128 -> i128 -> i128 & bool = mul_overflow
let rem_euclid_i128 (x: i128) (y: i128 {v y <> 0}): i128 = x %! y
val pow_i128 : i128 -> u32 -> i128
val count_ones_i128 : i128 -> r:u32{v r <= 128}
val abs_i128 : i128 -> i128

// --- isize ---
let wrapping_add_isize : isize -> isize -> isize = add_mod
let saturating_add_isize : isize -> isize -> isize = add_sat
val overflowing_add_isize : isize -> isize -> isize & bool
let wrapping_sub_isize : isize -> isize -> isize = sub_mod
let saturating_sub_isize : isize -> isize -> isize = sub_sat
val overflowing_sub_isize (x y: isize): isize & bool
let wrapping_mul_isize : isize -> isize -> isize = mul_mod
val saturating_mul_isize : isize -> isize -> isize
let overflowing_mul_isize : isize -> isize -> isize & bool = mul_overflow
let rem_euclid_isize (x: isize) (y: isize {v y <> 0}): isize = x %! y
val pow_isize : isize -> u32 -> isize
val count_ones_isize : isize -> r:u32{v r <= size_bits}
val abs_isize : isize -> isize

// Models of the Rust `MAX`/`MIN`/`BITS` associated constants.
let v_USIZE_MAX = mk_usize max_usize
let v_ISIZE_MAX = mk_isize max_isize
let v_ISIZE_MIN = mk_isize (minint ISIZE)
let v_SIZE_BITS = mk_u32 size_bits

// Unary negation, defined as subtraction from zero.
let neg #t x = zero #t -!
x
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Arrays.fsti
================================================
module Rust_primitives.Arrays

open Rust_primitives.Integers
open FStar.Mul

/// Rust slices and arrays are represented as sequences
type t_Slice t = s:Seq.seq t{Seq.length s <= max_usize}
type t_Array t (l:usize) = s: Seq.seq t { Seq.length s == v l }

/// Length of a slice
let length (#a: Type) (s: t_Slice a): usize = sz (Seq.length s)

/// Check whether a slice contains an item
let contains (#t: eqtype) (s: t_Slice t) (x: t): bool = Seq.mem x s

/// Converts an F* list into an array
val of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint U16}): t_Array t (sz (FStar.List.Tot.length l))

/// Converts a slice into an F* list
val to_list (#t:Type) (s: t_Slice t): list t

/// Maps `f` over an array, preserving its length.
val map_array (#a #b: Type) #n (arr: t_Array a n) (f: a -> b): t_Array b n

/// Creates an array of size `l` using a function `f`
val createi #t (l:usize) (f:(u:usize{u <. l} -> t))
  : Pure (t_Array t l)
    (requires True)
    (ensures (fun res -> (forall i. Seq.index res (v i) == f i)))

/// Maps a partial function `f` over a slice whose elements all satisfy `p`.
unfold let map #a #b #p
  (f:(x:a{p x} -> b))
  (s: t_Slice a {forall (i:nat). i < Seq.length s ==> p (Seq.index s i)}): t_Slice b
  = createi (length s) (fun i -> f (Seq.index s (v i)))

/// Concatenates two slices
let concat #t
  (x:t_Slice t)
  (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype})
  : r:t_Array t (length x +! length y)
  = Seq.append x y

/// Translate indexes of `concat x y` into indexes of `x` or of `y`
val lemma_index_concat #t
  (x:t_Slice t)
  (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype})
  (i:usize{i <. length x +! length y}):
  Lemma (if i <. length x
         then Seq.index (concat x y) (v i) == Seq.index x (v i)
         else Seq.index (concat x y) (v i) == Seq.index y (v (i -! length x)))
  [SMTPat (Seq.index (concat #t x y) (v i))]

/// Take a subslice given `x` a slice and `i` and `j` two indexes
let slice #t
  (x:t_Slice t)
  (i:usize{i <=. length x})
  (j:usize{i <=. j /\ j <=. length x}): r:t_Array t (j -! i)
  = Seq.slice x (v i) (v j)

/// Translate indexes for subslices
val lemma_index_slice #t
  (x:t_Slice t)
  (i:usize{i <=. length x})
  (j:usize{i <=. j /\ j <=. length x})
  (k:usize{k <. j -! i}):
  Lemma (Seq.index (slice x i j) (v k) == Seq.index x (v (i +! k)))
  [SMTPat (Seq.index (slice x i j) (v k))]

/// Introduce bitwise equality principle for sequences
val eq_intro #t (a : Seq.seq t) (b:Seq.seq t{Seq.length a == Seq.length b}):
  Lemma
    (requires forall i. {:pattern Seq.index a i; Seq.index b i}
      i < Seq.length a ==> Seq.index a i == Seq.index b i)
    (ensures Seq.equal a b)
  [SMTPat (Seq.equal a b)]

/// Split a slice in two at index `m`
let split #t (a:t_Slice t) (m:usize{m <=. length a}):
  Pure (t_Array t m & t_Array t (length a -! m))
    True
    (ensures (fun (x,y) ->
      x == slice a (sz 0) m /\
      y == slice a m (length a) /\
      concat #t x y == a))
  = let x = Seq.slice a 0 (v m) in
    let y = Seq.slice a (v m) (Seq.length a) in
    // `Seq.equal` (extensional equality) is needed to conclude `concat x y == a`.
    assert (Seq.equal a (concat x y));
    (x,y)

/// Re-assembling two adjacent subslices yields the original slice.
let lemma_slice_append #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t):
  Lemma (requires (range (v (length y) + v (length z)) usize_inttype /\
                   length y +! length z == length x /\
                   y == slice x (sz 0) (length y) /\
                   z == slice x (length y) (length x)))
        (ensures (x == concat y z))
  = assert (Seq.equal x (concat y z))

/// Three-way version of `lemma_slice_append`.
let lemma_slice_append_3 #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t) (w:t_Slice t):
  Lemma (requires (range (v (length y) + v (length z) + v (length w)) usize_inttype /\
                   length y +! length z +! length w == length x /\
                   y == slice x (sz 0) (length y) /\
                   z == slice x (length y) (length y +! length z) /\
                   w == slice x (length y +! length z) (length x)))
        (ensures (x == concat y (concat z w)))
  = assert (Seq.equal x (Seq.append y (Seq.append z w)))

/// Four-way version of `lemma_slice_append`.
let lemma_slice_append_4 #t (x y z w u:t_Slice t) :
  Lemma (requires (range (v (length y) + v (length z) + v (length w) + v (length u)) usize_inttype /\
                   length y +! length z +! length w +! length u == length x /\
                   y == slice x (sz 0) (length y) /\
                   z == slice x (length y) (length y +! length z) /\
                   w == slice x (length y +! length z) (length y +! length z +! length w) /\
                   u == slice x (length y +! length z +! length w) (length x)))
        (ensures (x == concat y (concat z (concat w u))))
  = assert (Seq.equal x (Seq.append y (Seq.append z (Seq.append w u))))
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.BitVectors.fsti
================================================
module Rust_primitives.BitVectors

open FStar.Mul
open Rust_primitives.Arrays
open Rust_primitives.Integers

// TODO: relate `num_bits` with a notion of bounded integer

/// Number of bits carried by an integer of type `t`
type num_bits t = d: nat {d > 0 /\ d <= bits t /\ (signed t ==> d <= bits t)}

/// States that `x` is a positive integer that fits in `d` bits
type bounded #t (x:int_t t) (d:num_bits t) =
  v x >= 0 /\ v x < pow2 d

/// Integer of type `t` that carries at most `d` bits
type int_t_d t (d: num_bits t) =
  n: int_t t {bounded n d}

/// If `x` fits in `d` bits, then upper bits are zero
val lemma_get_bit_bounded #t (x:int_t t) (d:num_bits t) (i:usize):
  Lemma ((bounded x d /\ v i >= d /\ v i < bits t) ==> get_bit x i == 0)
  [SMTPat (get_bit #t x i); SMTPat (bounded x d)]

/// If upper bits of `x` are zero, then `x` is bounded accordingly
val lemma_get_bit_bounded' #t (x:int_t t) (d:num_bits t):
  Lemma (requires forall i.
v i > d ==> get_bit x i == 0)
        (ensures bounded x d)

open FStar.FunctionalExtensionality

/// A bit vector is a partial map from indexes to bits
type bit_vec (len: nat) = i:nat {i < len} ^-> bit

/// Transform an array of integers to a bit vector
// Element `arr.[k]` contributes bits `k*d .. k*d+d-1`, least significant first.
#push-options "--fuel 0 --ifuel 1 --z3rlimit 50"
let bit_vec_of_int_t_array (#n: inttype) (#len: usize) (arr: t_Array (int_t n) len) (d: num_bits n): bit_vec (v len * d)
  = on (i: nat {i < v len * d})
       (fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d)))

// Same as `bit_vec_of_int_t_array`, but accepts arrays of refined integers.
let bit_vec_of_refined_int_t_array (#n: inttype) (#len: usize) #refinement
  (arr: t_Array (x: int_t n {refinement x}) len)
  (d: num_bits n): bit_vec (v len * d)
  = on (i: nat {i < v len * d})
       (fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d)))
#pop-options

/// Transform an array of `nat`s to a bit vector
#push-options "--fuel 0 --ifuel 1 --z3rlimit 50"
let bit_vec_of_nat_array (#len: usize) (arr: t_Array nat len) (d: nat)
  : bit_vec (v len * d)
  = on (i: nat {i < v len * d})
       (fun i -> get_bit_nat (Seq.index arr (i / d)) (i % d))
#pop-options

/// Transforms a bit vector to an integer
val bit_vec_to_int_t #t (d: num_bits t) (bv: bit_vec d): int_t t

/// `bit_vec_to_int_t` and `get_bit` are (modulo usize) inverse
val bit_vec_to_int_t_lemma #t (d: num_bits t) (bv: bit_vec d) i
  : Lemma (get_bit (bit_vec_to_int_t d bv) (sz i) == bv i)

/// Transforms a bit vector into an array of integers
val bit_vec_to_int_t_array #t (#len: usize) (d: num_bits t) (bv: bit_vec (v len * d))
  : Pure (t_Array (int_t t) len)
    (requires True)
    (ensures fun r -> (forall i. bit_vec_of_int_t_array r d i == bv i))

/// Transforms a bit vector into an array of nats, each bounded by `pow2 d`.
val bit_vec_to_nat_array (#len: usize) (d: nat) (bv: bit_vec (v len * d))
  : Pure (t_Array nat len)
    (requires True)
    (ensures fun r -> (forall i. bit_vec_of_nat_array r d i == bv i) /\
                      (forall i. Seq.index r i < pow2 d))

/// Bit-wise semantics of `2^n-1`
val get_bit_pow2_minus_one #t
  (n: nat {pow2 n - 1 <= maxint t})
  (nth: usize {v nth < bits t})
  : Lemma (   get_bit (mk_int #t (pow2 n - 1)) nth
           == (if v nth < n then 1 else 0))

/// Log2 table
// Maps a mask `2^k - 1` back to `Some k`; `None` for non-mask values.
unfold let mask_inv_opt =
  function | 0   -> Some 0
           | 1   -> Some 1
           | 3   -> Some 2
           | 7   -> Some 3
           | 15  -> Some 4
           | 31  -> Some 5
           | 63  -> Some 6
           | 127 -> Some 7
           | 255 -> Some 8
           | 511 -> Some 9
           | 1023 -> Some 10
           | 2047 -> Some 11
           | 4095 -> Some 12
           | _   -> None

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targetting machine integer literals of type `i32`
val get_bit_pow2_minus_one_i32
  (x: int {x < pow2 31 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})
  : Lemma (   get_bit (mk_i32 x) nth
           == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (mk_i32 x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targetting machine integer literals of type `i16`
val get_bit_pow2_minus_one_i16
  (x: int {x < pow2 15 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 16})
  : Lemma (   get_bit (mk_i16 x) nth
           == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (mk_i16 x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targetting machine integer literals of type `u32`
val get_bit_pow2_minus_one_u32
  (x: int {x < pow2 32 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})
  : Lemma (   get_bit (mk_u32 x) nth
           == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (mk_u32 x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targetting machine integer literals of type `u16`
val get_bit_pow2_minus_one_u16
  (x: int {x < pow2 16 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 16})
  : Lemma (   get_bit (mk_u16 x) nth
           == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (mk_u16 x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targetting machine integer literals of type `u8`
// The extra `t == u8_inttype` argument keeps the SMT pattern applicable at
// the implicit-type call sites.
val get_bit_pow2_minus_one_u8
  (t: _ {t == u8_inttype})
  (x: int {x < pow2 8 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 8})
  : Lemma (   get_bit #t (mk_u8 x) nth
           == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit #t (mk_u8 x) nth)]
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Char.fsti
================================================
module Rust_primitives.Char
#set-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"

/// Rust's `char`, modeled by F*'s character type.
type char : eqtype = FStar.Char.char
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Float.fsti
================================================
module Rust_primitives.Float
#set-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"

/// Abstract model of Rust floats; literals are injected from their
/// source-text representation via `mk_float`.
type float : eqtype
val mk_float : string -> float
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Mexception.fst
================================================
module Rust_primitives.Hax.Control_flow_monad.Mexception

open Core_models.Ops.Control_flow

/// Collapses a `ControlFlow a a` into its payload: both `Continue` and
/// `Break` carry the final value.
let run #a: t_ControlFlow a a -> a =
  function
  | ControlFlow_Continue v
  | ControlFlow_Break v -> v
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Moption.fst
================================================
module Rust_primitives.Hax.Control_flow_monad.Moption

/// Monadic `join` for `Option`: flattens one level of nesting.
let run #a (f: Core_models.Option.t_Option (Core_models.Option.t_Option a)): Core_models.Option.t_Option a =
  match f with
  | Core_models.Option.Option_Some x -> x
  | Core_models.Option.Option_None ->
Core_models.Option.Option_None
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Mresult.fst
================================================
module Rust_primitives.Hax.Control_flow_monad.Mresult

/// Monadic `join` for `Result`: flattens one level of nesting, propagating
/// the error of either layer.
let run #a #e (f: Core_models.Result.t_Result (Core_models.Result.t_Result a e) e): Core_models.Result.t_Result a e =
  match f with
  | Core_models.Result.Result_Ok x -> x
  | Core_models.Result.Result_Err e -> Core_models.Result.Result_Err e
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti
================================================
module Rust_primitives.Hax.Folds

open Rust_primitives
open Core_models.Ops.Range
open FStar.Mul

(**** `s.chunks_exact(chunk_size).enumerate()` *)
/// Predicate that asserts a slice `s_chunk` is exactly the nth chunk
/// of the sequence `s`
let nth_chunk_of #t
  (s: Seq.seq t)
  (s_chunk: Seq.seq t {Seq.length s_chunk > 0})
  (chunk_nth: nat {chunk_nth < Seq.length s / Seq.length s_chunk})
  = Seq.slice s (Seq.length s_chunk * chunk_nth)
                (Seq.length s_chunk * (chunk_nth + 1))
    == s_chunk

/// Fold function that is generated for `for` loops iterating on
/// `s.chunks_exact(chunk_size).enumerate()`-like iterators
// `inv` is a loop invariant over the accumulator, indexed by the number of
// chunks consumed so far; `f` must carry it from `i` to `i + 1`.
val fold_enumerated_chunked_slice
  (#t: Type0) (#acc_t: Type0)
  (chunk_size: usize {v chunk_size > 0})
  (s: t_Slice t)
  (inv: acc_t -> (i:usize{v i <= Seq.length s / v chunk_size}) -> Type0)
  (init: acc_t {inv init (sz 0)})
  (f: ( acc:acc_t
      -> item:(usize & t_Slice t) { let (i, s_chunk) = item in
                                    v i < Seq.length s / v chunk_size
                                  /\ length s_chunk == chunk_size
                                  /\ nth_chunk_of s s_chunk (v i)
                                  /\ inv acc i }
      -> acc':acc_t { inv acc' (fst item +! sz 1) }
      )
  )
  : result: acc_t {inv result (mk_int (Seq.length s / v chunk_size))}

/// Fold function that is generated for `for` loops iterating on
/// `s.chunks_exact(chunk_size)`-like iterators
// NOTE(review): here the invariant is only ever demanded at index `sz 0` /
// `mk_int 0` — presumably a deliberately weak spec for non-enumerated
// chunk iteration; confirm against the engine's loop translation.
val fold_chunked_slice
  (#t: Type0) (#acc_t: Type0)
  (chunk_size: usize {v chunk_size > 0})
  (s: t_Slice t)
  (inv: acc_t -> (i:usize) -> Type0)
  (init: acc_t {inv init (sz 0)})
  (f: ( acc:acc_t
      -> item:(t_Slice t) { length item == chunk_size
                          /\ inv acc (sz 0) }
      -> acc':acc_t { inv acc' (sz 0) }
      )
  )
  : result: acc_t {inv result (mk_int 0)}

(**** `s.enumerate()` *)
/// Fold function that is generated for `for` loops iterating on
/// `s.enumerate()`-like iterators
val fold_enumerated_slice
  (#t: Type0) (#acc_t: Type0)
  (s: t_Slice t)
  (inv: acc_t -> (i:usize{v i <= v (length s)}) -> Type0)
  (init: acc_t {inv init (sz 0)})
  (f: (acc:acc_t -> i:(usize & t) {v (fst i) < v (length s) /\ snd i == Seq.index s (v (fst i)) /\ inv acc (fst i)}
      -> acc':acc_t {v (fst i) < v (length s) /\ inv acc' (fst i)}))
  : result: acc_t {inv result (length s)}

/// Variant of `fold_enumerated_slice` for loop bodies that may `return`
/// early (hence the nested `t_ControlFlow` result).
val fold_enumerated_slice_return
  (#t: Type0) (#acc_t: Type0) (#ret: Type0)
  (s: t_Slice t)
  (inv: acc_t -> (i:usize{v i <= v (length s)}) -> Type0)
  (init: acc_t {inv init (sz 0)})
  (f: (acc:acc_t -> i:(usize & t) {v (fst i) < v (length s) /\ snd i == Seq.index s (v (fst i)) (*/\ inv acc (fst i)*)}
      -> Core_models.Ops.Control_flow.t_ControlFlow
           (Core_models.Ops.Control_flow.t_ControlFlow ret (unit & acc_t))
           (acc':acc_t) (*{v (fst i) < v (length s) /\ inv acc' (fst i)}*)))
  : result: Core_models.Ops.Control_flow.t_ControlFlow ret acc_t(* {inv result (length s)} *)

(**** `(start..end_).step_by(step)` *)
/// Well-formedness of an index `i` for a stepped range: `i` stays between
/// `start` and the last reachable index (`strict`) or one step past it.
unfold let fold_range_step_by_wf_index (#u: inttype) (start: int_t u)
                           (end_: int_t u)
                           (step: usize {v step > 0})
                           (strict: bool)
                           (i: int)
  = v start < v end_
  ==> (let end_step = v end_ - 1 - ((v end_ - 1 - v start) % v step) in
      i >= v start /\ (if strict then i <= end_step else i <= end_step + v step))
      // /\ i % v step == v start % v step

#push-options "--z3rlimit 80"
mk_int 1) end_ inv (f init start) f
    else init

/// Variant of `fold_range` for loop bodies that may `break`: `f` returns a
/// `ControlFlow` and a `Break` stops the recursion with the carried
/// accumulator. The result invariant only guarantees `inv` at *some* index
/// between `start` and `end_`, since we cannot know where the break occurs.
let rec fold_range_cf (#acc_t: Type0) (#u: inttype)
              (start: int_t u)
              (end_: int_t u)
              (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0)
              (acc: acc_t {~(range_empty start end_) ==> inv acc start})
              (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\ fold_range_wf_index start end_ true (v i) /\ inv acc i}
                  -> tuple:((Core_models.Ops.Control_flow.t_ControlFlow (unit & acc_t) acc_t))
                     { let acc = match tuple with
                       | Core_models.Ops.Control_flow.ControlFlow_Break ((), acc)
                       | Core_models.Ops.Control_flow.ControlFlow_Continue acc -> acc in
                       inv acc (mk_int (v i + 1))}))
  : Tot (res: acc_t{ if range_empty start end_
                     then res == acc
                     else (exists (final: int_t u). v start <= v final /\ v final <= v end_ /\ inv res final)})
    (decreases v end_ - v start)
  = if v start < v end_
    then match f acc start with
         | Core_models.Ops.Control_flow.ControlFlow_Break ((), acc) -> acc
         | Core_models.Ops.Control_flow.ControlFlow_Continue acc ->
           fold_range_cf (start +! mk_int 1) end_ inv acc f
    else acc

/// Variant of `fold_range` for loop bodies that may `return` out of the
/// enclosing function: the doubly-nested `ControlFlow` distinguishes
/// `return res` (inner `Break`), `break` (inner `Continue`) and plain
/// `continue` (outer `Continue`). No invariant is threaded here.
let rec fold_range_return (#acc_t: Type0) (#ret_t: Type0) (#u: inttype)
              (start: int_t u)
              (end_: int_t u)
              (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0)
              (acc: acc_t )
              (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\ fold_range_wf_index start end_ true (v i) }
                  -> tuple:((Core_models.Ops.Control_flow.t_ControlFlow
                        (Core_models.Ops.Control_flow.t_ControlFlow ret_t (unit & acc_t))) acc_t)
                     ))
  : Tot (Core_models.Ops.Control_flow.t_ControlFlow ret_t acc_t)
    (decreases v end_ - v start)
  = if v start < v end_
    then match f acc start with
         | Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break res)->
           Core_models.Ops.Control_flow.ControlFlow_Break res
         | Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Continue ((), res)) ->
           Core_models.Ops.Control_flow.ControlFlow_Continue res
         | Core_models.Ops.Control_flow.ControlFlow_Continue acc ->
           fold_range_return (start +! mk_int 1) end_ inv acc f
    else Core_models.Ops.Control_flow.ControlFlow_Continue acc

/// Abstract fold with early `return` over an arbitrary iterator value `i`.
val fold_return #it #acc #ret #item (i: it) (init: acc)
  (f: acc -> item -> Core_models.Ops.Control_flow.t_ControlFlow
                      (Core_models.Ops.Control_flow.t_ControlFlow ret (unit & acc))
                      acc):
  Core_models.Ops.Control_flow.t_ControlFlow ret acc
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Int.fst
================================================
module Rust_primitives.Hax.Int

open Rust_primitives

/// Projects a machine integer to its mathematical value.
unfold let from_machine (#t:inttype) (x:int_t t) : range_t t = v #t x

/// Injects an in-range mathematical integer into a machine integer.
unfold let into_machine (#t:inttype) (n:range_t t) : int_t t = mk_int #t n
================================================
FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti
================================================
module Rust_primitives.Hax.Monomorphized_update_at
#set-options "--z3rlimit 30"

/// Monomorphized versions of the `update_at` operator.
open Rust_primitives open Rust_primitives.Hax open Core_models.Ops.Range let update_at_usize (#t: Type0) (s: t_Slice t) (i: usize {v i < Seq.length s}) (x: t) : t_Array t (length s) = Seq.upd #t s (v i) x val update_at_range #n (#t: Type0) (s: t_Slice t) (i: t_Range (int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (v i.f_start >= 0 /\ v i.f_start <= Seq.length s /\ v i.f_end <= Seq.length s /\ Seq.length x == v i.f_end - v i.f_start)) (ensures (fun res -> Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\ Seq.slice res (v i.f_start) (v i.f_end) == x /\ Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s))) val update_at_range_to #n (#t: Type0) (s: t_Slice t) (i: t_RangeTo (int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (v i.f_end >= 0 /\ v i.f_end <= Seq.length s /\ Seq.length x == v i.f_end)) (ensures (fun res -> Seq.slice res 0 (v i.f_end) == x /\ Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s))) val update_at_range_from #n (#t: Type0) (s: t_Slice t) (i: t_RangeFrom (int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires ( v i.f_start >= 0 /\ v i.f_start <= Seq.length s /\ Seq.length x == Seq.length s - v i.f_start)) (ensures (fun res -> Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\ Seq.slice res (v i.f_start) (Seq.length res) == x)) val update_at_range_full (#t: Type0) (s: t_Slice t) (i: t_RangeFull) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (Seq.length x == Seq.length s)) (ensures (fun res -> res == x)) ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.fst ================================================ module Rust_primitives.Hax open Rust_primitives.Integers open Rust_primitives.Arrays type t_Never = False let never_to_any #t: t_Never -> t = (fun _ -> match () with) let repeat #a (x: a) (len: usize): t_Array a len = FStar.Seq.create (v 
len) x open Core_models.Ops.Index class update_at_tc self idx = { [@@@FStar.Tactics.Typeclasses.tcinstance] super_index: t_Index self idx; update_at: s: self -> i: idx {f_index_pre s i} -> super_index.f_Output -> self; } open Core_models.Slice open Core_models.Array open Core_models.Ops.Range /// We have an instance for `usize`, but we often work with refined /// `usize`, and F* typeclass inference doesn't support subtyping /// well, hence the instance below. instance impl__index_refined t l r: t_Index (t_Array t l) (x: usize {r x}) = { f_Output = t; f_index_pre = (fun (s: t_Array t l) (i: usize {r i}) -> v i >= 0 && v i < v l); f_index_post = (fun _ _ _ -> true); f_index = (fun s i -> Seq.index s (v i)); } /// Similarly to `impl__index_refined`, we need to define a instance /// for refined `usize`. instance update_at_tc_array_refined t l r: update_at_tc (t_Array t l) (x: usize {r x}) = { super_index = impl__index_refined t l r; update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x); } instance impl__index t l: t_Index (t_Array t l) (usize) = { f_Output = t; f_index_pre = (fun (s: t_Array t l) (i: usize) -> v i >= 0 && v i < v l); f_index_post = (fun _ _ _ -> true); f_index = (fun s i -> Seq.index s (v i)); } instance update_at_tc_array t l: update_at_tc (t_Array t l) (usize) = { super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Array t l) (usize); update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x); } let update_at_tc_array_range_super t l: t_Index (t_Array t l) (t_Range (usize)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_to_super t l: t_Index (t_Array t l) (t_RangeTo (usize)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_from_super t l: t_Index (t_Array t l) (t_RangeFrom (usize)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_full_super t l: t_Index (t_Array t l) t_RangeFull = FStar.Tactics.Typeclasses.solve assume val update_at_array_range t l (s: t_Array t l) (i: t_Range (usize) 
{(update_at_tc_array_range_super t l).f_index_pre s i}) : (update_at_tc_array_range_super t l).f_Output -> t_Array t l assume val update_at_array_range_to t l (s: t_Array t l) (i: t_RangeTo (usize) {(update_at_tc_array_range_to_super t l).f_index_pre s i}) : (update_at_tc_array_range_to_super t l).f_Output -> t_Array t l assume val update_at_array_range_from t l (s: t_Array t l) (i: t_RangeFrom (usize) {(update_at_tc_array_range_from_super t l).f_index_pre s i}) : (update_at_tc_array_range_from_super t l).f_Output -> t_Array t l assume val update_at_array_range_full t l (s: t_Array t l) (i: t_RangeFull) : (update_at_tc_array_range_full_super t l).f_Output -> t_Array t l instance update_at_tc_array_range t l: update_at_tc (t_Array t l) (t_Range (usize)) = { super_index = update_at_tc_array_range_super t l; update_at = update_at_array_range t l } instance update_at_tc_array_range_to t l: update_at_tc (t_Array t l) (t_RangeTo (usize)) = { super_index = update_at_tc_array_range_to_super t l; update_at = update_at_array_range_to t l } instance update_at_tc_array_range_from t l: update_at_tc (t_Array t l) (t_RangeFrom (usize)) = { super_index = update_at_tc_array_range_from_super t l; update_at = update_at_array_range_from t l } instance update_at_tc_array_range_full t l: update_at_tc (t_Array t l) t_RangeFull = { super_index = update_at_tc_array_range_full_super t l; update_at = update_at_array_range_full t l } let (.[]<-) #self #idx {| update_at_tc self idx |} (s: self) (i: idx {f_index_pre s i}) = update_at s i unfold let array_of_list (#t:Type) (n: nat {n < maxint U16}) (l: list t {FStar.List.Tot.length l == n}) : t_Array t (sz n) = Seq.seq_of_list l (* class iterator_return (self: Type u#0): Type u#1 = { [@@@FStar.Tactics.Typeclasses.tcresolve] parent_iterator: Core_models.Iter.Traits.Iterator.t_Iterator self; f_fold_return: #b:Type0 -> s:self -> b -> (b -> i:parent_iterator.f_Item{parent_iterator.f_contains s i} -> Core_models.Ops.Control_flow.t_ControlFlow b b) -> 
Core_models.Ops.Control_flow.t_ControlFlow b b; } *) let while_loop #acc_t (inv: acc_t -> Type0) (condition: (c:acc_t {inv c}) -> bool) (fuel: (a:acc_t{inv a} -> nat)) (init: acc_t {inv init}) (f: (i:acc_t{inv i /\ condition i} -> o:acc_t{inv o /\ fuel o < fuel i})): (res: acc_t {inv res /\ not (condition res)}) = let rec while_loop_internal (current: acc_t {inv current}): Tot (res: acc_t {inv res /\ not (condition res)}) (decreases (fuel current)) = if condition current then let next = f current in assert (fuel next < fuel current); while_loop_internal next else current in while_loop_internal init assume val while_loop_return #acc_t #ret_t (inv: acc_t -> Type0) (condition: (c:acc_t {inv c}) -> bool) (fuel: (a:acc_t -> nat)) (init: acc_t ) (f: (acc_t -> Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow ret_t (Prims.unit & acc_t)) acc_t)) : Core_models.Ops.Control_flow.t_ControlFlow ret_t acc_t /// Represents backend failures let failure #t (_error: string) (_ast: string): Pure t False (fun _ -> True) = () ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti ================================================ module Rust_primitives.Integers open FStar.Mul #set-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20" val pow2_values: x:nat -> Lemma (let p = pow2 x in match x with | 0 -> p=1 | 1 -> p=2 | 8 -> p=256 | 16 -> p=65536 | 31 -> p=2147483648 | 32 -> p=4294967296 | 63 -> p=9223372036854775808 | 64 -> p=18446744073709551616 | 127 -> p=170141183460469231731687303715884105728 | 128 -> p=340282366920938463463374607431768211456 | 2 | 3 | 4 | 5 | 6 | 7 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 65 | 127 | 128 -> p = normalize_term (pow2 x) | _ -> True) 
[SMTPat (pow2 x)] type inttype = | I8 | I16 | I32 | I64 | I128 | ISIZE | U8 | U16 | U32 | U64 | U128 | USIZE let unsigned t = match t with | U8 | U16 | U32 | U64 | U128 | USIZE -> true | I8 | I16 | I32 | I64 | I128 | ISIZE -> false let signed t = match t with | U8 | U16 | U32 | U64 | U128 | USIZE -> false | I8 | I16 | I32 | I64 | I128 | ISIZE -> true type uinttype = t:inttype{unsigned t} val size_bits:n:nat{n == 32 \/ n == 64} let bits t = match t with | U8 | I8 -> 8 | U16 | I16 -> 16 | U32 | I32 -> 32 | U64 | I64 -> 64 | U128 | I128 -> 128 | USIZE | ISIZE -> size_bits let minint (t:inttype) = if unsigned t then 0 else -(pow2 (bits t - 1)) let maxint (t:inttype) = if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1 let max_usize = maxint USIZE let max_isize = maxint ISIZE let range (n:int) (t:inttype) : bool = minint t <= n && n <= maxint t let included (t: inttype) (t': inttype) = minint t' <= minint t && maxint t <= maxint t' let range_t t = x:int{range x t} type int_t t = | MkInt: range_t t -> int_t t let u8_inttype = U8 let i8_inttype = I8 let u16_inttype = U16 let i16_inttype = I16 let u32_inttype = U32 let i32_inttype = I32 let u64_inttype = U64 let i64_inttype = I64 let u128_inttype = U128 let i128_inttype = I128 let usize_inttype = USIZE let isize_inttype = ISIZE type u8 = int_t U8 type i8 = int_t I8 type u16 = int_t U16 type i16 = int_t I16 type u32 = int_t U32 type i32 = int_t I32 type u64 = int_t U64 type i64 = int_t I64 type u128 = int_t U128 type i128 = int_t I128 type usize = int_t USIZE type isize = int_t ISIZE [@(strict_on_arguments [0])] let v (#t:inttype) (x:int_t t) : range_t t = x._0 [@(strict_on_arguments [0])] let mk_int (#t:inttype) (n:range_t t) : int_t t = MkInt n let mk_int_v_lemma (#t:inttype) (a:int_t t) : Lemma (mk_int #t (v #t a) == a) [SMTPat (mk_int #t (v #t a))] = () let v_mk_int_lemma (#t:inttype) (n:range_t t) : Lemma (v #t (mk_int #t n) == n) [SMTPat (v #t (mk_int #t n))] = () let mk_u8 x = mk_int #U8 x let mk_i8 x = 
mk_int #I8 x let mk_u16 x = mk_int #U16 x let mk_i16 x = mk_int #I16 x let mk_u32 x = mk_int #U32 x let mk_i32 x = mk_int #I32 x let mk_u64 x = mk_int #U64 x let mk_i64 x = mk_int #I64 x let mk_u128 x = mk_int #U128 x let mk_i128 x = mk_int #I128 x let mk_usize x = mk_int #USIZE x let mk_isize x = mk_int #ISIZE x let sz x = mk_usize x let isz x = mk_isize x let from_uint8 (x:FStar.UInt8.t) : u8 = mk_int (FStar.UInt8.v x) let from_int8 (x:FStar.Int8.t) : i8 = mk_int (FStar.Int8.v x) let from_uint16 (x:FStar.UInt16.t) : u16 = mk_int (FStar.UInt16.v x) let from_int16 (x:FStar.Int16.t) : i16 = mk_int (FStar.Int16.v x) let from_uint32 (x:FStar.UInt32.t) : u32 = mk_int (FStar.UInt32.v x) let from_int32 (x:FStar.Int32.t) : i32 = mk_int (FStar.Int32.v x) let from_uint64 (x:FStar.UInt64.t) : u64 = mk_int (FStar.UInt64.v x) let from_int64 (x:FStar.Int64.t) : i64 = mk_int (FStar.Int64.v x) let from_uint128 (x:FStar.UInt128.t) : u128 = mk_int (FStar.UInt128.v x) let from_int128 (x:FStar.Int128.t) : i128 = mk_int (FStar.Int128.v x) let from_usize (x:FStar.UInt32.t) : usize = mk_int (FStar.UInt32.v x) let from_isize (x:FStar.Int32.t) : isize = mk_int (FStar.Int32.v x) let to_uint8 (x:u8) : FStar.UInt8.t = FStar.UInt8.uint_to_t (v x) let to_int8 (x:i8) : FStar.Int8.t = FStar.Int8.int_to_t (v x) let to_uint16 (x:u16) : FStar.UInt16.t = FStar.UInt16.uint_to_t (v x) let to_int16 (x:i16) : FStar.Int16.t = FStar.Int16.int_to_t (v x) let to_uint32 (x:u32) : FStar.UInt32.t = FStar.UInt32.uint_to_t (v x) let to_int32 (x:i32) : FStar.Int32.t = FStar.Int32.int_to_t (v x) let to_uint64 (x:u64) : FStar.UInt64.t = FStar.UInt64.uint_to_t (v x) let to_int64 (x:i64) : FStar.Int64.t = FStar.Int64.int_to_t (v x) let to_uint128 (x:u128) : FStar.UInt128.t = FStar.UInt128.uint_to_t (v x) let to_int128 (x:i128) : FStar.Int128.t = FStar.Int128.int_to_t (v x) let modulus (t:inttype) = pow2 (bits t) (* Wrap-around modulo: wraps into [-p/2; p/2[ *) let op_At_Percent (v:int) (p:int{p>0 /\ p%2=0}) : Tot int 
= let m = v % p in if m >= p/2 then m - p else m

(* Reduce an integer into the representable range of `t`:
   plain modulus for unsigned types, wrap-around modulus for signed ones. *)
let op_At_Percent_Dot x t : range_t t =
  if unsigned t then x % modulus t else x @% modulus t

(* Exact cast: requires the value to already fit in the target type. *)
let cast (#t:inttype) (#t':inttype) (u1:int_t t{range (v u1) t'}) = mk_int #t' (v u1)

(* Wrapping cast (Rust `as` semantics): reduces modulo the target type. *)
let cast_mod (#t:inttype) (#t':inttype) (u1:int_t t) = mk_int #t' (v u1 @%. t')

/// Simplifies double casts when possible.
/// For example, with `x` a i32, this lemma rewrites `x as i64 as i32` into `x`.
let cast_identity_lemma (a: inttype) (b: inttype {bits b >= bits a}) (n: int_t a)
  : Lemma (cast_mod #b #a (cast_mod #a #b n) == n)
          [SMTPat (cast_mod #b #a (cast_mod #a #b n))]
  = FStar.Math.Lemmas.small_mod (abs (v n)) (modulus a)

/// Arithmetic operations
///

let add_mod (#t:inttype) (a:int_t t) (b:int_t t) = mk_int #t ((v a + v b) @%. t)

(* Saturating addition: clamps to [minint t, maxint t]. *)
let add_sat (#t:inttype) (a:int_t t) (b:int_t t) =
  mk_int #t (if (v a + v b) <= minint t then minint t
             else if (v a + v b) >= maxint t then maxint t
             else (v a + v b))

(* Saturating subtraction: clamps to [minint t, maxint t]. *)
let sub_sat (#t:inttype) (a:int_t t) (b:int_t t) =
  mk_int #t (if (v a - v b) <= minint t then minint t
             else if (v a - v b) >= maxint t then maxint t
             else v a - v b)

let add (#t:inttype) (a:int_t t) (b:int_t t{range (v a + v b) t}) = mk_int #t (v a + v b)

let incr (#t:inttype) (a:int_t t{v a < maxint t}) = mk_int #t (v a + 1)

let mul_mod (#t:inttype) (a:int_t t) (b:int_t t) = mk_int #t (v a * v b @%. t)

(* Wrapping multiplication paired with an overflow flag (Rust `overflowing_mul`).
   FIX: the flag previously read `v a * v b < maxint t` for the underflow case,
   making it true for any product other than `maxint t`; underflow must be
   checked against `minint t`. *)
let mul_overflow (#t:inttype) (a:int_t t) (b:int_t t) =
  (mk_int #t (v a * v b @%. t), (v a * v b > maxint t || v a * v b < minint t))

let mul (#t:inttype) (a:int_t t) (b:int_t t{range (v a * v b) t}) = mk_int #t (v a * v b)

let sub_mod (#t:inttype) (a:int_t t) (b:int_t t) = mk_int #t ((v a - v b) @%.
t)

let sub (#t:inttype) (a:int_t t) (b:int_t t{range (v a - v b) t}) = mk_int #t (v a - v b)

let decr (#t:inttype) (a:int_t t{minint t < v a}) = mk_int #t (v a - 1)

let div (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0 /\ (unsigned t \/ range (v a / v b) t)}) =
  assert (unsigned t \/ range (v a / v b) t);
  mk_int #t (v a / v b)

let mod (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0}) = mk_int #t (v a % v b)

/// Comparison Operators
///

let eq (#t:inttype) (a:int_t t) (b:int_t t) = v a = v b

(* Disequality test.
   FIX: this previously compared `v b <> v b`, which is identically false;
   `ne` must compare `a` against `b` so that it is the negation of `eq`. *)
let ne (#t:inttype) (a:int_t t) (b:int_t t) = v a <> v b

let lt (#t:inttype) (a:int_t t) (b:int_t t) = v a < v b
let lte (#t:inttype) (a:int_t t) (b:int_t t) = v a <= v b
let gt (#t:inttype) (a:int_t t) (b:int_t t) = v a > v b
let gte (#t:inttype) (a:int_t t) (b:int_t t) = v a >= v b

/// Bitwise Operations
/// Todo: define bitvector-based normalizable definitions
/// for all these operations

let ones (#t:inttype) : n:int_t t =
  if unsigned t then mk_int #t (pow2 (bits t) - 1) else mk_int #t (-1)

let zero (#t:inttype) : n:int_t t = mk_int #t 0

val lognot: #t:inttype -> int_t t -> int_t t
val lognot_lemma: #t:inttype -> a:int_t t ->
  Lemma (lognot #t zero == ones /\ lognot #t ones == zero /\
         lognot (lognot a) == a /\
         (signed t ==> v (lognot a) = -1 - v a) /\
         (unsigned t ==> v (lognot a) = pow2 (bits t) - 1 - v a) )

val logxor: #t:inttype -> int_t t -> int_t t -> int_t t
val logxor_lemma: #t:inttype -> a:int_t t -> b:int_t t -> Lemma
  (a `logxor` a == zero /\
   (a `logxor` b == zero ==> b == a) /\
   a `logxor` (a `logxor` b) == b /\
   a `logxor` (b `logxor` a) == b /\
   zero `logxor` a == a /\
   a `logxor` zero == a /\
   ones `logxor` a == lognot a /\
   a `logxor` ones == lognot a)

val logand: #t:inttype -> int_t t -> int_t t -> int_t t
val logand_lemma: #t:inttype -> a:int_t t -> b:int_t t ->
  Lemma (logand a zero == zero /\
         logand zero a == zero /\
         logand a ones == a /\
         logand ones a == a /\
         (a == b ==> logand a b == a) /\
         (b == lognot a ==> logand a b == zero) /\
         (v a >= 0 ==> (v (logand a b) >= 0) /\
(v (logand a b) <= v a)) /\ (v b >= 0 ==> (v (logand a b) >= 0) /\ (v (logand a b) <= v b))) val logand_mask_lemma: #t:inttype -> a:int_t t -> m:nat{m < bits t} -> Lemma (pow2 m < maxint t /\ logand a (sub #t (mk_int #t (pow2 m)) (mk_int #t 1)) == mk_int (v a % pow2 m)) [SMTPat (logand #t a (sub #t (mk_int #t (pow2 m)) (mk_int #t 1)))] val logor: #t:inttype -> int_t t -> int_t t -> int_t t val logor_disjoint: #t:inttype -> a:int_t t -> b:int_t t -> m:nat{m < bits t} -> Lemma (requires 0 <= v a /\ 0 <= v b /\ v a % pow2 m == 0 /\ v b < pow2 m) (ensures v (logor a b) == v a + v b) val logor_lemma: #t:inttype -> a:int_t t -> b:int_t t -> Lemma (logor a zero == a /\ logor a ones == ones /\ logor zero a == a /\ logor ones a == ones /\ ((v a >= 0 /\ v b >= 0) ==> (v (logor a b) >= v a /\ v (logor a b) >= v b))) unfold type shiftval (t:inttype) (t':inttype) = b:int_t t'{v b >= 0 /\ v b < bits t} unfold type rotval (t:inttype) (t':inttype) = b:int_t t'{v b > 0 /\ v b < bits t} #push-options "--z3version 4.13.3" [@@"opaque_to_smt"] let shift_right (#t:inttype) (#t':inttype) (a:int_t t) (b:shiftval t t') : int_t t = mk_int #t (v a / pow2 (v b)) #pop-options val shift_right_lemma (#t:inttype) (#t':inttype) (a:int_t t) (b:shiftval t t'): Lemma (v (shift_right #t #t' a b) == (v a / pow2 (v b))) [SMTPat (shift_right #t #t' a b)] val shift_left (#t:inttype) (#t':inttype) (a:int_t t) (b:shiftval t t') : int_t t val shift_left_positive_lemma (#t:inttype) (#t':inttype) (a:int_t t) (b:shiftval t t'): Lemma (requires (unsigned t \/ v a >= 0)) (ensures ((v (shift_left #t #t' a b) == (v a * pow2 (v b)) @%. 
t))) [SMTPat (shift_left #t #t' a b)] val rotate_right: #t:inttype{unsigned t} -> #t':inttype -> a:int_t t -> rotval t t' -> int_t t val rotate_left: #t:inttype{unsigned t} -> #t':inttype -> a:int_t t -> rotval t t' -> int_t t let shift_right_i (#t:inttype) (#t':inttype) (s:shiftval t t') (u:int_t t) : int_t t = shift_right u s let shift_left_i (#t:inttype) (#t':inttype) (s:shiftval t t') (u:int_t t{v u >= 0}) : int_t t = shift_left u s let rotate_right_i (#t:inttype{unsigned t}) (#t':inttype) (s:rotval t t') (u:int_t t) : int_t t = rotate_right u s let rotate_left_i (#t:inttype{unsigned t}) (#t':inttype) (s:rotval t t') (u:int_t t) : int_t t = rotate_left u s let abs_int (#t:inttype) (a:int_t t{minint t < v a}) = mk_int #t (abs (v a)) let neg (#t:inttype{signed t}) (a:int_t t{range (0 - v a) t}) = mk_int #t (0 - (v a)) val neg_equiv_lemma: #t:inttype{signed t /\ not (I128? t)} -> a:int_t t{range (0 - v a) t} -> Lemma (neg a == sub #t (mk_int 0) a /\ (lognot a = sub (neg a) (mk_int 1))) /// /// Operators available for all machine integers /// // Strict: with precondition unfold let (+!) #t = add #t // Wrapping: no precondition unfold let (+.) #t = add_mod #t unfold let ( *! ) #t = mul #t unfold let ( *. ) #t = mul_mod #t unfold let ( -! ) #t = sub #t unfold let ( -. ) #t = sub_mod #t unfold let ( >>! ) #t #t' = shift_right #t #t' unfold let ( <>>. ) #t #t' = rotate_right #t #t' unfold let ( <<<. ) #t #t' = rotate_left #t #t' unfold let ( ^. ) #t = logxor #t unfold let ( |. ) #t = logor #t unfold let ( &. ) #t = logand #t unfold let ( ~. ) #t = lognot #t unfold let (/!) #t = div #t unfold let (%!) #t = mod #t unfold let (=.) = (=) unfold let (<>.) = (<>) unfold let (<.) #t = lt #t unfold let (<=.) #t = lte #t unfold let (>.) #t = gt #t unfold let (>=.) 
#t = gte #t

type bit = n: nat {n < 2}

/// Mathematical `get_bit` definition on `nat`s
let get_bit_nat (x: nat) (nth: nat): bit = (x / pow2 nth) % 2

/// `get_bit` definition for machine integer of any size and signedness
[@"opaque_to_smt"]
let get_bit (#n: inttype) (x: int_t n) (nth: usize {v nth < bits n}): bit
  = if v x >= 0
    then get_bit_nat (v x) (v nth)
    else // two's complement
         get_bit_nat (pow2 (bits n) + v x) (v nth)

unfold let bit_and (x y: bit): bit = match x, y with | (1, 1) -> 1 | _ -> 0

(* Disjunction on single bits.
   FIX: this was `(x + y) % 2`, which is XOR (it yields 0 when both bits are
   set), making the `get_bit_or` axiom below false for operands that share a
   set bit. Genuine disjunction is 1 whenever at least one operand is 1;
   written in the same match style as `bit_and`. *)
unfold let bit_or (x y: bit): bit = match x, y with | (0, 0) -> 0 | _ -> 1

/// Bit-wise semantics for `&.`
val get_bit_and #t (x y: int_t t) (i: usize {v i < bits t})
  : Lemma (get_bit (x &. y) i == get_bit x i `bit_and` get_bit y i)
          [SMTPat (get_bit (x &. y) i)]

/// Bit-wise semantics for `|.`
val get_bit_or #t (x y: int_t t) (i: usize {v i < bits t})
  : Lemma (get_bit (x |. y) i == get_bit x i `bit_or` get_bit y i)
          [SMTPat (get_bit (x |. y) i)]

// NOTE(review): the declaration of the `<<!` (shift-left) bit lemma appears
// truncated in this extract — only the remnant below survives; restore the
// full `get_bit_shl` val from upstream rather than from this fragment.
/// Bit-wise semantics for `<= 0 /\ v y < bits t) (ensures get_bit (x <>!`

/// Bit-wise semantics for `>>!`
val get_bit_shr #t #u (x: int_t t) (y: int_t u) (i: usize {v i < bits t})
  : Lemma (requires v y >= 0 /\ v y < bits t)
          (ensures get_bit (x >>! y) i
                   == (if v i < bits t - v y
                       then get_bit x (mk_int (v i + v y))
                       else if signed t
                            then get_bit x (mk_int (bits t - 1))
                            else 0))
          [SMTPat (get_bit (x >>!
y) i)] /// Bit-wise semantics of integer casts val get_bit_cast #t #u (x: int_t t) (nth: usize) : Lemma (requires v nth < bits u /\ v nth < bits t) (ensures get_bit (cast_mod #t #u x) nth == get_bit x nth) [SMTPat (get_bit (cast_mod #t #u x) nth)] val get_bit_cast_extend #t #u (x: int_t t) (nth: usize) : Lemma (requires bits t < bits u /\ v nth >= bits t /\ v nth < bits u) (ensures get_bit (cast_mod #t #u x) nth == 0) [SMTPat (get_bit (cast_mod #t #u x) nth)] ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Iterators.fsti ================================================ module Rust_primitives.Iterators open Rust_primitives open Core_models.Ops.Range open FStar.Mul val foldi_range (#n:inttype) (#acc_t:Type) (#inv:(acc_t -> i:int_t n -> Type)) (r: t_Range (int_t n){r.f_start <=. r.f_end}) (acc:acc_t{inv acc r.f_start}) (f: (acc:acc_t -> i:int_t n{i >=. r.f_start /\ i <. r.f_end /\ inv acc i} -> acc':acc_t{inv acc' (i +! mk_int 1)})) : res:acc_t{inv res r.f_end} val foldi_range_step_by (#n:inttype) (#acc_t:Type) (#inv:(acc_t -> i:int_t n -> Type)) (r: t_Range (int_t n){r.f_start <=. r.f_end}) (step: usize{v step > 0 /\ range (v step) n /\ range (v r.f_end + v step) n}) (acc:acc_t{inv acc r.f_start}) (f: (acc:acc_t -> i:int_t n{i >=. r.f_start /\ i <. r.f_end /\ (v i - v r.f_start) % (v step) == 0 /\ inv acc i} -> acc':acc_t{inv acc' (i +! 
mk_int #n (v step))})) : res:acc_t{inv res r.f_end} /// Predicate that asserts a slice `s_chunk` is exactly the nth chunk /// of the sequence `s` let nth_chunk_of #t (s: Seq.seq t) (s_chunk: Seq.seq t {Seq.length s_chunk > 0}) (chunk_nth: nat {chunk_nth < Seq.length s / Seq.length s_chunk}) = Seq.slice s (Seq.length s_chunk * chunk_nth) (Seq.length s_chunk * (chunk_nth + 1)) == s_chunk val foldi_chunks_exact (#t #acc_t:Type) (#inv: acc_t -> usize -> Type) (s: t_Slice t) (chunk_len: usize {v chunk_len > 0}) (acc: acc_t {inv acc (sz 0)}) (f: ( acc:acc_t -> it: (usize & t_Array t chunk_len) { let (i, s_chunk) = it in v i < Seq.length s / v chunk_len /\ nth_chunk_of s s_chunk (v i) /\ inv acc i } -> acc': acc_t {inv acc' (fst it +! sz 1)} ) ) : res:acc_t{inv res (length s /! chunk_len)} val fold_chunks_exact (#t:Type) (#acc_t:Type) (#inv:(acc_t -> Type)) (s:t_Slice t) (chunk_len:usize{v chunk_len > 0}) // /\ Seq.length s % v chunk_len == 0}) (acc:acc_t{inv acc}) (f: (acc:acc_t -> it:t_Array t chunk_len{inv acc} -> acc':acc_t{inv acc'})) : res:acc_t{inv res} val foldi_slice (#t:Type) (#acc_t:Type) (#inv:(acc_t -> usize -> Type)) (sl: t_Slice t) (acc:acc_t{inv acc (sz 0)}) (f: (acc:acc_t -> it:(usize & t){ let (i,item) = it in v i >= 0 /\ v i < Seq.length sl /\ Seq.index sl (v i) == item /\ inv acc i} -> acc':acc_t{inv acc' (fst it +! sz 1)})) : res:acc_t{inv res (length sl)} ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Mem.fsti ================================================ module Rust_primitives.Mem open FStar.Mul let copy (#t: Type0) (x: t) = x let replace (#t: Type0) (dest: t) (src: t) = (src, dest) ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Notations.fsti ================================================ module Rust_primitives.Notations open Rust_primitives class negation_tc self = { ( ~. 
): self -> self; } instance negation_for_integers #t: negation_tc (int_t t) = { ( ~. ) = fun x -> lognot x } instance negation_for_bool: negation_tc bool = { ( ~. ) = not } open Core_models.Ops.Index let ( .[] ) #self #idx {| inst: t_Index self idx |} (s:self) (i:idx{f_index_pre s i}): inst.f_Output = f_index s i ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Sequence.fst ================================================ module Rust_primitives.Sequence open Rust_primitives.Integers type t_Seq t = Rust_primitives.Arrays.t_Slice t let seq_empty #t () : t_Seq t = FStar.Seq.empty let seq_from_slice #t (s: Rust_primitives.Arrays.t_Slice t) : t_Seq t = s let seq_from_array #t n (s: Rust_primitives.Arrays.t_Array t n) : t_Seq t = s let seq_to_slice #t (s: t_Seq t) : Rust_primitives.Arrays.t_Slice t = s let seq_len #t (s: t_Seq t): usize = mk_usize (Seq.length s) let seq_slice #t (s: t_Seq t) (b: usize) (e: usize{e >=. b && e <=. seq_len s}): t_Seq t = Seq.slice s (v b) (v e) let seq_index #t (s: t_Seq t) (i: usize{i <. seq_len s}): t = Rust_primitives.Slice.slice_index s i let seq_last #t (s: t_Seq t{seq_len s >. mk_usize 0}): t = Seq.index s ((Seq.length s) - 1) let seq_first #t (s: t_Seq t{seq_len s >. mk_usize 0}): t = Seq.index s 0 let seq_concat #t (s1: t_Seq t) (s2: t_Seq t {(Seq.length s1) + (Seq.length s2) <= max_usize}): t_Seq t = Seq.append s1 s2 let seq_one #t (x: t): t_Seq t = Seq.create 1 x let seq_create #t (x: t) (n: usize): t_Seq t = Seq.create (v n) x ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Slice.fsti ================================================ module Rust_primitives.Slice open FStar.Mul open Rust_primitives.Arrays open Rust_primitives.Integers let slice_length (#a: Type) (s: t_Slice a): res: usize {res == sz (Seq.length s)} = sz (Seq.length s) let slice_split_at (#v_T: Type0) (s: t_Slice v_T) (mid: usize {mid <=. 
length s}): t_Slice v_T & t_Slice v_T = Seq.slice s 0 (v mid), Seq.slice s (v mid) (Seq.length s) let slice_contains (#a: eqtype) (s: t_Slice a) (v: a): bool = Seq.mem v s let slice_index (#t: Type) (s: t_Slice t) (i: usize {i <. length s}): t = Seq.index s (v i) let slice_slice (#v_T: Type0) (s: t_Slice v_T) (start: usize {start <=. length s}) (end_: usize {start <=. end_ /\ end_ <=. length s}): t_Slice v_T = Seq.slice s (v start) (v end_) val array_map (#t: Type) (#u: Type) (l: usize) (#ft: Type) (s: t_Array t l) (f: t -> u): res: t_Array u l {forall i. Seq.index res i == f (Seq.index s i)} let array_as_slice (#t: Type) (l: usize) (s: t_Array t l): t_Slice t = s let array_slice (#t: Type) (l: usize) (s: t_Array t l) = slice_slice s val array_from_fn (#t: Type) (len: usize) (#ft: Type) (f: (x: usize {x <. len}) -> t): Pure (t_Array t len) (requires True) (ensures (fun a -> forall i. Seq.index a i == f (sz i))) let array_index (#t: Type) (l: usize) (s: t_Array t l) (i: usize {i <. length s}): t = Seq.index s (v i) ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.String.fsti ================================================ module Rust_primitives.String open Rust_primitives.Integers val str_concat: string -> string -> string val str_of_char: FStar.Char.char -> string val str_sub: string -> usize -> usize -> string val str_index: string -> usize -> FStar.Char.char ================================================ FILE: hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.fst ================================================ module Rust_primitives include Rust_primitives.Integers include Rust_primitives.Arrays include Rust_primitives.BitVectors include Rust_primitives.Float include Rust_primitives.Char class cast_tc a b = { cast: a -> b; } /// Rust's casts operations on integers are non-panicking instance cast_tc_integers (t:inttype) (t':inttype) : cast_tc (int_t t) (int_t t') = { cast = (fun x -> 
Rust_primitives.Integers.cast_mod #t #t' x) } instance cast_tc_bool_integer (t:inttype) : cast_tc bool (int_t t) = { cast = (fun x -> if x then Rust_primitives.Integers.mk_int 1 else Rust_primitives.Integers.mk_int 0) } class unsize_tc source = { output: Type; unsize: source -> output; } instance array_to_slice_unsize t n: unsize_tc (t_Array t n) = { output = (x:t_Slice t{Seq.length x == v n}); unsize = (fun (arr: t_Array t n) -> arr <: t_Slice t); } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/.envrc ================================================ use flake .#examples ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/Makefile.copy ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HACL_HOME to be set to your HACL* repo location # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar HACL_HOME ?= $(HAX_LIBS_HOME)/../../../hacl-star FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache HINT_DIR ?= $(HAX_LIBS_HOME)/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. Here, we # *extend* the set of relevant files with the tests. ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* 
================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/README.md ================================================ ## Libraries for Hax The goal of this directory is to serve as a snapshot of the current F* supporting libraries for Hax. The dependency chain is: `rust_primitives` <- `core` <- `hax_lib` # Rust Primitives The `/rust_primitives` directory contains hand-written models for Rust built-in features like machine integers and arrays. In particular, the code in this directory reconciles any type or semantic differences between Rust and F*. A number of files in this directory use the [HACL Library](https://github.com/hacl-star/hacl-star/tree/main/lib). # Core & Alloc The `/core` directory contains hand-written models for some parts of the Core and Alloc libraries of Rust. As a first goal, we would like to typecheck the code in this directory against interfaces generated from Rust Core and Alloc. As a second goal, we would like to generate the code in this directory from an annotated version of Rust Core and Alloc. # Hax Library The `/hax_lib` directory contains hand-written and generated code for the Hax library which adds new features and functionality to Rust to help programmers. For example, this library includes bounded indexes for arrays, unbounded integers etc. 
================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Alloc.fst ================================================ module Alloc.Alloc let t_Global = () ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Collections.Binary_heap.fsti ================================================ module Alloc.Collections.Binary_heap open Rust_primitives val t_BinaryHeap: Type -> eqtype val impl_9__new: #t:Type -> t_BinaryHeap t val impl_9__push: #t:Type -> t_BinaryHeap t -> t -> t_BinaryHeap t val impl_10__len: #t:Type -> t_BinaryHeap t -> usize val impl_10__iter: #t:Type -> t_BinaryHeap t -> t_Slice t open Core.Option val impl_10__peek: #t:Type -> t_BinaryHeap t -> t_Option t val impl_9__pop: #t:Type -> t_BinaryHeap t -> t_BinaryHeap t & t_Option t unfold let nonempty h = v (impl_10__len h) > 0 val lemma_peek_len: #t:Type -> h: t_BinaryHeap t -> Lemma (Option_Some? (impl_10__peek h) <==> nonempty h) val lemma_pop_len: #t:Type -> h: t_BinaryHeap t -> Lemma (Option_Some? 
(snd (impl_9__pop h)) <==> nonempty h) val lemma_peek_pop: #t:Type -> h: t_BinaryHeap t -> Lemma (impl_10__peek h == snd (impl_9__pop h)) [SMTPat (impl_10__peek h)] ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Slice.fst ================================================ module Alloc.Slice open Rust_primitives.Arrays open Alloc.Vec let impl__to_vec #a (s: t_Slice a): t_Vec a Alloc.Alloc.t_Global = s ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Vec.fst ================================================ module Alloc.Vec open Rust_primitives unfold type t_Vec t (_: unit) = s:t_Slice t let impl__new #t: t_Vec t () = FStar.Seq.empty let impl_2__extend_from_slice #t (self: t_Vec t ()) (other: t_Slice t{Seq.length self + Seq.length other <= max_usize}): t_Vec t () = FStar.Seq.append self other let impl__with_capacity (_capacity: usize) = impl__new // TODO: missing precondition For now, `impl_1__push` has a wrong // semantics: pushing on a "full" vector does nothing. It should panic // instead. 
let impl_1__push #t (v: t_Vec t ())// Removed: {Seq.length v + 1 <= max_usize})
      (x: t)
   : t_Vec t () =
   (* Append `x` only while the result still fits in a vector; pushing on a
      "full" vector is a no-op here (a faithful model should panic instead).
      Note: the condition must check `length v + 1`, i.e. the length AFTER the
      push, against the capacity bound. *)
   if Seq.length v + 1 <= max_usize
   then FStar.Seq.append v (FStar.Seq.create 1 x)
   else v

(* Length of a vector, returned as a machine `usize`; safe because the
   slice invariant bounds `Seq.length v` by `maxint usize_inttype`. *)
let impl_1__len #t (v: t_Vec t ()) =
  let n = Seq.length v in
  assert (n <= maxint usize_inttype);
  mk_int #usize_inttype (Seq.length v)

(* Model of `vec![item; len]`. *)
let from_elem #a (item: a) (len: usize) = Seq.create (v len) item

open Rust_primitives.Hax
open Core.Ops.Index

(* Functional update `v[i] = x` for vectors, delegating bounds to the
   `t_Index` instance resolved by the typeclass solver. *)
instance update_at_tc_array t n: update_at_tc (t_Vec t ()) (int_t n) = {
  super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Vec t ()) (int_t n);
  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);
}
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Array.Iter.fsti
================================================
module Core.Array.Iter
open Rust_primitives

let into_iter = Core.Iter.iterator_array

let t_IntoIter t l = t_Array t l
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Array.fst
================================================
module Core.Array
open Rust_primitives

type t_TryFromSliceError = | TryFromSliceError

let impl_23__map #a #b n (arr: t_Array a n) (f: a -> b): t_Array b n
  = map_array arr f

let impl_23__as_slice #a len (arr: t_Array a len): t_Slice a = arr
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Clone.fst
================================================
module Core.Clone

class t_Clone self = {
  f_clone: x:self -> r:self {x == r}
}

(** Everything is clonable *)
instance clone_all (t: Type): t_Clone t = {
  f_clone = (fun x -> x);
}
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Cmp.fsti
================================================
module Core.Cmp
open Rust_primitives

let min (#t:inttype) (a:int_t t) (b:int_t t) = if a <.
b then a else b type t_Ordering = | Ordering_Less : t_Ordering | Ordering_Equal : t_Ordering | Ordering_Greater : t_Ordering class t_Ord (v_Self: Type) = { f_cmp:v_Self -> v_Self -> t_Ordering; // f_max:v_Self -> v_Self -> v_Self; // f_min:v_Self -> v_Self -> v_Self; // f_clamp:v_Self -> v_Self -> v_Self -> v_Self } class t_PartialEq (v_Self: Type) (v_Rhs: Type) = { // __constraint_1069563329_t_PartialEq:t_PartialEq v_Self v_Rhs; f_eq:v_Self -> v_Rhs -> bool; f_ne:v_Self -> v_Rhs -> bool } instance all_eq (a: eqtype): t_PartialEq a a = { f_eq = (fun x y -> x = y); f_ne = (fun x y -> x <> y); } class t_PartialOrd (v_Self: Type) (v_Rhs: Type) = { __constraint_Rhs_t_PartialEq:t_PartialEq v_Self v_Rhs; // __constraint_Rhs_t_PartialOrd:t_PartialOrd v_Self v_Rhs; f_partial_cmp:v_Self -> v_Rhs -> Core.Option.t_Option t_Ordering; // f_lt:v_Self -> v_Rhs -> bool; // f_le:v_Self -> v_Rhs -> bool; // f_gt:v_Self -> v_Rhs -> bool; // f_ge:v_Self -> v_Rhs -> bool } type t_Reverse t = | Reverse of t let impl__then x y = x [@FStar.Tactics.Typeclasses.tcinstance] val ord_u64: t_Ord u64 [@FStar.Tactics.Typeclasses.tcinstance] val ord_reverse t {| t_Ord t |}: t_Ord (t_Reverse t) ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Convert.fst ================================================ module Core.Convert open Rust_primitives class try_into_tc self t = { [@@@FStar.Tactics.Typeclasses.no_method] f_Error: Type; f_try_into: self -> Core.Result.t_Result t f_Error } instance impl_6 (t: Type0) (len: usize): try_into_tc (t_Slice t) (t_Array t len) = { f_Error = Core.Array.t_TryFromSliceError; f_try_into = (fun (s: t_Slice t) -> if Core.Slice.impl__len s = len then Core.Result.Result_Ok (s <: t_Array t len) else Core.Result.Result_Err Core.Array.TryFromSliceError ) } instance impl_6_refined (t: Type0) (len: usize): try_into_tc (s: t_Slice t {Core.Slice.impl__len s == len}) (t_Array t len) = { f_Error = Core.Array.t_TryFromSliceError; 
f_try_into = (fun (s: t_Slice t {Core.Slice.impl__len s == len}) -> Core.Result.Result_Ok (s <: t_Array t len) ) } class t_Into self t = { f_into: self -> t; } class t_From self t = { f_from: t -> self; } class t_TryFrom self t = { [@@@FStar.Tactics.Typeclasses.no_method] f_Error: Type; f_try_from: t -> Core.Result.t_Result self f_Error; } instance integer_into (t:inttype) (t':inttype { minint t >= minint t' /\ maxint t <= maxint t' }) : t_From (int_t t') (int_t t) = { f_from = (fun (x: int_t t) -> Rust_primitives.Integers.cast #t #t' x) } instance into_from_from a b {| t_From a b |}: t_Into b a = { f_into = (fun x -> f_from x) } instance from_id a: t_From a a = { f_from = (fun x -> x) } class t_AsRef self t = { f_as_ref: self -> t; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Adapters.Enumerate.fst ================================================ module Core.Iter.Adapters.Enumerate open Rust_primitives type t_Enumerate t = { iter: t; count: usize } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Adapters.Step_by.fst ================================================ module Core.Iter.Adapters.Step_by open Rust_primitives type t_StepBy t = { f_iter: t; f_step: n: usize {v n > 0}; f_first_take: bool; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Traits.Collect.fst ================================================ module Core.Iter.Traits.Collect class into_iterator self = { f_IntoIter: Type; // f_Item: Type; f_into_iter: self -> f_IntoIter; } let t_IntoIterator = into_iterator unfold instance impl t {| Core.Iter.Traits.Iterator.iterator t |}: into_iterator t = { f_IntoIter = t; f_into_iter = id; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Traits.Iterator.fst ================================================ 
module Core.Iter.Traits.Iterator open Rust_primitives (*** Definition of the `iterator` trait *) (** We define the types of the different method of the iterator trait on their own. This is handy for revealing only certain fields of the instances of the `iterator` trait. *) unfold type t_next self item = self -> self * option item unfold type t_contains self item = self -> item -> Type0 unfold type t_fold self (item: Type0) (contains: t_contains self item) = #b:Type -> s:self -> b -> (b -> i:item{contains s i} -> b) -> b unfold type t_enumerate self = self -> Core.Iter.Adapters.Enumerate.t_Enumerate self unfold type t_step_by self = self -> usize -> Core.Iter.Adapters.Step_by.t_StepBy self unfold type t_all self item = self -> (item -> bool) -> self * bool (* Inference behaves strangly with type synonyms... :( *) // class iterator (self: Type) = { // f_Item: Type; // f_next: t_next self f_Item; // f_contains: t_contains self f_Item; (* hax-specific method *) // f_fold: t_fold self f_Item f_contains; // f_enumerate: t_enumerate self; // } class iterator (self: Type u#0): Type u#1 = { [@@@FStar.Tactics.Typeclasses.no_method] f_Item: Type0; f_next: self -> self * option f_Item; f_contains: self -> f_Item -> Type0; f_fold: #b:Type0 -> s:self -> b -> (b -> i:f_Item{f_contains s i} -> b) -> b; f_enumerate: self -> Core.Iter.Adapters.Enumerate.t_Enumerate self; f_step_by: self -> usize -> Core.Iter.Adapters.Step_by.t_StepBy self; f_all: self -> (f_Item -> bool) -> self * bool; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.fsti ================================================ module Core.Iter open Rust_primitives open Core.Iter.Traits.Iterator (*** Instances for the `iterator` trait *) (**** Enumerate *) (** This lives in this file for cyclic dependencies reasons. 
*) val iterator_enumerate_contains it (i: iterator it) : t_contains (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item) val iterator_enumerate_fold it (i: iterator it) : t_fold (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item) (iterator_enumerate_contains it i) val iterator_enumerate_enumerate it : t_enumerate (Core.Iter.Adapters.Enumerate.t_Enumerate it) val iterator_enumerate_all it (i: iterator it) : t_all (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item) val iterator_enumerate_step_by it : t_step_by (Core.Iter.Adapters.Enumerate.t_Enumerate it) instance iterator_enumerate it {| i: iterator it |}: iterator (Core.Iter.Adapters.Enumerate.t_Enumerate it) = let open Core.Iter.Adapters.Enumerate in { f_Item = (usize * i.f_Item); f_next = (fun {iter; count} -> let open Core.Ops in let iter, opt = f_next iter in match opt with | Some value -> if v count = max_usize then {iter; count }, None else {iter; count = count +. sz 1}, Some (count, value) | None -> {iter; count}, None ); f_contains = iterator_enumerate_contains it i; f_fold = iterator_enumerate_fold it i; f_enumerate = iterator_enumerate_enumerate it; f_step_by = iterator_enumerate_step_by it; f_all = iterator_enumerate_all it i; } (**** Step_by *) (** This lives in this file for cyclic dependencies reasons. 
*) val iterator_step_by_contains it (i: iterator it) : t_contains (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item val iterator_step_by_fold it (i: iterator it) : t_fold (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item (iterator_step_by_contains it i) val iterator_step_by_next it (i: iterator it) : t_next (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item val iterator_step_by_enumerate it : t_enumerate (Core.Iter.Adapters.Step_by.t_StepBy it) val iterator_step_by_all it (i: iterator it) : t_all (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item val iterator_step_by_step_by it : t_step_by (Core.Iter.Adapters.Step_by.t_StepBy it) unfold instance iterator_step_by it {| i: iterator it |}: iterator (Core.Iter.Adapters.Step_by.t_StepBy it) = let open Core.Iter.Adapters.Enumerate in { f_Item = i.f_Item; f_next = iterator_step_by_next it i; f_contains = iterator_step_by_contains it i; f_fold = iterator_step_by_fold it i; f_enumerate = iterator_step_by_enumerate it ; f_step_by = iterator_step_by_step_by it ; f_all = iterator_step_by_all it i; } (**** Slice *) (** Slices are not iterable as such in Rust. We ignore this indirection here. 
*) open Core.Ops.Range val iterator_slice_next t: t_next (t_Slice t) t unfold let iterator_slice_contains (t: eqtype): t_contains (t_Slice t) t = fun s x -> Rust_primitives.Arrays.contains s x val iterator_slice_fold (t: eqtype): t_fold (t_Slice t) t (iterator_slice_contains t) val iterator_slice_enumerate (t: eqtype): t_enumerate (t_Slice t) val iterator_slice_step_by (t: eqtype): t_step_by (t_Slice t) val iterator_slice_all (t: eqtype): t_all (t_Slice t) t instance iterator_slice (t: eqtype): iterator (t_Slice t) = { f_Item = t; f_next = iterator_slice_next t; // size_hint = (fun s -> Some (Rust_primitives.Arrays.length s)); f_contains = iterator_slice_contains t; f_fold = iterator_slice_fold t; f_enumerate = iterator_slice_enumerate t; f_step_by = iterator_slice_step_by t; f_all = iterator_slice_all t; } (**** Array *) (** Arrays are not iterable as such in Rust. We ignore this indirection here. *) val iterator_array_next t len: t_next (t_Array t len) t unfold let iterator_array_contains (t: eqtype) len: t_contains (t_Array t len) t = fun s x -> Rust_primitives.Arrays.contains s x val iterator_array_fold (t: eqtype) len: t_fold (t_Array t len) t (iterator_array_contains t len) val iterator_array_enumerate (t: eqtype) len: t_enumerate (t_Array t len) val iterator_array_step_by (t: eqtype) len: t_step_by (t_Array t len) val iterator_array_all (t: eqtype) len: t_all (t_Array t len) t instance iterator_array (t: eqtype) len: iterator (t_Array t len) = { f_Item = t; f_next = iterator_array_next t len; // size_hint = (fun (_s: t_Array t len) -> Some len); f_contains = iterator_array_contains t len; f_fold = iterator_array_fold t len; f_enumerate = iterator_array_enumerate t len; f_step_by = iterator_array_step_by t len; f_all = iterator_array_all t len; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Marker.fst ================================================ module Core.Marker class t_Sized (h: Type) = { 
dummy_field: unit
}

(** we consider everything to be sized *)
instance t_Sized_all t: t_Sized t = {
  dummy_field = ()
}
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Num.Error.fsti
================================================
module Core.Num.Error
open Rust_primitives

type t_ParseIntError = unit
================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Num.fsti
================================================
module Core.Num
open Rust_primitives

let impl__u8__wrapping_sub: u8 -> u8 -> u8 = sub_mod

let impl__u16__wrapping_add: u16 -> u16 -> u16 = add_mod

let impl__i32__wrapping_add: i32 -> i32 -> i32 = add_mod
let impl__i32__abs (a:i32{minint i32_inttype < v a}) : i32 = abs_int a

let impl__u32__wrapping_add: u32 -> u32 -> u32 = add_mod
val impl__u32__rotate_left: u32 -> u32 -> u32
val impl__u32__from_le_bytes: t_Array u8 (sz 4) -> u32
val impl__u32__from_be_bytes: t_Array u8 (sz 4) -> u32
val impl__u32__to_le_bytes: u32 -> t_Array u8 (sz 4)
val impl__u32__to_be_bytes: u32 -> t_Array u8 (sz 4)
val impl__u32__rotate_right: u32 -> u32 -> u32
let impl__u32__BITS: u32 = classify (32ul <: pub_int_t u32_inttype)

let impl__u64__wrapping_add: u64 -> u64 -> u64 = add_mod
(* Rust's `u64::rotate_left(self, n: u32) -> u64`: the value rotated is a
   u64 and the result is a u64; only the shift amount is a u32. *)
val impl__u64__rotate_left: u64 -> u32 -> u64
val impl__u64__from_le_bytes: t_Array u8 (sz 8) -> u64
val impl__u64__from_be_bytes: t_Array u8 (sz 8) -> u64
val impl__u64__to_le_bytes: u64 -> t_Array u8 (sz 8)
val impl__u64__to_be_bytes: u64 -> t_Array u8 (sz 8)
val impl__u64__rotate_right: u64 -> u64 -> u64

let impl__u128__wrapping_add (x: u128) (y: u128): u128 =
  add_mod x y //FStar.UInt128.add_underspec x y
val impl__u128__rotate_left: u128 -> u128 -> u128
val impl__u128__from_le_bytes: t_Array u8 (sz 16) -> u128
val impl__u128__from_be_bytes: t_Array u8 (sz 16) -> u128
val impl__u128__to_le_bytes: u128 -> t_Array u8 (sz 16)
val impl__u128__to_be_bytes: u128 -> t_Array u8 (sz 16)
val
impl__u128__rotate_right: u128 -> u128 -> u128 val impl__u8__pow: u8 -> u32 -> u8 val impl__u16__pow (base: u16) (exponent: u32): result : u16 {v base == 2 /\ v exponent < 16 ==> result == mk_int #Lib.IntTypes.U16 (pow2 (v exponent))} val impl__u32__pow (base: u32) (exponent: u32): result : u32 {v base == 2 /\ v exponent <= 16 ==> result == mk_int #Lib.IntTypes.U32 (pow2 (v exponent))} val impl__u64__pow: u64 -> u32 -> u64 val impl__u128__pow: u128 -> u32 -> u128 val impl__i32__pow (base: i32) (exponent: u32): result: i32 {v base == 2 /\ v exponent <= 16 ==> result == mk_int #Lib.IntTypes.S32 (pow2 (v exponent))} val impl__u8__from_str_radix: string -> u32 -> Core.Result.t_Result u8 Core.Num.Error.t_ParseIntError val impl__usize__ilog2: i32 -> u32 ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Arith.Neg.fsti ================================================ module Core.Ops.Arith.Neg open Rust_primitives let neg #t #l (x:int_t_l t l) = zero #t #l -! x ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Arith.fsti ================================================ module Core.Ops.Arith open Rust_primitives class t_Add self rhs = { add_output: Type; add_in_bounds: self -> rhs -> Type0; ( +! ): x:self -> y:rhs {add_in_bounds x y} -> add_output; } class t_Sub self rhs = { sub_output: Type; sub_in_bounds: self -> rhs -> Type0; ( -! 
): x:self -> y:rhs {sub_in_bounds x y} -> sub_output; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Control_flow.fst ================================================ module Core.Ops.Control_flow type t_ControlFlow (b c: Type) = | ControlFlow_Continue of c | ControlFlow_Break of b ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Deref.fst ================================================ module Core.Ops.Deref let f_deref = id ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Index.IndexMut.fst ================================================ module Core.Ops.Index.IndexMut class t_IndexMut t_Self t_Idx = { f_Input: Type; in_range: t_Self -> t_Idx -> Type0; f_index_mut: s:t_Self -> i:t_Idx{in_range s i} -> v:f_Input -> t_Self; } open Rust_primitives instance impl__index_mut t l n: t_IndexMut (t_Array t l) (int_t n) = { f_Input = t; in_range = (fun (s: t_Array t l) (i: int_t n) -> v i >= 0 && v i < v l); f_index_mut = (fun s i x -> Seq.upd s (v i) x); } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Index.fst ================================================ module Core.Ops.Index class t_Index (t_Self:Type0) (t_Idx:Type0) = { f_Output: Type0; in_range: t_Self -> t_Idx -> Type0; f_index: s:t_Self -> i:t_Idx{in_range s i} -> f_Output; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Range.fsti ================================================ module Core.Ops.Range open Rust_primitives type t_RangeTo (t: Type) = {f_end: t} type t_RangeFrom (t: Type) = {f_start: t} type t_Range (t: Type) = {f_start: t; f_end: t} type t_RangeFull = | RangeFull open Core.Iter.Traits.Iterator module LI = Lib.IntTypes let rec fold_range' #t (min: Rust_primitives.pub_int_t t) (max: 
Rust_primitives.pub_int_t t{v min <= v max})
    (init: 'a)
    (f: ('a -> i:Rust_primitives.pub_int_t t{v i < v max /\ v i >= v min} -> 'a))
    : Tot 'a (decreases (v max - v min))
  = if min =. max then init
    else fold_range' (add min (Rust_primitives.mk_int_l #_ #LI.PUB 1)) max (f init min) f

val iterator_range_enumerate t: t_enumerate (t_Range (Rust_primitives.pub_int_t t))
val iterator_range_step_by t: t_step_by (t_Range (Rust_primitives.pub_int_t t))
val iterator_range_all t: t_all (t_Range (Rust_primitives.pub_int_t t)) (Rust_primitives.pub_int_t t)

instance iterator_range t: iterator (t_Range (Rust_primitives.pub_int_t t)) = {
  f_Item = Rust_primitives.pub_int_t t;
  f_next = (fun {f_start; f_end} ->
    if f_start >=. f_end
    then ({f_start; f_end}, None)
    (* Yield the current start and advance the cursor by ONE, mirroring
       the step used by `fold_range'` above. *)
    else ({f_start = f_start +. Rust_primitives.mk_pub_int 1; f_end}, Some f_start)
  );
  f_contains = (fun x i -> v i < v x.f_end /\ v i >= v x.f_start);
  f_fold = (fun #b r init f ->
    if r.f_start >=. r.f_end then init
    else fold_range' r.f_start r.f_end init (fun x i -> f x i));
  f_enumerate = iterator_range_enumerate t;
  f_step_by = iterator_range_step_by t;
  f_all = iterator_range_all t;
}

open Core.Ops.Index

instance impl_index_range_slice t n : t_Index (t_Slice t) (t_Range (pub_int_t n)) = {
  f_Output = t_Slice t ;
  in_range = (fun (s: t_Slice t) {f_start; f_end} ->
    let len = Rust_primitives.length s in
    v f_start >= 0 /\ v f_start <= v len /\ v f_end <= v len) ;
  f_index = (fun s {f_start; f_end} -> if f_start <.
f_end then Seq.slice s (v f_start) (v f_end) else Seq.empty)} instance impl_index_range_to_slice t n : t_Index (t_Slice t) (t_RangeTo (pub_int_t n)) = { f_Output = t_Slice t ; in_range = (fun (s: t_Slice t) ({f_end}: t_RangeTo (pub_int_t n)) -> let len = Rust_primitives.length s in v f_end <= v len) ; f_index = (fun s {f_end} -> if 0 < v f_end then Seq.slice s 0 (v f_end) else Seq.empty)} instance impl_index_range_from_slice t n : t_Index (t_Slice t) (t_RangeFrom (pub_int_t n)) = { f_Output = t_Slice t ; in_range = (fun (s: t_Slice t) ({f_start}: t_RangeFrom (pub_int_t n)) -> let len = Rust_primitives.length s in v f_start >= 0 /\ v f_start <= v len) ; f_index = (fun s {f_start} -> let len = Rust_primitives.length s in if v f_start = v len then Seq.empty else Seq.slice s (v f_start) (v len))} instance impl_index_range_full_slice t : t_Index (t_Slice t) t_RangeFull = { f_Output = t_Slice t ; in_range = (fun (s: t_Slice t) _ -> True) ; f_index = (fun s _ -> s)} instance impl_range_index_array t len n : t_Index (t_Array t len) (t_Range (pub_int_t n)) = let i = impl_index_range_slice t n in { f_Output = i.f_Output ; in_range = (fun (s: t_Array t len) r -> i.in_range s r) ; f_index = (fun s r -> i.f_index s r) } instance impl_range_to_index_array t len n : t_Index (t_Array t len) (t_RangeTo (pub_int_t n)) = let i = impl_index_range_to_slice t n in { f_Output = i.f_Output ; in_range = (fun (s: t_Array t len) r -> i.in_range s r) ; f_index = (fun s r -> i.f_index s r) } instance impl_range_from_index_array t len n : t_Index (t_Array t len) (t_RangeFrom (pub_int_t n)) = let i = impl_index_range_from_slice t n in { f_Output = i.f_Output ; in_range = (fun (s: t_Array t len) r -> i.in_range s r) ; f_index = (fun s r -> i.f_index s r) } instance impl_range_full_index_array t len : t_Index (t_Array t len) t_RangeFull = let i = impl_index_range_full_slice t in { f_Output = i.f_Output ; in_range = (fun (s: t_Array t len) r -> i.in_range s r) ; f_index = (fun s r -> i.f_index s r) 
} open Rust_primitives.Hax let update_at_tc_array_range_super t l n: t_Index (t_Array t l) (t_Range (pub_int_t n)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_to_super t l n: t_Index (t_Array t l) (t_RangeTo (pub_int_t n)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_from_super t l n: t_Index (t_Array t l) (t_RangeFrom (pub_int_t n)) = FStar.Tactics.Typeclasses.solve let update_at_tc_array_range_full_super t l: t_Index (t_Array t l) t_RangeFull = FStar.Tactics.Typeclasses.solve val update_at_array_range t l n (s: t_Array t l) (i: t_Range (pub_int_t n) {(update_at_tc_array_range_super t l n).in_range s i}) : (update_at_tc_array_range_super t l n).f_Output -> t_Array t l val update_at_array_range_to t l n (s: t_Array t l) (i: t_RangeTo (pub_int_t n) {(update_at_tc_array_range_to_super t l n).in_range s i}) : (update_at_tc_array_range_to_super t l n).f_Output -> t_Array t l val update_at_array_range_from t l n (s: t_Array t l) (i: t_RangeFrom (pub_int_t n) {(update_at_tc_array_range_from_super t l n).in_range s i}) : (update_at_tc_array_range_from_super t l n).f_Output -> t_Array t l val update_at_array_range_full t l (s: t_Array t l) (i: t_RangeFull) : (update_at_tc_array_range_full_super t l).f_Output -> t_Array t l instance update_at_tc_array_range t l n: update_at_tc (t_Array t l) (t_Range (pub_int_t n)) = { super_index = update_at_tc_array_range_super t l n; update_at = update_at_array_range t l n } instance update_at_tc_array_range_to t l n: update_at_tc (t_Array t l) (t_RangeTo (pub_int_t n)) = { super_index = update_at_tc_array_range_to_super t l n; update_at = update_at_array_range_to t l n } instance update_at_tc_array_range_from t l n: update_at_tc (t_Array t l) (t_RangeFrom (pub_int_t n)) = { super_index = update_at_tc_array_range_from_super t l n; update_at = update_at_array_range_from t l n } instance update_at_tc_array_range_full t l: update_at_tc (t_Array t l) t_RangeFull = { super_index = 
update_at_tc_array_range_full_super t l; update_at = update_at_array_range_full t l } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Try_trait.fst ================================================ module Core.Ops.Try_trait class t_FromResidual self r = { f_from_residual: r -> self; } class t_Try self = { f_Output: Type; f_Residual: Type; [@@@FStar.Tactics.Typeclasses.tcresolve] parent_FromResidual: t_FromResidual f_Residual f_Residual; f_from_output: f_Output -> self; f_branch: self -> Core.Ops.Control_flow.t_ControlFlow f_Residual f_Output; } ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.fst ================================================ module Core.Ops open Rust_primitives // class add_tc self rhs = { // output: Type; // in_bounds: self -> rhs -> Type0; // ( +! ): x:self -> y:rhs {in_bounds x y} -> output; // } class negation_tc self = { ( ~. ): self -> self; } instance negation_for_integers #t: negation_tc (int_t t) = { ( ~. ) = fun x -> lognot x } instance negation_for_bool: negation_tc bool = { ( ~. ) = not } open Core.Ops.Index let ( .[] ) (#self:Type0) (#idx:Type0) {| inst: t_Index self idx |} : s:self -> i:idx{in_range s i} -> inst.f_Output = f_index ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Option.fst ================================================ module Core.Option type t_Option t = | Option_Some of t | Option_None let impl__and_then #t #t_Self (self: t_Option t_Self) (f: t_Self -> t_Option t): t_Option t = match self with | Option_Some x -> f x | Option_None -> Option_None let impl__unwrap #t (x: t_Option t {Option_Some? x}): t = Option_Some?._0 x let impl__is_some #t_Self (self: t_Option t_Self): bool = Option_Some? 
self ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Panicking.fst ================================================ module Core.Panicking open Rust_primitives open Rust_primitives.Hax type t_AssertKind = | AssertKind_Eq let panic (message: string {False}): t_Never = match () with let assert_failed (k: t_AssertKind) x y (z: Core.Option.t_Option unit {False}): t_Never = match () with ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Result.fst ================================================ module Core.Result type t_Result t e = | Result_Ok: v:t -> t_Result t e | Result_Err of e let impl__unwrap (x: t_Result 't 'e {Result_Ok? x}): 't = Result_Ok?.v x ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Slice.Iter.fst ================================================ module Core.Slice.Iter open Rust_primitives unfold let t_Chunks a = t_Slice (t_Slice a) unfold let t_ChunksExact a = t_Slice (t_Slice a) unfold let t_Iter a = t_Slice a ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Slice.fsti ================================================ module Core.Slice open Rust_primitives.Arrays open Rust_primitives.Integers let impl__len (#t: Type) (s: t_Slice t) : len: usize {len == sz (Seq.length s)} = sz (Seq.length s) open Core.Slice.Iter val impl__chunks #a (x: t_Slice a) (cs: usize): t_Chunks a let impl__iter #t (s: t_Slice t): t_Slice t = s val impl__chunks_exact #a (x: t_Slice a) (cs: usize): Pure (t_Slice (t_Slice a)) (requires True) (ensures (fun r -> forall i. 
i < v (length x) ==> length x == cs)) open Core.Ops.Index instance impl__index t n: t_Index (t_Slice t) (int_t n) = { f_Output = t; in_range = (fun (s: t_Slice t) (i: int_t n) -> v i >= 0 && v i < v (length s)); f_index = (fun s i -> Seq.index s (v i)); } let impl__copy_from_slice #t (x: t_Slice t) (y:t_Slice t) : t_Slice t = y val impl__split_at #t (s: t_Slice t) (mid: usize): Pure (t_Slice t * t_Slice t) (requires (v mid <= Seq.length s)) (ensures (fun (x,y) -> Seq.length x == v mid /\ Seq.length y == Seq.length s - v mid /\ x == Seq.slice s 0 (v mid) /\ y == Seq.slice s (v mid) (Seq.length s) /\ s == Seq.append x y)) ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.Converts.fsti ================================================ module Core.Str.Converts open Rust_primitives val from_utf8 (s: t_Slice u8): Core.Result.t_Result string Core.Str.Error.t_Utf8Error ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.Error.fsti ================================================ module Core.Str.Error type t_Utf8Error = unit ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.fsti ================================================ module Core.Str open Rust_primitives val impl__str__len: string -> usize val impl__str__as_bytes: string -> t_Slice u8 /// Parses this string slice into another type val impl_str__parse (#t: Type0) (#err: Type0) (s:string) : (Core.Result.t_Result t err) /// Trims trailing whitespace val impl_str__trim : string -> string /// Split strings on patterns val impl_str__split : (#pattern: Type) -> string -> pattern -> (Core.Str.Iter.t_Split pattern) ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Core.fst ================================================ module Core include Rust_primitives include Core.Num include Core.Iter include 
Core.Ops ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HACL_HOME to be set to your HACL* repo location # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." # (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar-secret-integers FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar HACL_HOME ?= $(HAX_LIBS_HOME)/../../../hacl-star FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache HINT_DIR ?= $(HAX_LIBS_HOME)/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. 
Here, we # *extend* the set of relevant files with the tests. ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/core/README.md ================================================ # Core (and alloc) library This directory contains a model for the [Core Rust library](https://doc.rust-lang.org/core/): the minimal Rust foundation behind the [standard libarary of Rust](https://doc.rust-lang.org/std/index.html). This also includes a model for some part of the [`alloc` Rust library](https://doc.rust-lang.org/stable/alloc/). Core is self-contained, and is dependency-free: it links to no upstream or system libraries. Thus, even if it is minimal, it is not small: it is around **75k LoC**, comments excluded. In this directory, you will find the first stage of our approach to `core` in F\*: a hand-written model. 
Note that this model tries to follow as much as possible the structure and naming found in the Rust core library. The second stage of our approach to `core` is automatic generation with specifications and models. Our plan is to annotate the Rust `core` library with specifications and models written directly as Rust annotations. This will enable automatic generation of `core` models with consistent semantics in all of hax backends (for now F\* and Coq). Note that we already started experimenting with this second approach: hax is already able to digest and generate signature-only F\* for more than 80% of core definitions. ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/hax_lib/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HACL_HOME to be set to your HACL* repo location # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar HACL_HOME ?= $(HAX_LIBS_HOME)/../../../hacl-star FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache HINT_DIR ?= $(HAX_LIBS_HOME)/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. Here, we # *extend* the set of relevant files with the tests. ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* 
================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect FSTAR_HOME to be set to your FSTAR repo/install directory # We expect HACL_HOME to be set to your HACL* repo location # We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." # (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar-secret-integers FSTAR_HOME ?= $(HAX_LIBS_HOME)/../../../FStar HACL_HOME ?= $(HAX_LIBS_HOME)/../../../hacl-star FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= $(HAX_LIBS_HOME)/.cache HINT_DIR ?= $(HAX_LIBS_HOME)/.hints .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # By default, we process all the files in the current directory. 
Here, we # *extend* the set of relevant files with the tests. ROOTS = $(wildcard *.fst) FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets SHELL=/usr/bin/env bash clean: rm -rf $(CACHE_DIR)/* ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Arrays.fst ================================================ module Rust_primitives.Arrays open Rust_primitives.Integers let of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint Lib.IntTypes.U16}): t_Slice t = Seq.seq_of_list l let to_list (#t:Type) (s: t_Slice t): list t = Seq.seq_to_list s let to_of_list_lemma t l = Seq.lemma_list_seq_bij l let of_to_list_lemma t l = Seq.lemma_seq_list_bij l let map_array #a #b #n (arr: t_Array a n) (f: a -> b): t_Array b n = FStar.Seq.map_seq_len f arr; FStar.Seq.map_seq f arr let createi #t l f = admit() let lemma_index_concat x y i = admit() let lemma_index_slice x y i = admit() let eq_intro a b = admit() ================================================ FILE: 
hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Arrays.fsti ================================================ module Rust_primitives.Arrays open Rust_primitives.Integers type t_Slice t = s:Seq.seq t{Seq.length s <= max_usize} type t_Array t (l:usize) = s: Seq.seq t { Seq.length s == v l } let length (s: t_Slice 'a): usize = sz (Seq.length s) let contains (#t: eqtype) (s: t_Slice t) (x: t): bool = Seq.mem x s val of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint Lib.IntTypes.U16}): t_Array t (sz (FStar.List.Tot.length l)) val to_list (#t:Type) (s: t_Slice t): list t val map_array #n (arr: t_Array 'a n) (f: 'a -> 'b): t_Array 'b n val createi #t (l:usize) (f:(u:usize{u <. l} -> t)) : Pure (t_Array t l) (requires True) (ensures (fun res -> (forall i. Seq.index res (v i) == f i))) unfold let map #p (f:(x:'a{p x} -> 'b)) (s: t_Slice 'a {forall (i:nat). i < Seq.length s ==> p (Seq.index s i)}): t_Slice 'b = createi (length s) (fun i -> f (Seq.index s (v i))) let concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) : r:t_Array t (length x +! length y) = Seq.append x y val lemma_index_concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) (i:usize{i <. length x +! length y}): Lemma (if i <. length x then Seq.index (concat x y) (v i) == Seq.index x (v i) else Seq.index (concat x y) (v i) == Seq.index y (v (i -! length x))) [SMTPat (Seq.index (concat #t x y) i)] let slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\ j <=. length x}): r:t_Array t (j -! i) = Seq.slice x (v i) (v j) val lemma_index_slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\ j <=. length x}) (k:usize{k <. j -! i}): Lemma (Seq.index (slice x i j) (v k) == Seq.index x (v (i +! k))) [SMTPat (Seq.index (slice x i j) (v k))] val eq_intro #t (a : Seq.seq t) (b:Seq.seq t{Seq.length a == Seq.length b}): Lemma (requires forall i. 
{:pattern Seq.index a i; Seq.index b i} i < Seq.length a ==> Seq.index a i == Seq.index b i) (ensures Seq.equal a b) [SMTPat (Seq.equal a b)] let split #t (a:t_Slice t) (m:usize{m <=. length a}): Pure (t_Array t m & t_Array t (length a -! m)) True (ensures (fun (x,y) -> x == slice a (sz 0) m /\ y == slice a m (length a) /\ concat #t x y == a)) = let x = Seq.slice a 0 (v m) in let y = Seq.slice a (v m) (Seq.length a) in assert (Seq.equal a (concat x y)); (x,y) let lemma_slice_append #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t): Lemma (requires (range (v (length y) + v (length z)) usize_inttype /\ length y +! length z == length x /\ y == slice x (sz 0) (length y) /\ z == slice x (length y) (length x))) (ensures (x == concat y z)) = assert (Seq.equal x (concat y z)) let lemma_slice_append_3 #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t) (w:t_Slice t): Lemma (requires (range (v (length y) + v (length z) + v (length w)) usize_inttype /\ length y +! length z +! length w == length x /\ y == slice x (sz 0) (length y) /\ z == slice x (length y) (length y +! length z) /\ w == slice x (length y +! length z) (length x))) (ensures (x == concat y (concat z w))) = assert (Seq.equal x (Seq.append y (Seq.append z w))) #push-options "--z3rlimit 100" let lemma_slice_append_4 #t (x y z w u:t_Slice t) : Lemma (requires (range (v (length y) + v (length z) + v (length w) + v (length u)) usize_inttype /\ length y +! length z +! length w +! length u == length x /\ y == slice x (sz 0) (length y) /\ z == slice x (length y) (length y +! length z) /\ w == slice x (length y +! length z) (length y +! length z +! length w) /\ u == slice x (length y +! length z +! 
length w) (length x))) (ensures (x == concat y (concat z (concat w u)))) = assert (Seq.equal x (Seq.append y (Seq.append z (Seq.append w u)))) #pop-options ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.BitVectors.fst ================================================ module Rust_primitives.BitVectors open FStar.Mul open Rust_primitives.Arrays open Rust_primitives.Integers #set-options "--fuel 0 --ifuel 1 --z3rlimit 40" let lemma_get_bit_bounded #t x d i = admit() let lemma_get_bit_bounded' #t x d = admit() let pow2_minus_one_mod_lemma1 (n: nat) (m: nat {m < n}) : Lemma (((pow2 n - 1) / pow2 m) % 2 == 1) = let d: pos = n - m in Math.Lemmas.pow2_plus m d; Math.Lemmas.lemma_div_plus (-1) (pow2 d) (pow2 m); if d > 0 then Math.Lemmas.pow2_double_mult (d-1) let pow2_minus_one_mod_lemma2 (n: nat) (m: nat {n <= m}) : Lemma (((pow2 n - 1) / pow2 m) % 2 == 0) = Math.Lemmas.pow2_le_compat m n; Math.Lemmas.small_div (pow2 n - 1) (pow2 m) let get_bit_pow2_minus_one #t #l n nth = reveal_opaque (`%get_bit) (get_bit (mk_int_l #t #l (pow2 n - 1)) nth); if v nth < n then pow2_minus_one_mod_lemma1 n (v nth) else pow2_minus_one_mod_lemma2 n (v nth) let get_bit_pow2_minus_one_i32 x nth = let n = Some?.v (mask_inv_opt x) in assume (pow2 n - 1 == x); mk_int_equiv_lemma #i32_inttype x; get_bit_pow2_minus_one #i32_inttype #Lib.IntTypes.PUB n nth let get_bit_pow2_minus_one_u32 x nth = let n = Some?.v (mask_inv_opt x) in assume (pow2 n - 1 == x); mk_int_equiv_lemma #u32_inttype x; get_bit_pow2_minus_one #u32_inttype #Lib.IntTypes.PUB n nth let get_bit_pow2_minus_one_u16 x nth = let n = Some?.v (mask_inv_opt x) in assume (pow2 n - 1 == x); mk_int_equiv_lemma #u16_inttype x; get_bit_pow2_minus_one #u16_inttype #Lib.IntTypes.PUB n nth let get_bit_pow2_minus_one_u8 t x nth = let n = Some?.v (mask_inv_opt x) in assume (pow2 n - 1 == x); mk_int_equiv_lemma #u8_inttype x; get_bit_pow2_minus_one #u8_inttype 
#Lib.IntTypes.PUB n nth let get_last_bit_signed_lemma #t x = admit () ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.BitVectors.fsti ================================================ module Rust_primitives.BitVectors open FStar.Mul open Rust_primitives.Arrays open Rust_primitives.Integers /// Number of bits carried by an integer of type `t` type bit_num t = d: nat {d > 0 /\ d <= bits t /\ (signed t ==> d <= bits t)} /// Number of bits carried by an integer of type `t` type bounded #t (x:int_t t) (d:bit_num t) = v x >= 0 /\ v x < pow2 d /// Integer of type `t` that carries `d` bits type int_t_d t (d: bit_num t) = n: int_t t {bounded n d} val lemma_get_bit_bounded #t (x:int_t t) (d:bit_num t) (i:usize): Lemma ((bounded x d /\ v i >= d /\ v i < bits t) ==> get_bit x i == 0) [SMTPat (get_bit #t x i); SMTPat (bounded x d)] val lemma_get_bit_bounded' #t (x:int_t t) (d:bit_num t): Lemma (requires forall i. v i > d ==> get_bit x i == 0) (ensures bounded x d) type bit_vec (len: nat) = i:nat {i < len} -> bit /// Transform an array of integers to a bit vector #push-options "--fuel 0 --ifuel 1 --z3rlimit 50" let bit_vec_of_int_arr (#n: inttype) (#len: usize) (arr: t_Array (int_t n) len) (d: bit_num n): bit_vec (v len * d) = fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d)) #pop-options /// Transform an array of `nat`s to a bit vector #push-options "--fuel 0 --ifuel 1 --z3rlimit 50" let bit_vec_of_nat_arr (#len: usize) (arr: t_Array nat len) (d: nat) : bit_vec (v len * d) = fun i -> get_bit_nat (Seq.index arr (i / d)) (i % d) #pop-options /// Bit-wise semantics of `2^n-1` val get_bit_pow2_minus_one #t #l (n: nat {pow2 n - 1 <= maxint t}) (nth: usize {v nth < bits t}) : Lemma ( get_bit (mk_int_l #t #l (pow2 n - 1)) nth == (if v nth < n then 1 else 0)) /// Log2 table unfold let mask_inv_opt = function | 0 -> Some 0 | 1 -> Some 1 | 3 -> Some 2 | 7 -> Some 3 | 15 -> Some 4 | 31 -> Some 5 | 63 -> 
Some 6  | 127 -> Some 7  | 255 -> Some 8
  | 511 -> Some 9 | 1023 -> Some 10 | 2047 -> Some 11 | 4095 -> Some 12
  (* | 8191 -> Some 13 | 16383 -> Some 14 | 32767 -> Some 15 | 65535 -> Some 16
     | 131071 -> Some 17 | 262143 -> Some 18 | 524287 -> Some 19 | 1048575 -> Some 20
     | 2097151 -> Some 21 | 4194303 -> Some 22 | 8388607 -> Some 23 | 16777215 -> Some 24
     | 33554431 -> Some 25 | 67108863 -> Some 26 | 134217727 -> Some 27 | 268435455 -> Some 28
     | 536870911 -> Some 29 | 1073741823 -> Some 30 | 2147483647 -> Some 31 | 4294967295 -> Some 32 *)
  | _ -> None

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targeting machine integer literals of type `i32`
val get_bit_pow2_minus_one_i32
  (x: int {x < pow2 31 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})
  : Lemma (  get_bit (FStar.Int32.int_to_t x) nth
          == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (FStar.Int32.int_to_t x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targeting machine integer literals of type `u32`
val get_bit_pow2_minus_one_u32
  (x: int {x < pow2 32 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})
  : Lemma (  get_bit (FStar.UInt32.uint_to_t x) nth
          == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    (* Fix: the pattern previously mentioned `FStar.UInt16.uint_to_t` (copy-paste
       from the u16 lemma below), so this lemma never triggered on u32 literals.
       The pattern must match the term the lemma's conclusion is about. *)
    [SMTPat (get_bit (FStar.UInt32.uint_to_t x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targeting machine integer literals of type `u16`
val get_bit_pow2_minus_one_u16
  (x: int {x < pow2 16 /\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 16})
  : Lemma (  get_bit (FStar.UInt16.uint_to_t x) nth
          == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))
    [SMTPat (get_bit (FStar.UInt16.uint_to_t x) nth)]

/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns
/// targeting machine integer literals of type `u8`
val get_bit_pow2_minus_one_u8 (t: _ {t == u8_inttype})
  (x: int {x < pow2 8 /\ Some?
(mask_inv_opt x)}) (nth: usize {v nth < 8}) : Lemma ( get_bit #t #Lib.IntTypes.PUB (FStar.UInt8.uint_to_t x) nth == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0)) [SMTPat (get_bit #t #Lib.IntTypes.PUB (FStar.UInt8.uint_to_t x) nth)] // XXX: Why the #t here and not in the ones above? val get_last_bit_signed_lemma (#t: inttype{signed t}) (x: int_t t) : Lemma ( get_bit x (mk_int_l (bits t - 1)) == (if v x < 0 then 1 else 0)) // [SMTPat (get_bit x (mk_int (bits t - 1)))] ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst ================================================ module Rust_primitives.Hax.Monomorphized_update_at open Rust_primitives open Rust_primitives.Hax open Core.Ops.Range let update_at_usize s i x = update_at s i x let update_at_range #n s i x = let res = update_at s i x in admit(); // To be proved res let update_at_range_to #n s i x = let res = update_at s i x in admit(); res let update_at_range_from #n s i x = let res = update_at s i x in admit(); res let update_at_range_full s i x = let res = update_at s i x in admit(); res ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti ================================================ module Rust_primitives.Hax.Monomorphized_update_at open Rust_primitives open Rust_primitives.Hax open Core.Ops.Range #set-options "--z3rlimit 30" val update_at_usize (#t: Type0) (s: t_Slice t) (i: usize) (x: t) : Pure (t_Array t (length s)) (requires (v i < Seq.length s)) (ensures (fun res -> res == Seq.upd s (v i) x)) val update_at_range #n (#t: Type0) (s: t_Slice t) (i: t_Range (pub_int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (v i.f_start >= 0 /\ v i.f_start <= Seq.length s /\ v i.f_end <= Seq.length s /\ Seq.length x == v i.f_end - v i.f_start)) (ensures (fun res -> Seq.slice res 0 (v 
i.f_start) == Seq.slice s 0 (v i.f_start) /\ Seq.slice res (v i.f_start) (v i.f_end) == x /\ Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s))) val update_at_range_to #n (#t: Type0) (s: t_Slice t) (i: t_RangeTo (pub_int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (v i.f_end >= 0 /\ v i.f_end <= Seq.length s /\ Seq.length x == v i.f_end)) (ensures (fun res -> Seq.slice res 0 (v i.f_end) == x /\ Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s))) val update_at_range_from #n (#t: Type0) (s: t_Slice t) (i: t_RangeFrom (pub_int_t n)) (x: t_Slice t) : Pure (t_Array t (length s)) (requires ( v i.f_start >= 0 /\ v i.f_start <= Seq.length s /\ Seq.length x == Seq.length s - v i.f_start)) (ensures (fun res -> Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\ Seq.slice res (v i.f_start) (Seq.length res) == x)) val update_at_range_full (#t: Type0) (s: t_Slice t) (i: t_RangeFull) (x: t_Slice t) : Pure (t_Array t (length s)) (requires (Seq.length x == Seq.length s)) (ensures (fun res -> res == x)) ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.fst ================================================ module Rust_primitives.Hax open Rust_primitives.Integers open Rust_primitives.Arrays type t_Never = False let never_to_any #t: t_Never -> t = (fun _ -> match () with) let repeat (#t:Type0) (x: t) (len: usize): t_Array t len = FStar.Seq.create (v len) x open Core.Ops.Index class update_at_tc self idx = { [@@@FStar.Tactics.Typeclasses.tcinstance] super_index: t_Index self idx; update_at: s: self -> i: idx {in_range s i} -> super_index.f_Output -> self; } open Core.Slice instance impl__index t n: t_Index (t_Slice t) (pub_int_t n) = { f_Output = t; in_range = (fun (s: t_Slice t) (i: pub_int_t n) -> v i >= 0 && v i < Seq.length s); f_index = (fun s i -> Seq.index s (v i)); } instance impl__index_array t l n: 
t_Index (t_Array t l) (pub_int_t n) = { f_Output = t; in_range = (fun (s: t_Array t l) (i: pub_int_t n) -> v i >= 0 && v i < v l); f_index = (fun s i -> Seq.index s (v i)); } instance update_at_tc_slice t n: update_at_tc (t_Slice t) (pub_int_t n) = { super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Slice t) (pub_int_t n); update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x); } instance update_at_tc_array t l n: update_at_tc (t_Array t l) (pub_int_t n) = { super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Array t l) (pub_int_t n); update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x); } let (.[]<-) #self #idx {| update_at_tc self idx |} (s: self) (i: idx {in_range s i}) = update_at s i let array_of_list #t = Rust_primitives.Arrays.of_list #t ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Integers.fst ================================================ module Rust_primitives.Integers #set-options "--z3rlimit 100 --fuel 0 --ifuel 1" let pow2_values x = let p = pow2 x in assert_norm (p == normalize_term (pow2 x)) let usize_inttype = LI.U32 let isize_inttype = LI.S32 let v_injective #t a = LI.v_injective #t #LI.PUB a let v_mk_int #t n = LI.v_mk_int #t #LI.PUB n let usize_to_uint32 x = x let usize_to_uint64 x = Int.Cast.uint32_to_uint64 x let size_to_uint64 x = Int.Cast.uint32_to_uint64 x let mk_int_l #t #l a = LI.mk_int #t #l a let mk_int_equiv_lemma #_ = admit () let mk_int_v_lemma #t a = () let v_mk_int_lemma #t a = () let declassify #t #l #l' a = admit() let add_mod_equiv_lemma #t #l #l' a b = LI.add_mod_lemma #_ #(meet l l') (classify a) (classify b) let add_equiv_lemma #t #l #l' a b = LI.add_lemma #_ #(meet l l') (classify a) (classify b) let incr_equiv_lemma #t #l a = LI.incr_lemma #t #l a let mul_mod_equiv_lemma #t #l #l' a b = LI.mul_mod_lemma #_ #(meet l l') (classify a) (classify b) let mul_equiv_lemma #t #l #l' a b = LI.mul_lemma #_ #(meet l l') (classify a) 
(classify b) let sub_mod_equiv_lemma #t #l #l' a b = LI.sub_mod_lemma #_ #(meet l l') (classify a) (classify b) let sub_equiv_lemma #t #l #l' a b = LI.sub_lemma #_ #(meet l l') (classify a) (classify b) let decr_equiv_lemma #t a = LI.decr_lemma #t a let div_equiv_lemma #t a b = admit(); LI.div_lemma #t a b let mod_equiv_lemma #t a b = admit(); LI.mod_lemma #t a b let lognot #t a = LI.lognot #t a let lognot_lemma #t a = admit() let logxor #t #l1 #l2 a b = LI.logxor #t #(meet l1 l2) (classify a) (classify b) let logxor_lemma #t a b = admit() let logand #t #l1 #l2 a b = LI.logand #t #(meet l1 l2) (classify a) (classify b) let logand_lemma #t a b = admit() let logand_mask_lemma #t a b = admit() let logor #t #l1 #l2 a b = LI.logor #t #(meet l1 l2) (classify a) (classify b) let logor_lemma #t a b = admit() let shift_right_equiv_lemma #t a b = admit() let shift_left_equiv_lemma #t a b = admit() let rotate_right #t a b = LI.rotate_right #t a (cast b) let rotate_right_equiv_lemma #t a b = () let rotate_left #t a b = LI.rotate_left #t a (cast b) let rotate_left_equiv_lemma #t a b = () let abs_int_equiv_lemma #t a = admit() let neg_equiv_lemma #_ _ = admit() let get_bit_and _x _y _i = admit () let get_bit_or _x _y _i = admit () let get_bit_shl _x _y _i = admit () let get_bit_shr _x _y _i = admit () let get_bit_cast #t #u #l x nth = reveal_opaque (`%get_bit) (get_bit x nth); reveal_opaque (`%get_bit) (get_bit (cast_mod #t #u x <: int_t_l u l) nth); admit () let get_bit_cast_extend #t #u x nth = admit () ================================================ FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Integers.fsti ================================================ module Rust_primitives.Integers open FStar.Mul module LI = Lib.IntTypes #set-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20" val pow2_values: x:nat -> Lemma (let p = pow2 x in match x with | 0 -> p=1 | 1 -> p=2 | 8 -> p=256 | 16 -> p=65536 | 31 -> p=2147483648 | 32 -> p=4294967296 | 63 
-> p=9223372036854775808 | 64 -> p=18446744073709551616 | 2 | 3 | 4 | 5 | 6 | 7 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 65 | 127 | 128 -> p = normalize_term (pow2 x) | _ -> True) [SMTPat (pow2 x)] type inttype = LI.inttype let unsigned = LI.unsigned let signed = LI.signed type uinttype = t:inttype{unsigned t} let int_t_l t l = LI.int_t t l let int_t t = int_t_l t LI.SEC let pub_int_t t = int_t_l t LI.PUB let meet (l1 l2:LI.secrecy_level) : LI.secrecy_level = match l1, l2 with | LI.SEC, LI.PUB -> LI.SEC | LI.SEC, LI.SEC -> LI.SEC | LI.PUB, LI.SEC -> LI.SEC | LI.PUB, LI.PUB -> LI.PUB let can_flow (l1 l2:LI.secrecy_level) : bool = match l1, l2 with | LI.PUB, LI.PUB -> true | LI.SEC, LI.SEC -> true | LI.PUB, LI.SEC -> true | LI.SEC, LI.PUB -> false let bits t = LI.bits t let u8_inttype = LI.U8 let i8_inttype = LI.S8 let u16_inttype = LI.U16 let i16_inttype = LI.S16 let u32_inttype = LI.U32 let i32_inttype = LI.S32 let u64_inttype = LI.U64 let i64_inttype = LI.S64 let u128_inttype = LI.U128 let i128_inttype = LI.S128 val usize_inttype: t:inttype{unsigned t /\ (t = LI.U32 \/ t = LI.U64)} val isize_inttype: t:inttype{signed t /\ (t = LI.S32 \/ t = LI.S64)} type u8 = int_t LI.U8 type i8 = int_t LI.S8 type u16 = int_t LI.U16 type i16 = int_t LI.S16 type u32 = int_t LI.U32 type i32 = int_t LI.S32 type u64 = int_t LI.U64 type i64= int_t LI.S64 type u128 = int_t LI.U128 type i128 = int_t LI.S128 type pub_u8 = pub_int_t LI.U8 type pub_i8 = pub_int_t LI.S8 type pub_u16 = pub_int_t LI.U16 type pub_i16 = pub_int_t LI.S16 type pub_u32 = pub_int_t LI.U32 type pub_i32 = pub_int_t LI.S32 type pub_u64 = pub_int_t LI.U64 type pub_i64= pub_int_t LI.S64 type pub_u128 = pub_int_t LI.U128 type pub_i128 = pub_int_t LI.S128 type usize = pub_int_t usize_inttype type isize = 
pub_int_t isize_inttype let minint (t:LI.inttype) = if unsigned t then 0 else -(pow2 (bits t - 1)) let maxint (t:LI.inttype) = if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1 let modulus (t:LI.inttype) = pow2 (bits t) let max_usize = maxint usize_inttype let max_isize = maxint isize_inttype //let range_bits (n:int) (n:bits) : bool = // minint t <= n && n <= maxint t let range (n:int) (t:inttype) : bool = minint t <= n && n <= maxint t type range_t (t:inttype) = x:int{range x t} [@(strict_on_arguments [0])] let v (#t:inttype) (#l) (x:int_t_l t l) : range_t t = LI.v #t #l x [@(strict_on_arguments [0])] val mk_int_l (#t:inttype) (#l:LI.secrecy_level) (n:range_t t) : int_t_l t l [@(strict_on_arguments [0])] let mk_int (#t:inttype) (n:range_t t) : int_t t = mk_int_l n [@(strict_on_arguments [0])] let mk_pub_int (#t:inttype) (n:range_t t) : pub_int_t t = mk_int_l n [@(strict_on_arguments [0])] val mk_int_equiv_lemma #t (n:range_t t) : Lemma ( match t with | LI.U8 -> mk_int_l #u8_inttype n == UInt8.uint_to_t n | LI.S8 -> mk_int_l #i8_inttype n == Int8.int_to_t n | LI.U16 -> mk_int_l #u16_inttype n == UInt16.uint_to_t n | LI.S16 -> mk_int_l #i16_inttype n == Int16.int_to_t n | LI.U32 -> mk_int_l #u32_inttype n == UInt32.uint_to_t n | LI.S32 -> mk_int_l #i32_inttype n == Int32.int_to_t n | LI.U64 -> mk_int_l #u64_inttype n == UInt64.uint_to_t n | LI.S64 -> mk_int_l #i64_inttype n == Int64.int_to_t n | LI.U128 -> mk_int_l #u128_inttype n == UInt128.uint_to_t n | LI.S128 -> mk_int_l #i128_inttype n == Int128.int_to_t n | _ -> True) let sz (n:range_t usize_inttype) : usize = mk_int_l n let isz (n:range_t isize_inttype) : isize = mk_int_l n val mk_int_v_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> Lemma (mk_int_l #t (v #t #l a) == a) [SMTPat (mk_int_l #t #l (v #t #l a))] val v_mk_int_lemma: #t:inttype -> #l:LI.secrecy_level -> n:range_t t -> Lemma (v #t #l (mk_int_l #t #l n) == n) [SMTPat (v #t #l (mk_int_l #t #l n))] (* Wrap-around modulo: wraps 
into [-p/2; p/2[ *)
(* `@%` is signed wrap-around modulo; `@%.` reduces an integer into the range
   of type `t` (plain `% modulus t` for unsigned, signed wrap for signed).
   `cast` requires the value to fit in the target type, `cast_mod` wraps.
   `classify` raises the secrecy level (PUB -> SEC only, per `can_flow`);
   `declassify` goes the other way and is deliberately left abstract. *)
let op_At_Percent (v:int) (p:int{p>0/\ p%2=0}) : Tot int = let m = v % p in if m >= p/2 then m - p else m [@(strict_on_arguments [0])] let op_At_Percent_Dot x t : range_t t = if unsigned t then x % modulus t else x @% modulus t let cast (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (u1:int_t_l t l{range (v u1) t'}) = mk_int_l #t' #l (v u1) let cast_mod (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (u1:int_t_l t l) = mk_int_l #t' #l (v u1 @%. t') let classify #t #l (#l':LI.secrecy_level{can_flow l l'}) (a:int_t_l t l) : int_t_l t l' = match l,l' with | LI.PUB, LI.SEC -> LI.secret #t a | LI.PUB, LI.PUB -> a | LI.SEC, LI.SEC -> a (* NOTE: Use with extreme care, and clearly document each use case *) val declassify #t #l #l' (a:int_t_l t l) : int_t_l t l' /// Arithmetic operations /// let add_mod (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l') = mk_int_l #t #(meet l l') ((v a + v b) @%. t) val add_mod_equiv_lemma: #t:uinttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' -> Lemma (add_mod a b == LI.add_mod #_ #(meet l l') (classify a) (classify b)) let add (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l'{range (v a + v b) t}) = mk_int_l #t #(meet l l') (v a + v b) val add_equiv_lemma: #t:uinttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l'{range (v a + v b) t} -> Lemma (add a b == LI.add #t #(meet l l') (classify a) (classify b)) let incr (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{v a < maxint t}) = mk_int_l #t #l (v a + 1) val incr_equiv_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l{v a < maxint t} -> Lemma (incr a == LI.incr a) let mul_mod (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l') = mk_int_l #t #(meet l l') (v a * v b @%. t) val mul_mod_equiv_lemma: #t:uinttype{not (LI.U128? 
t)} -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' -> Lemma (mul_mod a b == LI.mul_mod #t #(meet l l') (classify a) (classify b)) let mul (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l'{range (v a * v b) t}) = mk_int_l #t #(meet l l') (v a * v b) val mul_equiv_lemma: #t:uinttype{not (LI.U128? t)} -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l'{range (v a * v b) t} -> Lemma (mul a b == LI.mul #t #(meet l l') (classify a) (classify b)) let sub_mod (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l') = mk_int_l #t #(meet l l') ((v a - v b) @%. t) val sub_mod_equiv_lemma: #t:uinttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' -> Lemma (sub_mod a b == LI.sub_mod #_ #(meet l l') (classify a) (classify b)) let sub (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l) (b:int_t_l t l'{range (v a - v b) t}) = mk_int_l #t #(meet l l') (v a - v b) val sub_equiv_lemma: #t:uinttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l'{range (v a - v b) t} -> Lemma (sub a b == LI.sub #t #(meet l l') (classify a) (classify b)) let decr (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{minint t < v a}) = mk_int_l #t #l (v a - 1) val decr_equiv_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l{minint t < v a} -> Lemma (decr a == LI.decr a)
(* Division and remainder are restricted to PUB operands.
   NOTE(review): the `assume` in `div` discharges the signed case where
   `v a / v b` could fall outside `t`'s range (e.g. minint t / -1) instead of
   requiring it as a precondition of the caller — flagged for review. *)
let div (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB{v b <> 0}) = assume(unsigned t \/ range (v a / v b) t); mk_int_l #t #LI.PUB (v a / v b) val div_equiv_lemma: #t:inttype{~(LI.U128? t) /\ ~(LI.S128? t)} -> a:int_t_l t LI.PUB -> b:int_t_l t LI.PUB{v b <> 0 /\ (unsigned t \/ range FStar.Int.(v a / v b) t)} -> Lemma (div a b == LI.div a b) let mod (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB{v b <> 0}) = mk_int_l #t #LI.PUB (v a % v b) val mod_equiv_lemma: #t:inttype{~(LI.U128? t) /\ ~(LI.S128? 
t)} ->
  a:int_t_l t LI.PUB ->
  b:int_t_l t LI.PUB{v b <> 0 /\ (unsigned t \/ range FStar.Int.(v a / v b) t)} ->
  Lemma (mod a b == LI.mod a b)

/// Comparison Operators ///

(* Comparisons are defined only on PUB integers: they decide a boolean from
   the operands' values, so no SEC variant is provided here. *)
let eq (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a = v b

(* BUG FIX: the original body was `v b <> v b`, which is always false;
   inequality must compare the two operands. *)
let ne (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a <> v b

let lt (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a < v b
let lte (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a <= v b
let gt (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a > v b
let gte (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a >= v b

/// Bitwise Operations

(* All-ones / all-zero constants at any type and secrecy level; for signed
   types the all-ones pattern is the value -1. *)
let ones (#t:inttype) (#l:LI.secrecy_level) : n:int_t_l t l =
  if unsigned t then mk_int_l #t #l (pow2 (bits t) - 1) else mk_int_l #t #l (-1)

let zero (#t:inttype) (#l:LI.secrecy_level) : n:int_t_l t l = mk_int_l #t #l 0

val lognot: #t:inttype -> #l:LI.secrecy_level -> int_t_l t l -> int_t_l t l

(* lognot agrees with Lib.IntTypes and satisfies the usual involution and
   complement identities, split by signedness. *)
val lognot_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l ->
  Lemma (lognot a == LI.lognot a /\
         lognot #t #l zero == ones /\
         lognot #t #l ones == zero /\
         lognot (lognot a) == a /\
         (signed t ==> v (lognot a) = -1 - v a) /\
         (unsigned t ==> v (lognot a) = pow2 (bits t) - 1 - v a))

(* Binary bitwise operations join the operands' secrecy levels via `meet`. *)
val logxor: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level ->
  int_t_l t l -> int_t_l t l' -> int_t_l t (meet l l')

val logxor_lemma: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level ->
  a:int_t_l t l -> b:int_t_l t l' ->
  Lemma (logxor a b == LI.logxor #t #(meet l l') (classify a) (classify b) /\
         a `logxor` a == zero #t #l /\
         (a `logxor` b == zero #t #(meet l l') ==> v b == v a) /\
         v (a `logxor` (a `logxor` b)) == v b /\
         v (a `logxor` (b `logxor` a)) == v b /\
         zero #t #l' `logxor` a == classify a /\
         a `logxor` zero #t #l' == classify a /\
         v (ones #t #l' `logxor` a) == v (lognot a) /\
         v (a `logxor` ones #t #l') == v (lognot a))

val logand: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level ->
  int_t_l t l -> int_t_l t l' -> int_t_l t (meet
l l')
(* Specification lemmas relating logand/logor to Lib.IntTypes, with absorption
   identities for zero/ones and sign/bound facts; `logand_mask_lemma` (with an
   SMT pattern) turns `a & (2^m - 1)` into `a % 2^m`.  `shiftval`/`rotval`
   refine shift and rotate amounts to lie within the bit width. *)
val logand_lemma: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' -> Lemma (logand a b == LI.logand #t #(meet l l') (classify a) (classify b) /\ v (logand a (zero #t #l')) == v (zero #t #l') /\ v (logand (zero #t #l') a) == v (zero #t #l') /\ v (logand a (ones #t #l')) == v a /\ v (logand (ones #t #l') a) == v a /\ (v a >= 0 ==> (v (logand a b) >= 0) /\ (v (logand a b) <= v a)) /\ (v b >= 0 ==> (v (logand a b) >= 0) /\ (v (logand a b) <= v b))) val logand_mask_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> m:nat{m < bits t} -> Lemma (pow2 m < maxint t /\ logand a (sub #t (mk_int_l #t #l (pow2 m)) (mk_int_l #t #l 1)) == mk_int_l #t #l (v a % pow2 m)) [SMTPat (logand #t a (sub #t (mk_int_l #t #l (pow2 m)) (mk_int_l #t #l 1)))] val logor: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> int_t_l t l -> int_t_l t l' -> int_t_l t (meet l l') val logor_lemma: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' -> Lemma (logor a b == LI.logor #t #(meet l l') (classify a) (classify b) /\ v (logor a (zero #t #l')) == v a /\ v (logor a (ones #t #l')) == v (ones #t #l') /\ v (logor (zero #t #l') a) == v a /\ v (logor (ones #t #l') a) == v (ones #t #l') /\ ((v a >= 0 /\ v b >= 0) ==> (v (logor a b) >= v a /\ v (logor a b) >= v b))) unfold type shiftval (t:inttype) (t':inttype) = b:int_t_l t' LI.PUB{v b >= 0 /\ v b < bits t} unfold type rotval (t:inttype) (t':inttype) = b:int_t_l t' LI.PUB{v b > 0 /\ v b < bits t} let shift_right (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (a:int_t_l t l) (b:shiftval t t') = LI.shift_right_lemma a (LI.size (v b)); mk_int_l #t #l (v a / pow2 (v b)) val shift_right_equiv_lemma: #t:inttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> b:shiftval t t' -> Lemma (v ((cast #t' #u32_inttype b <: LI.size_t)) < bits t /\ shift_right #t #t' a b == LI.shift_right a (cast #t' #u32_inttype b <: LI.size_t)) let shift_left 
(#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (a:int_t_l t l) (b:shiftval t t') = let x:range_t t = (v a * pow2 (v b)) @%. t in mk_int_l #t #l x val shift_left_equiv_lemma: #t:inttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> b:shiftval t t' -> Lemma ((v a >= 0 /\ range (v a * pow2 (v b)) t) ==> (v (cast #_ #u32_inttype b) < bits t /\ shift_left #t #t' a b == LI.shift_left a (cast b))) val rotate_right: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> rotval t t' -> int_t_l t l val rotate_right_equiv_lemma: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> b:rotval t t' -> Lemma (v (cast #_ #u32_inttype b) > 0 /\ rotate_right a b == LI.rotate_right a (cast b)) val rotate_left: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> rotval t t' -> int_t_l t l val rotate_left_equiv_lemma: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> b:rotval t t' -> Lemma (v (cast #_ #u32_inttype b) > 0 /\ rotate_left a b == LI.rotate_left a (cast b))
(* Argument-flipped convenience wrappers: shift/rotate amount first, value
   second; each simply delegates to the corresponding operation above. *)
let shift_right_i (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (s:shiftval t t') (u:int_t_l t l) : int_t_l t l = shift_right u s let shift_left_i (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (s:shiftval t t') (u:int_t_l t l{v u >= 0}) : int_t_l t l = shift_left u s let rotate_right_i (#t:uinttype) (#t':inttype) (#l:LI.secrecy_level) (s:rotval t t') (u:int_t_l t l) : int_t_l t l = rotate_right u s let rotate_left_i (#t:uinttype) (#t':inttype) (#l:LI.secrecy_level) (s:rotval t t') (u:int_t_l t l) : int_t_l t l = rotate_left u s let abs_int (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{minint t < v a}) = mk_int_l #t #l (abs (v a)) val abs_int_equiv_lemma: #t:inttype{signed t /\ not (LI.S128? 
t)} -> #l:LI.secrecy_level -> a:int_t_l t l{minint t < v a} -> Lemma (abs_int a == LI.ct_abs a)
(* Negation on signed types; the `range (0 - v a) t` refinement rules out
   negating `minint t`, which has no representable opposite. *)
let neg (#t:inttype{signed t}) (#l:LI.secrecy_level) (a:int_t_l t l{range (0 - v a) t}) = mk_int_l #t #l (0 - (v a)) val neg_equiv_lemma: #t:inttype{signed t /\ not (LI.S128? t)} -> #l:LI.secrecy_level -> a:int_t_l t l{range (0 - v a) t} -> Lemma (neg a == sub (mk_int_l #t #l 0) a /\ (lognot a == sub (neg a) (mk_int_l #t #l 1))) /// /// Operators available for all machine integers /// // Strict: with precondition unfold let (+!) #t #l #l' = add #t #l #l' // Wrapping: no precondition unfold let (+.) #t #l #l' = add_mod #t #l #l' unfold let ( *! ) #t #l #l' = mul #t #l #l' unfold let ( *. ) #t #l #l' = mul_mod #t #l #l' unfold let ( -! ) #t #l #l' = sub #t #l #l' unfold let ( -. ) #t #l #l' = sub_mod #t #l #l' unfold let ( >>! ) #t #t' #l = shift_right #t #t' #l unfold let ( <>>. ) #t #t' #l = rotate_right #t #t' #l unfold let ( <<<. ) #t #t' #l = rotate_left #t #t' #l unfold let ( ^. ) #t #l #l' = logxor #t #l #l' unfold let ( |. ) #t #l #l' = logor #t #l #l' unfold let ( &. ) #t #l #l' = logand #t #l #l' unfold let ( ~. ) #t #l = lognot #t #l unfold let (/!) #t = div #t unfold let (%!) #t = mod #t unfold let (=.) #t = eq #t unfold let (<>.) #t = ne #t unfold let (<.) #t = lt #t unfold let (<=.) #t = lte #t unfold let (>.) #t = gt #t unfold let (>=.) 
#t = gte #t type bit = n: nat {n < 2} /// Mathematical `get_bit` definition on `nat`s let get_bit_nat (x: nat) (nth: nat): bit = (x / pow2 nth) % 2 /// `get_bit` definition for machine integer of any size and signedness [@"opaque_to_smt"] let get_bit (#t: inttype) (#l:LI.secrecy_level) (x: int_t_l t l) (nth: usize {v nth < bits t}): bit = if v x >= 0 then get_bit_nat (v x) (v nth) else // two's complement get_bit_nat (pow2 (bits t) + v x) (v nth) unfold let bit_and (x y: bit): bit = match x, y with | (1, 1) -> 1 | _ -> 0 unfold let bit_or (x y: bit): bit = (x + y) % 2 /// Bit-wise semantics for `&.` val get_bit_and #t #l (x y: int_t_l t l) (i: usize {v i < bits t}) : Lemma (get_bit (x &. y) i == get_bit x i `bit_and` get_bit y i) [SMTPat (get_bit (x &. y) i)] /// Bit-wise semantics for `|.` val get_bit_or #t #l (x y: int_t_l t l) (i: usize {v i < bits t}) : Lemma (get_bit (x |. y) i == get_bit x i `bit_or` get_bit y i) [SMTPat (get_bit (x |. y) i)] /// Bit-wise semantics for `<= 0 /\ v y < bits t) (ensures get_bit (x <>!` val get_bit_shr #t #u #l (x: int_t_l t l) (y: int_t_l u LI.PUB) (i: usize {v i < bits t}) : Lemma (requires v y >= 0 /\ v y < bits t) (ensures get_bit (x >>! y) i == (if v i < bits t - v y then get_bit x (mk_int_l (v i + v y)) else if signed t then get_bit x (mk_int_l (bits t - 1)) else 0)) [SMTPat (get_bit (x >>! 
y) i)]

// TODO: check for neg numbers

/// Bit-wise semantics of integer casts
val get_bit_cast #t #u #l
  (x: int_t_l t l) (nth: usize)
  : Lemma (requires v nth < bits u /\ v nth < bits t)
          (ensures get_bit (cast_mod #t #u x) nth == get_bit x nth)
          [SMTPat (get_bit (cast_mod #t #u x) nth)]

val get_bit_cast_extend #t #u #l
  (x: int_t_l t l) (nth: usize)
  : Lemma (requires bits t < bits u /\ v nth >= bits t /\ v nth < bits u)
          (ensures get_bit (cast_mod #t #u x) nth == 0)
          [SMTPat (get_bit (cast_mod #t #u x) nth)]

================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Iterators.fsti
================================================
module Rust_primitives.Iterators

open Rust_primitives
open Core.Ops.Range
open FStar.Mul

(* Fold over the integer range [r.f_start, r.f_end), threading an
   accumulator and an invariant `inv` indexed by the loop counter. *)
val foldi_range
  (#n:inttype) (#acc_t:Type)
  (#inv:(acc_t -> i:pub_int_t n -> Type))
  (r: t_Range (pub_int_t n){r.f_start <=. r.f_end})
  (acc:acc_t{inv acc r.f_start})
  (f: (acc:acc_t -> i:pub_int_t n{i >=. r.f_start /\ i <. r.f_end /\ inv acc i}
       -> acc':acc_t{inv acc' (i +! mk_pub_int 1)}))
  : res:acc_t{inv res r.f_end}

(* As `foldi_range`, but the counter advances by `step` each iteration. *)
val foldi_range_step_by
  (#n:inttype) (#acc_t:Type)
  (#inv:(acc_t -> i:pub_int_t n -> Type))
  (r: t_Range (pub_int_t n){r.f_start <=. r.f_end})
  (step: usize{v step > 0 /\ range (v step) n /\ range (v r.f_end + v step) n})
  (acc:acc_t{inv acc r.f_start})
  (f: (acc:acc_t -> i:pub_int_t n{i >=. r.f_start /\ i <. r.f_end /\
                                  (v i - v r.f_start) % (v step) == 0 /\ inv acc i}
       -> acc':acc_t{inv acc' (i +! mk_int #n (v step))}))
  : res:acc_t{inv res r.f_end}

(* Fold over the exact chunks of a slice, with the invariant indexed by the
   chunk counter; the remainder of the slice (if any) is not visited. *)
val foldi_chunks_exact
  (#t:Type) (#acc_t:Type)
  (#inv:(acc_t -> usize -> Type))
  (s:t_Slice t)
  (chunk_len:usize{v chunk_len > 0})
  (acc:acc_t{inv acc (sz 0)})
  (f: (acc:acc_t -> it:(usize & t_Array t chunk_len){
        let (i,item) = it in
        v i >= 0 /\ v i < Seq.length s / v chunk_len /\ inv acc i}
       -> acc':acc_t{inv acc' (fst it +! sz 1)}))
  : res:acc_t{inv res (length s /! chunk_len)}

(* Non-indexed variant of `foldi_chunks_exact`: the invariant only tracks
   the accumulator, not the chunk position. *)
val fold_chunks_exact
  (#t:Type) (#acc_t:Type)
  (#inv:(acc_t -> Type))
  (s:t_Slice t)
  (chunk_len:usize{v chunk_len > 0}) // /\ Seq.length s % v chunk_len == 0})
  (acc:acc_t{inv acc})
  (f: (acc:acc_t -> it:t_Array t chunk_len{inv acc} -> acc':acc_t{inv acc'}))
  : res:acc_t{inv res}

(* Indexed fold over every element of a slice; `f` also learns that the
   element it receives is `Seq.index sl (v i)`. *)
val foldi_slice
  (#t:Type) (#acc_t:Type)
  (#inv:(acc_t -> usize -> Type))
  (sl: t_Slice t)
  (acc:acc_t{inv acc (sz 0)})
  (f: (acc:acc_t -> it:(usize & t){
        let (i,item) = it in
        v i >= 0 /\ v i < Seq.length sl /\ Seq.index sl (v i) == item /\ inv acc i}
       -> acc':acc_t{inv acc' (fst it +! sz 1)}))
  : res:acc_t{inv res (length sl)}

================================================
FILE: hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.fst
================================================
module Rust_primitives

include Rust_primitives.Integers
include Rust_primitives.Arrays
include Rust_primitives.BitVectors

class cast_tc a b = {
  cast: a -> b;
}

/// Rust's casts operations on integers are non-panicking
instance cast_tc_integers (t:inttype) (t':inttype) (l:Lib.IntTypes.secrecy_level)
  : cast_tc (int_t_l t l) (int_t_l t' l) =
  { cast = (fun x -> Rust_primitives.Integers.cast_mod #t #t' x) }

class unsize_tc source = {
  output: Type;
  unsize: source -> output;
}

instance array_to_slice_unsize t n: unsize_tc (t_Array t n) = {
  output = t_Slice t;
  unsize = (fun (arr: t_Array t n) -> arr <: t_Slice t);
}

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Array/Lemmas.lean
================================================
attribute [grind =] Array.size_extract

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/BitVec/Basic.lean
================================================
attribute [grind =] BitVec.toNat_ofNat
attribute [grind] BitVec.umulOverflow
attribute [grind] BitVec.uaddOverflow

================================================
FILE:
hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Int/DivMod/Lemmas.lean
================================================
attribute [grind <-] Int.tmod_lt_of_pos
attribute [grind <-] Int.lt_tmod_of_pos

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Nat/Div/Basic.lean
================================================
attribute [grind =] Nat.mod_eq_of_lt

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Nat/MinMax.lean
================================================
attribute [grind =] Nat.min_eq_left

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Basic.lean
================================================
import Hax.MissingLean.Init.Data.SInt.Basic_Int128
import Hax.MissingLean.Init.Data.UInt.Basic

-- Generates, for each signed-integer type, overflow predicates for
-- add/sub/mul plus the `toInt` characterisations used by `grind`.
open Lean in
set_option hygiene false in
macro "additional_int_decls" typeName:ident width:term : command => do
  `(
    namespace $typeName

    def addOverflow (a b : $typeName) : Bool := BitVec.saddOverflow a.toBitVec b.toBitVec
    def subOverflow (a b : $typeName) : Bool := BitVec.ssubOverflow a.toBitVec b.toBitVec
    def mulOverflow (a b : $typeName) : Bool := BitVec.smulOverflow a.toBitVec b.toBitVec

    @[grind .]
    theorem addOverflow_iff {a b : $typeName} :
        addOverflow a b ↔ a.toInt + b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt + b.toInt < - 2 ^ ($width - 1) := by
      simp [addOverflow, BitVec.saddOverflow] <;> rfl

    @[grind .]
    theorem subOverflow_iff {a b : $typeName} :
        subOverflow a b ↔ a.toInt - b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt - b.toInt < - 2 ^ ($width - 1) := by
      simp [subOverflow, BitVec.ssubOverflow] <;> rfl

    @[grind .]
    theorem mulOverflow_iff {a b : $typeName} :
        mulOverflow a b ↔ a.toInt * b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt * b.toInt < - 2 ^ ($width - 1) := by
      simp [mulOverflow, BitVec.smulOverflow] <;> rfl

    @[grind =]
    theorem toInt_add_of_not_addOverflow {x y : $typeName} (h : ¬ addOverflow x y) :
        (x + y).toInt = x.toInt + y.toInt := BitVec.toInt_add_of_not_saddOverflow h

    @[grind =]
    theorem toInt_sub_of_not_subOverflow {x y : $typeName} (h : ¬ subOverflow x y) :
        (x - y).toInt = x.toInt - y.toInt := BitVec.toInt_sub_of_not_ssubOverflow h

    @[grind =]
    theorem toInt_mul_of_not_mulOverflow {x y : $typeName} (h : ¬ mulOverflow x y) :
        (x * y).toInt = x.toInt * y.toInt := BitVec.toInt_mul_of_not_smulOverflow h

    end $typeName
  )

additional_int_decls Int8 8
additional_int_decls Int16 16
additional_int_decls Int32 32
additional_int_decls Int64 64
additional_int_decls Int128 128
additional_int_decls ISize System.Platform.numBits

-- Generates `IntN.toUIntM` conversions (via `toInt`/`ofInt`) for every
-- signed/unsigned width pair that Lean core does not already provide.
open Lean in
set_option hygiene false in
macro "declare_missing_int_conversions" : command => do
  let mut cmds := #[]
  let src : List (Name × Nat) :=
    [ (`Int8, 8), (`Int16, 16), (`Int32, 32), (`Int64, 64), (`Int128, 128), (`ISize, 0) ]
  let dst : List (Name × Nat) :=
    [ (`UInt8, 8), (`UInt16, 16), (`UInt32, 32), (`UInt64, 64), (`UInt128, 128), (`USize, 0), ]
  for (srcName, srcIdx) in src do
    for (dstName, dstIdx) in dst do
      let srcIdent := mkIdent srcName
      let dstIdent := mkIdent dstName
      -- same-width pairs (e.g. Int8 -> UInt8) already exist in core; skip them
      if srcIdx != dstIdx then
        cmds := cmds.push $ ← `(
          def $(mkIdent (srcName ++ dstName.appendBefore "to")) (x : $srcIdent) : $dstIdent :=
            $(mkIdent (dstName ++ `ofInt)) x.toInt
        )
  return ⟨mkNullNode cmds⟩

declare_missing_int_conversions

================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Basic_Int128.lean
================================================
import Hax.MissingLean.Init.Prelude
import Lean.Meta.Tactic.Simp.BuiltinSimprocs.SInt

set_option autoImplicit true

-- Adapted from Init/Data/SInt/Basic.lean from the Lean v4.29.0-rc1 source
code structure Int128 where
  ofUInt128 ::
  toUInt128 : UInt128

abbrev Int128.size : Nat := 340282366920938463463374607431768211456

@[inline] def Int128.toBitVec (x : Int128) : BitVec 128 := x.toUInt128.toBitVec

theorem Int128.toBitVec.inj : {x y : Int128} → x.toBitVec = y.toBitVec → x = y
  | ⟨⟨_⟩⟩, ⟨⟨_⟩⟩, rfl => rfl

@[inline] def UInt128.toInt128 (i : UInt128) : Int128 := Int128.ofUInt128 i

-- Construction from `Int`/`Nat` is two's-complement truncation to 128 bits.
def Int128.ofInt (i : @& Int) : Int128 := ⟨⟨BitVec.ofInt 128 i⟩⟩
def Int128.ofNat (n : @& Nat) : Int128 := ⟨⟨BitVec.ofNat 128 n⟩⟩
abbrev Int.toInt128 := Int128.ofInt
abbrev Nat.toInt128 := Int128.ofNat

def Int128.toInt (i : Int128) : Int := i.toBitVec.toInt

@[suggest_for Int128.toNat, inline] def Int128.toNatClampNeg (i : Int128) : Nat := i.toInt.toNat

@[inline] def Int128.ofBitVec (b : BitVec 128) : Int128 := ⟨⟨b⟩⟩

-- Narrowing and widening conversions go through sign extension.
def Int128.toInt8 (a : Int128) : Int8 := ⟨⟨a.toBitVec.signExtend 8⟩⟩
def Int128.toInt16 (a : Int128) : Int16 := ⟨⟨a.toBitVec.signExtend 16⟩⟩
def Int128.toInt32 (a : Int128) : Int32 := ⟨⟨a.toBitVec.signExtend 32⟩⟩
def Int128.toInt64 (a : Int128) : Int64 := ⟨⟨a.toBitVec.signExtend 64⟩⟩
def Int8.toInt128 (a : Int8) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩
def Int16.toInt128 (a : Int16) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩
def Int32.toInt128 (a : Int32) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩
def Int64.toInt128 (a : Int64) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩

def Int128.neg (i : Int128) : Int128 := ⟨⟨-i.toBitVec⟩⟩

instance : ToString Int128 where toString i := toString i.toInt
instance : Repr Int128 where reprPrec i prec := reprPrec i.toInt prec
instance : ReprAtom Int128 := ⟨⟩
instance : Hashable Int128 where hash i := UInt64.ofInt i.toInt
instance Int128.instOfNat : OfNat Int128 n := ⟨Int128.ofNat n⟩
instance Int128.instNeg : Neg Int128 where neg := Int128.neg

abbrev Int128.maxValue : Int128 := 170141183460469231731687303715884105727
abbrev Int128.minValue : Int128 := -170141183460469231731687303715884105728

@[inline] def Int128.ofIntLE (i : Int) (_hl : Int128.minValue.toInt ≤ i) (_hr : i ≤ Int128.maxValue.toInt) : Int128 :=
  Int128.ofInt i

-- Clamping construction: out-of-range inputs collapse to `minValue`.
def Int128.ofIntTruncate (i : Int) : Int128 :=
  if hl : Int128.minValue.toInt ≤ i then
    if hr : i ≤ Int128.maxValue.toInt then
      Int128.ofIntLE i hl hr
    else
      Int128.minValue
  else
    Int128.minValue

-- Arithmetic and bitwise operations delegate to the underlying `BitVec 128`;
-- division/remainder use the signed `sdiv`/`srem` forms.
protected def Int128.add (a b : Int128) : Int128 := ⟨⟨a.toBitVec + b.toBitVec⟩⟩
protected def Int128.sub (a b : Int128) : Int128 := ⟨⟨a.toBitVec - b.toBitVec⟩⟩
protected def Int128.mul (a b : Int128) : Int128 := ⟨⟨a.toBitVec * b.toBitVec⟩⟩
protected def Int128.div (a b : Int128) : Int128 := ⟨⟨BitVec.sdiv a.toBitVec b.toBitVec⟩⟩
protected def Int128.pow (x : Int128) (n : Nat) : Int128 :=
  match n with
  | 0 => 1
  | n + 1 => Int128.mul (Int128.pow x n) x
protected def Int128.mod (a b : Int128) : Int128 := ⟨⟨BitVec.srem a.toBitVec b.toBitVec⟩⟩
protected def Int128.land (a b : Int128) : Int128 := ⟨⟨a.toBitVec &&& b.toBitVec⟩⟩
protected def Int128.lor (a b : Int128) : Int128 := ⟨⟨a.toBitVec ||| b.toBitVec⟩⟩
protected def Int128.xor (a b : Int128) : Int128 := ⟨⟨a.toBitVec ^^^ b.toBitVec⟩⟩
protected def Int128.shiftLeft (a b : Int128) : Int128 := ⟨⟨a.toBitVec <<< (b.toBitVec.smod 128)⟩⟩
protected def Int128.shiftRight (a b : Int128) : Int128 := ⟨⟨BitVec.sshiftRight' a.toBitVec (b.toBitVec.smod 128)⟩⟩
protected def Int128.complement (a : Int128) : Int128 := ⟨⟨~~~a.toBitVec⟩⟩
protected def Int128.abs (a : Int128) : Int128 := ⟨⟨a.toBitVec.abs⟩⟩

def Int128.decEq (a b : Int128) : Decidable (a = b) :=
  match a, b with
  | ⟨n⟩, ⟨m⟩ =>
    if h : n = m then
      isTrue <| h ▸ rfl
    else
      isFalse (fun h' => Int128.noConfusion h' (fun h' => absurd h' h))

protected def Int128.lt (a b : Int128) : Prop := a.toBitVec.slt b.toBitVec
protected def Int128.le (a b : Int128) : Prop := a.toBitVec.sle b.toBitVec

instance : Inhabited Int128 where default := 0
instance : Add Int128 := ⟨Int128.add⟩
instance : Sub Int128 := ⟨Int128.sub⟩
instance : Mul Int128 := ⟨Int128.mul⟩
instance : Pow Int128 Nat := ⟨Int128.pow⟩
instance : Mod Int128 :=
⟨Int128.mod⟩ instance : Div Int128 := ⟨Int128.div⟩ instance : LT Int128 := ⟨Int128.lt⟩ instance : LE Int128 := ⟨Int128.le⟩ instance : Complement Int128 := ⟨Int128.complement⟩ instance : AndOp Int128 := ⟨Int128.land⟩ instance : OrOp Int128 := ⟨Int128.lor⟩ instance : XorOp Int128 := ⟨Int128.xor⟩ instance : ShiftLeft Int128 := ⟨Int128.shiftLeft⟩ instance : ShiftRight Int128 := ⟨Int128.shiftRight⟩ instance : DecidableEq Int128 := Int128.decEq def Bool.toInt128 (b : Bool) : Int128 := if b then 1 else 0 def Int128.decLt (a b : Int128) : Decidable (a < b) := inferInstanceAs (Decidable (a.toBitVec.slt b.toBitVec)) def Int128.decLe (a b : Int128) : Decidable (a ≤ b) := inferInstanceAs (Decidable (a.toBitVec.sle b.toBitVec)) attribute [instance_reducible, instance] Int128.decLt Int128.decLe instance : Max Int128 := maxOfLe instance : Min Int128 := minOfLe ================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Lemmas.lean ================================================ import Hax.MissingLean.Init.Data.SInt.Lemmas_Int128 attribute [grind =_] Int8.ofNat_le_iff_le attribute [grind =_] Int16.ofNat_le_iff_le attribute [grind =_] Int32.ofNat_le_iff_le attribute [grind =_] Int64.ofNat_le_iff_le attribute [grind =] Int8.ofNat_toNatClampNeg attribute [grind =] Int16.ofNat_toNatClampNeg attribute [grind =] Int32.ofNat_toNatClampNeg attribute [grind =] Int64.ofNat_toNatClampNeg open Lean in set_option hygiene false in macro "additional_int_lemmas" typeName:ident width:term : command => do `( namespace $typeName theorem toInt_neg_of_ne_intMin {x : $typeName} (hx : x ≠ minValue) : (-x).toInt = -(x.toInt) := by have : x.toBitVec ≠ BitVec.intMin $width := by refine fun h => hx ?_ rw [← toBitVec_inj, h, BitVec.intMin_eq_neg_two_pow] rfl simp only [toInt, minValue, toBitVec_neg, BitVec.toInt_neg_of_ne_intMin this] at * theorem ofInt_eq_of_toInt_eq {a : Int} {b : $typeName} (h : b.toInt = a) : ofInt a = b := by subst_vars; exact 
(ofInt_toInt b) end $typeName ) additional_int_lemmas Int8 8 additional_int_lemmas Int16 16 additional_int_lemmas Int32 32 additional_int_lemmas Int64 64 additional_int_lemmas Int128 128 additional_int_lemmas ISize System.Platform.numBits ================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Lemmas_Int128.lean ================================================ import Hax.MissingLean.Init.Data.SInt.Basic_Int128 import Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128 import Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.SInt import Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.UInt -- Adapted from Init/Data/SInt/Lemmas.lean from the Lean v4.29.0-rc1 source code declare_int_theorems Int128 128 theorem Int128.toInt.inj {x y : Int128} (h : x.toInt = y.toInt) : x = y := Int128.toBitVec.inj (BitVec.eq_of_toInt_eq h) theorem Int128.toInt_inj {x y : Int128} : x.toInt = y.toInt ↔ x = y := ⟨Int128.toInt.inj, fun h => h ▸ rfl⟩ @[simp, int_toBitVec] theorem Int128.toBitVec_neg (x : Int128) : (-x).toBitVec = -x.toBitVec := (rfl) @[simp] theorem Int128.toBitVec_zero : toBitVec 0 = 0#128 := (rfl) theorem Int128.toBitVec_one : (1 : Int128).toBitVec = 1#128 := (rfl) @[simp, int_toBitVec] theorem Int128.toBitVec_ofInt (i : Int) : (ofInt i).toBitVec = BitVec.ofInt _ i := (rfl) @[simp] protected theorem Int128.neg_zero : -(0 : Int128) = 0 := (rfl) @[simp] theorem Int128.toInt_ofInt {n : Int} : toInt (ofInt n) = n.bmod Int128.size := by rw [toInt, toBitVec_ofInt, BitVec.toInt_ofInt] @[simp] theorem Int128.toInt_ofNat' {n : Nat} : toInt (ofNat n) = (n : Int).bmod Int128.size := by rw [toInt, toBitVec_ofNat', BitVec.toInt_ofNat'] theorem Int128.toInt_ofNat {n : Nat} : toInt (no_index (OfNat.ofNat n)) = (n : Int).bmod Int128.size := by rw [toInt, toBitVec_ofNat, BitVec.toInt_ofNat] theorem Int128.toInt_ofInt_of_le {n : Int} (hn : -2^127 ≤ n) (hn' : n < 2^127) : toInt (ofInt n) = n := by rw [toInt, toBitVec_ofInt, 
BitVec.toInt_ofInt_eq_self (by decide) hn hn'] theorem Int128.neg_ofInt {n : Int} : -ofInt n = ofInt (-n) := toBitVec.inj (by simp [BitVec.ofInt_neg]) theorem Int128.ofInt_eq_ofNat {n : Nat} : ofInt n = ofNat n := toBitVec.inj (by simp) theorem Int128.neg_ofNat {n : Nat} : -ofNat n = ofInt (-n) := by rw [← neg_ofInt, ofInt_eq_ofNat] theorem Int128.toNatClampNeg_ofNat_of_lt {n : Nat} (h : n < 2 ^ 127) : toNatClampNeg (ofNat n) = n := by rw [toNatClampNeg, ← ofInt_eq_ofNat, toInt_ofInt_of_le (by omega) (by omega), Int.toNat_natCast] theorem Int128.toInt_ofNat_of_lt {n : Nat} (h : n < 2 ^ 127) : toInt (ofNat n) = n := by rw [← ofInt_eq_ofNat, toInt_ofInt_of_le (by omega) (by omega)] theorem Int128.toInt_neg_ofNat_of_le {n : Nat} (h : n ≤ 2^127) : toInt (-ofNat n) = -n := by rw [← ofInt_eq_ofNat, neg_ofInt, toInt_ofInt_of_le (by omega) (by omega)] theorem Int128.toInt_zero : toInt 0 = 0 := by simp theorem Int128.toInt_minValue : Int128.minValue.toInt = -2^127 := (rfl) theorem Int128.toInt_maxValue : Int128.maxValue.toInt = 2 ^ 127 - 1 := (rfl) @[simp] theorem Int128.toNatClampNeg_minValue : Int128.minValue.toNatClampNeg = 0 := (rfl) @[simp, int_toBitVec] theorem UInt128.toBitVec_toInt128 (x : UInt128) : x.toInt128.toBitVec = x.toBitVec := (rfl) @[simp] theorem Int128.ofBitVec_uInt128ToBitVec (x : UInt128) : Int128.ofBitVec x.toBitVec = x.toInt128 := (rfl) @[simp] theorem UInt128.toUInt128_toInt128 (x : UInt128) : x.toInt128.toUInt128 = x := (rfl) @[simp] theorem Int128.toNat_toInt (x : Int128) : x.toInt.toNat = x.toNatClampNeg := (rfl) @[simp] theorem Int128.toInt_toBitVec (x : Int128) : x.toBitVec.toInt = x.toInt := (rfl) @[simp, int_toBitVec] theorem Int8.toBitVec_toInt128 (x : Int8) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl) @[simp, int_toBitVec] theorem Int16.toBitVec_toInt128 (x : Int16) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl) @[simp, int_toBitVec] theorem Int32.toBitVec_toInt128 (x : Int32) : x.toInt128.toBitVec = 
x.toBitVec.signExtend 128 := (rfl) @[simp, int_toBitVec] theorem Int128.toBitVec_toInt8 (x : Int128) : x.toInt8.toBitVec = x.toBitVec.signExtend 8 := (rfl) @[simp, int_toBitVec] theorem Int128.toBitVec_toInt16 (x : Int128) : x.toInt16.toBitVec = x.toBitVec.signExtend 16 := (rfl) @[simp, int_toBitVec] theorem Int128.toBitVec_toInt32 (x : Int128) : x.toInt32.toBitVec = x.toBitVec.signExtend 32 := (rfl) -- @[simp, int_toBitVec] theorem Int128.toBitVec_toISize (x : Int128) : x.toISize.toBitVec = x.toBitVec.signExtend System.Platform.numBits := (rfl) -- @[simp, int_toBitVec] theorem ISize.toBitVec_toInt128 (x : ISize) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl) theorem Int128.toInt_lt (x : Int128) : x.toInt < 2 ^ 127 := Int.lt_of_mul_lt_mul_left BitVec.two_mul_toInt_lt (by decide) theorem Int128.le_toInt (x : Int128) : -2 ^ 127 ≤ x.toInt := Int.le_of_mul_le_mul_left BitVec.le_two_mul_toInt (by decide) theorem Int128.toInt_le (x : Int128) : x.toInt ≤ Int128.maxValue.toInt := Int.le_of_lt_add_one x.toInt_lt theorem Int128.minValue_le_toInt (x : Int128) : Int128.minValue.toInt ≤ x.toInt := x.le_toInt theorem ISize.int128MinValue_le_toInt (x : ISize) : Int128.minValue.toInt ≤ x.toInt := Int.le_trans (by decide) x.le_toInt theorem Int128.toNatClampNeg_lt (x : Int128) : x.toNatClampNeg < 2 ^ 127 := (Int.toNat_lt' (by decide)).2 x.toInt_lt @[simp] theorem Int8.toInt_toInt128 (x : Int8) : x.toInt128.toInt = x.toInt := x.toBitVec.toInt_signExtend_of_le (by decide) @[simp] theorem Int16.toInt_toInt128 (x : Int16) : x.toInt128.toInt = x.toInt := x.toBitVec.toInt_signExtend_of_le (by decide) @[simp] theorem Int32.toInt_toInt128 (x : Int32) : x.toInt128.toInt = x.toInt := x.toBitVec.toInt_signExtend_of_le (by decide) @[simp] theorem Int64.toInt_toInt128 (x : Int64) : x.toInt128.toInt = x.toInt := x.toBitVec.toInt_signExtend_of_le (by decide) @[simp] theorem Int128.toInt_toInt8 (x : Int128) : x.toInt8.toInt = x.toInt.bmod (2 ^ 8) := 
x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide) @[simp] theorem Int128.toInt_toInt16 (x : Int128) : x.toInt16.toInt = x.toInt.bmod (2 ^ 16) := x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide) @[simp] theorem Int128.toInt_toInt32 (x : Int128) : x.toInt32.toInt = x.toInt.bmod (2 ^ 32) := x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide) -- @[simp] theorem Int128.toInt_toISize (x : Int128) : x.toISize.toInt = x.toInt.bmod (2 ^ System.Platform.numBits) := -- x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by cases System.Platform.numBits_eq <;> simp_all) -- @[simp] theorem ISize.toInt_toInt128 (x : ISize) : x.toInt128.toInt = x.toInt := -- x.toBitVec.toInt_signExtend_of_le (by cases System.Platform.numBits_eq <;> simp_all) @[simp] theorem Int8.toNatClampNeg_toInt128 (x : Int8) : x.toInt128.toNatClampNeg = x.toNatClampNeg := congrArg Int.toNat x.toInt_toInt128 @[simp] theorem Int16.toNatClampNeg_toInt128 (x : Int16) : x.toInt128.toNatClampNeg = x.toNatClampNeg := congrArg Int.toNat x.toInt_toInt128 @[simp] theorem Int32.toNatClampNeg_toInt128 (x : Int32) : x.toInt128.toNatClampNeg = x.toNatClampNeg := congrArg Int.toNat x.toInt_toInt128 @[simp] theorem Int64.toNatClampNeg_toInt128 (x : Int64) : x.toInt128.toNatClampNeg = x.toNatClampNeg := congrArg Int.toNat x.toInt_toInt128 -- @[simp] theorem ISize.toNatClampNeg_toInt128 (x : ISize) : x.toInt128.toNatClampNeg = x.toNatClampNeg := -- congrArg Int.toNat x.toInt_toInt128 @[simp] theorem Int128.toInt128_toUInt128 (x : Int128) : x.toUInt128.toInt128 = x := (rfl) theorem Int128.toNat_toBitVec (x : Int128) : x.toBitVec.toNat = x.toUInt128.toNat := (rfl) theorem Int128.toNat_toBitVec_of_le {x : Int128} (hx : 0 ≤ x) : x.toBitVec.toNat = x.toNatClampNeg := (x.toBitVec.toNat_toInt_of_sle hx).symm theorem Int128.toNat_toUInt128_of_le {x : Int128} (hx : 0 ≤ x) : x.toUInt128.toNat = x.toNatClampNeg := by rw [← toNat_toBitVec, toNat_toBitVec_of_le hx] theorem Int128.toFin_toBitVec (x : Int128) : 
x.toBitVec.toFin = x.toUInt128.toFin := (rfl) @[simp, int_toBitVec] theorem Int128.toBitVec_toUInt128 (x : Int128) : x.toUInt128.toBitVec = x.toBitVec := (rfl) @[simp] theorem UInt128.ofBitVec_int128ToBitVec (x : Int128) : UInt128.ofBitVec x.toBitVec = x.toUInt128 := (rfl) @[simp] theorem Int128.ofBitVec_toBitVec (x : Int128) : Int128.ofBitVec x.toBitVec = x := (rfl) @[simp] theorem Int8.ofBitVec_int128ToBitVec (x : Int128) : Int8.ofBitVec (x.toBitVec.signExtend 8) = x.toInt8 := (rfl) @[simp] theorem Int16.ofBitVec_int128ToBitVec (x : Int128) : Int16.ofBitVec (x.toBitVec.signExtend 16) = x.toInt16 := (rfl) @[simp] theorem Int32.ofBitVec_int128ToBitVec (x : Int128) : Int32.ofBitVec (x.toBitVec.signExtend 32) = x.toInt32 := (rfl) @[simp] theorem Int64.ofBitVec_int128ToBitVec (x : Int128) : Int64.ofBitVec (x.toBitVec.signExtend 64) = x.toInt64 := (rfl) @[simp] theorem Int128.ofBitVec_int8ToBitVec (x : Int8) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl) @[simp] theorem Int128.ofBitVec_int16ToBitVec (x : Int16) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl) @[simp] theorem Int128.ofBitVec_int32ToBitVec (x : Int32) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl) @[simp] theorem Int128.ofBitVec_int64ToBitVec (x : Int64) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl) -- @[simp] theorem Int128.ofBitVec_iSizeToBitVec (x : ISize) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl) -- @[simp] theorem ISize.ofBitVec_int128ToBitVec (x : Int128) : ISize.ofBitVec (x.toBitVec.signExtend System.Platform.numBits) = x.toISize := (rfl) @[simp] theorem Int128.toBitVec_ofIntLE (x : Int) (h₁ h₂) : (Int128.ofIntLE x h₁ h₂).toBitVec = BitVec.ofInt 128 x := (rfl) @[simp] theorem Int128.toInt_bmod (x : Int128) : x.toInt.bmod 340282366920938463463374607431768211456 = x.toInt := Int.bmod_eq_of_le x.le_toInt x.toInt_lt -- @[simp] theorem Int128.toInt_bmod_18446744073709551616 (x : Int128) : 
x.toInt.bmod 18446744073709551616 = x.toInt := -- Int.bmod_eq_of_le (Int.le_trans (by decide) x.le_toInt) (Int.lt_of_lt_of_le x.toInt_lt (by decide)) @[simp] theorem BitVec.ofInt_int128ToInt (x : Int128) : BitVec.ofInt 128 x.toInt = x.toBitVec := BitVec.eq_of_toInt_eq (by simp) @[simp] theorem Int128.ofIntLE_toInt (x : Int128) : Int128.ofIntLE x.toInt x.minValue_le_toInt x.toInt_le = x := Int128.toBitVec.inj (by simp) theorem Int8.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int8.ofIntLE x.toInt h₁ h₂ = x.toInt8 := (rfl) theorem Int16.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int16.ofIntLE x.toInt h₁ h₂ = x.toInt16 := (rfl) theorem Int32.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int32.ofIntLE x.toInt h₁ h₂ = x.toInt32 := (rfl) theorem Int64.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int64.ofIntLE x.toInt h₁ h₂ = x.toInt64 := (rfl) @[simp] theorem Int128.ofIntLE_int8ToInt (x : Int8) : Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl) @[simp] theorem Int128.ofIntLE_int16ToInt (x : Int16) : Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl) @[simp] theorem Int128.ofIntLE_int32ToInt (x : Int32) : Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl) @[simp] theorem Int128.ofIntLE_int64ToInt (x : Int64) : Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl) -- @[simp] theorem Int128.ofIntLE_iSizeToInt (x : ISize) : -- Int128.ofIntLE x.toInt x.int128MinValue_le_toInt x.toInt_le_int128MaxValue = x.toInt128 := (rfl) -- theorem ISize.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : ISize.ofIntLE x.toInt h₁ h₂ = x.toISize := (rfl) @[simp] theorem Int128.ofInt_toInt (x : Int128) : Int128.ofInt x.toInt = x := Int128.toBitVec.inj (by simp) @[simp] theorem Int8.ofInt_int128ToInt 
(x : Int128) : Int8.ofInt x.toInt = x.toInt8 := (rfl) @[simp] theorem Int16.ofInt_int128ToInt (x : Int128) : Int16.ofInt x.toInt = x.toInt16 := (rfl) @[simp] theorem Int32.ofInt_int128ToInt (x : Int128) : Int32.ofInt x.toInt = x.toInt32 := (rfl) @[simp] theorem Int64.ofInt_int128ToInt (x : Int128) : Int64.ofInt x.toInt = x.toInt64 := (rfl) @[simp] theorem Int128.ofInt_int8ToInt (x : Int8) : Int128.ofInt x.toInt = x.toInt128 := (rfl) @[simp] theorem Int128.ofInt_int16ToInt (x : Int16) : Int128.ofInt x.toInt = x.toInt128 := (rfl) @[simp] theorem Int128.ofInt_int32ToInt (x : Int32) : Int128.ofInt x.toInt = x.toInt128 := (rfl) @[simp] theorem Int128.ofInt_int64ToInt (x : Int64) : Int128.ofInt x.toInt = x.toInt128 := (rfl) -- @[simp] theorem Int128.ofInt_iSizeToInt (x : ISize) : Int128.ofInt x.toInt = x.toInt128 := (rfl) -- @[simp] theorem ISize.ofInt_int128ToInt (x : Int128) : ISize.ofInt x.toInt = x.toISize := (rfl) @[simp] theorem Int128.toInt_ofIntLE {x : Int} {h₁ h₂} : (ofIntLE x h₁ h₂).toInt = x := by rw [ofIntLE, toInt_ofInt_of_le h₁ (Int.lt_of_le_sub_one h₂)] theorem Int128.ofIntLE_eq_ofIntTruncate {x : Int} {h₁ h₂} : (ofIntLE x h₁ h₂) = ofIntTruncate x := by rw [ofIntTruncate, dif_pos h₁, dif_pos h₂] theorem Int128.ofIntLE_eq_ofInt {n : Int} (h₁ h₂) : Int128.ofIntLE n h₁ h₂ = Int128.ofInt n := (rfl) theorem Int128.toInt_ofIntTruncate {x : Int} (h₁ : Int128.minValue.toInt ≤ x) (h₂ : x ≤ Int128.maxValue.toInt) : (Int128.ofIntTruncate x).toInt = x := by rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := h₂), toInt_ofIntLE] @[simp] theorem Int128.ofIntTruncate_toInt (x : Int128) : Int128.ofIntTruncate x.toInt = x := Int128.toInt.inj (toInt_ofIntTruncate x.minValue_le_toInt x.toInt_le) @[simp] theorem Int128.ofIntTruncate_int8ToInt (x : Int8) : Int128.ofIntTruncate x.toInt = x.toInt128 := Int128.toInt.inj (by rw [toInt_ofIntTruncate, Int8.toInt_toInt128] · exact Int.le_trans (by decide) x.minValue_le_toInt · exact Int.le_trans x.toInt_le (by decide)) @[simp] theorem 
Int128.ofIntTruncate_int16ToInt (x : Int16) : Int128.ofIntTruncate x.toInt = x.toInt128 := Int128.toInt.inj (by rw [toInt_ofIntTruncate, Int16.toInt_toInt128] · exact Int.le_trans (by decide) x.minValue_le_toInt · exact Int.le_trans x.toInt_le (by decide)) @[simp] theorem Int128.ofIntTruncate_int32ToInt (x : Int32) : Int128.ofIntTruncate x.toInt = x.toInt128 := Int128.toInt.inj (by rw [toInt_ofIntTruncate, Int32.toInt_toInt128] · exact Int.le_trans (by decide) x.minValue_le_toInt · exact Int.le_trans x.toInt_le (by decide)) @[simp] theorem Int128.ofIntTruncate_int64ToInt (x : Int64) : Int128.ofIntTruncate x.toInt = x.toInt128 := Int128.toInt.inj (by rw [toInt_ofIntTruncate, Int64.toInt_toInt128] · exact Int.le_trans (by decide) x.minValue_le_toInt · exact Int.le_trans x.toInt_le (by decide)) -- @[simp] theorem Int128.ofIntTruncate_iSizeToInt (x : ISize) : Int128.ofIntTruncate x.toInt = x.toInt128 := -- Int128.toInt.inj (by -- rw [toInt_ofIntTruncate, ISize.toInt_toInt128] -- · exact x.int128MinValue_le_toInt -- · exact x.toInt_le_int128MaxValue) theorem Int128.le_iff_toInt_le {x y : Int128} : x ≤ y ↔ x.toInt ≤ y.toInt := BitVec.sle_iff_toInt_le theorem Int128.lt_iff_toInt_lt {x y : Int128} : x < y ↔ x.toInt < y.toInt := BitVec.slt_iff_toInt_lt theorem Int128.cast_toNatClampNeg (x : Int128) (hx : 0 ≤ x) : x.toNatClampNeg = x.toInt := by rw [toNatClampNeg, toInt, Int.toNat_of_nonneg (by simpa using le_iff_toInt_le.1 hx)] theorem Int128.ofNat_toNatClampNeg (x : Int128) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x := Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt x.toNatClampNeg_lt, cast_toNatClampNeg _ hx]) theorem Int128.ofNat_int8ToNatClampNeg (x : Int8) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x.toInt128 := Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)), Int8.cast_toNatClampNeg _ hx, Int8.toInt_toInt128]) theorem Int128.ofNat_int16ToNatClampNeg (x : Int16) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg 
= x.toInt128 := Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)), Int16.cast_toNatClampNeg _ hx, Int16.toInt_toInt128]) theorem Int128.ofNat_int32ToNatClampNeg (x : Int32) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x.toInt128 := Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)), Int32.cast_toNatClampNeg _ hx, Int32.toInt_toInt128]) @[simp] theorem Int8.toInt8_toInt128 (n : Int8) : n.toInt128.toInt8 = n := Int8.toInt.inj (by simp) @[simp] theorem Int8.toInt16_toInt128 (n : Int8) : n.toInt128.toInt16 = n.toInt16 := Int16.toInt.inj (by simp) @[simp] theorem Int8.toInt32_toInt128 (n : Int8) : n.toInt128.toInt32 = n.toInt32 := Int32.toInt.inj (by simp) @[simp] theorem Int8.toInt128_toInt16 (n : Int8) : n.toInt16.toInt128 = n.toInt128 := Int128.toInt.inj (by simp) @[simp] theorem Int8.toInt128_toInt32 (n : Int8) : n.toInt32.toInt128 = n.toInt128 := Int128.toInt.inj (by simp) -- @[simp] theorem Int8.toInt128_toISize (n : Int8) : n.toISize.toInt128 = n.toInt128 := -- Int128.toInt.inj (by simp) -- @[simp] theorem Int8.toISize_toInt128 (n : Int8) : n.toInt128.toISize = n.toISize := -- ISize.toInt.inj (by simp) @[simp] theorem Int16.toInt8_toInt128 (n : Int16) : n.toInt128.toInt8 = n.toInt8 := Int8.toInt.inj (by simp) @[simp] theorem Int16.toInt16_toInt128 (n : Int16) : n.toInt128.toInt16 = n := Int16.toInt.inj (by simp) @[simp] theorem Int16.toInt32_toInt128 (n : Int16) : n.toInt128.toInt32 = n.toInt32 := Int32.toInt.inj (by simp) @[simp] theorem Int16.toInt128_toInt32 (n : Int16) : n.toInt32.toInt128 = n.toInt128 := Int128.toInt.inj (by simp) -- @[simp] theorem Int16.toInt128_toISize (n : Int16) : n.toISize.toInt128 = n.toInt128 := -- Int128.toInt.inj (by simp) -- @[simp] theorem Int16.toISize_toInt128 (n : Int16) : n.toInt128.toISize = n.toISize := -- ISize.toInt.inj (by simp) @[simp] theorem Int32.toInt8_toInt128 (n : Int32) : n.toInt128.toInt8 = n.toInt8 := 
Int8.toInt.inj (by simp)

@[simp] theorem Int32.toInt16_toInt128 (n : Int32) : n.toInt128.toInt16 = n.toInt16 := Int16.toInt.inj (by simp)
@[simp] theorem Int32.toInt32_toInt128 (n : Int32) : n.toInt128.toInt32 = n := Int32.toInt.inj (by simp)
-- @[simp] theorem Int32.toInt128_toISize (n : Int32) : n.toISize.toInt128 = n.toInt128 :=
-- Int128.toInt.inj (by simp)
-- @[simp] theorem Int32.toISize_toInt128 (n : Int32) : n.toInt128.toISize = n.toISize :=
-- ISize.toInt.inj (by simp)
@[simp] theorem Int64.toInt8_toInt128 (n : Int64) : n.toInt128.toInt8 = n.toInt8 := Int8.toInt.inj (by simp)
@[simp] theorem Int64.toInt16_toInt128 (n : Int64) : n.toInt128.toInt16 = n.toInt16 := Int16.toInt.inj (by simp)
-- @[simp] theorem Int64.toInt128_toISize (n : Int64) : n.toISize.toInt128 = n.toInt128 :=
-- Int128.toInt.inj (by simp)
-- @[simp] theorem Int64.toISize_toInt128 (n : Int64) : n.toInt128.toISize = n.toISize :=
-- ISize.toInt.inj (by simp)

-- Narrowing twice equals narrowing once to the smaller width; the proofs reduce
-- to `Int.bmod_bmod_of_dvd` since the smaller modulus divides the larger one.
@[simp] theorem Int128.toInt8_toInt16 (n : Int128) : n.toInt16.toInt8 = n.toInt8 :=
  Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
@[simp] theorem Int128.toInt8_toInt32 (n : Int128) : n.toInt32.toInt8 = n.toInt8 :=
  Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
-- @[simp] theorem Int128.toInt8_toInt64 (n : Int128) : n.toInt64.toInt8 = n.toInt8 :=
-- Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
-- @[simp] theorem Int128.toInt8_toISize (n : Int128) : n.toISize.toInt8 = n.toInt8 :=
-- Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))
@[simp] theorem Int128.toInt16_toInt32 (n : Int128) : n.toInt32.toInt16 = n.toInt16 :=
  Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
-- @[simp] theorem Int128.toInt16_toISize (n : Int128) : n.toISize.toInt16 = n.toInt16 :=
-- Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))
-- @[simp] theorem Int128.toInt32_toISize (n : Int128) : n.toISize.toInt32 = n.toInt32 :=
-- Int32.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))
-- @[simp] theorem ISize.toInt8_toInt128 (n : ISize) : n.toInt128.toInt8 = n.toInt8 :=
-- Int8.toInt.inj (by simp)
-- @[simp] theorem ISize.toInt16_toInt128 (n : ISize) : n.toInt128.toInt16 = n.toInt16 :=
-- Int16.toInt.inj (by simp)
-- @[simp] theorem ISize.toInt32_toInt128 (n : ISize) : n.toInt128.toInt32 = n.toInt32 :=
-- Int32.toInt.inj (by simp)
-- @[simp] theorem ISize.toISize_toInt128 (n : ISize) : n.toInt128.toISize = n :=
-- ISize.toInt.inj (by simp)

-- `UInt128 ↔ Int128` conversions reinterpret the bit pattern, so these hold by `rfl`.
-- theorem UInt128.toInt128_ofNatLT {n : Nat} (hn) : (UInt128.ofNatLT n hn).toInt128 = Int128.ofNat n :=
-- Int128.toBitVec.inj (by simp [BitVec.ofNatLT_eq_ofNat])
@[simp] theorem UInt128.toInt128_ofNat' {n : Nat} : (UInt128.ofNat n).toInt128 = Int128.ofNat n := (rfl)
@[simp] theorem UInt128.toInt128_ofNat {n : Nat} : toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := (rfl)
@[simp] theorem UInt128.toInt128_ofBitVec (b) : (UInt128.ofBitVec b).toInt128 = Int128.ofBitVec b := (rfl)
@[simp, int_toBitVec] theorem Int128.toBitVec_ofBitVec (b) : (Int128.ofBitVec b).toBitVec = b := (rfl)

-- In range, `ofIntTruncate` has the same bit pattern as `BitVec.ofInt`.
theorem Int128.toBitVec_ofIntTruncate {n : Int} (h₁ : Int128.minValue.toInt ≤ n)
    (h₂ : n ≤ Int128.maxValue.toInt) : (Int128.ofIntTruncate n).toBitVec = BitVec.ofInt _ n := by
  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := h₂), toBitVec_ofIntLE]

@[simp] theorem Int128.toInt_ofBitVec (b) : (Int128.ofBitVec b).toInt = b.toInt := (rfl)

@[simp] theorem Int128.toNatClampNeg_ofIntLE {n : Int} (h₁ h₂) :
    (Int128.ofIntLE n h₁ h₂).toNatClampNeg = n.toNat := by
  rw [ofIntLE, toNatClampNeg, toInt_ofInt_of_le h₁ (Int.lt_of_le_sub_one h₂)]

@[simp] theorem Int128.toNatClampNeg_ofBitVec (b) : (Int128.ofBitVec b).toNatClampNeg = b.toInt.toNat := (rfl)

theorem Int128.toNatClampNeg_ofInt_of_le {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :
    (Int128.ofInt n).toNatClampNeg = n.toNat := by
  rw [toNatClampNeg,
toInt_ofInt_of_le h₁ h₂] theorem Int128.toNatClampNeg_ofIntTruncate_of_lt {n : Int} (h₁ : n < 2 ^ 63) : (Int128.ofIntTruncate n).toNatClampNeg = n.toNat := by rw [ofIntTruncate] split · rw [dif_pos (by rw [toInt_maxValue]; omega), toNatClampNeg_ofIntLE] next h => rw [toNatClampNeg_minValue, eq_comm, Int.toNat_eq_zero] rw [toInt_minValue] at h omega @[simp] theorem Int128.toUInt128_ofBitVec (b) : (Int128.ofBitVec b).toUInt128 = UInt128.ofBitVec b := (rfl) @[simp] theorem Int128.toUInt128_ofNat' {n} : (Int128.ofNat n).toUInt128 = UInt128.ofNat n := (rfl) @[simp] theorem Int128.toUInt128_ofNat {n} : toUInt128 (OfNat.ofNat n) = OfNat.ofNat n := (rfl) theorem Int128.toInt8_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt8 = Int8.ofInt n := Int8.toInt.inj (by simp) @[simp] theorem Int128.toInt8_ofBitVec (b) : (Int128.ofBitVec b).toInt8 = Int8.ofBitVec (b.signExtend _) := (rfl) @[simp] theorem Int128.toInt8_ofNat' {n} : (Int128.ofNat n).toInt8 = Int8.ofNat n := Int8.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le]) @[simp] theorem Int128.toInt8_ofInt {n} : (Int128.ofInt n).toInt8 = Int8.ofInt n := Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide)) @[simp] theorem Int128.toInt8_ofNat {n} : toInt8 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt8_ofNat' theorem Int128.toInt8_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) : (Int128.ofIntTruncate n).toInt8 = Int8.ofInt n := by rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt8_ofIntLE] theorem Int128.toInt16_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt16 = Int16.ofInt n := Int16.toInt.inj (by simp) @[simp] theorem Int128.toInt16_ofBitVec (b) : (Int128.ofBitVec b).toInt16 = Int16.ofBitVec (b.signExtend _) := (rfl) @[simp] theorem Int128.toInt16_ofNat' {n} : (Int128.ofNat n).toInt16 = Int16.ofNat n := Int16.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le]) @[simp] theorem Int128.toInt16_ofInt {n} : (Int128.ofInt n).toInt16 = 
Int16.ofInt n := Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
@[simp] theorem Int128.toInt16_ofNat {n} : toInt16 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt16_ofNat'
theorem Int128.toInt16_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :
    (Int128.ofIntTruncate n).toInt16 = Int16.ofInt n := by
  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt16_ofIntLE]

-- Narrowing an `Int128` constructor application to `Int32`.
theorem Int128.toInt32_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt32 = Int32.ofInt n :=
  Int32.toInt.inj (by simp)
@[simp] theorem Int128.toInt32_ofBitVec (b) : (Int128.ofBitVec b).toInt32 = Int32.ofBitVec (b.signExtend _) := (rfl)
@[simp] theorem Int128.toInt32_ofNat' {n} : (Int128.ofNat n).toInt32 = Int32.ofNat n :=
  Int32.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])
@[simp] theorem Int128.toInt32_ofInt {n} : (Int128.ofInt n).toInt32 = Int32.ofInt n :=
  Int32.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))
@[simp] theorem Int128.toInt32_ofNat {n} : toInt32 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt32_ofNat'
theorem Int128.toInt32_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :
    (Int128.ofIntTruncate n).toInt32 = Int32.ofInt n := by
  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt32_ofIntLE]

-- NOTE(review): ISize analogues disabled, presumably pending platform-width
-- (`System.Platform.numBits`) support — confirm before enabling.
-- theorem Int128.toISize_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toISize = ISize.ofInt n :=
-- ISize.toInt.inj (by simp [ISize.toInt_ofInt])
-- @[simp] theorem Int128.toISize_ofBitVec (b) : (Int128.ofBitVec b).toISize = ISize.ofBitVec (b.signExtend _) := (rfl)
-- @[simp] theorem Int128.toISize_ofNat' {n} : (Int128.ofNat n).toISize = ISize.ofNat n :=
-- ISize.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])
-- @[simp] theorem Int128.toISize_ofInt {n} : (Int128.ofInt n).toISize = ISize.ofInt n :=
-- ISize.toInt.inj (by simpa [ISize.toInt_ofInt] using Int.bmod_bmod_of_dvd USize.size_dvd_uInt128Size)
-- @[simp] theorem Int128.toISize_ofNat {n} : toISize (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toISize_ofNat'
-- theorem Int128.toISize_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :
-- (Int128.ofIntTruncate n).toISize = ISize.ofInt n := by
-- rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toISize_ofIntLE]

-- The extreme values carry the `BitVec` signed extrema as bit patterns.
@[simp, int_toBitVec] theorem Int128.toBitVec_minValue : minValue.toBitVec = BitVec.intMin _ := (rfl)
@[simp, int_toBitVec] theorem Int128.toBitVec_maxValue : maxValue.toBitVec = BitVec.intMax _ := (rfl)

-- Negation commutes with narrowing.
@[simp] theorem Int128.toInt8_neg (x : Int128) : (-x).toInt8 = -x.toInt8 := Int8.toBitVec.inj (by simp)
@[simp] theorem Int128.toInt16_neg (x : Int128) : (-x).toInt16 = -x.toInt16 := Int16.toBitVec.inj (by simp)
@[simp] theorem Int128.toInt32_neg (x : Int128) : (-x).toInt32 = -x.toInt32 := Int32.toBitVec.inj (by simp)
-- @[simp] theorem Int128.toISize_neg (x : Int128) : (-x).toISize = -x.toISize := ISize.toBitVec.inj (by simp)

-- Negation commutes with widening except at the minimum value of the source type
-- (excluded by the `hx` hypothesis), where the source negation overflows.
@[simp] theorem Int8.toInt128_neg_of_ne {x : Int8} (hx : x ≠ -128) : (-x).toInt128 = -x.toInt128 :=
  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int8.toBitVec.inj h)))
@[simp] theorem Int16.toInt128_neg_of_ne {x : Int16} (hx : x ≠ -32768) : (-x).toInt128 = -x.toInt128 :=
  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int16.toBitVec.inj h)))
@[simp] theorem Int32.toInt128_neg_of_ne {x : Int32} (hx : x ≠ -2147483648) : (-x).toInt128 = -x.toInt128 :=
  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int32.toBitVec.inj h)))
@[simp] theorem Int64.toInt128_neg_of_ne {x : Int64} (hx : x ≠ -9223372036854775808) : (-x).toInt128 = -x.toInt128 :=
  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int64.toBitVec.inj h)))
-- @[simp] theorem ISize.toInt128_neg_of_ne {x : ISize} (hx : x ≠ minValue) : (-x).toInt128 = -x.toInt128 :=
-- Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _
-- (fun h => hx (ISize.toBitVec.inj (h.trans toBitVec_minValue.symm))))

-- Widening a smaller signed constructor application to `Int128`.
theorem Int8.toInt128_ofIntLE {n : Int} (h₁ h₂) : (Int8.ofIntLE n h₁ h₂).toInt128 =
    Int128.ofIntLE n (Int.le_trans (by decide) h₁) (Int.le_trans h₂ (by decide)) :=
  Int128.toInt.inj (by simp)
@[simp] theorem Int8.toInt128_ofBitVec (b) : (Int8.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)
@[simp] theorem Int8.toInt128_ofInt {n : Int} (h₁ : Int8.minValue.toInt ≤ n) (h₂ : n ≤ Int8.maxValue.toInt) :
    (Int8.ofInt n).toInt128 = Int128.ofInt n := by
  rw [← Int8.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]
@[simp] theorem Int8.toInt128_ofNat' {n : Nat} (h : n ≤ Int8.maxValue.toInt) :
    (Int8.ofNat n).toInt128 = Int128.ofNat n := by
  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]
@[simp] theorem Int8.toInt128_ofNat {n : Nat} (h : n ≤ 127) :
    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
  Int8.toInt128_ofNat' (by rw [toInt_maxValue]; omega)

theorem Int16.toInt128_ofIntLE {n : Int} (h₁ h₂) : (Int16.ofIntLE n h₁ h₂).toInt128 =
    Int128.ofIntLE n (Int.le_trans (by decide) h₁) (Int.le_trans h₂ (by decide)) :=
  Int128.toInt.inj (by simp)
@[simp] theorem Int16.toInt128_ofBitVec (b) : (Int16.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)
@[simp] theorem Int16.toInt128_ofInt {n : Int} (h₁ : Int16.minValue.toInt ≤ n) (h₂ : n ≤ Int16.maxValue.toInt) :
    (Int16.ofInt n).toInt128 = Int128.ofInt n := by
  rw [← Int16.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]
@[simp] theorem Int16.toInt128_ofNat' {n : Nat} (h : n ≤ Int16.maxValue.toInt) :
    (Int16.ofNat n).toInt128 = Int128.ofNat n := by
  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]
@[simp] theorem Int16.toInt128_ofNat {n : Nat} (h : n ≤ 32767) :
    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
  Int16.toInt128_ofNat' (by rw [toInt_maxValue]; omega)

theorem Int32.toInt128_ofIntLE {n : Int} (h₁ h₂) : (Int32.ofIntLE n h₁ h₂).toInt128 =
    Int128.ofIntLE n (Int.le_trans (by decide) h₁)
(Int.le_trans h₂ (by decide)) :=
  Int128.toInt.inj (by simp)
@[simp] theorem Int32.toInt128_ofBitVec (b) : (Int32.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)
@[simp] theorem Int32.toInt128_ofInt {n : Int} (h₁ : Int32.minValue.toInt ≤ n) (h₂ : n ≤ Int32.maxValue.toInt) :
    (Int32.ofInt n).toInt128 = Int128.ofInt n := by
  rw [← Int32.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]
@[simp] theorem Int32.toInt128_ofNat' {n : Nat} (h : n ≤ Int32.maxValue.toInt) :
    (Int32.ofNat n).toInt128 = Int128.ofNat n := by
  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]
@[simp] theorem Int32.toInt128_ofNat {n : Nat} (h : n ≤ 2147483647) :
    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
  Int32.toInt128_ofNat' (by rw [toInt_maxValue]; omega)

-- NOTE(review): ISize analogues disabled; they case on `System.Platform.numBits`
-- and need platform-width lemmas — confirm before enabling.
-- theorem ISize.toInt128_ofIntLE {n : Int} (h₁ h₂) :
-- (ISize.ofIntLE n h₁ h₂).toInt128 = Int128.ofIntLE n (Int.le_trans minValue.int128MinValue_le_toInt h₁)
-- (Int.le_trans h₂ maxValue.toInt_le_int128MaxValue) :=
-- Int128.toInt.inj (by simp)
-- @[simp] theorem ISize.toInt128_ofBitVec (b) : (ISize.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)
-- @[simp] theorem ISize.toInt128_ofInt {n : Int} (h₁ : ISize.minValue.toInt ≤ n) (h₂ : n ≤ ISize.maxValue.toInt) :
-- (ISize.ofInt n).toInt128 = Int128.ofInt n := by rw [← ISize.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]
-- @[simp] theorem ISize.toInt128_ofNat' {n : Nat} (h : n ≤ ISize.maxValue.toInt) :
-- (ISize.ofNat n).toInt128 = Int128.ofNat n := by
-- rw [← ofInt_eq_ofNat, toInt128_ofInt _ h, Int128.ofInt_eq_ofNat]
-- refine Int.le_trans ?_ (Int.zero_le_ofNat _)
-- cases System.Platform.numBits_eq <;> simp_all [ISize.toInt_minValue]
-- @[simp] theorem ISize.toInt128_ofNat {n : Nat} (h : n ≤ 2147483647) :
-- toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
-- ISize.toInt128_ofNat' (by rw [toInt_maxValue]; cases System.Platform.numBits_eq <;> simp_all <;> omega)

-- The various constructors agree with `ofBitVec` on values coming from `BitVec 128`.
@[simp] theorem Int128.ofIntLE_bitVecToInt (n : BitVec 128) :
    Int128.ofIntLE n.toInt (by exact n.le_toInt) (by exact n.toInt_le) = Int128.ofBitVec n :=
  Int128.toBitVec.inj (by simp)
theorem Int128.ofBitVec_ofNatLT (n : Nat) (hn) : Int128.ofBitVec (BitVec.ofNatLT n hn) = Int128.ofNat n :=
  Int128.toBitVec.inj (by simp [BitVec.ofNatLT_eq_ofNat hn])
@[simp] theorem Int128.ofBitVec_ofNat (n : Nat) : Int128.ofBitVec (BitVec.ofNat 128 n) = Int128.ofNat n := (rfl)
@[simp] theorem Int128.ofBitVec_ofInt (n : Int) : Int128.ofBitVec (BitVec.ofInt 128 n) = Int128.ofInt n := (rfl)
@[simp] theorem Int128.ofNat_bitVecToNat (n : BitVec 128) : Int128.ofNat n.toNat = Int128.ofBitVec n :=
  Int128.toBitVec.inj (by simp)
@[simp] theorem Int128.ofInt_bitVecToInt (n : BitVec 128) : Int128.ofInt n.toInt = Int128.ofBitVec n :=
  Int128.toBitVec.inj (by simp)
@[simp] theorem Int128.ofIntTruncate_bitVecToInt (n : BitVec 128) : Int128.ofIntTruncate n.toInt = Int128.ofBitVec n :=
  Int128.toBitVec.inj (by simp [toBitVec_ofIntTruncate (n.le_toInt) (n.toInt_le)])

@[simp] theorem Int128.toInt_neg (n : Int128) : (-n).toInt = (-n.toInt).bmod (2 ^ 128) := BitVec.toInt_neg

@[simp] theorem Int128.toNatClampNeg_eq_zero_iff {n : Int128} : n.toNatClampNeg = 0 ↔ n ≤ 0 := by
  rw [toNatClampNeg, Int.toNat_eq_zero, le_iff_toInt_le, toInt_zero]

@[simp] protected theorem Int128.not_le {n m : Int128} : ¬n ≤ m ↔ m < n := by
  simp [le_iff_toInt_le, lt_iff_toInt_lt]

-- `-n ≤ 0` fails exactly when `n` is negative but not `minValue`, per the statement.
@[simp] theorem Int128.neg_nonpos_iff (n : Int128) : -n ≤ 0 ↔ n = minValue ∨ 0 ≤ n := by
  rw [le_iff_toBitVec_sle, toBitVec_zero, toBitVec_neg, BitVec.neg_sle_zero (by decide)]
  simp [← toBitVec_inj, le_iff_toBitVec_sle, BitVec.intMin_eq_neg_two_pow]

@[simp] theorem Int128.toNatClampNeg_pos_iff (n : Int128) : 0 < n.toNatClampNeg ↔ 0 < n := by
  simp [Nat.pos_iff_ne_zero]

-- Division is `Int.tdiv` on the underlying integers, up to wrap-around.
@[simp] theorem Int128.toInt_div (a b : Int128) : (a / b).toInt = (a.toInt.tdiv b.toInt).bmod (2 ^ 128) := by
  rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv, toInt_toBitVec, toInt_toBitVec]

theorem
Int128.toInt_div_of_ne_left (a b : Int128) (h : a ≠ minValue) : (a / b).toInt = a.toInt.tdiv b.toInt := by rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv_of_ne_or_ne, toInt_toBitVec, toInt_toBitVec] exact Or.inl (by simpa [← toBitVec_inj] using h) theorem Int128.toInt_div_of_ne_right (a b : Int128) (h : b ≠ -1) : (a / b).toInt = a.toInt.tdiv b.toInt := by rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv_of_ne_or_ne, toInt_toBitVec, toInt_toBitVec] exact Or.inr (by simpa [← toBitVec_inj] using h) theorem Int8.toInt128_ne_minValue (a : Int8) : a.toInt128 ≠ Int128.minValue := have := a.le_toInt; by simp [← Int128.toInt_inj]; omega theorem Int16.toInt128_ne_minValue (a : Int16) : a.toInt128 ≠ Int128.minValue := have := a.le_toInt; by simp [← Int128.toInt_inj]; omega theorem Int32.toInt128_ne_minValue (a : Int32) : a.toInt128 ≠ Int128.minValue := have := a.le_toInt; by simp [← Int128.toInt_inj]; omega -- theorem ISize.toInt128_ne_minValue (a : ISize) (ha : a ≠ minValue) : a.toInt128 ≠ Int128.minValue := by -- have := a.minValue_le_toInt -- have : -2 ^ 127 ≤ minValue.toInt := minValue.le_toInt -- simp [← Int128.toInt_inj, ← ISize.toInt_inj] at *; omega theorem Int8.toInt128_ne_neg_one (a : Int8) (ha : a ≠ -1) : a.toInt128 ≠ -1 := ne_of_apply_ne Int128.toInt8 (by simpa using ha) theorem Int16.toInt128_ne_neg_one (a : Int16) (ha : a ≠ -1) : a.toInt128 ≠ -1 := ne_of_apply_ne Int128.toInt16 (by simpa using ha) theorem Int32.toInt128_ne_neg_one (a : Int32) (ha : a ≠ -1) : a.toInt128 ≠ -1 := ne_of_apply_ne Int128.toInt32 (by simpa using ha) -- theorem ISize.toInt128_ne_neg_one (a : ISize) (ha : a ≠ -1) : a.toInt128 ≠ -1 := -- ne_of_apply_ne Int128.toISize (by simpa using ha) theorem Int8.toInt128_div_of_ne_left (a b : Int8) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha, Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128]) 
theorem Int16.toInt128_div_of_ne_left (a b : Int16) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha, Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128]) theorem Int32.toInt128_div_of_ne_left (a b : Int32) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha, Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128]) -- theorem ISize.toInt128_div_of_ne_left (a b : ISize) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 := -- Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha, -- Int128.toInt_div_of_ne_left _ _ (a.toInt128_ne_minValue ha), toInt_toInt128, toInt_toInt128]) theorem Int8.toInt128_div_of_ne_right (a b : Int8) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb, Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128]) theorem Int16.toInt128_div_of_ne_right (a b : Int16) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb, Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128]) theorem Int32.toInt128_div_of_ne_right (a b : Int32) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 := Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb, Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128]) -- theorem ISize.toInt128_div_of_ne_right (a b : ISize) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 := -- Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb, -- Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128]) @[simp] 
theorem Int128.minValue_div_neg_one : minValue / -1 = minValue := by decide @[simp] theorem Int128.toInt_add (a b : Int128) : (a + b).toInt = (a.toInt + b.toInt).bmod (2 ^ 128) := by rw [← toInt_toBitVec, Int128.toBitVec_add, BitVec.toInt_add, toInt_toBitVec, toInt_toBitVec] @[simp] theorem Int128.toInt8_add (a b : Int128) : (a + b).toInt8 = a.toInt8 + b.toInt8 := Int8.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add]) @[simp] theorem Int128.toInt16_add (a b : Int128) : (a + b).toInt16 = a.toInt16 + b.toInt16 := Int16.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add]) @[simp] theorem Int128.toInt32_add (a b : Int128) : (a + b).toInt32 = a.toInt32 + b.toInt32 := Int32.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add]) -- @[simp] theorem Int128.toISize_add (a b : Int128) : (a + b).toISize = a.toISize + b.toISize := -- ISize.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add]) @[simp] theorem Int128.toInt_mul (a b : Int128) : (a * b).toInt = (a.toInt * b.toInt).bmod (2 ^ 128) := by rw [← toInt_toBitVec, Int128.toBitVec_mul, BitVec.toInt_mul, toInt_toBitVec, toInt_toBitVec] @[simp] theorem Int128.toInt8_mul (a b : Int128) : (a * b).toInt8 = a.toInt8 * b.toInt8 := Int8.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul]) @[simp] theorem Int128.toInt16_mul (a b : Int128) : (a * b).toInt16 = a.toInt16 * b.toInt16 := Int16.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul]) @[simp] theorem Int128.toInt32_mul (a b : Int128) : (a * b).toInt32 = a.toInt32 * b.toInt32 := Int32.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul]) -- @[simp] theorem Int128.toISize_mul (a b : Int128) : (a * b).toISize = a.toISize * b.toISize := -- ISize.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul]) protected theorem Int128.sub_eq_add_neg 
(a b : Int128) : a - b = a + -b := Int128.toBitVec.inj (by simp [BitVec.sub_eq_add_neg]) @[simp] theorem Int128.toInt_sub (a b : Int128) : (a - b).toInt = (a.toInt - b.toInt).bmod (2 ^ 128) := by simp [Int128.sub_eq_add_neg, Int.sub_eq_add_neg] @[simp] theorem Int128.toInt8_sub (a b : Int128) : (a - b).toInt8 = a.toInt8 - b.toInt8 := by simp [Int128.sub_eq_add_neg, Int8.sub_eq_add_neg] @[simp] theorem Int128.toInt16_sub (a b : Int128) : (a - b).toInt16 = a.toInt16 - b.toInt16 := by simp [Int128.sub_eq_add_neg, Int16.sub_eq_add_neg] @[simp] theorem Int128.toInt32_sub (a b : Int128) : (a - b).toInt32 = a.toInt32 - b.toInt32 := by simp [Int128.sub_eq_add_neg, Int32.sub_eq_add_neg] -- @[simp] theorem Int128.toISize_sub (a b : Int128) : (a - b).toISize = a.toISize - b.toISize := by -- simp [Int128.sub_eq_add_neg, ISize.sub_eq_add_neg] @[simp] theorem Int8.toInt128_lt {a b : Int8} : a.toInt128 < b.toInt128 ↔ a < b := by simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt] @[simp] theorem Int16.toInt128_lt {a b : Int16} : a.toInt128 < b.toInt128 ↔ a < b := by simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt] @[simp] theorem Int32.toInt128_lt {a b : Int32} : a.toInt128 < b.toInt128 ↔ a < b := by simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt] -- @[simp] theorem ISize.toInt128_lt {a b : ISize} : a.toInt128 < b.toInt128 ↔ a < b := by -- simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt] @[simp] theorem Int8.toInt128_le {a b : Int8} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by simp [le_iff_toInt_le, Int128.le_iff_toInt_le] @[simp] theorem Int16.toInt128_le {a b : Int16} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by simp [le_iff_toInt_le, Int128.le_iff_toInt_le] @[simp] theorem Int32.toInt128_le {a b : Int32} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by simp [le_iff_toInt_le, Int128.le_iff_toInt_le] -- @[simp] theorem ISize.toInt128_le {a b : ISize} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by -- simp [le_iff_toInt_le, Int128.le_iff_toInt_le] @[simp] theorem Int128.ofBitVec_neg (a : BitVec 128) : 
Int128.ofBitVec (-a) = -Int128.ofBitVec a := (rfl) @[simp] theorem Int128.ofInt_neg (a : Int) : Int128.ofInt (-a) = -Int128.ofInt a := Int128.toInt_inj.1 (by simp) theorem Int128.ofInt_eq_iff_bmod_eq_toInt (a : Int) (b : Int128) : Int128.ofInt a = b ↔ a.bmod (2 ^ 128) = b.toInt := by simp [← Int128.toInt_inj] @[simp] theorem Int128.ofBitVec_add (a b : BitVec 128) : Int128.ofBitVec (a + b) = Int128.ofBitVec a + Int128.ofBitVec b := (rfl) @[simp] theorem Int128.ofInt_add (a b : Int) : Int128.ofInt (a + b) = Int128.ofInt a + Int128.ofInt b := by simp [Int128.ofInt_eq_iff_bmod_eq_toInt] @[simp] theorem Int128.ofNat_add (a b : Nat) : Int128.ofNat (a + b) = Int128.ofNat a + Int128.ofNat b := by simp [← Int128.ofInt_eq_ofNat] theorem Int128.ofIntLE_add {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a + b) hab₁ hab₂ = Int128.ofInt a + Int128.ofInt b := by simp [Int128.ofIntLE_eq_ofInt] @[simp] theorem Int128.ofBitVec_sub (a b : BitVec 128) : Int128.ofBitVec (a - b) = Int128.ofBitVec a - Int128.ofBitVec b := (rfl) @[simp] theorem Int128.ofInt_sub (a b : Int) : Int128.ofInt (a - b) = Int128.ofInt a - Int128.ofInt b := by simp [Int128.ofInt_eq_iff_bmod_eq_toInt] @[simp] theorem Int128.ofNat_sub (a b : Nat) (hab : b ≤ a) : Int128.ofNat (a - b) = Int128.ofNat a - Int128.ofNat b := by simp [← Int128.ofInt_eq_ofNat, Int.ofNat_sub hab] theorem Int128.ofIntLE_sub {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a - b) hab₁ hab₂ = Int128.ofInt a - Int128.ofInt b := by simp [Int128.ofIntLE_eq_ofInt] @[simp] theorem Int128.ofBitVec_mul (a b : BitVec 128) : Int128.ofBitVec (a * b) = Int128.ofBitVec a * Int128.ofBitVec b := (rfl) @[simp] theorem Int128.ofInt_mul (a b : Int) : Int128.ofInt (a * b) = Int128.ofInt a * Int128.ofInt b := by simp [Int128.ofInt_eq_iff_bmod_eq_toInt] @[simp] theorem Int128.ofNat_mul (a b : Nat) : Int128.ofNat (a * b) = Int128.ofNat a * Int128.ofNat b := by simp [← Int128.ofInt_eq_ofNat] theorem Int128.ofIntLE_mul {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a * b) hab₁ hab₂ = 
Int128.ofInt a * Int128.ofInt b := by simp [Int128.ofIntLE_eq_ofInt] theorem Int128.toInt_minValue_lt_zero : minValue.toInt < 0 := by decide theorem Int128.toInt_maxValue_add_one : maxValue.toInt + 1 = 2 ^ 127 := (rfl) @[simp] theorem Int128.ofBitVec_sdiv (a b : BitVec 128) : Int128.ofBitVec (a.sdiv b) = Int128.ofBitVec a / Int128.ofBitVec b := (rfl) theorem Int128.ofInt_tdiv {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt) (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt (a.tdiv b) = Int128.ofInt a / Int128.ofInt b := by rw [Int128.ofInt_eq_iff_bmod_eq_toInt, toInt_div, toInt_ofInt, toInt_ofInt, Int.bmod_eq_of_le (n := a), Int.bmod_eq_of_le (n := b)] · exact hb₁ · exact Int.lt_of_le_sub_one hb₂ · exact ha₁ · exact Int.lt_of_le_sub_one ha₂ theorem Int128.ofInt_eq_ofIntLE_div {a b : Int} (ha₁ ha₂ hb₁ hb₂) : Int128.ofInt (a.tdiv b) = Int128.ofIntLE a ha₁ ha₂ / Int128.ofIntLE b hb₁ hb₂ := by rw [ofIntLE_eq_ofInt, ofIntLE_eq_ofInt, ofInt_tdiv ha₁ ha₂ hb₁ hb₂] theorem Int128.ofNat_div {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) : Int128.ofNat (a / b) = Int128.ofNat a / Int128.ofNat b := by rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ← ofInt_eq_ofNat, Int.ofNat_tdiv, ofInt_tdiv (by simp) _ (by simp)] · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb) · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha) @[simp] theorem Int128.ofBitVec_srem (a b : BitVec 128) : Int128.ofBitVec (a.srem b) = Int128.ofBitVec a % Int128.ofBitVec b := (rfl) @[simp] theorem Int128.toInt_bmod_size (a : Int128) : a.toInt.bmod size = a.toInt := BitVec.toInt_bmod_cancel _ theorem Int128.ofIntLE_le_iff_le {a b : Int} (ha₁ ha₂ hb₁ hb₂) : Int128.ofIntLE a ha₁ ha₂ ≤ Int128.ofIntLE b hb₁ hb₂ ↔ a ≤ b := by simp [le_iff_toInt_le] theorem Int128.ofInt_le_iff_le {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt) (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt a ≤ Int128.ofInt b ↔ a ≤ b := by rw [← ofIntLE_eq_ofInt ha₁ ha₂, ← 
ofIntLE_eq_ofInt hb₁ hb₂, ofIntLE_le_iff_le] theorem Int128.ofNat_le_iff_le {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) : Int128.ofNat a ≤ Int128.ofNat b ↔ a ≤ b := by rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ofInt_le_iff_le (by simp) _ (by simp), Int.ofNat_le] · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb) · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha) theorem Int128.ofBitVec_le_iff_sle (a b : BitVec 128) : Int128.ofBitVec a ≤ Int128.ofBitVec b ↔ a.sle b := Iff.rfl theorem Int128.ofIntLE_lt_iff_lt {a b : Int} (ha₁ ha₂ hb₁ hb₂) : Int128.ofIntLE a ha₁ ha₂ < Int128.ofIntLE b hb₁ hb₂ ↔ a < b := by simp [lt_iff_toInt_lt] theorem Int128.ofInt_lt_iff_lt {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt) (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt a < Int128.ofInt b ↔ a < b := by rw [← ofIntLE_eq_ofInt ha₁ ha₂, ← ofIntLE_eq_ofInt hb₁ hb₂, ofIntLE_lt_iff_lt] theorem Int128.ofNat_lt_iff_lt {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) : Int128.ofNat a < Int128.ofNat b ↔ a < b := by rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ofInt_lt_iff_lt (by simp) _ (by simp), Int.ofNat_lt] · exact Int.le_of_lt_add_one (Int.ofNat_lt.2 hb) · exact Int.le_of_lt_add_one (Int.ofNat_lt.2 ha) theorem Int128.ofBitVec_lt_iff_slt (a b : BitVec 128) : Int128.ofBitVec a < Int128.ofBitVec b ↔ a.slt b := Iff.rfl theorem Int128.toNatClampNeg_one : (1 : Int128).toNatClampNeg = 1 := (rfl) theorem Int128.toInt_one : (1 : Int128).toInt = 1 := (rfl) theorem Int128.zero_lt_one : (0 : Int128) < 1 := by decide theorem Int128.zero_ne_one : (0 : Int128) ≠ 1 := by decide protected theorem Int128.add_assoc (a b c : Int128) : a + b + c = a + (b + c) := Int128.toBitVec_inj.1 (BitVec.add_assoc _ _ _) instance : Std.Associative (α := Int128) (· + ·) := ⟨Int128.add_assoc⟩ protected theorem Int128.add_comm (a b : Int128) : a + b = b + a := Int128.toBitVec_inj.1 (BitVec.add_comm _ _) instance : Std.Commutative (α := Int128) (· + ·) := ⟨Int128.add_comm⟩ @[simp] protected 
theorem Int128.add_zero (a : Int128) : a + 0 = a := Int128.toBitVec_inj.1 (BitVec.add_zero _)

-- NOTE(review): this block ports `BitVec`/`Int` lemmas to the 128-bit signed
-- integer type through the injections `Int128.toBitVec_inj` / `Int128.toInt_inj`
-- and the `*_iff_toInt_*` transfer lemmas, mirroring the layout of Lean core's
-- fixed-width integer lemma files — presumably generated from a template;
-- confirm with the generator before hand-editing individual lemmas.

@[simp] protected theorem Int128.zero_add (a : Int128) : 0 + a = a := Int128.toBitVec_inj.1 (BitVec.zero_add _)

-- `0` is a lawful two-sided identity for `Int128` addition.
instance : Std.LawfulIdentity (α := Int128) (· + ·) 0 where
  left_id := Int128.zero_add
  right_id := Int128.add_zero

-- Subtraction and negation basics, lifted from `BitVec`.
@[simp] protected theorem Int128.sub_zero (a : Int128) : a - 0 = a := Int128.toBitVec_inj.1 (BitVec.sub_zero _)
@[simp] protected theorem Int128.zero_sub (a : Int128) : 0 - a = -a := Int128.toBitVec_inj.1 (BitVec.zero_sub _)
@[simp] protected theorem Int128.sub_self (a : Int128) : a - a = 0 := Int128.toBitVec_inj.1 (BitVec.sub_self _)
protected theorem Int128.add_left_neg (a : Int128) : -a + a = 0 := Int128.toBitVec_inj.1 (BitVec.add_left_neg _)
protected theorem Int128.add_right_neg (a : Int128) : a + -a = 0 := Int128.toBitVec_inj.1 (BitVec.add_right_neg _)
@[simp] protected theorem Int128.sub_add_cancel (a b : Int128) : a - b + b = a := Int128.toBitVec_inj.1 (BitVec.sub_add_cancel _ _)
protected theorem Int128.eq_sub_iff_add_eq {a b c : Int128} : a = c - b ↔ a + b = c := by simpa [← Int128.toBitVec_inj] using BitVec.eq_sub_iff_add_eq
protected theorem Int128.sub_eq_iff_eq_add {a b c : Int128} : a - b = c ↔ a = c + b := by simpa [← Int128.toBitVec_inj] using BitVec.sub_eq_iff_eq_add
@[simp] protected theorem Int128.neg_neg {a : Int128} : - -a = a := Int128.toBitVec_inj.1 BitVec.neg_neg
@[simp] protected theorem Int128.neg_inj {a b : Int128} : -a = -b ↔ a = b := by simp [← Int128.toBitVec_inj]
@[simp] protected theorem Int128.neg_ne_zero {a : Int128} : -a ≠ 0 ↔ a ≠ 0 := by simp [← Int128.toBitVec_inj]
protected theorem Int128.neg_add {a b : Int128} : - (a + b) = -a - b := Int128.toBitVec_inj.1 BitVec.neg_add
@[simp] protected theorem Int128.sub_neg {a b : Int128} : a - -b = a + b := Int128.toBitVec_inj.1 BitVec.sub_neg
@[simp] protected theorem Int128.neg_sub {a b : Int128} : -(a - b) = b - a := by
  rw [Int128.sub_eq_add_neg, Int128.neg_add, Int128.sub_neg, Int128.add_comm, ← Int128.sub_eq_add_neg]
protected theorem Int128.sub_sub (a b c : Int128) : a - b - c = a - (b + c) := by
  simp [Int128.sub_eq_add_neg, Int128.add_assoc, Int128.neg_add]

-- Cancellation laws for `+` / `-`, stated as `simp` iffs.
@[simp] protected theorem Int128.add_left_inj {a b : Int128} (c : Int128) : (a + c = b + c) ↔ a = b := by simp [← Int128.toBitVec_inj]
@[simp] protected theorem Int128.add_right_inj {a b : Int128} (c : Int128) : (c + a = c + b) ↔ a = b := by simp [← Int128.toBitVec_inj]
@[simp] protected theorem Int128.sub_left_inj {a b : Int128} (c : Int128) : (a - c = b - c) ↔ a = b := by simp [← Int128.toBitVec_inj]
@[simp] protected theorem Int128.sub_right_inj {a b : Int128} (c : Int128) : (c - a = c - b) ↔ a = b := by simp [← Int128.toBitVec_inj]
@[simp] theorem Int128.add_eq_right {a b : Int128} : a + b = b ↔ a = 0 := by simp [← Int128.toBitVec_inj]
@[simp] theorem Int128.add_eq_left {a b : Int128} : a + b = a ↔ b = 0 := by simp [← Int128.toBitVec_inj]
@[simp] theorem Int128.right_eq_add {a b : Int128} : b = a + b ↔ a = 0 := by simp [← Int128.toBitVec_inj]
@[simp] theorem Int128.left_eq_add {a b : Int128} : a = a + b ↔ b = 0 := by simp [← Int128.toBitVec_inj]

-- Multiplicative structure: `*` is commutative and associative with identity `1`.
protected theorem Int128.mul_comm (a b : Int128) : a * b = b * a := Int128.toBitVec_inj.1 (BitVec.mul_comm _ _)
instance : Std.Commutative (α := Int128) (· * ·) := ⟨Int128.mul_comm⟩
protected theorem Int128.mul_assoc (a b c : Int128) : a * b * c = a * (b * c) := Int128.toBitVec_inj.1 (BitVec.mul_assoc _ _ _)
instance : Std.Associative (α := Int128) (· * ·) := ⟨Int128.mul_assoc⟩
@[simp] theorem Int128.mul_one (a : Int128) : a * 1 = a := Int128.toBitVec_inj.1 (BitVec.mul_one _)
@[simp] theorem Int128.one_mul (a : Int128) : 1 * a = a := Int128.toBitVec_inj.1 (BitVec.one_mul _)
-- Only `right_id` is supplied; the left identity follows from commutativity.
instance : Std.LawfulCommIdentity (α := Int128) (· * ·) 1 where
  right_id := Int128.mul_one
@[simp] theorem Int128.mul_zero {a : Int128} : a * 0 = 0 := Int128.toBitVec_inj.1 BitVec.mul_zero
@[simp] theorem Int128.zero_mul {a : Int128} : 0 * a = 0 := Int128.toBitVec_inj.1 BitVec.zero_mul

-- Powers unfold definitionally.
@[simp] protected theorem Int128.pow_zero (x : Int128) : x ^ 0 = 1 := (rfl)
protected theorem Int128.pow_succ (x : Int128) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl)

-- Distributivity of `*` over `+` / `-`, plus sign-juggling lemmas.
protected theorem Int128.mul_add {a b c : Int128} : a * (b + c) = a * b + a * c := Int128.toBitVec_inj.1 BitVec.mul_add
protected theorem Int128.add_mul {a b c : Int128} : (a + b) * c = a * c + b * c := by
  rw [Int128.mul_comm, Int128.mul_add, Int128.mul_comm a c, Int128.mul_comm c b]
protected theorem Int128.mul_succ {a b : Int128} : a * (b + 1) = a * b + a := by simp [Int128.mul_add]
protected theorem Int128.succ_mul {a b : Int128} : (a + 1) * b = a * b + b := by simp [Int128.add_mul]
protected theorem Int128.two_mul {a : Int128} : 2 * a = a + a := Int128.toBitVec_inj.1 BitVec.two_mul
protected theorem Int128.mul_two {a : Int128} : a * 2 = a + a := Int128.toBitVec_inj.1 BitVec.mul_two
protected theorem Int128.neg_mul (a b : Int128) : -a * b = -(a * b) := Int128.toBitVec_inj.1 (BitVec.neg_mul _ _)
protected theorem Int128.mul_neg (a b : Int128) : a * -b = -(a * b) := Int128.toBitVec_inj.1 (BitVec.mul_neg _ _)
protected theorem Int128.neg_mul_neg (a b : Int128) : -a * -b = a * b := Int128.toBitVec_inj.1 (BitVec.neg_mul_neg _ _)
protected theorem Int128.neg_mul_comm (a b : Int128) : -a * b = a * -b := Int128.toBitVec_inj.1 (BitVec.neg_mul_comm _ _)
protected theorem Int128.mul_sub {a b c : Int128} : a * (b - c) = a * b - a * c := Int128.toBitVec_inj.1 BitVec.mul_sub
protected theorem Int128.sub_mul {a b c : Int128} : (a - b) * c = a * c - b * c := by
  rw [Int128.mul_comm, Int128.mul_sub, Int128.mul_comm, Int128.mul_comm c]
-- Identities relating negation to bitwise complement (`~~~`), from `BitVec`.
theorem Int128.neg_add_mul_eq_mul_not {a b : Int128} : -(a + a * b) = a * ~~~b := Int128.toBitVec_inj.1 BitVec.neg_add_mul_eq_mul_not
theorem Int128.neg_mul_not_eq_add_mul {a b : Int128} : -(a * ~~~b) = a + a * b := Int128.toBitVec_inj.1 BitVec.neg_mul_not_eq_add_mul

-- Order lemmas: `<` / `≤` on `Int128` transfer to `Int` via `toInt`.
protected theorem Int128.le_of_lt {a b : Int128} : a < b → a ≤ b := by simpa [lt_iff_toInt_lt, le_iff_toInt_le] using Int.le_of_lt
protected theorem Int128.lt_of_le_of_ne {a b : Int128} : a ≤ b → a ≠ b → a < b := by simpa [lt_iff_toInt_lt, le_iff_toInt_le, ← Int128.toInt_inj] using (Int.lt_iff_le_and_ne.2 ⟨·, ·⟩)
protected theorem Int128.lt_iff_le_and_ne {a b : Int128} : a < b ↔ a ≤ b ∧ a ≠ b := by simpa [lt_iff_toInt_lt, le_iff_toInt_le, ← Int128.toInt_inj] using Int.lt_iff_le_and_ne
@[simp] protected theorem Int128.lt_irrefl {a : Int128} : ¬a < a := by simp [lt_iff_toInt_lt]
protected theorem Int128.lt_of_le_of_lt {a b c : Int128} : a ≤ b → b < c → a < c := by simpa [le_iff_toInt_le, lt_iff_toInt_lt] using Int.lt_of_le_of_lt
protected theorem Int128.lt_of_lt_of_le {a b c : Int128} : a < b → b ≤ c → a < c := by simpa [le_iff_toInt_le, lt_iff_toInt_lt] using Int.lt_of_lt_of_le

-- `minValue` / `maxValue` are least / greatest for `≤`.
@[simp] theorem Int128.minValue_le (a : Int128) : minValue ≤ a := by simpa [le_iff_toInt_le] using a.minValue_le_toInt
@[simp] theorem Int128.le_maxValue (a : Int128) : a ≤ maxValue := by simpa [le_iff_toInt_le] using a.toInt_le
@[simp] theorem Int128.not_lt_minValue {a : Int128} : ¬a < minValue := fun h => Int128.lt_irrefl (Int128.lt_of_le_of_lt a.minValue_le h)
@[simp] theorem Int128.not_maxValue_lt {a : Int128} : ¬maxValue < a := fun h => Int128.lt_irrefl (Int128.lt_of_lt_of_le h a.le_maxValue)
@[simp] protected theorem Int128.le_refl (a : Int128) : a ≤ a := by simp [Int128.le_iff_toInt_le]
protected theorem Int128.le_rfl {a : Int128} : a ≤ a := Int128.le_refl _
protected theorem Int128.le_antisymm_iff {a b : Int128} : a = b ↔ a ≤ b ∧ b ≤ a := ⟨by rintro rfl; simp, by simpa [← Int128.toInt_inj, le_iff_toInt_le] using Int.le_antisymm⟩
protected theorem Int128.le_antisymm {a b : Int128} : a ≤ b → b ≤ a → a = b := by simpa using Int128.le_antisymm_iff.2
@[simp] theorem Int128.le_minValue_iff {a : Int128} : a ≤ minValue ↔ a = minValue := ⟨fun h => Int128.le_antisymm h a.minValue_le, by rintro rfl; simp⟩
@[simp] theorem Int128.maxValue_le_iff {a : Int128} : maxValue ≤ a ↔ a = maxValue := ⟨fun h => Int128.le_antisymm a.le_maxValue h, by rintro rfl; simp⟩

-- Division / remainder, lifted from `BitVec.sdiv` / `BitVec.srem`; note the
-- total-function conventions `a / 0 = 0` and `a % 0 = a` stated below.
-- NOTE(review): `maxRecDepth` is raised locally — presumably elaboration at
-- width 128 exceeds the default recursion depth here; confirm if touched.
set_option maxRecDepth 1000 in
@[simp] protected theorem Int128.zero_div {a : Int128} : 0 / a = 0 := Int128.toBitVec_inj.1 BitVec.zero_sdiv
@[simp] protected theorem Int128.div_zero {a : Int128} : a / 0 = 0 := Int128.toBitVec_inj.1 BitVec.sdiv_zero
@[simp] protected theorem Int128.div_one {a : Int128} : a / 1 = a := Int128.toBitVec_inj.1 BitVec.sdiv_one
protected theorem Int128.div_self {a : Int128} : a / a = if a = 0 then 0 else 1 := by simp [← Int128.toBitVec_inj, apply_ite]
@[simp] protected theorem Int128.mod_zero {a : Int128} : a % 0 = a := Int128.toBitVec_inj.1 BitVec.srem_zero
set_option maxRecDepth 1000 in
@[simp] protected theorem Int128.zero_mod {a : Int128} : 0 % a = 0 := Int128.toBitVec_inj.1 BitVec.zero_srem
@[simp] protected theorem Int128.mod_one {a : Int128} : a % 1 = 0 := Int128.toBitVec_inj.1 BitVec.srem_one
@[simp] protected theorem Int128.mod_self {a : Int128} : a % a = 0 := Int128.toBitVec_inj.1 BitVec.srem_self

-- Totality and transitivity facts feeding the order instances below.
@[simp] protected theorem Int128.not_lt {a b : Int128} : ¬ a < b ↔ b ≤ a := by simp [lt_iff_toBitVec_slt, le_iff_toBitVec_sle, BitVec.sle_eq_not_slt]
protected theorem Int128.le_trans {a b c : Int128} : a ≤ b → b ≤ c → a ≤ c := by simpa [le_iff_toInt_le] using Int.le_trans
protected theorem Int128.lt_trans {a b c : Int128} : a < b → b < c → a < c := by simpa [lt_iff_toInt_lt] using Int.lt_trans
protected theorem Int128.le_total (a b : Int128) : a ≤ b ∨ b ≤ a := by simpa [le_iff_toInt_le] using Int.le_total _ _
protected theorem Int128.lt_asymm {a b : Int128} : a < b → ¬b < a := fun hab hba => Int128.lt_irrefl (Int128.lt_trans hab hba)

-- `≤` makes `Int128` a linear order.
open Std in
instance Int128.instIsLinearOrder : IsLinearOrder Int128 := by
  apply IsLinearOrder.of_le
  case le_antisymm => constructor; apply Int128.le_antisymm
  case le_total => constructor; apply Int128.le_total
  case le_trans => constructor; apply Int128.le_trans
-- `<` is the strict order lawfully induced by `≤`
-- (`Int128.not_le` is proved elsewhere in this file).
open Std in
instance : LawfulOrderLT Int128 where
  lt_iff := by simp [← Int128.not_le, Decidable.imp_iff_not_or, Std.Total.total]

protected theorem Int128.add_neg_eq_sub {a b : Int128} : a + -b = a - b := Int128.toBitVec_inj.1 BitVec.add_neg_eq_sub
theorem Int128.neg_eq_neg_one_mul (a : Int128) : -a = -1 * a := Int128.toInt_inj.1 (by simp)
@[simp] protected theorem Int128.add_sub_cancel (a b : Int128) : a + b - b = a := Int128.toBitVec_inj.1 (BitVec.add_sub_cancel _ _)
-- Trichotomy-style consequences, discharged by `omega` on the `Int` image.
protected theorem Int128.lt_or_lt_of_ne {a b : Int128} : a ≠ b → a < b ∨ b < a := by simp [lt_iff_toInt_lt, ← Int128.toInt_inj]; omega
protected theorem Int128.lt_or_le (a b : Int128) : a < b ∨ b ≤ a := by simp [lt_iff_toInt_lt, le_iff_toInt_le]; omega
protected theorem Int128.le_or_lt (a b : Int128) : a ≤ b ∨ b < a := (b.lt_or_le a).symm
protected theorem Int128.le_of_eq {a b : Int128} : a = b → a ≤ b := (· ▸ Int128.le_rfl)
protected theorem Int128.le_iff_lt_or_eq {a b : Int128} : a ≤ b ↔ a < b ∨ a = b := by simp [← Int128.toInt_inj, le_iff_toInt_le, lt_iff_toInt_lt]; omega
protected theorem Int128.lt_or_eq_of_le {a b : Int128} : a ≤ b → a < b ∨ a = b := Int128.le_iff_lt_or_eq.mp
-- For nonnegative values, `toInt` and `toNatClampNeg` agree (as integers).
theorem Int128.toInt_eq_toNatClampNeg {a : Int128} (ha : 0 ≤ a) : a.toInt = a.toNatClampNeg := by simpa only [← toNat_toInt, Int.eq_natCast_toNat, le_iff_toInt_le] using ha

-- Ring operations commute with the `UInt128 ↔ Int128` casts; proofs are
-- `rfl`, so each equation holds definitionally.
@[simp] theorem UInt128.toInt128_add (a b : UInt128) : (a + b).toInt128 = a.toInt128 + b.toInt128 := (rfl)
@[simp] theorem UInt128.toInt128_neg (a : UInt128) : (-a).toInt128 = -a.toInt128 := (rfl)
@[simp] theorem UInt128.toInt128_sub (a b : UInt128) : (a - b).toInt128 = a.toInt128 - b.toInt128 := (rfl)
@[simp] theorem UInt128.toInt128_mul (a b : UInt128) : (a * b).toInt128 = a.toInt128 * b.toInt128 := (rfl)
@[simp] theorem Int128.toUInt128_add (a b : Int128) : (a + b).toUInt128 = a.toUInt128 + b.toUInt128 := (rfl)
@[simp] theorem Int128.toUInt128_neg (a : Int128) : (-a).toUInt128 = -a.toUInt128 := (rfl)
@[simp] theorem Int128.toUInt128_sub (a b : Int128) : (a - b).toUInt128 = a.toUInt128 - b.toUInt128 := (rfl)
@[simp] theorem Int128.toUInt128_mul (a b : Int128) : (a * b).toUInt128 = a.toUInt128 * b.toUInt128 := (rfl)

-- Monotonicity of the `Nat` projections under the `Int128` order.
theorem Int128.toNatClampNeg_le {a b : Int128} (hab : a ≤ b) : a.toNatClampNeg ≤ b.toNatClampNeg := by
  rw [← Int128.toNat_toInt, ← Int128.toNat_toInt]
  exact Int.toNat_le_toNat (Int128.le_iff_toInt_le.1 hab)
theorem Int128.toUInt128_le {a b : Int128} (ha : 0 ≤ a) (hab : a ≤ b) : a.toUInt128 ≤ b.toUInt128 := by
  rw [UInt128.le_iff_toNat_le, toNat_toUInt128_of_le ha, toNat_toUInt128_of_le (Int128.le_trans ha hab)]
  exact Int128.toNatClampNeg_le hab
theorem Int128.zero_le_ofNat_of_lt {a : Nat} (ha : a < 2 ^ 127) : 0 ≤ Int128.ofNat a := by
  rw [le_iff_toInt_le, toInt_ofNat_of_lt ha, Int128.toInt_zero]
  exact Int.natCast_nonneg _

-- Subtraction lemmas valid when `0 ≤ b ≤ a`, i.e. when no wrap-around occurs.
protected theorem Int128.sub_nonneg_of_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) : 0 ≤ a - b := by
  rw [← ofNat_toNatClampNeg _ hb, ← ofNat_toNatClampNeg _ (Int128.le_trans hb hab), ← ofNat_sub _ _ (Int128.toNatClampNeg_le hab)]
  exact Int128.zero_le_ofNat_of_lt (Nat.sub_lt_of_lt a.toNatClampNeg_lt)
theorem Int128.toNatClampNeg_sub_of_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) : (a - b).toNatClampNeg = a.toNatClampNeg - b.toNatClampNeg := by
  rw [← toNat_toUInt128_of_le (Int128.sub_nonneg_of_le hb hab), toUInt128_sub, UInt128.toNat_sub_of_le _ _ (Int128.toUInt128_le hb hab), ← toNat_toUInt128_of_le (Int128.le_trans hb hab), ← toNat_toUInt128_of_le hb]
theorem Int128.toInt_sub_of_le (a b : Int128) (hb : 0 ≤ b) (h : b ≤ a) : (a - b).toInt = a.toInt - b.toInt := by
  rw [Int128.toInt_eq_toNatClampNeg (Int128.sub_nonneg_of_le hb h), Int128.toInt_eq_toNatClampNeg (Int128.le_trans hb h), Int128.toInt_eq_toNatClampNeg hb, Int128.toNatClampNeg_sub_of_le hb h, Int.ofNat_sub]
  exact Int128.toNatClampNeg_le h
protected theorem Int128.sub_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) : a - b ≤ a := by simp_all [le_iff_toInt_le, Int128.toInt_sub_of_le _ _ hb hab]; omega
protected theorem Int128.sub_lt {a b : Int128} (hb : 0 < b) (hab : b ≤ a) : a - b < a := by simp_all [lt_iff_toInt_lt, Int128.toInt_sub_of_le _ _ (Int128.le_of_lt hb) hab]; omega
protected theorem Int128.ne_of_lt {a b : Int128} : a < b → a ≠ b := by simpa [Int128.lt_iff_toInt_lt, ← Int128.toInt_inj] using Int.ne_of_lt

-- `%` on `Int128` is truncated remainder (`Int.tmod`), matching `BitVec.srem`.
@[simp] theorem Int128.toInt_mod (a b : Int128) : (a % b).toInt = a.toInt.tmod b.toInt := by
  rw [← toInt_toBitVec, Int128.toBitVec_mod, BitVec.toInt_srem, toInt_toBitVec, toInt_toBitVec]

-- Widening to `Int128` commutes with `%` for the narrower signed types.
@[simp] theorem Int8.toInt128_mod (a b : Int8) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)
@[simp] theorem Int16.toInt128_mod (a b : Int16) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)
@[simp] theorem Int32.toInt128_mod (a b : Int32) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)
@[simp] theorem Int64.toInt128_mod (a b : Int64) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)
-- @[simp] theorem ISize.toInt128_mod (a b : ISize) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)

-- `Int128.ofInt` commutes with `tmod` when both operands are in range.
theorem Int128.ofInt_tmod {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt) (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt (a.tmod b) = Int128.ofInt a % Int128.ofInt b := by
  rw [Int128.ofInt_eq_iff_bmod_eq_toInt, ← toInt_bmod_size, toInt_mod, toInt_ofInt, toInt_ofInt, Int.bmod_eq_of_le (n := a), Int.bmod_eq_of_le (n := b)]
  · exact hb₁
  · exact Int.lt_of_le_sub_one hb₂
  · exact ha₁
  · exact Int.lt_of_le_sub_one ha₂
theorem Int128.ofInt_eq_ofIntLE_mod {a b : Int} (ha₁ ha₂ hb₁ hb₂) : Int128.ofInt (a.tmod b) = Int128.ofIntLE a ha₁ ha₂ % Int128.ofIntLE b hb₁ hb₂ := by
  rw [ofIntLE_eq_ofInt, ofIntLE_eq_ofInt, ofInt_tmod ha₁ ha₂ hb₁ hb₂]
theorem Int128.ofNat_mod {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) : Int128.ofNat (a % b) = Int128.ofNat a % Int128.ofNat b := by
  rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ← ofInt_eq_ofNat, Int.ofNat_tmod, ofInt_tmod (by simp) _ (by simp)]
  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb)
  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha)
================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Basic.lean ================================================
import Hax.MissingLean.Init.Data.UInt.BasicAux

-- Arithmetic and bitwise operations on `UInt128`, all defined by delegating to
-- the underlying 128-bit `BitVec` (mirrors core's definitions for `UInt64` etc.).
@[inline] def UInt128.ofFin (a : Fin UInt128.size) : UInt128 := ⟨⟨a⟩⟩
def UInt128.ofInt (x : Int) : UInt128 := UInt128.ofNat (x % 2 ^ 128).toNat
protected def UInt128.add (a b : UInt128) : UInt128 := ⟨a.toBitVec + b.toBitVec⟩
protected def UInt128.sub (a b : UInt128) : UInt128 := ⟨a.toBitVec - b.toBitVec⟩
protected def UInt128.mul (a b : UInt128) : UInt128 := ⟨a.toBitVec * b.toBitVec⟩
protected def UInt128.div (a b : UInt128) : UInt128 := ⟨BitVec.udiv a.toBitVec b.toBitVec⟩
protected def UInt128.pow (x : UInt128) (n : Nat) : UInt128 :=
  match n with
  | 0 => 1
  | n + 1 => UInt128.mul (UInt128.pow x n) x
protected def UInt128.mod (a b : UInt128) : UInt128 := ⟨BitVec.umod a.toBitVec b.toBitVec⟩
set_option linter.missingDocs false in
@[deprecated UInt128.mod (since := "2024-09-23")]
protected def UInt128.modn (a : UInt128) (n : Nat) : UInt128 := ⟨Fin.modn a.toFin n⟩
protected def UInt128.land (a b : UInt128) : UInt128 := ⟨a.toBitVec &&& b.toBitVec⟩
protected def UInt128.lor (a b : UInt128) : UInt128 := ⟨a.toBitVec ||| b.toBitVec⟩
protected def UInt128.xor (a b : UInt128) : UInt128 := ⟨a.toBitVec ^^^ b.toBitVec⟩
-- Shifts reduce the shift amount mod 128, matching the other UInt types.
protected def UInt128.shiftLeft (a b : UInt128) : UInt128 := ⟨a.toBitVec <<< (UInt128.mod b 128).toBitVec⟩
protected def UInt128.shiftRight (a b : UInt128) : UInt128 := ⟨a.toBitVec >>> (UInt128.mod b 128).toBitVec⟩
protected def UInt128.lt (a b : UInt128) : Prop := a.toBitVec < b.toBitVec
protected def UInt128.le (a b : UInt128) : Prop := a.toBitVec ≤ b.toBitVec
instance : Add UInt128 := ⟨UInt128.add⟩
instance : Sub UInt128 := ⟨UInt128.sub⟩
instance : Mul UInt128 := ⟨UInt128.mul⟩
instance : Pow UInt128 Nat := ⟨UInt128.pow⟩
instance : Mod UInt128 := ⟨UInt128.mod⟩
set_option
-- NOTE(review): `linter.deprecated false in` completes the `set_option` cut at the
-- previous chunk boundary (silences the deprecation warning for `UInt128.modn`).
linter.deprecated false in
instance : HMod UInt128 Nat UInt128 := ⟨UInt128.modn⟩
instance : Div UInt128 := ⟨UInt128.div⟩
instance : LT UInt128 := ⟨UInt128.lt⟩
instance : LE UInt128 := ⟨UInt128.le⟩
protected def UInt128.complement (a : UInt128) : UInt128 := ⟨~~~a.toBitVec⟩
protected def UInt128.neg (a : UInt128) : UInt128 := ⟨-a.toBitVec⟩
instance : Complement UInt128 := ⟨UInt128.complement⟩
instance : Neg UInt128 := ⟨UInt128.neg⟩
instance : AndOp UInt128 := ⟨UInt128.land⟩
instance : OrOp UInt128 := ⟨UInt128.lor⟩
instance : XorOp UInt128 := ⟨UInt128.xor⟩
instance : ShiftLeft UInt128 := ⟨UInt128.shiftLeft⟩
instance : ShiftRight UInt128 := ⟨UInt128.shiftRight⟩
def Bool.toUInt128 (b : Bool) : UInt128 := if b then 1 else 0
-- Decidability of the order relations, inherited from `BitVec`.
def UInt128.decLt (a b : UInt128) : Decidable (a < b) := inferInstanceAs (Decidable (a.toBitVec < b.toBitVec))
def UInt128.decLe (a b : UInt128) : Decidable (a ≤ b) := inferInstanceAs (Decidable (a.toBitVec ≤ b.toBitVec))
attribute [instance_reducible, instance] UInt128.decLt UInt128.decLe
instance : Max UInt128 := maxOfLe
instance : Min UInt128 := minOfLe

-- Command macro: for a given unsigned type and bit width, declare the
-- no-overflow `toNat` lemmas and the `addOverflow`/`subOverflow`/`mulOverflow`
-- predicates with their `grind`-visible characterizations.
open Lean in
set_option hygiene false in
macro "additional_uint_decls" typeName:ident width:term : command => do
  let mut cmds := ← Syntax.getArgs <$> `(
    namespace $typeName
    theorem toNat_add_of_lt {x y : $typeName} (h : x.toNat + y.toNat < 2 ^ $width) : (x + y).toNat = x.toNat + y.toNat := BitVec.toNat_add_of_lt h
    theorem toNat_sub_of_le' {x y : $typeName} (h : y.toNat ≤ x.toNat) : (x - y).toNat = x.toNat - y.toNat := BitVec.toNat_sub_of_le h
    theorem toNat_mul_of_lt {x y : $typeName} (h : x.toNat * y.toNat < 2 ^ $width) : (x * y).toNat = x.toNat * y.toNat := BitVec.toNat_mul_of_lt h
    def addOverflow (a b : $typeName) : Bool := BitVec.uaddOverflow a.toBitVec b.toBitVec
    def subOverflow (a b : $typeName) : Bool := BitVec.usubOverflow a.toBitVec b.toBitVec
    def mulOverflow (a b : $typeName) : Bool := BitVec.umulOverflow a.toBitVec b.toBitVec
    @[grind .]
    theorem addOverflow_iff {a b : $typeName} : addOverflow a b ↔ a.toNat + b.toNat ≥ 2 ^ $width := decide_eq_true_iff
    @[grind .]
    theorem subOverflow_iff {a b : $typeName} : subOverflow a b ↔ a.toNat < b.toNat := decide_eq_true_iff
    @[grind .]
    theorem mulOverflow_iff {a b : $typeName} : mulOverflow a b ↔ a.toNat * b.toNat ≥ 2 ^ $width := decide_eq_true_iff
    end $typeName
  )
  return ⟨mkNullNode cmds⟩
additional_uint_decls UInt8 8
additional_uint_decls UInt16 16
additional_uint_decls UInt32 32
additional_uint_decls UInt64 64
additional_uint_decls UInt128 128
additional_uint_decls USize System.Platform.numBits

-- Command macro: generate the unsigned→signed conversion functions missing from
-- core (e.g. `UInt8.toInt16`), skipping same-width pairs (equal index), via
-- `Nat.toIntX` on `toNat`.
open Lean in
set_option hygiene false in
macro "declare_missing_uint_conversions" : command => do
  let mut cmds := #[]
  let src : List (Name × Nat) := [ (`UInt8, 8), (`UInt16, 16), (`UInt32, 32), (`UInt64, 64), (`USize, 0), ]
  let dst : List (Name × Nat) := [ (`Int8, 8), (`Int16, 16), (`Int32, 32), (`Int64, 64), (`ISize, 0) ]
  for (srcName, srcIdx) in src do
    for (dstName, dstIdx) in dst do
      let srcIdent := mkIdent srcName
      let dstIdent := mkIdent dstName
      if srcIdx != dstIdx then
        cmds := cmds.push $ ← `(
          def $(mkIdent (srcName ++ dstName.appendBefore "to")) (x : $srcIdent) : $dstIdent := $(mkIdent (`Nat ++ dstName.appendBefore "to")) x.toNat
        )
  return ⟨mkNullNode cmds⟩
declare_missing_uint_conversions
================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/BasicAux.lean ================================================
import Hax.MissingLean.Init.Prelude

-- Basic `UInt128` projections and constructors (mirrors core's BasicAux layer).
def UInt128.toFin (x : UInt128) : Fin UInt128.size := x.toBitVec.toFin
def UInt128.ofNat (n : @& Nat) : UInt128 := ⟨BitVec.ofNat 128 n⟩
-- Saturating constructor: clamps to `size - 1` instead of wrapping.
def UInt128.ofNatTruncate (n : Nat) : UInt128 :=
  if h : n < UInt128.size then UInt128.ofNatLT n h else UInt128.ofNatLT (UInt128.size - 1) (by decide)
abbrev Nat.toUInt128 := UInt128.ofNat
def UInt128.toNat (n : UInt128) : Nat := n.toBitVec.toNat
def UInt128.toUInt8 (a : UInt128) : UInt8 := a.toNat.toUInt8
def UInt128.toUInt16 (a : UInt128) :
-- NOTE(review): `UInt16 := a.toNat.toUInt16` completes the `UInt128.toUInt16`
-- signature cut at the previous chunk boundary.
UInt16 := a.toNat.toUInt16
def UInt128.toUInt32 (a : UInt128) : UInt32 := a.toNat.toUInt32
def UInt128.toUInt64 (a : UInt128) : UInt64 := a.toNat.toUInt64
def UInt128.toUSize (a : UInt128) : USize := a.toNat.toUSize
-- Widening conversions into `UInt128` (value-preserving: every source fits).
def UInt8.toUInt128 (a : UInt8) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩
def UInt16.toUInt128 (a : UInt16) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩
def UInt32.toUInt128 (a : UInt32) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩
def UInt64.toUInt128 (a : UInt64) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩
def USize.toUInt128 (a : USize) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩
instance UInt128.instOfNat (n : Nat) : OfNat UInt128 n := ⟨UInt128.ofNat n⟩
================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Lemmas.lean ================================================
import Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128
import Hax.MissingLean.Init.GrindInstances.Ring.UInt

-- Make the `le_ofNat_iff` lemmas available to `grind` as rewrite rules.
attribute [grind =_] UInt8.le_ofNat_iff
attribute [grind =_] UInt16.le_ofNat_iff
attribute [grind =_] UInt32.le_ofNat_iff
attribute [grind =_] UInt64.le_ofNat_iff
attribute [grind =_] UInt128.le_ofNat_iff

-- `UInt64` order lemmas under a no-overflow hypothesis on `toNat`.
theorem UInt64.le_self_add {a b : UInt64} (h : a.toNat + b.toNat < 2 ^ 64) : a ≤ a + b := by
  rw [le_iff_toNat_le, UInt64.toNat_add_of_lt h]
  exact Nat.le_add_right a.toNat b.toNat
theorem UInt64.succ_le_of_lt {a b : UInt64} (h : a < b) : a + 1 ≤ b := by grind
theorem UInt64.add_le_of_le {a b c : UInt64} (habc : a + b ≤ c) (hab : a.toNat + b.toNat < 2 ^ 64): a ≤ c := by
  rw [UInt64.le_iff_toNat_le, UInt64.toNat_add_of_lt hab] at *
  omega

-- Command macro: per-type lemmas used by loop-termination proofs
-- (`sub_succ_lt_self` shows the usual decreasing measure for counting loops).
-- `tyRw` builds a rewrite-rule syntax node for a name qualified by the type.
open Lean in
set_option hygiene false in
macro "additional_uint_lemmas" typeName:ident _width:term : command => do
  let tyDot (n : Name) := mkIdent (typeName.getId ++ n)
  let tyRw (n : Name) : TSyntax `Lean.Parser.Tactic.rwRule := .mk (Syntax.node .none ``Lean.Parser.Tactic.rwRule #[mkNullNode, tyDot n])
  `(
    namespace $typeName
    theorem ofNat_eq_of_toNat_eq {a : Nat} {b : $typeName} (h : b.toNat = a) : ofNat a = b := by
      subst_vars; exact $(mkIdent (typeName.getId ++ `ofNat_toNat))
    theorem sub_add_eq {a b c : $typeName} : a - (b + c) = a - b - c := by grind
    theorem sub_succ_lt_self (a b : $typeName) (h : a < b) : (b - (a + 1)).toNat < (b - a).toNat := by
      rw [sub_add_eq]
      rw [$(tyRw `toNat_sub_of_le)]
      try simp only [USize.toNat_one]
      apply Nat.sub_one_lt_of_lt
      · change (0 : $typeName).toNat < (b - a).toNat
        rw [← lt_iff_toNat_lt]
        grind
      · grind
    end $typeName
  )
additional_uint_lemmas UInt8 8
additional_uint_lemmas UInt16 16
additional_uint_lemmas UInt32 32
additional_uint_lemmas UInt64 64
additional_uint_lemmas UInt128 128
================================================ FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Lemmas_UInt128.lean ================================================
import Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.UInt

-- Adapted from Init/Data/UInt/Lemmas.lean from the Lean v4.29.0-rc1 source code
set_option autoImplicit true
open Std

declare_uint_theorems UInt128 128

@[simp] theorem UInt128.toNat_toUInt64 (x : UInt128) : x.toUInt64.toNat = x.toNat % 2 ^ 64 := (rfl)
theorem UInt128.ofNat_mod_size : ofNat (x % 2 ^ 128) = ofNat x := by simp [ofNat, BitVec.ofNat, Fin.ofNat]
theorem UInt128.ofNat_size : ofNat size = 0 := by decide
-- Comparisons against `ofNat m` reduce to `Nat` comparisons when `m` is in range.
theorem UInt128.lt_ofNat_iff {n : UInt128} {m : Nat} (h : m < size) : n < ofNat m ↔ n.toNat < m := by rw [lt_iff_toNat_lt, toNat_ofNat_of_lt' h]
theorem UInt128.ofNat_lt_iff {n : UInt128} {m : Nat} (h : m < size) : ofNat m < n ↔ m < n.toNat := by rw [lt_iff_toNat_lt, toNat_ofNat_of_lt' h]
theorem UInt128.le_ofNat_iff {n : UInt128} {m : Nat} (h : m < size) : n ≤ ofNat m ↔ n.toNat ≤ m := by rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]
theorem UInt128.ofNat_le_iff {n : UInt128} {m : Nat} (h : m < size) : ofNat m ≤ n ↔ m ≤ n.toNat := by rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]
protected theorem UInt128.mod_eq_of_lt {a b : UInt128} (h : a < b) : a % b = a := UInt128.toNat_inj.1 (Nat.mod_eq_of_lt h)
@[simp] theorem
-- NOTE(review): completes the `@[simp] theorem` header cut at the previous chunk
-- boundary.
UInt128.toNat_lt (n : UInt128) : n.toNat < 2 ^ 128 := n.toFin.isLt

-- Size relations between `USize` and `UInt128`; proved by case analysis on the
-- platform word size via `USize.size_eq`.
theorem USize.size_le_uint128Size : USize.size ≤ UInt128.size := by cases USize.size_eq <;> simp_all +decide
theorem USize.size_dvd_uInt128Size : USize.size ∣ UInt128.size := by cases USize.size_eq <;> simp_all +decide
@[simp] theorem mod_uInt128Size_uSizeSize (n : Nat) : n % UInt128.size % USize.size = n % USize.size := Nat.mod_mod_of_dvd _ USize.size_dvd_uInt128Size
-- The literal is `UInt128.size - 1` (2^128 - 1).
@[simp] theorem UInt128.size_sub_one_mod_uSizeSize : 340282366920938463463374607431768211455 % USize.size = USize.size - 1 := by cases USize.size_eq <;> simp_all +decide

-- `toNat` of any type at most 128 bits wide is unchanged by reduction mod `UInt128.size`.
@[simp] theorem UInt8.toNat_mod_uInt128Size (n : UInt8) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))
@[simp] theorem UInt16.toNat_mod_uInt128Size (n : UInt16) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))
@[simp] theorem UInt32.toNat_mod_uInt128Size (n : UInt32) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))
@[simp] theorem UInt64.toNat_mod_uInt128Size (n : UInt64) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))
@[simp] theorem UInt128.toNat_mod_size (n : UInt128) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt n.toNat_lt
@[simp] theorem USize.toNat_mod_uInt128Size (n : USize) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))
-- @[simp] theorem UInt8.toUInt128_mod_256 (n : UInt8) : n.toUInt128 % 256 = n.toUInt128 := UInt128.toNat.inj (by simp)
-- @[simp] theorem UInt16.toUInt128_mod_65536 (n : UInt16) : n.toUInt128 % 65536 = n.toUInt128 := UInt128.toNat.inj (by simp)
-- @[simp] theorem UInt32.toUInt128_mod_4294967296 (n : UInt32) : n.toUInt128 % 4294967296 = n.toUInt128 := UInt128.toNat.inj (by simp)

-- Definitional round-trips through `Fin`/`BitVec`.
@[simp] theorem Fin.mk_uInt128ToNat (n : UInt128) : Fin.mk n.toNat (by exact n.toFin.isLt) = n.toFin := (rfl)
@[simp] theorem BitVec.ofNatLT_uInt128ToNat (n : UInt128) : BitVec.ofNatLT n.toNat (by exact n.toFin.isLt) = n.toBitVec := (rfl)
@[simp] theorem BitVec.ofFin_uInt128ToFin (n : UInt128) : BitVec.ofFin n.toFin = n.toBitVec := (rfl)
-- @[simp] theorem UInt8.toFin_toUInt128 (n : UInt8) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)
-- @[simp] theorem UInt16.toFin_toUInt128 (n : UInt16) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)
-- @[simp] theorem UInt32.toFin_toUInt128 (n : UInt32) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)
-- @[simp] theorem USize.toFin_toUInt128 (n : USize) : n.toUInt128.toFin = n.toFin.castLE size_le_uint128Size := (rfl)

-- Truncating conversions out of `UInt128` are `BitVec.setWidth` on the bit level.
@[simp, int_toBitVec] theorem UInt128.toBitVec_toUInt8 (n : UInt128) : n.toUInt8.toBitVec = n.toBitVec.setWidth 8 := (rfl)
@[simp, int_toBitVec] theorem UInt128.toBitVec_toUInt16 (n : UInt128) : n.toUInt16.toBitVec = n.toBitVec.setWidth 16 := (rfl)
@[simp, int_toBitVec] theorem UInt128.toBitVec_toUInt32 (n : UInt128) : n.toUInt32.toBitVec = n.toBitVec.setWidth 32 := (rfl)
-- @[simp, int_toBitVec] theorem UInt8.toBitVec_toUInt128 (n : UInt8) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)
-- @[simp, int_toBitVec] theorem UInt16.toBitVec_toUInt128 (n : UInt16) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)
-- @[simp, int_toBitVec] theorem UInt32.toBitVec_toUInt128 (n : UInt32) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)
-- @[simp, int_toBitVec] theorem USize.toBitVec_toUInt128 (n : USize) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 :=
-- BitVec.eq_of_toNat_eq (by simp)
@[simp, int_toBitVec] theorem UInt128.toBitVec_toUSize (n : UInt128) : n.toUSize.toBitVec = n.toBitVec.setWidth System.Platform.numBits := BitVec.eq_of_toNat_eq (by simp)
-- @[simp] theorem UInt128.ofNatLT_uInt8ToNat (n : UInt8) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofNatLT_uInt16ToNat (n : UInt16) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofNatLT_uInt32ToNat (n : UInt32) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofNatLT_toNat (n : UInt128) : UInt128.ofNatLT n.toNat n.toNat_lt = n := (rfl)
-- @[simp] theorem UInt128.ofNatLT_uSizeToNat (n : USize) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)
-- Narrowing `ofNatLT` of a `UInt128.toNat` agrees with the truncating conversion
-- (the `h` bound makes the `%` in `toUIntX` a no-op).
theorem UInt8.ofNatLT_uInt128ToNat (n : UInt128) (h) : UInt8.ofNatLT n.toNat h = n.toUInt8 := UInt8.toNat.inj (by simp [Nat.mod_eq_of_lt h])
theorem UInt16.ofNatLT_uInt128ToNat (n : UInt128) (h) : UInt16.ofNatLT n.toNat h = n.toUInt16 := UInt16.toNat.inj (by simp [Nat.mod_eq_of_lt h])
theorem UInt32.ofNatLT_uInt128ToNat (n : UInt128) (h) : UInt32.ofNatLT n.toNat h = n.toUInt32 := UInt32.toNat.inj (by simp [Nat.mod_eq_of_lt h])
theorem USize.ofNatLT_uInt128ToNat (n : UInt128) (h) : USize.ofNatLT n.toNat h = n.toUSize := USize.toNat.inj (by simp [Nat.mod_eq_of_lt h])
@[simp] theorem UInt128.ofFin_toFin (n : UInt128) : UInt128.ofFin n.toFin = n := (rfl)
@[simp] theorem UInt128.toFin_ofFin (n : Fin UInt128.size) : (UInt128.ofFin n).toFin = n := (rfl)
-- @[simp] theorem UInt128.ofFin_uint8ToFin (n : UInt8) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofFin_uint16ToFin (n : UInt16) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofFin_uint32ToFin (n : UInt32) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)
@[simp] theorem Nat.toUInt128_eq {n : Nat} : n.toUInt128 = UInt128.ofNat n := (rfl)
@[simp] theorem UInt8.ofBitVec_uInt128ToBitVec (n : UInt128) : UInt8.ofBitVec (n.toBitVec.setWidth 8) = n.toUInt8 := (rfl)
@[simp] theorem UInt16.ofBitVec_uInt128ToBitVec (n : UInt128) : UInt16.ofBitVec (n.toBitVec.setWidth 16) = n.toUInt16 := (rfl)
@[simp] theorem UInt32.ofBitVec_uInt128ToBitVec (n : UInt128) :
-- NOTE(review): completes the `UInt32.ofBitVec_uInt128ToBitVec` statement cut at
-- the previous chunk boundary.
UInt32.ofBitVec (n.toBitVec.setWidth 32) = n.toUInt32 := (rfl)
-- @[simp] theorem UInt128.ofBitVec_uInt8ToBitVec (n : UInt8) :
-- UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofBitVec_uInt16ToBitVec (n : UInt16) :
-- UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofBitVec_uInt32ToBitVec (n : UInt32) :
-- UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)
-- @[simp] theorem UInt128.ofBitVec_uSizeToBitVec (n : USize) :
-- UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 :=
-- UInt128.toNat.inj (by simp)
@[simp] theorem USize.ofBitVec_uInt128ToBitVec (n : UInt128) : USize.ofBitVec (n.toBitVec.setWidth System.Platform.numBits) = n.toUSize := USize.toNat.inj (by simp)
-- `ofNat` of a `UInt128.toNat` is the truncating conversion (definitional for the
-- fixed narrow widths).
@[simp] theorem UInt8.ofNat_uInt128ToNat (n : UInt128) : UInt8.ofNat n.toNat = n.toUInt8 := (rfl)
@[simp] theorem UInt16.ofNat_uInt128ToNat (n : UInt128) : UInt16.ofNat n.toNat = n.toUInt16 := (rfl)
@[simp] theorem UInt32.ofNat_uInt128ToNat (n : UInt128) : UInt32.ofNat n.toNat = n.toUInt32 := (rfl)
-- @[simp] theorem UInt128.ofNat_uInt8ToNat (n : UInt8) : UInt128.ofNat n.toNat = n.toUInt128 :=
-- UInt128.toNat.inj (by simp)
-- @[simp] theorem UInt128.ofNat_uInt16ToNat (n : UInt16) : UInt128.ofNat n.toNat = n.toUInt128 :=
-- UInt128.toNat.inj (by simp)
-- @[simp] theorem UInt128.ofNat_uInt32ToNat (n : UInt32) : UInt128.ofNat n.toNat = n.toUInt128 :=
-- UInt128.toNat.inj (by simp)
-- @[simp] theorem UInt128.ofNat_uSizeToNat (n : USize) : UInt128.ofNat n.toNat = n.toUInt128 :=
-- UInt128.toNat.inj (by simp)
@[simp] theorem USize.ofNat_uInt128ToNat (n : UInt128) : USize.ofNat n.toNat = n.toUSize := USize.toNat.inj (by simp)
-- `ofNatLT`/`ofNatTruncate` collapse to `ofNat` when the argument is in range.
theorem UInt128.ofNatLT_eq_ofNat (n : Nat) {h} : UInt128.ofNatLT n h = UInt128.ofNat n := UInt128.toNat.inj (by simp [Nat.mod_eq_of_lt h])
theorem UInt128.ofNatTruncate_eq_ofNat (n : Nat) (hn : n < UInt128.size) : UInt128.ofNatTruncate n = UInt128.ofNat n := by simp [ofNatTruncate, hn, UInt128.ofNatLT_eq_ofNat]
-- @[simp] theorem UInt128.ofNatTruncate_uInt8ToNat (n : UInt8) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by
-- rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt8ToNat]
-- exact Nat.lt_trans (n.toNat_lt) (by decide)
-- @[simp] theorem UInt128.ofNatTruncate_uInt16ToNat (n : UInt16) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by
-- rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt16ToNat]
-- exact Nat.lt_trans (n.toNat_lt) (by decide)
-- @[simp] theorem UInt128.ofNatTruncate_uInt32ToNat (n : UInt32) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by
-- rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt32ToNat]
-- exact Nat.lt_trans (n.toNat_lt) (by decide)
-- @[simp] theorem UInt128.ofNatTruncate_uInt64ToNat (n : UInt64) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by
-- rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt64ToNat]
-- exact Nat.lt_trans n.toNat_lt (by norm_num [UInt64.size, UInt128.size])
@[simp] theorem UInt128.ofNatTruncate_toNat (n : UInt128) : UInt128.ofNatTruncate n.toNat = n := by rw [UInt128.ofNatTruncate_eq_ofNat] <;> simp [n.toNat_lt]
-- Round-trip lemmas through `UInt128` for the narrower widths; widening ones are
-- commented out (presumably awaiting prerequisites — confirm against upstream).
-- @[simp] theorem UInt8.toUInt8_toUInt128 (n : UInt8) : n.toUInt128.toUInt8 = n :=
-- UInt8.toNat.inj (by simp)
-- @[simp] theorem UInt8.toUInt16_toUInt128 (n : UInt8) : n.toUInt128.toUInt16 = n.toUInt16 :=
-- UInt16.toNat.inj (by simp)
-- @[simp] theorem UInt8.toUInt32_toUInt128 (n : UInt8) : n.toUInt128.toUInt32 = n.toUInt32 :=
-- UInt32.toNat.inj (by simp)
-- @[simp] theorem UInt8.toUInt64_toUInt128 (n : UInt8) : n.toUInt128.toUInt64 = n.toUInt64 :=
-- UInt64.toNat.inj (by simp)
@[simp] theorem UInt8.toUInt128_toUInt16 (n : UInt8) : n.toUInt16.toUInt128 = n.toUInt128 := (rfl)
@[simp] theorem UInt8.toUInt128_toUInt32 (n : UInt8) : n.toUInt32.toUInt128 = n.toUInt128 := (rfl)
@[simp] theorem UInt8.toUInt128_toUInt64 (n : UInt8) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)
-- @[simp] theorem UInt16.toUInt8_toUInt128 (n : UInt16) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)
-- @[simp] theorem UInt16.toUInt16_toUInt128 (n : UInt16) : n.toUInt128.toUInt16 = n :=
-- UInt16.toNat.inj (by simp)
-- @[simp] theorem UInt16.toUInt32_toUInt128 (n : UInt16) : n.toUInt128.toUInt32 = n.toUInt32 :=
-- UInt32.toNat.inj (by simp)
-- @[simp] theorem UInt16.toUInt64_toUInt128 (n : UInt16) : n.toUInt128.toUInt64 = n.toUInt64 :=
-- UInt64.toNat.inj (by simp)
-- @[simp] theorem UInt16.toUInt128_toUInt8 (n : UInt16) : n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)
@[simp] theorem UInt16.toUInt128_toUInt32 (n : UInt16) : n.toUInt32.toUInt128 = n.toUInt128 := (rfl)
@[simp] theorem UInt16.toUInt128_toUInt64 (n : UInt16) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)
-- @[simp] theorem UInt32.toUInt8_toUInt128 (n : UInt32) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)
-- @[simp] theorem UInt32.toUInt16_toUInt128 (n : UInt32) : n.toUInt128.toUInt16 = n.toUInt16 := (rfl)
-- @[simp] theorem UInt32.toUInt32_toUInt128 (n : UInt32) : n.toUInt128.toUInt32 = n :=
-- UInt32.toNat.inj (by simp)
-- @[simp] theorem UInt32.toUInt64_toUInt128 (n : UInt32) : n.toUInt128.toUInt64 = n.toUInt64 :=
-- UInt64.toNat.inj (by simp)
-- @[simp] theorem UInt32.toUInt128_toUInt8 (n : UInt32) : n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)
-- @[simp] theorem UInt32.toUInt128_toUInt16 (n : UInt32) : n.toUInt16.toUInt128 = n.toUInt128 % 65536 := (rfl)
@[simp] theorem UInt32.toUInt128_toUInt64 (n : UInt32) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)
-- @[simp] theorem UInt64.toUInt8_toUInt128 (n : UInt64) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)
-- @[simp] theorem UInt64.toUInt16_toUInt128 (n : UInt64) : n.toUInt128.toUInt16 = n.toUInt16 := (rfl)
-- @[simp] theorem UInt64.toUInt32_toUInt128 (n : UInt64) : n.toUInt128.toUInt32 = n.toUInt32 := (rfl)
-- @[simp] theorem UInt64.toUInt64_toUInt128 (n : UInt64) : n.toUInt128.toUInt64 = n :=
-- UInt64.toNat.inj (by simp)
-- @[simp] theorem UInt64.toUInt128_toUInt8 (n : UInt64) : n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)
-- @[simp]
-- NOTE(review): this first statement continues a line whose `-- @[simp]` prefix
-- falls at the previous chunk boundary — in the upstream file it is presumably a
-- commented-out lemma like its UInt64 siblings; verify against the full file.
theorem UInt64.toUInt128_toUInt16 (n : UInt64) : n.toUInt16.toUInt128 = n.toUInt128 % 65536 := (rfl)
-- @[simp] theorem UInt64.toUInt128_toUInt32 (n : UInt64) : n.toUInt32.toUInt128 = n.toUInt128 % 4294967296 := (rfl)
-- Commuting two narrowing conversions out of `UInt128`: going through a wider
-- intermediate is lossless; going through a narrower one inserts a `% 2^w`.
@[simp] theorem UInt128.toUInt8_toUInt16 (n : UInt128) : n.toUInt16.toUInt8 = n.toUInt8 := UInt8.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt8_toUInt32 (n : UInt128) : n.toUInt32.toUInt8 = n.toUInt8 := UInt8.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt8_toUInt64 (n : UInt128) : n.toUInt64.toUInt8 = n.toUInt8 := UInt8.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt16_toUInt8 (n : UInt128) : n.toUInt8.toUInt16 = n.toUInt16 % 256 := UInt16.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt16_toUInt32 (n : UInt128) : n.toUInt32.toUInt16 = n.toUInt16 := UInt16.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt16_toUInt64 (n : UInt128) : n.toUInt64.toUInt16 = n.toUInt16 := UInt16.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt32_toUInt8 (n : UInt128) : n.toUInt8.toUInt32 = n.toUInt32 % 256 := UInt32.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt32_toUInt16 (n : UInt128) : n.toUInt16.toUInt32 = n.toUInt32 % 65536 := UInt32.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt32_toUInt64 (n : UInt128) : n.toUInt64.toUInt32 = n.toUInt32 := UInt32.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt64_toUInt8 (n : UInt128) : n.toUInt8.toUInt64 = n.toUInt64 % 256 := UInt64.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt64_toUInt16 (n : UInt128) : n.toUInt16.toUInt64 = n.toUInt64 % 65536 := UInt64.toNat.inj (by simp)
@[simp] theorem UInt128.toUInt64_toUInt32 (n : UInt128) : n.toUInt32.toUInt64 = n.toUInt64 % 4294967296 := UInt64.toNat.inj (by simp)
-- @[simp] theorem UInt128.toUInt128_toUInt8 (n : UInt128) : n.toUInt8.toUInt128 = n % 256 := (rfl)
-- @[simp] theorem UInt128.toUInt128_toUInt16 (n : UInt128) : n.toUInt16.toUInt128 = n % 65536 := (rfl)
-- @[simp] theorem UInt128.toUInt128_toUInt32 (n : UInt128) : n.toUInt32.toUInt128 = n % 4294967296 := (rfl)
-- @[simp] theorem UInt128.toUInt128_toUInt64 (n : UInt128) : n.toUInt64.toUInt128 = n % 18446744073709551616 :=
-- UInt128.toNat.inj (by simp)
@[simp] theorem UInt128.toNat_ofFin (x : Fin UInt128.size) : (UInt128.ofFin x).toNat = x.val := (rfl)
-- Projections of the saturating constructor `ofNatTruncate`: identity below
-- `size`, clamped to `size - 1` otherwise.
theorem UInt128.toNat_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) : (UInt128.ofNatTruncate n).toNat = n := by rw [UInt128.ofNatTruncate, dif_pos hn, toNat_ofNatLT]
theorem UInt128.toNat_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toNat = UInt128.size - 1 := by rw [ofNatTruncate, dif_neg (by omega), toNat_ofNatLT]
theorem UInt128.toFin_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) : (UInt128.ofNatTruncate n).toFin = ⟨n, hn⟩ := Fin.val_inj.1 (by simp [toNat_ofNatTruncate_of_lt hn])
theorem UInt128.toFin_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toFin = ⟨UInt128.size - 1, by decide⟩ := Fin.val_inj.1 (by simp [toNat_ofNatTruncate_of_le hn])
theorem UInt128.toBitVec_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) : (UInt128.ofNatTruncate n).toBitVec = BitVec.ofNatLT n hn := BitVec.eq_of_toNat_eq (by simp [toNat_ofNatTruncate_of_lt hn])
theorem UInt128.toBitVec_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toBitVec = BitVec.ofNatLT (UInt128.size - 1) (by decide) := BitVec.eq_of_toNat_eq (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem UInt128.toUInt8_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :
-- (UInt128.ofNatTruncate n).toUInt8 = UInt8.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt8_ofNatLT]
theorem UInt128.toUInt8_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toUInt8 = UInt8.ofNatLT (UInt8.size - 1) (by decide) := UInt8.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem UInt128.toUInt16_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :
-- (UInt128.ofNatTruncate n).toUInt16 = UInt16.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt16_ofNatLT]
theorem UInt128.toUInt16_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toUInt16 = UInt16.ofNatLT (UInt16.size - 1) (by decide) := UInt16.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem UInt128.toUInt32_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :
-- (UInt128.ofNatTruncate n).toUInt32 = UInt32.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt32_ofNatLT]
theorem UInt128.toUInt32_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toUInt32 = UInt32.ofNatLT (UInt32.size - 1) (by decide) := UInt32.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem UInt128.toUSize_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :
-- (UInt128.ofNatTruncate n).toUSize = USize.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUSize_ofNatLT]
theorem UInt128.toUSize_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) : (UInt128.ofNatTruncate n).toUSize = USize.ofNatLT (USize.size - 1) (by cases USize.size_eq <;> simp_all) := USize.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem UInt16.toUInt128_ofNatLT {n : Nat} (h) :
-- (UInt16.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h (by decide)) := (rfl)
-- theorem UInt16.toUInt128_ofFin {n} :
-- (UInt16.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt (by decide)) := (rfl)
-- @[simp] theorem UInt16.toUInt128_ofBitVec {b} : (UInt16.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) := (rfl)
-- theorem UInt16.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt16.size) :
-- (UInt16.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn (by decide)) :=
-- UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])
-- theorem UInt16.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : UInt16.size ≤ n) :
-- (UInt16.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (UInt16.size - 1) (by decide) :=
-- UInt128.toNat.inj (by simp
-- NOTE(review): this bracket completes a commented-out proof cut at the previous
-- chunk boundary; reproduced unchanged.
[toNat_ofNatTruncate_of_le hn])
-- theorem UInt32.toUInt128_ofNatLT {n : Nat} (h) :
-- (UInt32.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h (by decide)) := (rfl)
-- theorem UInt32.toUInt128_ofFin {n} :
-- (UInt32.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt (by decide)) := (rfl)
-- @[simp] theorem UInt32.toUInt128_ofBitVec {b} : (UInt32.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) := (rfl)
-- theorem UInt32.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt32.size) :
-- (UInt32.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn (by decide)) :=
-- UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])
-- theorem UInt32.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : UInt32.size ≤ n) :
-- (UInt32.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (UInt32.size - 1) (by decide) :=
-- UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- theorem USize.toUInt128_ofNatLT {n : Nat} (h) :
-- (USize.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h size_le_uint128Size) := (rfl)
-- theorem USize.toUInt128_ofFin {n} :
-- (USize.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt size_le_uint128Size) := (rfl)
-- @[simp] theorem USize.toUInt128_ofBitVec {b} : (USize.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) :=
-- UInt128.toBitVec_inj.1 (by simp)
-- theorem USize.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < USize.size) :
-- (USize.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn size_le_uint128Size) :=
-- UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])
-- theorem USize.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : USize.size ≤ n) :
-- (USize.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (USize.size - 1) (by cases USize.size_eq <;> simp_all +decide) :=
-- UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])
-- @[simp] theorem UInt8.toUInt128_ofNat' {n : Nat} (hn : n < UInt8.size) : (UInt8.ofNat n).toUInt128 = UInt128.ofNat n := by
-- rw [← UInt8.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]
-- @[simp] theorem UInt16.toUInt128_ofNat' {n : Nat} (hn : n < UInt16.size) : (UInt16.ofNat n).toUInt128 = UInt128.ofNat n := by
-- rw [← UInt16.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]
-- @[simp] theorem UInt32.toUInt128_ofNat' {n : Nat} (hn : n < UInt32.size) : (UInt32.ofNat n).toUInt128 = UInt128.ofNat n := by
-- rw [← UInt32.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]
-- @[simp] theorem USize.toUInt128_ofNat' {n : Nat} (hn : n < USize.size) : (USize.ofNat n).toUInt128 = UInt128.ofNat n := by
-- rw [← USize.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]
-- @[simp] theorem UInt8.toUInt128_ofNat {n : Nat} (hn : n < 256) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
-- UInt8.toUInt128_ofNat' hn
-- @[simp] theorem UInt16.toUInt128_ofNat {n : Nat} (hn : n < 65536) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
-- UInt16.toUInt128_ofNat' hn
-- @[simp] theorem UInt32.toUInt128_ofNat {n : Nat} (hn : n < 4294967296) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
-- UInt32.toUInt128_ofNat' hn
-- @[simp] theorem USize.toUInt128_ofNat {n : Nat} (hn : n < 4294967296) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=
-- USize.toUInt128_ofNat' (Nat.lt_of_lt_of_le hn UInt32.size_le_usizeSize)

-- Normalization simp set: fold the various `UInt128` constructors into each other.
@[simp] theorem UInt128.ofNatLT_finVal (n : Fin UInt128.size) : UInt128.ofNatLT n.val n.isLt = UInt128.ofFin n := (rfl)
@[simp] theorem UInt128.ofNatLT_bitVecToNat (n : BitVec 128) : UInt128.ofNatLT n.toNat n.isLt = UInt128.ofBitVec n := (rfl)
@[simp] theorem UInt128.ofNat_finVal (n : Fin UInt128.size) : UInt128.ofNat n.val = UInt128.ofFin n := by rw [← ofNatLT_eq_ofNat (h := n.isLt), ofNatLT_finVal]
@[simp] theorem UInt128.ofNat_bitVecToNat (n : BitVec 128) : UInt128.ofNat n.toNat = UInt128.ofBitVec n := by rw [← ofNatLT_eq_ofNat (h := n.isLt), ofNatLT_bitVecToNat]
@[simp] theorem UInt128.ofNatTruncate_finVal (n : Fin UInt128.size) : UInt128.ofNatTruncate n.val = UInt128.ofFin n := by rw [ofNatTruncate_eq_ofNat _ n.isLt, UInt128.ofNat_finVal]
@[simp] theorem UInt128.ofNatTruncate_bitVecToNat (n : BitVec 128) : UInt128.ofNatTruncate n.toNat = UInt128.ofBitVec n := by rw [ofNatTruncate_eq_ofNat _ n.isLt, ofNat_bitVecToNat]
@[simp] theorem UInt128.ofFin_mk {n : Nat} (hn) : UInt128.ofFin (Fin.mk n hn) = UInt128.ofNatLT n hn := (rfl)
@[simp] theorem UInt128.ofFin_bitVecToFin (n : BitVec 128) : UInt128.ofFin n.toFin = UInt128.ofBitVec n := (rfl)
@[simp] theorem UInt128.ofBitVec_ofNatLT {n : Nat} (hn) : UInt128.ofBitVec (BitVec.ofNatLT n hn) = UInt128.ofNatLT n hn := (rfl)
@[simp] theorem UInt128.ofBitVec_ofFin (n) : UInt128.ofBitVec (BitVec.ofFin n) = UInt128.ofFin n := (rfl)
@[simp] theorem BitVec.ofNat_uInt128ToNat (n : UInt128) : BitVec.ofNat 128 n.toNat = n.toBitVec := BitVec.eq_of_toNat_eq (by simp)

-- Division commutes with narrowing when both operands fit in the narrower type.
theorem UInt128.toUInt8_div (a b : UInt128) (ha : a < 256) (hb : b < 256) : (a / b).toUInt8 = a.toUInt8 / b.toUInt8 := UInt8.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)
theorem UInt128.toUInt16_div (a b : UInt128) (ha : a < 65536) (hb : b < 65536) : (a / b).toUInt16 = a.toUInt16 / b.toUInt16 := UInt16.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)
theorem UInt128.toUInt32_div (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a / b).toUInt32 = a.toUInt32 / b.toUInt32 := UInt32.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)
-- The `USize` variant uses the 32-bit bound so it holds on both 32- and 64-bit
-- platforms (via `UInt32.size_le_usizeSize`).
theorem UInt128.toUSize_div (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a / b).toUSize = a.toUSize / b.toUSize := USize.toNat.inj (Nat.div_mod_eq_mod_div_mod (Nat.lt_of_lt_of_le ha UInt32.size_le_usizeSize) (Nat.lt_of_lt_of_le hb UInt32.size_le_usizeSize))
-- NOTE(review): the proof of this final theorem continues past this chunk.
theorem UInt128.toUSize_div_of_toNat_lt (a b : UInt128) (ha : a.toNat < USize.size) (hb : b.toNat < USize.size) : (a / b).toUSize = a.toUSize / b.toUSize :=
USize.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb) theorem UInt128.toUInt8_mod (a b : UInt128) (ha : a < 256) (hb : b < 256) : (a % b).toUInt8 = a.toUInt8 % b.toUInt8 := UInt8.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb) theorem UInt128.toUInt16_mod (a b : UInt128) (ha : a < 65536) (hb : b < 65536) : (a % b).toUInt16 = a.toUInt16 % b.toUInt16 := UInt16.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb) theorem UInt128.toUInt32_mod (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a % b).toUInt32 = a.toUInt32 % b.toUInt32 := UInt32.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb) theorem UInt128.toUSize_mod (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a % b).toUSize = a.toUSize % b.toUSize := USize.toNat.inj (Nat.mod_mod_eq_mod_mod_mod (Nat.lt_of_lt_of_le ha UInt32.size_le_usizeSize) (Nat.lt_of_lt_of_le hb UInt32.size_le_usizeSize)) theorem UInt128.toUSize_mod_of_toNat_lt (a b : UInt128) (ha : a.toNat < USize.size) (hb : b.toNat < USize.size) : (a % b).toUSize = a.toUSize % b.toUSize := USize.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb) theorem UInt128.toUInt8_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 256) : (a % b).toUInt8 = a.toUInt8 % b.toUInt8 := UInt8.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb) theorem UInt128.toUInt16_mod_of_dvd (a b : UInt128)(hb : b.toNat ∣ 65536) : (a % b).toUInt16 = a.toUInt16 % b.toUInt16 := UInt16.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb) theorem UInt128.toUInt32_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 4294967296) : (a % b).toUInt32 = a.toUInt32 % b.toUInt32 := UInt32.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb) theorem UInt128.toUSize_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 4294967296) : (a % b).toUSize = a.toUSize % b.toUSize := USize.toNat.inj (Nat.mod_mod_eq_mod_mod_mod_of_dvd (Nat.dvd_trans hb UInt32.size_dvd_usizeSize)) theorem UInt128.toUSize_mod_of_dvd_usizeSize (a b : 
UInt128) (hb : b.toNat ∣ USize.size) : (a % b).toUSize = a.toUSize % b.toUSize := USize.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb) @[simp] protected theorem UInt128.toFin_add (a b : UInt128) : (a + b).toFin = a.toFin + b.toFin := (rfl) @[simp] theorem UInt128.toUInt8_add (a b : UInt128) : (a + b).toUInt8 = a.toUInt8 + b.toUInt8 := UInt8.toNat.inj (by simp) @[simp] theorem UInt128.toUInt16_add (a b : UInt128) : (a + b).toUInt16 = a.toUInt16 + b.toUInt16 := UInt16.toNat.inj (by simp) @[simp] theorem UInt128.toUInt32_add (a b : UInt128) : (a + b).toUInt32 = a.toUInt32 + b.toUInt32 := UInt32.toNat.inj (by simp) @[simp] theorem UInt128.toUSize_add (a b : UInt128) : (a + b).toUSize = a.toUSize + b.toUSize := USize.toNat.inj (by simp) -- @[simp] theorem UInt8.toUInt128_add (a b : UInt8) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 256 := UInt128.toNat.inj (by simp) -- @[simp] theorem UInt16.toUInt128_add (a b : UInt16) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 65536 := UInt128.toNat.inj (by simp) -- @[simp] theorem UInt32.toUInt128_add (a b : UInt32) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 4294967296 := UInt128.toNat.inj (by simp) @[simp] protected theorem UInt128.toFin_sub (a b : UInt128) : (a - b).toFin = a.toFin - b.toFin := (rfl) @[simp] protected theorem UInt128.toFin_mul (a b : UInt128) : (a * b).toFin = a.toFin * b.toFin := (rfl) @[simp] theorem UInt128.toUInt8_mul (a b : UInt128) : (a * b).toUInt8 = a.toUInt8 * b.toUInt8 := UInt8.toNat.inj (by simp) @[simp] theorem UInt128.toUInt16_mul (a b : UInt128) : (a * b).toUInt16 = a.toUInt16 * b.toUInt16 := UInt16.toNat.inj (by simp) @[simp] theorem UInt128.toUInt32_mul (a b : UInt128) : (a * b).toUInt32 = a.toUInt32 * b.toUInt32 := UInt32.toNat.inj (by simp) @[simp] theorem UInt128.toUSize_mul (a b : UInt128) : (a * b).toUSize = a.toUSize * b.toUSize := USize.toNat.inj (by simp) -- @[simp] theorem UInt8.toUInt128_mul (a b : UInt8) : (a * b).toUInt128 = (a.toUInt128 * 
b.toUInt128) % 256 := UInt128.toNat.inj (by simp) -- @[simp] theorem UInt16.toUInt128_mul (a b : UInt16) : (a * b).toUInt128 = (a.toUInt128 * b.toUInt128) % 65536 := UInt128.toNat.inj (by simp) -- @[simp] theorem UInt32.toUInt128_mul (a b : UInt32) : (a * b).toUInt128 = (a.toUInt128 * b.toUInt128) % 4294967296 := UInt128.toNat.inj (by simp) -- theorem UInt128.toUInt8_eq (a b : UInt128) : a.toUInt8 = b.toUInt8 ↔ a % 256 = b % 256 := by -- simp [← UInt8.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt128.toUInt16_eq (a b : UInt128) : a.toUInt16 = b.toUInt16 ↔ a % 65536 = b % 65536 := by -- simp [← UInt16.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt128.toUInt32_eq (a b : UInt128) : a.toUInt32 = b.toUInt32 ↔ a % 4294967296 = b % 4294967296 := by -- simp [← UInt32.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt8.toUInt128_eq_mod_256_iff (a : UInt8) (b : UInt128) : a.toUInt128 = b % 256 ↔ a = b.toUInt8 := by -- simp [← UInt8.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt16.toUInt128_eq_mod_65536_iff (a : UInt16) (b : UInt128) : a.toUInt128 = b % 65536 ↔ a = b.toUInt16 := by -- simp [← UInt16.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt32.toUInt128_eq_mod_4294967296_iff (a : UInt32) (b : UInt128) : a.toUInt128 = b % 4294967296 ↔ a = b.toUInt32 := by -- simp [← UInt32.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt64.toUInt128_eq_mod_18446744073709551616_iff (a : UInt64) (b : UInt128) : a.toUInt128 = b % 18446744073709551616 ↔ a = b.toUInt64 := by -- simp [← UInt64.toNat_inj, ← UInt128.toNat_inj] -- theorem UInt8.toUInt128_inj {a b : UInt8} : a.toUInt128 = b.toUInt128 ↔ a = b := -- ⟨fun h => by rw [← toUInt8_toUInt128 a, h, toUInt8_toUInt128], by rintro rfl; rfl⟩ -- theorem UInt16.toUInt128_inj {a b : UInt16} : a.toUInt128 = b.toUInt128 ↔ a = b := -- ⟨fun h => by rw [← toUInt16_toUInt128 a, h, toUInt16_toUInt128], by rintro rfl; rfl⟩ -- theorem UInt32.toUInt128_inj {a b : UInt32} : a.toUInt128 = b.toUInt128 ↔ a = b := -- ⟨fun h => by rw [← toUInt32_toUInt128 a, h, toUInt32_toUInt128], 
by rintro rfl; rfl⟩ -- theorem UInt64.toUInt128_inj {a b : UInt64} : a.toUInt128 = b.toUInt128 ↔ a = b := -- ⟨fun h => by rw [← toUInt64_toUInt128 a, h, toUInt64_toUInt128], by rintro rfl; rfl⟩ theorem UInt128.lt_iff_toFin_lt {a b : UInt128} : a < b ↔ a.toFin < b.toFin := Iff.rfl theorem UInt128.le_iff_toFin_le {a b : UInt128} : a ≤ b ↔ a.toFin ≤ b.toFin := Iff.rfl -- @[simp] theorem UInt8.toUInt128_lt {a b : UInt8} : a.toUInt128 < b.toUInt128 ↔ a < b := by -- simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt] -- @[simp] theorem UInt16.toUInt128_lt {a b : UInt16} : a.toUInt128 < b.toUInt128 ↔ a < b := by -- simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt] -- @[simp] theorem UInt32.toUInt128_lt {a b : UInt32} : a.toUInt128 < b.toUInt128 ↔ a < b := by -- simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt] -- @[simp] theorem UInt64.toUInt128_lt {a b : UInt64} : a.toUInt128 < b.toUInt128 ↔ a < b := by -- simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt] -- @[simp] theorem UInt8.toUInt128_le {a b : UInt8} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by -- simp [le_iff_toNat_le, UInt128.le_iff_toNat_le] -- @[simp] theorem UInt16.toUInt128_le {a b : UInt16} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by -- simp [le_iff_toNat_le, UInt128.le_iff_toNat_le] -- @[simp] theorem UInt32.toUInt128_le {a b : UInt32} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by -- simp [le_iff_toNat_le, UInt128.le_iff_toNat_le] @[simp] theorem UInt128.toUInt8_le {a b : UInt128} : a.toUInt8 ≤ b.toUInt8 ↔ a % 256 ≤ b % 256 := by simp [le_iff_toNat_le, UInt8.le_iff_toNat_le] @[simp] theorem UInt128.toUInt16_le {a b : UInt128} : a.toUInt16 ≤ b.toUInt16 ↔ a % 65536 ≤ b % 65536 := by simp [le_iff_toNat_le, UInt16.le_iff_toNat_le] @[simp] theorem UInt128.toUInt32_le {a b : UInt128} : a.toUInt32 ≤ b.toUInt32 ↔ a % 4294967296 ≤ b % 4294967296 := by simp [le_iff_toNat_le, UInt32.le_iff_toNat_le] @[simp] theorem UInt128.toUInt8_neg (a : UInt128) : (-a).toUInt8 = -a.toUInt8 := UInt8.toBitVec_inj.1 (by simp) @[simp] theorem 
UInt128.toUInt16_neg (a : UInt128) : (-a).toUInt16 = -a.toUInt16 := UInt16.toBitVec_inj.1 (by simp) @[simp] theorem UInt128.toUInt32_neg (a : UInt128) : (-a).toUInt32 = -a.toUInt32 := UInt32.toBitVec_inj.1 (by simp) -- @[simp] theorem UInt8.toUInt128_neg (a : UInt8) : (-a).toUInt128 = -a.toUInt128 % 256 := by -- simp [UInt8.toUInt128_eq_mod_256_iff] -- @[simp] theorem UInt16.toUInt128_neg (a : UInt16) : (-a).toUInt128 = -a.toUInt128 % 65536 := by -- simp [UInt16.toUInt128_eq_mod_65536_iff] -- @[simp] theorem UInt32.toUInt128_neg (a : UInt32) : (-a).toUInt128 = -a.toUInt128 % 4294967296 := by -- simp [UInt32.toUInt128_eq_mod_4294967296_iff] @[simp] theorem UInt128.toNat_neg (a : UInt128) : (-a).toNat = (UInt128.size - a.toNat) % UInt128.size := (rfl) protected theorem UInt128.sub_eq_add_neg (a b : UInt128) : a - b = a + (-b) := UInt128.toBitVec_inj.1 (BitVec.sub_eq_add_neg _ _) protected theorem UInt128.add_neg_eq_sub {a b : UInt128} : a + -b = a - b := UInt128.toBitVec_inj.1 BitVec.add_neg_eq_sub theorem UInt128.neg_one_eq : (-1 : UInt128) = 340282366920938463463374607431768211455 := (rfl) theorem UInt128.toBitVec_zero : toBitVec 0 = 0#128 := (rfl) theorem UInt128.toBitVec_one : toBitVec 1 = 1#128 := (rfl) theorem UInt128.neg_eq_neg_one_mul (a : UInt128) : -a = -1 * a := by apply UInt128.toBitVec_inj.1 rw [UInt128.toBitVec_neg, UInt128.toBitVec_mul, UInt128.toBitVec_neg, UInt128.toBitVec_one, BitVec.neg_eq_neg_one_mul] theorem UInt128.sub_eq_add_mul (a b : UInt128) : a - b = a + 340282366920938463463374607431768211455 * b := by rw [UInt128.sub_eq_add_neg, neg_eq_neg_one_mul, neg_one_eq] theorem UInt128.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : UInt128) : UInt128.ofNat a = b ↔ a % 2 ^ 128 = b.toNat := by simp [← UInt128.toNat_inj] -- theorem UInt128.ofNat_sub {a b : Nat} (hab : b ≤ a) : UInt128.ofNat (a - b) = UInt128.ofNat a - UInt128.ofNat b := by -- rw [(Nat.sub_add_cancel hab ▸ UInt128.ofNat_add (a - b) b :), UInt128.add_sub_cancel] -- theorem UInt128.ofNatLT_sub 
{a b : Nat} (ha : a < 2 ^ 128) (hab : b ≤ a) : -- UInt128.ofNatLT (a - b) (Nat.sub_lt_of_lt ha) = UInt128.ofNatLT a ha - UInt128.ofNatLT b (Nat.lt_of_le_of_lt hab ha) := by -- simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_sub hab] -- @[simp] theorem UInt8.toUInt128_sub (a b : UInt8) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 256 := by -- simp [UInt8.toUInt128_eq_mod_256_iff] -- @[simp] theorem UInt16.toUInt128_sub (a b : UInt16) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 65536 := by -- simp [UInt16.toUInt128_eq_mod_65536_iff] -- @[simp] theorem UInt32.toUInt128_sub (a b : UInt32) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 4294967296 := by -- simp [UInt32.toUInt64_eq_mod_4294967296_iff] -- @[simp] theorem UInt64.toUInt128_sub (a b : UInt64) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 4294967296 := by -- simp [UInt64.toUInt64_eq_mod_4294967296_iff] @[simp] theorem UInt128.ofBitVec_neg (b : BitVec 128) : UInt128.ofBitVec (-b) = -UInt128.ofBitVec b := (rfl) @[simp] theorem UInt128.ofFin_div (a b : Fin UInt128.size) : UInt128.ofFin (a / b) = UInt128.ofFin a / UInt128.ofFin b := (rfl) @[simp] theorem UInt128.ofBitVec_div (a b : BitVec 128) : UInt128.ofBitVec (a / b) = UInt128.ofBitVec a / UInt128.ofBitVec b := (rfl) @[simp] theorem UInt128.ofFin_mod (a b : Fin UInt128.size) : UInt128.ofFin (a % b) = UInt128.ofFin a % UInt128.ofFin b := (rfl) @[simp] theorem UInt128.ofBitVec_mod (a b : BitVec 128) : UInt128.ofBitVec (a % b) = UInt128.ofBitVec a % UInt128.ofBitVec b := (rfl) -- theorem UInt128.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : UInt128) : UInt128.ofNat a = b ↔ a % 2 ^ 128 = b.toNat := by -- simp [← UInt128.toNat_inj] @[simp] theorem UInt128.ofNat_div {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) : UInt128.ofNat (a / b) = UInt128.ofNat a / UInt128.ofNat b := by simp [UInt128.ofNat_eq_iff_mod_eq_toNat, Nat.div_mod_eq_mod_div_mod ha hb] @[simp] theorem UInt128.ofNatLT_div {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) : 
UInt128.ofNatLT (a / b) (Nat.div_lt_of_lt ha) = UInt128.ofNatLT a ha / UInt128.ofNatLT b hb := by simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_div ha hb] @[simp] theorem UInt128.ofNat_mod {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) : UInt128.ofNat (a % b) = UInt128.ofNat a % UInt128.ofNat b := by simp [UInt128.ofNat_eq_iff_mod_eq_toNat, Nat.mod_mod_eq_mod_mod_mod ha hb] @[simp] theorem UInt128.ofNatLT_mod {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) : UInt128.ofNatLT (a % b) (Nat.mod_lt_of_lt ha) = UInt128.ofNatLT a ha % UInt128.ofNatLT b hb := by simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_mod ha hb] @[simp] theorem UInt128.ofInt_one : ofInt 1 = 1 := (rfl) @[simp] theorem UInt128.ofInt_neg_one : ofInt (-1) = -1 := (rfl) @[simp] theorem UInt128.ofNat_add (a b : Nat) : UInt128.ofNat (a + b) = UInt128.ofNat a + UInt128.ofNat b := by simp [UInt128.ofNat_eq_iff_mod_eq_toNat] @[simp] theorem UInt128.ofInt_add (x y : Int) : UInt128.ofInt (x + y) = UInt128.ofInt x + UInt128.ofInt y := by dsimp only [UInt128.ofInt] rw [Int.add_emod] have h₁ : 0 ≤ x % 2 ^ 128 := Int.emod_nonneg _ (by decide) have h₂ : 0 ≤ y % 2 ^ 128 := Int.emod_nonneg _ (by decide) have h₃ : 0 ≤ x % 2 ^ 128 + y % 2 ^ 128 := Int.add_nonneg h₁ h₂ rw [Int.toNat_emod h₃ (by decide), Int.toNat_add h₁ h₂] have : (2 ^ 128 : Int).toNat = 2 ^ 128 := (rfl) rw [this, UInt128.ofNat_mod_size, UInt128.ofNat_add] @[simp] theorem UInt128.ofNatLT_add {a b : Nat} (hab : a + b < 2 ^ 128) : UInt128.ofNatLT (a + b) hab = UInt128.ofNatLT a (Nat.lt_of_add_right_lt hab) + UInt128.ofNatLT b (Nat.lt_of_add_left_lt hab) := by simp [UInt128.ofNatLT_eq_ofNat] @[simp] theorem UInt128.ofFin_add (a b : Fin UInt128.size) : UInt128.ofFin (a + b) = UInt128.ofFin a + UInt128.ofFin b := (rfl) @[simp] theorem UInt128.ofBitVec_add (a b : BitVec 128) : UInt128.ofBitVec (a + b) = UInt128.ofBitVec a + UInt128.ofBitVec b := (rfl) @[simp] theorem UInt128.ofFin_sub (a b : Fin UInt128.size) : UInt128.ofFin (a - b) = UInt128.ofFin a - 
UInt128.ofFin b := (rfl) @[simp] theorem UInt128.ofBitVec_sub (a b : BitVec 128) : UInt128.ofBitVec (a - b) = UInt128.ofBitVec a - UInt128.ofBitVec b := (rfl) @[simp] protected theorem UInt128.add_sub_cancel (a b : UInt128) : a + b - b = a := UInt128.toBitVec_inj.1 (BitVec.add_sub_cancel _ _) theorem UInt128.ofNat_sub {a b : Nat} (hab : b ≤ a) : UInt128.ofNat (a - b) = UInt128.ofNat a - UInt128.ofNat b := by rw [(Nat.sub_add_cancel hab ▸ UInt128.ofNat_add (a - b) b :), UInt128.add_sub_cancel] theorem UInt128.ofNatLT_sub {a b : Nat} (ha : a < 2 ^ 128) (hab : b ≤ a) : UInt128.ofNatLT (a - b) (Nat.sub_lt_of_lt ha) = UInt128.ofNatLT a ha - UInt128.ofNatLT b (Nat.lt_of_le_of_lt hab ha) := by simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_sub hab] @[simp] theorem UInt128.ofNat_mul (a b : Nat) : UInt128.ofNat (a * b) = UInt128.ofNat a * UInt128.ofNat b := by simp [UInt128.ofNat_eq_iff_mod_eq_toNat] @[simp] theorem UInt128.ofInt_mul (x y : Int) : ofInt (x * y) = ofInt x * ofInt y := by dsimp only [UInt128.ofInt] rw [Int.mul_emod] have h₁ : 0 ≤ x % 2 ^ 128 := Int.emod_nonneg _ (by decide) have h₂ : 0 ≤ y % 2 ^ 128 := Int.emod_nonneg _ (by decide) have h₃ : 0 ≤ (x % 2 ^ 128) * (y % 2 ^ 128) := Int.mul_nonneg h₁ h₂ rw [Int.toNat_emod h₃ (by decide), Int.toNat_mul h₁ h₂] have : (2 ^ 128 : Int).toNat = 2 ^ 128 := (rfl) rw [this, UInt128.ofNat_mod_size, UInt128.ofNat_mul] @[simp] theorem UInt128.ofNatLT_mul {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) (hab : a * b < 2 ^ 128) : UInt128.ofNatLT (a * b) hab = UInt128.ofNatLT a ha * UInt128.ofNatLT b hb := by simp [UInt128.ofNatLT_eq_ofNat] @[simp] theorem UInt128.ofFin_mul (a b : Fin UInt128.size) : UInt128.ofFin (a * b) = UInt128.ofFin a * UInt128.ofFin b := (rfl) @[simp] theorem UInt128.ofBitVec_mul (a b : BitVec 128) : UInt128.ofBitVec (a * b) = UInt128.ofBitVec a * UInt128.ofBitVec b := (rfl) theorem UInt128.ofFin_lt_iff_lt {a b : Fin UInt128.size} : UInt128.ofFin a < UInt128.ofFin b ↔ a < b := Iff.rfl theorem 
UInt128.ofFin_le_iff_le {a b : Fin UInt128.size} : UInt128.ofFin a ≤ UInt128.ofFin b ↔ a ≤ b := Iff.rfl theorem UInt128.ofBitVec_lt_iff_lt {a b : BitVec 128} : UInt128.ofBitVec a < UInt128.ofBitVec b ↔ a < b := Iff.rfl theorem UInt128.ofBitVec_le_iff_le {a b : BitVec 128} : UInt128.ofBitVec a ≤ UInt128.ofBitVec b ↔ a ≤ b := Iff.rfl theorem UInt128.ofNatLT_lt_iff_lt {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) : UInt128.ofNatLT a ha < UInt128.ofNatLT b hb ↔ a < b := Iff.rfl theorem UInt128.ofNatLT_le_iff_le {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) : UInt128.ofNatLT a ha ≤ UInt128.ofNatLT b hb ↔ a ≤ b := Iff.rfl theorem UInt128.ofNat_lt_iff_lt {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) : UInt128.ofNat a < UInt128.ofNat b ↔ a < b := by rw [← ofNatLT_eq_ofNat (h := ha), ← ofNatLT_eq_ofNat (h := hb), ofNatLT_lt_iff_lt] theorem UInt128.ofNat_le_iff_le {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) : UInt128.ofNat a ≤ UInt128.ofNat b ↔ a ≤ b := by rw [← ofNatLT_eq_ofNat (h := ha), ← ofNatLT_eq_ofNat (h := hb), ofNatLT_le_iff_le] theorem UInt128.toNat_one : (1 : UInt128).toNat = 1 := (rfl) -- theorem UInt128.zero_lt_one : (0 : UInt128) < 1 := by simp -- theorem UInt128.zero_ne_one : (0 : UInt128) ≠ 1 := by simp protected theorem UInt128.add_assoc (a b c : UInt128) : a + b + c = a + (b + c) := UInt128.toBitVec_inj.1 (BitVec.add_assoc _ _ _) instance : Std.Associative (α := UInt128) (· + ·) := ⟨UInt128.add_assoc⟩ protected theorem UInt128.add_comm (a b : UInt128) : a + b = b + a := UInt128.toBitVec_inj.1 (BitVec.add_comm _ _) instance : Std.Commutative (α := UInt128) (· + ·) := ⟨UInt128.add_comm⟩ @[simp] protected theorem UInt128.add_zero (a : UInt128) : a + 0 = a := UInt128.toBitVec_inj.1 (BitVec.add_zero _) @[simp] protected theorem UInt128.zero_add (a : UInt128) : 0 + a = a := UInt128.toBitVec_inj.1 (BitVec.zero_add _) instance : Std.LawfulIdentity (α := UInt128) (· + ·) 0 where left_id := UInt128.zero_add 
right_id := UInt128.add_zero @[simp] protected theorem UInt128.sub_zero (a : UInt128) : a - 0 = a := UInt128.toBitVec_inj.1 (BitVec.sub_zero _) @[simp] protected theorem UInt128.zero_sub (a : UInt128) : 0 - a = -a := UInt128.toBitVec_inj.1 (BitVec.zero_sub _) @[simp] protected theorem UInt128.sub_self (a : UInt128) : a - a = 0 := UInt128.toBitVec_inj.1 (BitVec.sub_self _) protected theorem UInt128.add_left_neg (a : UInt128) : -a + a = 0 := UInt128.toBitVec_inj.1 (BitVec.add_left_neg _) protected theorem UInt128.add_right_neg (a : UInt128) : a + -a = 0 := UInt128.toBitVec_inj.1 (BitVec.add_right_neg _) protected theorem UInt128.eq_sub_iff_add_eq {a b c : UInt128} : a = c - b ↔ a + b = c := by simpa [← UInt128.toBitVec_inj] using BitVec.eq_sub_iff_add_eq protected theorem UInt128.sub_eq_iff_eq_add {a b c : UInt128} : a - b = c ↔ a = c + b := by simpa [← UInt128.toBitVec_inj] using BitVec.sub_eq_iff_eq_add @[simp] protected theorem UInt128.neg_neg {a : UInt128} : - -a = a := UInt128.toBitVec_inj.1 BitVec.neg_neg @[simp] protected theorem UInt128.neg_inj {a b : UInt128} : -a = -b ↔ a = b := by simp [← UInt128.toBitVec_inj] @[simp] protected theorem UInt128.neg_ne_zero {a : UInt128} : -a ≠ 0 ↔ a ≠ 0 := by simp [← UInt128.toBitVec_inj] protected theorem UInt128.neg_add {a b : UInt128} : - (a + b) = -a - b := UInt128.toBitVec_inj.1 BitVec.neg_add @[simp] protected theorem UInt128.sub_neg {a b : UInt128} : a - -b = a + b := UInt128.toBitVec_inj.1 BitVec.sub_neg @[simp] protected theorem UInt128.neg_sub {a b : UInt128} : -(a - b) = b - a := by rw [UInt128.sub_eq_add_neg, UInt128.neg_add, UInt128.sub_neg, UInt128.add_comm, ← UInt128.sub_eq_add_neg] @[simp] protected theorem UInt128.ofInt_neg (x : Int) : ofInt (-x) = -ofInt x := by rw [Int.neg_eq_neg_one_mul, ofInt_mul, ofInt_neg_one, ← UInt128.neg_eq_neg_one_mul] @[simp] protected theorem UInt128.add_left_inj {a b : UInt128} (c : UInt128) : (a + c = b + c) ↔ a = b := by simp [← UInt128.toBitVec_inj] @[simp] protected theorem 
UInt128.add_right_inj {a b : UInt128} (c : UInt128) : (c + a = c + b) ↔ a = b := by simp [← UInt128.toBitVec_inj] @[simp] protected theorem UInt128.sub_left_inj {a b : UInt128} (c : UInt128) : (a - c = b - c) ↔ a = b := by simp [← UInt128.toBitVec_inj] @[simp] protected theorem UInt128.sub_right_inj {a b : UInt128} (c : UInt128) : (c - a = c - b) ↔ a = b := by simp [← UInt128.toBitVec_inj] @[simp] theorem UInt128.add_eq_right {a b : UInt128} : a + b = b ↔ a = 0 := by simp [← UInt128.toBitVec_inj] @[simp] theorem UInt128.add_eq_left {a b : UInt128} : a + b = a ↔ b = 0 := by simp [← UInt128.toBitVec_inj] @[simp] theorem UInt128.right_eq_add {a b : UInt128} : b = a + b ↔ a = 0 := by simp [← UInt128.toBitVec_inj] @[simp] theorem UInt128.left_eq_add {a b : UInt128} : a = a + b ↔ b = 0 := by simp [← UInt128.toBitVec_inj] protected theorem UInt128.mul_comm (a b : UInt128) : a * b = b * a := UInt128.toBitVec_inj.1 (BitVec.mul_comm _ _) instance : Std.Commutative (α := UInt128) (· * ·) := ⟨UInt128.mul_comm⟩ protected theorem UInt128.mul_assoc (a b c : UInt128) : a * b * c = a * (b * c) := UInt128.toBitVec_inj.1 (BitVec.mul_assoc _ _ _) instance : Std.Associative (α := UInt128) (· * ·) := ⟨UInt128.mul_assoc⟩ @[simp] theorem UInt128.mul_one (a : UInt128) : a * 1 = a := UInt128.toBitVec_inj.1 (BitVec.mul_one _) @[simp] theorem UInt128.one_mul (a : UInt128) : 1 * a = a := UInt128.toBitVec_inj.1 (BitVec.one_mul _) instance : Std.LawfulCommIdentity (α := UInt128) (· * ·) 1 where right_id := UInt128.mul_one @[simp] theorem UInt128.mul_zero {a : UInt128} : a * 0 = 0 := UInt128.toBitVec_inj.1 BitVec.mul_zero @[simp] theorem UInt128.zero_mul {a : UInt128} : 0 * a = 0 := UInt128.toBitVec_inj.1 BitVec.zero_mul @[simp] protected theorem UInt128.pow_zero (x : UInt128) : x ^ 0 = 1 := (rfl) protected theorem UInt128.pow_succ (x : UInt128) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl) protected theorem UInt128.mul_add {a b c : UInt128} : a * (b + c) = a * b + a * c := UInt128.toBitVec_inj.1 
BitVec.mul_add protected theorem UInt128.add_mul {a b c : UInt128} : (a + b) * c = a * c + b * c := by rw [UInt128.mul_comm, UInt128.mul_add, UInt128.mul_comm a c, UInt128.mul_comm c b] -- protected theorem UInt128.mul_succ {a b : UInt128} : a * (b + 1) = a * b + a := by simp [UInt128.mul_add] -- protected theorem UInt128.succ_mul {a b : UInt128} : (a + 1) * b = a * b + b := by simp [UInt128.add_mul] protected theorem UInt128.two_mul {a : UInt128} : 2 * a = a + a := UInt128.toBitVec_inj.1 BitVec.two_mul protected theorem UInt128.mul_two {a : UInt128} : a * 2 = a + a := UInt128.toBitVec_inj.1 BitVec.mul_two protected theorem UInt128.neg_mul (a b : UInt128) : -a * b = -(a * b) := UInt128.toBitVec_inj.1 (BitVec.neg_mul _ _) protected theorem UInt128.mul_neg (a b : UInt128) : a * -b = -(a * b) := UInt128.toBitVec_inj.1 (BitVec.mul_neg _ _) protected theorem UInt128.neg_mul_neg (a b : UInt128) : -a * -b = a * b := UInt128.toBitVec_inj.1 (BitVec.neg_mul_neg _ _) protected theorem UInt128.neg_mul_comm (a b : UInt128) : -a * b = a * -b := UInt128.toBitVec_inj.1 (BitVec.neg_mul_comm _ _) protected theorem UInt128.mul_sub {a b c : UInt128} : a * (b - c) = a * b - a * c := UInt128.toBitVec_inj.1 BitVec.mul_sub protected theorem UInt128.sub_mul {a b c : UInt128} : (a - b) * c = a * c - b * c := by rw [UInt128.mul_comm, UInt128.mul_sub, UInt128.mul_comm, UInt128.mul_comm c] theorem UInt128.neg_add_mul_eq_mul_not {a b : UInt128} : -(a + a * b) = a * ~~~b := UInt128.toBitVec_inj.1 BitVec.neg_add_mul_eq_mul_not theorem UInt128.neg_mul_not_eq_add_mul {a b : UInt128} : -(a * ~~~b) = a + a * b := UInt128.toBitVec_inj.1 BitVec.neg_mul_not_eq_add_mul protected theorem UInt128.le_of_lt {a b : UInt128} : a < b → a ≤ b := by simpa [lt_iff_toNat_lt, le_iff_toNat_le] using Nat.le_of_lt protected theorem UInt128.lt_of_le_of_ne {a b : UInt128} : a ≤ b → a ≠ b → a < b := by simpa [lt_iff_toNat_lt, le_iff_toNat_le, ← UInt128.toNat_inj] using Nat.lt_of_le_of_ne protected theorem 
UInt128.lt_iff_le_and_ne {a b : UInt128} : a < b ↔ a ≤ b ∧ a ≠ b := by simpa [lt_iff_toNat_lt, le_iff_toNat_le, ← UInt128.toNat_inj] using Nat.lt_iff_le_and_ne protected theorem UInt128.div_self {a : UInt128} : a / a = if a = 0 then 0 else 1 := by simp [← UInt128.toBitVec_inj, apply_ite] -- protected theorem UInt128.pos_iff_ne_zero {a : UInt128} : 0 < a ↔ a ≠ 0 := by simp [UInt128.lt_iff_le_and_ne, Eq.comm] protected theorem UInt128.lt_of_le_of_lt {a b c : UInt128} : a ≤ b → b < c → a < c := by simpa [le_iff_toNat_le, lt_iff_toNat_lt] using Nat.lt_of_le_of_lt protected theorem UInt128.lt_of_lt_of_le {a b c : UInt128} : a < b → b ≤ c → a < c := by simpa [le_iff_toNat_le, lt_iff_toNat_lt] using Nat.lt_of_lt_of_le protected theorem UInt128.lt_or_lt_of_ne {a b : UInt128} : a ≠ b → a < b ∨ b < a := by simpa [lt_iff_toNat_lt, ← UInt128.toNat_inj] using Nat.lt_or_lt_of_ne protected theorem UInt128.lt_or_le (a b : UInt128) : a < b ∨ b ≤ a := by simp [lt_iff_toNat_lt, le_iff_toNat_le]; omega protected theorem UInt128.le_or_lt (a b : UInt128) : a ≤ b ∨ b < a := (b.lt_or_le a).symm protected theorem UInt128.le_of_eq {a b : UInt128} : a = b → a ≤ b := (· ▸ UInt128.le_rfl) protected theorem UInt128.le_iff_lt_or_eq {a b : UInt128} : a ≤ b ↔ a < b ∨ a = b := by simpa [← UInt128.toNat_inj, le_iff_toNat_le, lt_iff_toNat_lt] using Nat.le_iff_lt_or_eq protected theorem UInt128.lt_or_eq_of_le {a b : UInt128} : a ≤ b → a < b ∨ a = b := UInt128.le_iff_lt_or_eq.mp protected theorem UInt128.sub_le {a b : UInt128} (hab : b ≤ a) : a - b ≤ a := by simp [le_iff_toNat_le, UInt128.toNat_sub_of_le _ _ hab] protected theorem UInt128.sub_lt {a b : UInt128} (hb : 0 < b) (hab : b ≤ a) : a - b < a := by rw [lt_iff_toNat_lt, UInt128.toNat_sub_of_le _ _ hab] refine Nat.sub_lt ?_ (UInt128.lt_iff_toNat_lt.1 hb) exact UInt128.lt_iff_toNat_lt.1 (UInt128.lt_of_lt_of_le hb hab) theorem UInt128.lt_add_one {c : UInt128} (h : c ≠ -1) : c < c + 1 := UInt128.lt_iff_toBitVec_lt.2 (BitVec.lt_add_one (by simpa [← 
-- (review) Tail of a proof whose start lies before this chunk; left untouched.
UInt128.toBitVec_inj] using h))
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Vector/Basic.lean
================================================
-- Register `Vector.size_toArray` as a rewrite rule for `grind`.
attribute [grind =] Vector.size_toArray
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/Ring/SInt.lean
================================================
import Hax.MissingLean.Init.GrindInstances.ToInt

open Lean Grind

-- Cast instances for `Int128`, defined via `Int128.ofNat`/`Int128.ofInt`.
@[expose, instance_reducible] def Int128.natCast : NatCast Int128 where
  natCast x := Int128.ofNat x

@[expose, instance_reducible] def Int128.intCast : IntCast Int128 where
  intCast x := Int128.ofInt x

-- The cast is only a local instance here; these lemmas state how it commutes
-- with negation and with numeric literals.
attribute [local instance] Int128.intCast in
theorem Int128.intCast_neg (i : Int) : ((-i : Int) : Int128) = -(i : Int128) :=
  Int128.ofInt_neg _

attribute [local instance] Int128.intCast in
theorem Int128.intCast_ofNat (x : Nat) :
    (OfNat.ofNat (α := Int) x : Int128) = OfNat.ofNat x :=
  Int128.ofInt_eq_ofNat

-- Commutative-ring structure on `Int128`, assembled from `Int128.*` lemmas.
attribute [local instance] Int128.natCast Int128.intCast in
instance : CommRing Int128 where
  nsmul := ⟨(· * ·)⟩
  zsmul := ⟨(· * ·)⟩
  add_assoc := Int128.add_assoc
  add_comm := Int128.add_comm
  add_zero := Int128.add_zero
  neg_add_cancel := Int128.add_left_neg
  mul_assoc := Int128.mul_assoc
  mul_comm := Int128.mul_comm
  mul_one := Int128.mul_one
  one_mul := Int128.one_mul
  left_distrib _ _ _ := Int128.mul_add
  right_distrib _ _ _ := Int128.add_mul
  zero_mul _ := Int128.zero_mul
  mul_zero _ := Int128.mul_zero
  sub_eq_add_neg := Int128.sub_eq_add_neg
  pow_zero := Int128.pow_zero
  pow_succ := Int128.pow_succ
  ofNat_succ x := Int128.ofNat_add x 1
  intCast_neg := Int128.ofInt_neg
  -- NOTE(review): unlike the `UInt128` instance below, no explicit
  -- `intCast_ofNat` field is supplied here — confirm the default suffices.
  neg_zsmul i x := by
    change (-i : Int) * x = - (i * x)
    simp [Int128.intCast_neg, Int128.neg_mul]
  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (Int128.intCast_ofNat _)

-- `Int128` arithmetic is modular with modulus `2 ^ 128`.
instance : IsCharP Int128 (2 ^ 128) :=
  IsCharP.mk' _ _ (ofNat_eq_zero_iff := fun x => by
    have : OfNat.ofNat x = Int128.ofInt x := rfl
    rw [this]
    simp [Int128.ofInt_eq_iff_bmod_eq_toInt, ← Int.dvd_iff_bmod_eq_zero,
      ← Nat.dvd_iff_mod_eq_zero, Int.ofNat_dvd_right])

-- Verify we can derive the instances showing how `toInt` interacts with operations:
example : ToInt.Add Int128 (.sint 128) := inferInstance
example : ToInt.Neg Int128 (.sint 128) := inferInstance
example : ToInt.Sub Int128 (.sint 128) := inferInstance
instance : ToInt.Pow Int128 (.sint 128) := ToInt.pow_of_semiring (by simp)
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/Ring/UInt.lean
================================================
import Hax.MissingLean.Init.GrindInstances.ToInt

open Lean Grind

set_option autoImplicit true

namespace UInt128

/-- Variant of `UInt128.ofNat_mod_size` replacing `2 ^ 128` with
`340282366920938463463374607431768211456`.-/
theorem ofNat_mod_size' :
    ofNat (x % 340282366920938463463374607431768211456) = ofNat x :=
  ofNat_mod_size

-- Cast instances for `UInt128`, defined via `UInt128.ofNat`/`UInt128.ofInt`.
@[expose, instance_reducible] def natCast : NatCast UInt128 where
  natCast x := UInt128.ofNat x

@[expose, instance_reducible] def intCast : IntCast UInt128 where
  intCast x := UInt128.ofInt x

attribute [local instance] natCast intCast

theorem intCast_neg (x : Int) : ((-x : Int) : UInt128) = - (x : UInt128) := by
  simp only [Int.cast, IntCast.intCast, UInt128.ofInt_neg]

theorem intCast_ofNat (x : Nat) :
    (OfNat.ofNat (α := Int) x : UInt128) = OfNat.ofNat x := by
  -- A better proof would be welcome!
  simp only [Int.cast, IntCast.intCast]
  rw [UInt128.ofInt]
  rw [Int.toNat_emod (Int.zero_le_ofNat x) (by decide)]
  erw [Int.toNat_natCast]
  rw [Int.toNat_pow_of_nonneg (by decide)]
  simp +instances only [ofNat, BitVec.ofNat, Fin.Internal.ofNat_eq_ofNat, Fin.ofNat,
    Int.reduceToNat, Nat.dvd_refl, Nat.mod_mod_of_dvd, instOfNat]
  try rfl

end UInt128

-- Commutative-ring structure on `UInt128`, mirroring the `Int128` instance above.
attribute [local instance] UInt128.natCast UInt128.intCast in
instance : CommRing UInt128 where
  nsmul := ⟨(· * ·)⟩
  zsmul := ⟨(· * ·)⟩
  add_assoc := UInt128.add_assoc
  add_comm := UInt128.add_comm
  add_zero := UInt128.add_zero
  neg_add_cancel := UInt128.add_left_neg
  mul_assoc := UInt128.mul_assoc
  mul_comm := UInt128.mul_comm
  mul_one := UInt128.mul_one
  one_mul := UInt128.one_mul
  left_distrib _ _ _ := UInt128.mul_add
  right_distrib _ _ _ := UInt128.add_mul
  zero_mul _ := UInt128.zero_mul
  mul_zero _ := UInt128.mul_zero
  sub_eq_add_neg := UInt128.sub_eq_add_neg
  pow_zero := UInt128.pow_zero
  pow_succ := UInt128.pow_succ
  ofNat_succ x := UInt128.ofNat_add x 1
  intCast_neg := UInt128.ofInt_neg
  intCast_ofNat := UInt128.intCast_ofNat
  neg_zsmul i a := by
    change (-i : Int) * a = - (i * a)
    simp [UInt128.intCast_neg, UInt128.neg_mul]
  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (UInt128.intCast_ofNat _)

-- Characteristic `2 ^ 128`, spelled out as a decimal literal.
instance : IsCharP UInt128 340282366920938463463374607431768211456 :=
  IsCharP.mk' _ _ (ofNat_eq_zero_iff := fun x => by
    have : OfNat.ofNat x = UInt128.ofNat x := rfl
    simp [this, UInt128.ofNat_eq_iff_mod_eq_toNat])

-- Verify we can derive the instances showing how `toInt` interacts with operations:
example : ToInt.Add UInt128 (.uint 128) := inferInstance
example : ToInt.Neg UInt128 (.uint 128) := inferInstance
example : ToInt.Sub UInt128 (.uint 128) := inferInstance
instance : ToInt.Pow UInt128 (.uint 128) := ToInt.pow_of_semiring (by simp)
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/ToInt.lean
================================================
import
Hax.MissingLean.Init.Data.SInt.Lemmas_Int128
import Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128

open Lean.Grind

-- `ToInt` interpretation of `UInt128`: values embed into `Int` via `toNat`.
instance : ToInt UInt128 (.uint 128) where
  toInt x := (x.toNat : Int)
  toInt_inj x y w := private UInt128.toNat_inj.mp (Int.ofNat_inj.mp w)
  toInt_mem x := by simpa using Int.lt_toNat.mp (UInt128.toNat_lt x)

@[simp] theorem toInt_uint128 (x : UInt128) : ToInt.toInt x = (x.toNat : Int) := rfl

instance : ToInt.Zero UInt128 (.uint 128) where
  toInt_zero := by simp

instance : ToInt.OfNat UInt128 (.uint 128) where
  toInt_ofNat x := by simp; rfl

instance : ToInt.Add UInt128 (.uint 128) where
  toInt_add x y := by simp

instance : ToInt.Mul UInt128 (.uint 128) where
  toInt_mul x y := by simp

-- The `ToInt.Pow` instance is defined in `Init.GrindInstances.Ring.UInt`,
-- as it is convenient to use the ring structure.

instance : ToInt.Mod UInt128 (.uint 128) where
  toInt_mod x y := by simp

instance : ToInt.Div UInt128 (.uint 128) where
  toInt_div x y := by simp

instance : ToInt.LE UInt128 (.uint 128) where
  le_iff x y := by simpa using UInt128.le_iff_toBitVec_le

instance : ToInt.LT UInt128 (.uint 128) where
  lt_iff x y := by simpa using UInt128.lt_iff_toBitVec_lt

-- `ToInt` interpretation of `Int128`: values embed into `Int` via `toInt`.
instance : ToInt Int128 (.sint 128) where
  toInt x := x.toInt
  toInt_inj x y w := private Int128.toInt_inj.mp w
  toInt_mem x := by simp; exact ⟨Int128.le_toInt x, Int128.toInt_lt x⟩

@[simp] theorem toInt_int128 (x : Int128) : ToInt.toInt x = (x.toInt : Int) := rfl

instance : ToInt.Zero Int128 (.sint 128) where
  toInt_zero := by
    -- simp -- FIXME: succeeds, but generates a `(kernel) application type mismatch` error!
    change (0 : Int128).toInt = _
    rw [Int128.toInt_zero]

instance : ToInt.OfNat Int128 (.sint 128) where
  toInt_ofNat x := by
    rw [toInt_int128, Int128.toInt_ofNat, Int128.size, Int.bmod_eq_emod, IntInterval.wrap]
    simp
    split <;> omega

instance : ToInt.Add Int128 (.sint 128) where
  toInt_add x y := by
    simp [Int.bmod_eq_emod]
    split <;> · simp; omega

instance : ToInt.Mul Int128 (.sint 128) where
  toInt_mul x y := by
    simp [Int.bmod_eq_emod]
    split <;> · simp; omega

-- The `ToInt.Pow` instance is defined in `Init.GrindInstances.Ring.SInt`,
-- as it is convenient to use the ring structure.

instance : ToInt.LE Int128 (.sint 128) where
  le_iff x y := by simpa using Int128.le_iff_toInt_le

instance : ToInt.LT Int128 (.sint 128) where
  lt_iff x y := by simpa using Int128.lt_iff_toInt_lt
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/Prelude.lean
================================================
-- Adapted from Init/Prelude.lean from the Lean v4.29.0-rc1 source code

abbrev UInt128.size : Nat := 340282366920938463463374607431768211456

-- A 128-bit unsigned integer backed by a `BitVec 128`.
structure UInt128 where
  ofBitVec ::
  toBitVec : BitVec 128

def UInt128.ofNatLT (n : @& Nat) (h : LT.lt n UInt128.size) : UInt128 where
  toBitVec := BitVec.ofNatLT n h

-- Decidable equality, by comparing the underlying bit vectors.
def UInt128.decEq (a b : UInt128) : Decidable (Eq a b) :=
  match a, b with
  | ⟨n⟩, ⟨m⟩ =>
    dite (Eq n m) (fun h => isTrue (h ▸ rfl))
      (fun h => isFalse (fun h' => UInt128.noConfusion h' (fun h' => absurd h' h)))

instance : DecidableEq UInt128 := UInt128.decEq

instance : Inhabited UInt128 where
  default := UInt128.ofNatLT 0 (of_decide_eq_true rfl)
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Init/While.lean
================================================
import Std.Do

namespace Lean

open Order
open Std.Do

universe u v

/-- Runs one iteration of a loop and continues with `l`.
-/
def Loop.loopCombinator {β : Type u} {m : Type u → Type v} [Monad m]
    (f : Unit → β → m (ForInStep β)) (l : β → m β) (b : β) := do
  match ← f () b with
  | ForInStep.done b => pure b
  | ForInStep.yield b => l b

/-- A monad function must implement this type class to be able to use loops based on
`partial_fixpoint`. -/
class Loop.MonoLoopCombinator {β : Type u} {m : Type u → Type v} [Monad m]
    [∀ α, CCPO (m α)] (f : Unit → β → m (ForInStep β)) where
  mono : monotone (loopCombinator f) := by
    unfold Lean.Loop.loopCombinator <;> monotonicity

/-- Our own copy of `Loop.forIn` because the original one is `partial` and thus we
cannot reason about it. -/
@[inline] def Loop.MonoLoopCombinator.forIn {β : Type u} {m : Type u → Type v}
    [Monad m] [∀ α, CCPO (m α)] (_ : Loop) (init : β)
    (f : Unit → β → m (ForInStep β)) [MonoLoopCombinator f] : m β :=
  let rec @[specialize] loop [MonoLoopCombinator f] (b : β) : m β :=
    loopCombinator f loop b
    partial_fixpoint monotonicity MonoLoopCombinator.mono
  loop init

/-- A while loop based on `Loop.MonoLoopCombinator.forIn`. -/
def Loop.MonoLoopCombinator.while_loop {m} {ps : PostShape} {β: Type} [Monad m]
    [∀ α, Order.CCPO (m α)] [WPMonad m ps] (loop : Loop) (cond: β → Bool)
    (init : β) (body : β -> m β)
    [∀ f : Unit → β → m (ForInStep β), Loop.MonoLoopCombinator f] : m β :=
  Loop.MonoLoopCombinator.forIn loop init fun () s => do
    if cond s then
      let s ← body s
      pure (.yield s)
    else
      pure (.done s)

end Lean
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Lean/Tactic/Simp/BuiltinSimpProcs/SInt.lean
================================================
import Lean
import Hax.MissingLean.Lean.ToExpr

namespace Int128

open Lean Meta Simp

-- Try to read a literal `Int128` value (possibly under `Neg.neg`) out of an expression.
def fromExpr (e : Expr) : SimpM (Option Int128) := do
  if let some (n, _) ← getOfNatValue? e ``Int128 then
    return some (ofNat n)
  let_expr Neg.neg _ _ a ← e | return none
  let some (n, _) ← getOfNatValue? a ``Int128 | return none
  return some (ofInt (- n))

-- Fold a binary `Int128` operation applied to two literal operands.
@[inline] def reduceBin (declName : Name) (arity : Nat) (op : Int128 → Int128 → Int128)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

-- Decide a binary `Int128` proposition on two literal operands.
@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : Int128 → Int128 → Bool)
    (e : Expr) : SimpM Step := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  evalPropStep e (op n m)

-- Evaluate a `Bool`-valued binary `Int128` comparison on two literal operands.
@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : Int128 → Int128 → Bool)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

open Lean Meta Simp in
dsimproc [simp, seval] reduceNeg ((- _ : Int128)) := fun e => do
  let_expr Neg.neg _ _ arg ← e | return .continue
  if arg.isAppOfArity ``OfNat.ofNat 3 then
    -- We return .done to ensure `Neg.neg` is not unfolded even when `ground := true`.
    return .done e
  else
    let some v ← (fromExpr arg) | return .continue
    return .done <| toExpr (- v)

-- Literal folding for the arithmetic operators.
dsimproc [simp, seval] reduceAdd ((_ + _ : Int128)) := reduceBin ``HAdd.hAdd 6 (· + ·)
dsimproc [simp, seval] reduceMul ((_ * _ : Int128)) := reduceBin ``HMul.hMul 6 (· * ·)
dsimproc [simp, seval] reduceSub ((_ - _ : Int128)) := reduceBin ``HSub.hSub 6 (· - ·)
dsimproc [simp, seval] reduceDiv ((_ / _ : Int128)) := reduceBin ``HDiv.hDiv 6 (· / ·)
dsimproc [simp, seval] reduceMod ((_ % _ : Int128)) := reduceBin ``HMod.hMod 6 (· % ·)

-- Literal folding for the comparison operators.
simproc [simp, seval] reduceLT (( _ : Int128) < _) := reduceBinPred ``LT.lt 4 (. < .)
simproc [simp, seval] reduceLE (( _ : Int128) ≤ _) := reduceBinPred ``LE.le 4 (. ≤ .)
simproc [simp, seval] reduceGT (( _ : Int128) > _) := reduceBinPred ``GT.gt 4 (. > .)
simproc [simp, seval] reduceGE (( _ : Int128) ≥ _) := reduceBinPred ``GE.ge 4 (. ≥ .)
simproc [simp, seval] reduceEq (( _ : Int128) = _) := reduceBinPred ``Eq 3 (. = .)
simproc [simp, seval] reduceNe (( _ : Int128) ≠ _) := reduceBinPred ``Ne 3 (. ≠ .)
dsimproc [simp, seval] reduceBEq (( _ : Int128) == _) := reduceBoolPred ``BEq.beq 4 (. == .)
dsimproc [simp, seval] reduceBNe (( _ : Int128) != _) := reduceBoolPred ``bne 4 (. != .)

-- Literal folding for the conversion functions.
dsimproc [simp, seval] reduceOfIntLE (ofIntLE _ _ _) := fun e => do
  unless e.isAppOfArity ``ofIntLE 3 do return .continue
  let some value ← Int.fromExpr? e.appFn!.appFn!.appArg! | return .continue
  let value := ofInt value
  return .done <| toExpr value

dsimproc [simp, seval] reduceOfNat (ofNat _) := fun e => do
  unless e.isAppOfArity ``ofNat 1 do return .continue
  let some value ← Nat.fromExpr? e.appArg! | return .continue
  let value := ofNat value
  return .done <| toExpr value

dsimproc [simp, seval] reduceOfInt (ofInt _) := fun e => do
  unless e.isAppOfArity ``ofInt 1 do return .continue
  let some value ← Int.fromExpr? e.appArg! | return .continue
  let value := ofInt value
  return .done <| toExpr value

dsimproc [simp, seval] reduceToInt (toInt _) := fun e => do
  unless e.isAppOfArity ``toInt 1 do return .continue
  let some v ← (fromExpr e.appArg!) | return .continue
  let n := toInt v
  return .done <| toExpr n

dsimproc [simp, seval] reduceToNatClampNeg (toNatClampNeg _) := fun e => do
  unless e.isAppOfArity ``toNatClampNeg 1 do return .continue
  let some v ← (fromExpr e.appArg!) | return .continue
  let n := toNatClampNeg v
  return .done <| toExpr n

/-- Return `.done` for Int values. We don't want to unfold in the symbolic evaluator.
-/
dsimproc [seval] isValue ((OfNat.ofNat _ : Int128)) := fun e => do
  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue
  return .done e

end Int128
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Lean/Tactic/Simp/BuiltinSimpProcs/UInt.lean
================================================
import Lean
import Hax.MissingLean.Lean.ToExpr

namespace UInt128

open Lean Meta Simp

-- Try to read a literal `UInt128` value out of an expression.
def fromExpr (e : Expr) : SimpM (Option UInt128) := do
  let some (n, _) ← getOfNatValue? e ``UInt128 | return none
  return ofNat n

-- Fold a binary `UInt128` operation applied to two literal operands.
@[inline] def reduceBin (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → UInt128)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

-- Decide a binary `UInt128` proposition on two literal operands.
@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → Bool)
    (e : Expr) : SimpM Step := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  evalPropStep e (op n m)

-- Evaluate a `Bool`-valued binary `UInt128` comparison on two literal operands.
@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → Bool)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

-- Literal folding for the arithmetic operators.
dsimproc [simp, seval] reduceAdd ((_ + _ : UInt128)) := reduceBin ``HAdd.hAdd 6 (· + ·)
dsimproc [simp, seval] reduceMul ((_ * _ : UInt128)) := reduceBin ``HMul.hMul 6 (· * ·)
dsimproc [simp, seval] reduceSub ((_ - _ : UInt128)) := reduceBin ``HSub.hSub 6 (· - ·)
dsimproc [simp, seval] reduceDiv ((_ / _ : UInt128)) := reduceBin ``HDiv.hDiv 6 (· / ·)
dsimproc [simp, seval] reduceMod ((_ % _ : UInt128)) := reduceBin ``HMod.hMod 6 (· % ·)
-- Literal folding for the comparison operators.
simproc [simp, seval] reduceLT (( _ : UInt128) < _) := reduceBinPred ``LT.lt 4 (. < .)
simproc [simp, seval] reduceLE (( _ : UInt128) ≤ _) := reduceBinPred ``LE.le 4 (. ≤ .)
simproc [simp, seval] reduceGT (( _ : UInt128) > _) := reduceBinPred ``GT.gt 4 (. > .)
simproc [simp, seval] reduceGE (( _ : UInt128) ≥ _) := reduceBinPred ``GE.ge 4 (. ≥ .)
simproc [simp, seval] reduceEq (( _ : UInt128) = _) := reduceBinPred ``Eq 3 (. = .)
simproc [simp, seval] reduceNe (( _ : UInt128) ≠ _) := reduceBinPred ``Ne 3 (. ≠ .)
dsimproc [simp, seval] reduceBEq (( _ : UInt128) == _) := reduceBoolPred ``BEq.beq 4 (. == .)
dsimproc [simp, seval] reduceBNe (( _ : UInt128) != _) := reduceBoolPred ``bne 4 (. != .)

-- Literal folding for the conversion functions.
dsimproc [simp, seval] reduceOfNatLT (ofNatLT _ _) := fun e => do
  unless e.isAppOfArity ``ofNatLT 2 do return .continue
  let some value ← Nat.fromExpr? e.appFn!.appArg! | return .continue
  let value := ofNat value
  return .done <| toExpr value

dsimproc [simp, seval] reduceOfNat (ofNat _) := fun e => do
  unless e.isAppOfArity ``ofNat 1 do return .continue
  let some value ← Nat.fromExpr? e.appArg! | return .continue
  let value := ofNat value
  return .done <| toExpr value

dsimproc [simp, seval] reduceToNat (toNat _) := fun e => do
  unless e.isAppOfArity ``toNat 1 do return .continue
  let some v ← (fromExpr e.appArg!) | return .continue
  let n := toNat v
  return .done <| toExpr n

/-- Return `.done` for UInt values. We don't want to unfold in the symbolic evaluator.
-/
dsimproc [seval] isValue ((OfNat.ofNat _ : UInt128)) := fun e => do
  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue
  return .done e

end UInt128
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Lean/ToExpr.lean
================================================
import Lean
import Hax.MissingLean.Init.Data.UInt.Basic
import Hax.MissingLean.Init.Data.SInt.Basic_Int128

-- Quote a `UInt128` value as an `OfNat.ofNat` application on a raw `Nat` literal.
open Lean in
instance : ToExpr UInt128 where
  toTypeExpr := mkConst ``UInt128
  toExpr a :=
    let r := mkRawNatLit a.toNat
    mkApp3 (.const ``OfNat.ofNat [0]) (mkConst ``UInt128) r
      (.app (.const ``UInt128.instOfNat []) r)

-- Quote an `Int128` value, wrapping negatives in `Neg.neg`.
open Lean in
instance : ToExpr Int128 where
  toTypeExpr := mkConst ``Int128
  toExpr i :=
    if 0 ≤ i then
      mkNat i.toNatClampNeg
    else
      mkApp3 (.const ``Neg.neg [0]) (.const ``Int128 []) (.const ``Int128.instNeg [])
        (mkNat (-(i.toInt)).toNat)
where
  mkNat (n : Nat) : Expr :=
    let r := mkRawNatLit n
    mkApp3 (.const ``OfNat.ofNat [0]) (.const ``Int128 []) r
      (.app (.const ``Int128.instOfNat []) r)
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/PostCond.lean
================================================
import Std.Do.PostCond

namespace Std.Do

universe u

variable {ps : PostShape.{u}} {α σ ε : Type u}

-- Lift a pointwise entailment on the success continuation to whole postconditions.
theorem PostCond.entails.of_left_entails {p q : α → Assertion ps} {x : ExceptConds ps}
    (h : ∀ a, p a ⊢ₛ q a) : (p, x) ⊢ₚ (q, x) := by
  simp [h]

end Std.Do
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/Triple/Basic.lean
================================================
import Std.Do.Triple.Basic

namespace Std.Do

-- Strengthen the precondition of a Hoare triple.
theorem Triple.of_entails_left {m} {ps : PostShape} {β: Type} [Monad m] [WPMonad m ps]
    (P Q : Assertion ps) (R : PostCond β ps) (x : m β)
    (hPR : ⦃P⦄ x ⦃R⦄) (hPQ : Q ⊢ₛ P) : ⦃Q⦄ x ⦃R⦄ :=
  SPred.entails.trans hPQ hPR

-- Weaken the postcondition of a Hoare triple.
theorem Triple.of_entails_right {m} {ps : PostShape} {β: Type} [Monad m] [WPMonad m ps]
    (P : Assertion ps) (Q R : PostCond β ps) (x : m β)
    (hPR : ⦃P⦄ x ⦃Q⦄) (hPQ : Q ⊢ₚ R) : ⦃P⦄ x ⦃R⦄ :=
  SPred.entails.trans hPR (PredTrans.mono _ _ _ hPQ)

-- A triple about `f <$> x` is a triple about `x` with `Q` precomposed with `f`.
theorem Triple.map {m} {ps : PostShape} {α β} [Monad m] [WPMonad m ps] (f : α → β)
    (x : m α) (P : Assertion ps) (Q : PostCond β ps) :
    ⦃P⦄ (f <$> x) ⦃Q⦄ ↔ ⦃P⦄ x ⦃(fun a => Q.fst (f a), Q.snd)⦄ := by
  rw [Triple, WP.map]; rfl

end Std.Do
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/Triple/SpecLemmas.lean
================================================
import Std.Do.Triple.Basic
import Hax.MissingLean.Std.Do.Triple.Basic
import Hax.MissingLean.Init.While
import Hax.MissingLean.Std.Do.PostCond

namespace Std.Do

open Lean

-- Invariant-based spec for `Loop.MonoLoopCombinator.forIn`: if every step either
-- yields while decreasing `termination` and preserving `inv`, or finishes with
-- `post`, then the loop started in `inv` ends in `post`.
@[spec] theorem Spec.forIn_monoLoopCombinator {m} {ps : PostShape} {β: Type} [Monad m]
    [∀ α, Order.CCPO (m α)] [WPMonad m ps] (loop : Loop) (init : β)
    (f : Unit → β → m (ForInStep β)) [Loop.MonoLoopCombinator f]
    (inv : β → Prop) (termination : β -> Nat) (post : β → Prop)
    (step : ∀ b, ⦃⌜ inv b ⌝⦄ f () b ⦃⇓ r => match r with
      | .yield b' => spred(⌜ termination b' < termination b ⌝ ∧ ⌜ inv b' ⌝)
      | .done b' => ⌜ post b' ⌝⦄) :
    ⦃⌜ inv init ⌝⦄ Loop.MonoLoopCombinator.forIn loop init f ⦃⇓ b => ⌜ post b ⌝⦄ := by
  unfold Loop.MonoLoopCombinator.forIn Loop.MonoLoopCombinator.forIn.loop Loop.loopCombinator
  apply Triple.bind
  · apply step
  · rintro (b | b)
    · refine Triple.pure b ?_
      exact SPred.entails.refl _
    · apply SPred.imp_elim
      apply SPred.pure_elim'
      intro h
      rw [SPred.entails_true_intro]
      apply Spec.forIn_monoLoopCombinator loop _ f inv termination post step
termination_by termination init
decreasing_by exact h

-- Corresponding spec for `while_loop`: on exit the invariant holds and the
-- condition is false.
@[spec] theorem Spec.MonoLoopCombinator.while_loop {m} {ps : PostShape} {β: Type} [Monad m]
    [∀ α, Order.CCPO (m α)] [WPMonad m ps]
    [∀ f : Unit → β → m (ForInStep β), Loop.MonoLoopCombinator f]
    (init : β) (loop : Loop) (cond: β → Bool) (body : β → m β)
    (inv: β → Prop) (termination : β → Nat)
    (step : ∀ (b : β), cond b →
      ⦃⌜ inv b ⌝⦄ body b ⦃⇓ b' => spred(⌜ termination b' < termination b ⌝ ∧ ⌜ inv
b' ⌝)⦄ ) :
    ⦃⌜ inv init ⌝⦄ Loop.MonoLoopCombinator.while_loop loop cond init body
      ⦃⇓ b => ⌜ inv b ∧ ¬ cond b ⌝⦄ := by
  apply Spec.forIn_monoLoopCombinator
  intro b
  by_cases hb : cond b
  · simpa [hb, Triple.map] using step b hb
  · simp [hb, Triple.pure]
================================================
FILE: hax-lib/proof-libs/lean/Hax/MissingLean.lean
================================================
import Hax.MissingLean.Init.Data.Array.Lemmas
import Hax.MissingLean.Init.Data.BitVec.Basic
import Hax.MissingLean.Init.Data.Nat.Div.Basic
import Hax.MissingLean.Init.Data.UInt.Basic
import Hax.MissingLean.Init.Data.UInt.Lemmas
import Hax.MissingLean.Init.Data.SInt.Basic
import Hax.MissingLean.Init.Data.SInt.Lemmas
import Hax.MissingLean.Init.Data.Vector.Basic
import Hax.MissingLean.Init.Data.Nat.MinMax
import Hax.MissingLean.Init.Data.Int.DivMod.Lemmas
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/HaxBVDecide.lean
================================================
import Std.Tactic.BVDecide

-- `bv_decide` preceded by normalization with the `hax_bv_decide` simp set.
macro "hax_bv_decide" c:Lean.Parser.Tactic.optConfig : tactic =>
  `(tactic| (
      simp only [hax_bv_decide] at *;
      bv_decide $c
    ))
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/HaxConstructPure.lean
================================================
import Hax.Tactic.HaxZify
import Hax.Tactic.HaxMvcgen
import Qq

open Lean Elab Tactic Meta Qq Std.Do

/-- This tactic is supposed to be run on results of `mvcgen` where the postcondition is
of the form `⇓ r => r = ?mvar`. This tactic will analyse the goals produced by `mvcgen`
and instantiate the metavariable accordingly.

For example, `mvcgen` might produce a goal of the form
```
x r : Int32
h : r.toInt = x.toInt + x.toInt
⊢ ((r.toInt == 0) = true) = ?mvar
```
Then this tactic should instantiate `?mvar` with `((x.toInt + x.toInt == 0) = true)`
-/
def haxConstructPure (mvarId : MVarId) : TacticM Unit := do
  -- Find goals that contain `mvar`
  let allGoals ← getGoals
  let goals ← allGoals.filterM fun goal => do
    pure ((← goal.getType).findMVar? (· == mvarId)).isSome
  if (goals.length > 1) then
    throwError m!"hax_construct_pure: `mvcgen generated more than one goal containing the \
      metavariable. This is currently unsupported. Try to remove if-then-else and match-constructs."
  let [goal] := goals
    | throwError m!"hax_construct_pure: No goal contains the metavariable."
  goal.withContext do
    -- Zify:
    let zifyVars ← collectZifyVars
    let goal ← haxZify goal (fun decl => zifyVars.contains decl.fvarId)
    trace `Hax.hax_construct_pure fun () => m!"Goal after `zify`: {goal}"
    -- Subst:
    let goal ← substVars goal
    trace `Hax.hax_construct_pure fun () => m!"Goal after `subst`: {goal}"
    -- Assign the meta-variable by reflexivity
    withAssignableSyntheticOpaque goal.applyRfl
    pruneSolvedGoals
where
  /-- Collect all machine integer variables that should be converted into integers.
  We want to collect all variables `x` with a hypothesis of the form `x.toInt = ...` here.
  Then, `hax_zify` will convert this into a hypothesis of the form `y = ...` for a new
  integer variable `y`, which we can ultimately eliminate using `subst_vars`. -/
  collectZifyVars : MetaM (Std.HashSet FVarId) := do
    let lctx ← getLCtx
    let mut zifyVars := Std.HashSet.emptyWithCapacity lctx.size
    for decl in lctx do
      if !decl.type.isEq then continue
      let lhs := decl.type.getArg! 1
      if !haxZifyTypes.any (fun (_, toInt, _) => lhs.isAppOfArity toInt 1) then continue
      let some fvarId := (lhs.getArg! 0).fvarId? | continue
      zifyVars := zifyVars.insert fvarId
    return zifyVars

/-- The `hax_construct_pure` tactic should be applied to goals of the form
```
{ p // ⦃⌜ ... ⌝⦄ ... ⦃⇓ r => ⌜r = p⌝⦄ }
```
Under the hood, it will use `hax_mvcgen` to generate verification conditions for the
given Hoare triple and then generate a suitable value for `p`. The default call to
`hax_mvcgen` can be replaced via the syntax `hax_construct_pure => custom_tactics`. -/
syntax (name := hax_construct_pure) "hax_construct_pure" (" => " tacticSeq)? : tactic

@[tactic hax_construct_pure] def elabHaxConstructPure : Tactic := fun stx => do
  let tac ← match stx with
    | `(tactic| hax_construct_pure => $tac:tacticSeq) => pure tac
    | `(tactic| hax_construct_pure) => `(tacticSeq| hax_mvcgen -trivial <;> intros)
    | _ => throwUnsupportedSyntax
  let goal ← getMainGoal
  let goalType ← goal.getType
  unless goalType.isAppOf ``Subtype do
    throwError m!"hax_construct_pure: Goal must be of the form `\{ p // ... }` (Subtype), \
      but got:\n{goalType}"
  let u ← mkFreshLevelMVar
  let type : Q(Type) ← mkFreshExprMVar (mkSort u) MetavarKind.natural Name.anonymous
  let mvarP : Q($type → Prop) ← mkFreshExprMVar q($type → Prop)
  let mvarVal : Q($type) ← mkFreshExprSyntheticOpaqueMVar type
  replaceMainGoal (← goal.apply q(@Subtype.mk $type $mvarP $mvarVal))
  evalTactic (← `(tactic| intros))
  evalTactic tac
  let goals ← getGoals
  trace `Hax.hax_construct_pure fun () => m!"Goals after `mvcgen`: {goals}"
  haxConstructPure mvarVal.mvarId!
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/HaxMvcgen.lean
================================================
import Hax.Tactic.SpecSet
import Hax.Tactic.Init

namespace Hax.HaxMvcgen

open Lean Elab Syntax Parser Tactic

-- Build the syntax of a `mvcgen` call, appending the spec-set lemma names `args`
-- to the user-supplied simp-lemma list.
def mkMvcgenCall (args: Array Name) (cfgStx : Syntax) (argStx : Syntax) : CoreM Syntax := do
  let cfgStx : TSyntax `Lean.Parser.Tactic.optConfig := .mk cfgStx
  let mut elems := argStx[1].getArgs.getSepElems
  for arg in args do
    elems := elems.push
      (Syntax.node .none ``Lean.Parser.Tactic.simpLemma #[mkNullNode, mkNullNode, mkIdent arg])
  let argStx : TSepArray _ _ := Syntax.TSepArray.ofElems (elems.map .mk)
  let tac := ← `(tactic| mvcgen $cfgStx [$argStx,*])
  pure tac

syntax (name := hax_mvcgen) "hax_mvcgen" optConfig
  (" [" withoutPosition((simpStar <|> simpErase <|> simpLemma),*,?) "] ")? : tactic

/-- A customized version of the `mvcgen` tactic. It provides `mvcgen` with additional
lemmas gathered from `@[specset X]` annotations, where `X` is the current setting of
`set_option hax_mvcgen.specset`.
-/
@[tactic hax_mvcgen] def elabHaxMvcgen : Tactic := fun stx => do
  let specset := hax_mvcgen.specset.get (← getOptions)
  let cfgStx := stx[1]
  let argStx := stx[2]
  let extState := specSetExt.getState (← getEnv)
  let decls := (extState.getD specset.toName {}).toArray
  let tac ← mkMvcgenCall decls cfgStx argStx
  Tactic.evalTactic tac

end Hax.HaxMvcgen
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/HaxSpec.lean
================================================
import Lean
import Hax.rust_primitives.Spec

namespace Hax.Tactic.HaxSpec

open Lean Meta

-- For a definition whose body type is `Spec`, derive a theorem `<decl>.contract`
-- (projection 2 of the `Spec` value) and tag it with `@[spec]`.
private def addContractSpec (declName : Name) (attrKind : AttributeKind) : MetaM Unit := do
  let cinfo ← getConstInfo declName
  let type ← instantiateMVars cinfo.type
  forallTelescope type fun xs bodyType => do
    let bodyType ← whnf bodyType
    unless bodyType.isAppOf ``Spec do
      throwError "@[hax_spec]: expected a definition of type `Spec`, got{indentExpr bodyType}"
    let us := cinfo.levelParams.map mkLevelParam
    let app := mkAppN (mkConst declName us) xs
    let contractVal := mkProj ``Spec 2 app
    let contractType ← inferType contractVal
    -- Unfold occurrences of the definition itself in the contract's statement.
    let contractType ← deltaExpand contractType (· == declName)
    let closedVal ← mkLambdaFVars xs contractVal
    let closedType ← mkForallFVars xs contractType
    let contractDeclName := declName ++ `contract
    addDecl (.thmDecl {
      name := contractDeclName
      levelParams := cinfo.levelParams
      type := closedType
      value := closedVal
    })
    let specStx := mkNode ``Lean.Parser.Attr.simple #[mkIdent `spec, mkNullNode]
    Attribute.add contractDeclName `spec specStx attrKind

initialize registerBuiltinAttribute {
  name := `hax_spec
  descr := "Registers a `Spec` definition for use with `mvcgen`."
  applicationTime := .afterCompilation
  add := fun declName _stx attrKind => do
    discard <| (addContractSpec declName attrKind).run {} {}
}

end Hax.Tactic.HaxSpec
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/HaxZify.lean
================================================
import Lean
import Hax.rust_primitives.USize64

open Lean Elab Tactic Meta

/-- List of types supported by `hax_zify` -/
def haxZifyTypes := [
  (``Int8, ``Int8.toInt, ``Int8.ofInt_eq_of_toInt_eq),
  (``Int16, ``Int16.toInt, ``Int16.ofInt_eq_of_toInt_eq),
  (``Int32, ``Int32.toInt, ``Int32.ofInt_eq_of_toInt_eq),
  (``Int64, ``Int64.toInt, ``Int64.ofInt_eq_of_toInt_eq),
  (``UInt8, ``UInt8.toNat, ``UInt8.ofNat_eq_of_toNat_eq),
  (``UInt16, ``UInt16.toNat, ``UInt16.ofNat_eq_of_toNat_eq),
  -- NOTE(review): `UInt32` is absent although every other fixed-width type is
  -- listed — confirm whether `UInt32` support was omitted intentionally.
  (``UInt64, ``UInt64.toNat, ``UInt64.ofNat_eq_of_toNat_eq),
  (``USize64, ``USize64.toNat, ``USize64.ofNat_eq_of_toNat_eq),
]

/-- Replaces a variable of machine integer type by a variable of integer type. This
roughly corresponds to the application of the following tactics:
```
generalize h : var.toInt = x at *
replace h := Int32.ofInt_eq_of_toInt_eq h
subst h
```
-/
def haxZifySingle (mvarId : MVarId) (var : FVarId) (toInt ofInt_eq_of_toInt_eq : Name) :
    MetaM MVarId:= do
  mvarId.withContext do
    -- Generalize:
    let arg := {expr := ← mkAppM toInt #[mkFVar var], hName? := `h}
    let (_, newVars, mvarId) ← mvarId.generalizeHyp #[arg] ((← getLocalHyps).map (·.fvarId!))
    mvarId.withContext do
      unless newVars.size == 2 do
        Lean.Meta.throwTacticEx `hax_zify mvarId (m!"expected two variables, got {newVars.size}")
      -- Replace:
      let {mvarId, fvarId, ..} ← mvarId.replace newVars[1]!
        (← mkAppM ofInt_eq_of_toInt_eq #[mkFVar newVars[1]!])
      -- Subst:
      let (_, mvarId) ← substCore mvarId fvarId (symm := true)
      pure mvarId

/-- Replaces all variables of machine integer type by variables of integer type.
-/
def haxZify (mvarId : MVarId) (declFilter : LocalDecl → Bool := fun _ => true) :
    MetaM MVarId := do
  mvarId.withContext do
    let mut mvarId := mvarId
    let lctx ← getLCtx
    for decl in lctx do
      if decl.isImplementationDetail then continue
      if !declFilter decl then continue
      let some (_, toInt, ofInt_eq_of_toInt_eq) ← haxZifyTypes.findM?
        fun (ty, _, _) => (isDefEq decl.type (mkConst ty)) | continue
      let var := decl.fvarId
      mvarId ← haxZifySingle mvarId var toInt ofInt_eq_of_toInt_eq
    pure mvarId

/-- Replaces all variables of machine integer type in the current goal by variables of
integer type. -/
elab "hax_zify" : tactic => withMainContext do
  replaceMainGoal [(← haxZify (← getMainGoal))]
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/Init.lean
================================================
import Lean

initialize do
  pure () <* Lean.Meta.registerSimpAttr `hax_bv_decide
    "simp rules for hax-specific bv_decide preprocessing"

initialize Lean.registerTraceClass `Hax.hax_construct_pure

register_option hax_mvcgen.specset : String := {
  defValue := "bv"
  descr := "Identifier of the set of specs used for `hax_mvcgen`"
}
================================================
FILE: hax-lib/proof-libs/lean/Hax/Tactic/SpecSet.lean
================================================
import Lean

open Lean Elab Std

-- Maps a spec-set name to the set of declarations registered under it.
abbrev SpecSetMap := HashMap Name (HashSet Name)

structure SpecSetEntry where
  specSet : Name
  decl : Name

/-- Environment extension to store spec sets, i.e., sets of declarations to use with
`hax_mvcgen`.
-/ initialize specSetExt : SimplePersistentEnvExtension SpecSetEntry SpecSetMap ← registerSimplePersistentEnvExtension { name := `specSetExt addEntryFn := fun state {specSet, decl} => let set := state.getD specSet {} state.insert specSet (set.insert decl) addImportedFn := fun states => states.foldl (fun acc st => st.foldl (fun acc {specSet, decl} => let merged := (acc.getD specSet {}).insert decl acc.insert specSet merged) acc) {} } initialize registerBuiltinAttribute { name := `specset descr := "Add a declaration to a given spec set for `hax_mvcgen`. The spec set can be activated via `set_option hax_mvcgen.specset`" add := fun decl stx kind => do setEnv $ specSetExt.addEntry (← getEnv) {specSet := stx[1][0].getId, decl} } ================================================ FILE: hax-lib/proof-libs/lean/Hax/Tactic.lean ================================================ import Hax.Tactic.HaxBVDecide import Hax.Tactic.HaxConstructPure import Hax.Tactic.HaxMvcgen import Hax.Tactic.HaxSpec import Hax.Tactic.HaxZify import Hax.Tactic.Init import Hax.Tactic.SpecSet ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/core_models.lean ================================================ -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax.core_models.prologue import Hax.Tactic.HaxSpec import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false namespace core_models.array structure TryFromSliceError where -- no fields @[spec] def Impl_23.as_slice (T : Type) (N : usize) (s : (RustArray T N)) : RustM (RustSlice T) := do (rust_primitives.slice.array_as_slice T (N) s) end core_models.array namespace core_models.array.iter structure IntoIter (T : Type) (N : usize) where _0 : (rust_primitives.sequence.Seq T) end core_models.array.iter namespace core_models.borrow class 
Borrow.AssociatedTypes (Self : Type) (Borrowed : Type) where

-- `Borrow` trait in the hax core model; `borrow` runs in `RustM`.
class Borrow (Self : Type) (Borrowed : Type)
    [associatedTypes : outParam (Borrow.AssociatedTypes (Self : Type) (Borrowed : Type))] where
  borrow (Self) (Borrowed) : (Self -> RustM Borrowed)

end core_models.borrow

namespace core_models.clone

class Clone.AssociatedTypes (Self : Type) where

-- `Clone` trait in the hax core model.
class Clone (Self : Type)
    [associatedTypes : outParam (Clone.AssociatedTypes (Self : Type))] where
  clone (Self) : (Self -> RustM Self)

@[reducible] instance Impl.AssociatedTypes (T : Type) : Clone.AssociatedTypes T where

-- Blanket `Clone` instance: `clone` simply returns the value.
instance Impl (T : Type) : Clone T where
  clone := fun (self : T) => do
    (pure self)

end core_models.clone

namespace core_models.cmp

class PartialEq.AssociatedTypes (Self : Type) (Rhs : Type) where

-- `PartialEq` trait in the hax core model; `eq` runs in `RustM`.
class PartialEq (Self : Type) (Rhs : Type)
    [associatedTypes : outParam (PartialEq.AssociatedTypes (Self : Type) (Rhs : Type))] where
  eq (Self) (Rhs) : (Self -> Rhs -> RustM Bool)

class Eq.AssociatedTypes (Self : Type) where
  [trait_constr_Eq_i0 : PartialEq.AssociatedTypes Self Self]

attribute [instance_reducible, instance] Eq.AssociatedTypes.trait_constr_Eq_i0

-- `Eq` extends `PartialEq Self Self` via an instance field.
class Eq (Self : Type)
    [associatedTypes : outParam (Eq.AssociatedTypes (Self : Type))] where
  [trait_constr_Eq_i0 : PartialEq Self Self]

attribute [instance_reducible, instance] Eq.trait_constr_Eq_i0

inductive Ordering : Type
  | Less : Ordering
  | Equal : Ordering
  | Greater : Ordering

-- Discriminant values of the `Ordering` variants.
def Ordering.Less.AnonConst : isize := (-1 : isize)
def Ordering.Equal.AnonConst : isize := (0 : isize)
def Ordering.Greater.AnonConst : isize := (1 : isize)

@[spec] def Ordering_cast_to_repr (x : Ordering) : RustM isize := do
  match x with
  | (Ordering.Less ) => do
    (pure Ordering.Less.AnonConst)
  | (Ordering.Equal ) => do
    (pure Ordering.Equal.AnonConst)
  | (Ordering.Greater ) => do
    (pure Ordering.Greater.AnonConst)

class Neq.AssociatedTypes (Self : Type) (Rhs : Type) where

class Neq (Self : Type) (Rhs : Type)
    [associatedTypes : outParam (Neq.AssociatedTypes (Self : Type) (Rhs : Type))] where
  neq (Self) (Rhs) : (Self -> Rhs -> RustM Bool)

-- `Neq` in terms of `PartialEq`: `neq` checks that `eq` returned `false`.
@[reducible] instance Impl.AssociatedTypes (T : Type)
    [trait_constr_Impl_associated_type_i0 : PartialEq.AssociatedTypes T T]
    [trait_constr_Impl_i0 : PartialEq T T ] : Neq.AssociatedTypes T T where

instance Impl (T : Type)
    [trait_constr_Impl_associated_type_i0 : PartialEq.AssociatedTypes T T]
    [trait_constr_Impl_i0 : PartialEq T T ] : Neq T T where
  neq := fun (self : T) (y : T) => do
    ((← (PartialEq.eq T T self y)) ==? false)

structure Reverse (T : Type) where
  _0 : T

-- `PartialEq` for `Reverse` compares with the operands swapped.
@[reducible] instance Impl_3.AssociatedTypes (T : Type)
    [trait_constr_Impl_3_associated_type_i0 : PartialEq.AssociatedTypes T T]
    [trait_constr_Impl_3_i0 : PartialEq T T ] :
    PartialEq.AssociatedTypes (Reverse T) (Reverse T) where

instance Impl_3 (T : Type)
    [trait_constr_Impl_3_associated_type_i0 : PartialEq.AssociatedTypes T T]
    [trait_constr_Impl_3_i0 : PartialEq T T ] : PartialEq (Reverse T) (Reverse T) where
  eq := fun (self : (Reverse T)) (other : (Reverse T)) => do
    (PartialEq.eq T T (Reverse._0 other) (Reverse._0 self))

@[reducible] instance Impl_4.AssociatedTypes (T : Type)
    [trait_constr_Impl_4_associated_type_i0 : Eq.AssociatedTypes T]
    [trait_constr_Impl_4_i0 : Eq T ] : Eq.AssociatedTypes (Reverse T) where

instance Impl_4 (T : Type)
    [trait_constr_Impl_4_associated_type_i0 : Eq.AssociatedTypes T]
    [trait_constr_Impl_4_i0 : Eq T ] : Eq (Reverse T) where

-- `PartialEq`/`Eq` instances for the machine-integer types, defined via `==?`.
@[reducible] instance Impl_6.AssociatedTypes : PartialEq.AssociatedTypes u8 u8 where

instance Impl_6 : PartialEq u8 u8 where
  eq := fun (self : u8) (other : u8) => do
    (self ==? other)

@[reducible] instance Impl_7.AssociatedTypes : Eq.AssociatedTypes u8 where

instance Impl_7 : Eq u8 where

@[reducible] instance Impl_8.AssociatedTypes : PartialEq.AssociatedTypes i8 i8 where

instance Impl_8 : PartialEq i8 i8 where
  eq := fun (self : i8) (other : i8) => do
    (self ==? other)

@[reducible] instance Impl_9.AssociatedTypes : Eq.AssociatedTypes i8 where

instance Impl_9 : Eq i8 where

@[reducible] instance Impl_10.AssociatedTypes : PartialEq.AssociatedTypes u16 u16 where

instance Impl_10 : PartialEq u16 u16 where
  eq := fun (self : u16) (other : u16) => do
    (self ==? other)

@[reducible] instance Impl_11.AssociatedTypes : Eq.AssociatedTypes u16 where

instance Impl_11 : Eq u16 where

@[reducible] instance Impl_12.AssociatedTypes : PartialEq.AssociatedTypes i16 i16 where

instance Impl_12 : PartialEq i16 i16 where
  eq := fun (self : i16) (other : i16) => do
    (self ==? other)

@[reducible] instance Impl_13.AssociatedTypes : Eq.AssociatedTypes i16 where

instance Impl_13 : Eq i16 where

@[reducible] instance Impl_14.AssociatedTypes : PartialEq.AssociatedTypes u32 u32 where

instance Impl_14 : PartialEq u32 u32 where
  eq := fun (self : u32) (other : u32) => do
    (self ==? other)

@[reducible] instance Impl_15.AssociatedTypes : Eq.AssociatedTypes u32 where

instance Impl_15 : Eq u32 where

@[reducible] instance Impl_16.AssociatedTypes : PartialEq.AssociatedTypes i32 i32 where

instance Impl_16 : PartialEq i32 i32 where
  eq := fun (self : i32) (other : i32) => do
    (self ==? other)

@[reducible] instance Impl_17.AssociatedTypes : Eq.AssociatedTypes i32 where

instance Impl_17 : Eq i32 where

@[reducible] instance Impl_18.AssociatedTypes : PartialEq.AssociatedTypes u64 u64 where

instance Impl_18 : PartialEq u64 u64 where
  eq := fun (self : u64) (other : u64) => do
    (self ==? other)

@[reducible] instance Impl_19.AssociatedTypes : Eq.AssociatedTypes u64 where

instance Impl_19 : Eq u64 where

@[reducible] instance Impl_20.AssociatedTypes : PartialEq.AssociatedTypes i64 i64 where

-- (review) Definition continues past the end of this chunk.
instance Impl_20 : PartialEq i64 i64 where
  eq := fun (self : i64) (other : i64) => do
    (self ==?
other) @[reducible] instance Impl_21.AssociatedTypes : Eq.AssociatedTypes i64 where instance Impl_21 : Eq i64 where @[reducible] instance Impl_22.AssociatedTypes : PartialEq.AssociatedTypes u128 u128 where instance Impl_22 : PartialEq u128 u128 where eq := fun (self : u128) (other : u128) => do (self ==? other) @[reducible] instance Impl_23.AssociatedTypes : Eq.AssociatedTypes u128 where instance Impl_23 : Eq u128 where @[reducible] instance Impl_24.AssociatedTypes : PartialEq.AssociatedTypes i128 i128 where instance Impl_24 : PartialEq i128 i128 where eq := fun (self : i128) (other : i128) => do (self ==? other) @[reducible] instance Impl_25.AssociatedTypes : Eq.AssociatedTypes i128 where instance Impl_25 : Eq i128 where @[reducible] instance Impl_26.AssociatedTypes : PartialEq.AssociatedTypes usize usize where instance Impl_26 : PartialEq usize usize where eq := fun (self : usize) (other : usize) => do (self ==? other) @[reducible] instance Impl_27.AssociatedTypes : Eq.AssociatedTypes usize where instance Impl_27 : Eq usize where @[reducible] instance Impl_28.AssociatedTypes : PartialEq.AssociatedTypes isize isize where instance Impl_28 : PartialEq isize isize where eq := fun (self : isize) (other : isize) => do (self ==? 
other) @[reducible] instance Impl_29.AssociatedTypes : Eq.AssociatedTypes isize where instance Impl_29 : Eq isize where end core_models.cmp namespace core_models.convert class Into.AssociatedTypes (Self : Type) (T : Type) where class Into (Self : Type) (T : Type) [associatedTypes : outParam (Into.AssociatedTypes (Self : Type) (T : Type))] where into (Self) (T) : (Self -> RustM T) class From.AssociatedTypes (Self : Type) (T : Type) where class From (Self : Type) (T : Type) [associatedTypes : outParam (From.AssociatedTypes (Self : Type) (T : Type))] where _from (Self) (T) : (T -> RustM Self) @[reducible] instance Impl.AssociatedTypes (T : Type) (U : Type) [trait_constr_Impl_associated_type_i0 : From.AssociatedTypes U T] [trait_constr_Impl_i0 : From U T ] : Into.AssociatedTypes T U where instance Impl (T : Type) (U : Type) [trait_constr_Impl_associated_type_i0 : From.AssociatedTypes U T] [trait_constr_Impl_i0 : From U T ] : Into T U where into := fun (self : T) => do (From._from U T self) structure Infallible where -- no fields @[reducible] instance Impl_3.AssociatedTypes (T : Type) : From.AssociatedTypes T T where instance Impl_3 (T : Type) : From T T where _from := fun (x : T) => do (pure x) class AsRef.AssociatedTypes (Self : Type) (T : Type) where class AsRef (Self : Type) (T : Type) [associatedTypes : outParam (AsRef.AssociatedTypes (Self : Type) (T : Type))] where as_ref (Self) (T) : (Self -> RustM T) @[reducible] instance Impl_4.AssociatedTypes (T : Type) : AsRef.AssociatedTypes T T where instance Impl_4 (T : Type) : AsRef T T where as_ref := fun (self : T) => do (pure self) end core_models.convert namespace core_models.default class Default.AssociatedTypes (Self : Type) where class Default (Self : Type) [associatedTypes : outParam (Default.AssociatedTypes (Self : Type))] where default (Self) : (rust_primitives.hax.Tuple0 -> RustM Self) end core_models.default namespace core_models.f32 opaque Impl.abs (x : f64) : RustM f64 end core_models.f32 namespace 
core_models.fmt structure Error where -- no fields structure Formatter where -- no fields structure Arguments where _0 : rust_primitives.hax.Tuple0 end core_models.fmt namespace core_models.fmt.rt opaque ArgumentType : Type structure Argument where ty : ArgumentType opaque Impl.new_display (T : Type) (x : T) : RustM Argument opaque Impl.new_debug (T : Type) (x : T) : RustM Argument opaque Impl.new_lower_hex (T : Type) (x : T) : RustM Argument opaque Impl_1.new_binary (T : Type) (x : T) : RustM Argument opaque Impl_1.new_const (T : Type) (U : Type) (x : T) (y : U) : RustM core_models.fmt.Arguments opaque Impl_1.new_v1 (T : Type) (U : Type) (V : Type) (W : Type) (x : T) (y : U) (z : V) (t : W) : RustM core_models.fmt.Arguments @[spec] def Impl_1.none (_ : rust_primitives.hax.Tuple0) : RustM (RustArray Argument 0) := do (pure (RustArray.ofVec #v[])) opaque Impl_1.new_v1_formatted (T : Type) (U : Type) (V : Type) (x : T) (y : U) (z : V) : RustM core_models.fmt.Arguments inductive Count : Type | Is : u16 -> Count | Param : u16 -> Count | Implied : Count structure Placeholder where position : usize flags : u32 precision : Count width : Count structure UnsafeArg where -- no fields end core_models.fmt.rt namespace core_models.hash class Hasher.AssociatedTypes (Self : Type) where class Hasher (Self : Type) [associatedTypes : outParam (Hasher.AssociatedTypes (Self : Type))] where class Hash.AssociatedTypes (Self : Type) where class Hash (Self : Type) [associatedTypes : outParam (Hash.AssociatedTypes (Self : Type))] where hash (Self) (H : Type) [trait_constr_hash_associated_type_i1 : Hasher.AssociatedTypes H] [trait_constr_hash_i1 : Hasher H ] : (Self -> H -> RustM H) end core_models.hash namespace core_models.hint def black_box (T : Type) (dummy : T) : RustM T := do (pure dummy) set_option hax_mvcgen.specset "bv" in @[hax_spec] def black_box.spec (T : Type) (dummy : T) : Spec (requires := do pure True) (ensures := fun res => do (hax_lib.prop.Impl.from_bool true)) (black_box 
(T : Type) (dummy : T)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [black_box] <;> bv_decide } def must_use (T : Type) (value : T) : RustM T := do (pure value) set_option hax_mvcgen.specset "bv" in @[hax_spec] def must_use.spec (T : Type) (value : T) : Spec (requires := do pure True) (ensures := fun res => do (hax_lib.prop.Impl.from_bool true)) (must_use (T : Type) (value : T)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [must_use] <;> bv_decide } end core_models.hint namespace core_models.iter.adapters.enumerate structure Enumerate (I : Type) where iter : I count : usize @[spec] def Impl.new (I : Type) (iter : I) : RustM (Enumerate I) := do (pure (Enumerate.mk (iter := iter) (count := (0 : usize)))) end core_models.iter.adapters.enumerate namespace core_models.iter.adapters.step_by structure StepBy (I : Type) where iter : I step : usize @[spec] def Impl.new (I : Type) (iter : I) (step : usize) : RustM (StepBy I) := do (pure (StepBy.mk (iter := iter) (step := step))) end core_models.iter.adapters.step_by namespace core_models.iter.adapters.map structure Map (I : Type) (F : Type) where iter : I f : F @[spec] def Impl.new (I : Type) (F : Type) (iter : I) (f : F) : RustM (Map I F) := do (pure (Map.mk (iter := iter) (f := f))) end core_models.iter.adapters.map namespace core_models.iter.adapters.take structure Take (I : Type) where iter : I n : usize @[spec] def Impl.new (I : Type) (iter : I) (n : usize) : RustM (Take I) := do (pure (Take.mk (iter := iter) (n := n))) end core_models.iter.adapters.take namespace core_models.iter.adapters.zip structure Zip (I1 : Type) (I2 : Type) where it1 : I1 it2 : I2 end core_models.iter.adapters.zip namespace core_models.marker class Copy.AssociatedTypes (Self : Type) where [trait_constr_Copy_i0 : core_models.clone.Clone.AssociatedTypes Self] attribute 
[instance_reducible, instance] Copy.AssociatedTypes.trait_constr_Copy_i0 class Copy (Self : Type) [associatedTypes : outParam (Copy.AssociatedTypes (Self : Type))] where [trait_constr_Copy_i0 : core_models.clone.Clone Self] attribute [instance_reducible, instance] Copy.trait_constr_Copy_i0 class Send.AssociatedTypes (Self : Type) where class Send (Self : Type) [associatedTypes : outParam (Send.AssociatedTypes (Self : Type))] where class Sync.AssociatedTypes (Self : Type) where class Sync (Self : Type) [associatedTypes : outParam (Sync.AssociatedTypes (Self : Type))] where class Sized.AssociatedTypes (Self : Type) where class Sized (Self : Type) [associatedTypes : outParam (Sized.AssociatedTypes (Self : Type))] where class StructuralPartialEq.AssociatedTypes (Self : Type) where class StructuralPartialEq (Self : Type) [associatedTypes : outParam (StructuralPartialEq.AssociatedTypes (Self : Type))] where @[reducible] instance Impl.AssociatedTypes (T : Type) : Send.AssociatedTypes T where instance Impl (T : Type) : Send T where @[reducible] instance Impl_1.AssociatedTypes (T : Type) : Sync.AssociatedTypes T where instance Impl_1 (T : Type) : Sync T where @[reducible] instance Impl_2.AssociatedTypes (T : Type) : Sized.AssociatedTypes T where instance Impl_2 (T : Type) : Sized T where @[reducible] instance Impl_3.AssociatedTypes (T : Type) [trait_constr_Impl_3_associated_type_i0 : core_models.clone.Clone.AssociatedTypes T] [trait_constr_Impl_3_i0 : core_models.clone.Clone T ] : Copy.AssociatedTypes T where instance Impl_3 (T : Type) [trait_constr_Impl_3_associated_type_i0 : core_models.clone.Clone.AssociatedTypes T] [trait_constr_Impl_3_i0 : core_models.clone.Clone T ] : Copy T where structure PhantomData (T : Type) where end core_models.marker namespace core_models.mem opaque forget (T : Type) (t : T) : RustM rust_primitives.hax.Tuple0 opaque forget_unsized (T : Type) (t : T) : RustM rust_primitives.hax.Tuple0 opaque size_of (T : Type) (_ : rust_primitives.hax.Tuple0) : 
RustM usize opaque size_of_val (T : Type) (val : T) : RustM usize opaque min_align_of (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize opaque min_align_of_val (T : Type) (val : T) : RustM usize opaque align_of (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize opaque align_of_val (T : Type) (val : T) : RustM usize opaque align_of_val_raw (T : Type) (val : T) : RustM usize opaque needs_drop (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM Bool opaque uninitialized (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T opaque swap (T : Type) (x : T) (y : T) : RustM (rust_primitives.hax.Tuple2 T T) opaque replace (T : Type) (dest : T) (src : T) : RustM (rust_primitives.hax.Tuple2 T T) opaque drop (T : Type) (_x : T) : RustM rust_primitives.hax.Tuple0 @[spec] def copy (T : Type) [trait_constr_copy_associated_type_i0 : core_models.marker.Copy.AssociatedTypes T] [trait_constr_copy_i0 : core_models.marker.Copy T ] (x : T) : RustM T := do (rust_primitives.mem.copy T x) opaque take (T : Type) (x : T) : RustM (rust_primitives.hax.Tuple2 T T) opaque transmute_copy (Src : Type) (Dst : Type) (src : Src) : RustM Dst opaque variant_count (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize opaque zeroed (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T opaque transmute (Src : Type) (Dst : Type) (src : Src) : RustM Dst end core_models.mem namespace core_models.mem.manually_drop structure ManuallyDrop (T : Type) where value : T end core_models.mem.manually_drop namespace core_models.num.error structure TryFromIntError where _0 : rust_primitives.hax.Tuple0 structure IntErrorKind where -- no fields structure ParseIntError where kind : IntErrorKind end core_models.num.error namespace core_models.num @[spec] def Impl_6.wrapping_add (x : u8) (y : u8) : RustM u8 := do (rust_primitives.arithmetic.wrapping_add_u8 x y) @[spec] def Impl_6.wrapping_sub (x : u8) (y : u8) : RustM u8 := do (rust_primitives.arithmetic.wrapping_sub_u8 x y) @[spec] def Impl_6.wrapping_mul 
(x : u8) (y : u8) : RustM u8 := do (rust_primitives.arithmetic.wrapping_mul_u8 x y) @[spec] def Impl_6.pow (x : u8) (exp : u32) : RustM u8 := do (rust_primitives.arithmetic.pow_u8 x exp) opaque Impl_6.leading_zeros (x : u8) : RustM u32 opaque Impl_6.ilog2 (x : u8) : RustM u32 @[spec] def Impl_7.wrapping_add (x : u16) (y : u16) : RustM u16 := do (rust_primitives.arithmetic.wrapping_add_u16 x y) @[spec] def Impl_7.wrapping_sub (x : u16) (y : u16) : RustM u16 := do (rust_primitives.arithmetic.wrapping_sub_u16 x y) @[spec] def Impl_7.wrapping_mul (x : u16) (y : u16) : RustM u16 := do (rust_primitives.arithmetic.wrapping_mul_u16 x y) @[spec] def Impl_7.pow (x : u16) (exp : u32) : RustM u16 := do (rust_primitives.arithmetic.pow_u16 x exp) opaque Impl_7.leading_zeros (x : u16) : RustM u32 opaque Impl_7.ilog2 (x : u16) : RustM u32 @[spec] def Impl_8.wrapping_add (x : u32) (y : u32) : RustM u32 := do (rust_primitives.arithmetic.wrapping_add_u32 x y) @[spec] def Impl_8.wrapping_sub (x : u32) (y : u32) : RustM u32 := do (rust_primitives.arithmetic.wrapping_sub_u32 x y) @[spec] def Impl_8.wrapping_mul (x : u32) (y : u32) : RustM u32 := do (rust_primitives.arithmetic.wrapping_mul_u32 x y) @[spec] def Impl_8.pow (x : u32) (exp : u32) : RustM u32 := do (rust_primitives.arithmetic.pow_u32 x exp) opaque Impl_8.leading_zeros (x : u32) : RustM u32 opaque Impl_8.ilog2 (x : u32) : RustM u32 @[spec] def Impl_9.wrapping_add (x : u64) (y : u64) : RustM u64 := do (rust_primitives.arithmetic.wrapping_add_u64 x y) @[spec] def Impl_9.wrapping_sub (x : u64) (y : u64) : RustM u64 := do (rust_primitives.arithmetic.wrapping_sub_u64 x y) @[spec] def Impl_9.wrapping_mul (x : u64) (y : u64) : RustM u64 := do (rust_primitives.arithmetic.wrapping_mul_u64 x y) @[spec] def Impl_9.pow (x : u64) (exp : u32) : RustM u64 := do (rust_primitives.arithmetic.pow_u64 x exp) opaque Impl_9.leading_zeros (x : u64) : RustM u32 opaque Impl_9.ilog2 (x : u64) : RustM u32 @[spec] def Impl_10.wrapping_add (x : u128) (y : 
u128) : RustM u128 := do (rust_primitives.arithmetic.wrapping_add_u128 x y) @[spec] def Impl_10.wrapping_sub (x : u128) (y : u128) : RustM u128 := do (rust_primitives.arithmetic.wrapping_sub_u128 x y) @[spec] def Impl_10.wrapping_mul (x : u128) (y : u128) : RustM u128 := do (rust_primitives.arithmetic.wrapping_mul_u128 x y) @[spec] def Impl_10.pow (x : u128) (exp : u32) : RustM u128 := do (rust_primitives.arithmetic.pow_u128 x exp) opaque Impl_10.leading_zeros (x : u128) : RustM u32 opaque Impl_10.ilog2 (x : u128) : RustM u32 @[spec] def Impl_11.wrapping_add (x : usize) (y : usize) : RustM usize := do (rust_primitives.arithmetic.wrapping_add_usize x y) @[spec] def Impl_11.wrapping_sub (x : usize) (y : usize) : RustM usize := do (rust_primitives.arithmetic.wrapping_sub_usize x y) @[spec] def Impl_11.wrapping_mul (x : usize) (y : usize) : RustM usize := do (rust_primitives.arithmetic.wrapping_mul_usize x y) @[spec] def Impl_11.pow (x : usize) (exp : u32) : RustM usize := do (rust_primitives.arithmetic.pow_usize x exp) opaque Impl_11.leading_zeros (x : usize) : RustM u32 opaque Impl_11.ilog2 (x : usize) : RustM u32 @[spec] def Impl_12.wrapping_add (x : i8) (y : i8) : RustM i8 := do (rust_primitives.arithmetic.wrapping_add_i8 x y) @[spec] def Impl_12.wrapping_sub (x : i8) (y : i8) : RustM i8 := do (rust_primitives.arithmetic.wrapping_sub_i8 x y) @[spec] def Impl_12.wrapping_mul (x : i8) (y : i8) : RustM i8 := do (rust_primitives.arithmetic.wrapping_mul_i8 x y) @[spec] def Impl_12.pow (x : i8) (exp : u32) : RustM i8 := do (rust_primitives.arithmetic.pow_i8 x exp) opaque Impl_12.leading_zeros (x : i8) : RustM u32 opaque Impl_12.ilog2 (x : i8) : RustM u32 @[spec] def Impl_13.wrapping_add (x : i16) (y : i16) : RustM i16 := do (rust_primitives.arithmetic.wrapping_add_i16 x y) @[spec] def Impl_13.wrapping_sub (x : i16) (y : i16) : RustM i16 := do (rust_primitives.arithmetic.wrapping_sub_i16 x y) @[spec] def Impl_13.wrapping_mul (x : i16) (y : i16) : RustM i16 := do 
(rust_primitives.arithmetic.wrapping_mul_i16 x y) @[spec] def Impl_13.pow (x : i16) (exp : u32) : RustM i16 := do (rust_primitives.arithmetic.pow_i16 x exp) opaque Impl_13.leading_zeros (x : i16) : RustM u32 opaque Impl_13.ilog2 (x : i16) : RustM u32 @[spec] def Impl_14.wrapping_add (x : i32) (y : i32) : RustM i32 := do (rust_primitives.arithmetic.wrapping_add_i32 x y) @[spec] def Impl_14.wrapping_sub (x : i32) (y : i32) : RustM i32 := do (rust_primitives.arithmetic.wrapping_sub_i32 x y) @[spec] def Impl_14.wrapping_mul (x : i32) (y : i32) : RustM i32 := do (rust_primitives.arithmetic.wrapping_mul_i32 x y) @[spec] def Impl_14.pow (x : i32) (exp : u32) : RustM i32 := do (rust_primitives.arithmetic.pow_i32 x exp) opaque Impl_14.leading_zeros (x : i32) : RustM u32 opaque Impl_14.ilog2 (x : i32) : RustM u32 @[spec] def Impl_15.wrapping_add (x : i64) (y : i64) : RustM i64 := do (rust_primitives.arithmetic.wrapping_add_i64 x y) @[spec] def Impl_15.wrapping_sub (x : i64) (y : i64) : RustM i64 := do (rust_primitives.arithmetic.wrapping_sub_i64 x y) @[spec] def Impl_15.wrapping_mul (x : i64) (y : i64) : RustM i64 := do (rust_primitives.arithmetic.wrapping_mul_i64 x y) @[spec] def Impl_15.pow (x : i64) (exp : u32) : RustM i64 := do (rust_primitives.arithmetic.pow_i64 x exp) opaque Impl_15.leading_zeros (x : i64) : RustM u32 opaque Impl_15.ilog2 (x : i64) : RustM u32 @[spec] def Impl_16.wrapping_add (x : i128) (y : i128) : RustM i128 := do (rust_primitives.arithmetic.wrapping_add_i128 x y) @[spec] def Impl_16.wrapping_sub (x : i128) (y : i128) : RustM i128 := do (rust_primitives.arithmetic.wrapping_sub_i128 x y) @[spec] def Impl_16.wrapping_mul (x : i128) (y : i128) : RustM i128 := do (rust_primitives.arithmetic.wrapping_mul_i128 x y) @[spec] def Impl_16.pow (x : i128) (exp : u32) : RustM i128 := do (rust_primitives.arithmetic.pow_i128 x exp) opaque Impl_16.leading_zeros (x : i128) : RustM u32 opaque Impl_16.ilog2 (x : i128) : RustM u32 @[spec] def Impl_17.wrapping_add (x : 
isize) (y : isize) : RustM isize := do (rust_primitives.arithmetic.wrapping_add_isize x y) @[spec] def Impl_17.wrapping_sub (x : isize) (y : isize) : RustM isize := do (rust_primitives.arithmetic.wrapping_sub_isize x y) @[spec] def Impl_17.wrapping_mul (x : isize) (y : isize) : RustM isize := do (rust_primitives.arithmetic.wrapping_mul_isize x y) @[spec] def Impl_17.pow (x : isize) (exp : u32) : RustM isize := do (rust_primitives.arithmetic.pow_isize x exp) opaque Impl_17.leading_zeros (x : isize) : RustM u32 opaque Impl_17.ilog2 (x : isize) : RustM u32 @[reducible] instance Impl_18.AssociatedTypes : core_models.default.Default.AssociatedTypes u8 where instance Impl_18 : core_models.default.Default u8 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u8)) @[reducible] instance Impl_19.AssociatedTypes : core_models.default.Default.AssociatedTypes u16 where instance Impl_19 : core_models.default.Default u16 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u16)) @[reducible] instance Impl_20.AssociatedTypes : core_models.default.Default.AssociatedTypes u32 where instance Impl_20 : core_models.default.Default u32 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u32)) @[reducible] instance Impl_21.AssociatedTypes : core_models.default.Default.AssociatedTypes u64 where instance Impl_21 : core_models.default.Default u64 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u64)) @[reducible] instance Impl_22.AssociatedTypes : core_models.default.Default.AssociatedTypes u128 where instance Impl_22 : core_models.default.Default u128 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u128)) @[reducible] instance Impl_23.AssociatedTypes : core_models.default.Default.AssociatedTypes usize where instance Impl_23 : core_models.default.Default usize where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : usize)) @[reducible] instance Impl_24.AssociatedTypes : 
core_models.default.Default.AssociatedTypes i8 where instance Impl_24 : core_models.default.Default i8 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i8)) @[reducible] instance Impl_25.AssociatedTypes : core_models.default.Default.AssociatedTypes i16 where instance Impl_25 : core_models.default.Default i16 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i16)) @[reducible] instance Impl_26.AssociatedTypes : core_models.default.Default.AssociatedTypes i32 where instance Impl_26 : core_models.default.Default i32 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i32)) @[reducible] instance Impl_27.AssociatedTypes : core_models.default.Default.AssociatedTypes i64 where instance Impl_27 : core_models.default.Default i64 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i64)) @[reducible] instance Impl_28.AssociatedTypes : core_models.default.Default.AssociatedTypes i128 where instance Impl_28 : core_models.default.Default i128 where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i128)) @[reducible] instance Impl_29.AssociatedTypes : core_models.default.Default.AssociatedTypes isize where instance Impl_29 : core_models.default.Default isize where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : isize)) end core_models.num namespace core_models.ops.arith class AddAssign.AssociatedTypes (Self : Type) (Rhs : Type) where class AddAssign (Self : Type) (Rhs : Type) [associatedTypes : outParam (AddAssign.AssociatedTypes (Self : Type) (Rhs : Type))] where add_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self) class SubAssign.AssociatedTypes (Self : Type) (Rhs : Type) where class SubAssign (Self : Type) (Rhs : Type) [associatedTypes : outParam (SubAssign.AssociatedTypes (Self : Type) (Rhs : Type))] where sub_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self) class MulAssign.AssociatedTypes (Self : Type) (Rhs : Type) where class MulAssign (Self : Type) (Rhs : 
Type) [associatedTypes : outParam (MulAssign.AssociatedTypes (Self : Type) (Rhs : Type))] where mul_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self) class DivAssign.AssociatedTypes (Self : Type) (Rhs : Type) where class DivAssign (Self : Type) (Rhs : Type) [associatedTypes : outParam (DivAssign.AssociatedTypes (Self : Type) (Rhs : Type))] where div_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self) class RemAssign.AssociatedTypes (Self : Type) (Rhs : Type) where class RemAssign (Self : Type) (Rhs : Type) [associatedTypes : outParam (RemAssign.AssociatedTypes (Self : Type) (Rhs : Type))] where rem_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self) end core_models.ops.arith namespace core_models.ops.control_flow inductive ControlFlow (B : Type) (C : Type) : Type | Continue : C -> ControlFlow (B : Type) (C : Type) | Break : B -> ControlFlow (B : Type) (C : Type) end core_models.ops.control_flow namespace core_models.ops.try_trait class FromResidual.AssociatedTypes (Self : Type) (R : Type) where class FromResidual (Self : Type) (R : Type) [associatedTypes : outParam (FromResidual.AssociatedTypes (Self : Type) (R : Type))] where from_residual (Self) (R) : (R -> RustM Self) end core_models.ops.try_trait namespace core_models.ops.drop class Drop.AssociatedTypes (Self : Type) where class Drop (Self : Type) [associatedTypes : outParam (Drop.AssociatedTypes (Self : Type))] where drop (Self) : (Self -> RustM Self) end core_models.ops.drop namespace core_models.ops.range structure RangeTo (T : Type) where _end : T structure RangeFrom (T : Type) where start : T structure Range (T : Type) where start : T _end : T structure RangeFull where -- no fields end core_models.ops.range namespace core_models.option inductive Option (T : Type) : Type | Some : T -> Option (T : Type) | None : Option (T : Type) end core_models.option namespace core_models.cmp class PartialOrd.AssociatedTypes (Self : Type) (Rhs : Type) where [trait_constr_PartialOrd_i0 : PartialEq.AssociatedTypes Self Rhs] 
attribute [instance_reducible, instance] PartialOrd.AssociatedTypes.trait_constr_PartialOrd_i0 class PartialOrd (Self : Type) (Rhs : Type) [associatedTypes : outParam (PartialOrd.AssociatedTypes (Self : Type) (Rhs : Type))] where [trait_constr_PartialOrd_i0 : PartialEq Self Rhs] partial_cmp (Self) (Rhs) : (Self -> Rhs -> RustM (core_models.option.Option Ordering)) attribute [instance_reducible, instance] PartialOrd.trait_constr_PartialOrd_i0 class PartialOrdDefaults.AssociatedTypes (Self : Type) (Rhs : Type) where class PartialOrdDefaults (Self : Type) (Rhs : Type) [associatedTypes : outParam (PartialOrdDefaults.AssociatedTypes (Self : Type) (Rhs : Type))] where lt (Self) (Rhs) [trait_constr_lt_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs] [trait_constr_lt_i1 : PartialOrd Self Rhs ] : (Self -> Rhs -> RustM Bool) le (Self) (Rhs) [trait_constr_le_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs] [trait_constr_le_i1 : PartialOrd Self Rhs ] : (Self -> Rhs -> RustM Bool) gt (Self) (Rhs) [trait_constr_gt_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs] [trait_constr_gt_i1 : PartialOrd Self Rhs ] : (Self -> Rhs -> RustM Bool) ge (Self) (Rhs) [trait_constr_ge_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs] [trait_constr_ge_i1 : PartialOrd Self Rhs ] : (Self -> Rhs -> RustM Bool) @[reducible] instance Impl_1.AssociatedTypes (T : Type) [trait_constr_Impl_1_associated_type_i0 : PartialOrd.AssociatedTypes T T] [trait_constr_Impl_1_i0 : PartialOrd T T ] : PartialOrdDefaults.AssociatedTypes T T where instance Impl_1 (T : Type) [trait_constr_Impl_1_associated_type_i0 : PartialOrd.AssociatedTypes T T] [trait_constr_Impl_1_i0 : PartialOrd T T ] : PartialOrdDefaults T T where lt := fun [trait_constr_lt_associated_type_i1 : PartialOrd.AssociatedTypes T T] [trait_constr_lt_i1 : PartialOrd T T ] (self : T) (y : T) => do match (← (PartialOrd.partial_cmp T T self y)) with | (core_models.option.Option.Some (Ordering.Less )) => do (pure true) | _ => 
do (pure false) le := fun [trait_constr_le_associated_type_i1 : PartialOrd.AssociatedTypes T T] [trait_constr_le_i1 : PartialOrd T T ] (self : T) (y : T) => do match (← (PartialOrd.partial_cmp T T self y)) with | (core_models.option.Option.Some (Ordering.Less )) | (core_models.option.Option.Some (Ordering.Equal )) => do (pure true) | _ => do (pure false) gt := fun [trait_constr_gt_associated_type_i1 : PartialOrd.AssociatedTypes T T] [trait_constr_gt_i1 : PartialOrd T T ] (self : T) (y : T) => do match (← (PartialOrd.partial_cmp T T self y)) with | (core_models.option.Option.Some (Ordering.Greater )) => do (pure true) | _ => do (pure false) ge := fun [trait_constr_ge_associated_type_i1 : PartialOrd.AssociatedTypes T T] [trait_constr_ge_i1 : PartialOrd T T ] (self : T) (y : T) => do match (← (PartialOrd.partial_cmp T T self y)) with | (core_models.option.Option.Some (Ordering.Greater )) | (core_models.option.Option.Some (Ordering.Equal )) => do (pure true) | _ => do (pure false) class Ord.AssociatedTypes (Self : Type) where [trait_constr_Ord_i0 : Eq.AssociatedTypes Self] [trait_constr_Ord_i1 : PartialOrd.AssociatedTypes Self Self] attribute [instance_reducible, instance] Ord.AssociatedTypes.trait_constr_Ord_i0 attribute [instance_reducible, instance] Ord.AssociatedTypes.trait_constr_Ord_i1 class Ord (Self : Type) [associatedTypes : outParam (Ord.AssociatedTypes (Self : Type))] where [trait_constr_Ord_i0 : Eq Self] [trait_constr_Ord_i1 : PartialOrd Self Self] cmp (Self) : (Self -> Self -> RustM Ordering) attribute [instance_reducible, instance] Ord.trait_constr_Ord_i0 attribute [instance_reducible, instance] Ord.trait_constr_Ord_i1 @[spec] def max (T : Type) [trait_constr_max_associated_type_i0 : Ord.AssociatedTypes T] [trait_constr_max_i0 : Ord T ] (v1 : T) (v2 : T) : RustM T := do match (← (Ord.cmp T v1 v2)) with | (Ordering.Greater ) => do (pure v1) | _ => do (pure v2) @[spec] def min (T : Type) [trait_constr_min_associated_type_i0 : Ord.AssociatedTypes T] 
-- `min` body continues below; then `Reverse` wrapper instances that flip comparison
-- order (`partial_cmp`/`cmp` applied to `(other, self)` via `Reverse._0`), followed by
-- `PartialOrd`/`Ord` instances for every primitive integer type (Impl_30 .. Impl_53).
-- NOTE(review): the condition `(self ? other)` in all of these primitive instances
-- looks garbled — a comparison operator (e.g. `<?` or `>?`) appears to have been lost
-- in extraction — and as written `partial_cmp`/`cmp` can only ever produce
-- `Greater`/`Equal`, never `Less`. Confirm against the generator's current output.
[trait_constr_min_i0 : Ord T ] (v1 : T) (v2 : T) : RustM T := do match (← (Ord.cmp T v1 v2)) with | (Ordering.Greater ) => do (pure v2) | _ => do (pure v1) @[reducible] instance Impl_2.AssociatedTypes (T : Type) [trait_constr_Impl_2_associated_type_i0 : PartialOrd.AssociatedTypes T T] [trait_constr_Impl_2_i0 : PartialOrd T T ] : PartialOrd.AssociatedTypes (Reverse T) (Reverse T) where instance Impl_2 (T : Type) [trait_constr_Impl_2_associated_type_i0 : PartialOrd.AssociatedTypes T T] [trait_constr_Impl_2_i0 : PartialOrd T T ] : PartialOrd (Reverse T) (Reverse T) where partial_cmp := fun (self : (Reverse T)) (other : (Reverse T)) => do (PartialOrd.partial_cmp T T (Reverse._0 other) (Reverse._0 self)) @[reducible] instance Impl_5.AssociatedTypes (T : Type) [trait_constr_Impl_5_associated_type_i0 : Ord.AssociatedTypes T] [trait_constr_Impl_5_i0 : Ord T ] : Ord.AssociatedTypes (Reverse T) where instance Impl_5 (T : Type) [trait_constr_Impl_5_associated_type_i0 : Ord.AssociatedTypes T] [trait_constr_Impl_5_i0 : Ord T ] : Ord (Reverse T) where cmp := fun (self : (Reverse T)) (other : (Reverse T)) => do (Ord.cmp T (Reverse._0 other) (Reverse._0 self)) @[reducible] instance Impl_30.AssociatedTypes : PartialOrd.AssociatedTypes u8 u8 where instance Impl_30 : PartialOrd u8 u8 where partial_cmp := fun (self : u8) (other : u8) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_31.AssociatedTypes : Ord.AssociatedTypes u8 where instance Impl_31 : Ord u8 where cmp := fun (self : u8) (other : u8) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_32.AssociatedTypes : PartialOrd.AssociatedTypes i8 i8 where instance Impl_32 : PartialOrd i8 i8 where partial_cmp := fun (self : i8) (other : i8) => do if (← (self ?
other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_33.AssociatedTypes : Ord.AssociatedTypes i8 where instance Impl_33 : Ord i8 where cmp := fun (self : i8) (other : i8) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_34.AssociatedTypes : PartialOrd.AssociatedTypes u16 u16 where instance Impl_34 : PartialOrd u16 u16 where partial_cmp := fun (self : u16) (other : u16) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_35.AssociatedTypes : Ord.AssociatedTypes u16 where instance Impl_35 : Ord u16 where cmp := fun (self : u16) (other : u16) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_36.AssociatedTypes : PartialOrd.AssociatedTypes i16 i16 where instance Impl_36 : PartialOrd i16 i16 where partial_cmp := fun (self : i16) (other : i16) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_37.AssociatedTypes : Ord.AssociatedTypes i16 where instance Impl_37 : Ord i16 where cmp := fun (self : i16) (other : i16) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_38.AssociatedTypes : PartialOrd.AssociatedTypes u32 u32 where instance Impl_38 : PartialOrd u32 u32 where partial_cmp := fun (self : u32) (other : u32) => do if (← (self ?
other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_39.AssociatedTypes : Ord.AssociatedTypes u32 where instance Impl_39 : Ord u32 where cmp := fun (self : u32) (other : u32) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_40.AssociatedTypes : PartialOrd.AssociatedTypes i32 i32 where instance Impl_40 : PartialOrd i32 i32 where partial_cmp := fun (self : i32) (other : i32) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_41.AssociatedTypes : Ord.AssociatedTypes i32 where instance Impl_41 : Ord i32 where cmp := fun (self : i32) (other : i32) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_42.AssociatedTypes : PartialOrd.AssociatedTypes u64 u64 where instance Impl_42 : PartialOrd u64 u64 where partial_cmp := fun (self : u64) (other : u64) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_43.AssociatedTypes : Ord.AssociatedTypes u64 where instance Impl_43 : Ord u64 where cmp := fun (self : u64) (other : u64) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_44.AssociatedTypes : PartialOrd.AssociatedTypes i64 i64 where instance Impl_44 : PartialOrd i64 i64 where partial_cmp := fun (self : i64) (other : i64) => do if (← (self ?
other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_45.AssociatedTypes : Ord.AssociatedTypes i64 where instance Impl_45 : Ord i64 where cmp := fun (self : i64) (other : i64) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_46.AssociatedTypes : PartialOrd.AssociatedTypes u128 u128 where instance Impl_46 : PartialOrd u128 u128 where partial_cmp := fun (self : u128) (other : u128) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_47.AssociatedTypes : Ord.AssociatedTypes u128 where instance Impl_47 : Ord u128 where cmp := fun (self : u128) (other : u128) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_48.AssociatedTypes : PartialOrd.AssociatedTypes i128 i128 where instance Impl_48 : PartialOrd i128 i128 where partial_cmp := fun (self : i128) (other : i128) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_49.AssociatedTypes : Ord.AssociatedTypes i128 where instance Impl_49 : Ord i128 where cmp := fun (self : i128) (other : i128) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_50.AssociatedTypes : PartialOrd.AssociatedTypes usize usize where instance Impl_50 : PartialOrd usize usize where partial_cmp := fun (self : usize) (other : usize) => do if (← (self ?
-- The line below closes `core_models.cmp`, declares the `FlatMap` adapter struct,
-- and begins `core_models.option` helpers (`as_ref`, `unwrap_or`,
-- `unwrap_or_default`, and the header of `Impl.take`).
other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_51.AssociatedTypes : Ord.AssociatedTypes usize where instance Impl_51 : Ord usize where cmp := fun (self : usize) (other : usize) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) @[reducible] instance Impl_52.AssociatedTypes : PartialOrd.AssociatedTypes isize isize where instance Impl_52 : PartialOrd isize isize where partial_cmp := fun (self : isize) (other : isize) => do if (← (self ? other)) then do (pure (core_models.option.Option.Some Ordering.Greater)) else do (pure (core_models.option.Option.Some Ordering.Equal)) @[reducible] instance Impl_53.AssociatedTypes : Ord.AssociatedTypes isize where instance Impl_53 : Ord isize where cmp := fun (self : isize) (other : isize) => do if (← (self ? other)) then do (pure Ordering.Greater) else do (pure Ordering.Equal) end core_models.cmp namespace core_models.iter.adapters.flat_map structure FlatMap (I : Type) (U : Type) (F : Type) where it : I f : F current : (core_models.option.Option U) end core_models.iter.adapters.flat_map namespace core_models.option @[spec] def Impl.as_ref (T : Type) (self : (Option T)) : RustM (Option T) := do match self with | (Option.Some x) => do (pure (Option.Some x)) | (Option.None ) => do (pure Option.None) @[spec] def Impl.unwrap_or (T : Type) (self : (Option T)) (default : T) : RustM T := do match self with | (Option.Some x) => do (pure x) | (Option.None ) => do (pure default) @[spec] def Impl.unwrap_or_default (T : Type) [trait_constr_unwrap_or_default_associated_type_i0 : core_models.default.Default.AssociatedTypes T] [trait_constr_unwrap_or_default_i0 : core_models.default.Default T ] (self : (Option T)) : RustM T := do match self with | (Option.Some x) => do (pure x) | (Option.None ) => do (core_models.default.Default.default T rust_primitives.hax.Tuple0.mk) @[spec] def Impl.take (T :
-- Continues `core_models.option`: `Impl.take` returns the pair
-- `(Option.None, self)` — presumably (updated state, taken value) in hax's
-- state-passing convention; TODO confirm the component order against other
-- hax output. `Impl.is_some.spec` has a trivially-true postcondition
-- (`res implies true`). Then: `core_models.panicking` (opaque panic functions),
-- a placeholder `Hash` instance that defers to `internal.panic`, the `Result`
-- type, and the start of `core_models.fmt`.
Type) (self : (Option T)) : RustM (rust_primitives.hax.Tuple2 (Option T) (Option T)) := do (pure (rust_primitives.hax.Tuple2.mk Option.None self)) def Impl.is_some (T : Type) (self : (Option T)) : RustM Bool := do match self with | (Option.Some _) => do (pure true) | _ => do (pure false) set_option hax_mvcgen.specset "bv" in @[hax_spec] def Impl.is_some.spec (T : Type) (self : (Option T)) : Spec (requires := do pure True) (ensures := fun res => do (hax_lib.prop.constructors.implies (← (hax_lib.prop.constructors.from_bool res)) (← (hax_lib.prop.Impl.from_bool true)))) (Impl.is_some (T : Type) (self : (Option T))) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [Impl.is_some] <;> bv_decide } @[spec] def Impl.is_none (T : Type) (self : (Option T)) : RustM Bool := do ((← (Impl.is_some T self)) ==? false) end core_models.option namespace core_models.panicking opaque panic_explicit (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Never opaque panic (_msg : String) : RustM rust_primitives.hax.Never opaque panic_fmt (_fmt : core_models.fmt.Arguments) : RustM rust_primitives.hax.Never end core_models.panicking namespace core_models.panicking.internal opaque panic (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T end core_models.panicking.internal namespace core_models.hash @[reducible] instance Impl.AssociatedTypes (T : Type) : Hash.AssociatedTypes T where instance Impl (T : Type) : Hash T where hash := fun (H : Type) [trait_constr_hash_associated_type_i0 : Hasher.AssociatedTypes H] [trait_constr_hash_i0 : Hasher H ] (self : T) (h : H) => do (core_models.panicking.internal.panic H rust_primitives.hax.Tuple0.mk) end core_models.hash namespace core_models.result inductive Result (T : Type) (E : Type) : Type | Ok : T -> Result (T : Type) (E : Type) | Err : E -> Result (T : Type) (E : Type) end core_models.result namespace core_models.fmt abbrev Result : Type :=
-- `fmt`: `Display`/`Debug` are modeled as RustM functions that thread the
-- `Formatter` through and return it alongside a `Result`; the blanket `Debug`
-- instance and `write_fmt` below are no-ops that always return `Ok`.
(core_models.result.Result rust_primitives.hax.Tuple0 Error) class Display.AssociatedTypes (Self : Type) where class Display (Self : Type) [associatedTypes : outParam (Display.AssociatedTypes (Self : Type))] where fmt (Self) : (Self -> Formatter -> RustM (rust_primitives.hax.Tuple2 Formatter (core_models.result.Result rust_primitives.hax.Tuple0 Error))) class Debug.AssociatedTypes (Self : Type) where class Debug (Self : Type) [associatedTypes : outParam (Debug.AssociatedTypes (Self : Type))] where dbg_fmt (Self) : (Self -> Formatter -> RustM (rust_primitives.hax.Tuple2 Formatter (core_models.result.Result rust_primitives.hax.Tuple0 Error))) end core_models.fmt namespace core_models.error class Error.AssociatedTypes (Self : Type) where [trait_constr_Error_i0 : core_models.fmt.Display.AssociatedTypes Self] [trait_constr_Error_i1 : core_models.fmt.Debug.AssociatedTypes Self] attribute [instance_reducible, instance] Error.AssociatedTypes.trait_constr_Error_i0 attribute [instance_reducible, instance] Error.AssociatedTypes.trait_constr_Error_i1 class Error (Self : Type) [associatedTypes : outParam (Error.AssociatedTypes (Self : Type))] where [trait_constr_Error_i0 : core_models.fmt.Display Self] [trait_constr_Error_i1 : core_models.fmt.Debug Self] attribute [instance_reducible, instance] Error.trait_constr_Error_i0 attribute [instance_reducible, instance] Error.trait_constr_Error_i1 end core_models.error namespace core_models.fmt @[reducible] instance Impl.AssociatedTypes (T : Type) : Debug.AssociatedTypes T where instance Impl (T : Type) : Debug T where dbg_fmt := fun (self : T) (f : Formatter) => do let hax_temp_output : (core_models.result.Result rust_primitives.hax.Tuple0 Error) := (core_models.result.Result.Ok rust_primitives.hax.Tuple0.mk); (pure (rust_primitives.hax.Tuple2.mk f hax_temp_output)) @[spec] def Impl_11.write_fmt (f : Formatter) (args : Arguments) : RustM (rust_primitives.hax.Tuple2 Formatter (core_models.result.Result rust_primitives.hax.Tuple0
-- `core_models.num`: `from_str_radix` for each integer width is left opaque
-- (no computational model); then the header of `option`'s `Impl.ok_or`.
Error)) := do let hax_temp_output : (core_models.result.Result rust_primitives.hax.Tuple0 Error) := (core_models.result.Result.Ok rust_primitives.hax.Tuple0.mk); (pure (rust_primitives.hax.Tuple2.mk f hax_temp_output)) end core_models.fmt namespace core_models.num opaque Impl_6.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result u8 core_models.num.error.ParseIntError) opaque Impl_7.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result u16 core_models.num.error.ParseIntError) opaque Impl_8.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result u32 core_models.num.error.ParseIntError) opaque Impl_9.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result u64 core_models.num.error.ParseIntError) opaque Impl_10.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result u128 core_models.num.error.ParseIntError) opaque Impl_11.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result usize core_models.num.error.ParseIntError) opaque Impl_12.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result i8 core_models.num.error.ParseIntError) opaque Impl_13.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result i16 core_models.num.error.ParseIntError) opaque Impl_14.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result i32 core_models.num.error.ParseIntError) opaque Impl_15.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result i64 core_models.num.error.ParseIntError) opaque Impl_16.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result i128 core_models.num.error.ParseIntError) opaque Impl_17.from_str_radix (src : String) (radix : u32) : RustM (core_models.result.Result isize core_models.num.error.ParseIntError) end core_models.num namespace core_models.option @[spec] def Impl.ok_or (T : Type) (E : Type) (self : (Option T))
-- `Impl.ok_or` maps `Some v -> Ok v` / `None -> Err err`; then `Result` helpers
-- (`unwrap_or`, `is_ok`, `ok`), the slice-iterator structs (`Chunks`,
-- `ChunksExact`, `Iter` over a primitive `Seq`), and `core_models.slice`
-- operations, several of which delegate to `rust_primitives.slice` or are
-- left opaque (`contains`, `copy_within`, `binary_search`).
(err : E) : RustM (core_models.result.Result T E) := do match self with | (Option.Some v) => do (pure (core_models.result.Result.Ok v)) | (Option.None ) => do (pure (core_models.result.Result.Err err)) end core_models.option namespace core_models.result @[spec] def Impl.unwrap_or (T : Type) (E : Type) (self : (Result T E)) (default : T) : RustM T := do match self with | (Result.Ok t) => do (pure t) | (Result.Err _) => do (pure default) @[spec] def Impl.is_ok (T : Type) (E : Type) (self : (Result T E)) : RustM Bool := do match self with | (Result.Ok _) => do (pure true) | _ => do (pure false) @[spec] def Impl.ok (T : Type) (E : Type) (self : (Result T E)) : RustM (core_models.option.Option T) := do match self with | (Result.Ok x) => do (pure (core_models.option.Option.Some x)) | (Result.Err _) => do (pure core_models.option.Option.None) end core_models.result namespace core_models.slice.iter structure Chunks (T : Type) where cs : usize elements : (RustSlice T) @[spec] def Impl.new (T : Type) (cs : usize) (elements : (RustSlice T)) : RustM (Chunks T) := do (pure (Chunks.mk (cs := cs) (elements := elements))) structure ChunksExact (T : Type) where cs : usize elements : (RustSlice T) @[spec] def Impl_1.new (T : Type) (cs : usize) (elements : (RustSlice T)) : RustM (ChunksExact T) := do (pure (ChunksExact.mk (cs := cs) (elements := elements))) structure Iter (T : Type) where _0 : (rust_primitives.sequence.Seq T) end core_models.slice.iter namespace core_models.slice @[spec] def Impl.len (T : Type) (s : (RustSlice T)) : RustM usize := do (rust_primitives.slice.slice_length T s) @[spec] def Impl.chunks (T : Type) (s : (RustSlice T)) (cs : usize) : RustM (core_models.slice.iter.Chunks T) := do (core_models.slice.iter.Impl.new T cs s) @[spec] def Impl.iter (T : Type) (s : (RustSlice T)) : RustM (core_models.slice.iter.Iter T) := do (pure (core_models.slice.iter.Iter.mk (← (rust_primitives.sequence.seq_from_slice T s)))) @[spec] def Impl.chunks_exact (T : Type) (s :
-- `copy_from_slice` / `clone_from_slice` below are both modeled as
-- `rust_primitives.mem.replace` (the whole destination is replaced by `src`);
-- their `Spec`s require the two slices to have equal length, mirroring the
-- panic condition of the Rust originals.
(RustSlice T)) (cs : usize) : RustM (core_models.slice.iter.ChunksExact T) := do (core_models.slice.iter.Impl_1.new T cs s) @[spec] def Impl.is_empty (T : Type) (s : (RustSlice T)) : RustM Bool := do ((← (Impl.len T s)) ==? (0 : usize)) opaque Impl.contains (T : Type) (s : (RustSlice T)) (v : T) : RustM Bool opaque Impl.copy_within (T : Type) (R : Type) [trait_constr_copy_within_associated_type_i0 : core.marker.Copy.AssociatedTypes T] [trait_constr_copy_within_i0 : core.marker.Copy T ] (s : (RustSlice T)) (src : R) (dest : usize) : RustM (RustSlice T) opaque Impl.binary_search (T : Type) (s : (RustSlice T)) (x : T) : RustM (core_models.result.Result usize usize) def Impl.copy_from_slice (T : Type) [trait_constr_copy_from_slice_associated_type_i0 : core_models.marker.Copy.AssociatedTypes T] [trait_constr_copy_from_slice_i0 : core_models.marker.Copy T ] (s : (RustSlice T)) (src : (RustSlice T)) : RustM (RustSlice T) := do let ⟨tmp0, out⟩ ← (rust_primitives.mem.replace (RustSlice T) s src); let s : (RustSlice T) := tmp0; let _ := out; (pure s) set_option hax_mvcgen.specset "bv" in @[hax_spec] def Impl.copy_from_slice.spec (T : Type) [trait_constr_copy_from_slice_associated_type_i0 : core_models.marker.Copy.AssociatedTypes T] [trait_constr_copy_from_slice_i0 : core_models.marker.Copy T ] (s : (RustSlice T)) (src : (RustSlice T)) : Spec (requires := do ((← (Impl.len T s)) ==?
(← (Impl.len T src)))) (ensures := fun _ => pure True) (Impl.copy_from_slice (T : Type) (s : (RustSlice T)) (src : (RustSlice T))) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [Impl.copy_from_slice] <;> bv_decide } def Impl.clone_from_slice (T : Type) [trait_constr_clone_from_slice_associated_type_i0 : core_models.clone.Clone.AssociatedTypes T] [trait_constr_clone_from_slice_i0 : core_models.clone.Clone T ] (s : (RustSlice T)) (src : (RustSlice T)) : RustM (RustSlice T) := do let ⟨tmp0, out⟩ ← (rust_primitives.mem.replace (RustSlice T) s src); let s : (RustSlice T) := tmp0; let _ := out; (pure s) set_option hax_mvcgen.specset "bv" in @[hax_spec] def Impl.clone_from_slice.spec (T : Type) [trait_constr_clone_from_slice_associated_type_i0 : core_models.clone.Clone.AssociatedTypes T] [trait_constr_clone_from_slice_i0 : core_models.clone.Clone T ] (s : (RustSlice T)) (src : (RustSlice T)) : Spec (requires := do ((← (Impl.len T s)) ==? (← (Impl.len T src)))) (ensures := fun _ => pure True) (Impl.clone_from_slice (T : Type) (s : (RustSlice T)) (src : (RustSlice T))) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [Impl.clone_from_slice] <;> bv_decide } def Impl.split_at (T : Type) (s : (RustSlice T)) (mid : usize) : RustM (rust_primitives.hax.Tuple2 (RustSlice T) (RustSlice T)) := do (rust_primitives.slice.slice_split_at T s mid) set_option hax_mvcgen.specset "bv" in @[hax_spec] def Impl.split_at.spec (T : Type) (s : (RustSlice T)) (mid : usize) : Spec (requires := do (mid <=?
-- `split_at.spec` requires `mid <= len s`; `split_at_checked` checks the same
-- bound at runtime and returns `Option` instead. Then `str` stubs (opaque
-- `from_utf8`) and the `convert` trait classes (`TryInto`/`TryFrom`, each with
-- an `Error` associated type).
(← (Impl.len T s)))) (ensures := fun _ => pure True) (Impl.split_at (T : Type) (s : (RustSlice T)) (mid : usize)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [Impl.split_at] <;> bv_decide } @[spec] def Impl.split_at_checked (T : Type) (s : (RustSlice T)) (mid : usize) : RustM (core_models.option.Option (rust_primitives.hax.Tuple2 (RustSlice T) (RustSlice T))) := do if (← (mid <=? (← (Impl.len T s)))) then do (pure (core_models.option.Option.Some (← (Impl.split_at T s mid)))) else do (pure core_models.option.Option.None) end core_models.slice namespace core_models.str.error structure Utf8Error where -- no fields end core_models.str.error namespace core_models.str.converts opaque from_utf8 (s : (RustSlice u8)) : RustM (core_models.result.Result String core_models.str.error.Utf8Error) end core_models.str.converts namespace core_models.str.iter structure Split (T : Type) where _0 : T end core_models.str.iter namespace core_models.convert class TryInto.AssociatedTypes (Self : Type) (T : Type) where Error : Type attribute [reducible] TryInto.AssociatedTypes.Error abbrev TryInto.Error := TryInto.AssociatedTypes.Error class TryInto (Self : Type) (T : Type) [associatedTypes : outParam (TryInto.AssociatedTypes (Self : Type) (T : Type))] where try_into (Self) (T) : (Self -> RustM (core_models.result.Result T associatedTypes.Error)) class TryFrom.AssociatedTypes (Self : Type) (T : Type) where Error : Type attribute [reducible] TryFrom.AssociatedTypes.Error abbrev TryFrom.Error := TryFrom.AssociatedTypes.Error class TryFrom (Self : Type) (T : Type) [associatedTypes : outParam (TryFrom.AssociatedTypes (Self : Type) (T : Type))] where try_from (Self) (T) : (T -> RustM (core_models.result.Result Self associatedTypes.Error)) end core_models.convert namespace core_models.iter.traits.iterator class Iterator.AssociatedTypes (Self : Type) where Item : Type attribute [reducible]
-- Trait-class boilerplate: the generator splits every Rust trait into an
-- `AssociatedTypes` class (associated types only, consumed as an `outParam`)
-- and an operations class, with an `abbrev` re-exporting each associated type.
-- Below: `Iterator` (whose `next` threads state, returning `(self, Option Item)`),
-- `IntoIterator`, and the `core_models.ops.arith` operator traits.
Iterator.AssociatedTypes.Item abbrev Iterator.Item := Iterator.AssociatedTypes.Item class Iterator (Self : Type) [associatedTypes : outParam (Iterator.AssociatedTypes (Self : Type))] where next (Self) : (Self -> RustM (rust_primitives.hax.Tuple2 Self (core_models.option.Option associatedTypes.Item))) end core_models.iter.traits.iterator namespace core_models.iter.traits.collect class IntoIterator.AssociatedTypes (Self : Type) where IntoIter : Type attribute [reducible] IntoIterator.AssociatedTypes.IntoIter abbrev IntoIterator.IntoIter := IntoIterator.AssociatedTypes.IntoIter class IntoIterator (Self : Type) [associatedTypes : outParam (IntoIterator.AssociatedTypes (Self : Type))] where into_iter (Self) : (Self -> RustM associatedTypes.IntoIter) end core_models.iter.traits.collect namespace core_models.ops.arith class Add.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Add.AssociatedTypes.Output abbrev Add.Output := Add.AssociatedTypes.Output class Add (Self : Type) (Rhs : Type) [associatedTypes : outParam (Add.AssociatedTypes (Self : Type) (Rhs : Type))] where add (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class Sub.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Sub.AssociatedTypes.Output abbrev Sub.Output := Sub.AssociatedTypes.Output class Sub (Self : Type) (Rhs : Type) [associatedTypes : outParam (Sub.AssociatedTypes (Self : Type) (Rhs : Type))] where sub (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class Mul.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Mul.AssociatedTypes.Output abbrev Mul.Output := Mul.AssociatedTypes.Output class Mul (Self : Type) (Rhs : Type) [associatedTypes : outParam (Mul.AssociatedTypes (Self : Type) (Rhs : Type))] where mul (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class Div.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible]
Div.AssociatedTypes.Output abbrev Div.Output := Div.AssociatedTypes.Output class Div (Self : Type) (Rhs : Type) [associatedTypes : outParam (Div.AssociatedTypes (Self : Type) (Rhs : Type))] where div (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class Neg.AssociatedTypes (Self : Type) where Output : Type attribute [reducible] Neg.AssociatedTypes.Output abbrev Neg.Output := Neg.AssociatedTypes.Output class Neg (Self : Type) [associatedTypes : outParam (Neg.AssociatedTypes (Self : Type))] where neg (Self) : (Self -> RustM associatedTypes.Output) class Rem.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Rem.AssociatedTypes.Output abbrev Rem.Output := Rem.AssociatedTypes.Output class Rem (Self : Type) (Rhs : Type) [associatedTypes : outParam (Rem.AssociatedTypes (Self : Type) (Rhs : Type))] where rem (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) end core_models.ops.arith namespace core_models.ops.bit class Shr.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Shr.AssociatedTypes.Output abbrev Shr.Output := Shr.AssociatedTypes.Output class Shr (Self : Type) (Rhs : Type) [associatedTypes : outParam (Shr.AssociatedTypes (Self : Type) (Rhs : Type))] where shr (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class Shl.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] Shl.AssociatedTypes.Output abbrev Shl.Output := Shl.AssociatedTypes.Output class Shl (Self : Type) (Rhs : Type) [associatedTypes : outParam (Shl.AssociatedTypes (Self : Type) (Rhs : Type))] where shl (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class BitXor.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] BitXor.AssociatedTypes.Output abbrev BitXor.Output := BitXor.AssociatedTypes.Output class BitXor (Self : Type) (Rhs : Type) [associatedTypes : outParam (BitXor.AssociatedTypes (Self : Type) (Rhs : Type))] where
-- Remaining bitwise traits, then `Index`, `FnOnce` (closure call protocol),
-- `Try` (the `?`-operator model via `ControlFlow`), `Deref`, `SliceIndex`,
-- and `FromStr`.
bitxor (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class BitAnd.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] BitAnd.AssociatedTypes.Output abbrev BitAnd.Output := BitAnd.AssociatedTypes.Output class BitAnd (Self : Type) (Rhs : Type) [associatedTypes : outParam (BitAnd.AssociatedTypes (Self : Type) (Rhs : Type))] where bitand (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) class BitOr.AssociatedTypes (Self : Type) (Rhs : Type) where Output : Type attribute [reducible] BitOr.AssociatedTypes.Output abbrev BitOr.Output := BitOr.AssociatedTypes.Output class BitOr (Self : Type) (Rhs : Type) [associatedTypes : outParam (BitOr.AssociatedTypes (Self : Type) (Rhs : Type))] where bitor (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output) end core_models.ops.bit namespace core_models.ops.index class Index.AssociatedTypes (Self : Type) (Idx : Type) where Output : Type attribute [reducible] Index.AssociatedTypes.Output abbrev Index.Output := Index.AssociatedTypes.Output class Index (Self : Type) (Idx : Type) [associatedTypes : outParam (Index.AssociatedTypes (Self : Type) (Idx : Type))] where index (Self) (Idx) : (Self -> Idx -> RustM associatedTypes.Output) end core_models.ops.index namespace core_models.ops.function class FnOnce.AssociatedTypes (Self : Type) (Args : Type) where Output : Type attribute [reducible] FnOnce.AssociatedTypes.Output abbrev FnOnce.Output := FnOnce.AssociatedTypes.Output class FnOnce (Self : Type) (Args : Type) [associatedTypes : outParam (FnOnce.AssociatedTypes (Self : Type) (Args : Type))] where call_once (Self) (Args) : (Self -> Args -> RustM associatedTypes.Output) end core_models.ops.function namespace core_models.ops.try_trait class Try.AssociatedTypes (Self : Type) where Output : Type Residual : Type attribute [reducible] Try.AssociatedTypes.Output attribute [reducible] Try.AssociatedTypes.Residual abbrev Try.Output := Try.AssociatedTypes.Output abbrev Try.Residual :=
Try.AssociatedTypes.Residual class Try (Self : Type) [associatedTypes : outParam (Try.AssociatedTypes (Self : Type))] where from_output (Self) : (associatedTypes.Output -> RustM Self) branch (Self) : (Self -> RustM (core_models.ops.control_flow.ControlFlow associatedTypes.Residual associatedTypes.Output)) end core_models.ops.try_trait namespace core_models.ops.deref class Deref.AssociatedTypes (Self : Type) where Target : Type attribute [reducible] Deref.AssociatedTypes.Target abbrev Deref.Target := Deref.AssociatedTypes.Target class Deref (Self : Type) [associatedTypes : outParam (Deref.AssociatedTypes (Self : Type))] where deref (Self) : (Self -> RustM associatedTypes.Target) end core_models.ops.deref namespace core_models.slice class SliceIndex.AssociatedTypes (Self : Type) (T : Type) where Output : Type attribute [reducible] SliceIndex.AssociatedTypes.Output abbrev SliceIndex.Output := SliceIndex.AssociatedTypes.Output class SliceIndex (Self : Type) (T : Type) [associatedTypes : outParam (SliceIndex.AssociatedTypes (Self : Type) (T : Type))] where get (Self) (T) : (Self -> T -> RustM (core_models.option.Option associatedTypes.Output)) end core_models.slice namespace core_models.str.traits class FromStr.AssociatedTypes (Self : Type) where Err : Type attribute [reducible] FromStr.AssociatedTypes.Err abbrev FromStr.Err := FromStr.AssociatedTypes.Err class FromStr (Self : Type) [associatedTypes : outParam (FromStr.AssociatedTypes (Self : Type))] where from_str (Self) : (String -> RustM (core_models.result.Result Self associatedTypes.Err)) end core_models.str.traits namespace core_models.array @[spec] def Impl_23.map (T : Type) (N : usize) (F : Type) (U : Type) [trait_constr_map_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] (s : (RustArray T N)) (f : (T -> RustM U))
-- `array`'s `map` / `from_fn` delegate to `rust_primitives.slice` helpers; then
-- blanket `convert` instances (`TryFrom` from any `From` with `Error :=
-- Infallible`, and `TryInto` derived from `TryFrom`), and the iterator
-- adapter instances begin.
: RustM (RustArray U N) := do (rust_primitives.slice.array_map T U (N) (T -> RustM U) s f) @[spec] def from_fn (T : Type) (N : usize) (F : Type) [trait_constr_from_fn_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F usize] [trait_constr_from_fn_i0 : core_models.ops.function.FnOnce F usize (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F usize by infer_instance with Output := T})] (f : (usize -> RustM T)) : RustM (RustArray T N) := do (rust_primitives.slice.array_from_fn T (N) (usize -> RustM T) f) end core_models.array namespace core_models.convert @[reducible] instance Impl_1.AssociatedTypes (T : Type) (U : Type) [trait_constr_Impl_1_associated_type_i0 : From.AssociatedTypes U T] [trait_constr_Impl_1_i0 : From U T ] : TryFrom.AssociatedTypes U T where Error := Infallible instance Impl_1 (T : Type) (U : Type) [trait_constr_Impl_1_associated_type_i0 : From.AssociatedTypes U T] [trait_constr_Impl_1_i0 : From U T ] : TryFrom U T where try_from := fun (x : T) => do (pure (core_models.result.Result.Ok (← (From._from U T x)))) @[reducible] instance Impl_2.AssociatedTypes (T : Type) (U : Type) [trait_constr_Impl_2_associated_type_i0 : TryFrom.AssociatedTypes U T] [trait_constr_Impl_2_i0 : TryFrom U T ] : TryInto.AssociatedTypes T U where Error := (TryFrom.Error U T) instance Impl_2 (T : Type) (U : Type) [trait_constr_Impl_2_associated_type_i0 : TryFrom.AssociatedTypes U T] [trait_constr_Impl_2_i0 : TryFrom U T ] : TryInto T U where try_into := fun (self : T) => do (TryFrom.try_from U T self) end core_models.convert namespace core_models.iter.traits.iterator @[reducible] instance Impl_1.AssociatedTypes (I : Type) [trait_constr_Impl_1_associated_type_i0 : Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : Iterator I ] : core_models.iter.traits.collect.IntoIterator.AssociatedTypes I where IntoIter := I instance Impl_1 (I : Type) [trait_constr_Impl_1_associated_type_i0 : Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 :
Iterator I ] : core_models.iter.traits.collect.IntoIterator I where into_iter := fun (self : I) => do (pure self) end core_models.iter.traits.iterator namespace core_models.iter.traits.collect class FromIterator.AssociatedTypes (Self : Type) (A : Type) where class FromIterator (Self : Type) (A : Type) [associatedTypes : outParam (FromIterator.AssociatedTypes (Self : Type) (A : Type))] where from_iter (Self) (A) (T : Type) [trait_constr_from_iter_associated_type_i1 : IntoIterator.AssociatedTypes T] [trait_constr_from_iter_i1 : IntoIterator T ] : (T -> RustM Self) end core_models.iter.traits.collect namespace core_models.iter.adapters.enumerate @[reducible] instance Impl_1.AssociatedTypes (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Enumerate I) where Item := (rust_primitives.hax.Tuple2 usize (core_models.iter.traits.iterator.Iterator.Item I)) instance Impl_1 (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator (Enumerate I) where next := fun (self : (Enumerate I)) => do let ⟨tmp0, out⟩ ← (core_models.iter.traits.iterator.Iterator.next I (Enumerate.iter self)); let self : (Enumerate I) := {self with iter := tmp0}; let ⟨self, hax_temp_output⟩ ← match out with | (core_models.option.Option.Some a) => do let i : usize := (Enumerate.count self); let _ ← (hax_lib.assume (← (hax_lib.prop.constructors.from_bool (← ((Enumerate.count self) do (pure (rust_primitives.hax.Tuple2.mk self core_models.option.Option.None)); (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output)) end core_models.iter.adapters.enumerate namespace core_models.iter.adapters.step_by @[instance] opaque Impl_1.AssociatedTypes (I :
-- NOTE(review): the `Enumerate` `next` above looks truncated — between
-- `(Enumerate.count self)` and `do (pure ... None)` a comparison (and the
-- `Option.None` match arm, plus the count increment) appear to be missing;
-- confirm against the generator's current output. The `StepBy` instances
-- below are opaque stubs discharged via `Inhabited.default`, i.e. they
-- carry no computational model.
Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (StepBy I) := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1 (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator (StepBy I) := by constructor <;> exact Inhabited.default end core_models.iter.adapters.step_by namespace core_models.iter.adapters.map @[reducible] instance Impl_1.AssociatedTypes (I : Type) (O : Type) (F : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i1 : core_models.ops.function.FnOnce F (core_models.iter.traits.iterator.Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I) by infer_instance with Output := O})] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Map I F) where Item := O instance Impl_1 (I : Type) (O : Type) (F : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i1 : core_models.ops.function.FnOnce F (core_models.iter.traits.iterator.Iterator.Item I) (associatedTypes := { show
core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I) by infer_instance with Output := O})] : core_models.iter.traits.iterator.Iterator (Map I F) where next := fun (self : (Map I F)) => do let ⟨tmp0, out⟩ ← (core_models.iter.traits.iterator.Iterator.next I (Map.iter self)); let self : (Map I F) := {self with iter := tmp0}; let hax_temp_output : (core_models.option.Option O) ← match out with | (core_models.option.Option.Some v) => do (pure (core_models.option.Option.Some (← (core_models.ops.function.FnOnce.call_once F (core_models.iter.traits.iterator.Iterator.Item I) (Map.f self) v)))) | (core_models.option.Option.None ) => do (pure core_models.option.Option.None); (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output)) end core_models.iter.adapters.map namespace core_models.iter.adapters.take @[reducible] instance Impl_1.AssociatedTypes (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Take I) where Item := (core_models.iter.traits.iterator.Iterator.Item I) instance Impl_1 (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] : core_models.iter.traits.iterator.Iterator (Take I) where next := fun (self : (Take I)) => do let ⟨self, hax_temp_output⟩ ← if (← ((Take.n self) !=? (0 : usize))) then do let self : (Take I) := {self with n := (← ((Take.n self) -?
(1 : usize)))}; let ⟨tmp0, out⟩ ← (core_models.iter.traits.iterator.Iterator.next I (Take.iter self)); let self : (Take I) := {self with iter := tmp0}; (pure (rust_primitives.hax.Tuple2.mk self out)) else do (pure (rust_primitives.hax.Tuple2.mk self core_models.option.Option.None)); (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output)) end core_models.iter.adapters.take namespace core_models.iter.adapters.flat_map @[spec] def Impl.new (I : Type) (U : Type) (F : Type) [trait_constr_new_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_new_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes U] [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator U ] [trait_constr_new_associated_type_i2 : core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_new_i2 : core_models.ops.function.FnOnce F (core_models.iter.traits.iterator.Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I) by infer_instance with Output := U})] (it : I) (f : F) : RustM (FlatMap I U F) := do (pure (FlatMap.mk (it := it) (f := f) (current := core_models.option.Option.None))) @[instance] opaque Impl_1.AssociatedTypes (I : Type) (U : Type) (F : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes U] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator U ] [trait_constr_Impl_1_associated_type_i2 : core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i2 : core_models.ops.function.FnOnce F 
(core_models.iter.traits.iterator.Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I) by infer_instance with Output := U})] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (FlatMap I U F) := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1 (I : Type) (U : Type) (F : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes U] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator U ] [trait_constr_Impl_1_associated_type_i2 : core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i2 : core_models.ops.function.FnOnce F (core_models.iter.traits.iterator.Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (core_models.iter.traits.iterator.Iterator.Item I) by infer_instance with Output := U})] : core_models.iter.traits.iterator.Iterator (FlatMap I U F) := by constructor <;> exact Inhabited.default end core_models.iter.adapters.flat_map namespace core_models.iter.adapters.flatten structure Flatten (I : Type) [trait_constr_Flatten_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Flatten_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Flatten_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Flatten_i1 : core_models.iter.traits.iterator.Iterator (core_models.iter.traits.iterator.Iterator.Item I) ] where it : I current : (core_models.option.Option (core_models.iter.traits.iterator.Iterator.Item I)) end core_models.iter.adapters.flatten namespace 
core_models.iter.traits.iterator class IteratorMethods.AssociatedTypes (Self : Type) where [trait_constr_IteratorMethods_i0 : Iterator.AssociatedTypes Self] attribute [instance_reducible, instance] IteratorMethods.AssociatedTypes.trait_constr_IteratorMethods_i0 class IteratorMethods (Self : Type) [associatedTypes : outParam (IteratorMethods.AssociatedTypes (Self : Type))] where [trait_constr_IteratorMethods_i0 : Iterator Self] fold (Self) (B : Type) (F : Type) [trait_constr_fold_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (rust_primitives.hax.Tuple2 B (Iterator.Item Self))] [trait_constr_fold_i1 : core_models.ops.function.FnOnce F (rust_primitives.hax.Tuple2 B (Iterator.Item Self)) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (rust_primitives.hax.Tuple2 B (Iterator.Item Self)) by infer_instance with Output := B})] : (Self -> B -> F -> RustM B) enumerate (Self) : (Self -> RustM (core_models.iter.adapters.enumerate.Enumerate Self)) step_by (Self) : (Self -> usize -> RustM (core_models.iter.adapters.step_by.StepBy Self)) map (Self) (O : Type) (F : Type) [trait_constr_map_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self)] [trait_constr_map_i1 : core_models.ops.function.FnOnce F (Iterator.Item Self) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self) by infer_instance with Output := O})] : (Self -> F -> RustM (core_models.iter.adapters.map.Map Self F)) all (Self) (F : Type) [trait_constr_all_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self)] [trait_constr_all_i1 : core_models.ops.function.FnOnce F (Iterator.Item Self) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self) by infer_instance with Output := Bool})] : (Self -> F -> RustM Bool) take (Self) : (Self -> usize -> RustM (core_models.iter.adapters.take.Take Self)) flat_map (Self) (U : 
Type) (F : Type) [trait_constr_flat_map_associated_type_i1 : Iterator.AssociatedTypes U] [trait_constr_flat_map_i1 : Iterator U ] [trait_constr_flat_map_associated_type_i2 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self)] [trait_constr_flat_map_i2 : core_models.ops.function.FnOnce F (Iterator.Item Self) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item Self) by infer_instance with Output := U})] : (Self -> F -> RustM (core_models.iter.adapters.flat_map.FlatMap Self U F)) flatten (Self) [trait_constr_flatten_associated_type_i1 : Iterator.AssociatedTypes (Iterator.Item Self)] [trait_constr_flatten_i1 : Iterator (Iterator.Item Self) ] : (Self -> RustM (core_models.iter.adapters.flatten.Flatten Self)) zip (Self) (I2 : Type) [trait_constr_zip_associated_type_i1 : Iterator.AssociatedTypes I2] [trait_constr_zip_i1 : Iterator I2 ] : (Self -> I2 -> RustM (core_models.iter.adapters.zip.Zip Self I2)) attribute [instance_reducible, instance] IteratorMethods.trait_constr_IteratorMethods_i0 end core_models.iter.traits.iterator namespace core_models.iter.adapters.flatten @[spec] def Impl.new (I : Type) [trait_constr_new_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_new_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator (core_models.iter.traits.iterator.Iterator.Item I) ] (it : I) : RustM (Flatten I) := do (pure (Flatten.mk (it := it) (current := core_models.option.Option.None))) @[instance] opaque Impl_1.AssociatedTypes (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : 
core_models.iter.traits.iterator.Iterator.AssociatedTypes (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator (core_models.iter.traits.iterator.Iterator.Item I) ] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Flatten I) := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1 (I : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] [trait_constr_Impl_1_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes (core_models.iter.traits.iterator.Iterator.Item I)] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator (core_models.iter.traits.iterator.Iterator.Item I) ] : core_models.iter.traits.iterator.Iterator (Flatten I) := by constructor <;> exact Inhabited.default end core_models.iter.adapters.flatten namespace core_models.iter.adapters.zip @[spec] def Impl.new (I1 : Type) (I2 : Type) [trait_constr_new_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I1] [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I1 ] [trait_constr_new_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I2] [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator I2 ] (it1 : I1) (it2 : I2) : RustM (Zip I1 I2) := do (pure (Zip.mk (it1 := it1) (it2 := it2))) end core_models.iter.adapters.zip namespace core_models.iter.traits.iterator @[reducible] instance Impl.AssociatedTypes (I : Type) [trait_constr_Impl_associated_type_i0 : Iterator.AssociatedTypes I] [trait_constr_Impl_i0 : Iterator I ] : IteratorMethods.AssociatedTypes I where instance Impl (I : Type) [trait_constr_Impl_associated_type_i0 : Iterator.AssociatedTypes I] [trait_constr_Impl_i0 : Iterator I ] : IteratorMethods I where fold := fun (B : Type) (F : Type) [trait_constr_fold_associated_type_i1 : 
core_models.ops.function.FnOnce.AssociatedTypes F (rust_primitives.hax.Tuple2 B (Iterator.Item I))] [trait_constr_fold_i1 : core_models.ops.function.FnOnce F (rust_primitives.hax.Tuple2 B (Iterator.Item I)) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (rust_primitives.hax.Tuple2 B (Iterator.Item I)) by infer_instance with Output := B})] (self : I) (init : B) (f : F) => do (pure init) enumerate := fun (self : I) => do (core_models.iter.adapters.enumerate.Impl.new I self) step_by := fun (self : I) (step : usize) => do (core_models.iter.adapters.step_by.Impl.new I self step) map := fun (O : Type) (F : Type) [trait_constr_map_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I)] [trait_constr_map_i1 : core_models.ops.function.FnOnce F (Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I) by infer_instance with Output := O})] (self : I) (f : F) => do (core_models.iter.adapters.map.Impl.new I F self f) all := fun (F : Type) [trait_constr_all_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I)] [trait_constr_all_i1 : core_models.ops.function.FnOnce F (Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I) by infer_instance with Output := Bool})] (self : I) (f : F) => do (pure true) take := fun (self : I) (n : usize) => do (core_models.iter.adapters.take.Impl.new I self n) flat_map := fun (U : Type) (F : Type) [trait_constr_flat_map_associated_type_i1 : Iterator.AssociatedTypes U] [trait_constr_flat_map_i1 : Iterator U ] [trait_constr_flat_map_associated_type_i2 : core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I)] [trait_constr_flat_map_i2 : core_models.ops.function.FnOnce F (Iterator.Item I) (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F (Iterator.Item I) by infer_instance with Output := U})] (self : I) (f : 
F) => do (core_models.iter.adapters.flat_map.Impl.new I U F self f) flatten := fun [trait_constr_flatten_associated_type_i1 : Iterator.AssociatedTypes (Iterator.Item I)] [trait_constr_flatten_i1 : Iterator (Iterator.Item I) ] (self : I) => do (core_models.iter.adapters.flatten.Impl.new I self) zip := fun (I2 : Type) [trait_constr_zip_associated_type_i1 : Iterator.AssociatedTypes I2] [trait_constr_zip_i1 : Iterator I2 ] (self : I) (it2 : I2) => do (core_models.iter.adapters.zip.Impl.new I I2 self it2) end core_models.iter.traits.iterator namespace core_models.iter.adapters.zip @[instance] opaque Impl_1.AssociatedTypes (I1 : Type) (I2 : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I1] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I1 ] [trait_constr_Impl_1_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I2] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator I2 ] : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Zip I1 I2) := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1 (I1 : Type) (I2 : Type) [trait_constr_Impl_1_associated_type_i0 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I1] [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I1 ] [trait_constr_Impl_1_associated_type_i1 : core_models.iter.traits.iterator.Iterator.AssociatedTypes I2] [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator I2 ] : core_models.iter.traits.iterator.Iterator (Zip I1 I2) := by constructor <;> exact Inhabited.default end core_models.iter.adapters.zip namespace core_models.ops.function class Fn.AssociatedTypes (Self : Type) (Args : Type) where [trait_constr_Fn_i0 : FnOnce.AssociatedTypes Self Args] attribute [instance_reducible, instance] Fn.AssociatedTypes.trait_constr_Fn_i0 class Fn (Self : Type) (Args : Type) [associatedTypes : outParam (Fn.AssociatedTypes (Self : Type) (Args : 
Type))] where [trait_constr_Fn_i0 : FnOnce Self Args] call (Self) (Args) : (Self -> Args -> RustM (FnOnce.Output Self Args)) attribute [instance_reducible, instance] Fn.trait_constr_Fn_i0 @[reducible] instance Impl_2.AssociatedTypes (Arg : Type) (Out : Type) : FnOnce.AssociatedTypes (Arg -> RustM Out) Arg where Output := Out instance Impl_2 (Arg : Type) (Out : Type) : FnOnce (Arg -> RustM Out) Arg where call_once := fun (self : (Arg -> RustM Out)) (arg : Arg) => do (self arg) @[reducible] instance Impl.AssociatedTypes (Arg1 : Type) (Arg2 : Type) (Out : Type) : FnOnce.AssociatedTypes (Arg1 -> Arg2 -> RustM Out) (rust_primitives.hax.Tuple2 Arg1 Arg2) where Output := Out instance Impl (Arg1 : Type) (Arg2 : Type) (Out : Type) : FnOnce (Arg1 -> Arg2 -> RustM Out) (rust_primitives.hax.Tuple2 Arg1 Arg2) where call_once := fun (self : (Arg1 -> Arg2 -> RustM Out)) (arg : (rust_primitives.hax.Tuple2 Arg1 Arg2)) => do (self (rust_primitives.hax.Tuple2._0 arg) (rust_primitives.hax.Tuple2._1 arg)) @[reducible] instance Impl_1.AssociatedTypes (Arg1 : Type) (Arg2 : Type) (Arg3 : Type) (Out : Type) : FnOnce.AssociatedTypes (Arg1 -> Arg2 -> Arg3 -> RustM Out) (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3) where Output := Out instance Impl_1 (Arg1 : Type) (Arg2 : Type) (Arg3 : Type) (Out : Type) : FnOnce (Arg1 -> Arg2 -> Arg3 -> RustM Out) (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3) where call_once := fun (self : (Arg1 -> Arg2 -> Arg3 -> RustM Out)) (arg : (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3)) => do (self (rust_primitives.hax.Tuple3._0 arg) (rust_primitives.hax.Tuple3._1 arg) (rust_primitives.hax.Tuple3._2 arg)) end core_models.ops.function namespace core_models.ops.deref @[reducible] instance Impl.AssociatedTypes (T : Type) : Deref.AssociatedTypes T where Target := T instance Impl (T : Type) : Deref T where deref := fun (self : T) => do (pure self) end core_models.ops.deref namespace core_models.option @[spec] def Impl.is_some_and (T : Type) (F : Type) 
[trait_constr_is_some_and_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_is_some_and_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := Bool})] (self : (Option T)) (f : F) : RustM Bool := do match self with | (Option.None ) => do (pure false) | (Option.Some x) => do (core_models.ops.function.FnOnce.call_once F T f x) @[spec] def Impl.is_none_or (T : Type) (F : Type) [trait_constr_is_none_or_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_is_none_or_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := Bool})] (self : (Option T)) (f : F) : RustM Bool := do match self with | (Option.None ) => do (pure true) | (Option.Some x) => do (core_models.ops.function.FnOnce.call_once F T f x) @[spec] def Impl.unwrap_or_else (T : Type) (F : Type) [trait_constr_unwrap_or_else_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F rust_primitives.hax.Tuple0] [trait_constr_unwrap_or_else_i0 : core_models.ops.function.FnOnce F rust_primitives.hax.Tuple0 (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F rust_primitives.hax.Tuple0 by infer_instance with Output := T})] (self : (Option T)) (f : F) : RustM T := do match self with | (Option.Some x) => do (pure x) | (Option.None ) => do (core_models.ops.function.FnOnce.call_once F rust_primitives.hax.Tuple0 f rust_primitives.hax.Tuple0.mk) @[spec] def Impl.map (T : Type) (U : Type) (F : Type) [trait_constr_map_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] (self : (Option T)) (f : F) : RustM (Option U) := do match self 
with | (Option.Some x) => do (pure (Option.Some (← (core_models.ops.function.FnOnce.call_once F T f x)))) | (Option.None ) => do (pure Option.None) @[spec] def Impl.map_or (T : Type) (U : Type) (F : Type) [trait_constr_map_or_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_or_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] (self : (Option T)) (default : U) (f : F) : RustM U := do match self with | (Option.Some t) => do (core_models.ops.function.FnOnce.call_once F T f t) | (Option.None ) => do (pure default) @[spec] def Impl.map_or_else (T : Type) (U : Type) (D : Type) (F : Type) [trait_constr_map_or_else_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_or_else_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] [trait_constr_map_or_else_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes D rust_primitives.hax.Tuple0] [trait_constr_map_or_else_i1 : core_models.ops.function.FnOnce D rust_primitives.hax.Tuple0 (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes D rust_primitives.hax.Tuple0 by infer_instance with Output := U})] (self : (Option T)) (default : D) (f : F) : RustM U := do match self with | (Option.Some t) => do (core_models.ops.function.FnOnce.call_once F T f t) | (Option.None ) => do (core_models.ops.function.FnOnce.call_once D rust_primitives.hax.Tuple0 default rust_primitives.hax.Tuple0.mk) @[spec] def Impl.map_or_default (T : Type) (U : Type) (F : Type) [trait_constr_map_or_default_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_or_default_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by 
infer_instance with Output := U})] [trait_constr_map_or_default_associated_type_i1 : core_models.default.Default.AssociatedTypes U] [trait_constr_map_or_default_i1 : core_models.default.Default U ] (self : (Option T)) (f : F) : RustM U := do match self with | (Option.Some t) => do (core_models.ops.function.FnOnce.call_once F T f t) | (Option.None ) => do (core_models.default.Default.default U rust_primitives.hax.Tuple0.mk) @[spec] def Impl.ok_or_else (T : Type) (E : Type) (F : Type) [trait_constr_ok_or_else_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F rust_primitives.hax.Tuple0] [trait_constr_ok_or_else_i0 : core_models.ops.function.FnOnce F rust_primitives.hax.Tuple0 (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F rust_primitives.hax.Tuple0 by infer_instance with Output := E})] (self : (Option T)) (err : F) : RustM (core_models.result.Result T E) := do match self with | (Option.Some v) => do (pure (core_models.result.Result.Ok v)) | (Option.None ) => do (pure (core_models.result.Result.Err (← (core_models.ops.function.FnOnce.call_once F rust_primitives.hax.Tuple0 err rust_primitives.hax.Tuple0.mk)))) @[spec] def Impl.and_then (T : Type) (U : Type) (F : Type) [trait_constr_and_then_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_and_then_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := (Option U)})] (self : (Option T)) (f : F) : RustM (Option U) := do match self with | (Option.Some x) => do (core_models.ops.function.FnOnce.call_once F T f x) | (Option.None ) => do (pure Option.None) end core_models.option namespace core_models.result @[spec] def Impl.map (T : Type) (E : Type) (U : Type) (F : Type) [trait_constr_map_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { 
show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] (self : (Result T E)) (op : F) : RustM (Result U E) := do match self with | (Result.Ok t) => do (pure (Result.Ok (← (core_models.ops.function.FnOnce.call_once F T op t)))) | (Result.Err e) => do (pure (Result.Err e)) @[spec] def Impl.map_or (T : Type) (E : Type) (U : Type) (F : Type) [trait_constr_map_or_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_or_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] (self : (Result T E)) (default : U) (f : F) : RustM U := do match self with | (Result.Ok t) => do (core_models.ops.function.FnOnce.call_once F T f t) | (Result.Err _e) => do (pure default) @[spec] def Impl.map_or_else (T : Type) (E : Type) (U : Type) (D : Type) (F : Type) [trait_constr_map_or_else_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_map_or_else_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := U})] [trait_constr_map_or_else_associated_type_i1 : core_models.ops.function.FnOnce.AssociatedTypes D E] [trait_constr_map_or_else_i1 : core_models.ops.function.FnOnce D E (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes D E by infer_instance with Output := U})] (self : (Result T E)) (default : D) (f : F) : RustM U := do match self with | (Result.Ok t) => do (core_models.ops.function.FnOnce.call_once F T f t) | (Result.Err e) => do (core_models.ops.function.FnOnce.call_once D E default e) @[spec] def Impl.map_err (T : Type) (E : Type) (F : Type) (O : Type) [trait_constr_map_err_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes O E] [trait_constr_map_err_i0 : core_models.ops.function.FnOnce O E (associatedTypes := { show 
core_models.ops.function.FnOnce.AssociatedTypes O E by infer_instance with Output := F})] (self : (Result T E)) (op : O) : RustM (Result T F) := do match self with | (Result.Ok t) => do (pure (Result.Ok t)) | (Result.Err e) => do (pure (Result.Err (← (core_models.ops.function.FnOnce.call_once O E op e)))) @[spec] def Impl.and_then (T : Type) (E : Type) (U : Type) (F : Type) [trait_constr_and_then_associated_type_i0 : core_models.ops.function.FnOnce.AssociatedTypes F T] [trait_constr_and_then_i0 : core_models.ops.function.FnOnce F T (associatedTypes := { show core_models.ops.function.FnOnce.AssociatedTypes F T by infer_instance with Output := (Result U E)})] (self : (Result T E)) (op : F) : RustM (Result U E) := do match self with | (Result.Ok t) => do (core_models.ops.function.FnOnce.call_once F T op t) | (Result.Err e) => do (pure (Result.Err e)) end core_models.result namespace core_models.slice.iter @[reducible] instance Impl_2.AssociatedTypes (T : Type) : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Iter T) where Item := T instance Impl_2 (T : Type) : core_models.iter.traits.iterator.Iterator (Iter T) where next := fun (self : (Iter T)) => do let ⟨self, hax_temp_output⟩ ← if (← ((← (rust_primitives.sequence.seq_len T (Iter._0 self))) ==? 
(0 : usize))) then do (pure (rust_primitives.hax.Tuple2.mk self core_models.option.Option.None)) else do let res : T ← (rust_primitives.sequence.seq_first T (Iter._0 self)); let self : (Iter T) := {self with _0 := (← (rust_primitives.sequence.seq_slice T (Iter._0 self) (1 : usize) (← (rust_primitives.sequence.seq_len T (Iter._0 self)))))}; (pure (rust_primitives.hax.Tuple2.mk self (core_models.option.Option.Some res))); (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output)) @[reducible] instance Impl_3.AssociatedTypes (T : Type) : core_models.iter.traits.iterator.Iterator.AssociatedTypes (Chunks T) where Item := (RustSlice T) instance Impl_3 (T : Type) : core_models.iter.traits.iterator.Iterator (Chunks T) where next := fun (self : (Chunks T)) => do let ⟨self, hax_temp_output⟩ ← if (← ((← (rust_primitives.slice.slice_length T (Chunks.elements self))) ==? (0 : usize))) then do (pure (rust_primitives.hax.Tuple2.mk self core_models.option.Option.None)) else do if (← ((← (rust_primitives.slice.slice_length T (Chunks.elements self))) do let ⟨self, hax_temp_output⟩ ← if (← ((← (rust_primitives.slice.slice_length T (ChunksExact.elements self))) ⌜ r = .Ok (.ofVec (a.val.toVector.cast h)) ⌝ ⦄ := by intro h mvcgen [TryInto.try_into] grind end core_models.convert open Lean in set_option hygiene false in macro "declare_Hax_convert_from_instances" : command => do let mut cmds := #[] let tys := [ ("UInt8", 8, false), ("UInt16", 16, false), ("UInt32", 32, false), ("UInt64", 64, false), ("Int8", 8, true), ("Int16", 16, true), ("Int32", 32, true), ("Int64", 64, true) ] for (ty1, width1, signed1) in tys do for (ty2, width2, signed2) in tys do if ty1 == ty2 || signed1 != signed2 || width1 < width2 then continue let ty1Ident := mkIdent ty1.toName let ty2Ident := mkIdent ty2.toName let toTy1 := mkIdent ("to" ++ ty1).toName cmds := cmds.push $ ← `( @[reducible] instance : core_models.convert.From.AssociatedTypes $ty1Ident $ty2Ident where instance : core_models.convert.From 
$ty1Ident $ty2Ident where _from := fun x => pure x.$toTy1 ) return ⟨mkNullNode cmds⟩ declare_Hax_convert_from_instances attribute [specset bv, hax_bv_decide] core_models.convert.From._from ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/float.lean ================================================ import Hax.core_models.epilogue.num macro "declare_Hax_float_ops" typeName:ident : command => `( namespace $typeName instance : core_models.ops.arith.Add.AssociatedTypes $typeName $typeName where Output := $typeName instance : core_models.ops.arith.Sub.AssociatedTypes $typeName $typeName where Output := $typeName instance : core_models.ops.arith.Mul.AssociatedTypes $typeName $typeName where Output := $typeName instance : core_models.ops.arith.Div.AssociatedTypes $typeName $typeName where Output := $typeName instance : core_models.ops.arith.Add $typeName $typeName where add := fun x y => pure (x + y) instance : core_models.ops.arith.Sub $typeName $typeName where sub := fun x y => pure (x - y) instance : core_models.ops.arith.Mul $typeName $typeName where mul := fun x y => pure (x * y) instance : core_models.ops.arith.Div $typeName $typeName where div := fun x y => pure (x / y) end $typeName ) declare_Hax_float_ops f32 declare_Hax_float_ops f64 ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/folds.lean ================================================ import Hax.core_models.core_models import Hax.Tactic.SpecSet open Std.Do set_option mvcgen.warning false set_option linter.unusedVariables false /- # Folds Hax represents for-loops as folds over a range -/ section Fold open core_models.ops.control_flow open rust_primitives.hax class rust_primitives.hax.folds {int_type: Type} where /-- Encoding of Rust for-loops without early returns -/ fold_range {α : Type} (s e : int_type) (inv : α -> int_type -> RustM Prop) (init: α) (body : α -> int_type -> RustM α) (pureInv: {i : α 
-> int_type -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄} := by set_option hax_mvcgen.specset "bv" in hax_construct_pure <;> bv_decide) : RustM α /-- Encoding of Rust for-loops with early returns -/ fold_range_return {α_acc α_ret : Type} (s e: int_type) (inv : α_acc -> int_type -> RustM Prop) (init: α_acc) (body : α_acc -> int_type -> RustM (ControlFlow (ControlFlow α_ret (Tuple2 Tuple0 α_acc)) α_acc )) (pureInv: {i : α_acc -> int_type -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄} := by set_option hax_mvcgen.specset "bv" in hax_construct_pure <;> bv_decide) : RustM (ControlFlow α_ret α_acc) attribute [spec] rust_primitives.hax.folds.fold_range attribute [spec] rust_primitives.hax.folds.fold_range_return open Lean in set_option hygiene false in macro "declare_fold_specs" s:(&"signed" <|> &"unsigned") typeName:ident width:term : command => do let tyDot (n : Name) := mkIdent (typeName.getId ++ n) let tySimp (n : Name) : TSyntax _ := .mk (Syntax.node .none ``Lean.Parser.Tactic.simpLemma #[mkNullNode, mkNullNode, tyDot n]) let tyRw (n : Name) : TSyntax `Lean.Parser.Tactic.rwRule := .mk (Syntax.node .none ``Lean.Parser.Tactic.rwRule #[mkNullNode, tyDot n]) `( /-- Implementation of Rust for-loops without early returns -/ def $(tyDot `fold_range) {α : Type} (s e : $typeName) (inv : α -> $typeName -> RustM Prop) (init: α) (body : α -> $typeName -> RustM α) (pureInv: {i : α -> $typeName -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄}) : RustM α := do if s < e then fold_range (s + 1) e inv (← body init s) body pureInv else pure init termination_by (e - s) decreasing_by simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right] exact $(tyDot `sub_succ_lt_self) _ _ (by assumption) /-- Implementation of Rust for-loops with early returns -/ def $(tyDot `fold_range_return) {α_acc α_ret : Type} (s e: $typeName) (inv : α_acc -> $typeName -> RustM Prop) (init: α_acc) (body : α_acc -> $typeName -> RustM (ControlFlow (ControlFlow α_ret 
(Tuple2 Tuple0 α_acc)) α_acc )) (pureInv: {i : α_acc -> $typeName -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄}) := do if s < e then match (← body init s) with -- Rust: `return` | .Break (.Break res ) => pure (ControlFlow.Break res) -- Rust: `break` | .Break (.Continue ⟨ ⟨ ⟩, res⟩) => pure (ControlFlow.Continue res) -- Rust: `continue` | .Continue res => fold_range_return (s + 1) e inv res body pureInv else pure (ControlFlow.Continue init) termination_by (e - s) decreasing_by simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right] exact $(tyDot `sub_succ_lt_self) _ _ (by assumption) @[spec] instance : @rust_primitives.hax.folds $typeName where fold_range := $(tyDot `fold_range) fold_range_return := $(tyDot `fold_range_return) /-- Specification of Rust for-loops without early returns (for bv_decide) -/ @[specset bv] theorem $(mkIdent (s!"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}").toName) {α} (s e : $typeName) (inv : α -> $typeName -> RustM Prop) (pureInv) (init: α) (body : α -> $typeName -> RustM α) : s ≤ e → pureInv.val init s → (∀ (acc : α) (i : $typeName), s ≤ i → i < e → pureInv.val acc i → ⦃ ⌜ True ⌝ ⦄ (body acc i) ⦃ ⇓ res => ⌜ pureInv.val res (i+1) ⌝ ⦄) → ⦃ ⌜ True ⌝ ⦄ ($(tyDot `fold_range) s e inv init body pureInv) ⦃ ⇓ r => ⌜ pureInv.val r e ⌝ ⦄ := by intro h_le h_inv_s h_body unfold $(tyDot `fold_range) mvcgen · mstart mspec h_body _ _ ($(tyDot `le_refl) s) (by assumption) h_inv_s mspec $(mkIdent (s!"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}").toName) <;> grind · grind termination_by (e - s) decreasing_by simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right] exact $(tyDot `sub_succ_lt_self) _ _ (by assumption) /-- Specification of Rust for-loops without early returns (for grind) -/ @[specset int] theorem $(mkIdent (s!"rust_primitives.hax.folds.fold_range_spec_int_{typeName.getId}").toName) {α} (s e : $typeName) (inv : α -> $typeName -> RustM Prop) (pureInv) (init: α) (body : α -> $typeName -> RustM 
α) : s.toNat ≤ e.toNat → pureInv.val init s → (∀ (acc : α) (i : $typeName), s.toNat ≤ i.toNat → i.toNat < e.toNat → pureInv.val acc i → ⦃ ⌜ True ⌝ ⦄ (body acc i) ⦃ ⇓ res => ⌜ pureInv.val res (i+1) ⌝ ⦄) → ⦃ ⌜ True ⌝ ⦄ ($(tyDot `fold_range) s e inv init body pureInv) ⦃ ⇓ r => ⌜ pureInv.val r e ⌝ ⦄ := by apply $(mkIdent (s!"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}").toName) ) declare_fold_specs unsigned UInt8 8 declare_fold_specs unsigned UInt16 16 declare_fold_specs unsigned UInt32 32 declare_fold_specs unsigned UInt64 64 declare_fold_specs unsigned USize64 64 end Fold ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/function.lean ================================================ import Hax.core_models.core_models set_option mvcgen.warning false open rust_primitives.hax open Std.Do namespace core_models.ops.function instance {α β} : FnOnce.AssociatedTypes (α → RustM β) α where Output := β instance {α β} : FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α) where Output := β instance {α β γ} : FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β) where Output := γ instance {α β} : FnOnce (α → RustM β) α where call_once f x := f x instance {α β} : FnOnce (α → RustM β) (Tuple1 α) where call_once f x := f x._0 instance {α β γ : Type} : FnOnce (α → β → RustM γ) (Tuple2 α β) where call_once f x := f x._0 x._1 instance {α β} [FnOnce.AssociatedTypes (α → RustM β) α] : Fn.AssociatedTypes (α → RustM β) α where instance {α β} [FnOnce.AssociatedTypes (α → RustM β) α] [FnOnce (α → RustM β) α] : Fn (α → RustM β) α where call f x := FnOnce.call_once _ _ f x instance {α β} [FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α)] : Fn.AssociatedTypes (α → RustM β) (Tuple1 α) where instance {α β} [FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α)] [FnOnce (α → RustM β) (Tuple1 α)] : Fn (α → RustM β) (Tuple1 α) where call f x := FnOnce.call_once _ _ f x instance {α β γ} [FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β)] 
: Fn.AssociatedTypes (α → β → RustM γ) (Tuple2 α β) where instance {α β γ} [FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β)] [FnOnce (α → β → RustM γ) (Tuple2 α β)] : Fn (α → β → RustM γ) (Tuple2 α β) where call f x := FnOnce.call_once _ _ f x end core_models.ops.function ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/num.lean ================================================ import Hax.core_models.core_models attribute [specset bv, hax_bv_decide] core_models.convert.From._from namespace core_models.num.Impl_8 @[spec] def rotate_left (x : u32) (n : u32) : RustM u32 := pure (UInt32.ofBitVec (BitVec.rotateLeft x.toBitVec n.toNat)) @[spec] def from_le_bytes (x : RustArray u8 4) : RustM u32 := pure (x.toVec[0].toUInt32 + (x.toVec[1].toUInt32 <<< 8) + (x.toVec[2].toUInt32 <<< 16) + (x.toVec[3].toUInt32 <<< 24)) @[spec] def to_le_bytes (x : u32) : RustM (RustArray u8 4) := pure (.ofVec #v[ (x % 256).toUInt8, (x >>> 8 % 256).toUInt8, (x >>> 16 % 256).toUInt8, (x >>> 24 % 256).toUInt8, ]) end core_models.num.Impl_8 attribute [spec] core_models.num.Impl_8.wrapping_add ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/ops.lean ================================================ import Hax.core_models.core_models attribute [spec] core_models.ops.deref.Impl core_models.ops.deref.Deref.deref ================================================ FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/range.lean ================================================ import Hax.core_models.core_models open core_models.ops.range open Std.Do set_option mvcgen.warning false open rust_primitives.sequence attribute [local grind! .] 
USize64.toNat_lt_size instance Range.instGetElemResultArrayUSize64 {α: Type}: GetElemResult (Seq α) (Range usize) (Seq α) where getElemResult xs i := match i with | ⟨s, e⟩ => let size := xs.val.size; if s ≤ e && e.toNat ≤ size then pure ⟨xs.val.extract s.toNat e.toNat, by grind⟩ else RustM.fail Error.arrayOutOfBounds instance Range.instGetElemResultRustArrayUSize64 {α : Type} {n : usize} : GetElemResult (RustArray α n) (Range usize) (Seq α) where getElemResult xs i := match i with | ⟨s, e⟩ => if s ≤ e && e.toNat ≤ n.toNat then pure ⟨(xs.toVec.extract s.toNat e.toNat).toArray, by grind⟩ else RustM.fail Error.arrayOutOfBounds @[spec] theorem Range.getElemArrayUSize64_spec (α : Type) (a: Seq α) (s e: usize) : s.toNat ≤ e.toNat → e.toNat ≤ a.val.size → ⦃ ⌜ True ⌝ ⦄ ( a[(Range.mk s e)]_? ) ⦃ ⇓ r => ⌜ r = ⟨Array.extract a.val s.toNat e.toNat, by grind⟩ ⌝ ⦄ := by intros mvcgen [Range.instGetElemResultArrayUSize64, getElemResult] grind [USize64.le_iff_toNat_le] @[spec] theorem Range.getElemVectorUSize64_spec (α : Type) (n: usize) (a: RustArray α n) (s e: usize) : s.toNat ≤ e.toNat → e.toNat ≤ a.toVec.size → ⦃ ⌜ True ⌝ ⦄ ( a[(Range.mk s e)]_? 
) ⦃ ⇓ r => ⌜ r = ⟨(Vector.extract a.toVec s.toNat e.toNat).toArray, by grind⟩ ⌝ ⦄ := by
  intros
  mvcgen [Range.instGetElemResultRustArrayUSize64, getElemResult]
  grind [USize64.le_iff_toNat_le]
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/result.lean
================================================
import Hax.core_models.core_models

set_option mvcgen.warning false

open rust_primitives.hax
open Std.Do

namespace core_models.result

/-- Model of Rust's `Result::unwrap`: returns the payload of `Ok`, and panics
(via the core-model `panic`, inside the `RustM` monad) on `Err`. -/
def Impl.unwrap (T : Type) (E : Type) (self : (Result T E)) : RustM T := do
  match self with
  | (Result.Ok t) => (pure t)
  | (Result.Err _) => (core_models.panicking.internal.panic T rust_primitives.hax.Tuple0.mk)

/-- Spec for `unwrap`: under the hypothesis that the scrutinee is `Ok v`,
`unwrap` succeeds and returns exactly `v` (the panicking branch is unreachable). -/
@[spec] theorem Impl.unwrap.spec {α β} (x: Result α β) v :
    x = Result.Ok v →
    ⦃ ⌜ True ⌝ ⦄ (Impl.unwrap α β x) ⦃ ⇓ r => ⌜ r = v ⌝ ⦄ := by
  intros
  mvcgen [Impl.unwrap] <;> try grind

end core_models.result
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/slice.lean
================================================
import Hax.core_models.core_models

open Std.Do

set_option mvcgen.warning false

namespace core_models.slice

/-- Spec for `slice::len`: the returned `usize`, read back as a `Nat`, equals
the size of the underlying array. `s.size_lt_usizeSize` guarantees the size
fits in `usize`, so the `ofNat` round-trip is lossless. -/
@[spec] theorem Impl.len.spec (α : Type) (s : RustSlice α) :
    ⦃ ⌜ True ⌝ ⦄ Impl.len α s ⦃⇓ r => ⌜ r.toNat = s.val.size ⌝ ⦄ := by
  mvcgen; rw[USize64.toNat_ofNat_of_lt' s.size_lt_usizeSize]

end core_models.slice
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue/string.lean
================================================
-- Rust's `alloc::string::String` is modeled by Lean's own `String`.
-- The extra indirection keeps `alloc.string.String` a plain abbrev of a
-- dedicated name rather than of `String` directly.
abbrev string_indirection : Type := String

abbrev alloc.string.String : Type := string_indirection
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/epilogue.lean
================================================
-- Umbrella module: re-export all epilogue sub-modules.
import Hax.core_models.epilogue.alloc
import Hax.core_models.epilogue.convert
import Hax.core_models.epilogue.float
import Hax.core_models.epilogue.folds
import Hax.core_models.epilogue.function
import Hax.core_models.epilogue.num
import Hax.core_models.epilogue.ops
import Hax.core_models.epilogue.range
import Hax.core_models.epilogue.result
import Hax.core_models.epilogue.slice
import Hax.core_models.epilogue.string
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/prologue/clone.lean
================================================
/-
Hax Lean Backend - Cryspen

Core-model for Clone represented as a no-op
-/
import Hax.rust_primitives

namespace core.clone

-- Marker class with no data: any type can be declared `Clone`.
class Clone (Self : Type) where

/-- `clone` is the identity in this model: Lean values are immutable, so
cloning is a pure no-op in `RustM`. -/
def Clone.clone {Self: Type} : Self -> RustM Self := fun x => pure x

end core.clone
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/prologue/marker.lean
================================================
import Hax.core_models.prologue.clone

-- Placeholder for `Copy`'s associated types (there are none).
class core.marker.Copy.AssociatedTypes (Self : Type) where

/-- `Copy` carries no data of its own; it only records (as an instance field)
that the type is `Clone`. -/
class core.marker.Copy (Self : Type)
    [associatedTypes : outParam (core.marker.Copy.AssociatedTypes (Self : Type))] where
  [trait_constr : core.clone.Clone Self]

-- Make the embedded `Clone` instance discoverable by instance search.
attribute [instance_reducible, instance] core.marker.Copy.trait_constr
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/prologue/num.lean
================================================
import Hax.rust_primitives

-- `usize::MAX`: `-1` wraps around to the all-ones bit pattern.
def core.num.Impl_11.MAX : usize := -1
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models/prologue.lean
================================================
-- Umbrella module: re-export all prologue sub-modules.
import Hax.core_models.prologue.clone
import Hax.core_models.prologue.marker
import Hax.core_models.prologue.num
================================================
FILE: hax-lib/proof-libs/lean/Hax/core_models.lean
================================================
-- Prologue must come before the generated core models, epilogue after.
import Hax.core_models.prologue
import Hax.core_models.core_models
import Hax.core_models.epilogue
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/BVDecide.lean
================================================
import Hax.rust_primitives.RustM

open Std.Do

/- # BV_Decide Lemmas

In the following, we define an encoding of the entire `RustM` monad so that we
can run `bv_decide` on equalities between `RustM` values. -/

/-- We encode `RustM` values into the following structure to be able to run
`bv_decide`: -/
structure BVRustM (α : Type) where
  -- `true` iff the computation succeeded; disambiguates `err = 0` below.
  ok : Bool
  -- Result value on success; `default` on failure.
  val : α
  -- Failure code (3 bits cover the 8 distinct non-ok outcomes, 0..7).
  err : BitVec 3

/-- Encodes `RustM` values into `BVRustM` to be able to run `bv_decide`. -/
def RustM.toBVRustM {α : Type} [Inhabited α] : RustM α → BVRustM α
  | .ok v => ⟨ true, v, 0 ⟩
  | .fail .assertionFailure => ⟨ false, default, 0 ⟩
  | .fail .integerOverflow => ⟨ false, default, 1 ⟩
  | .fail .divisionByZero => ⟨ false, default, 2 ⟩
  | .fail .arrayOutOfBounds => ⟨ false, default, 3 ⟩
  | .fail .maximumSizeExceeded => ⟨ false, default, 4 ⟩
  | .fail .panic => ⟨ false, default, 5 ⟩
  | .fail .undef => ⟨ false, default, 6 ⟩
  | .div => ⟨ false, default, 7 ⟩

attribute [hax_bv_decide] Coe.coe

-- The following `@[hax_bv_decide]` lemmas push `toBVRustM` through each
-- `RustM` constructor, `ite`, `decide`-equality, and `bind`, giving
-- `bv_decide` a first-order view of monadic programs.

@[hax_bv_decide]
theorem RustM.toBVRustM_pure {α : Type} [Inhabited α] {v : α} :
    (pure v : RustM α).toBVRustM = ⟨ true, v, 0 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_ok {α : Type} [Inhabited α] {v : α} :
    (RustM.ok v).toBVRustM = ⟨ true, v, 0 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_assertionFailure {α : Type} [Inhabited α] :
    (RustM.fail .assertionFailure : RustM α).toBVRustM = ⟨ false, default, 0 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_integerOverflow {α : Type} [Inhabited α] :
    (RustM.fail .integerOverflow : RustM α).toBVRustM = ⟨ false, default, 1 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_divisionByZero {α : Type} [Inhabited α] :
    (RustM.fail .divisionByZero : RustM α).toBVRustM = ⟨ false, default, 2 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_arrayOutOfBounds {α : Type} [Inhabited α] :
    (RustM.fail .arrayOutOfBounds : RustM α).toBVRustM = ⟨ false, default, 3 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_maximumSizeExceeded {α : Type} [Inhabited α] :
    (RustM.fail .maximumSizeExceeded: RustM α).toBVRustM = ⟨ false, default, 4 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_panic {α : Type} [Inhabited α] :
    (RustM.fail .panic : RustM α).toBVRustM = ⟨ false, default, 5 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_undef {α : Type} [Inhabited α] :
    (RustM.fail .undef : RustM α).toBVRustM = ⟨ false, default, 6 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_div {α : Type} [Inhabited α] :
    (RustM.div : RustM α ).toBVRustM = ⟨ false, default, 7 ⟩ := rfl

@[hax_bv_decide]
theorem RustM.toBVRustM_ite {α : Type} [Inhabited α] {c : Prop} [Decidable c] (x y : RustM α) :
    (if c then x else y).toBVRustM = (if c then x.toBVRustM else y.toBVRustM) := by grind

-- Equality of `RustM` values is equality of the three encoded components.
@[hax_bv_decide]
theorem RustM.beq_iff_toBVRustM_eq {α : Type} [Inhabited α] [DecidableEq α] (x y : RustM α) :
    decide (x = y) = (x.toBVRustM.ok == y.toBVRustM.ok && x.toBVRustM.val == y.toBVRustM.val
      && x.toBVRustM.err == y.toBVRustM.err) := by
  by_cases h : x = y
  · simp [h]
  · revert h
    cases x using RustM.toBVRustM.match_1 <;> cases y using RustM.toBVRustM.match_1 <;>
      grind [toBVRustM]

-- `bind` in the encoding: continue with `f` on success, otherwise propagate
-- the failure (resetting `val` to `default`).
@[hax_bv_decide]
theorem RustM.toBVRustM_bind {α β : Type} [Inhabited α] [Inhabited β] (x : RustM α) (f : α → RustM β) :
    (x >>= f).toBVRustM =
      if x.toBVRustM.ok then (f x.toBVRustM.val).toBVRustM
      else {x.toBVRustM with val := default} := by
  cases x using RustM.toBVRustM.match_1 <;> rfl

-- A Hoare triple on `RustM` reduces to a Boolean on the encoding:
-- either the precondition fails, or the program succeeds and the
-- postcondition holds of the encoded result.
@[hax_bv_decide]
theorem RustM.Triple_iff_BitVec {α : Type} [Inhabited α] (a : Prop) [Decidable a] (b : α → Prop)
    (x : RustM α) [Decidable (b x.toBVRustM.val)] :
    ⦃ ⌜ a ⌝ ⦄ x ⦃ ⇓ r => ⌜ b r ⌝ ⦄ ↔
      (!decide a || (x.toBVRustM.ok && decide (b x.toBVRustM.val))) := by
  cases x using RustM.toBVRustM.match_1 <;> by_cases a <;>
    simp only [Triple, PredTrans.apply, wp, SPred.entails_nil, SPred.down_pure,
      Decidable.imp_iff_not_or, toBVRustM, BitVec.ofNat_eq_ofNat, Bool.false_and, Bool.or_false,
      Bool.not_eq_eq_eq_not, Bool.not_true, decide_eq_false_iff_not, or_iff_left_iff_imp,
      Bool.true_and, Bool.or_eq_true, Bool.not_eq_eq_eq_not, Bool.not_true,
      decide_eq_false_iff_not, decide_eq_true_eq] <;> try rfl
  all_goals exact fun x => False.elim x

/-- This lemma is used to make some variants of `>>>?` accessible for `bv_decide` -/
@[hax_bv_decide]
theorem Int32.to_Int64_toNatClampNeg : (Int32.toNatClampNeg 1).toInt64 = 1 := rfl
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/Cast.lean
================================================
import Hax.rust_primitives.ops
import Hax.Tactic.Init

/- # Casts -/

section Cast

/-- Rust-supported casts on base types -/
class Cast (α β: Type) where
  cast : α → RustM β

attribute [spec, hax_bv_decide] Cast.cast

-- Macro to generate Cast instances for all integer type pairs.
-- For each (src, dst) pair it emits `Cast src dst`; the identity when
-- src = dst, otherwise the library conversion `src.toDst`.
open Lean in
set_option hygiene false in
macro "declare_Hax_cast_instances" : command => do
  let mut cmds := #[]
  let tys : List Name :=
    [`UInt8,`UInt16,`UInt32,`UInt64,`USize64,`Int8,`Int16,`Int32,`Int64,`ISize]
  for srcName in tys do
    for dstName in tys do
      let srcIdent := mkIdent srcName
      let dstIdent := mkIdent dstName
      -- e.g. `UInt8.toUInt16 x` for UInt8 → UInt16; just `x` for the diagonal.
      let result ← if dstName == srcName then `(x)
        else `($(mkIdent (srcName ++ dstName.appendBefore "to")) x)
      cmds := cmds.push $ ← `(
        @[spec] instance : Cast $srcIdent $dstIdent where
          cast x := pure $result
      )
  return ⟨mkNullNode cmds⟩

declare_Hax_cast_instances

@[spec] instance : Cast String String where cast x := pure x

/-- Entry point used by generated code for Rust's `as` casts. -/
@[simp, spec, hax_bv_decide] def rust_primitives.hax.cast_op {α β} [c: Cast α β] (x:α) :
    (RustM β) := c.cast x

end Cast
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/GetElemResult.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.ops
import Hax.rust_primitives.sequence
import Hax.rust_primitives.hax.array
import Hax.Tactic.SpecSet

open Error
open Std.Do

set_option mvcgen.warning false

/- # Polymorphic index access

Hax introduces polymorphic index accesses, for any integer type (returning a
single value) and for ranges (returning an array of values). A typeclass-based
notation `a[i]_?` is introduced to support panicking lookups -/

/-- The classes `GetElemResult` implement lookup notation `xs[i]_?`. -/
class GetElemResult (coll : Type) (idx : Type) (elem : outParam (Type)) where
  /-- The syntax `arr[i]_?` gets the `i`'th element of the collection `arr`. It
  can panic if the index is out of bounds. -/
  getElemResult (xs : coll) (i : idx) : RustM elem

export GetElemResult (getElemResult)

@[inherit_doc getElemResult]
syntax:max term noWs "[" withoutPosition(term) "]" noWs "_?": term

macro_rules | `($x[$i]_?) => `(getElemResult $x $i)

-- Have lean use the notation when printing
@[app_unexpander getElemResult]
meta def unexpandGetElemResult : Lean.PrettyPrinter.Unexpander
  | `($_ $array $index) => `($array[$index]_?)
  | _ => throw ()

open rust_primitives.sequence

-- Bounds-checked lookups: return the element when the index is in range,
-- otherwise fail with `arrayOutOfBounds`. Four instances: {Seq, RustArray}
-- indexed by {usize, Nat}.

instance usize.instGetElemResultSeq {α} : GetElemResult (Seq α) usize α where
  getElemResult xs i :=
    if h: i.toNat < xs.val.size then pure (xs.val[i]) else .fail arrayOutOfBounds

instance usize.instGetElemResultVector {α n} : GetElemResult (RustArray α n) usize α where
  getElemResult xs i :=
    if h: i.toNat < n.toNat then pure (xs.toVec[i.toNat]) else .fail arrayOutOfBounds

instance Nat.instGetElemResultSeq {α} : GetElemResult (Seq α) Nat α where
  getElemResult xs i :=
    if h: i < xs.val.size then pure (xs.val[i]) else .fail arrayOutOfBounds

instance Nat.instGetElemResultVector {α n} : GetElemResult (RustArray α n) Nat α where
  getElemResult xs i :=
    if h: i < n.toNat then pure (xs.toVec[i]) else .fail arrayOutOfBounds

-- Specs: with an in-bounds hypothesis `h`, each lookup succeeds and returns
-- the corresponding pure element.

@[spec] theorem Nat.getElemSeqResult_spec (α : Type) (a: Seq α) (i: Nat) (h: i < a.val.size) :
    ⦃ ⌜ True ⌝ ⦄ ( a[i]_? ) ⦃ ⇓ r => ⌜ r = a.val[i] ⌝ ⦄ := by
  mvcgen [RustM.ofOption, Nat.instGetElemResultSeq, getElemResult]

@[spec] theorem Nat.getElemVectorResult_spec (α : Type) (n : usize) (a : RustArray α n)
    (i: Nat) (h : i < n.toNat) :
    ⦃ ⌜ True ⌝ ⦄ ( a[i]_? ) ⦃ ⇓ r => ⌜ r = a.toVec[i] ⌝ ⦄ := by
  mvcgen [Nat.instGetElemResultVector, getElemResult]

@[spec] theorem usize.getElemSeqResult_spec (α : Type) (a: Seq α) (i: usize)
    (h: i.toNat < a.val.size) :
    ⦃ ⌜ True ⌝ ⦄ ( a[i]_? ) ⦃ ⇓ r => ⌜ r = a.val[i.toNat] ⌝ ⦄ := by
  mvcgen [usize.instGetElemResultSeq, getElemResult]

@[spec] theorem usize.getElemVectorResult_spec (α : Type) (n : usize) (a : RustArray α n)
    (i : usize) (h : i.toNat < n.toNat) :
    ⦃ ⌜ True ⌝ ⦄ ( a[i]_? ) ⦃ ⇓ r => ⌜ r = a.toVec[i.toNat] ⌝ ⦄ := by
  mvcgen [usize.instGetElemResultVector, getElemResult]
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/RustM.lean
================================================
import Hax.Tactic.Init
import Hax.Tactic.SpecSet
import Hax.MissingLean.Init.While
import Std.Tactic.Do

open Std.Do
open Std.Tactic

/- # Monadic encoding

The encoding is based on the `RustM` monad: all rust computations are wrapped
in the monad, representing the fact that they are not total. It borrows some
definitions from the Aeneas project (https://github.com/AeneasVerif/aeneas/) -/

/-- (Aeneas) Error cases -/
inductive Error where
  | assertionFailure: Error
  | integerOverflow: Error
  | divisionByZero: Error
  | arrayOutOfBounds: Error
  | maximumSizeExceeded: Error
  | panic: Error
  | undef: Error
deriving Repr, BEq, DecidableEq

open Error

/-- RustM monad (corresponding to Aeneas's `Result` monad), representing
possible results of rust computations.

Defined as `ExceptT Error Option`, i.e. `Option (Except Error α)`. The
`Option` layer models divergence and the `Except Error` layer models Rust
panics. The `ExceptT` transformer ensures that once a program has paniced, it
can not diverge any more (and vice versa). -/
def RustM (α : Type) := ExceptT Error Option α

namespace RustM

-- These `Except` instances are missing in Lean's library.
-- We use them to derive the corresponding `RustM` instances below.
deriving instance BEq, DecidableEq for Except

-- Instances are transported from the underlying `Option (Except Error α)`.
instance instBEq {α : Type} [BEq α] : BEq (RustM α) :=
  inferInstanceAs (BEq (Option (Except Error α)))

instance instDecidableEq {α : Type} [DecidableEq α] : DecidableEq (RustM α) :=
  inferInstanceAs (DecidableEq (Option (Except Error α)))

instance instInhabited {α : Type} : Inhabited (RustM α) :=
  inferInstanceAs (Inhabited (Option (Except Error α)))

instance instMonad : Monad RustM :=
  inferInstanceAs (Monad (ExceptT Error Option))

instance instLawfulMonad : LawfulMonad RustM :=
  inferInstanceAs (LawfulMonad (ExceptT Error Option))

-- Smart constructors, usable as match patterns:
-- `ok v` = success, `fail e` = panic with error `e`, `div` = divergence.
@[reducible, match_pattern] def ok {α : Type} (v : α) : RustM α := some (.ok v)
@[reducible, match_pattern] def fail {α : Type} (e : Error) : RustM α := some (.error e)
@[reducible, match_pattern] def div {α : Type} : RustM α := none

instance {α : Type} [Repr α] : Repr (RustM α) where
  reprPrec x prec :=
    match x with
    | .ok v => Repr.addAppParen (f!"RustM.ok {reprArg v}") prec
    | .fail e => Repr.addAppParen (f!"RustM.fail {reprArg e}") prec
    | .div => "RustM.div"

/-- Lift an `Option` into `RustM`, failing with `e` on `none`. -/
def ofOption {α : Type} (x : Option α) (e : Error) : RustM α :=
  match x with
  | .some v => pure v
  | .none => .fail e

@[reducible] def isOk {α : Type} (x : RustM α) : Bool :=
  match x with
  | .ok _ => true
  | _ => false

/-- Partial projection: extract the success value, given a proof `isOk x`. -/
@[reducible, specset bv, hax_bv_decide] def of_isOk {α : Type} (x : RustM α)
    (h : RustM.isOk x) : α :=
  match x with
  | .ok v => v

@[simp, spec] def ok_of_isOk {α : Type} (v : α) (h : isOk (ok v)) :
    (ok v).of_isOk h = v := by rfl

-- Weakest-precondition machinery (Std.Do), transported from `ExceptT`.
instance instWP : WP RustM (.except Error (.except PUnit .pure)) :=
  inferInstanceAs (WP (ExceptT Error Option) _)

instance instWPMonad : WPMonad RustM (.except Error (.except PUnit .pure)) :=
  inferInstanceAs (WPMonad (ExceptT Error Option) _)

section Order

open Lean.Order

/- These instances are required to use `partial_fixpoint` in the `RustM` monad.
-/
instance {α : Type} : PartialOrder (RustM α) :=
  inferInstanceAs (PartialOrder (ExceptT Error Option α))

instance {α : Type} : CCPO (RustM α) :=
  inferInstanceAs (CCPO (ExceptT Error Option α))

instance : MonoBind RustM :=
  inferInstanceAs (MonoBind (ExceptT Error Option))

open Lean Order in
/-- `Loop.MonoLoopCombinator` is used to implement while loops in `RustM`: -/
instance {β : Type} (f : Unit → β → RustM (ForInStep β)) : Loop.MonoLoopCombinator f :=
  { mono := by
      unfold Loop.loopCombinator
      repeat monotonicity }

end Order

end RustM
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/Spec.lean
================================================
import Std.Do
import Hax.rust_primitives.RustM

open Std.Do

/- # Specs -/

/-- A contract for `f`: monadic pre-/postconditions together with proofs that
they evaluate to pure propositions, and a Hoare triple relating them to `f`. -/
structure Spec {α} (requires : RustM Prop) (ensures : α → RustM Prop) (f : RustM α) where
  -- The pure proposition `requires` evaluates to.
  pureRequires : {p : Prop // ⦃ ⌜ True ⌝ ⦄ requires ⦃ ⇓r => ⌜ r = p ⌝ ⦄}
  -- The pure predicate `ensures` evaluates to, assuming the precondition.
  pureEnsures : {p : α → Prop //
    pureRequires.val → ∀ a, ⦃ ⌜ True ⌝ ⦄ ensures a ⦃ ⇓r => ⌜ r = p a ⌝ ⦄}
  -- The actual contract on `f`, stated with the purified pre/post.
  contract : ⦃ ⌜ pureRequires.val ⌝ ⦄ f ⦃ ⇓r => ⌜ pureEnsures.val r ⌝ ⦄
================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/USize64.lean
================================================
import Hax.MissingLean
import Lean.Meta.Tactic.Simp.BuiltinSimprocs.UInt

/-!
# USize64

We define a type `USize64` to represent Rust's `usize` type. It is simply a
copy of `UInt64`. This file aims to collect all definitions, lemmas, and type
class instances about `UInt64` from Lean's standard library and to state them
for `USize64`. The regular `USize` type does not work for us because of
https://github.com/cryspen/hax/issues/1702.
-/

/-- A copy of `UInt64`, which we use to represent Rust's `usize` type.
-/
structure USize64 where
  ofBitVec ::
  -- The underlying 64-bit pattern.
  toBitVec : BitVec 64

@[reducible] def USize64.size : Nat := UInt64.size

-- Basic constructors/destructors, mirroring `UInt64`'s API.
def USize64.ofNat (n : @& Nat) : USize64 := ⟨BitVec.ofNat 64 n⟩
def USize64.toNat (n : USize64) : Nat := n.toBitVec.toNat
def USize64.toFin (x : USize64) : Fin UInt64.size := x.toBitVec.toFin
def USize64.ofNatLT (n : @& Nat) (h : LT.lt n USize64.size) : USize64 where
  toBitVec := BitVec.ofNatLT n h

def USize64.decEq (a b : USize64) : Decidable (Eq a b) :=
  match a, b with
  | ⟨n⟩, ⟨m⟩ =>
    dite (Eq n m) (fun h => isTrue (h ▸ rfl))
      (fun h => isFalse (fun h' => USize64.noConfusion h' (fun h' => absurd h' h)))

abbrev Nat.toUSize64 := USize64.ofNat

namespace USize64

instance : DecidableEq USize64 := USize64.decEq

instance : Inhabited USize64 where
  default := USize64.ofNatLT 0 (of_decide_eq_true rfl)

instance {n} : OfNat USize64 n := ⟨⟨OfNat.ofNat n⟩⟩

end USize64

@[inline] def USize64.ofFin (a : Fin USize64.size) : USize64 := ⟨⟨a⟩⟩

-- Integers are mapped to their residue mod 2^64 (two's complement view).
def USize64.ofInt (x : Int) : USize64 := ofNat (x % 2 ^ 64).toNat

@[simp] theorem USize64.le_size : 2 ^ 32 ≤ USize64.size := by
  simp [USize64.size, UInt64.size]

@[simp] theorem USize64.size_le : USize64.size ≤ 2 ^ 64 := by
  simp [USize64.size, UInt64.size]

-- Arithmetic and bitwise operations, all delegated to `BitVec 64`
-- (hence wrapping modulo 2^64).
protected def USize64.add (a b : USize64) : USize64 := ⟨a.toBitVec + b.toBitVec⟩
protected def USize64.sub (a b : USize64) : USize64 := ⟨a.toBitVec - b.toBitVec⟩
protected def USize64.mul (a b : USize64) : USize64 := ⟨a.toBitVec * b.toBitVec⟩
protected def USize64.div (a b : USize64) : USize64 := ⟨a.toBitVec / b.toBitVec⟩
protected def USize64.pow (x : USize64) (n : Nat) : USize64 := ⟨x.toBitVec ^ n⟩
protected def USize64.mod (a b : USize64) : USize64 := ⟨a.toBitVec % b.toBitVec⟩
protected def USize64.land (a b : USize64) : USize64 := ⟨a.toBitVec &&& b.toBitVec⟩
protected def USize64.lor (a b : USize64) : USize64 := ⟨a.toBitVec ||| b.toBitVec⟩
protected def USize64.xor (a b : USize64) : USize64 := ⟨a.toBitVec ^^^ b.toBitVec⟩
-- Shift amounts are reduced mod 64 before shifting.
protected def USize64.shiftLeft (a b : USize64) : USize64 :=
  ⟨a.toBitVec <<< (USize64.mod b 64).toBitVec⟩
protected def USize64.shiftRight (a b : USize64) : USize64 :=
  ⟨a.toBitVec >>> (USize64.mod b 64).toBitVec⟩
-- Unsigned comparisons.
protected def USize64.lt (a b : USize64) : Prop := a.toBitVec < b.toBitVec
protected def USize64.le (a b : USize64) : Prop := a.toBitVec ≤ b.toBitVec

instance : Add USize64 := ⟨USize64.add⟩
instance : Sub USize64 := ⟨USize64.sub⟩
instance : Mul USize64 := ⟨USize64.mul⟩
instance : Pow USize64 Nat := ⟨USize64.pow⟩
instance : Mod USize64 := ⟨USize64.mod⟩
instance : HMod USize64 Nat USize64 := ⟨fun x n => ⟨x.toBitVec % n⟩⟩
instance : Div USize64 := ⟨USize64.div⟩
instance : LT USize64 := ⟨USize64.lt⟩
instance : LE USize64 := ⟨USize64.le⟩

protected def USize64.complement (a : USize64) : USize64 := ⟨~~~a.toBitVec⟩
protected def USize64.neg (a : USize64) : USize64 := ⟨-a.toBitVec⟩

instance : Complement USize64 := ⟨USize64.complement⟩
instance : Neg USize64 := ⟨USize64.neg⟩
instance : AndOp USize64 := ⟨USize64.land⟩
instance : OrOp USize64 := ⟨USize64.lor⟩
instance : XorOp USize64 := ⟨USize64.xor⟩
instance : ShiftLeft USize64 := ⟨USize64.shiftLeft⟩
instance : ShiftRight USize64 := ⟨USize64.shiftRight⟩

-- Construct from a `Nat` known to fit in 32 bits (hence certainly < size).
def USize64.ofNat32 (n : @& Nat) (h : n < 4294967296) : USize64 :=
  USize64.ofNatLT n (Nat.lt_of_lt_of_le h USize64.le_size)

-- Conversions to/from the fixed-width integer types. Widening conversions
-- go through `ofNat32`/`toUSize64` with an explicit bound proof; narrowing
-- ones go through `toNat` and the target's truncating `ofNat`.
def UInt8.toUSize64 (a : UInt8) : USize64 :=
  USize64.ofNat32 a.toBitVec.toNat (Nat.lt_trans a.toBitVec.isLt (by decide))
def USize64.toUInt8 (a : USize64) : UInt8 := a.toNat.toUInt8
def UInt16.toUSize64 (a : UInt16) : USize64 :=
  USize64.ofNat32 a.toBitVec.toNat (Nat.lt_trans a.toBitVec.isLt (by decide))
def USize64.toUInt16 (a : USize64) : UInt16 := a.toNat.toUInt16
def UInt32.toUSize64 (a : UInt32) : USize64 :=
  USize64.ofNat32 a.toBitVec.toNat a.toBitVec.isLt
def USize64.toUInt32 (a : USize64) : UInt32 := a.toNat.toUInt32
def UInt64.toUSize64 (a : UInt64) : USize64 := a.toNat.toUSize64
def USize64.toUInt64 (a : USize64) : UInt64 := a.toNat.toUInt64
def USize64.toUSize (a : USize64) : USize :=
  a.toNat.toUSize
def USize64.toInt8 (a : USize64) : Int8 := a.toNat.toInt8
def USize64.toInt16 (a : USize64) : Int16 := a.toNat.toInt16
def USize64.toInt32 (a : USize64) : Int32 := a.toNat.toInt32
def USize64.toInt64 (a : USize64) : Int64 := a.toNat.toInt64
def USize64.toISize (a : USize64) : ISize := a.toNat.toISize
-- Signed sources go through `ofInt` (two's-complement wrap).
def Int8.toUSize64 (a : Int8) : USize64 := USize64.ofInt a.toInt
def Int16.toUSize64 (a : Int16) : USize64 := USize64.ofInt a.toInt
def Int32.toUSize64 (a : Int32) : USize64 := USize64.ofInt a.toInt
def Int64.toUSize64 (a : Int64) : USize64 := USize64.ofInt a.toInt
def ISize.toUSize64 (a : ISize) : USize64 := USize64.ofInt a.toInt
def Bool.toUSize64 (b : Bool) : USize64 := if b then 1 else 0

def USize64.decLt (a b : USize64) : Decidable (a < b) :=
  inferInstanceAs (Decidable (a.toBitVec < b.toBitVec))
def USize64.decLe (a b : USize64) : Decidable (a ≤ b) :=
  inferInstanceAs (Decidable (a.toBitVec ≤ b.toBitVec))

attribute [instance_reducible, instance] USize64.decLt USize64.decLe

instance : Max USize64 := maxOfLe
instance : Min USize64 := minOfLe

-- Allow indexing Lean `Array`s directly with a `USize64` index.
instance {α} : GetElem (Array α) USize64 α fun xs i => i.toNat < xs.size where
  getElem xs i h := xs[i.toNat]

-- Replicate the standard library's UInt lemma collection for `USize64`.
open Std Lean in
set_option autoImplicit true in
declare_uint_theorems USize64 64

theorem USize64.uaddOverflow_iff (x y : USize64) :
    BitVec.uaddOverflow x.toBitVec y.toBitVec ↔ x.toNat + y.toNat ≥ 2 ^ 64 := by
  simp [BitVec.uaddOverflow]

theorem USize64.umulOverflow_iff (x y : USize64) :
    BitVec.umulOverflow x.toBitVec y.toBitVec ↔ x.toNat * y.toNat ≥ 2 ^ 64 := by
  simp [BitVec.umulOverflow]

attribute [grind =] USize64.toNat_toBitVec
attribute [grind =] USize64.toNat_ofNat_of_lt
attribute [grind =] USize64.toNat_ofNat_of_lt'
grind_pattern USize64.toBitVec_ofNat => USize64.toBitVec (OfNat.ofNat n)

additional_uint_decls USize64 64

@[simp] theorem USize64.toNat_lt (n : USize64) : n.toNat < 2 ^ 64 := n.toFin.isLt

theorem USize64.le_self_add {a b : USize64} (h : a.toNat + b.toNat < 2 ^ 64) :
    a ≤ a + b := by
  rw
    [le_iff_toNat_le, USize64.toNat_add_of_lt h]
  exact Nat.le_add_right a.toNat b.toNat

theorem USize64.add_le_of_le {a b c : USize64} (habc : a + b ≤ c)
    (hab : a.toNat + b.toNat < 2 ^ 64): a ≤ c := by
  rw [USize64.le_iff_toNat_le, USize64.toNat_add_of_lt hab] at *
  omega

/-! ## Init.Data.UInt.Lemmas -/

-- Ring-style lemmas, each transported from the corresponding `BitVec` lemma
-- via injectivity of `toBitVec`.
protected theorem USize64.add_assoc (a b c : USize64) : a + b + c = a + (b + c) :=
  USize64.toBitVec_inj.1 (BitVec.add_assoc _ _ _)

protected theorem USize64.add_comm (a b : USize64) : a + b = b + a :=
  USize64.toBitVec_inj.1 (BitVec.add_comm _ _)

@[simp] protected theorem USize64.add_zero (a : USize64) : a + 0 = a :=
  USize64.toBitVec_inj.1 (BitVec.add_zero _)

protected theorem USize64.add_left_neg (a : USize64) : -a + a = 0 :=
  USize64.toBitVec_inj.1 (BitVec.add_left_neg _)

protected theorem USize64.mul_assoc (a b c : USize64) : a * b * c = a * (b * c) :=
  USize64.toBitVec_inj.1 (BitVec.mul_assoc _ _ _)

@[simp] theorem USize64.mul_one (a : USize64) : a * 1 = a :=
  USize64.toBitVec_inj.1 (BitVec.mul_one _)

@[simp] theorem USize64.one_mul (a : USize64) : 1 * a = a :=
  USize64.toBitVec_inj.1 (BitVec.one_mul _)

protected theorem USize64.mul_comm (a b : USize64) : a * b = b * a :=
  USize64.toBitVec_inj.1 (BitVec.mul_comm _ _)

@[simp] theorem USize64.mul_zero {a : USize64} : a * 0 = 0 :=
  USize64.toBitVec_inj.1 BitVec.mul_zero

@[simp] theorem USize64.zero_mul {a : USize64} : 0 * a = 0 :=
  USize64.toBitVec_inj.1 BitVec.zero_mul

protected theorem USize64.sub_eq_add_neg (a b : USize64) : a - b = a + (-b) :=
  USize64.toBitVec_inj.1 (BitVec.sub_eq_add_neg _ _)

@[simp] protected theorem USize64.pow_zero (x : USize64) : x ^ 0 = 1 := (rfl)

protected theorem USize64.pow_succ (x : USize64) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl)

theorem USize64.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : USize64) :
    USize64.ofNat a = b ↔ a % 2 ^ 64 = b.toNat := by
  simp [← USize64.toNat_inj]

-- `ofNat` is a (wrapping) homomorphism for `+`, `*`.
@[simp] theorem USize64.ofNat_add (a b : Nat) :
    USize64.ofNat (a + b) = USize64.ofNat a + USize64.ofNat b := by
  simp [USize64.ofNat_eq_iff_mod_eq_toNat]

theorem USize64.ofNat_mod_size (x : Nat) : ofNat (x % 2 ^ 64) = ofNat x := by
  simp [ofNat, BitVec.ofNat, Fin.ofNat]

@[simp] theorem USize64.ofNat_mul (a b : Nat) :
    USize64.ofNat (a * b) = USize64.ofNat a * USize64.ofNat b := by
  simp [USize64.ofNat_eq_iff_mod_eq_toNat]

-- `ofInt` is also multiplicative: reduce both factors mod 2^64 first.
@[simp] theorem USize64.ofInt_mul (x y : Int) : ofInt (x * y) = ofInt x * ofInt y := by
  dsimp only [USize64.ofInt]
  rw [Int.mul_emod]
  have h₁ : 0 ≤ x % 2 ^ 64 := Int.emod_nonneg _ (by decide)
  have h₂ : 0 ≤ y % 2 ^ 64 := Int.emod_nonneg _ (by decide)
  have h₃ : 0 ≤ (x % 2 ^ 64) * (y % 2 ^ 64) := Int.mul_nonneg h₁ h₂
  rw [Int.toNat_emod h₃ (by decide), Int.toNat_mul h₁ h₂]
  have : (2 ^ 64 : Int).toNat = 2 ^ 64 := (rfl)
  rw [this, USize64.ofNat_mod_size, USize64.ofNat_mul]

@[simp] theorem USize64.ofInt_neg_one : ofInt (-1) = -1 := (rfl)

theorem USize64.toBitVec_one : toBitVec 1 = 1#64 := (rfl)

theorem USize64.neg_eq_neg_one_mul (a : USize64) : -a = -1 * a := by
  apply USize64.toBitVec_inj.1
  rw [USize64.toBitVec_neg, USize64.toBitVec_mul, USize64.toBitVec_neg, USize64.toBitVec_one,
    BitVec.neg_eq_neg_one_mul]

@[simp] protected theorem USize64.ofInt_neg (x : Int) : ofInt (-x) = -ofInt x := by
  rw [Int.neg_eq_neg_one_mul, ofInt_mul, ofInt_neg_one, ← USize64.neg_eq_neg_one_mul]

protected theorem USize64.mul_add {a b c : USize64} : a * (b + c) = a * b + a * c :=
  USize64.toBitVec_inj.1 BitVec.mul_add

protected theorem USize64.add_mul {a b c : USize64} : (a + b) * c = a * c + b * c := by
  rw [USize64.mul_comm, USize64.mul_add, USize64.mul_comm a c, USize64.mul_comm c b]

protected theorem USize64.neg_mul (a b : USize64) : -a * b = -(a * b) :=
  USize64.toBitVec_inj.1 (BitVec.neg_mul _ _)

@[simp] protected theorem USize64.add_sub_cancel (a b : USize64) : a + b - b = a :=
  USize64.toBitVec_inj.1 (BitVec.add_sub_cancel _ _)

-- `ofNat` commutes with truncated subtraction when `b ≤ a` (no underflow).
theorem USize64.ofNat_sub {a b : Nat} (hab : b ≤ a) :
    USize64.ofNat (a - b) = USize64.ofNat a - USize64.ofNat b := by
  rw [(Nat.sub_add_cancel hab ▸ USize64.ofNat_add (a - b) b :), USize64.add_sub_cancel]

@[simp] protected theorem USize64.sub_add_cancel (a b : USize64) : a - b + b = a :=
  USize64.toBitVec_inj.1 (BitVec.sub_add_cancel _ _)

theorem USize64.le_ofNat_iff {n : USize64} {m : Nat} (h : m < size) :
    n ≤ ofNat m ↔ n.toNat ≤ m := by
  rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]

/-! ## Grind's ToInt

For grind to use integer arithmetic on `USize64`, we need the following
instances, inspired by the modules `Init.GrindInstances.ToInt` and
`Init.GrindInstances.Ring.UInt`. -/

namespace Lean.Grind

-- Interpret `USize64` as an integer in the unsigned 64-bit range.
instance : ToInt USize64 (.uint 64) where
  toInt x := (x.toNat : Int)
  toInt_inj x y w := USize64.toNat_inj.mp (Int.ofNat_inj.mp w)
  toInt_mem x := by simpa using Int.lt_toNat.mp (USize64.toNat_lt x)

@[simp] theorem toInt_usize64 (x : USize64) : ToInt.toInt x = (x.toNat : Int) := rfl

-- Operation-compatibility instances so grind can reason modulo 2^64.
instance : ToInt.Zero USize64 (.uint 64) where toInt_zero := by simp
instance : ToInt.OfNat USize64 (.uint 64) where toInt_ofNat x := by simp; rfl
instance : ToInt.Add USize64 (.uint 64) where toInt_add x y := by simp
instance : ToInt.Mul USize64 (.uint 64) where toInt_mul x y := by simp
instance : ToInt.Mod USize64 (.uint 64) where toInt_mod x y := by simp
instance : ToInt.Div USize64 (.uint 64) where toInt_div x y := by simp
instance : ToInt.LE USize64 (.uint 64) where
  le_iff x y := by simpa using USize64.le_iff_toBitVec_le
instance : ToInt.LT USize64 (.uint 64) where
  lt_iff x y := by simpa using USize64.lt_iff_toBitVec_lt

-- Local `NatCast`/`IntCast` instances used only to state the ring structure.
@[expose] def USize64.natCast : NatCast USize64 where natCast x := USize64.ofNat x
@[expose] def USize64.intCast : IntCast USize64 where intCast x := USize64.ofInt x

attribute [local instance_reducible, local instance] USize64.natCast USize64.intCast

theorem USize64.intCast_ofNat (x : Nat) :
    (OfNat.ofNat (α := Int) x : USize64) = OfNat.ofNat x := by
  change USize64.ofInt (OfNat.ofNat x) = OfNat.ofNat x
  rw [USize64.ofInt]
  rw [Int.toNat_emod (Int.zero_le_ofNat x) (by decide)]
  erw [Int.toNat_natCast]
  rw [Int.toNat_pow_of_nonneg (by decide)]
  simp +instances only [USize64.ofNat, BitVec.ofNat, Fin.Internal.ofNat_eq_ofNat, Fin.ofNat,
    Int.reduceToNat, Nat.dvd_refl, Nat.mod_mod_of_dvd]
  try rfl

theorem USize64.intCast_neg (x : Int) : ((-x : Int) : USize64) = - (x : USize64) :=
  USize64.ofInt_neg _

-- `USize64` is a commutative ring (arithmetic mod 2^64); fields are
-- discharged by the lemmas proved above.
instance : CommRing USize64 where
  nsmul := ⟨(· * ·)⟩
  zsmul := ⟨(· * ·)⟩
  add_assoc := USize64.add_assoc
  add_comm := USize64.add_comm
  add_zero := USize64.add_zero
  neg_add_cancel := USize64.add_left_neg
  mul_assoc := USize64.mul_assoc
  mul_comm := USize64.mul_comm
  mul_one := USize64.mul_one
  one_mul := USize64.one_mul
  left_distrib _ _ _ := USize64.mul_add
  right_distrib _ _ _ := USize64.add_mul
  zero_mul _ := USize64.zero_mul
  mul_zero _ := USize64.mul_zero
  sub_eq_add_neg := USize64.sub_eq_add_neg
  pow_zero := USize64.pow_zero
  pow_succ := USize64.pow_succ
  ofNat_succ x := USize64.ofNat_add x 1
  intCast_neg := USize64.ofInt_neg
  intCast_ofNat := USize64.intCast_ofNat
  neg_zsmul i a := by
    change (-i : Int) * a = - (i * a)
    simp [USize64.intCast_neg, USize64.neg_mul]
  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (USize64.intCast_ofNat _)

-- Characteristic is 2^64 = 18446744073709551616.
instance : IsCharP USize64 18446744073709551616 := IsCharP.mk' _ _
  (ofNat_eq_zero_iff := fun x => by
    have : OfNat.ofNat x = USize64.ofNat x := rfl
    simp [this, USize64.ofNat_eq_iff_mod_eq_toNat])

instance : ToInt.Pow USize64 (.uint 64) := ToInt.pow_of_semiring (by simp)

end Lean.Grind

/-! ## Simp-Procs

Grind and simp use some simplification procedures for UInts. They are defined
in `Lean.Meta.Tactic.Simp.BuiltinSimprocs.UInt` and replicated here. -/

namespace USize64

open Lean Meta Simp

-- Build the expression `(OfNat.ofNat n : USize64)` for a literal value.
instance : ToExpr USize64 where
  toTypeExpr := mkConst ``USize64
  toExpr a :=
    let r := mkRawNatLit a.toNat
    mkApp3 (.const ``OfNat.ofNat [0]) (mkConst ``USize64) r
      (.app (.const ``USize64.instOfNat []) r)

/-- Recognize a `USize64` literal expression; `none` if `e` is not one. -/
def fromExpr (e : Expr) : SimpM (Option USize64) := do
  let some (n, _) ← getOfNatValue?
      e `USize64 | return none
  return USize64.ofNat n

/-- Fold a binary `USize64` operation applied to two literals into a literal. -/
@[inline] def reduceBin (declName : Name) (arity : Nat) (op : USize64 → USize64 → USize64)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

/-- Decide a binary predicate on two `USize64` literals (propositional result). -/
@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : USize64 → USize64 → Bool)
    (e : Expr) : SimpM Step := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  evalPropStep e (op n m)

/-- Evaluate a Boolean-valued binary operation on two `USize64` literals. -/
@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : USize64 → USize64 → Bool)
    (e : Expr) : SimpM DStep := do
  unless e.isAppOfArity declName arity do return .continue
  let some n ← (fromExpr e.appFn!.appArg!) | return .continue
  let some m ← (fromExpr e.appArg!) | return .continue
  return .done <| toExpr (op n m)

-- Literal folding for the arithmetic operators.
dsimproc [simp, seval] reduceAdd ((_ + _ : USize64)) := reduceBin ``HAdd.hAdd 6 (· + ·)
dsimproc [simp, seval] reduceMul ((_ * _ : USize64)) := reduceBin ``HMul.hMul 6 (· * ·)
dsimproc [simp, seval] reduceSub ((_ - _ : USize64)) := reduceBin ``HSub.hSub 6 (· - ·)
dsimproc [simp, seval] reduceDiv ((_ / _ : USize64)) := reduceBin ``HDiv.hDiv 6 (· / ·)
dsimproc [simp, seval] reduceMod ((_ % _ : USize64)) := reduceBin ``HMod.hMod 6 (· % ·)

-- Literal deciding for comparisons and (dis)equality.
simproc [simp, seval] reduceLT (( _ : USize64) < _) := reduceBinPred ``LT.lt 4 (. < .)
simproc [simp, seval] reduceLE (( _ : USize64) ≤ _) := reduceBinPred ``LE.le 4 (. ≤ .)
simproc [simp, seval] reduceGT (( _ : USize64) > _) := reduceBinPred ``GT.gt 4 (. > .)
simproc [simp, seval] reduceGE (( _ : USize64) ≥ _) := reduceBinPred ``GE.ge 4 (. ≥ .)
simproc [simp, seval] reduceEq (( _ : USize64) = _) := reduceBinPred ``Eq 3 (. = .)
simproc [simp, seval] reduceNe (( _ : USize64) ≠ _) := reduceBinPred ``Ne 3 (. ≠ .)
dsimproc [simp, seval] reduceBEq (( _ : USize64) == _) := reduceBoolPred ``BEq.beq 4 (. == .)
dsimproc [simp, seval] reduceBNe (( _ : USize64) != _) := reduceBoolPred ``bne 4 (. != .)

-- Fold `ofNatLT n h` applied to a literal `n` into a `USize64` literal
-- (the proof argument is dropped).
dsimproc [simp, seval] reduceOfNatLT (USize64.ofNatLT _ _) := fun e => do
  unless e.isAppOfArity `USize64.ofNatLT 2 do return .continue
  let some value ← Nat.fromExpr? e.appFn!.appArg! | return .continue
  let value := USize64.ofNat value
  return .done <| toExpr value

-- Fold `ofNat` applied to a `Nat` literal.
dsimproc [simp, seval] reduceOfNat (USize64.ofNat _) := fun e => do
  unless e.isAppOfArity `USize64.ofNat 1 do return .continue
  let some value ← Nat.fromExpr? e.appArg! | return .continue
  let value := USize64.ofNat value
  return .done <| toExpr value

-- Fold `toNat` applied to a `USize64` literal into a `Nat` literal.
dsimproc [simp, seval] reduceToNat (USize64.toNat _) := fun e => do
  unless e.isAppOfArity `USize64.toNat 1 do return .continue
  let some v ← (fromExpr e.appArg!) | return .continue
  let n := USize64.toNat v
  return .done <| toExpr n

/-- Return `.done` for UInt values. We don't want to unfold in the symbolic evaluator.
-/
dsimproc [seval] isValue ((OfNat.ofNat _ : USize64)) := fun e => do
  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue
  return .done e

end USize64

/- ## Lemmas from `Init.SizeOfLemmas`: -/

@[simp] protected theorem USize64.sizeOf (a : USize64) : sizeOf a = a.toNat + 3 := by
  cases a; simp +arith [USize64.toNat, BitVec.toNat, -BitVec.val_toFin]

/- ## Lemmas from `MissingLean`: -/

-- `ofNat` is a left inverse of `toNat`.
theorem USize64.ofNat_eq_of_toNat_eq {a : Nat} {b : USize64} (h : b.toNat = a) :
    ofNat a = b := by
  subst_vars; exact USize64.ofNat_toNat

theorem USize64.sub_add_eq {a b c : USize64} : a - (b + c) = a - b - c := by grind

-- Termination-style lemma: stepping `a` toward `b` strictly shrinks `b - a`.
theorem USize64.sub_succ_lt_self (a b : USize64) (h : a < b) :
    (b - (a + 1)).toNat < (b - a).toNat := by
  rw [sub_add_eq]
  rw [USize64.toNat_sub_of_le]
  try simp only [USize.toNat_one]
  apply Nat.sub_one_lt_of_lt
  · change (0 : USize64).toNat < (b - a).toNat
    rw [← lt_iff_toNat_lt]
    grind
  · grind

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/arithmetic.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.ops

open Lean in
set_option hygiene false in
/- `declare_arith_ops` generates, for one machine-integer type, the
`wrapping_add`/`wrapping_sub`/`wrapping_mul` operations (Lean machine-integer
arithmetic already wraps, so they are just `pure`) and an overflow-checked
`pow`. `suffix` (e.g. `u8`) is appended to each generated name. -/
macro "declare_arith_ops" s:(&"signed" <|> &"unsigned") typeName:ident suffix:ident width:term : command => do
  let signed ← match s.raw[0].getKind with
    | `signed => pure true
    | `unsigned => pure false
    | _ => throw .unsupportedSyntax
  -- e.g. `ident "pow"` with suffix `u8` produces the name `pow_u8`
  let ident (kind: String) := mkIdent (kind ++ "_" ++ suffix.getId.toString).toName
  let mut cmds ← Syntax.getArgs <$> `(
    namespace rust_primitives.arithmetic
    @[spec] def $(ident "wrapping_add") (x : $typeName) (y : $typeName) : RustM $typeName :=
      pure (x + y)
    @[spec] def $(ident "wrapping_sub") (x : $typeName) (y : $typeName) : RustM $typeName :=
      pure (x - y)
    @[spec] def $(ident "wrapping_mul") (x : $typeName) (y : $typeName) : RustM $typeName :=
      pure (x * y)
  )
  if signed then
    cmds := cmds.push $ ← `(
      -- Signed `pow`: fail unless the exact result lies in [-2^(w-1), 2^(w-1))
      def $(ident "pow") (x : $typeName) (y : u32) : RustM $typeName :=
        if x.toInt ^ y.toNat ≥ 2 ^ ($width - 1) || x.toInt ^ y.toNat < - 2 ^ ($width - 1) then
          .fail .integerOverflow
        else
          pure (x ^ y.toNat)
    )
  else
    cmds := cmds.push $ ← `(
      -- Unsigned `pow`: fail when the exact result is ≥ 2^w
      def $(ident "pow") (x : $typeName) (y : u32) : RustM $typeName :=
        if x.toNat ^ y.toNat ≥ 2 ^ $width then
          .fail .integerOverflow
        else
          pure (x ^ y.toNat)
    )
  cmds := cmds.push $ ← `(
    end rust_primitives.arithmetic
  )
  return ⟨mkNullNode cmds⟩

declare_arith_ops unsigned UInt8 u8 8
declare_arith_ops unsigned UInt16 u16 16
declare_arith_ops unsigned UInt32 u32 32
declare_arith_ops unsigned UInt64 u64 64
declare_arith_ops unsigned u128 u128 128
declare_arith_ops unsigned USize64 usize 64
declare_arith_ops signed Int8 i8 8
declare_arith_ops signed Int16 i16 16
declare_arith_ops signed Int32 i32 32
declare_arith_ops signed Int64 i64 64
declare_arith_ops signed i128 i128 128
declare_arith_ops signed ISize isize 64

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/boxed.lean
================================================
-- `Box<T, A>` has no observable meaning for verification: model it as `T` itself.
abbrev alloc.boxed.Box (T _Allocator : Type) := T

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/array.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.ops
import Hax.rust_primitives.sequence

open Std.Do
set_option mvcgen.warning false
attribute [local grind! .]
USize64.toNat_lt_size

/- # Arrays

Rust arrays, are represented as Lean `Vector` (Lean Arrays of known size) -/

section RustArray

/-- A Rust array `[α; n]`: a Lean `Vector` whose length is the `usize` value `n`. -/
structure RustArray (α : Type) (n : usize) where
  ofVec ::
  toVec : Vector α n.toNat

/-- Functional update at index `i`; fails with `arrayOutOfBounds` when `i` is
not a valid index. -/
@[spec] def rust_primitives.hax.monomorphized_update_at.update_at_usize {α n}
    (a : RustArray α n) (i : usize) (v : α) : RustM (RustArray α n) :=
  if h: i.toNat < a.toVec.size then
    pure (.ofVec (Vector.set a.toVec i.toNat v))
  else
    .fail (.arrayOutOfBounds)

/-- Functional update at index `i`; fails with `arrayOutOfBounds` when `i` is
not a valid index. -/
@[spec] def rust_primitives.hax.update_at {α n}
    (m : RustArray α n) (i : usize) (v : α) : RustM (RustArray α n) :=
  if i.toNat < n.toNat then
    pure (.ofVec (Vector.setIfInBounds m.toVec i.toNat v))
  else
    .fail (.arrayOutOfBounds)

/-- The Rust `[v; size]` repeat expression; fails unless the requested `size`
agrees with the expected static length `n`. -/
@[spec] def rust_primitives.hax.repeat {α int_type : Type} {n : usize} [ToNat int_type]
    (v:α) (size:int_type) : RustM (RustArray α n) :=
  if (n.toNat = ToNat.toNat size) then
    pure (.ofVec (Vector.replicate n.toNat v))
  else
    .fail Error.arrayOutOfBounds

/-- Unsize coercion `[α; n] → [α]`: forget the static length. -/
@[spec] def rust_primitives.unsize {α n} (a: RustArray α n) :
    RustM (rust_primitives.sequence.Seq α) :=
  pure ⟨a.toVec.toArray, by grind⟩

end RustArray

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/int.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.USize64
import Hax.rust_primitives.ops

open Std.Do
set_option mvcgen.warning false

namespace rust_primitives.hax.int

open Lean.Grind in
/-- Inject a machine integer into the unbounded `Int` used by hax's abstract-integer specs. -/
abbrev from_machine {α} {range} [ToInt α range] (x : α) : RustM Int :=
  pure (ToInt.toInt x)

attribute [grind] Lean.Grind.ToInt.toInt
  Lean.Grind.instToIntUInt8UintOfNatNat
  Lean.Grind.instToIntUInt16UintOfNatNat
  Lean.Grind.instToIntUInt32UintOfNatNat
  Lean.Grind.instToIntUInt64UintOfNatNat
  Lean.Grind.instToIntUSize64UintOfNatNat
  Lean.Grind.instToIntInt8SintOfNatNat
  Lean.Grind.instToIntInt16SintOfNatNat
  Lean.Grind.instToIntInt32SintOfNatNat
  Lean.Grind.instToIntInt64SintOfNatNat
  Lean.Grind.instToIntISizeSintNumBits

/-- Addition of unbounded integers: never fails. -/
@[spec] def add (x y : Int) :
RustM Int :=
  pure (x + y)

/-- Subtraction of unbounded integers: never fails. -/
@[spec] def sub (x y : Int) : RustM Int :=
  pure (x - y)

/-- Multiplication of unbounded integers: never fails. -/
@[spec] def mul (x y : Int) : RustM Int :=
  pure (x * y)

/-- Division of unbounded integers; fails on a zero divisor. -/
@[spec] def div (x y : Int) : RustM Int :=
  if y == 0 then .fail .divisionByZero else pure (x / y)

/-- Negation of unbounded integers: never fails. -/
@[spec] def neg (x : Int) : RustM Int :=
  pure (-x)

/-- Comparison `x > y` on unbounded integers. -/
@[spec] def gt (x y : Int) : RustM Bool :=
  pure (x > y)

/-- Comparison `x < y` on unbounded integers. -/
@[spec] def lt (x y : Int) : RustM Bool :=
  pure (x < y)

/-- Comparison `x ≥ y` on unbounded integers. -/
@[spec] def ge (x y : Int) : RustM Bool :=
  pure (x ≥ y)

/-- Comparison `x ≤ y` on unbounded integers. -/
@[spec] def le (x y : Int) : RustM Bool :=
  pure (x ≤ y)

/-- Equality test on unbounded integers. -/
@[spec] def eq (x y : Int) : RustM Bool :=
  pure (x == y)

end rust_primitives.hax.int

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/logical_op.lean
================================================
import Hax.Tactic.Init
import Hax.rust_primitives.RustM

/- Logic predicates introduced by Hax (in pre/post conditions) -/

namespace rust_primitives.hax.logical_op

/-- Boolean conjunction. Cannot panic (always returns .ok ) -/
@[simp, spec, hax_bv_decide] def and (a b: Bool) : RustM Bool :=
  pure (a && b)

/-- Boolean disjunction. Cannot panic (always returns .ok )-/
@[simp, spec, hax_bv_decide] def or (a b: Bool) : RustM Bool :=
  pure (a || b)

/-- Boolean exclusive disjunction. Cannot panic (always returns .ok )-/
@[simp, spec, hax_bv_decide] def xor (a b: Bool) : RustM Bool :=
  pure (a ^^ b)

/-- Boolean negation. Cannot panic (always returns .ok )-/
@[simp, spec, hax_bv_decide] def not (a :Bool) : RustM Bool :=
  pure (!a)

@[inherit_doc] infixl:35 " &&? " => and
@[inherit_doc] infixl:30 " ||? " => or
@[inherit_doc] infixl:30 " ^^? " => xor
@[inherit_doc] notation:max "!?"
b:40 => not b end rust_primitives.hax.logical_op namespace rust_primitives.hax @[spec] def logical_op_or (x y : Bool) : RustM Bool := pure (x || y) @[spec] def logical_op_and (x y : Bool) : RustM Bool := pure (x && y) end rust_primitives.hax ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/machine_int.lean ================================================ import Hax.Tactic.SpecSet attribute [specset bv] bne ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/never.lean ================================================ namespace rust_primitives.hax abbrev Never : Type := Empty abbrev never_to_any.{u} {α : Sort u} : Never → α := Empty.elim end rust_primitives.hax ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/tuple.lean ================================================ /- # Tuples -/ namespace rust_primitives.hax structure Tuple0 where deriving Repr, BEq, DecidableEq structure Tuple1 (α0: Type) where _0 : α0 deriving Repr, BEq, DecidableEq structure Tuple2 (α0 α1: Type) where _0 : α0 _1 : α1 deriving Repr, BEq, DecidableEq structure Tuple3 (α0 α1 α2: Type) where _0 : α0 _1 : α1 _2 : α2 deriving Repr, BEq, DecidableEq structure Tuple4 (α0 α1 α2 α3 : Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 deriving Repr, BEq, DecidableEq structure Tuple5 (α0 α1 α2 α3 α4 : Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 deriving Repr, BEq, DecidableEq structure Tuple6 (α0 α1 α2 α3 α4 α5 : Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 _5 : α5 deriving Repr, BEq, DecidableEq structure Tuple7 (α0 α1 α2 α3 α4 α5 α6 : Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 _5 : α5 _6 : α6 deriving Repr, BEq, DecidableEq structure Tuple8 (α0 α1 α2 α3 α4 α5 α6 α7 : Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 _5 : α5 _6 : α6 _7 : α7 deriving Repr, BEq, DecidableEq structure Tuple9 (α0 α1 α2 α3 α4 α5 α6 α7 α8 : Type) where 
_0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 _5 : α5 _6 : α6 _7 : α7 _8 : α8 deriving Repr, BEq, DecidableEq structure Tuple10 (α0 α1 α2 α3 α4 α5 α6 α7 α8 α9: Type) where _0 : α0 _1 : α1 _2 : α2 _3 : α3 _4 : α4 _5 : α5 _6 : α6 _7 : α7 _8 : α8 _9 : α9 deriving Repr, BEq, DecidableEq end rust_primitives.hax ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax/while_loop.lean ================================================ import Hax.rust_primitives.RustM import Hax.rust_primitives.hax_lib import Hax.MissingLean.Std.Do.Triple.SpecLemmas import Hax.Tactic.HaxConstructPure open Std.Do /- # Loops -/ open Lean /-- `while_loop` is used to represent while-loops in `RustM` programs. The function provides extra arguments to store a termination measure and an invariant, which can be used to verify the program. The arguments `pureInv` and `pureTermination` are usually not provided explicitly and derived by the default tactic given below. -/ def rust_primitives.hax.while_loop {β : Type} (inv: β → RustM Prop) (cond: β → RustM Bool) (termination : β -> RustM hax_lib.int.Int) (init : β) (body : β -> RustM β) (pureInv: {i : β -> Prop // ∀ b, ⦃⌜ True ⌝⦄ inv b ⦃⇓ r => ⌜ r = (i b) ⌝⦄} := by set_option hax_mvcgen.specset "int" in hax_construct_pure <;> grind) (_pureTermination : {t : β -> Nat // ∀ b, ⦃⌜ True ⌝⦄ termination b ⦃⇓ r => ⌜ r = Int.ofNat (t b) ⌝⦄} := by set_option hax_mvcgen.specset "int" in hax_construct_pure <;> grind) (pureCond : {c : β -> Bool // ∀ b, ⦃⌜ pureInv.val b ⌝⦄ cond b ⦃⇓ r => ⌜ r = c b ⌝⦄} := by set_option hax_mvcgen.specset "int" in hax_construct_pure <;> grind) : RustM β := Loop.MonoLoopCombinator.while_loop Loop.mk pureCond.val init body @[spec] theorem rust_primitives.hax.while_loop.spec {β : Type} (inv: β → RustM Prop) (cond: β → RustM Bool) (termination: β → RustM hax_lib.int.Int) (init : β) (body : β -> RustM β) (pureInv: {i : β -> Prop // ∀ b, ⦃⌜ True ⌝⦄ inv b ⦃⇓ r => ⌜ r = (i b) ⌝⦄}) (pureTermination : {t : 
β -> Nat // ∀ b, ⦃⌜ True ⌝⦄ termination b ⦃⇓ r => ⌜ r = Int.ofNat (t b) ⌝⦄}) (pureCond : {c : β -> Bool // ∀ b, ⦃⌜ pureInv.val b ⌝⦄ cond b ⦃⇓ r => ⌜ r = c b ⌝⦄}) (step : ∀ (b : β), pureCond.val b → ⦃⌜ pureInv.val b ⌝⦄ body b ⦃⇓ b' => spred(⌜ pureTermination.val b' < pureTermination.val b ⌝ ∧ ⌜ pureInv.val b' ⌝)⦄ ) : ⦃⌜ pureInv.val init ⌝⦄ while_loop inv cond termination init body pureInv pureTermination pureCond ⦃⇓ r => ⌜ pureInv.val r ∧ ¬ pureCond.val r ⌝⦄ := Spec.MonoLoopCombinator.while_loop init Loop.mk pureCond.val body pureInv pureTermination step ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax.lean ================================================ import Hax.rust_primitives.hax.array import Hax.rust_primitives.hax.int import Hax.rust_primitives.hax.logical_op import Hax.rust_primitives.hax.machine_int import Hax.rust_primitives.hax.never import Hax.rust_primitives.hax.tuple import Hax.rust_primitives.hax.while_loop ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/hax_lib.lean ================================================ import Hax.rust_primitives.hax.tuple import Hax.rust_primitives.RustM import Hax.Tactic.HaxConstructPure open rust_primitives.hax open Std.Do namespace hax_lib abbrev prop.Prop := Prop @[spec] def assert (b:Bool) : RustM Tuple0 := if b then pure ⟨ ⟩ else .fail (Error.assertionFailure) @[spec] def assume : Prop -> RustM Tuple0 := fun _ => pure ⟨ ⟩ @[spec] def prop.constructors.from_bool (b : Bool) : RustM Prop := pure (b = true) @[spec] def prop.Impl.from_bool (b : Bool) : RustM Prop := pure (b = true) @[spec] def prop.constructors.implies (a b : Prop) : RustM Prop := pure (a → b) @[spec] def prop.constructors.not (a : Prop) : RustM Prop := pure (¬ a) @[spec] def prop.constructors.and (a b : Prop) : RustM Prop := pure (a ∧ b) @[spec] def prop.constructors.or (a b : Prop) : RustM Prop := pure (a ∨ b) @[spec] def prop.constructors.eq (a b 
: Prop) : RustM Prop := pure (a = b) @[spec] def prop.constructors.ne (a b : Prop) : RustM Prop := pure (a ≠ b) @[spec] def prop.constructors.forall {α : Type} (p : α → RustM Prop) (pureP : {p' : α -> Prop // ∀ a, ⦃⌜ True ⌝⦄ p a ⦃⇓ r => ⌜ r = (p' a) ⌝⦄} := by set_option hax_mvcgen.specset "int" in hax_construct_pure <;> grind) : RustM Prop := pure (∀ a : α, pureP.val a) @[spec] def prop.constructors.exists {α : Type} (p : α → RustM Prop) (pureP : {p' : α -> Prop // ∀ a, ⦃⌜ True ⌝⦄ p a ⦃⇓ r => ⌜ r = (p' a) ⌝⦄} := by set_option hax_mvcgen.specset "int" in hax_construct_pure <;> grind) : RustM Prop := pure (∃ a : α, pureP.val a) end hax_lib abbrev hax_lib.int.Int : Type := _root_.Int ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/mem.lean ================================================ import Hax.rust_primitives.RustM import Hax.rust_primitives.hax def rust_primitives.mem.replace (α : Type) (dst : α) (src : α) : RustM (rust_primitives.hax.Tuple2 α α) := pure ⟨src, dst⟩ def rust_primitives.mem.copy (α : Type) (a : α) : RustM α := pure a ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/ops.lean ================================================ import Hax.Tactic.Init import Hax.rust_primitives.USize64 import Hax.Tactic.SpecSet import Hax.MissingLean import Hax.rust_primitives.RustM open Std.Do open Std.Tactic open Std.Do set_option mvcgen.warning false /- Integer types are represented as the corresponding type in Lean -/ abbrev u8 := UInt8 abbrev u16 := UInt16 abbrev u32 := UInt32 abbrev u64 := UInt64 abbrev u128 := UInt128 abbrev usize := USize64 abbrev i8 := Int8 abbrev i16 := Int16 abbrev i32 := Int32 abbrev i64 := Int64 abbrev i128 := Int128 abbrev isize := ISize abbrev f32 := Float32 abbrev f64 := Float /-- Class of objects that can be transformed into Nat -/ class ToNat (α: Type) where toNat : α -> Nat attribute [grind] ToNat.toNat @[simp, grind] instance : ToNat 
usize where toNat x := x.toNat @[simp, grind] instance : ToNat u128 where toNat x := x.toNat @[simp, grind] instance : ToNat u64 where toNat x := x.toNat @[simp, grind] instance : ToNat u32 where toNat x := x.toNat @[simp, grind] instance : ToNat u16 where toNat x := x.toNat @[simp, grind] instance : ToNat u8 where toNat x := x.toNat infixl:58 " ^^^? " => fun a b => pure (HXor.hXor a b) infixl:60 " &&&? " => fun a b => pure (HAnd.hAnd a b) infixl:60 " |||? " => fun a b => pure (HOr.hOr a b) prefix:75 "~?" => fun a => pure (~~~a) /- ## Boolean comparisons Boolean comparisons that are prettyfied for the integer and boolean types. -/ namespace rust_primitives.cmp def eq {α : Type} [BEq α] (a b : α) : RustM Bool := pure (a == b) def ne {α : Type} [BEq α] (a b : α) : RustM Bool := pure (a != b) def lt {α : Type} [LT α] [DecidableLT α] (a b : α) : RustM Bool := pure (decide (a < b)) def le {α : Type} [LE α] [DecidableLE α] (a b : α) : RustM Bool := pure (decide (a <= b)) def gt {α : Type} [LT α] [DecidableLT α] (a b : α) : RustM Bool := pure (decide (a > b)) def ge {α : Type} [LE α] [DecidableLE α] (a b : α) : RustM Bool := pure (decide (a >= b)) infixl:80 " ==? " => rust_primitives.cmp.eq infixl:80 " !=? " => rust_primitives.cmp.ne infixl:80 " rust_primitives.cmp.lt infixl:80 " <=? " => rust_primitives.cmp.le infixl:80 " >? " => rust_primitives.cmp.gt infixl:80 " >=? 
" => rust_primitives.cmp.ge attribute [spec 100, specset bv, hax_bv_decide] rust_primitives.cmp.eq rust_primitives.cmp.ne rust_primitives.cmp.lt rust_primitives.cmp.le rust_primitives.cmp.gt rust_primitives.cmp.ge open Lean in set_option hygiene false in macro "declare_comparison_specs" s:(&"signed" <|> &"unsigned") typeName:ident width:term : command => do let signed ← match s.raw[0].getKind with | `signed => pure true | `unsigned => pure false | _ => throw .unsupportedSyntax if signed then return ← `( namespace $typeName @[specset int] def eq_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ eq x y ⦃ ⇓ r => ⌜ r = (x.toInt == y.toInt) ⌝ ⦄ := by mvcgen [eq]; rw [← @Bool.coe_iff_coe]; simp [x.toInt_inj] @[specset int] def ne_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ne x y ⦃ ⇓ r => ⌜ r = (x.toInt != y.toInt) ⌝ ⦄ := by mvcgen [ne]; rw [← @Bool.coe_iff_coe]; simp [x.toInt_inj] @[specset int] def lt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ lt x y ⦃ ⇓ r => ⌜ r = decide (x.toInt < y.toInt) ⌝ ⦄ := by mvcgen [lt]; simp [x.lt_iff_toInt_lt] @[specset int] def le_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ le x y ⦃ ⇓ r => ⌜ r = decide (x.toInt ≤ y.toInt) ⌝ ⦄ := by mvcgen [le]; simp [x.le_iff_toInt_le] @[specset int] def gt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ gt x y ⦃ ⇓ r => ⌜ r = decide (x.toInt > y.toInt ) ⌝ ⦄ := by mvcgen [gt]; simp [y.lt_iff_toInt_lt] @[specset int] def ge_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ge x y ⦃ ⇓ r => ⌜ r = decide (x.toInt ≥ y.toInt) ⌝ ⦄ := by mvcgen [ge]; simp [y.le_iff_toInt_le] end $typeName ) else return ← `( namespace $typeName @[specset int] def eq_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ eq x y ⦃ ⇓ r => ⌜ r = (x.toNat == y.toNat) ⌝ ⦄ := by mvcgen [eq]; rw [← @Bool.coe_iff_coe]; simp [x.toNat_inj] @[specset int] def ne_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ne x y ⦃ ⇓ r => ⌜ r = (x.toNat != y.toNat) ⌝ ⦄ := by mvcgen [ne]; rw [← @Bool.coe_iff_coe]; simp [x.toNat_inj] @[specset int] def lt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ lt x y ⦃ ⇓ r => ⌜ r = decide (x.toNat < 
y.toNat) ⌝ ⦄ := by mvcgen [lt] @[specset int] def le_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ le x y ⦃ ⇓ r => ⌜ r = decide (x.toNat ≤ y.toNat) ⌝ ⦄ := by mvcgen [le] @[specset int] def gt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ gt x y ⦃ ⇓ r => ⌜ r = decide (x.toNat > y.toNat ) ⌝ ⦄ := by mvcgen [gt] @[specset int] def ge_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ge x y ⦃ ⇓ r => ⌜ r = decide (x.toNat ≥ y.toNat) ⌝ ⦄ := by mvcgen [ge] end $typeName ) declare_comparison_specs signed Int8 8 declare_comparison_specs signed Int16 16 declare_comparison_specs signed Int32 32 declare_comparison_specs signed Int64 64 declare_comparison_specs signed ISize System.Platform.numBits declare_comparison_specs unsigned UInt8 8 declare_comparison_specs unsigned UInt16 16 declare_comparison_specs unsigned UInt32 32 declare_comparison_specs unsigned UInt64 64 declare_comparison_specs unsigned USize64 64 end rust_primitives.cmp set_option linter.unusedVariables false in /- ## Arithmetic operations The Rust arithmetic operations have their own notations, using a `?`. They return a `RustM`, that is `.fail` when arithmetic overflows occur. -/ class rust_primitives.ops.arith.Add (α : Type) where add : α → α → RustM α class rust_primitives.ops.arith.Sub (α : Type) where sub : α → α → RustM α class rust_primitives.ops.arith.Mul (α : Type) where mul : α → α → RustM α class rust_primitives.ops.arith.Rem (α : Type) where rem : α → α → RustM α class rust_primitives.ops.arith.Div (α : Type) where div : α → α → RustM α class rust_primitives.ops.arith.Neg (α : Type) where neg : α → RustM α class rust_primitives.ops.bit.Shr (α : Type) (β : Type) where shr : α → β → RustM α class rust_primitives.ops.bit.Shl (α : Type) (β : Type) where shl : α → β → RustM α infixl:65 " +? " => rust_primitives.ops.arith.Add.add infixl:65 " -? " => rust_primitives.ops.arith.Sub.sub infixl:70 " *? " => rust_primitives.ops.arith.Mul.mul infixl:75 " >>>? 
" => rust_primitives.ops.bit.Shr.shr infixl:75 " << rust_primitives.ops.bit.Shl.shl infixl:70 " %? " => rust_primitives.ops.arith.Rem.rem infixl:70 " /? " => rust_primitives.ops.arith.Div.div prefix:75 "-?" => rust_primitives.ops.arith.Neg.neg attribute [specset bv, hax_bv_decide] rust_primitives.ops.arith.Add.add rust_primitives.ops.arith.Sub.sub rust_primitives.ops.arith.Mul.mul rust_primitives.ops.bit.Shr.shr rust_primitives.ops.bit.Shl.shl rust_primitives.ops.arith.Rem.rem rust_primitives.ops.arith.Div.div rust_primitives.ops.arith.Neg.neg open Lean in macro "declare_Hax_int_ops" s:(&"signed" <|> &"unsigned") typeName:ident width:term : command => do let signed ← match s.raw[0].getKind with | `signed => pure true | `unsigned => pure false | _ => throw .unsupportedSyntax let mut cmds ← Syntax.getArgs <$> `( /-- Addition on Rust integers. Panics on overflow. -/ instance : rust_primitives.ops.arith.Add $typeName where add x y := if ($(mkIdent (if signed then `BitVec.saddOverflow else `BitVec.uaddOverflow)) x.toBitVec y.toBitVec) then .fail .integerOverflow else pure (x + y) /-- Subtraction on Rust integers. Panics on overflow. -/ instance : rust_primitives.ops.arith.Sub $typeName where sub x y := if ($(mkIdent (if signed then `BitVec.ssubOverflow else `BitVec.usubOverflow)) x.toBitVec y.toBitVec) then .fail .integerOverflow else pure (x - y) /-- Multiplication on Rust integers. Panics on overflow. -/ instance : rust_primitives.ops.arith.Mul $typeName where mul x y := if ($(mkIdent (if signed then `BitVec.smulOverflow else `BitVec.umulOverflow)) x.toBitVec y.toBitVec) then .fail .integerOverflow else pure (x * y) ) if signed then cmds := cmds.append $ ← Syntax.getArgs <$> `( /-- Division of signed Rust integers. Panics on overflow (when x is IntMin and `y = -1`) and when dividing by zero. 
-/
      instance : rust_primitives.ops.arith.Div $typeName where
        div x y :=
          if x = $(mkIdent (typeName.getId ++ `minValue)) && y = -1 then
            .fail .integerOverflow
          else if y = 0 then
            .fail .divisionByZero
          else
            pure (x / y)

      /-- Remainder of signed Rust integers. Panics on overflow (when x is IntMin
      and `y = -1`) and when the modulus is zero. -/
      instance : rust_primitives.ops.arith.Rem $typeName where
        rem x y :=
          if x = $(mkIdent (typeName.getId ++ `minValue)) && y = -1 then
            .fail .integerOverflow
          else if y = 0 then
            .fail .divisionByZero
          else
            pure (x % y)

      /-- Negation on signed integers. Panics on overflow (when `x` is `minValue`). -/
      instance : rust_primitives.ops.arith.Neg $typeName where
        neg x :=
          if x = $(mkIdent (typeName.getId ++ `minValue)) then
            .fail .integerOverflow
          else
            pure (- x)
    )
  else -- unsigned
    cmds := cmds.append $ ← Syntax.getArgs <$> `(
      /-- Division on unsigned Rust integers. Panics when dividing by zero. -/
      instance : rust_primitives.ops.arith.Div $typeName where
        div x y :=
          if y = 0 then
            .fail .divisionByZero
          else
            pure (x / y)

      /-- Remainder on unsigned Rust integers. Panics when the modulus is zero. -/
      instance : rust_primitives.ops.arith.Rem $typeName where
        rem x y :=
          if y = 0 then
            .fail .divisionByZero
          else
            pure (x % y)
    )
  return ⟨mkNullNode cmds⟩

declare_Hax_int_ops unsigned UInt8 8
declare_Hax_int_ops unsigned UInt16 16
declare_Hax_int_ops unsigned UInt32 32
declare_Hax_int_ops unsigned UInt64 64
declare_Hax_int_ops unsigned UInt128 128
declare_Hax_int_ops unsigned USize64 64
declare_Hax_int_ops signed Int8 8
declare_Hax_int_ops signed Int16 16
declare_Hax_int_ops signed Int32 32
declare_Hax_int_ops signed Int64 64
declare_Hax_int_ops signed Int128 128
declare_Hax_int_ops signed ISize System.Platform.numBits

open Lean in
set_option hygiene false in
/- Generates `Shr`/`Shl` instances for every (shiftee, shift-amount) pair of
machine-integer types. Shifts by a negative amount or by at least the bit
width fail, matching Rust's checked shift semantics. -/
macro "declare_Hax_shift_ops" : command => do
  let mut cmds := #[]
  let tys := [
    ("UInt8", ← `(term| 8)),
    ("UInt16", ← `(term| 16)),
    ("UInt32", ← `(term| 32)),
    ("UInt64", ← `(term| 64)),
    ("UInt128", ← `(term| 128)),
    ("USize64", ← `(term| 64)),
    ("Int8", ← `(term| 8)),
    ("Int16", ← `(term| 16)),
    ("Int32", ← `(term| 32)),
    ("Int64", ← `(term| 64)),
    ("Int128", ← `(term| 128)),
    ("ISize", ← `(term| OfNat.ofNat System.Platform.numBits))
  ]
  for (ty1, width1) in tys do
    for (ty2, _width2) in tys do
      let ty1Ident := mkIdent ty1.toName
      let ty2Ident := mkIdent ty2.toName
      let toTy1 := mkIdent ("to" ++ ty1).toName
      let ty2Signed := ty2.startsWith "I"
      -- For signed shift amounts, clamp negatives to 0 when converting to Nat
      -- (the in-range guard below already rules negatives out).
      let ty2ToNat := mkIdent (if ty2Signed then `toNatClampNeg else `toNat)
      let yConverted ← if ty1 == ty2 then `(y) else `(y.$ty2ToNat.$toTy1)
      cmds := cmds.push $ ← `(
        /-- Shift right for Rust integers. Panics when shifting by a negative number
        or by the bitsize or more. -/
        instance : rust_primitives.ops.bit.Shr $ty1Ident $ty2Ident where
          shr x y :=
            if 0 ≤ y && y < $width1 then
              pure (x >>> $yConverted)
            else
              .fail .integerOverflow

        /-- Shift left for Rust integers. Panics when shifting by a negative number,
        or when shifting by more than the size.
-/
        instance : rust_primitives.ops.bit.Shl $ty1Ident $ty2Ident where
          shl x y :=
            if 0 ≤ y && y < $width1 then
              pure (x <<< $yConverted)
            else
              .fail .integerOverflow
      )
  return ⟨mkNullNode cmds⟩

declare_Hax_shift_ops

/- ## Specifications for integer operations -/

open Lean in
set_option hygiene false in
/- Generates the `@[specset int]` Hoare-triple lemmas relating each checked
arithmetic operation to ordinary `Nat` (unsigned) / `Int` (signed) arithmetic,
under a no-overflow hypothesis. -/
macro "declare_Hax_int_ops_spec" s:(&"signed" <|> &"unsigned") typeName:ident width:term : command => do
  let signed ← match s.raw[0].getKind with
    | `signed => pure true
    | `unsigned => pure false
    | _ => throw .unsupportedSyntax
  -- Unsigned specs talk about `toNat`, signed specs about `toInt`.
  let toX := if signed then mkIdent `toInt else mkIdent `toNat
  let minValue := mkIdent (typeName.getId ++ `minValue)
  -- The unsigned proofs need extra `toNat` transport lemmas for grind.
  let grind : TSyntax `tactic ←
    if signed then `(tactic| grind)
    else `(tactic| grind [toNat_add_of_lt, toNat_sub_of_le', toNat_mul_of_lt])
  let mut cmds ← Syntax.getArgs <$> `(
    namespace $typeName

    /-- Specification for rust addition -/
    @[specset int] theorem haxAdd_spec {x y : $typeName}
        (h : ¬ $(mkIdent (typeName.getId ++ `addOverflow)) x y) :
        ⦃ ⌜ True ⌝ ⦄ (x +? y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX + y.$toX ⌝ ⦄ := by
      mvcgen [rust_primitives.ops.arith.Add.add]; $grind

    /-- Specification for rust subtraction -/
    @[specset int] theorem haxSub_spec {x y : $typeName}
        (h : ¬ $(mkIdent (typeName.getId ++ `subOverflow)) x y) :
        ⦃ ⌜ True ⌝ ⦄ (x -? y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX - y.$toX ⌝ ⦄ := by
      mvcgen [rust_primitives.ops.arith.Sub.sub]; $grind

    /-- Specification for rust multiplication -/
    @[specset int] theorem haxMul_spec {x y : $typeName}
        (h : ¬ $(mkIdent (typeName.getId ++ `mulOverflow)) x y) :
        ⦃ ⌜ True ⌝ ⦄ (x *? y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX * y.$toX ⌝ ⦄ := by
      mvcgen [rust_primitives.ops.arith.Mul.mul]; $grind
  )
  if signed then
    cmds := cmds.append $ ← Syntax.getArgs <$> `(
      /-- Specification for rust negation for signed integers-/
      @[specset int] theorem haxNeg_spec {x : $typeName} (hx : x ≠ $minValue) :
          ⦃ ⌜ True ⌝ ⦄ (-?
x) ⦃ ⇓ r => ⌜ r.toInt = - x.toInt ⌝ ⦄ := by
        mvcgen [rust_primitives.ops.arith.Neg.neg]
        rw [toInt_neg_of_ne_intMin hx]

      /-- Specification for rust division for signed integers-/
      @[specset int] theorem haxDiv_spec {x y : $typeName}
          (hx : x ≠ $minValue ∨ y ≠ -1) (hy : ¬ y = 0) :
          ⦃ ⌜ True ⌝ ⦄ (x /? y) ⦃ ⇓ r => ⌜ r.toInt = x.toInt.tdiv y.toInt ⌝ ⦄ := by
        have : ¬ (x = $minValue && y = -1) := by grind
        mvcgen [rust_primitives.ops.arith.Div.div]
        cases hx with
        | inl hx => apply toInt_div_of_ne_left x y hx
        | inr hx => apply toInt_div_of_ne_right x y hx

      /-- Specification for rust remainder for signed integers -/
      @[specset int] theorem haxRem_spec (x y : $typeName)
          (hx : x ≠ $minValue ∨ y ≠ -1) (hy : ¬ y = 0) :
          ⦃ ⌜ True ⌝ ⦄ (x %? y) ⦃ ⇓ r => ⌜ r.toInt = x.toInt.tmod y.toInt ⌝ ⦄ := by
        have : ¬ (x = $minValue && y = -1) := by grind
        mvcgen [rust_primitives.ops.arith.Rem.rem]
        apply toInt_mod
    )
  else -- unsigned
    cmds := cmds.append $ ← Syntax.getArgs <$> `(
      /-- Specification for rust division for unsigned integers -/
      @[specset int] theorem haxDiv_spec (x y : $typeName) (h : ¬ y = 0) :
          ⦃ ⌜ True ⌝ ⦄ (x /? y) ⦃ ⇓ r => ⌜ r.toNat = x.toNat / y.toNat ⌝ ⦄ := by
        mvcgen [rust_primitives.ops.arith.Div.div]

      /-- Specification for rust remainder for unsigned integers -/
      @[specset int] theorem haxRem_spec (x y : $typeName) (h : ¬ y = 0) :
          ⦃ ⌜ True ⌝ ⦄ (x %?
y) ⦃ ⇓ r => ⌜ r.toNat = x.toNat % y.toNat ⌝ ⦄ := by
        mvcgen [rust_primitives.ops.arith.Rem.rem]
    )
  cmds := cmds.push $ ← `(
    end $typeName
  )
  return ⟨mkNullNode cmds⟩

declare_Hax_int_ops_spec unsigned UInt8 8
declare_Hax_int_ops_spec unsigned UInt16 16
declare_Hax_int_ops_spec unsigned UInt32 32
declare_Hax_int_ops_spec unsigned UInt64 64
declare_Hax_int_ops_spec unsigned UInt128 128
declare_Hax_int_ops_spec unsigned USize64 64
declare_Hax_int_ops_spec signed Int8 8
declare_Hax_int_ops_spec signed Int16 16
declare_Hax_int_ops_spec signed Int32 32
declare_Hax_int_ops_spec signed Int64 64
declare_Hax_int_ops_spec signed Int128 128
declare_Hax_int_ops_spec signed ISize System.Platform.numBits

open Lean in
/- Generates `@[spec]` lemmas for the shift instances declared by
`declare_Hax_shift_ops`: for an in-range shift amount, the checked shift
equals the underlying bitvector shift. -/
macro "declare_Hax_shift_ops_spec" : command => do
  let mut cmds := #[]
  let tys := [
    ("UInt8", ← `(term| 8)),
    ("UInt16", ← `(term| 16)),
    ("UInt32", ← `(term| 32)),
    ("UInt64", ← `(term| 64)),
    -- ("UInt128", ← `(term| 128)),
    ("Int8", ← `(term| 8)),
    ("Int16", ← `(term| 16)),
    ("Int32", ← `(term| 32)),
    ("Int64", ← `(term| 64)),
    -- ("Int128", ← `(term| 128)),
  ]
  for (ty1, width1) in tys do
    for (ty2, _width2) in tys do
      let ty1Ident := mkIdent ty1.toName
      let ty2Ident := mkIdent ty2.toName
      let toTy1 := mkIdent ("to" ++ ty1).toName
      let ty2Signed := ty2.startsWith "I"
      let ty2ToNat := mkIdent (if ty2Signed then `toNatClampNeg else `toNat)
      let yConverted ← if ty1 == ty2 then `(y) else `(y.$ty2ToNat.$toTy1)
      let haxShiftRight_spec := mkIdent ("haxShiftRight_" ++ ty2 ++ "_spec").toName
      let haxShiftLeft_spec := mkIdent ("haxShiftLeft_" ++ ty2 ++ "_spec").toName
      cmds := cmds.push $ ← `(
        namespace $ty1Ident

        /-- Bitvec-based specification for rust right shift on integers -/
        @[spec] theorem $haxShiftRight_spec (x : $ty1Ident) (y : $ty2Ident) :
            0 ≤ y → y.$ty2ToNat < $width1 →
            ⦃ ⌜ True ⌝ ⦄ (x >>>? y) ⦃ ⇓ r => ⌜ r = x >>> $yConverted ⌝ ⦄ := by
          intros; mvcgen [rust_primitives.ops.bit.Shr.shr]; grind

        /-- Bitvec-based specification for rust left shift on integers -/
        @[spec] theorem $haxShiftLeft_spec (x : $ty1Ident) (y : $ty2Ident) :
            0 ≤ y → y.$ty2ToNat < $width1 →
            ⦃ ⌜ True ⌝ ⦄ (x <<<? y) ⦃ ⇓ r => ⌜ r = x <<< $yConverted ⌝ ⦄ := by
          intros; mvcgen [rust_primitives.ops.bit.Shl.shl]; grind

        end $ty1Ident
      )
  return ⟨mkNullNode cmds⟩

declare_Hax_shift_ops_spec

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/sequence.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.ops

/-- A Rust sequence (slice/`Vec` payload): an array whose length fits in `usize`. -/
structure rust_primitives.sequence.Seq α where
  val : Array α
  size_lt_usizeSize : val.size < USize64.size

attribute [grind .] rust_primitives.sequence.Seq.size_lt_usizeSize
attribute [local grind! .] USize64.toNat_lt_size

/-- Round-tripping a sequence length through `usize` is the identity,
since the length fits by construction. -/
@[grind =, simp] theorem rust_primitives.sequence.Seq.toNat_ofNat_size {α}
    (m : rust_primitives.sequence.Seq α) :
    (USize64.ofNat m.val.size).toNat = m.val.size :=
  USize64.toNat_ofNat_of_lt' m.size_lt_usizeSize

/-- Length of a sequence as a `usize`; total, since lengths fit by construction. -/
def rust_primitives.sequence.seq_len (α : Type) (s : rust_primitives.sequence.Seq α) :
    RustM usize :=
  pure (USize64.ofNat s.val.size)

/-- First element of a sequence; fails with `arrayOutOfBounds` when empty. -/
def rust_primitives.sequence.seq_first (α : Type) (s : rust_primitives.sequence.Seq α) :
    RustM α :=
  if h : s.val.size == 0 then
    .fail .arrayOutOfBounds
  else
    pure (s.val[0]'(by grind))

/-- Subsequence `[s, e)`; fails unless `s ≤ e ≤ length`. -/
def rust_primitives.sequence.seq_slice (α : Type)
    (seq : rust_primitives.sequence.Seq α) (s e : usize) :
    RustM (rust_primitives.sequence.Seq α) :=
  if s ≤ e && e ≤ .ofNat seq.val.size then
    pure ⟨seq.val[s.toNat:e.toNat].toArray, by grind⟩
  else
    .fail .arrayOutOfBounds

================================================
FILE: hax-lib/proof-libs/lean/Hax/rust_primitives/slice.lean
================================================
import Hax.rust_primitives.RustM
import Hax.rust_primitives.hax
import Hax.rust_primitives.sequence

abbrev RustVector :=
rust_primitives.sequence.Seq abbrev RustSlice := rust_primitives.sequence.Seq attribute [local grind! .] rust_primitives.sequence.Seq.size_lt_usizeSize attribute [local grind! .] USize64.toNat_lt_size @[spec] def rust_primitives.slice.array_as_slice (α : Type) (n : usize) : RustArray α n → RustM (RustSlice α) := fun x => pure ⟨Vector.toArray x.toVec, by grind⟩ @[spec] def rust_primitives.slice.array_map (α : Type) (β : Type) (n : usize) (_ : Type) (a : RustArray α n) (f : α -> RustM β) : RustM (RustArray β n) := do pure (.ofVec (← a.toVec.mapM (f ·) )) @[spec] def rust_primitives.slice.array_from_fn (α : Type) (n : usize) (_ : Type) (f : usize -> RustM α) : RustM (RustArray α n) := do pure (.ofVec (← (Vector.range n.toNat).mapM fun i => f (USize64.ofNat i))) @[spec] def rust_primitives.slice.slice_length (α : Type) (s : RustSlice α) : RustM usize := pure (USize64.ofNat s.val.size) @[spec] def rust_primitives.sequence.seq_from_slice (α : Type) (s : RustSlice α) : RustM (rust_primitives.sequence.Seq α) := pure s @[spec] def rust_primitives.slice.slice_split_at (α : Type) (s : RustSlice α) (mid : usize) : RustM (rust_primitives.hax.Tuple2 (RustSlice α) (RustSlice α)) := if mid <= .ofNat s.val.size then pure ⟨⟨s.val.take mid.toNat, by grind⟩, ⟨s.val.drop mid.toNat, by grind⟩⟩ else .fail .arrayOutOfBounds def rust_primitives.slice.slice_slice (α : Type) (seq : RustSlice α) (s e : usize) : RustM (RustSlice α) := if s ≤ e && e ≤ .ofNat seq.val.size then pure ⟨seq.val[s.toNat:e.toNat].toArray, by grind⟩ else .fail .arrayOutOfBounds ================================================ FILE: hax-lib/proof-libs/lean/Hax/rust_primitives.lean ================================================ import Hax.rust_primitives.arithmetic import Hax.rust_primitives.boxed import Hax.rust_primitives.BVDecide import Hax.rust_primitives.Cast import Hax.rust_primitives.hax import Hax.rust_primitives.hax_lib import Hax.rust_primitives.GetElemResult import Hax.rust_primitives.mem import 
Hax.rust_primitives.ops import Hax.rust_primitives.RustM import Hax.rust_primitives.sequence import Hax.rust_primitives.Spec import Hax.rust_primitives.slice import Hax.rust_primitives.USize64 ================================================ FILE: hax-lib/proof-libs/lean/Hax.lean ================================================ /- Copyright 2025 Cryspen Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -/ -- This module serves as the root of the `Hax` library. -- Import modules here that should be built as part of the library. /- Additions to the Lean library -/ import Hax.MissingLean /- Implementation of Rust primitives in Lean -/ import Hax.rust_primitives /- Core Models, extracted from our model written in Rust -/ import Hax.core_models /- Tactics -/ import Hax.Tactic ================================================ FILE: hax-lib/proof-libs/lean/README.md ================================================ # Hax Lean library This folder contains the Lean library necessary to use hax-extracted rust code in Lean. 
It is organized as follows: - `Hax.lean` : the library root, importing the modules below - `Hax/MissingLean.lean` : additions to the Lean library - `Hax/rust_primitives/` : implementation of Rust primitives in Lean (integer types, arrays, errors, etc) - `Hax/core_models/` : core models, extracted from our model written in Rust - `Hax/Tactic/` : proof tactics ================================================ FILE: hax-lib/proof-libs/lean/lake-manifest.json ================================================ { "version": "1.1.0", "packagesDir": ".lake/packages", "packages": [ { "url": "https://github.com/leanprover-community/quote4", "type": "git", "subDir": null, "scope": "", "rev": "23324752757bf28124a518ec284044c8db79fee5", "name": "Qq", "manifestFile": "lake-manifest.json", "inputRev": "v4.29.0-rc1", "inherited": false, "configFile": "lakefile.toml" } ], "name": "Hax", "lakeDir": ".lake" } ================================================ FILE: hax-lib/proof-libs/lean/lakefile.toml ================================================ name = "Hax" version = "0.1.0" defaultTargets = ["Hax"] [leanOptions] autoImplicit = false relaxedAutoImplicit = false weak.linter.mathlibStandardSet = true maxSynthPendingDepth = 3 [[lean_lib]] name = "Hax" [[require]] name = "Qq" git = "https://github.com/leanprover-community/quote4" rev = "v4.29.0-rc1" ================================================ FILE: hax-lib/proof-libs/lean/lean-toolchain ================================================ leanprover/lean4:v4.29.0-rc1 ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Abstraction.fst ================================================ module Hax_lib.Abstraction #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models /// Marks a type as abstract: its values can be lowered to concrete /// values. This might panic.
class t_Concretization (v_Self: Type0) (v_T: Type0) = { f_concretize_pre:v_Self -> Type0; f_concretize_post:v_Self -> v_T -> Type0; f_concretize:x0: v_Self -> Prims.Pure v_T (f_concretize_pre x0) (fun result -> f_concretize_post x0 result) } /// Marks a type as abstractable: its values can be mapped to an /// idealized version of the type. For instance, machine integers, /// which have bounds, can be mapped to mathematical integers. /// Each type can have only one abstraction. class t_Abstraction (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_AbstractType:Type0; f_lift_pre:v_Self -> Type0; f_lift_post:v_Self -> f_AbstractType -> Type0; f_lift:x0: v_Self -> Prims.Pure f_AbstractType (f_lift_pre x0) (fun result -> f_lift_post x0 result) } ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Bundle.fst ================================================ module Hax_lib.Bundle #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Hax_lib.Abstraction in let open Num_bigint in let open Num_bigint.Bigint in let open Num_bigint.Bigint.Addition in let open Num_bigint.Bigint.Convert in let open Num_bigint.Bigint.Division in let open Num_bigint.Bigint.Multiplication in let open Num_bigint.Bigint.Subtraction in let open Num_traits.Cast in let open Num_traits.Ops.Euclid in () /// This function exists only when compiled with `hax`, and is not /// meant to be used directly. It is called by `assert!` only in /// appropriate situations. let v_assert (e_formula: bool) : Prims.unit = () /// This function exists only when compiled with `hax`, and is not meant to be /// used directly. It is called by `assert_prop!` only in appropriate /// situations. 
let assert_prop (e_formula: Hax_lib.Prop.t_Prop) : Prims.unit = () /// This function exists only when compiled with `hax`, and is not /// meant to be used directly. It is called by `assume!` only in /// appropriate situations. let v_assume (e_formula: Hax_lib.Prop.t_Prop) : Prims.unit = () /// Dummy function that carries a string to be printed as such in the output language let v_inline (_: string) : Prims.unit = () /// Similar to `inline`, but allows for any type. Do not use directly. let inline_unsafe (#v_T: Type0) (_: string) : v_T = Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic "internal error: entered unreachable code" <: Rust_primitives.Hax.t_Never) /// Sink for any value into unit. This is used internally by hax to capture /// value of any type. Specifically, this is useful for the `decreases` clauses /// for the F* backend. let any_to_unit (#v_T: Type0) (_: v_T) : Prims.unit = Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic "internal error: entered unreachable code" <: Rust_primitives.Hax.t_Never) /// A dummy function that holds a loop invariant. let e_internal_loop_invariant (#v_T: Type0) (#v_R: Type0) (#v_P: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_R Hax_lib.Prop.t_Prop) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_P v_T) (#_: unit{i1.Core_models.Ops.Function.f_Output == v_R}) (_: v_P) : Prims.unit = () /// A dummy function that holds a while loop invariant. let e_internal_while_loop_invariant (_: Hax_lib.Prop.t_Prop) : Prims.unit = () (* item error backend: The mutation of this &mut is not allowed here. This is discussed in issue https://github.com/hacspec/hax/issues/420. Please upvote or comment this issue if you see this error message. Note: the error was labeled with context `DirectAndMut`. Last available AST for this item: /// A type that implements `Refinement` should be a newtype for a /// type `T`. 
The field holding the value of type `T` should be /// private, and `Refinement` should be the only interface to the /// type. /// Please never implement this trait yourself, use the /// `refinement_type` macro instead. #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] trait t_Refinement { /// The base type #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] type f_InnerType: TodoPrintRustBoundsTyp; /// Smart constructor capturing an invariant. Its extraction will /// yield a proof obligation. #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] fn f_new(_: proj_asso_type!()) -> Self; /// Destructor for the refined type #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] fn f_get(_: Self) -> proj_asso_type!(); /// Gets a mutable reference to a refinement #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] fn f_get_mut(_: Self) -> tuple2; /// Tests wether a value satisfies the refinement #[no_std()] #[feature(register_tool)] #[register_tool(_hax)] fn f_invariant(_: proj_asso_type!()) -> hax_lib::prop::t_Prop; } Last AST: /** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id = { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "Refinement"); disambiguator = 0 }] } }; moved = (Some { Concrete_ident.Fresh_module.id = 2; hints = [{ Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = Types.Use; disambiguator = 0 }] } }; { 
Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.ExternCrate; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "core"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = Types.Use; disambiguator = 1 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = Types.Use; disambiguator = 2 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = Types.Use; disambiguator = 3 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = Types.Use; disambiguator = 4 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro 
Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.MacroNs "proxy_macro_if_not_hax"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.MacroNs "debug_assert"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.MacroNs "assert"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "assert"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.MacroNs "assert_prop"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = 
true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "assert_prop"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "assume"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.MacroNs "assume"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "inline"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "inline_unsafe"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = 
"hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "any_to_unit"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "_internal_loop_invariant"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "_internal_while_loop_invariant"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.ValueNs "_internal_loop_decreases"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "Refinement"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = 
Types.Trait; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "RefineAs"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 1 }] } }; { Explicit_def_id.T.is_constructor = 
false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 2 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 3 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 4 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = 
true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 5 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Struct; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = (Types.TypeNs "Int"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 8 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl 
{of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 9 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 10 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 11 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = 
Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 12 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 13 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 14 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; 
kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 15 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { 
Types.data = Types.Impl; disambiguator = 1 }] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 1 }; { Types.data = (Types.ValueNs "new"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 1 }] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 1 }; { Types.data = (Types.ValueNs "get"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 2 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind 
= Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 3 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 4 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 5 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; 
kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 6 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "pow2"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = 
"hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "_unsafe_from_str"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "rem_euclid"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = 
false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "_unsafe_from_str"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "_unsafe_from_str"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Use; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 
}; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "rem_euclid"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 7 }; { Types.data = (Types.ValueNs "rem_euclid"); disambiguator = 0 }; { Types.data = Types.Use; disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = (Types.TypeNs "ToInt"); disambiguator = 0 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = (Types.MacroNs "implement_abstraction"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = 
"hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 16 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 17 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 18 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; 
krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 19 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 20 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 21 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = 
Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 22 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 23 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 24 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; 
kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 25 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 26 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 27 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = 
true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 28 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 29 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 30 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); 
is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 31 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 32 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 33 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, 
None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 34 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 35 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 36 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = 
(0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 37 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 38 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 39 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = (Types.Macro Types.Bang); krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = 
(0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = (Types.MacroNs "implement_concretize"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 40 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 41 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); 
disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 41 }; { Types.data = (Types.ValueNs "to_u8"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 42 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 43 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 43 }; { Types.data = (Types.ValueNs "to_u16"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { 
Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 44 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 45 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 45 }; { Types.data = (Types.ValueNs "to_u32"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); 
disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 46 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 47 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 47 }; { Types.data = (Types.ValueNs "to_u64"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 48 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { 
Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 49 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 49 }; { Types.data = (Types.ValueNs "to_u128"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 50 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { 
Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 51 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 51 }; { Types.data = (Types.ValueNs "to_usize"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 52 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 53 } ] } } }); path = [{ 
Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 53 }; { Types.data = (Types.ValueNs "to_i8"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 54 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 55 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 55 }; { Types.data = (Types.ValueNs "to_i16"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; 
parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 56 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 57 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 57 }; { Types.data = (Types.ValueNs "to_i32"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ 
Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 58 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 59 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 59 }; { Types.data = (Types.ValueNs "to_i64"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 60 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; 
parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 61 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 61 }; { Types.data = (Types.ValueNs "to_i128"); disambiguator = 0 } ] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = true}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 62 }] } }; { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.AssocFn; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Impl {of_trait = false}; krate = "hax_lib"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = (Some { 
Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "hax_lib"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 63 } ] } } }); path = [{ Types.data = (Types.TypeNs "int"); disambiguator = 0 }; { Types.data = Types.Impl; disambiguator = 63 }; { Types.data = (Types.ValueNs "to_isize"); disambiguator = 0 } ] } } ]; label = "bundle" }); suffix = None }) */ const _: () = (); *) /// A utilitary trait that provides a `into_checked` method on traits /// that have a refined counter part. This trait is parametrized by a /// type `Target`: a base type can be refined in multiple ways. /// Please never implement this trait yourself, use the /// `refinement_type` macro instead. class t_RefineAs (v_Self: Type0) (v_RefinedType: Type0) = { f_into_checked_pre:v_Self -> Type0; f_into_checked_post:v_Self -> v_RefinedType -> Type0; f_into_checked:x0: v_Self -> Prims.Pure v_RefinedType (f_into_checked_pre x0) (fun result -> f_into_checked_post x0 result) } /// Mathematical integers for writting specifications. Mathematical /// integers are unbounded and arithmetic operation on them never over /// or underflow. type t_Int = | Int : Hax_lib.Int.Bigint.t_BigInt -> t_Int /// A dummy function that holds a loop variant. 
// ---------------------------------------------------------------------------
// hax-generated F* for `hax_lib`'s `int` module: the specification-level
// integer `t_Int` (a wrapper around the copiable `Hax_lib.Int.Bigint.t_BigInt`)
// together with its typeclass instances, arithmetic, and the lift/concretize
// conversions to and from every Rust machine-integer type.
// NOTE(review): machine-generated extraction output — changes here should
// normally come from re-running the hax extraction, not hand edits.
// ---------------------------------------------------------------------------

// Erases its argument to unit; exists only so hax can attach a loop variant
// (`decreases` measure) to extracted loops.
let e_internal_loop_decreases (_: t_Int) : Prims.unit = ()

// `Clone` is the identity on `t_Int` (the payload is plain by-value data).
let impl_9: Core_models.Clone.t_Clone t_Int =
  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }

// Marker, comparison and `Debug` instances are assumed: they carry no
// computational content needed by the proofs; `unfold let impl_N = impl_N'`
// re-exposes each assumed instance under its extracted name.
[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_8': Core_models.Marker.t_Copy t_Int

unfold let impl_8 = impl_8'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_11': Core_models.Marker.t_StructuralPartialEq t_Int

unfold let impl_11 = impl_11'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_12': Core_models.Cmp.t_PartialEq t_Int t_Int

unfold let impl_12 = impl_12'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_10': Core_models.Cmp.t_Eq t_Int

unfold let impl_10 = impl_10'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_14': Core_models.Cmp.t_PartialOrd t_Int t_Int

unfold let impl_14 = impl_14'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_13': Core_models.Cmp.t_Ord t_Int

unfold let impl_13 = impl_13'

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_15': Core_models.Fmt.t_Debug t_Int

unfold let impl_15 = impl_15'

// Builds a `t_Int` from any value convertible into a `num_bigint` BigInt.
let impl_1__new
      (#iimpl_637761304_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0:
          Core_models.Convert.t_Into iimpl_637761304_ Num_bigint.Bigint.t_BigInt)
      (x: iimpl_637761304_)
    : t_Int =
  Int
  (Hax_lib.Int.Bigint.impl_BigInt__new (Core_models.Convert.f_into #iimpl_637761304_
          #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          x
        <:
        Num_bigint.Bigint.t_BigInt))
  <:
  t_Int

// Projects the wrapped copiable BigInt back to a `num_bigint` BigInt.
let impl_1__get (self: t_Int) : Num_bigint.Bigint.t_BigInt =
  Hax_lib.Int.Bigint.impl_BigInt__get self._0

// `Display`: formats the underlying BigInt through the formatting runtime
// (state-passing style: the formatter is threaded in and out explicitly).
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl: Core_models.Fmt.t_Display t_Int = {
  f_fmt_pre = (fun (self: t_Int) (f: Core_models.Fmt.t_Formatter) -> true);
  f_fmt_post = (fun (self: t_Int) (f: Core_models.Fmt.t_Formatter)
      (out1: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) ->
      true);
  f_fmt = fun (self: t_Int) (f: Core_models.Fmt.t_Formatter) ->
    let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =
      let list =
        [Core_models.Fmt.Rt.impl__new_display #Num_bigint.Bigint.t_BigInt
            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)]
      in
      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);
      Rust_primitives.Hax.array_of_list 1 list
    in
    let tmp0, out:(Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =
      Core_models.Fmt.impl_11__write_fmt f
        (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1)
            (mk_usize 1)
            (let list = [""] in
              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);
              Rust_primitives.Hax.array_of_list 1 list)
            args
          <:
          Core_models.Fmt.t_Arguments)
    in
    let f:Core_models.Fmt.t_Formatter = tmp0 in
    let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = out in
    f, hax_temp_output
    <:
    (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)
}

// Arithmetic instances: each operation unwraps both operands, delegates to
// the corresponding `num_bigint` BigInt operation, and re-wraps the result.
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_2: Core_models.Ops.Arith.t_Add t_Int t_Int = {
  f_Output = t_Int;
  f_add_pre = (fun (self: t_Int) (other: t_Int) -> true);
  f_add_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);
  f_add = fun (self: t_Int) (other: t_Int) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Ops.Arith.f_add #Num_bigint.Bigint.t_BigInt
          #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
          (impl_1__get other <: Num_bigint.Bigint.t_BigInt)
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_3: Core_models.Ops.Arith.t_Neg t_Int = {
  f_Output = t_Int;
  f_neg_pre = (fun (self: t_Int) -> true);
  f_neg_post = (fun (self: t_Int) (out: t_Int) -> true);
  f_neg = fun (self: t_Int) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Ops.Arith.f_neg #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_4: Core_models.Ops.Arith.t_Sub t_Int t_Int = {
  f_Output = t_Int;
  f_sub_pre = (fun (self: t_Int) (other: t_Int) -> true);
  f_sub_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);
  f_sub = fun (self: t_Int) (other: t_Int) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Ops.Arith.f_sub #Num_bigint.Bigint.t_BigInt
          #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
          (impl_1__get other <: Num_bigint.Bigint.t_BigInt)
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_5: Core_models.Ops.Arith.t_Mul t_Int t_Int = {
  f_Output = t_Int;
  f_mul_pre = (fun (self: t_Int) (other: t_Int) -> true);
  f_mul_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);
  f_mul = fun (self: t_Int) (other: t_Int) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Ops.Arith.f_mul #Num_bigint.Bigint.t_BigInt
          #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
          (impl_1__get other <: Num_bigint.Bigint.t_BigInt)
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_6: Core_models.Ops.Arith.t_Div t_Int t_Int = {
  f_Output = t_Int;
  f_div_pre = (fun (self: t_Int) (other: t_Int) -> true);
  f_div_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);
  f_div = fun (self: t_Int) (other: t_Int) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Ops.Arith.f_div #Num_bigint.Bigint.t_BigInt
          #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
          (impl_1__get other <: Num_bigint.Bigint.t_BigInt)
        <:
        Num_bigint.Bigint.t_BigInt)
}

/// Raises `2` at the power `self`
let impl_7__pow2 (self: t_Int) : t_Int =
  // The exponent must fit in a `u32`; `expect` panics otherwise.
  let exponent:u32 =
    Core_models.Option.impl__expect #u32
      (Num_traits.Cast.f_to_u32 #Num_bigint.Bigint.t_BigInt
          #FStar.Tactics.Typeclasses.solve
          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
        <:
        Core_models.Option.t_Option u32)
      "Exponent doesn't fit in a u32"
  in
  impl_1__new #Num_bigint.Bigint.t_BigInt
    (Num_bigint.Bigint.impl_BigInt__pow (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
            #u8
            #FStar.Tactics.Typeclasses.solve
            (mk_u8 2)
          <:
          Num_bigint.Bigint.t_BigInt)
        exponent
      <:
      Num_bigint.Bigint.t_BigInt)

/// Constructs an `Int` out of a string literal. This function
/// assumes its argument consists only of decimal digits, with
/// optionally a minus sign prefix.
let impl_7__e_unsafe_from_str (s: string) : t_Int =
  // `unwrap` relies on the well-formedness assumption above; a malformed
  // string would make the parse `Result` an `Err` and panic here.
  impl_1__new #Num_bigint.Bigint.t_BigInt
    (Core_models.Result.impl__unwrap #Num_bigint.Bigint.t_BigInt
        #Num_bigint.t_ParseBigIntError
        (Core_models.Str.Traits.f_from_str #Num_bigint.Bigint.t_BigInt
            #FStar.Tactics.Typeclasses.solve
            s
          <:
          Core_models.Result.t_Result Num_bigint.Bigint.t_BigInt Num_bigint.t_ParseBigIntError)
      <:
      Num_bigint.Bigint.t_BigInt)

// Euclidean remainder, delegated to `num_traits`' `Euclid::rem_euclid`.
let impl_7__rem_euclid (self v: t_Int) : t_Int =
  impl_1__new #Num_bigint.Bigint.t_BigInt
    (Num_traits.Ops.Euclid.f_rem_euclid #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
        (impl_1__get v <: Num_bigint.Bigint.t_BigInt)
      <:
      Num_bigint.Bigint.t_BigInt)

// Typeclass for converting a value into a specification integer `t_Int`.
class t_ToInt (v_Self: Type0) = {
  f_to_int_pre:v_Self -> Type0;
  f_to_int_post:v_Self -> t_Int -> Type0;
  f_to_int:x0: v_Self -> Prims.Pure t_Int (f_to_int_pre x0) (fun result -> f_to_int_post x0 result)
}

// `Abstraction`/`ToInt` instance pairs: lift each machine-integer type into
// `t_Int` via `From<N> for BigInt`. The pattern is identical for every width;
// only the concrete type and instance number vary.
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_16: Hax_lib.Abstraction.t_Abstraction u8 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: u8) -> true);
  f_lift_post = (fun (self: u8) (out: t_Int) -> true);
  f_lift = fun (self: u8) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #u8
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_17: t_ToInt u8 = {
  f_to_int_pre = (fun (self: u8) -> true);
  f_to_int_post = (fun (self: u8) (out: t_Int) -> true);
  f_to_int = fun (self: u8) -> Hax_lib.Abstraction.f_lift #u8 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_18: Hax_lib.Abstraction.t_Abstraction u16 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: u16) -> true);
  f_lift_post = (fun (self: u16) (out: t_Int) -> true);
  f_lift = fun (self: u16) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #u16
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_19: t_ToInt u16 = {
  f_to_int_pre = (fun (self: u16) -> true);
  f_to_int_post = (fun (self: u16) (out: t_Int) -> true);
  f_to_int = fun (self: u16) -> Hax_lib.Abstraction.f_lift #u16 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_20: Hax_lib.Abstraction.t_Abstraction u32 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: u32) -> true);
  f_lift_post = (fun (self: u32) (out: t_Int) -> true);
  f_lift = fun (self: u32) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #u32
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_21: t_ToInt u32 = {
  f_to_int_pre = (fun (self: u32) -> true);
  f_to_int_post = (fun (self: u32) (out: t_Int) -> true);
  f_to_int = fun (self: u32) -> Hax_lib.Abstraction.f_lift #u32 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_22: Hax_lib.Abstraction.t_Abstraction u64 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: u64) -> true);
  f_lift_post = (fun (self: u64) (out: t_Int) -> true);
  f_lift = fun (self: u64) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #u64
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_23: t_ToInt u64 = {
  f_to_int_pre = (fun (self: u64) -> true);
  f_to_int_post = (fun (self: u64) (out: t_Int) -> true);
  f_to_int = fun (self: u64) -> Hax_lib.Abstraction.f_lift #u64 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_24: Hax_lib.Abstraction.t_Abstraction u128 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: u128) -> true);
  f_lift_post = (fun (self: u128) (out: t_Int) -> true);
  f_lift = fun (self: u128) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #u128
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_25: t_ToInt u128 = {
  f_to_int_pre = (fun (self: u128) -> true);
  f_to_int_post = (fun (self: u128) (out: t_Int) -> true);
  f_to_int = fun (self: u128) -> Hax_lib.Abstraction.f_lift #u128 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_26: Hax_lib.Abstraction.t_Abstraction usize = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: usize) -> true);
  f_lift_post = (fun (self: usize) (out: t_Int) -> true);
  f_lift = fun (self: usize) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #usize
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_27: t_ToInt usize = {
  f_to_int_pre = (fun (self: usize) -> true);
  f_to_int_post = (fun (self: usize) (out: t_Int) -> true);
  f_to_int = fun (self: usize) -> Hax_lib.Abstraction.f_lift #usize #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_28: Hax_lib.Abstraction.t_Abstraction i8 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: i8) -> true);
  f_lift_post = (fun (self: i8) (out: t_Int) -> true);
  f_lift = fun (self: i8) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #i8
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_29: t_ToInt i8 = {
  f_to_int_pre = (fun (self: i8) -> true);
  f_to_int_post = (fun (self: i8) (out: t_Int) -> true);
  f_to_int = fun (self: i8) -> Hax_lib.Abstraction.f_lift #i8 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_30: Hax_lib.Abstraction.t_Abstraction i16 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: i16) -> true);
  f_lift_post = (fun (self: i16) (out: t_Int) -> true);
  f_lift = fun (self: i16) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #i16
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_31: t_ToInt i16 = {
  f_to_int_pre = (fun (self: i16) -> true);
  f_to_int_post = (fun (self: i16) (out: t_Int) -> true);
  f_to_int = fun (self: i16) -> Hax_lib.Abstraction.f_lift #i16 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_32: Hax_lib.Abstraction.t_Abstraction i32 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: i32) -> true);
  f_lift_post = (fun (self: i32) (out: t_Int) -> true);
  f_lift = fun (self: i32) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #i32
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_33: t_ToInt i32 = {
  f_to_int_pre = (fun (self: i32) -> true);
  f_to_int_post = (fun (self: i32) (out: t_Int) -> true);
  f_to_int = fun (self: i32) -> Hax_lib.Abstraction.f_lift #i32 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_34: Hax_lib.Abstraction.t_Abstraction i64 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: i64) -> true);
  f_lift_post = (fun (self: i64) (out: t_Int) -> true);
  f_lift = fun (self: i64) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #i64
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_35: t_ToInt i64 = {
  f_to_int_pre = (fun (self: i64) -> true);
  f_to_int_post = (fun (self: i64) (out: t_Int) -> true);
  f_to_int = fun (self: i64) -> Hax_lib.Abstraction.f_lift #i64 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_36: Hax_lib.Abstraction.t_Abstraction i128 = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: i128) -> true);
  f_lift_post = (fun (self: i128) (out: t_Int) -> true);
  f_lift = fun (self: i128) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #i128
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_37: t_ToInt i128 = {
  f_to_int_pre = (fun (self: i128) -> true);
  f_to_int_post = (fun (self: i128) (out: t_Int) -> true);
  f_to_int = fun (self: i128) -> Hax_lib.Abstraction.f_lift #i128 #FStar.Tactics.Typeclasses.solve self
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_38: Hax_lib.Abstraction.t_Abstraction isize = {
  f_AbstractType = t_Int;
  f_lift_pre = (fun (self: isize) -> true);
  f_lift_post = (fun (self: isize) (out: t_Int) -> true);
  f_lift = fun (self: isize) ->
    impl_1__new #Num_bigint.Bigint.t_BigInt
      (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt
          #isize
          #FStar.Tactics.Typeclasses.solve
          self
        <:
        Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_39: t_ToInt isize = {
  f_to_int_pre = (fun (self: isize) -> true);
  f_to_int_post = (fun (self: isize) (out: t_Int) -> true);
  f_to_int = fun (self: isize) -> Hax_lib.Abstraction.f_lift #isize #FStar.Tactics.Typeclasses.solve self
}

// `Concretization`/`to_N` pairs: convert a `t_Int` back to machine integer
// type `N` via `num_traits`' checked `to_N`; each first asserts the value
// actually fits (`is_some`) and then unwraps. Again one pattern per width.
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_40: Hax_lib.Abstraction.t_Concretization t_Int u8 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: u8) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option u8 =
      Num_traits.Cast.f_to_u8 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u8 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #u8 concretized
}

let impl_41__to_u8 (self: t_Int) : u8 =
  Hax_lib.Abstraction.f_concretize #t_Int #u8 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_42: Hax_lib.Abstraction.t_Concretization t_Int u16 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: u16) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option u16 =
      Num_traits.Cast.f_to_u16 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u16 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #u16 concretized
}

let impl_43__to_u16 (self: t_Int) : u16 =
  Hax_lib.Abstraction.f_concretize #t_Int #u16 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_44: Hax_lib.Abstraction.t_Concretization t_Int u32 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: u32) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option u32 =
      Num_traits.Cast.f_to_u32 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u32 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #u32 concretized
}

let impl_45__to_u32 (self: t_Int) : u32 =
  Hax_lib.Abstraction.f_concretize #t_Int #u32 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_46: Hax_lib.Abstraction.t_Concretization t_Int u64 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: u64) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option u64 =
      Num_traits.Cast.f_to_u64 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u64 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #u64 concretized
}

let impl_47__to_u64 (self: t_Int) : u64 =
  Hax_lib.Abstraction.f_concretize #t_Int #u64 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_48: Hax_lib.Abstraction.t_Concretization t_Int u128 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: u128) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option u128 =
      Num_traits.Cast.f_to_u128 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u128 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #u128 concretized
}

let impl_49__to_u128 (self: t_Int) : u128 =
  Hax_lib.Abstraction.f_concretize #t_Int #u128 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_50: Hax_lib.Abstraction.t_Concretization t_Int usize = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: usize) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option usize =
      Num_traits.Cast.f_to_usize #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #usize concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #usize concretized
}

let impl_51__to_usize (self: t_Int) : usize =
  Hax_lib.Abstraction.f_concretize #t_Int #usize #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_52: Hax_lib.Abstraction.t_Concretization t_Int i8 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: i8) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option i8 =
      Num_traits.Cast.f_to_i8 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i8 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #i8 concretized
}

let impl_53__to_i8 (self: t_Int) : i8 =
  Hax_lib.Abstraction.f_concretize #t_Int #i8 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_54: Hax_lib.Abstraction.t_Concretization t_Int i16 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: i16) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option i16 =
      Num_traits.Cast.f_to_i16 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i16 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #i16 concretized
}

let impl_55__to_i16 (self: t_Int) : i16 =
  Hax_lib.Abstraction.f_concretize #t_Int #i16 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_56: Hax_lib.Abstraction.t_Concretization t_Int i32 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: i32) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option i32 =
      Num_traits.Cast.f_to_i32 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i32 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #i32 concretized
}

let impl_57__to_i32 (self: t_Int) : i32 =
  Hax_lib.Abstraction.f_concretize #t_Int #i32 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_58: Hax_lib.Abstraction.t_Concretization t_Int i64 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: i64) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option i64 =
      Num_traits.Cast.f_to_i64 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i64 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #i64 concretized
}

let impl_59__to_i64 (self: t_Int) : i64 =
  Hax_lib.Abstraction.f_concretize #t_Int #i64 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_60: Hax_lib.Abstraction.t_Concretization t_Int i128 = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: i128) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option i128 =
      Num_traits.Cast.f_to_i128 #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i128 concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #i128 concretized
}

let impl_61__to_i128 (self: t_Int) : i128 =
  Hax_lib.Abstraction.f_concretize #t_Int #i128 #FStar.Tactics.Typeclasses.solve self

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_62: Hax_lib.Abstraction.t_Concretization t_Int isize = {
  f_concretize_pre = (fun (self: t_Int) -> true);
  f_concretize_post = (fun (self: t_Int) (out: isize) -> true);
  f_concretize = fun (self: t_Int) ->
    let concretized:Core_models.Option.t_Option isize =
      Num_traits.Cast.f_to_isize #Num_bigint.Bigint.t_BigInt
        #FStar.Tactics.Typeclasses.solve
        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)
    in
    let _:Prims.unit =
      if true
      then
        let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #isize concretized <: bool) in
        ()
    in
    Core_models.Option.impl__unwrap #isize concretized
}

let impl_63__to_isize (self: t_Int) : isize =
  Hax_lib.Abstraction.f_concretize #t_Int #isize #FStar.Tactics.Typeclasses.solve self

================================================
FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Int.Bigint.fst
================================================
module Hax_lib.Int.Bigint
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"

open FStar.Mul
open Core_models

let _ =
  (* This module has implicit dependencies, here we make them explicit. *)
  (* The implicit dependencies arise from typeclasses instances. *)
  let open Num_bigint.Bigint in
  ()

/// Maximal number of bytes stored in our copiable `BigInt`s.
let v_BYTES: usize = mk_usize 1024

// Fixed-size, copiable big-integer representation: a sign plus a 1024-byte
// big-endian magnitude buffer (the magnitude is right-aligned into the
// buffer by `impl_BigInt__new`).
type t_BigInt = { f_sign:Num_bigint.Bigint.t_Sign; f_data:t_Array u8 (mk_usize 1024) }

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_5': Core_models.Fmt.t_Debug t_BigInt

unfold let impl_5 = impl_5'

// `Clone` is the identity: the record is plain by-value data.
let impl_7: Core_models.Clone.t_Clone t_BigInt =
  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }

[@@ FStar.Tactics.Typeclasses.tcinstance]
assume val impl_6': Core_models.Marker.t_Copy t_BigInt

unfold let impl_6 = impl_6'

/// Construct a [`BigInt`] from a [`num_bigint::BigInt`]. This
/// operation panics when the provided [`num_bigint::BigInt`]
/// has more than [`BYTES`] bytes.
let impl_BigInt__new (i: Num_bigint.Bigint.t_BigInt) : t_BigInt =
  // Decompose into (sign, big-endian magnitude bytes).
  let sign, bytes:(Num_bigint.Bigint.t_Sign & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =
    Num_bigint.Bigint.impl_BigInt__to_bytes_be i
  in
  // Panic if the magnitude does not fit in the fixed `BYTES`-byte buffer.
  let _:Prims.unit =
    if (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) >. v_BYTES
    then
      Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const
              (mk_usize 1)
              (let list =
                  ["`copiable_bigint::BigInt::new`: too big, please consider increasing `BYTES`"]
                in
                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);
                Rust_primitives.Hax.array_of_list 1 list)
            <:
            Core_models.Fmt.t_Arguments)
        <:
        Rust_primitives.Hax.t_Never)
  in
  // Zero-fill, then right-align the magnitude: copy `bytes` into the tail
  // slice starting at index `BYTES - len bytes` (leading bytes stay zero).
  let data:t_Array u8 (mk_usize 1024) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 1024) in
  let data:t_Array u8 (mk_usize 1024) =
    Rust_primitives.Hax.Monomorphized_update_at.update_at_range_from data
      ({
          Core_models.Ops.Range.f_start
          =
          v_BYTES -! (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) <: usize
        }
        <:
        Core_models.Ops.Range.t_RangeFrom usize)
      (Core_models.Slice.impl__copy_from_slice #u8
          (data.[ {
                Core_models.Ops.Range.f_start
                =
                v_BYTES -! (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) <: usize
              }
              <:
              Core_models.Ops.Range.t_RangeFrom usize ]
            <:
            t_Slice u8)
          (bytes.[ Core_models.Ops.Range.RangeFull <: Core_models.Ops.Range.t_RangeFull ] <: t_Slice u8)
        <:
        t_Slice u8)
  in
  { f_sign = sign; f_data = data } <: t_BigInt

/// Constructs a [`num_bigint::BigInt`] out of a [`BigInt`].
let impl_BigInt__get (self: t_BigInt) : Num_bigint.Bigint.t_BigInt =
  // Leading zero bytes are harmless for `from_bytes_be`, so the whole
  // 1024-byte buffer is passed as-is.
  Num_bigint.Bigint.impl_BigInt__from_bytes_be self.f_sign (self.f_data <: t_Slice u8)

// Comparisons are defined on the mathematical value (via `impl_BigInt__get`),
// not on the raw buffer, so padding bytes never affect equality or ordering.
[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_1: Core_models.Cmp.t_PartialEq t_BigInt t_BigInt = {
  f_eq_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);
  f_eq_post = (fun (self: t_BigInt) (other: t_BigInt) (out: bool) -> true);
  f_eq = fun (self: t_BigInt) (other: t_BigInt) ->
    (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt) =.
    (impl_BigInt__get other <: Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_2: Core_models.Cmp.t_Eq t_BigInt = { _super_i0 = FStar.Tactics.Typeclasses.solve }

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_4: Core_models.Cmp.t_PartialOrd t_BigInt t_BigInt = {
  _super_i0 = FStar.Tactics.Typeclasses.solve;
  f_partial_cmp_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);
  f_partial_cmp_post = (fun (self: t_BigInt) (other: t_BigInt)
      (out: Core_models.Option.t_Option Core_models.Cmp.t_Ordering) -> true);
  f_partial_cmp = fun (self: t_BigInt) (other: t_BigInt) ->
    Core_models.Cmp.f_partial_cmp #Num_bigint.Bigint.t_BigInt
      #Num_bigint.Bigint.t_BigInt
      #FStar.Tactics.Typeclasses.solve
      (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt)
      (impl_BigInt__get other <: Num_bigint.Bigint.t_BigInt)
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_3: Core_models.Cmp.t_Ord t_BigInt = {
  _super_i0 = FStar.Tactics.Typeclasses.solve;
  _super_i1 = FStar.Tactics.Typeclasses.solve;
  f_cmp_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);
  f_cmp_post = (fun (self: t_BigInt) (other: t_BigInt) (out: Core_models.Cmp.t_Ordering) -> true);
  f_cmp = fun (self: t_BigInt) (other: t_BigInt) ->
    Core_models.Cmp.f_cmp #Num_bigint.Bigint.t_BigInt
      #FStar.Tactics.Typeclasses.solve
      (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt)
      (impl_BigInt__get other <: Num_bigint.Bigint.t_BigInt)
}

================================================
FILE:
hax-lib/proofs/fstar/extraction/Hax_lib.Int.fst ================================================ module Hax_lib.Int open Rust_primitives unfold type t_Int = int unfold let impl_Int__to_u8 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_u16 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_u32 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_u64 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_u128 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_usize (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_i8 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_i16 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_i32 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_i64 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_i128 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__to_isize (#t:inttype) (n:range_t t) : int_t t = mk_int #t n unfold let impl_Int__pow2 (n: nat) = pow2 n unfold let impl_Int__rem_euclid = (%) ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Prop.Bundle.fst ================================================ module Hax_lib.Prop.Bundle #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Hax_lib.Abstraction in () /// Represent a logical proposition, that may be not computable. 
type t_Prop = | Prop : bool -> t_Prop let impl_7: Core_models.Clone.t_Clone t_Prop = { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_8': Core_models.Marker.t_Copy t_Prop unfold let impl_8 = impl_8' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_9': Core_models.Fmt.t_Debug t_Prop unfold let impl_9 = impl_9' [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Hax_lib.Abstraction.t_Abstraction bool = { f_AbstractType = t_Prop; f_lift_pre = (fun (self: bool) -> true); f_lift_post = (fun (self: bool) (out: t_Prop) -> true); f_lift = fun (self: bool) -> Prop self <: t_Prop } class t_ToProp (v_Self: Type0) = { f_to_prop_pre:v_Self -> Type0; f_to_prop_post:v_Self -> t_Prop -> Type0; f_to_prop:x0: v_Self -> Prims.Pure t_Prop (f_to_prop_pre x0) (fun result -> f_to_prop_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_2: t_ToProp bool = { f_to_prop_pre = (fun (self: bool) -> true); f_to_prop_post = (fun (self: bool) (out: t_Prop) -> true); f_to_prop = fun (self: bool) -> Hax_lib.Abstraction.f_lift #bool #FStar.Tactics.Typeclasses.solve self } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_3: Core_models.Convert.t_From t_Prop bool = { f_from_pre = (fun (value: bool) -> true); f_from_post = (fun (value: bool) (out: t_Prop) -> true); f_from = fun (value: bool) -> Prop value <: t_Prop } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_4 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Prop) : Core_models.Ops.Bit.t_BitAnd t_Prop v_T = { f_Output = t_Prop; f_bitand_pre = (fun (self: t_Prop) (rhs: v_T) -> true); f_bitand_post = (fun (self: t_Prop) (rhs: v_T) (out: t_Prop) -> true); f_bitand = fun (self: t_Prop) (rhs: v_T) -> Prop (Core_models.Ops.Bit.f_bitand self._0 (Core_models.Convert.f_into #v_T #t_Prop #FStar.Tactics.Typeclasses.solve rhs <: t_Prop) ._0) <: t_Prop } [@@ 
FStar.Tactics.Typeclasses.tcinstance] let impl_5 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Prop) : Core_models.Ops.Bit.t_BitOr t_Prop v_T = { f_Output = t_Prop; f_bitor_pre = (fun (self: t_Prop) (rhs: v_T) -> true); f_bitor_post = (fun (self: t_Prop) (rhs: v_T) (out: t_Prop) -> true); f_bitor = fun (self: t_Prop) (rhs: v_T) -> Prop (Core_models.Ops.Bit.f_bitor self._0 (Core_models.Convert.f_into #v_T #t_Prop #FStar.Tactics.Typeclasses.solve rhs <: t_Prop) ._0) <: t_Prop } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_6: Core_models.Ops.Bit.t_Not t_Prop = { f_Output = t_Prop; f_not_pre = (fun (self: t_Prop) -> true); f_not_post = (fun (self: t_Prop) (out: t_Prop) -> true); f_not = fun (self: t_Prop) -> Prop ~.self._0 <: t_Prop } let from_bool (b: bool) : t_Prop = Prop b <: t_Prop /// Lifts a boolean to a logical proposition. let impl__from_bool (b: bool) : t_Prop = from_bool b let v_and (lhs other: t_Prop) : t_Prop = Prop (lhs._0 && other._0) <: t_Prop /// Conjuction of two propositions. let impl__and (#iimpl_37134320_: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_37134320_ t_Prop) (self: t_Prop) (other: iimpl_37134320_) : t_Prop = v_and self (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other <: t_Prop) let or (lhs other: t_Prop) : t_Prop = Prop (lhs._0 || other._0) <: t_Prop /// Disjunction of two propositions. let impl__or (#iimpl_37134320_: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_37134320_ t_Prop) (self: t_Prop) (other: iimpl_37134320_) : t_Prop = or self (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other <: t_Prop) let not (lhs: t_Prop) : t_Prop = Prop ~.lhs._0 <: t_Prop /// Negation of a proposition. 
/// Negation of a proposition (delegates to the monomorphic `not` constructor).
let impl__not (self: t_Prop) : t_Prop = not self

/// Logical equality between two values of *any* type.
/// NOTE: the runtime value is symbolic (`Prop true`); hax replaces this
/// constructor by real logical equality in the backends.
let eq (#v_T: Type0) (e_lhs e_rhs: v_T) : t_Prop = Prop true <: t_Prop

/// Equality between two propositions.
let impl__eq
      (#iimpl_37134320_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_37134320_ t_Prop)
      (self: t_Prop)
      (other: iimpl_37134320_)
    : t_Prop =
  eq #t_Prop
    self
    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other
      <:
      t_Prop)

/// Logical inequality between two values of *any* type (symbolic, see `eq`).
let ne (#v_T: Type0) (e_lhs e_rhs: v_T) : t_Prop = Prop true <: t_Prop

/// Inequality between two propositions.
let impl__ne
      (#iimpl_37134320_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_37134320_ t_Prop)
      (self: t_Prop)
      (other: iimpl_37134320_)
    : t_Prop =
  ne #t_Prop
    self
    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other
      <:
      t_Prop)

(* BUG FIX: `lhs ==> other` is `¬lhs ∨ other`. The previous body computed
   `lhs._0 || ~.other._0`, i.e. the *converse* implication `other ==> lhs`. *)
let implies__from__constructors (lhs other: t_Prop) : t_Prop =
  Prop (~.lhs._0 || other._0) <: t_Prop

/// Logical implication.
let impl__implies
      (#iimpl_37134320_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_37134320_ t_Prop)
      (self: t_Prop)
      (other: iimpl_37134320_)
    : t_Prop =
  implies__from__constructors
    self
    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other
      <:
      t_Prop)

/// The logical implication `a ==> b`.
(* BUG FIX: the second implicit type binder was written `#iimpl_979615818_`
   twice, leaving `iimpl_648681637_` — used by the `i1` instance constraint,
   the `rhs` parameter and the body — unbound. *)
let implies
      (#iimpl_979615818_ #iimpl_648681637_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into iimpl_979615818_ t_Prop)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Convert.t_Into iimpl_648681637_ t_Prop)
      (lhs: iimpl_979615818_)
      (rhs: iimpl_648681637_)
    : t_Prop =
  implies__from__constructors
    (Core_models.Convert.f_into #iimpl_979615818_ #t_Prop #FStar.Tactics.Typeclasses.solve lhs
      <:
      t_Prop)
    (Core_models.Convert.f_into #iimpl_648681637_ #t_Prop #FStar.Tactics.Typeclasses.solve rhs
      <:
      t_Prop)

(* Symbolic constructor: always `Prop true` in Rust; hax rewrites it into a
   real universal quantification in the backends. *)
let v_forall__from__constructors
      (#v_A #v_F: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F v_A)
      (e_pred: v_F)
    : t_Prop = Prop true <: t_Prop

/// The universal quantifier. This should be used only for Hax code: in
/// Rust, this is always true.
/// # Example:
/// The Rust expression `forall(|x: T| phi(x))` corresponds to `∀ (x: T), phi(x)`.
let v_forall
      (#v_T #v_U #iimpl_367644862_: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_U t_Prop)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_Fn iimpl_367644862_ v_T)
      (f: iimpl_367644862_)
    : t_Prop =
  v_forall__from__constructors #v_T
    (fun x ->
        let x:v_T = x in
        Core_models.Convert.f_into #v_U
          #t_Prop
          #FStar.Tactics.Typeclasses.solve
          (Core_models.Ops.Function.f_call #iimpl_367644862_
              #v_T
              #FStar.Tactics.Typeclasses.solve
              f
              (x <: v_T)
            <:
            v_U)
        <:
        t_Prop)

(* Symbolic constructor: always `Prop true` in Rust; hax rewrites it into a
   real existential quantification in the backends. *)
let v_exists__from__constructors
      (#v_A #v_F: Type0)
      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F v_A)
      (e_pred: v_F)
    : t_Prop = Prop true <: t_Prop

/// The existential quantifier. This should be used only for Hax code: in
/// Rust, this is always true.
/// # Example:
/// The Rust expression `exists(|x: T| phi(x))` corresponds to `∃ (x: T), phi(x)`.
let v_exists (#v_T #v_U #iimpl_367644862_: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_U t_Prop) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_Fn iimpl_367644862_ v_T) (f: iimpl_367644862_) : t_Prop = v_exists__from__constructors #v_T (fun x -> let x:v_T = x in Core_models.Convert.f_into #v_U #t_Prop #FStar.Tactics.Typeclasses.solve (Core_models.Ops.Function.f_call #iimpl_367644862_ #v_T #FStar.Tactics.Typeclasses.solve f (x <: v_T) <: v_U) <: t_Prop) ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Prop.Constructors.fst ================================================ module Hax_lib.Prop.Constructors #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Hax_lib.Prop.Bundle {from_bool as from_bool} include Hax_lib.Prop.Bundle {v_and as v_and} include Hax_lib.Prop.Bundle {or as or} include Hax_lib.Prop.Bundle {not as not} include Hax_lib.Prop.Bundle {eq as eq} include Hax_lib.Prop.Bundle {ne as ne} include Hax_lib.Prop.Bundle {implies__from__constructors as implies} include Hax_lib.Prop.Bundle {v_forall__from__constructors as v_forall} include Hax_lib.Prop.Bundle {v_exists__from__constructors as v_exists} ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.Prop.fst ================================================ module Hax_lib.Prop unfold type t_Prop = Type0 ================================================ FILE: hax-lib/proofs/fstar/extraction/Hax_lib.fst ================================================ module Hax_lib #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Tactics val v_assert (p: bool) : Pure unit (requires p) (ensures (fun x -> p)) let v_assert (v__formula: bool) = () val assert_prop (p: Type0) : Pure unit (requires p) (ensures (fun x -> p)) let assert_prop (v__formula: Type0) = () val v_assume (p: Type0) : Pure unit (requires True) (ensures (fun x -> p)) 
let v_assume (v__formula: Type0) = assume v__formula ================================================ FILE: hax-lib/proofs/fstar/extraction/Makefile ================================================ # This is a generically useful Makefile for F* that is self-contained # # It is tempting to factor this out into multiple Makefiles but that # makes it less portable, so resist temptation, or move to a more # sophisticated build system. # # We expect: # 1. `fstar.exe` to be in PATH (alternatively, you can also set # $FSTAR_HOME to be set to your F* repo/install directory) # # 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH. # # 3. the extracted Cargo crate to have "hax-lib" as a dependency: # `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}` # # Optionally, you can set `HACL_HOME`. # # ROOTS contains all the top-level F* files you wish to verify # The default target `verify` verified ROOTS and its dependencies # To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line # # To make F* emacs mode use the settings in this file, you need to # add the following lines to your .emacs # # (setq-default fstar-executable "/bin/fstar.exe") # (setq-default fstar-smt-executable "/bin/z3") # # (defun my-fstar-compute-prover-args-using-make () # "Construct arguments to pass to F* by calling make." 
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # HACL_HOME ?= $(HOME)/.hax/hacl_home FSTAR_BIN ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo "fstar.exe" || echo "$(FSTAR_HOME)/bin/fstar.exe") CACHE_DIR ?= .cache HINT_DIR ?= .hints SHELL ?= /usr/bin/env bash EXECUTABLES = cargo cargo-hax jq K := $(foreach bin,$(EXECUTABLES),\ $(if $(shell command -v $(bin) 2> /dev/null),,$(error "No $(bin) in PATH"))) .PHONY: all verify clean all: rm -f .depend && $(MAKE) .depend $(MAKE) verify # Default hax invocation HAX_CLI = "cargo hax into fstar" # If $HACL_HOME doesn't exist, clone it ${HACL_HOME}: mkdir -p "${HACL_HOME}" git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}" # If no any F* file is detected, we run hax ifeq "$(wildcard *.fst *fsti)" "" $(shell $(SHELL) -c $(HAX_CLI)) endif # By default, we process all the files in the current directory ROOTS = $(wildcard *.fst *fsti) # Regenerate F* files via hax when Rust sources change $(ROOTS): $(shell find ../../../src -type f -name '*.rs') $(shell $(SHELL) -c $(HAX_CLI)) # The following is a bash script that discovers F* libraries define FINDLIBS # Prints a path if and only if it exists. Takes one argument: the # path. function print_if_exists() { if [ -d "$$1" ]; then echo "$$1" fi } # Asks Cargo all the dependencies for the current crate or workspace, # and extract all "root" directories for each. Takes zero argument. function dependencies() { cargo metadata --format-version 1 | jq -r '.packages | .[] | .manifest_path | split("/") | .[:-1] | join("/")' } # Find hax libraries *around* a given path. Takes one argument: the # path. 
function find_hax_libraries_at_path() { path="$$1" # if there is a `proofs/fstar/extraction` subfolder, then that's a # F* library print_if_exists "$$path/proofs/fstar/extraction" # Maybe the `proof-libs` folder of hax is around? MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") if [ $$? -eq 0 ]; then print_if_exists "$$MAYBE_PROOF_LIBS/core" print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" fi } { while IFS= read path; do find_hax_libraries_at_path "$$path" done < <(dependencies) } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c "$$FINDLIBS") FSTAR_FLAGS = --cmi \ --warn_error -331 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(info $(ROOTS)) $(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR): mkdir -p $@ $(CACHE_DIR): mkdir -p $@ $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints verify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) # Targets for interactive mode %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Clean targets clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: hax-lib/src/abstraction.rs ================================================ /// Marks a type as abstractable: its values can be mapped to an /// idealized version of the type. For instance, machine integers, /// which have bounds, can be mapped to mathematical integers. /// /// Each type can have only one abstraction. pub trait Abstraction { /// What is the ideal type values should be mapped to? 
type AbstractType; /// Maps a concrete value to its abstract counterpart fn lift(self) -> Self::AbstractType; } /// Marks a type as abstract: its values can be lowered to concrete /// values. This might panic. pub trait Concretization { /// Maps an abstract value and lowers it to its concrete counterpart. fn concretize(self) -> T; } ================================================ FILE: hax-lib/src/dummy.rs ================================================ mod abstraction; pub use abstraction::Concretization; pub mod prop; pub use prop::*; pub use int::*; #[cfg(feature = "macros")] pub use crate::proc_macros::*; #[macro_export] macro_rules! debug_assert { ($($arg:tt)*) => { ::core::debug_assert!($($arg)*); }; } #[macro_export] macro_rules! assert { ($($arg:tt)*) => { ::core::assert!($($arg)*); }; } #[macro_export] macro_rules! assert_prop { ($($arg:tt)*) => {{}}; } #[macro_export] macro_rules! assume { ($formula:expr) => { () }; } #[doc(hidden)] pub fn inline(_: &str) {} #[doc(hidden)] pub fn inline_unsafe(_: &str) -> T { unreachable!() } #[doc(hidden)] pub const fn _internal_loop_invariant, P: FnOnce(T) -> R>(_: &P) {} #[doc(hidden)] pub const fn _internal_while_loop_invariant(_: Prop) {} #[doc(hidden)] pub const fn _internal_loop_decreases(_: int::Int) {} pub trait Refinement { type InnerType; fn new(x: Self::InnerType) -> Self; fn get(self) -> Self::InnerType; fn get_mut(&mut self) -> &mut Self::InnerType; fn invariant(value: Self::InnerType) -> crate::Prop; } pub trait RefineAs { fn into_checked(self) -> RefinedType; } pub mod int { use core::ops::*; #[macro_export] macro_rules! 
int { ($lit:expr) => { Int($lit) }; } #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub struct Int(pub u8); impl Int { pub fn new(x: impl Into) -> Self { Int(x.into()) } pub fn get(self) -> u8 { self.0 } } impl Add for Int { type Output = Self; fn add(self, other: Self) -> Self::Output { Int(self.0 + other.0) } } impl Sub for Int { type Output = Self; fn sub(self, other: Self) -> Self::Output { Int(self.0 - other.0) } } impl Mul for Int { type Output = Self; fn mul(self, other: Self) -> Self::Output { Int(self.0 * other.0) } } impl Div for Int { type Output = Self; fn div(self, other: Self) -> Self::Output { Int(self.0 / other.0) } } impl Int { pub fn pow2(self) -> Self { self } pub fn _unsafe_from_str(_s: &str) -> Self { Int(0) } pub fn rem_euclid(&self, v: Self) -> Self { Self::new(self.0.rem_euclid(v.0)) } } pub trait ToInt { fn to_int(self) -> Int; } pub trait Abstraction { type AbstractType; fn lift(self) -> Self::AbstractType; } pub trait Concretization { fn concretize(self) -> T; } macro_rules! implement_abstraction { ($ty:ident) => { impl Abstraction for $ty { type AbstractType = Int; fn lift(self) -> Self::AbstractType { Int(0) } } impl ToInt for $ty { fn to_int(self) -> Int { self.lift() } } }; ($($ty:ident)*) => { $(implement_abstraction!($ty);)* }; } implement_abstraction!(u8 u16 u32 u64 u128 usize); implement_abstraction!(i8 i16 i32 i64 i128 isize); macro_rules! 
implement_concretize { ($ty:ident $method:ident) => { impl Concretization<$ty> for Int { fn concretize(self) -> $ty { self.0 as $ty } } impl Int { pub fn $method(self) -> $ty { self.concretize() } } }; ($ty:ident $method:ident, $($tt:tt)*) => { implement_concretize!($ty $method); implement_concretize!($($tt)*); }; () => {}; } implement_concretize!( u8 to_u8, u16 to_u16, u32 to_u32, u64 to_u64, u128 to_u128, usize to_usize, i8 to_i8, i16 to_i16, i32 to_i32, i64 to_i64, i128 to_i128, isize to_isize, ); } ================================================ FILE: hax-lib/src/implementation.rs ================================================ mod abstraction; pub use abstraction::*; pub mod int; pub use int::*; pub mod prop; pub use prop::*; #[cfg(feature = "macros")] pub use crate::proc_macros::*; #[doc(hidden)] #[cfg(hax)] #[macro_export] macro_rules! proxy_macro_if_not_hax { ($macro:path, no, $($arg:tt)*) => { () }; ($macro:path, $f:expr, $cond:expr$(, $($arg:tt)*)?) => { $f($cond) }; } #[cfg(not(debug_assertions))] #[doc(hidden)] #[cfg(not(hax))] #[macro_export] macro_rules! proxy_macro_if_not_hax { ($macro:path, $f:expr, $($arg:tt)*) => {}; } #[cfg(debug_assertions)] #[doc(hidden)] #[cfg(not(hax))] #[macro_export] macro_rules! proxy_macro_if_not_hax { ($macro:path, $f:expr, $($arg:tt)*) => { $macro!($($arg)*) }; } #[macro_export] /// Proxy to `std::debug_assert!`. Compiled with `hax`, this /// disappears. macro_rules! debug_assert { ($($arg:tt)*) => { $crate::proxy_macro_if_not_hax!(::core::debug_assert, no, $($arg)*) }; } #[macro_export] /// Proxy to `std::assert!`. Compiled with `hax`, this is transformed /// into a `assert` in the backend. macro_rules! assert { ($($arg:tt)*) => { $crate::proxy_macro_if_not_hax!(::core::assert, $crate::assert, $($arg)*) }; } #[doc(hidden)] #[cfg(hax)] /// This function exists only when compiled with `hax`, and is not /// meant to be used directly. It is called by `assert!` only in /// appropriate situations. 
pub fn assert(_formula: bool) {} #[macro_export] /// Assert a logical proposition [`Prop`]: this exists only in the backends of /// hax. In Rust, this macro expands to an empty block `{ }`. macro_rules! assert_prop { ($($arg:tt)*) => { { #[cfg(hax)] { $crate::assert_prop(::hax_lib::Prop::from($($arg)*)); } } }; } #[doc(hidden)] #[cfg(hax)] /// This function exists only when compiled with `hax`, and is not meant to be /// used directly. It is called by `assert_prop!` only in appropriate /// situations. pub fn assert_prop(_formula: Prop) {} #[doc(hidden)] #[cfg(hax)] /// This function exists only when compiled with `hax`, and is not /// meant to be used directly. It is called by `assume!` only in /// appropriate situations. pub fn assume(_formula: Prop) {} #[cfg(hax)] #[macro_export] macro_rules! assume { ($formula:expr) => { $crate::assume(::hax_lib::Prop::from($formula)) }; } /// Assume a proposition holds. In Rust, this is expanded to the /// expression `()`. While extracted with Hax, this gets expanded to a /// call to an `assume` function. /// /// # Example: /// /// ```rust /// fn sum(x: u32, y: u32) -> u32 { /// hax_lib::assume!(x < 4242 && y < 424242); /// x + y /// } /// ``` #[cfg(not(hax))] #[macro_export] macro_rules! assume { ($formula:expr) => { () }; } /// Dummy function that carries a string to be printed as such in the output language #[doc(hidden)] pub fn inline(_: &str) {} /// Similar to `inline`, but allows for any type. Do not use directly. #[doc(hidden)] pub fn inline_unsafe(_: &str) -> T { unreachable!() } /// Sink for any value into unit. This is used internally by hax to capture /// value of any type. Specifically, this is useful for the `decreases` clauses /// for the F* backend. #[doc(hidden)] pub fn any_to_unit(_: T) -> () { unreachable!() } /// A dummy function that holds a loop invariant. #[doc(hidden)] pub fn _internal_loop_invariant, P: FnOnce(T) -> R>(_: P) {} /// A dummy function that holds a while loop invariant. 
#[doc(hidden)] pub const fn _internal_while_loop_invariant(_: Prop) {} /// A dummy function that holds a loop variant. #[doc(hidden)] pub fn _internal_loop_decreases(_: Int) {} /// A type that implements `Refinement` should be a newtype for a /// type `T`. The field holding the value of type `T` should be /// private, and `Refinement` should be the only interface to the /// type. /// /// Please never implement this trait yourself, use the /// `refinement_type` macro instead. pub trait Refinement { /// The base type type InnerType; /// Smart constructor capturing an invariant. Its extraction will /// yield a proof obligation. fn new(x: Self::InnerType) -> Self; /// Destructor for the refined type fn get(self) -> Self::InnerType; /// Gets a mutable reference to a refinement fn get_mut(&mut self) -> &mut Self::InnerType; /// Tests wether a value satisfies the refinement fn invariant(value: Self::InnerType) -> Prop; } /// A utilitary trait that provides a `into_checked` method on traits /// that have a refined counter part. This trait is parametrized by a /// type `Target`: a base type can be refined in multiple ways. /// /// Please never implement this trait yourself, use the /// `refinement_type` macro instead. pub trait RefineAs { /// Smart constructor for `RefinedType`, checking the invariant /// `RefinedType::invariant`. The check is done statically via /// extraction to hax: extracted code will yield static proof /// obligations. /// /// In addition, in debug mode, the invariant is checked at /// run-time, unless this behavior was disabled when defining the /// refinement type `RefinedType` with the `refinement_type` macro /// and its `no_debug_runtime_check` option. fn into_checked(self) -> RefinedType; } ================================================ FILE: hax-lib/src/int/bigint.rs ================================================ //! This module provides an approximation of `BigInt` which is //! 
copiable, via an big array of `u8` of an fixed arbitrary size //! `BYTES`. //! Its interface provides bridges to `num_bigint::BigInt`. /// Maximal number of bytes stored in our copiable `BigInt`s. const BYTES: usize = 1024; #[derive(Debug, Copy, Clone)] pub(super) struct BigInt { sign: num_bigint::Sign, data: [u8; BYTES], } impl BigInt { /// Construct a [`BigInt`] from a [`num_bigint::BigInt`]. This /// operation panics when the provided [`num_bigint::BigInt`] /// has more than [`BYTES`] bytes. pub(super) fn new(i: &num_bigint::BigInt) -> Self { let (sign, bytes) = i.to_bytes_be(); if bytes.len() > BYTES { panic!("`copiable_bigint::BigInt::new`: too big, please consider increasing `BYTES`"); } let mut data = [0; BYTES]; data[BYTES - bytes.len()..].copy_from_slice(&bytes[..]); BigInt { sign, data } } /// Constructs a [`num_bigint::BigInt`] out of a [`BigInt`]. pub(super) fn get(self) -> num_bigint::BigInt { num_bigint::BigInt::from_bytes_be(self.sign, &self.data) } } impl core::cmp::PartialEq for BigInt { fn eq(&self, other: &Self) -> bool { self.get() == other.get() } } impl core::cmp::Eq for BigInt {} impl core::cmp::Ord for BigInt { fn cmp(&self, other: &Self) -> core::cmp::Ordering { self.get().cmp(&other.get()) } } impl core::cmp::PartialOrd for BigInt { fn partial_cmp(&self, other: &Self) -> Option { self.get().partial_cmp(&other.get()) } } ================================================ FILE: hax-lib/src/int/mod.rs ================================================ use core::fmt; use core::ops::*; use num_traits::cast::ToPrimitive; mod bigint; use bigint::*; use super::abstraction::*; #[cfg(feature = "macros")] pub use hax_lib_macros::int; /// Mathematical integers for writting specifications. Mathematical /// integers are unbounded and arithmetic operation on them never over /// or underflow. 
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct Int(BigInt); impl fmt::Display for Int { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.get()) } } impl Int { fn new(x: impl Into) -> Self { Int(BigInt::new(&x.into())) } fn get(self) -> num_bigint::BigInt { self.0.get() } } impl Add for Int { type Output = Self; fn add(self, other: Self) -> Self::Output { Self::new(self.get() + other.get()) } } impl Neg for Int { type Output = Self; fn neg(self) -> Self::Output { Self::new(-self.get()) } } impl Sub for Int { type Output = Self; fn sub(self, other: Self) -> Self::Output { Self::new(self.get() - other.get()) } } impl Mul for Int { type Output = Self; fn mul(self, other: Self) -> Self::Output { Self::new(self.get() * other.get()) } } impl Div for Int { type Output = Self; fn div(self, other: Self) -> Self::Output { Self::new(self.get() / other.get()) } } impl Int { /// Raises `2` at the power `self` pub fn pow2(self) -> Self { let exponent = self.get().to_u32().expect("Exponent doesn't fit in a u32"); Self::new(num_bigint::BigInt::from(2u8).pow(exponent)) } /// Constructs a `Int` out of a string literal. This function /// assumes its argument consists only of decimal digits, with /// optionally a minus sign prefix. pub fn _unsafe_from_str(s: &str) -> Self { use core::str::FromStr; Self::new(num_bigint::BigInt::from_str(s).unwrap()) } pub fn rem_euclid(&self, v: Self) -> Self { use num_traits::Euclid; Self::new(self.get().rem_euclid(&v.get())) } } #[cfg(feature = "macros")] pub trait ToInt { fn to_int(self) -> Int; } /// Instead of defining one overloaded instance, which relies /// explicitely on `num_bigint`: /// /// ```ignore /// impl> Abstraction for T { /// type AbstractType = Int; /// fn lift(self) -> Self::AbstractType { /// Int::new(self.into()) /// } /// } /// ``` /// /// We define an instance per machine type: we don't want the /// interface of this module to rely specifically on /// `num_bigint`. 
This module should be a very thin layer. macro_rules! implement_abstraction { ($ty:ident) => { impl Abstraction for $ty { type AbstractType = Int; fn lift(self) -> Self::AbstractType { Int::new(num_bigint::BigInt::from(self)) } } impl ToInt for $ty { fn to_int(self) -> Int { self.lift() } } }; ($($ty:ident)*) => { $(implement_abstraction!($ty);)* }; } implement_abstraction!(u8 u16 u32 u64 u128 usize); implement_abstraction!(i8 i16 i32 i64 i128 isize); macro_rules! implement_concretize { ($ty:ident $method:ident) => { impl Concretization<$ty> for Int { fn concretize(self) -> $ty { let concretized = self.get().$method(); debug_assert!(concretized.is_some()); concretized.unwrap().into() } } impl Int { pub fn $method(self) -> $ty { self.concretize() } } }; ($ty:ident $method:ident, $($tt:tt)*) => { implement_concretize!($ty $method); implement_concretize!($($tt)*); }; () => {}; } implement_concretize!( u8 to_u8, u16 to_u16, u32 to_u32, u64 to_u64, u128 to_u128, usize to_usize, i8 to_i8, i16 to_i16, i32 to_i32, i64 to_i64, i128 to_i128, isize to_isize, ); ================================================ FILE: hax-lib/src/lib.rs ================================================ //! Hax-specific helpers for Rust programs. Those helpers are usually //! no-ops when compiled normally but meaningful when compiled under //! hax. //! //! # Example: //! //! ```rust //! use hax_lib::*; //! fn sum(x: Vec, y: Vec) -> Vec { //! hax_lib::assume!(x.len() == y.len()); //! hax_lib::assert!(x.len() >= 0); //! hax_lib::assert_prop!(forall(|i: usize| implies(i < x.len(), x[i] < 4242))); //! hax_lib::debug_assert!(exists(|i: usize| implies(i < x.len(), x[i] > 123))); //! x.into_iter().zip(y.into_iter()).map(|(x, y)| x + y).collect() //! } //! ``` #![no_std] #[cfg(feature = "macros")] mod proc_macros; // hax engine relies on `hax-lib` names: to avoid cluttering names with // an additional `implementation` in all paths, we `include!` instead // of doing conditional `mod` and `pub use`. 
#[cfg(not(hax))] core::include!("dummy.rs"); #[cfg(hax)] core::include!("implementation.rs"); ================================================ FILE: hax-lib/src/proc_macros.rs ================================================ //! This module re-exports macros from `hax-lib-macros` since a //! proc-macro crate cannot export anything but procedural macros. pub use hax_lib_macros::{ attributes, decreases, ensures, exclude, impl_fn_decoration, include, lemma, loop_decreases, loop_invariant, opaque, opaque_type, refinement_type, requires, trait_fn_decoration, transparent, }; pub use hax_lib_macros::{ process_init, process_read, process_write, protocol_messages, pv_constructor, pv_handwritten, }; include!(concat!(env!("OUT_DIR"), "/proc_macros_generated.rs")); ================================================ FILE: hax-lib/src/prop.rs ================================================ use crate::abstraction::*; use core::ops::*; /// Represent a logical proposition, that may be not computable. #[derive(Clone, Copy, Debug)] pub struct Prop(bool); /// This module provides monomorphic constructors for `Prop`. /// Hax rewrite more elaborated versions (see `forall` or `AndBit` below) to those monomorphic constructors. pub mod constructors { use super::Prop; pub const fn from_bool(b: bool) -> Prop { Prop(b) } pub fn and(lhs: Prop, other: Prop) -> Prop { Prop(lhs.0 && other.0) } pub fn or(lhs: Prop, other: Prop) -> Prop { Prop(lhs.0 || other.0) } pub fn not(lhs: Prop) -> Prop { Prop(!lhs.0) } /// Logical equality between two value of *any* type pub fn eq(_lhs: T, _rhs: T) -> Prop { Prop(true) } pub fn ne(_lhs: T, _rhs: T) -> Prop { Prop(true) } pub fn implies(lhs: Prop, other: Prop) -> Prop { Prop(lhs.0 || !other.0) } pub fn forall Prop>(_pred: F) -> Prop { Prop(true) } pub fn exists Prop>(_pred: F) -> Prop { Prop(true) } } impl Prop { /// Lifts a boolean to a logical proposition. 
pub const fn from_bool(b: bool) -> Self { constructors::from_bool(b) } /// Conjuction of two propositions. pub fn and(self, other: impl Into) -> Self { constructors::and(self, other.into()) } /// Disjunction of two propositions. pub fn or(self, other: impl Into) -> Self { constructors::or(self, other.into()) } /// Negation of a proposition. pub fn not(self) -> Self { constructors::not(self) } /// Equality between two propositions. pub fn eq(self, other: impl Into) -> Self { constructors::eq(self, other.into()) } /// Equality between two propositions. pub fn ne(self, other: impl Into) -> Self { constructors::ne(self, other.into()) } /// Logical implication. pub fn implies(self, other: impl Into) -> Self { constructors::implies(self, other.into()) } } impl Abstraction for bool { type AbstractType = Prop; fn lift(self) -> Self::AbstractType { Prop(self) } } pub trait ToProp { fn to_prop(self) -> Prop; } impl ToProp for bool { fn to_prop(self) -> Prop { self.lift() } } impl From for Prop { fn from(value: bool) -> Self { Prop(value) } } impl> BitAnd for Prop { type Output = Prop; fn bitand(self, rhs: T) -> Self::Output { Prop(self.0 & rhs.into().0) } } impl> BitOr for Prop { type Output = Prop; fn bitor(self, rhs: T) -> Self::Output { Prop(self.0 | rhs.into().0) } } impl Not for Prop { type Output = Prop; fn not(self) -> Self::Output { Prop(!self.0) } } /// The universal quantifier. This should be used only for Hax code: in /// Rust, this is always true. /// /// # Example: /// /// The Rust expression `forall(|x: T| phi(x))` corresponds to `∀ (x: T), phi(x)`. pub fn forall>(f: impl Fn(T) -> U) -> Prop { constructors::forall(|x| f(x).into()) } /// The existential quantifier. This should be used only for Hax code: in /// Rust, this is always true. /// /// # Example: /// /// The Rust expression `exists(|x: T| phi(x))` corresponds to `∃ (x: T), phi(x)`. pub fn exists>(f: impl Fn(T) -> U) -> Prop { constructors::exists(|x| f(x).into()) } /// The logical implication `a ==> b`. 
pub fn implies(lhs: impl Into, rhs: impl Into) -> Prop { constructors::implies(lhs.into(), rhs.into()) } pub use constructors::eq; ================================================ FILE: hax-lib-protocol/Cargo.toml ================================================ [package] name = "hax-lib-protocol" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true [dependencies] libcrux = "0.0.2-pre.2" [package.metadata.release] release = false ================================================ FILE: hax-lib-protocol/README.md ================================================ # Hax Protocol Library This crate provides tools for protocol developers to write protcol specifications for hax. ## Protocol Traits To hax, a protocol is a collection of communicating state machines. This module provides traits that describe parts of a state machine's behaviour, specifically it provides traits for creating an initial state, and for state transition behaviour when reading or writing a message. ## Cryptographic Abstractions Beside message passing and state transitions, a protocol of course includes operations on the sent and received messages. For cryptographic protocols, these will be of a fairly restricted set of cryptoraphic primitive operations, which are provided in these cryptographic abstractions. This allows protocol authors to specify protocol party internal operations in a way that is easily accessible to hax. ================================================ FILE: hax-lib-protocol/src/crypto.rs ================================================ //! This module defines a cryptographic abstraction layer for use in //! hax protocol specifications. use crate::ProtocolError; /// An abstract Diffie-Hellman scalar. #[derive(Clone)] pub struct DHScalar(Vec); impl DHScalar { /// Wrap bytes into a Diffie-Hellman scalar. Does *not* perform /// input validation. 
pub fn from_bytes(bytes: &[u8]) -> Self { DHScalar(bytes.to_vec()) } } /// An abstract Diffie-Hellman group element. pub struct DHElement(Vec); impl DHElement { /// Wrap bytes into a Diffie-Hellman group element. Does *not* perform /// input validation. pub fn from_bytes(bytes: &[u8]) -> Self { DHElement(bytes.to_vec()) } } /// Choice of Diffie-Hellman groups. pub enum DHGroup { X25519, X448, P256, P384, P521, } impl From for libcrux::ecdh::Algorithm { /// Converter to `libcrux` type. fn from(value: DHGroup) -> Self { match value { DHGroup::X25519 => libcrux::ecdh::Algorithm::X25519, DHGroup::X448 => libcrux::ecdh::Algorithm::X448, DHGroup::P256 => libcrux::ecdh::Algorithm::P256, DHGroup::P384 => libcrux::ecdh::Algorithm::P384, DHGroup::P521 => libcrux::ecdh::Algorithm::P521, } } } /// Scalar multiplication of `scalar` and `element`. pub fn dh_scalar_multiply(group: DHGroup, scalar: DHScalar, element: DHElement) -> Vec { libcrux::ecdh::derive(group.into(), element.0, scalar.0).unwrap() } /// Scalar multiplication of a fixed generator and `scalar`. pub fn dh_scalar_multiply_base(group: DHGroup, scalar: DHScalar) -> Vec { libcrux::ecdh::secret_to_public(group.into(), scalar.0).unwrap() } /// An abstract AEAD key. pub struct AEADKey(libcrux::aead::Key); /// Choice of AEAD algorithms. pub enum AEADAlgorithm { Aes128Gcm, Aes256Gcm, Chacha20Poly1305, } impl From for libcrux::aead::Algorithm { /// Converter to `libcrux` type. fn from(value: AEADAlgorithm) -> Self { match value { AEADAlgorithm::Aes128Gcm => libcrux::aead::Algorithm::Aes128Gcm, AEADAlgorithm::Aes256Gcm => libcrux::aead::Algorithm::Aes256Gcm, AEADAlgorithm::Chacha20Poly1305 => libcrux::aead::Algorithm::Chacha20Poly1305, } } } impl AEADKey { /// Attempt deserialization of `bytes` into an AEAD key for /// `algorithm`. Panics on failure. 
pub fn from_bytes(algorithm: AEADAlgorithm, bytes: &[u8]) -> Self { AEADKey(libcrux::aead::Key::from_bytes(algorithm.into(), bytes.to_vec()).unwrap()) } } /// An abstract AEAD initialization vector. pub struct AEADIV(libcrux::aead::Iv); impl AEADIV { /// Attempt construction of an AEAD IV from `bytes`. Panics if /// number of `bytes` is insufficient. pub fn from_bytes(bytes: &[u8]) -> Self { AEADIV(libcrux::aead::Iv::new(bytes).unwrap()) } } /// An abstract AEAD authentication tag. pub struct AEADTag(libcrux::aead::Tag); impl AEADTag { /// Attempt deserialization of an AEAD tag from `bytes`. Panics if /// number of `bytes` is insufficient. pub fn from_bytes(bytes: &[u8]) -> Self { let bytes: [u8; 16] = bytes.try_into().unwrap(); AEADTag(libcrux::aead::Tag::from(bytes)) } } /// Abstract AEAD encryption using `algorithm`. Returns a pair of byte /// vectors `(ciphertext, tag)`. pub fn aead_encrypt(key: AEADKey, iv: AEADIV, aad: &[u8], plain: &[u8]) -> (Vec, Vec) { let (tag, cip) = libcrux::aead::encrypt_detached(&key.0, plain, iv.0, aad).unwrap(); (cip, tag.as_ref().to_vec()) } /// Abstract AEAD decryption using `algorithm`. On success returns the /// decrypted plaintext, otherwise a `CryptoError`. pub fn aead_decrypt( key: AEADKey, iv: AEADIV, aad: &[u8], cip: &[u8], tag: AEADTag, ) -> Result, ProtocolError> { libcrux::aead::decrypt_detached(&key.0, cip, iv.0, aad, &tag.0) .map_err(|_| ProtocolError::CryptoError) } /// Choice of hashing algorithms. pub enum HashAlgorithm { Sha1, Sha224, Sha256, Sha384, Sha512, Blake2s, Blake2b, Sha3_224, Sha3_256, Sha3_384, Sha3_512, } impl From for libcrux::digest::Algorithm { /// Converter to `libcrux` type. 
fn from(value: HashAlgorithm) -> Self { match value { HashAlgorithm::Sha1 => libcrux::digest::Algorithm::Sha1, HashAlgorithm::Sha224 => libcrux::digest::Algorithm::Sha224, HashAlgorithm::Sha256 => libcrux::digest::Algorithm::Sha256, HashAlgorithm::Sha384 => libcrux::digest::Algorithm::Sha384, HashAlgorithm::Sha512 => libcrux::digest::Algorithm::Sha512, HashAlgorithm::Blake2s => libcrux::digest::Algorithm::Blake2s, HashAlgorithm::Blake2b => libcrux::digest::Algorithm::Blake2b, HashAlgorithm::Sha3_224 => libcrux::digest::Algorithm::Sha3_224, HashAlgorithm::Sha3_256 => libcrux::digest::Algorithm::Sha3_256, HashAlgorithm::Sha3_384 => libcrux::digest::Algorithm::Sha3_384, HashAlgorithm::Sha3_512 => libcrux::digest::Algorithm::Sha3_512, } } } /// Abstract hashing using `algorithm`. pub fn hash(algorithm: HashAlgorithm, input: &[u8]) -> Vec { libcrux::digest::hash(algorithm.into(), input) } /// Choice of algorithms for instantiation of HMAC. pub enum HMACAlgorithm { Sha1, Sha256, Sha384, Sha512, } impl From for libcrux::hmac::Algorithm { /// Converter to `libcrux` type. fn from(value: HMACAlgorithm) -> Self { match value { HMACAlgorithm::Sha1 => libcrux::hmac::Algorithm::Sha1, HMACAlgorithm::Sha256 => libcrux::hmac::Algorithm::Sha256, HMACAlgorithm::Sha384 => libcrux::hmac::Algorithm::Sha384, HMACAlgorithm::Sha512 => libcrux::hmac::Algorithm::Sha512, } } } /// Abstract HMAC using `algorithm` as the hash function. pub fn hmac(algorithm: HMACAlgorithm, key: &[u8], input: &[u8]) -> Vec { libcrux::hmac::hmac(algorithm.into(), key, input, None) } ================================================ FILE: hax-lib-protocol/src/lib.rs ================================================ //! This crate provides tools for protocol authors to write protocol //! specifications for hax. //! //! It contains a collection traits describing state machine behaviour, as //! well as a library of abstract primitive cryptographic operations for //! use in protocol specifications. 
pub mod crypto; pub mod state_machine; /// A protocol error type. #[derive(Debug)] pub enum ProtocolError { /// An error in the crypto abstraction layer CryptoError, /// On receiving an unexpected message, i.e. one that does not allow a state /// transition from the current state. InvalidMessage, /// On receiving invalid initialization data. InvalidPrologue, } pub type ProtocolResult = Result; ================================================ FILE: hax-lib-protocol/src/state_machine.rs ================================================ //! This module provides types and traits for implementing a protocol state //! machine. //! //! A protocol party is conceived of as having a set of possible states, one of //! which is the initial state. Transitioning to a different state is possible //! either through receiving and processing a message or through writing a //! message. use crate::ProtocolResult; /// A trait for protocol initial states. pub trait InitialState { /// Initializes the state given initialization data in `prologue`. /// /// Errors on invalid initialization data. fn init(prologue: Option>) -> ProtocolResult where Self: Sized; } /// A state where a message must be written before transitioning to the next state. /// /// `WriteState` can only be implemented once by every state type, implying that /// in any protocol party state, if a message is to be written, that message and /// the state the party is in after writing the message are uniquely determined. pub trait WriteState { /// The uniquely determined state that is transitioned to after writing the message. type NextState; /// The type of the message that is being written. type Message; /// Produce the message to be written when transitioning to the next state. fn write(self) -> ProtocolResult<(Self::NextState, Self::Message)>; } /// A state where a message must be read before transitioning to the next state. 
/// /// A state type may implement `ReadState` multiple times, for different /// instances of `NextState`, allowing the following state to depend on the /// message that was received. pub trait ReadState { /// The type of message to be read. type Message; /// Generate the next state based on the current state and the received /// message. fn read(self, msg: Self::Message) -> ProtocolResult; } ================================================ FILE: hax-lib-protocol-macros/Cargo.toml ================================================ [package] name = "hax-lib-protocol-macros" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true [lib] proc-macro = true [dependencies] proc-macro-error2 = { version = "2.0" } proc-macro2.workspace = true quote.workspace = true syn = { version = "2.0", features = [ "full", "visit-mut", "extra-traits", "parsing", ] } [package.metadata.release] release = false ================================================ FILE: hax-lib-protocol-macros/src/lib.rs ================================================ use quote::quote; use syn::{parse, parse_macro_input}; /// This macro takes an `fn` as the basis of an `InitialState` implementation /// for the state type that is returned by the `fn` (on success). /// /// The `fn` is expected to build the state type specified as a `Path` attribute /// argument from a `Vec`, i.e. the signature should be compatible with /// `TryFrom>` for the state type given as argument to the macro. 
/// /// Example: /// ```ignore /// pub struct A0 { /// data: u8, /// } /// /// #[hax_lib_protocol_macros::init(A0)] /// fn init_a(prologue: Vec) -> ::hax_lib_protocol::ProtocolResult { /// if prologue.len() < 1 { /// return Err(::hax_lib_protocol::ProtocolError::InvalidPrologue); /// } /// Ok(A0 { data: prologue[0] }) /// } /// /// // The following is generated by the macro: /// #[hax_lib::exclude] /// impl TryFrom> for A0 { /// type Error = ::hax_lib_protocol::ProtocolError; /// fn try_from(value: Vec) -> Result { /// init_a(value) /// } /// } /// #[hax_lib::exclude] /// impl InitialState for A0 { /// fn init(prologue: Option>) -> ::hax_lib_protocol::ProtocolResult { /// if let Some(prologue) = prologue { /// prologue.try_into() /// } else { /// Err(::hax_lib_protocol::ProtocolError::InvalidPrologue) /// } /// } /// } /// ``` #[proc_macro_attribute] pub fn init( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut output = quote!(#[hax_lib::process_init]); output.extend(proc_macro2::TokenStream::from(item.clone())); let input: syn::ItemFn = parse_macro_input!(item); let return_type: syn::Path = parse_macro_input!(attr); let name = input.sig.ident; let expanded = quote!( #[hax_lib::exclude] impl TryFrom> for #return_type { type Error = ::hax_lib_protocol::ProtocolError; fn try_from(value: Vec) -> Result { #name(value) } } #[hax_lib::exclude] impl InitialState for #return_type { fn init(prologue: Option>) -> ::hax_lib_protocol::ProtocolResult { if let Some(prologue) = prologue { prologue.try_into() } else { Err(::hax_lib_protocol::ProtocolError::InvalidPrologue) } } } ); output.extend(expanded); output.into() } /// This macro takes an `fn` as the basis of an `InitialState` implementation /// for the state type that is returned by the `fn` (on success). /// /// The `fn` is expected to build the state type specified as a `Path` attribute /// argument without additional input. 
/// Example: /// ```ignore /// pub struct B0 {} /// /// #[hax_lib_protocol_macros::init_empty(B0)] /// fn init_b() -> ::hax_lib_protocol::ProtocolResult { /// Ok(B0 {}) /// } /// /// // The following is generated by the macro: /// #[hax_lib::exclude] /// impl InitialState for B0 { /// fn init(prologue: Option>) -> ::hax_lib_protocol::ProtocolResult { /// if let Some(_) = prologue { /// Err(::hax_lib_protocol::ProtocolError::InvalidPrologue) /// } else { /// init_b() /// } /// } /// } /// ``` #[proc_macro_error2::proc_macro_error] #[proc_macro_attribute] pub fn init_empty( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut output = quote!(#[hax_lib::process_init]); output.extend(proc_macro2::TokenStream::from(item.clone())); let input: syn::ItemFn = parse_macro_input!(item); let return_type: syn::Path = parse_macro_input!(attr); let name = input.sig.ident; let expanded = quote!( #[hax_lib::exclude] impl InitialState for #return_type { fn init(prologue: Option>) -> ::hax_lib_protocol::ProtocolResult { if let Some(_) = prologue { Err(::hax_lib_protocol::ProtocolError::InvalidPrologue) } else { #name() } } } ); output.extend(expanded); return output.into(); } /// A structure to parse transition tuples from `read` and `write` macros. struct Transition { /// `Path` to the current state type of the transition. pub current_state: syn::Path, /// `Path` to the destination state type of the transition. pub next_state: syn::Path, /// `Path` to the message type this transition is based on. 
pub message_type: syn::Path, } impl syn::parse::Parse for Transition { fn parse(input: parse::ParseStream) -> syn::Result { use syn::spanned::Spanned; let punctuated = syn::punctuated::Punctuated::::parse_terminated(input)?; if punctuated.len() != 3 { Err(syn::Error::new( punctuated.span(), "Insufficient number of arguments", )) } else { let mut args = punctuated.into_iter(); Ok(Self { current_state: args.next().unwrap(), next_state: args.next().unwrap(), message_type: args.next().unwrap(), }) } } } /// Macro deriving a `WriteState` implementation for the origin state type, /// generating a message of `message_type` and a new state, as indicated by the /// transition tuple. /// /// Example: /// ```ignore /// #[hax_lib_protocol_macros::write(A0, A1, Message)] /// fn write_ping(state: A0) -> ::hax_lib_protocol::ProtocolResult<(A1, Message)> { /// Ok((A1 {}, Message::Ping(state.data))) /// } /// /// // The following is generated by the macro: /// #[hax_lib::exclude] /// impl TryFrom for (A1, Message) { /// type Error = ::hax_lib_protocol::ProtocolError; /// /// fn try_from(value: A0) -> Result { /// write_ping(value) /// } /// } /// /// #[hax_lib::exclude] /// impl WriteState for A0 { /// type NextState = A1; /// type Message = Message; /// /// fn write(self) -> ::hax_lib_protocol::ProtocolResult<(Self::NextState, Message)> { /// self.try_into() /// } /// } /// ``` #[proc_macro_attribute] pub fn write( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut output = quote!(#[hax_lib::process_write]); output.extend(proc_macro2::TokenStream::from(item.clone())); let input: syn::ItemFn = parse_macro_input!(item); let Transition { current_state, next_state, message_type, } = parse_macro_input!(attr); let name = input.sig.ident; let expanded = quote!( #[hax_lib::exclude] impl TryFrom<#current_state> for (#next_state, #message_type) { type Error = ::hax_lib_protocol::ProtocolError; fn try_from(value: #current_state) -> Result { 
#name(value) } } #[hax_lib::exclude] impl WriteState for #current_state { type NextState = #next_state; type Message = #message_type; fn write(self) -> ::hax_lib_protocol::ProtocolResult<(Self::NextState, Self::Message)> { self.try_into() } } ); output.extend(expanded); output.into() } /// Macro deriving a `ReadState` implementation for the destination state type, /// consuming a message of `message_type` and the current state, as indicated by /// the transition tuple. /// /// Example: /// ```ignore /// #[hax_lib_protocol_macros::read(A1, A2, Message)] /// fn read_pong(_state: A1, msg: Message) -> ::hax_lib_protocol::ProtocolResult { /// match msg { /// Message::Ping(_) => Err(::hax_lib_protocol::ProtocolError::InvalidMessage), /// Message::Pong(received) => Ok(A2 { received }), /// } /// } /// // The following is generated by the macro: /// #[hax_lib::exclude] /// impl TryFrom<(A1, Message)> for A2 { /// type Error = ::hax_lib_protocol::ProtocolError; /// fn try_from((state, msg): (A1, Message)) -> Result { /// read_pong(state, msg) /// } /// } /// #[hax_lib::exclude] /// impl ReadState for A1 { /// type Message = Message; /// fn read(self, msg: Message) -> ::hax_lib_protocol::ProtocolResult { /// A2::try_from((self, msg)) /// } /// } /// ``` #[proc_macro_attribute] pub fn read( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut output = quote!(#[hax_lib::process_read]); output.extend(proc_macro2::TokenStream::from(item.clone())); let input: syn::ItemFn = parse_macro_input!(item); let Transition { current_state, next_state, message_type, } = parse_macro_input!(attr); let name = input.sig.ident; let expanded = quote!( #[hax_lib::exclude] impl TryFrom<(#current_state, #message_type)> for #next_state { type Error = ::hax_lib_protocol::ProtocolError; fn try_from((state, msg): (#current_state, #message_type)) -> Result { #name(state, msg) } } #[hax_lib::exclude] impl ReadState<#next_state> for #current_state { type 
Message = #message_type; fn read(self, msg: Self::Message) -> ::hax_lib_protocol::ProtocolResult<#next_state> { #next_state::try_from((self, msg)) } } ); output.extend(expanded); output.into() } ================================================ FILE: hax-types/Cargo.toml ================================================ [package] name = "hax-types" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "Helper crate defining the types used to communicate between the custom rustc driver, the CLI and the engine of hax." [dependencies] clap = { workspace = true, features = ["env"] } hax-frontend-exporter.workspace = true hax-frontend-exporter-options.workspace = true itertools.workspace = true path-clean = "1.0.1" schemars.workspace = true serde.workspace = true colored.workspace = true serde_json.workspace = true annotate-snippets.workspace = true hax-adt-into.workspace = true tracing.workspace = true serde-brief ={ version = "0.1", features = ["std", "alloc"]} zstd = "0.13.1" miette = "7.2.0" [features] rustc = ["hax-frontend-exporter/rustc"] ================================================ FILE: hax-types/README.md ================================================ # `hax-types` This crate contains the type definitions that are used to communicate between: - the command line (the `cargo-hax` binary); - the custom rustc driver; - the hax engine (the `hax-engine` binary). Those three component send and receive messages in JSON or CBOR on stdin and stdout. ================================================ FILE: hax-types/build.rs ================================================ macro_rules! 
set_empty_env_var_with {
    // If `$var` is set in the environment at compile time, reuse its value;
    // otherwise compute it with `$f` and export it via `cargo:rustc-env` so
    // the crate can read it with `env!`.
    ($var:literal, $f: expr) => {{
        // Fixed typo: was `cargo:rurun-if-env-changed`, which cargo does not
        // recognize, so changes to the variable never triggered a rebuild.
        println!("cargo:rerun-if-env-changed={}", $var);
        match option_env!($var) {
            Some(value) => value.to_string(),
            None => {
                let value = $f;
                println!("cargo:rustc-env={}={}", $var, value);
                value
            }
        }
    }};
}

/// Placeholder used when git metadata is unavailable.
const UNKNOWN: &str = "unknown";

/// Runs `git` with the given arguments and returns its trimmed stdout.
/// Returns [`UNKNOWN`] when git is missing, fails, or prints nothing.
fn git_command(args: &[&str]) -> String {
    std::process::Command::new("git")
        .args(args)
        .output()
        // Lossy conversion instead of `String::from_utf8(..).unwrap()`:
        // a build script should not panic on unexpected git output.
        .map(|output| {
            String::from_utf8_lossy(&output.stdout)
                .trim()
                .to_string()
        })
        .ok()
        .filter(|s| !s.is_empty())
        // `_else` form: don't allocate the fallback string on the happy path.
        .unwrap_or_else(|| UNKNOWN.to_string())
}

fn main() {
    // Record the commit hash, then derive a human-readable version string:
    // either a `hax-v*` tag containing that commit, or a short-rev fallback.
    let commit_hash =
        set_empty_env_var_with!("HAX_GIT_COMMIT_HASH", git_command(&["rev-parse", "HEAD"]));
    set_empty_env_var_with!("HAX_VERSION", {
        if commit_hash == UNKNOWN {
            // No git available: fall back to the Cargo package version.
            env!("CARGO_PKG_VERSION").into()
        } else {
            git_command(&["tag", "--contains", &commit_hash])
                .lines()
                .next()
                .and_then(|tag| tag.split_once("hax-v"))
                .map(|(_, version)| version.trim().to_string())
                .unwrap_or_else(|| {
                    // Guard against revisions shorter than 10 characters
                    // (e.g. unexpected git output) instead of panicking.
                    let short = commit_hash.get(0..10).unwrap_or(&commit_hash);
                    format!("untagged-git-rev-{short}")
                })
        }
    });
}



================================================
FILE: hax-types/src/cli_options/extension.rs
================================================
/// This module defines a way to extend externally the CLI of hax, via
/// the `Extension` trait. This trait defines one associated type per
/// extension point.
use crate::prelude::*;
use clap::{Parser, Subcommand};

macro_rules!
trait_alias { ($name:ident = $($base:tt)+) => { pub trait $name: $($base)+ { } impl $name for T { } }; } trait_alias!( ExtensionPoint = std::fmt::Debug + for<'a> serde::Deserialize<'a> + serde::Serialize + JsonSchema + Clone ); trait_alias!(SubcommandExtensionPoint = ExtensionPoint + clap::Subcommand); trait_alias!(ArgsExtensionPoint = ExtensionPoint + clap::Args); #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] pub struct EmptyArgsExtension {} #[derive_group(Serializers)] #[derive(JsonSchema, Subcommand, Debug, Clone)] pub enum EmptySubcommandExtension {} pub trait Extension: 'static { type Options: ArgsExtensionPoint; type Command: SubcommandExtensionPoint; type BackendOptions: ArgsExtensionPoint; type FStarOptions: ArgsExtensionPoint; } impl Extension for () { type Options = EmptyArgsExtension; type Command = EmptySubcommandExtension; type BackendOptions = EmptyArgsExtension; type FStarOptions = EmptyArgsExtension; } ================================================ FILE: hax-types/src/cli_options/mod.rs ================================================ use crate::prelude::*; use clap::{Parser, Subcommand, ValueEnum}; use std::fmt; pub use hax_frontend_exporter_options::*; pub mod extension; use extension::Extension; #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum DebugEngineMode { File(PathOrDash), Interactive, } impl std::convert::From<&str> for DebugEngineMode { fn from(s: &str) -> Self { match s { "i" | "interactively" => DebugEngineMode::Interactive, s => DebugEngineMode::File(s.strip_prefix("file:").unwrap_or(s).into()), } } } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone, Default)] pub struct ForceCargoBuild { pub data: u64, } impl std::convert::From<&str> for ForceCargoBuild { fn from(s: &str) -> Self { use std::time::{SystemTime, UNIX_EPOCH}; if s == "false" { let data = SystemTime::now() .duration_since(UNIX_EPOCH) .map(|r| r.as_millis()) .unwrap_or(0); ForceCargoBuild { data: data as 
u64 } } else { ForceCargoBuild::default() } } } #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema)] pub enum PathOrDash { Dash, Path(PathBuf), } impl std::convert::From<&str> for PathOrDash { fn from(s: &str) -> Self { match s { "-" => PathOrDash::Dash, _ => PathOrDash::Path(PathBuf::from(s)), } } } impl PathOrDash { pub fn open_or_stdout(&self) -> Box { use std::io::BufWriter; match self { PathOrDash::Dash => Box::new(BufWriter::new(std::io::stdout())), PathOrDash::Path(path) => { Box::new(BufWriter::new(std::fs::File::create(&path).unwrap())) } } } pub fn map_path PathBuf>(&self, f: F) -> Self { match self { PathOrDash::Path(path) => PathOrDash::Path(f(path)), PathOrDash::Dash => PathOrDash::Dash, } } } fn absolute_path(path: impl AsRef) -> std::io::Result { use path_clean::PathClean; let path = path.as_ref(); let absolute_path = if path.is_absolute() { path.to_path_buf() } else { std::env::current_dir()?.join(path) } .clean(); Ok(absolute_path) } pub trait NormalizePaths { fn normalize_paths(&mut self); } impl NormalizePaths for PathBuf { fn normalize_paths(&mut self) { *self = absolute_path(&self).unwrap(); } } impl NormalizePaths for PathOrDash { fn normalize_paths(&mut self) { match self { PathOrDash::Path(p) => p.normalize_paths(), PathOrDash::Dash => (), } } } #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] pub struct ProVerifOptions { /// Items for which hax should extract a default-valued process /// macro with a corresponding type signature. This flag expects a /// space-separated list of inclusion clauses. An inclusion clause /// is a Rust path prefixed with `+`, `+!` or `-`. `-` means /// implementation only, `+!` means interface only and `+` means /// implementation and interface. Rust path chunks can be either a /// concrete string, or a glob (just like bash globs, but with /// Rust paths). 
#[arg( long, value_parser = parse_inclusion_clause, value_delimiter = ' ', allow_hyphen_values(true) )] pub assume_items: Vec, } #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] pub struct FStarOptions { /// Set the Z3 per-query resource limit #[arg(long, default_value = "15")] pub z3rlimit: u32, /// Number of unrolling of recursive functions to try #[arg(long, default_value = "0")] pub fuel: u32, /// Number of unrolling of inductive datatypes to try #[arg(long, default_value = "1")] pub ifuel: u32, /// Modules for which Hax should extract interfaces (`*.fsti` /// files) in supplement to implementations (`*.fst` files). By /// default we extract no interface, only implementations. If a /// item is signature only (see the `+:` prefix of the /// `--include_namespaces` flag of the `into` subcommand), then /// its namespace is extracted with an interface. This flag /// expects a space-separated list of inclusion clauses. An /// inclusion clause is a Rust path prefixed with `+`, `+!` or /// `-`. `-` means implementation only, `+!` means interface only /// and `+` means implementation and interface. Rust path chunks /// can be either a concrete string, or a glob (just like bash /// globs, but with Rust paths). #[arg( long, value_parser = parse_inclusion_clause, value_delimiter = ' ', allow_hyphen_values(true) )] pub interfaces: Vec, #[arg(long, default_value = "100", env = "HAX_FSTAR_LINE_WIDTH")] pub line_width: u16, #[group(flatten)] pub cli_extension: E::FStarOptions, } #[derive_group(Serializers)] #[derive(JsonSchema, Subcommand, Debug, Clone)] pub enum Backend { /// Use the F* backend Fstar(FStarOptions), /// Use the Lean backend (warning: experimental) Lean, /// Use the Coq backend Coq, /// Use the SSProve backend Ssprove, /// Use the EasyCrypt backend (warning: work in progress!) Easycrypt, /// Use the ProVerif backend (warning: work in progress!) ProVerif(ProVerifOptions), /// Use the Rust backend (warning: work in progress!) 
#[clap(hide = true)] Rust, /// Extract `DefId`s of the crate as a Rust module tree. /// This is a command that regenerates code for the rust engine. #[clap(hide = true)] GenerateRustEngineNames, /// A debugger for the Rust engine Debugger { #[arg(long, short)] interactive: bool, }, } impl fmt::Display for Backend<()> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { BackendName::from(self).fmt(f) } } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum DepsKind { Transitive, Shallow, None, } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum InclusionKind { /// `+query` include the items selected by `query` Included(DepsKind), SignatureOnly, Excluded, } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct InclusionClause { pub kind: InclusionKind, pub namespace: Namespace, } const PREFIX_INCLUDED_TRANSITIVE: &str = "+"; const PREFIX_INCLUDED_SHALLOW: &str = "+~"; const PREFIX_INCLUDED_NONE: &str = "+!"; const PREFIX_SIGNATURE_ONLY: &str = "+:"; const PREFIX_EXCLUDED: &str = "-"; impl ToString for InclusionClause { fn to_string(&self) -> String { let kind = match self.kind { InclusionKind::Included(DepsKind::Transitive) => PREFIX_INCLUDED_TRANSITIVE, InclusionKind::Included(DepsKind::Shallow) => PREFIX_INCLUDED_SHALLOW, InclusionKind::Included(DepsKind::None) => PREFIX_INCLUDED_NONE, InclusionKind::SignatureOnly => PREFIX_SIGNATURE_ONLY, InclusionKind::Excluded => PREFIX_EXCLUDED, }; format!("{kind}{}", self.namespace.to_string()) } } pub fn parse_inclusion_clause( s: &str, ) -> Result> { let s = s.trim(); if s.is_empty() { Err("Expected `-` or `+`, got an empty string")? } let (prefix, namespace) = { let f = |&c: &char| matches!(c, '+' | '-' | '~' | '!' | ':'); ( s.chars().take_while(f).into_iter().collect::(), s.chars().skip_while(f).into_iter().collect::(), ) }; let kind = match &prefix[..] 
{ PREFIX_INCLUDED_TRANSITIVE => InclusionKind::Included(DepsKind::Transitive), PREFIX_INCLUDED_SHALLOW => InclusionKind::Included(DepsKind::Shallow), PREFIX_INCLUDED_NONE => InclusionKind::Included(DepsKind::None), PREFIX_SIGNATURE_ONLY => InclusionKind::SignatureOnly, PREFIX_EXCLUDED => InclusionKind::Excluded, prefix => Err(format!( "Expected `+`, `+~`, `+!`, `+:` or `-`, got an `{prefix}`" ))?, }; Ok(InclusionClause { kind, namespace: namespace.to_string().into(), }) } #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] pub struct TranslationOptions { /// Controls which Rust item should be extracted or not. /// /// This is a space-separated list of patterns prefixed with a /// modifier, read from the left to the right. /// /// A pattern is a Rust path (say `mycrate::mymod::myfn`) where /// globs are allowed: `*` matches any name /// (e.g. `mycrate::mymod::myfn` is matched by /// `mycrate::*::myfn`), while `**` matches any subpath, empty /// included (e.g. `mycrate::mymod::myfn` is matched by /// `**::myfn`). /// By default, hax includes all items. Then, the patterns /// prefixed by modifiers are processed from left to right, /// excluding or including items. Each pattern selects a number of /// item. The modifiers are: /// {n}{n} - `+`: includes the selected items with their /// dependencies, transitively (e.g. if function `f` calls `g` /// which in turn calls `h`, then `+k::f` includes `f`, `g` and /// `h`) /// {n} - `+~`: includes the selected items with their direct /// dependencies only (following the previous example, `+~k::f` /// would select `f` and `g`, but not `h`) /// {n} - `+!`: includes the selected items, without their /// dependencies (`+!k::f` would only select `f`) /// {n} - `+:`: only includes the type of the selected items (no /// dependencies). This includes full struct and enums, but only /// the type signature of functions and trait impls (except when /// they contain associated types), dropping their bodies. 
#[arg( value_parser = parse_inclusion_clause, value_delimiter = ' ', )] #[arg(short, allow_hyphen_values(true))] pub include_namespaces: Vec, } #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] pub struct BackendOptions { #[command(subcommand)] pub backend: Backend, /// Don't write anything on disk. Output everything as JSON to stdout /// instead. #[arg(long = "dry-run")] pub dry_run: bool, /// Verbose mode for the Hax engine. Set `-vv` for maximal verbosity. #[arg(short, long, action = clap::ArgAction::Count)] pub verbose: u8, /// Prints statistics about how many items have been translated /// successfully by the engine. #[arg(long)] pub stats: bool, /// Enables profiling for the engine: for each phase of the /// engine, time and memory usage are recorded and reported. #[arg(long)] pub profile: bool, /// Prune Rust items that are not under the provided top-level module name. /// This will effectively remove all items that don't match `*::::**`. /// This prunning occurs directly on the `haxmeta` file, in the frontend. /// This is independent from any engine options. #[arg(long)] #[clap(hide = true)] pub prune_haxmeta: Option, /// Enable engine debugging: dumps the AST at each phase. /// /// The value of `` can be either: /// {n}{n} - `interactive` (or `i`): enables debugging of the engine, /// and visualize interactively in a webapp how a crate was /// transformed by each phase, both in Rust-like syntax and /// browsing directly the internal AST. By default, the webapp is /// hosted on `http://localhost:8000`, the port can be override by /// setting the `HAX_DEBUGGER_PORT` environment variable. /// {n} - `` or `file:`: outputs the different AST as JSON /// to ``. `` can be either [-] or a path. #[arg(short, long = "debug-engine")] pub debug_engine: Option, /// Extract type aliases. This is disabled by default, since /// extracted terms depends on expanded types rather than on type /// aliases. 
Turning this option on is discouraged: Rust type
    /// synonyms can omit generic bounds, which are usually
    /// necessary in the hax backends, leading to typechecking
    /// errors.
#[clap(hide = true)] Serialize { /// Whether the bodies are exported as THIR, built MIR, const /// MIR, or a combination. Repeat this option to extract a /// combination (e.g. `-k thir -k mir-built`). Pass `--kind` /// alone with no value to disable body extraction. #[arg( value_enum, short, long = "kind", num_args = 0..=3, default_values_t = [ExportBodyKind::Thir] )] kind: Vec, /// When extracting to a given backend, the exporter is called with different `cfg` options. /// This option allows to set the same flags as `cargo hax into` would pick. #[arg(short)] backend: Option, }, #[command(flatten)] CliExtension(E::Command), } impl Command { pub fn body_kinds(&self) -> Vec { match self { Command::JSON { kind, .. } => kind.clone(), Command::Serialize { kind, .. } => kind.clone(), Command::Backend { .. } | Command::CliExtension { .. } => vec![ExportBodyKind::Thir], } } pub fn backend_name(&self) -> Option { match self { Command::Backend(backend_options) => Some((&backend_options.backend).into()), Command::JSON { .. } => None, Command::Serialize { backend, .. } => backend.clone(), Command::CliExtension(_) => None, } } } #[derive_group(Serializers)] #[derive(JsonSchema, ValueEnum, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum ExportBodyKind { Thir, MirBuilt, } #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] #[command( author, version = crate::HAX_VERSION, long_version = concat!("\nversion=", env!("HAX_VERSION"), "\n", "commit=", env!("HAX_GIT_COMMIT_HASH")), name = "hax", about, long_about = None )] pub struct ExtensibleOptions { /// Semi-colon terminated list of arguments to pass to the /// `cargo build` invocation. For example, to apply this /// program on a package `foo`, use `-C -p foo ;`. 
(make sure /// to escape `;` correctly in your shell) #[arg(default_values = Vec::<&str>::new(), short='C', allow_hyphen_values=true, num_args=1.., long="cargo-args", value_terminator=";")] pub cargo_flags: Vec, #[command(subcommand)] pub command: Command, /// `cargo` caching is enable by default, this flag disables it. #[arg(long="disable-cargo-cache", action=clap::builder::ArgAction::SetFalse)] pub force_cargo_build: ForceCargoBuild, /// Apply the command to every local package of the dependency closure. By /// default, the command is only applied to the primary packages (i.e. the /// package(s) of the current directory, or the ones selected with cargo /// options like `-C -p ;`). #[arg(long = "deps")] pub deps: bool, /// Provide a precomputed haxmeta file explicitly. /// Setting this option bypasses rustc and the exporter altogether. #[arg(long)] #[clap(hide = true)] pub haxmeta: Option, /// By default, hax uses `$CARGO_TARGET_DIR/hax` as target folder, /// to avoid recompilation when working both with `cargo hax` and /// `cargo build` (or, e.g. `rust-analyzer`). This option disables /// this behavior. #[arg(long)] pub no_custom_target_directory: bool, /// Diagnostic format. Sets `cargo`'s `--message-format` as well, /// if not present. #[arg(long, default_value = "human")] pub message_format: MessageFormat, /// Enables experimental FullDef format for items exported from the frontend /// in the haxmeta file. #[arg(long, env = "HAX_EXPERIMENTAL_FULL_DEF")] pub experimental_full_def: bool, #[group(flatten)] pub extension: E::Options, } pub type Options = ExtensibleOptions<()>; #[derive_group(Serializers)] #[derive(JsonSchema, ValueEnum, Debug, Clone, Copy, Eq, PartialEq)] pub enum MessageFormat { Human, Json, } impl NormalizePaths for Command { fn normalize_paths(&mut self) { use Command::*; match self { JSON { output_file, .. 
} => output_file.normalize_paths(), _ => (), } } } impl NormalizePaths for Options { fn normalize_paths(&mut self) { self.command.normalize_paths() } } impl From for hax_frontend_exporter_options::Options { fn from(_opts: Options) -> hax_frontend_exporter_options::Options { hax_frontend_exporter_options::Options { inline_anon_consts: true, bounds_options: hax_frontend_exporter_options::BoundsOptions { resolve_destruct: false, prune_sized: true, }, item_ref_use_concrete_impl: false, } } } /// The subset of `Options` the frontend is sensible to. #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct ExporterOptions { pub deps: bool, pub force_cargo_build: ForceCargoBuild, /// When exporting, the driver sets `--cfg hax_backend_{backkend}`, thus we need this information. pub backend: Option, pub body_kinds: Vec, pub experimental_full_def: bool, } #[derive_group(Serializers)] #[derive(JsonSchema, ValueEnum, Debug, Clone, Copy)] pub enum BackendName { Fstar, Coq, Ssprove, Easycrypt, ProVerif, Lean, Rust, GenerateRustEngineNames, Debugger, } impl fmt::Display for BackendName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match self { BackendName::Fstar => "fstar", BackendName::Coq => "coq", BackendName::Ssprove => "ssprove", BackendName::Easycrypt => "easycrypt", BackendName::ProVerif => "proverif", BackendName::Lean => "lean", BackendName::Rust => "rust", BackendName::GenerateRustEngineNames => "generate_rust_engine_names", BackendName::Debugger => "debugger", }; write!(f, "{name}") } } impl From<&Options> for ExporterOptions { fn from(options: &Options) -> Self { ExporterOptions { deps: options.deps, force_cargo_build: options.force_cargo_build.clone(), backend: options.command.backend_name(), body_kinds: options.command.body_kinds(), experimental_full_def: options.experimental_full_def, } } } impl From<&Backend> for BackendName { fn from(backend: &Backend) -> Self { match backend { Backend::Fstar { .. 
} => BackendName::Fstar, Backend::Coq { .. } => BackendName::Coq, Backend::Ssprove { .. } => BackendName::Ssprove, Backend::Easycrypt { .. } => BackendName::Easycrypt, Backend::ProVerif { .. } => BackendName::ProVerif, Backend::Lean { .. } => BackendName::Lean, Backend::Rust { .. } => BackendName::Rust, Backend::GenerateRustEngineNames { .. } => BackendName::GenerateRustEngineNames, Backend::Debugger { .. } => BackendName::Debugger, } } } pub const ENV_VAR_OPTIONS_FRONTEND: &str = "DRIVER_HAX_FRONTEND_OPTS"; pub const ENV_VAR_OPTIONS_FULL: &str = "DRIVER_HAX_FRONTEND_FULL_OPTS"; ================================================ FILE: hax-types/src/diagnostics/message.rs ================================================ use crate::cli_options::Backend; use crate::prelude::*; #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema)] #[repr(u8)] pub enum HaxMessage { Diagnostic { diagnostic: super::Diagnostics, working_dir: Option, } = 254, EngineNotFound { is_opam_setup_correctly: bool, } = 0, ProducedFile { path: PathBuf, wrote: bool, } = 1, HaxEngineFailure { exit_code: i32, } = 2, CargoBuildFailure = 3, WarnExperimentalBackend { backend: Backend<()>, } = 4, ProfilingData(crate::engine_api::ProfilingData) = 5, Stats { errors_per_item: Vec<(hax_frontend_exporter::DefId, usize)>, } = 6, } impl HaxMessage { // https://doc.rust-lang.org/reference/items/enumerations.html#pointer-casting pub fn discriminant(&self) -> u16 { unsafe { *(self as *const Self as *const u16) } } pub fn code(&self) -> String { match self { HaxMessage::Diagnostic { diagnostic, .. 
} => diagnostic.kind.code(), _ => format!("CARGOHAX{:0>4}", self.discriminant()), } } } ================================================ FILE: hax-types/src/diagnostics/mod.rs ================================================ use crate::prelude::*; use colored::Colorize; pub mod message; pub mod report; #[derive_group(Serializers)] #[derive(Debug, Clone, JsonSchema, Eq, PartialEq, Hash)] pub struct Diagnostics { pub kind: Kind, pub span: Vec, pub context: String, pub owner_id: Option, } impl std::fmt::Display for Diagnostics { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self.kind { Kind::Unimplemented { issue_id:_, details } => write!( f, "something is not implemented yet.\n{}", match details { Some(details) => format!("{}", details), _ => "".to_string(), }, ), Kind::UnsupportedMacro { id } => write!( f, "The unexpanded macro {} is not supported by this backend.\nPlease verify the argument you passed to the {} (or {}) option.", id.bold(), "--inline-macro-call".bold(), "-i".bold() ), Kind::UnsafeBlock => write!(f, "Unsafe blocks are not allowed."), Kind::AssertionFailure {details} => write!( f, "Fatal error: something we considered as impossible occurred! {}\nDetails: {}", "Please report this by submitting an issue on GitHub!".bold(), details ), Kind::UnallowedMutRef => write!( f, "The mutation of this {} is not allowed here.", "&mut".bold() ), Kind::ExpectedMutRef => write!( f, "At this position, Hax was expecting an expression of the shape `&mut _`.\nHax forbids `f(x)` (where `f` expects a mutable reference as input) when `x` is not a {}{} or when it is a dereference expression. 
{}", "place expression".bold(), "[1]".bright_black(), "[1]: https://doc.rust-lang.org/reference/expressions.html#place-expressions-and-value-expressions" ), Kind::ClosureMutatesParentBindings {bindings} => write!( f, "The bindings {:?} cannot be mutated here: they don't belong to the closure scope, and this is not allowed.", bindings ), Kind::ArbitraryLHS => write!(f, "Assignation of an arbitrary left-hand side is not supported.\n`lhs = e` is fine only when `lhs` is a combination of local identifiers, field accessors and index accessors."), Kind::AttributeRejected {reason} => write!(f, "Here, this attribute cannot be used: {reason}."), Kind::NonTrivialAndMutFnInput => write!(f, "The support in hax of function with one or more inputs of type `&mut _` is limited.\nOnly trivial patterns are allowed there: `fn f(x: &mut (T, U)) ...` is allowed while `f((x, y): &mut (T, U))` is rejected."), Kind::FStarParseError { fstar_snippet, details: _ } => write!(f, "The following code snippet could not be parsed as valid F*:\n```\n{fstar_snippet}\n```"), Kind::ExplicitRejection { reason , .. } => write!(f, "Explicit rejection by a phase in the Hax engine:\n{}", reason), _ => write!(f, "{:?}", self.kind), }?; write!(f, "\n\n")?; if let Some(issue) = self.kind.issue_number() { write!( f, "This is discussed in issue https://github.com/hacspec/hax/issues/{issue}.\nPlease upvote or comment this issue if you see this error message.\n" )?; } write!( f, "{}", format!( "Note: the error was labeled with context `{}`.\n", self.context ) .bright_black() )?; Ok(()) } } impl Kind { fn issue_number(&self) -> Option { match self { Kind::UnsafeBlock => None, Kind::ExplicitRejection { issue_id, .. } | Kind::Unimplemented { issue_id, .. } => { issue_id.clone() } Kind::AssertionFailure { .. } => None, Kind::UnallowedMutRef => Some(420), Kind::UnsupportedMacro { .. } => None, Kind::ErrorParsingMacroInvocation { .. } => None, Kind::ClosureMutatesParentBindings { .. 
} => Some(1060), Kind::ArbitraryLHS => None, Kind::UnsupportedTupleSize { .. } => None, Kind::ExpectedMutRef => Some(420), Kind::NonTrivialAndMutFnInput => Some(1405), Kind::AttributeRejected { .. } => None, Kind::FStarParseError { .. } => None, Kind::OcamlEngineErrorPayload { .. } => None, } } } #[derive_group(Serializers)] #[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, JsonSchema)] #[repr(u16)] pub enum Kind { /// Unsafe code is not supported UnsafeBlock = 0, /// A feature is not currently implemented Unimplemented { /// Issue on the GitHub repository issue_id: Option, details: Option, } = 1, /// Unknown error // This is useful when doing sanity checks (i.e. one can yield // this error kind for cases that should never happen) AssertionFailure { details: String, } = 2, /// Unallowed mutable reference UnallowedMutRef = 3, /// Unsupported macro invokation UnsupportedMacro { id: String, } = 4, /// Error parsing a macro invocation to a macro treated specifcially by a backend ErrorParsingMacroInvocation { macro_id: String, details: String, } = 5, /// Mutation of bindings living outside a closure scope are not supported ClosureMutatesParentBindings { bindings: Vec, } = 6, /// Assignation of an arbitrary left-hand side is not supported. `lhs = e` is fine only when `lhs` is a combination of local identifiers, field accessors and index accessors. 
ArbitraryLHS = 7, /// A phase explicitely rejected this chunk of code ExplicitRejection { reason: String, issue_id: Option, } = 8, /// A backend doesn't support a tuple size UnsupportedTupleSize { tuple_size: u32, reason: String, } = 9, ExpectedMutRef = 10, /// &mut inputs should be trivial patterns NonTrivialAndMutFnInput = 11, /// An hax attribute (from `hax-lib-macros`) was rejected AttributeRejected { reason: String, } = 12, /// A snippet of F* code could not be parsed FStarParseError { fstar_snippet: String, details: String, } = 13, /// Internal encoding OcamlEngineErrorPayload(String) = 9999, } impl Kind { // https://doc.rust-lang.org/reference/items/enumerations.html#pointer-casting pub fn discriminant(&self) -> u16 { unsafe { *(self as *const Self as *const u16) } } pub fn code(&self) -> String { format!("HAX{:0>4}", self.discriminant()) } } ================================================ FILE: hax-types/src/diagnostics/report.rs ================================================ use super::Diagnostics; use annotate_snippets::*; use miette::SourceOffset; use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; use std::rc::Rc; /// A context for reporting diagnostics #[derive(Clone, Debug, Default)] pub struct ReportCtx { files: HashMap>, seen: HashSet, } /// Translates a line and column position into an absolute offset fn compute_offset(src: &str, line: usize, col: usize) -> usize { SourceOffset::from_location(src, line, col).offset() + 1 } impl ReportCtx { /// Read the contents of a file. The result is cached. fn file_contents<'a>(&'a mut self, path: PathBuf) -> Rc { self.files .entry(path.clone()) .or_insert_with(|| { let s = std::fs::read_to_string(&path).expect(&format!("Unable to read file {path:?}")); Rc::new(s) }) .clone() } /// Check if `diagnostic` have been seen already, and mark `diagnostic` as seen. 
pub fn seen_already(&mut self, diagnostic: Diagnostics) -> bool { !self.seen.insert(diagnostic) } } impl Diagnostics { /// Converts a `Diagnostics` to a `annotate_snippets::Message`, /// which can be accessed via `then`, a callback function. pub fn with_message FnMut(Message<'a>) -> R>( &self, report_ctx: &mut ReportCtx, working_dir: Option<&Path>, level: Level, mut then: F, ) -> R { let mut snippets_data = vec![]; for span in &self.span { if let Some(path) = span.filename.to_path() { let source = { let mut path = path.to_path_buf(); if let Some(working_dir) = working_dir && path.is_relative() { path = working_dir.join(&path); }; report_ctx.file_contents(path) }; let start = compute_offset(&source, span.lo.line, span.lo.col); let end = compute_offset(&source, span.hi.line, span.hi.col); let origin = format!("{}", path.display()); snippets_data.push((source, origin, start..end)); }; } let title = format!("[{}] {self}", self.kind.code()); let message = level .title(&title) .snippets(snippets_data.iter().map(|(source, origin, range)| { Snippet::source(source) .line_start(1) .origin(&origin) .fold(true) .annotation(level.span(range.clone())) })); then(message) } } ================================================ FILE: hax-types/src/driver_api.rs ================================================ use crate::prelude::*; pub const HAX_DRIVER_STDERR_PREFIX: &str = "::hax-driver::"; #[derive_group(Serializers)] #[derive(Debug, Clone)] pub struct EmitHaxMetaMessage { pub working_dir: Option, pub manifest_dir: Option, pub path: PathBuf, } #[derive_group(Serializers)] #[derive(Debug, Clone)] pub enum HaxDriverMessage { EmitHaxMeta(EmitHaxMetaMessage), } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum Items { FullDef(Vec>), Legacy(Vec>), } #[derive_group(Serializers)] #[derive(Debug, Clone)] pub struct HaxMeta { pub crate_name: String, pub cg_metadata: String, pub externs: Vec, pub items: Items, pub impl_infos: Vec<( hax_frontend_exporter::DefId, 
hax_frontend_exporter::ImplInfos, )>, pub def_ids: Vec, pub comments: Vec<(hax_frontend_exporter::Span, String)>, pub hax_version: String, } use hax_frontend_exporter::id_table; impl HaxMeta where Body: serde::Serialize + for<'de> serde::Deserialize<'de>, { #[tracing::instrument(level = "trace", skip(self, write, id_table))] pub fn write(self, write: &mut impl std::io::Write, id_table: id_table::Table) { let mut write = zstd::stream::write::Encoder::new(write, 0).unwrap(); id_table::WithTable::run(id_table, self, |with_table| { serde_brief::to_writer(with_table, &mut write).unwrap(); write.finish().unwrap(); }) } #[tracing::instrument(level = "trace", skip(reader))] pub fn read(reader: impl std::io::Read) -> (Self, id_table::Table) { let reader = zstd::stream::read::Decoder::new(reader).unwrap(); let reader = std::io::BufReader::new(reader); let haxmeta = id_table::WithTable::>::destruct( serde_brief::from_reader(reader).unwrap(), ); if haxmeta.0.hax_version != crate::HAX_VERSION { let version = haxmeta.0.hax_version; let expected = crate::HAX_VERSION; panic!( "An invariant was broken: `*.haxmeta` was produced by hax version `{version}` while the current version of hax is `{expected}`. Please report this to https://github.com/hacspec/hax/issues." ); }; haxmeta } } #[macro_export] macro_rules! 
with_kind_type { ($kind:expr, <$t:ident>|| $body:expr) => {{ mod from { pub use hax_types::cli_options::ExportBodyKind::{MirBuilt as MB, Thir as T}; } mod to { pub type T = hax_frontend_exporter::ThirBody; pub type MB = hax_frontend_exporter::MirBody; } let mut kind: Vec<::hax_types::cli_options::ExportBodyKind> = $kind; kind.sort(); kind.dedup(); match kind.as_slice() { [from::MB] => { type $t = to::MB; $body } [from::T] => { type $t = to::T; $body } [from::T, from::MB] => { type $t = (to::MB, to::T); $body } [] => { type $t = (); $body } _ => panic!("Unsupported kind {:#?}", kind), } }}; } pub use with_kind_type; ================================================ FILE: hax-types/src/engine_api.rs ================================================ use crate::cli_options::*; use crate::prelude::*; type ThirBody = hax_frontend_exporter::ThirBody; #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct EngineOptions { pub hax_version: String, pub backend: BackendOptions<()>, pub input: crate::driver_api::Items, pub impl_infos: Vec<( hax_frontend_exporter::DefId, hax_frontend_exporter::ImplInfos, )>, } #[derive_group(Serializers)] #[allow(non_snake_case)] #[derive(JsonSchema, Debug, Clone)] pub struct SourceMap { pub mappings: String, pub sourceRoot: String, pub sources: Vec, pub sourcesContent: Vec>, pub names: Vec, pub version: u8, pub file: String, } impl SourceMap { pub fn inline_sources_content(&mut self) { self.sourcesContent = vec![]; for source in &self.sources { let path = if self.sourceRoot.is_empty() { source.clone() } else { format!("{}/{}", &self.sourceRoot, source) }; let contents = Some(std::fs::read_to_string(path).unwrap()); self.sourcesContent.push(contents); } } } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct File { pub path: String, pub contents: String, pub sourcemap: Option, } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct Output { pub diagnostics: Vec, pub files: Vec, pub 
debug_json: Vec, } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct ProfilingData { /// What context are we profiling? pub context: String, /// How long this took? pub time_ns: u64, /// How much memory this took? This is using OCaml's /// `Gc.minor_words`, and is probably not very precise. pub memory: u64, /// How many things were processed? (often, this is the number of /// items a phase processes) pub quantity: u32, /// Did the action errored? This is important since a failed /// action might have exited very early, making the numbers /// unusable. pub errored: bool, } pub mod protocol { use super::*; #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum FromEngine { Diagnostic(crate::diagnostics::Diagnostics), File(File), PrettyPrintDiagnostic(crate::diagnostics::Diagnostics), PrettyPrintRust(String), DebugString(String), ProfilingData(ProfilingData), /// Declares a list of items that will be processed by the engine ItemProcessed(Vec), Exit, Ping, } #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub enum ToEngine { PrettyPrintedDiagnostic(String), PrettyPrintedRust(Result), Pong, } impl FromEngine { pub fn requires_response(&self) -> bool { matches!( self, Self::PrettyPrintRust { .. } | Self::Ping { .. } | Self::PrettyPrintDiagnostic { .. } ) } } } // This is located here for dependency reason, but this is not related // to the engine (yet?). #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct WithDefIds { pub def_ids: Vec, pub impl_infos: Vec<( hax_frontend_exporter::DefId, hax_frontend_exporter::ImplInfos, )>, pub items: crate::driver_api::Items, pub comments: Vec<(hax_frontend_exporter::Span, String)>, } ================================================ FILE: hax-types/src/lib.rs ================================================ #![cfg_attr(feature = "rustc", feature(rustc_private))] //! This crate contains the type definitions that are used to communicate between: //! 
- the command line (the `cargo-hax` binary);
//! - the custom rustc driver;
//! - the hax engine (the `hax-engine` binary).
//!
//! Those three components send and receive messages in JSON or CBOR on
//! stdin and stdout.

pub(crate) mod prelude;

/// The CLI options for `cargo-hax`. The types defined in this module
/// are also used by the driver and the engine.
pub mod cli_options;

/// Type to represent errors, mainly in `hax-engine`. The engine
/// doesn't do any reporting itself: it only sends JSON to its stdout,
/// and `cargo-hax` takes care of reporting everything in a rustc
/// style.
pub mod diagnostics;

/// The types used to communicate between `cargo-hax` and the custom
/// driver.
pub mod driver_api;

/// The types used to communicate between `cargo-hax` and
/// `hax-engine`.
pub mod engine_api;

/// Compile-time version of hax
pub const HAX_VERSION: &str = env!("HAX_VERSION");


================================================
FILE: hax-types/src/prelude.rs
================================================
pub(crate) use hax_adt_into::derive_group;
pub use schemars::JsonSchema;
pub use std::path::{Path, PathBuf};


================================================
FILE: justfile
================================================
@_default:
    just --list

# Build Rust and OCaml parts and install binaries in PATH. To build
# only OCaml parts or only Rust parts, set target to `rust` or
# `ocaml`.
@build target='rust+ocaml':
    ./.utils/rebuild.sh {{target}}

alias b := build

# alias for `build rust`
@rust:
    just build rust

# alias for `build ocaml`
@ocaml:
    just build ocaml

# `cargo expand` a crate, but sets flags and crate attributes so that the expansion is exactly what hax receives. This is useful to debug hax macros.
[no-cd]
expand *FLAGS:
    RUSTFLAGS='-Zcrate-attr=register_tool(_hax) -Zcrate-attr=feature(register_tool) --cfg hax_compilation --cfg _hax --cfg hax --cfg hax_backend_fstar --cfg hax' \
    cargo \
    $([[ "$(cargo --version)" == *nightly* ]] || echo "+nightly") \
    expand {{FLAGS}}

# Show debug JSON emitted by the Rust engine
@debug-json N: (_ensure_command_in_path "jless" "jless (https://jless.io/)") (_ensure_command_in_path "jq" "jq (https://jqlang.github.io/jq/)")
    cat /tmp/hax-ast-debug.json | jq -s '.[{{N}}]' | jless

# Show the generated module `concrete_ident_generated.ml`, that contains all the Rust names the engine knows about. Those names are declared in the `./engine/names` crate.
@list-names:
    hax-engine-names-extract | sed '/include .val/,$d' | just _pager

# Show the Rust to OCaml generated types available to the engine.
@list-types:
    just _ensure_command_in_path ocamlformat ocamlformat
    cd engine && dune describe pp lib/types.ml \
    | sed -e '1,/open ParseError/ d' \
    | sed '/let rec pp_/,$d' \
    | ocamlformat --impl - \
    | just _pager

# Show the OCaml module `Generated_generic_printer_base`
@show-generated-printer-ml:
    just _ensure_command_in_path ocamlformat ocamlformat
    cd engine && dune describe pp lib/generated_generic_printer_base.ml \
    | ocamlformat --impl - \
    | just _pager

# Regenerate names in the Rust engine. Writes to `rust-engine/src/ast/identifiers/global_id/generated.rs`.
regenerate-names:
    #!/usr/bin/env bash
    OUTPUT_FILE=rust-engine/src/ast/identifiers/global_id/generated.rs
    cargo hax -C --manifest-path engine/names/Cargo.toml \; into --output-dir $(dirname -- $OUTPUT_FILE) generate-rust-engine-names
    rustfmt "$OUTPUT_FILE"

# Format all the code
fmt:
    cargo fmt
    cd engine && dune fmt

# Run hax tests: each test crate has a snapshot, so that we track changes in extracted code. If a snapshot changed, please review them with `just test-review`.
test *FLAGS: cargo test --test toolchain {{FLAGS}} _test *FLAGS: CARGO_TESTS_ASSUME_BUILT=1 cargo test --test toolchain {{FLAGS}} # Review snapshots test-review: (_ensure_command_in_path "cargo-insta" "Insta (https://insta.rs)") cargo insta review # Serve documentation docs: (_ensure_command_in_path "mkdocs" "mkdocs (https://www.mkdocs.org/)") mkdocs serve # Check the coherency between issues labeled `marked-unimplemented` on GitHub and issues mentionned in the engine in the `Unimplemented {issue_id: ...}` errors. @check-issues: just _ensure_command_in_path jq "jq (https://jqlang.github.io/jq/)" just _ensure_command_in_path gh "GitHub CLI (https://cli.github.com/)" just _ensure_command_in_path rg "ripgrep (https://github.com/BurntSushi/ripgrep)" just _ensure_command_in_path sd "sd (https://github.com/chmln/sd)" diff -U0 \ <(gh issue -R hacspec/hax list --label 'marked-unimplemented' --json number,closed -L 200 \ | jq '.[] | select(.closed | not) | .number' | sort -u) \ <(rg 'issue_id:(\d+)' -Ior '$1' | sort -u) \ | rg '^[+-]\d' \ | sd '[-](\d+)' '#$1\t is labeled `marked-unimplemented`, but was not found in the code' \ | sd '[+](\d+)' '#$1\t is *not* labeled `marked-unimplemented` or is closed' # Check that the licenses of every crate and every package are compliant with `deny.toml` check-licenses: #!/usr/bin/env bash just _ensure_command_in_path cargo-deny "cargo-deny (https://embarkstudios.github.io/cargo-deny/)" just _ensure_command_in_path toml2json "toml2json (https://github.com/woodruffw/toml2json)" echo "> Check licenses for Rust" cargo deny check licenses cd engine echo "> Check licenses for OCaml" # initialize opam if needed opam env >& /dev/null || opam init --no # pin package `hax-engine` if needed opam list --required-by=hax-engine --column=name,license: -s >& /dev/null || opam pin . 
--yes # Check that every pacakge matches licenses of `deny.toml` if opam list --required-by=hax-engine --column=name,license: -s \ | grep -Pvi $(toml2json ../deny.toml| jq '.licenses.allow | join("|")'); then echo "Some licenses were non compliant to our policy (see `deny.toml`)" else echo "licenses ok" fi _ensure_command_in_path BINARY NAME: #!/usr/bin/env bash command -v {{BINARY}} &> /dev/null || { >&2 echo -e "\033[0;31mSorry, the binary \033[1m{{BINARY}}\033[0m\033[0;31m is required for this command.\033[0m" >&2 echo -e " \033[0;31m→ please install \033[1m{{NAME}}\033[0m" >&2 echo "" exit 1 } _pager: #!/usr/bin/env bash if command -v bat &> /dev/null; then bat -l ml else less fi # Serve the book [private] @book: echo "We moved out from mdbook: please run 'just docs'" exit 1 # Runs hax twice: once with the Rust import thir, once with the OCaml one. # Then it compares both. diff-thir-importers DIR: #!/usr/bin/env bash # Ensures hax is built just b # Utils function readJSON() { cat proofs/debugger/extraction/ast.json; } BASE="$PWD" OUT="$BASE/diff-thir-importers" # Remove previous results (if any) rm -rf "$OUT" cd {{DIR}} cargo hax json -o old-thir.json cargo hax --experimental-full-def json -o thir.json cargo hax --experimental-full-def into debugger readJSON > rust-import-thir-ast.json cargo hax into debugger readJSON > ocaml-import-thir-ast.json mkdir "$OUT" mv thir.json old-thir.json *ast.json "$OUT" cd "$OUT" diff ocaml-import-thir-ast.json rust-import-thir-ast.json > diff.json ================================================ FILE: mkdocs.yml ================================================ site_name: hax repo_url: https://github.com/cryspen/hax site_url: https://hax.cryspen.com site_author: Cryspen repo_name: GitHub theme: name: material logo: static/img/logo.png favicon: static/img/favicon.png custom_dir: docs/overrides palette: primary: white features: - content.code.copy - content.code.select - content.code.annotate - content.footnote.tooltips - 
navigation.sections - navigation.path - navigation.tabs - header.autohide - navigation.instant - navigation.indexes icon: repo: fontawesome/brands/github tag: draft: material/sticker-outline accepted: material/sticker-check-outline proposed: material/sticker-plus-outline deprecated: material/sticker-minus-outline superseded: material/sticker-alert-outline rejected: material/sticker-remove-outline extra_javascript: - javascripts/hax_playground.js - javascripts/ansi_up.js - javascripts/fstar.js - javascripts/lz-string.js extra_css: - stylesheets/tags-colors.css - stylesheets/hax_playground.css - stylesheets/logo.css extra: tags: Draft: draft Accepted: accepted Proposed: proposed Superseded: superseded Deprecated: deprecated Rejected: rejected markdown_extensions: - attr_list - tables - md_in_html - admonition - footnotes - pymdownx.blocks.html - pymdownx.details - pymdownx.superfences - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.keys - pymdownx.caret - pymdownx.mark - pymdownx.tilde - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.arithmatex: generic: true - pymdownx.tasklist: custom_checkbox: true - pymdownx.emoji: emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_generator: !!python/name:material.extensions.emoji.to_svg - pymdownx.superfences: custom_fences: - name: mermaid class: mermaid format: !!python/name:pymdownx.superfences.fence_code_format plugins: - glightbox - search - blog - awesome-nav - mkdocs-nav-weight - tags: tags_file: RFCs/index.md ================================================ FILE: rust-engine/Cargo.toml ================================================ [package] name = "hax-rust-engine" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true description = "The engine of the hax toolchain." 
[dependencies] hax-frontend-exporter.workspace = true hax-rust-engine-macros.workspace = true hax-lib-macros-types = { workspace = true, features = ["schemars"] } serde_json = { workspace = true, features = ["unbounded_depth"] } serde = { workspace = true, features = ["derive"] } hax-types.workspace = true schemars.workspace = true itertools.workspace = true serde_stacker = "0.1.12" pretty = "0.12" derive_generic_visitor = "0.3.0" pastey = "0.1.0" camino = "1.1.11" axum = { version = "0.8.7", features = ["macros"] } tokio = "1.48.0" [dev-dependencies] hax-lib.workspace = true ================================================ FILE: rust-engine/README.md ================================================ # Hax Rust Engine This crate implements an alternative engine for Rust: the main one is implemented in OCaml and is located in `/engine`. This Rust engine is designed so that it can re-use some bits of the OCaml engine. The plan is to slowly deprecate the OCaml engine, rewrite most of its components and drop it. ## Usage The Rust engine supports only one backend for now: `Lean`. To run it, use the following command: ```bash cargo hax into lean ``` ================================================ FILE: rust-engine/macros/Cargo.toml ================================================ [package] name = "hax-rust-engine-macros" description = "This crate provides helpers procedural macros for the `hax-rust-engine`" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true [lib] proc-macro = true [dependencies] syn = { version = "2.0", features = ["full", "visit-mut"] } proc-macro2.workspace = true quote.workspace = true proc-macro-crate = "3.3.0" ================================================ FILE: rust-engine/macros/src/lib.rs ================================================ //! Helper crate providing procedural macros for the Rust engine of hax. //! //! 
Currently it provides the following.
//! - Macros for deriving groups of traits.
//!   Most of the types from the AST have the same bounds, so that helps deduplicating a lot.
//!   Also, the fact those derive groups are named is helpful: for instance for code generation
//!   a simple `use derive_group_for_ast_base as derive_group_for_ast` can change what is to be
//!   derived without any attribute manipulation.

use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::quote;
use syn::{Token, parse_macro_input, visit_mut::VisitMut};
use utils::*;

mod partial_application;
mod replace;
mod struct_fields;

/// Adds a new field with a fresh name to an existing `struct` type definition.
/// The new field contains error handling and span information to be used with a
/// visitor. This macro will also derive implementations of
/// `hax_rust_engine::ast::visitors::wrappers::VisitorWithErrors` and
/// `hax_rust_engine::ast::HasSpan` for the struct.
#[proc_macro_attribute]
pub fn setup_error_handling_struct(_attr: TokenStream, item: TokenStream) -> TokenStream {
    struct_fields::setup_error_handling_struct(_attr, item)
}

/// Adds a new field with a fresh name to an existing `struct` type definition.
/// The new field contains span information to be used with a
/// printer. This macro will also derive implementations of
/// `hax_rust_engine::printer::pretty_ast::HasContextualSpan` for the struct.
#[proc_macro_attribute] pub fn setup_printer_struct(_attr: TokenStream, item: TokenStream) -> TokenStream { struct_fields::setup_printer_struct(_attr, item) } mod utils { use super::*; /// Get the name of this macro crate (`hax_rust_engine_macros`) pub(crate) fn crate_name() -> Ident { let krate = module_path!().split("::").next().unwrap(); Ident::new(krate, Span::call_site()) } /// Prepends a `proc_macro2::TokenStream` to a `TokenStream` pub(crate) fn prepend(item: TokenStream, prefix: proc_macro2::TokenStream) -> TokenStream { let item: proc_macro2::TokenStream = item.into(); quote! { #prefix #item } .into() } /// Add a derive attribute to `item` pub(crate) fn add_derive(item: TokenStream, payload: proc_macro2::TokenStream) -> TokenStream { prepend(item, quote! {#[derive(#payload)]}) } /// Find the name of the crate `hax-rust-engine`. This can be either the /// keyword `crate` or the ident `hax_rust_engine`, depending on the context /// in which the macros using this function are called. pub(crate) fn rust_engine_krate_name() -> proc_macro2::TokenStream { use proc_macro_crate::{FoundCrate, crate_name}; match crate_name("hax-rust-engine").unwrap() { FoundCrate::Itself => quote!(crate), FoundCrate::Name(name) => { let ident = Ident::new(&name, Span::call_site()); quote!( #ident ) } } } } /// Derive the common derives for the hax engine AST. /// This is a equivalent to `derive_group_for_ast_serialization` and `derive_group_for_ast_base`. #[proc_macro_attribute] pub fn derive_group_for_ast(_attr: TokenStream, item: TokenStream) -> TokenStream { let krate = crate_name(); prepend( item, quote! { #[#krate::derive_group_for_ast_base] #[#krate::derive_group_for_ast_serialization] }, ) } /// Derive the necessary (de)serialization related traits for nodes in the AST. #[proc_macro_attribute] pub fn derive_group_for_ast_serialization(_attr: TokenStream, item: TokenStream) -> TokenStream { add_derive( item, quote! 
{::serde::Deserialize, ::serde::Serialize, ::schemars::JsonSchema}, ) } /// Derive the basic necessary traits for nodes in the AST. #[proc_macro_attribute] pub fn derive_group_for_ast_base(_attr: TokenStream, item: TokenStream) -> TokenStream { add_derive( item, quote! {Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord, derive_generic_visitor::Drive, derive_generic_visitor::DriveMut}, ) } #[proc_macro_attribute] /// Replaces all occurrences of an identifier within the attached item. /// /// For example, `#[replace(Name => A, B, C)]` will replace `Name` by `A, B, C` /// in the item the proc-macro is applied on. /// /// The special case `#[replace(Name => include(VisitableAstNodes))]` will /// expand to a list of visitable AST nodes. This is useful in practice, as this /// list is often repeated. pub fn replace(attr: TokenStream, item: TokenStream) -> TokenStream { replace::replace(attr, item) } /// An attribute procedural macro that creates a new `macro_rules!` definition /// by partially applying an existing macro or function with a given token stream. /// /// Usage: /// ```rust,ignore /// #[partial_apply(original_macro!, my_expression,)] /// macro_rules! new_proxy_macro { /// // This content is ignored and replaced by the proc macro. /// } /// ``` #[proc_macro_attribute] pub fn partial_apply(attr: TokenStream, item: TokenStream) -> TokenStream { partial_application::partial_apply(attr, item) } /// Prepend the body any associated function with the given attribute payload. 
/// ```rust,ignore /// #[prepend_associated_functions_with(println!("self is {self}");)] /// impl Foo { /// fn f(self) {} /// } /// ``` /// /// Expands to: /// ```rust,ignore /// impl Foo { /// fn f(self) { /// println!("self is {self}"); /// } /// } /// ``` #[proc_macro_attribute] pub fn prepend_associated_functions_with(attr: TokenStream, item: TokenStream) -> TokenStream { struct Visitor { prefix: syn::Expr, } impl VisitMut for Visitor { fn visit_item_impl_mut(&mut self, impl_block: &mut syn::ItemImpl) { for item in &mut impl_block.items { let syn::ImplItem::Fn(impl_item_fn) = item else { continue; }; impl_item_fn.block.stmts.insert( 0, syn::Stmt::Expr(self.prefix.clone(), Some(Token![;](Span::mixed_site()))), ); } } } let mut item: syn::Item = parse_macro_input!(item); let prefix = parse_macro_input!(attr); Visitor { prefix }.visit_item_mut(&mut item); quote! {#item}.into() } ================================================ FILE: rust-engine/macros/src/partial_application.rs ================================================ use proc_macro::TokenStream; use quote::quote; use syn::{ExprPath, Token, parse_macro_input}; struct PartialApplyArgs { ident: ExprPath, bang: Option, prefix: proc_macro2::TokenStream, } impl syn::parse::Parse for PartialApplyArgs { fn parse(input: syn::parse::ParseStream) -> syn::Result { let ident = input.parse()?; let bang = input.parse()?; input.parse::()?; Ok(PartialApplyArgs { ident, bang, prefix: input.parse()?, }) } } /// See [`super::partial_apply`]. pub(crate) fn partial_apply(attr: TokenStream, item: TokenStream) -> TokenStream { let PartialApplyArgs { ident, bang, prefix, } = parse_macro_input!(attr as PartialApplyArgs); let input_macro = parse_macro_input!(item as syn::ItemMacro); let macro_name = input_macro.ident; let attrs = input_macro.attrs; quote! { #(#attrs)* macro_rules! 
#macro_name { ($($rest:tt)*) => { #ident #bang(#prefix $($rest)*) }; } } .into() } ================================================ FILE: rust-engine/macros/src/replace.rs ================================================ extern crate proc_macro; use proc_macro::TokenStream; use proc_macro2::{Group, TokenStream as TokenStream2, TokenTree}; use quote::quote; use syn::parse::{Parse, ParseStream, Result}; use syn::{Ident, Token, parse_macro_input}; mod kw { syn::custom_keyword!(include); } fn replace_in_stream( stream: TokenStream2, target: &Ident, replacement: &TokenStream2, ) -> TokenStream2 { stream .into_iter() .flat_map(|tt| match tt { TokenTree::Ident(ident) if ident == *target => { replacement.clone().into_iter().collect() } TokenTree::Group(group) => { let new_stream = replace_in_stream(group.stream(), target, replacement); let mut new_group = Group::new(group.delimiter(), new_stream); new_group.set_span(group.span()); vec![TokenTree::Group(new_group)] } other => vec![other], }) .collect() } // The arguments that the `replace` proc-macro can take struct AttributeArgs { target: Ident, replacement: TokenStream2, } impl Parse for AttributeArgs { fn parse(input: ParseStream) -> Result { let target: Ident = input.parse()?; input.parse::]>()?; let include_clause = |input: ParseStream| -> Result { input.parse::()?; let content; syn::parenthesized!(content in input); content.parse() }(input) .ok(); Ok(AttributeArgs { target, replacement: match include_clause { Some(clause) => match clause.to_string().as_str() { "VisitableAstNodes" => quote! 
{ Expr, Pat, ExprKind, PatKind, Ty, TyKind, Metadata, Literal, LocalId, Lhs, Symbol, LoopKind, SafetyKind, Quote, SpannedTy, BindingMode, PrimitiveTy, Region, ImplExpr, IntKind, FloatKind, GenericValue, Arm, LoopState, ControlFlowKind, DynTraitGoal, Attribute, QuoteContent, BorrowKind, TraitGoal, ImplExprKind, IntSize, Signedness, Guard, AttributeKind, GuardKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, ItemQuoteOrigin, ItemQuoteOriginKind, ItemQuoteOriginPosition, GenericParamKind, ImplIdent, ProjectionPredicate, GenericParam, Generics, DocCommentKind, Param, Variant, ItemKind, Item, GenericConstraint, ErrorNode, Module, ResugaredExprKind, ResugaredTyKind, ResugaredPatKind, ResugaredImplItemKind, ResugaredTraitItemKind, ResugaredItemKind }.into(), _ => { return Err(syn::Error::new_spanned( clause, format!("This is not a recognized include pragma."), )); } }, None => input.parse::()?, }, }) } } pub fn replace(attr: TokenStream, item: TokenStream) -> TokenStream { let args = parse_macro_input!(attr as AttributeArgs); let item_stream: TokenStream2 = item.into(); replace_in_stream(item_stream, &args.target, &args.replacement).into() } ================================================ FILE: rust-engine/macros/src/struct_fields.rs ================================================ use crate::utils::*; use proc_macro::TokenStream; use proc_macro2::{Group, Ident, Span}; use quote::{ToTokens, quote}; use syn::{ Field, FieldsUnnamed, Token, parse_macro_input, parse_quote, punctuated::Punctuated, token::Paren, }; /// Adds a new field `extra_field_name` of type `extra_field_type` to an existing `struct` type definition. /// `extra_field_name` is just a name hint, if a field with this name exists already, a different name will be picked. /// Returns the actual name or `_N` (in the case of a tuple struct). 
fn add_field_to_item_struct( item: &mut syn::ItemStruct, extra_field_name: &str, extra_field_type: syn::Type, ) -> proc_macro2::TokenStream { // Deal with the case of unit structs. if let fields @ syn::Fields::Unit = &mut item.fields { let span = Group::new(proc_macro2::Delimiter::Brace, fields.to_token_stream()).delim_span(); *fields = syn::Fields::Unnamed(FieldsUnnamed { paren_token: Paren { span }, unnamed: Punctuated::default(), }) } /// Computes a fresh identifier given a list of existing identifiers. fn fresh_ident(base: &str, existing: &[Ident]) -> Ident { let existing: std::collections::HashSet<_> = existing.iter().map(|id| id.to_string()).collect(); (0..) .map(|i| { if i == 0 { base.to_string() } else { format!("{}{}", base, i) } }) .find(|name| !existing.contains(name)) .map(|name| Ident::new(&name, Span::call_site())) .expect("should always find a fresh identifier") } // Collect fields, disregarding their kind (are they named or not) let (fields, named) = match &mut item.fields { syn::Fields::Named(fields_named) => (&mut fields_named.named, true), syn::Fields::Unnamed(fields_unnamed) => (&mut fields_unnamed.unnamed, false), syn::Fields::Unit => unreachable!("Unit structs were dealt with."), }; let existing_names = fields .iter() .flat_map(|f| &f.ident) .cloned() .collect::>(); let (extra_field_ident, extra_field_ident_ts) = if named { let ident = fresh_ident(extra_field_name, &existing_names); (Some(ident.clone()), ident.to_token_stream()) } else { ( None, syn::LitInt::new(&format!("{}", fields.len()), Span::call_site()).to_token_stream(), ) }; fields.push(Field { attrs: vec![], vis: syn::Visibility::Inherited, mutability: syn::FieldMutability::None, ident: extra_field_ident, colon_token: named.then_some(Token![:](Span::call_site())), ty: extra_field_type, }); extra_field_ident_ts } /// This function is documented in [`crate::setup_error_handling_struct`]. 
pub(crate) fn setup_error_handling_struct(_attr: TokenStream, item: TokenStream) -> TokenStream { let mut item: syn::ItemStruct = parse_macro_input!(item); let krate = rust_engine_krate_name(); let extra_field_ident_ts = add_field_to_item_struct( &mut item, "error_handling_state", parse_quote! {#krate::ast::visitors::wrappers::ErrorHandlingState}, ); let struct_name = &item.ident; let generics = &item.generics; quote! { #item impl #generics #krate::ast::HasSpan for #struct_name #generics { fn span(&self) -> #krate::ast::span::Span { self.#extra_field_ident_ts.0.clone() } fn span_mut(&mut self) -> &mut #krate::ast::span::Span { &mut self.#extra_field_ident_ts.0 } } impl #generics #krate::ast::visitors::wrappers::VisitorWithErrors for #struct_name #generics { fn error_vault(&mut self) -> &mut #krate::ast::visitors::wrappers::ErrorVault { &mut self.#extra_field_ident_ts.1 } } } .into() } /// This function is documented in [`crate::setup_printer_struct`]. pub(crate) fn setup_printer_struct(_attr: TokenStream, item: TokenStream) -> TokenStream { let mut item: syn::ItemStruct = parse_macro_input!(item); let krate = rust_engine_krate_name(); let extra_contextual_span_field_ident_ts = add_field_to_item_struct( &mut item, "contextual_span", parse_quote! {Option<#krate::ast::span::Span>}, ); let extra_linked_item_graph_field_ident_ts = add_field_to_item_struct( &mut item, "linked_item_graph", parse_quote! {::std::rc::Rc<#krate::attributes::LinkedItemGraph>}, ); let struct_name = &item.ident; let generics = &item.generics; quote! 
{ #item impl #generics #krate::printer::pretty_ast::HasContextualSpan for #struct_name #generics { fn span(&self) -> Option<#krate::ast::span::Span> { self.#extra_contextual_span_field_ident_ts.clone() } fn with_span(&self, span: #krate::ast::span::Span) -> Self { let mut printer = self.clone(); printer.#extra_contextual_span_field_ident_ts = Some(span); printer } } impl #generics #krate::printer::HasLinkedItemGraph for #struct_name #generics { fn linked_item_graph(&self) -> &#krate::attributes::LinkedItemGraph { &self.#extra_linked_item_graph_field_ident_ts } fn with_linked_item_graph(mut self, graph: ::std::rc::Rc<#krate::attributes::LinkedItemGraph>) -> Self { self.#extra_linked_item_graph_field_ident_ts = graph; self } } } .into() } ================================================ FILE: rust-engine/src/ast/diagnostics.rs ================================================ //! Diagnostic types used to represent and propagate errors (or warnings, notes, //! etc.) within the AST. //! //! This module is used to attach semantic or translation errors to AST nodes. use crate::ast::*; use hax_rust_engine_macros::*; pub use hax_types::diagnostics::Kind as DiagnosticInfoKind; /// Error diagnostic #[derive_group_for_ast] pub struct Diagnostic { node: Box, info: DiagnosticInfo, } /// Error description and location #[derive_group_for_ast] #[must_use] pub struct DiagnosticInfo { /// Diagnostic context pub context: Context, /// Location in the source code pub span: Span, /// Error type pub kind: DiagnosticInfoKind, } impl DiagnosticInfo { /// Emits the diagnostic information. 
pub fn emit(&self) { crate::hax_io::write(&hax_types::engine_api::protocol::FromEngine::Diagnostic( hax_types::diagnostics::Diagnostics { kind: self.kind.clone(), span: self.span.as_frontend_spans().to_vec(), context: format!("{}", self.context), owner_id: None, }, )) } } impl Diagnostic { /// Get diagnostic information pub fn info(&self) -> &DiagnosticInfo { &self.info } /// Get diagnostic node of origin pub fn node(&self) -> &Fragment { &self.node } /// Report an error pub fn new(node: impl Into, info: DiagnosticInfo) -> Self { let node = node.into(); info.emit(); Self { node: Box::new(node), info, } } } /// Context of an error #[derive_group_for_ast] pub enum Context { /// Error during import from THIR Import, /// Error during the projection from idenitfiers to views NameView, /// Error in a printer Printer(String), /// Error in an engine phase Phase(String), /// Debugger Debugger, /// Unknown Unknown, } impl std::fmt::Display for Context { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Context::Import => write!(f, "Importer"), Context::NameView => write!(f, "Name rendering"), Context::Printer(p) => write!(f, "{p} Printer"), Context::Phase(p) => write!(f, "Engine phase ({p})"), Context::Debugger => write!(f, "Debugger"), Context::Unknown => write!(f, "Unknown"), } } } ================================================ FILE: rust-engine/src/ast/fragment.rs ================================================ //! Enumeration types of any possible fragment of AST (`Fragment` / `FragmentRef`). //! //! Many components (diagnostics, logging, printers) want to refer to “some AST //! node” without knowing its concrete type. This module provides: //! - [`Fragment`]: an **owned** enum covering core AST node types. //! - [`FragmentRef`]: a **borrowed** counterpart. //! //! These are handy when implementing generic facilities such as error reporters, //! debugging helpers, or pretty-printers that need to branch on “what kind of //! 
//! node is this?” at runtime.
//!
//! ## Notes
//! - Both enums are mechanically generated to stay in sync with the canonical
//!   AST types. If you add a new core AST node, update the macro invocation at
//!   the bottom of this file so `Fragment`/`FragmentRef` learn about it.
//! - The [`Unknown`] variant exists as a last-resort placeholder when a value
//!   cannot be represented by a known variant. Prefer concrete variants when
//!   possible.
use crate::ast::*;
/// The `mk!` macro takes a flat list of AST type identifiers and expands to
/// two enums:
/// - `Fragment` with owned variants (`Foo(Foo)`), and
/// - `FragmentRef<'a>` with borrowed variants (`Foo(&'a Foo)`).
///
/// The generated enums also implement the obvious `From` conversions, making
/// it ergonomic to wrap concrete AST values as fragments.
macro_rules! mk {
    // Leaf arm: `Span` has no visitable children, so the visitor does not recurse.
    (@visit_inner_call, Span, $self:ident, $x:expr) => {::std::ops::ControlFlow::Continue(())};
    // NOTE(review): "GloablId" looks like a typo for `GlobalId`. As written this
    // arm can never match the `GlobalId` argument passed in the invocation at
    // the bottom of the file, so `GlobalId` falls through to the generic
    // recursion arm below — confirm whether that is intended.
    (@visit_inner_call, GloablId, $self:ident, $x:expr) => {::std::ops::ControlFlow::Continue(())};
    // Generic arm: every other node type recurses into its children.
    (@visit_inner_call, $ty:ty, $self:ident, $x:expr) => { $self.visit_inner($x) };
    ($($ty:ident),*) => {
        #[derive_group_for_ast]
        #[derive(Copy)]
        /// Type identifiers for fragments
        pub enum FragmentTypeId {
            $(
                #[doc = concat!("An identifier for the type [`", stringify!($ty), "`].")]
                $ty,
            )*
        }
        // Sealing module: prevents downstream crates from implementing `AnyFragment`.
        mod private {
            pub use super::*;
            pub trait Sealed {}
            $(impl Sealed for $ty {})*
        }
        /// Operations on any fragment of the AST of hax.
        pub trait AnyFragment: private::Sealed {
            /// Get a type identifier for this fragment.
            fn type_id() -> FragmentTypeId;
            /// Coerce as a fragment reference.
            // NOTE(review): the return type below appears truncated by extraction
            // (likely `Option<FragmentRef<'a>>`) — restore from version control.
            fn as_fragment<'a>(&'a self, type_id: FragmentTypeId) -> Option>;
            /// Coerce as an owned fragment.
            // NOTE(review): likewise, `Option` below is presumably `Option<Fragment>`.
            fn as_owned_fragment(&self, type_id: FragmentTypeId) -> Option;
        }
        $(
            impl AnyFragment for $ty {
                fn type_id() -> FragmentTypeId {
                    FragmentTypeId::$ty
                }
                // Returns `Some` only when the requested type id matches this type.
                fn as_fragment<'a>(&'a self, type_id: FragmentTypeId) -> Option> {
                    if type_id == Self::type_id() {
                        Some(self.into())
                    } else {
                        None
                    }
                }
                fn as_owned_fragment(&self, type_id: FragmentTypeId) -> Option {
                    if type_id == Self::type_id() {
                        #[allow(unreachable_code)]
                        Some(self.clone().into())
                    } else {
                        None
                    }
                }
            }
        )*
        /// A marker about a sub AST fragment in a bigger AST.
        pub struct FragmentMarker {
            // Address of the marked node — identity is by pointer, not by value.
            addr: usize,
            type_id: fragment::FragmentTypeId,
        }
        impl FragmentMarker {
            /// Creates a marker out of an AST fragment.
            // NOTE(review): `new` uses `T` but declares no generic parameter;
            // the `<T: AnyFragment>` list was likely lost in extraction.
            pub fn new(value: &T) -> Self {
                Self {
                    addr: (value as *const T).addr(),
                    type_id: T::type_id(),
                }
            }
        }
        impl<'a> derive_generic_visitor::Visitor for FragmentMarker {
            type Break = Fragment;
        }
        // Early-exit visitor: walking an AST breaks with the owned fragment as
        // soon as the node at the marked address (and of the marked type) is found.
        impl visitors::AstEarlyExitVisitor for FragmentMarker {
            $(
                pastey::paste!{
                    // NOTE(review): the method name `[]` is presumably a pastey
                    // `[<visit_ $ty:snake>]` concatenation mangled by extraction,
                    // and the `ControlFlow` return type has lost its parameter.
                    fn [](&mut self, x: &$ty) -> ::std::ops::ControlFlow {
                        if self.addr == (x as *const $ty).addr()
                            && let Some(fragment) = x.as_owned_fragment(self.type_id) {
                            return ::std::ops::ControlFlow::Break(fragment);
                        }
                        mk!(@visit_inner_call, $ty, self, x)
                    }
                }
            )*
        }
        #[derive_group_for_ast]
        #[allow(missing_docs)]
        /// An owned fragment of AST in hax.
        pub enum Fragment {
            $(
                #[doc = concat!("An owned [`", stringify!($ty), "`] node.")]
                $ty($ty),
            )*
            /// Represent an unknown node in the AST with a message.
            Unknown(String),
        }
        #[derive(Copy)]
        #[derive_group_for_ast_base]
        #[derive(::serde::Serialize)]
        #[allow(missing_docs)]
        /// A borrowed fragment of AST in hax.
        pub enum FragmentRef<'lt> {
            $(
                #[doc = concat!("A borrowed [`", stringify!($ty), "`] node.")]
                $ty(&'lt $ty),
            )*
        }
        // Ergonomic conversions: wrap any concrete AST value as a fragment.
        $(
            impl From<$ty> for Fragment {
                fn from(fragment: $ty) -> Self {
                    Self::$ty(fragment)
                }
            }
            impl<'lt> From<&'lt $ty> for FragmentRef<'lt> {
                fn from(fragment: &'lt $ty) -> Self {
                    Self::$ty(fragment)
                }
            }
        )*
    };
}
// `AstNodes` is substituted by the full visitable-node list via the `replace` macro.
#[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))]
mk!(GlobalId, Span, AstNodes);
================================================ FILE: rust-engine/src/ast/identifiers/global_id/compact_serialization.rs ================================================
//! Helper module that provides serialization and deserialization of DefId to
//! compact representations. This is solely for conciseness purposes of the
//! generated code.
//!
//! Concretely, this module defines `Repr` a (JSON-compact) representation of `DefId`s without parents.
//! It provides a bijection from the fields `krate`, `path`, and `kind` of `DefId` and `Repr`.
//! The choice of `Repr` itself is irrelevant. Anything that produces compact JSON is good.
use crate::interning::Internable;
use hax_frontend_exporter::{DefKind, DefPathItem, DisambiguatedDefPathItem};

use super::{DefIdInner, ExplicitDefId};

/// The compact representation: a tuple (krate name, path, defkind, is_constructor)
/// The path is a vector of tuples (DefPathItem, disambiguator).
type Repr = (String, Vec<(DefPathItem, u32)>, DefKind, bool);
/// `BorrowedRepr` is the borrowed variant of `Repr`. Useful for serialization.
type BorrowedRepr<'a> = ( &'a String, Vec<(&'a DefPathItem, &'a u32)>, &'a DefKind, bool, ); /// Serialize an explicit def id into a compact represented string pub fn serialize(edid: &ExplicitDefId) -> String { let did = &edid.def_id; let path = did .path .iter() .map( |DisambiguatedDefPathItem { data, disambiguator, }| (data, disambiguator), ) .collect::>(); let data: BorrowedRepr<'_> = (&did.krate, path, &did.kind, edid.is_constructor); serde_json::to_string(&data).unwrap() } /// Deserialize from a (string) compact representation and a parent pub fn deserialize(s: &str, parent: Option) -> ExplicitDefId { let (krate, path, kind, is_constructor): Repr = serde_json::from_str(s).unwrap(); ExplicitDefId { def_id: DefIdInner { parent: parent.map(|parent| parent.def_id), krate, path: path .into_iter() .map(|(data, disambiguator)| DisambiguatedDefPathItem { data, disambiguator, }) .collect(), kind, } .intern(), is_constructor, } } ================================================ FILE: rust-engine/src/ast/identifiers/global_id/generated.rs ================================================ // This file was generated by `cargo hax into generate-rust-engine-names`. // To regenerate it, please use `just regenerate-names`. Under the hood, `cargo // hax into generate-rust-engine-names` runs the Rust engine, which in turn // calls `rust_engine::names::export_def_ids_to_mod`. 
static TABLE_AND_INTERNED_GLOBAL_IDS: ( crate::interning::LazyLockNewWithValue, [crate::interning::Interned; 660], ) = { crate::interning::InterningTable::new_with_values(|| { use crate::ast::identifiers::global_id::ExplicitDefId; use crate::ast::identifiers::global_id::compact_serialization::deserialize; fn did_0() -> ExplicitDefId { deserialize(r##"["rust_primitives",[],"Mod",false]"##, None) } fn did_1() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_2() -> ExplicitDefId { deserialize(r##"["alloc",[],"Mod",false]"##, None) } fn did_3() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"alloc"},0]],"Mod",false]"##, Some(did_2()), ) } fn did_4() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"alloc"},0],[{"TypeNs":"Global"},0]],"Struct",false]"##, Some(did_3()), ) } fn did_5() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0]],"Mod",false]"##, Some(did_2()), ) } fn did_6() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],[{"TypeNs":"Vec"},0]],"Struct",false]"##, Some(did_5()), ) } fn did_7() -> ExplicitDefId { deserialize(r##"["core",[],"Mod",false]"##, None) } fn did_8() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"clone"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_9() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"clone"},0],[{"TypeNs":"Clone"},0]],"Trait",false]"##, Some(did_8()), ) } fn did_10() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"clone"},0],[{"TypeNs":"Clone"},0],[{"ValueNs":"clone"},0]],"AssocFn",false]"##, Some(did_9()), ) } fn did_11() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"clone"},0],[{"TypeNs":"impls"},0]],"Mod",false]"##, Some(did_8()), ) } fn did_12() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"clone"},0],[{"TypeNs":"impls"},0],["Impl",6]],{"Impl":{"of_trait":true}},false]"##, Some(did_11()), ) } fn did_13() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"alloc"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_14() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"alloc"},0],[{"TypeNs":"Allocator"},0]],"Trait",false]"##, Some(did_13()), ) } fn did_15() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"alloc"},0],["Impl",1]],{"Impl":{"of_trait":true}},false]"##, Some(did_3()), ) } fn did_16() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"alloc"},0],["Impl",3]],{"Impl":{"of_trait":true}},false]"##, Some(did_3()), ) } fn did_17() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",11]],{"Impl":{"of_trait":true}},false]"##, Some(did_5()), ) } fn did_18() -> ExplicitDefId { deserialize(r##"["hax_lib_protocol",[],"Mod",false]"##, None) } fn did_19() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0]],"Mod",false]"##, Some(did_18()), ) } fn did_20() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"HashAlgorithm"},0]],"Enum",false]"##, Some(did_19()), ) } fn did_21() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"deref_op"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_22() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_23() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_24() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0],[{"TypeNs":"Deref"},0]],"Trait",false]"##, Some(did_23()), ) } fn did_25() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0],[{"TypeNs":"Deref"},0],[{"ValueNs":"deref"},0]],"AssocFn",false]"##, Some(did_24()), ) } fn did_26() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",8]],{"Impl":{"of_trait":true}},false]"##, Some(did_5()), ) } fn did_27() -> ExplicitDefId { 
deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"hash"},0]],"Fn",false]"##, Some(did_19()), ) } fn did_28() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",1]],{"Impl":{"of_trait":false}},false]"##, Some(did_5()), ) } fn did_29() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",1],[{"ValueNs":"truncate"},0]],"AssocFn",false]"##, Some(did_28()), ) } fn did_30() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",2]],{"Impl":{"of_trait":false}},false]"##, Some(did_5()), ) } fn did_31() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",2],[{"ValueNs":"extend_from_slice"},0]],"AssocFn",false]"##, Some(did_30()), ) } fn did_32() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"box_new"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_33() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"boxed"},0]],"Mod",false]"##, Some(did_2()), ) } fn did_34() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"boxed"},0],[{"TypeNs":"Box"},0]],"Struct",false]"##, Some(did_33()), ) } fn did_35() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"unsize"},0]],"Fn",false]"##, Some(did_0()), ) } fn did_36() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0]],"Mod",false]"##, Some(did_2()), ) } fn did_37() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_36()), ) } fn did_38() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"into_vec"},0]],"AssocFn",false]"##, Some(did_37()), ) } fn did_39() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],[{"TypeNs":"Concat"},0]],"Trait",false]"##, Some(did_36()), ) } fn did_40() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"borrow"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_41() -> 
ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"borrow"},0],[{"TypeNs":"Borrow"},0]],"Trait",false]"##, Some(did_40()), ) } fn did_42() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"borrow"},0],["Impl",2]],{"Impl":{"of_trait":true}},false]"##, Some(did_40()), ) } fn did_43() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],["Impl",2]],{"Impl":{"of_trait":true}},false]"##, Some(did_36()), ) } fn did_44() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"concat"},0]],"AssocFn",false]"##, Some(did_37()), ) } fn did_45() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"to_vec"},0]],"AssocFn",false]"##, Some(did_37()), ) } fn did_46() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_47() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_46()), ) } fn did_48() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"len"},0]],"AssocFn",false]"##, Some(did_47()), ) } fn did_49() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_50() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"Range"},0]],"Struct",false]"##, Some(did_49()), ) } fn did_51() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"Range"},0],[{"ValueNs":"start"},0]],"Field",false]"##, Some(did_642()), ) } fn did_52() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"Range"},0],[{"ValueNs":"end"},0]],"Field",false]"##, Some(did_642()), ) } fn did_53() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0]],"Mod",false]"##, Some(did_22()), ) } fn 
did_54() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0],[{"TypeNs":"Index"},0]],"Trait",false]"##, Some(did_53()), ) } fn did_55() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0],[{"TypeNs":"Index"},0],[{"ValueNs":"index"},0]],"AssocFn",false]"##, Some(did_54()), ) } fn did_56() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"index"},0]],"Mod",false]"##, Some(did_46()), ) } fn did_57() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"index"},0],[{"TypeNs":"SliceIndex"},0]],"Trait",false]"##, Some(did_56()), ) } fn did_58() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"index"},0],["Impl",4]],{"Impl":{"of_trait":true}},false]"##, Some(did_56()), ) } fn did_59() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",13]],{"Impl":{"of_trait":true}},false]"##, Some(did_5()), ) } fn did_60() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"num"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_61() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"num"},0],["Impl",9]],{"Impl":{"of_trait":false}},false]"##, Some(did_60()), ) } fn did_62() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"num"},0],["Impl",9],[{"ValueNs":"to_le_bytes"},0]],"AssocFn",false]"##, Some(did_61()), ) } fn did_63() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"HMACAlgorithm"},0]],"Enum",false]"##, Some(did_19()), ) } fn did_64() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"hmac"},0]],"Fn",false]"##, Some(did_19()), ) } fn did_65() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"DHGroup"},0]],"Enum",false]"##, Some(did_19()), ) } fn did_66() -> ExplicitDefId { deserialize( 
r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"DHScalar"},0]],"Struct",false]"##, Some(did_19()), ) } fn did_67() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"dh_scalar_multiply_base"},0]],"Fn",false]"##, Some(did_19()), ) } fn did_68() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",9]],{"Impl":{"of_trait":true}},false]"##, Some(did_19()), ) } fn did_69() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"DHElement"},0]],"Struct",false]"##, Some(did_19()), ) } fn did_70() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"dh_scalar_multiply"},0]],"Fn",false]"##, Some(did_19()), ) } fn did_71() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_19()), ) } fn did_72() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",0],[{"ValueNs":"from_bytes"},0]],"AssocFn",false]"##, Some(did_71()), ) } fn did_73() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",1]],{"Impl":{"of_trait":false}},false]"##, Some(did_19()), ) } fn did_74() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",1],[{"ValueNs":"from_bytes"},0]],"AssocFn",false]"##, Some(did_73()), ) } fn did_75() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"ProtocolError"},0]],"Enum",false]"##, Some(did_18()), ) } fn did_76() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_77() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],[{"TypeNs":"Result"},0]],"Enum",false]"##, Some(did_76()), ) } fn did_78() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"AEADKey"},0]],"Struct",false]"##, 
Some(did_19()), ) } fn did_79() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"AEADIV"},0]],"Struct",false]"##, Some(did_19()), ) } fn did_80() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",6]],{"Impl":{"of_trait":false}},false]"##, Some(did_19()), ) } fn did_81() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",6],[{"ValueNs":"from_bytes"},0]],"AssocFn",false]"##, Some(did_80()), ) } fn did_82() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"AEADTag"},0]],"Struct",false]"##, Some(did_19()), ) } fn did_83() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"aead_decrypt"},0]],"Fn",false]"##, Some(did_19()), ) } fn did_84() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"AEADAlgorithm"},0]],"Enum",false]"##, Some(did_19()), ) } fn did_85() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",4]],{"Impl":{"of_trait":false}},false]"##, Some(did_19()), ) } fn did_86() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",4],[{"ValueNs":"from_bytes"},0]],"AssocFn",false]"##, Some(did_85()), ) } fn did_87() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",5]],{"Impl":{"of_trait":false}},false]"##, Some(did_19()), ) } fn did_88() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],["Impl",5],[{"ValueNs":"from_bytes"},0]],"AssocFn",false]"##, Some(did_87()), ) } fn did_89() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Tuple2"},0]],"Struct",false]"##, Some(did_1()), ) } fn did_90() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"ValueNs":"aead_encrypt"},0]],"Fn",false]"##, Some(did_19()), ) } fn 
did_91() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],[{"ValueNs":"from_elem"},0]],"Fn",false]"##, Some(did_5()), ) } fn did_92() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ptr"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_93() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ptr"},0],[{"TypeNs":"const_ptr"},0]],"Mod",false]"##, Some(did_92()), ) } fn did_94() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ptr"},0],[{"TypeNs":"const_ptr"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_93()), ) } fn did_95() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ptr"},0],[{"TypeNs":"const_ptr"},0],["Impl",0],[{"ValueNs":"offset"},0]],"AssocFn",false]"##, Some(did_94()), ) } fn did_96() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"cast_op"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_97() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"str"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_98() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"str"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_97()), ) } fn did_99() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"str"},0],["Impl",0],[{"ValueNs":"as_ptr"},0]],"AssocFn",false]"##, Some(did_98()), ) } fn did_100() -> ExplicitDefId { deserialize(r##"["hax_lib",[],"Mod",false]"##, None) } fn did_101() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"any_to_unit"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_102() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"inline_unsafe"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_103() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"inline"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_104() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0]],"Mod",false]"##, Some(did_100()), ) } fn did_105() -> ExplicitDefId { deserialize( 
// ---------------------------------------------------------------------------
// Auto-generated table of `ExplicitDefId` constructors (`did_106`..`did_214`
// visible here). Each `did_N()` calls `deserialize` with:
//   1. a hard-coded JSON raw string encoding a definition path:
//      `[crate_name, [[segment, disambiguator], ...], def_kind, bool]`
//      where `def_kind` is e.g. "Mod", "Trait", "AssocFn", "Struct", "Enum",
//      "Field", "Fn", or `{"Impl":{"of_trait":bool}}`;
//   2. `Some(did_M())`, the id of the enclosing definition — e.g. items under
//      `hax_lib::abstraction` (did_107, did_117) pass `did_106()`, the
//      `abstraction` module itself.
// NOTE(review): this looks machine-generated — do not edit entries by hand;
// regenerate the table instead. The JSON strings are data: changing a single
// byte changes which definition is resolved.
// ---------------------------------------------------------------------------
r##"["hax_lib",[[{"TypeNs":"int"},0],[{"TypeNs":"Int"},0]],"Struct",false]"##, Some(did_104()), ) } fn did_106() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"abstraction"},0]],"Mod",false]"##, Some(did_100()), ) } fn did_107() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"abstraction"},0],[{"TypeNs":"Concretization"},0]],"Trait",false]"##, Some(did_106()), ) } fn did_108() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"abstraction"},0],[{"TypeNs":"Concretization"},0],[{"ValueNs":"concretize"},0]],"AssocFn",false]"##, Some(did_107()), ) } fn did_109() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",44]],{"Impl":{"of_trait":true}},false]"##, Some(did_104()), ) } fn did_110() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",7]],{"Impl":{"of_trait":false}},false]"##, Some(did_104()), ) } fn did_111() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",7],[{"ValueNs":"_unsafe_from_str"},0]],"AssocFn",false]"##, Some(did_110()), ) } fn did_112() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",9]],{"Impl":{"of_trait":true}},false]"##, Some(did_104()), ) } fn did_113() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",7],[{"ValueNs":"pow2"},0]],"AssocFn",false]"##, Some(did_110()), ) } fn did_114() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],[{"TypeNs":"ToInt"},0]],"Trait",false]"##, Some(did_104()), ) } fn did_115() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],[{"TypeNs":"ToInt"},0],[{"ValueNs":"to_int"},0]],"AssocFn",false]"##, Some(did_114()), ) } fn did_116() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",17]],{"Impl":{"of_trait":true}},false]"##, Some(did_104()), ) } fn did_117() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"abstraction"},0],[{"TypeNs":"Abstraction"},0]],"Trait",false]"##, 
Some(did_106()), ) } fn did_118() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"abstraction"},0],[{"TypeNs":"Abstraction"},0],[{"ValueNs":"lift"},0]],"AssocFn",false]"##, Some(did_117()), ) } fn did_119() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"int"},0],["Impl",16]],{"Impl":{"of_trait":true}},false]"##, Some(did_104()), ) } fn did_120() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_121() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0],[{"TypeNs":"ControlFlow"},0]],"Enum",false]"##, Some(did_120()), ) } fn did_122() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0],[{"TypeNs":"ControlFlow"},0],[{"TypeNs":"Break"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_644()), ) } fn did_123() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0],[{"TypeNs":"ControlFlow"},0],[{"TypeNs":"Continue"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_645()), ) } fn did_124() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeTo"},0]],"Struct",false]"##, Some(did_49()), ) } fn did_125() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeTo"},0],[{"ValueNs":"end"},0]],"Field",false]"##, Some(did_646()), ) } fn did_126() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeFull"},0]],"Struct",false]"##, Some(did_49()), ) } fn did_127() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeFrom"},0]],"Struct",false]"##, Some(did_49()), ) } fn did_128() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeFrom"},0],[{"ValueNs":"start"},0]],"Field",false]"##, Some(did_648()), ) } fn did_129() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_130() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"Into"},0]],"Trait",false]"##, Some(did_129()), ) } fn did_131() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"Into"},0],[{"ValueNs":"into"},0]],"AssocFn",false]"##, Some(did_130()), ) } fn did_132() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"From"},0]],"Trait",false]"##, Some(did_129()), ) } fn did_133() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"num"},0]],"Mod",false]"##, Some(did_129()), ) } fn did_134() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"num"},0],["Impl",64]],{"Impl":{"of_trait":true}},false]"##, Some(did_133()), ) } fn did_135() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],["Impl",3]],{"Impl":{"of_trait":true}},false]"##, Some(did_129()), ) } fn did_136() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"array"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_137() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"array"},0],[{"TypeNs":"iter"},0]],"Mod",false]"##, Some(did_136()), ) } fn did_138() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"array"},0],[{"TypeNs":"iter"},0],[{"TypeNs":"IntoIter"},0]],"Struct",false]"##, Some(did_137()), ) } fn did_139() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_140() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0]],"Mod",false]"##, Some(did_139()), ) } fn did_141() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"collect"},0]],"Mod",false]"##, Some(did_140()), ) } fn did_142() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"collect"},0],[{"TypeNs":"IntoIterator"},0]],"Trait",false]"##, Some(did_141()), ) } fn did_143() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"collect"},0],[{"TypeNs":"IntoIterator"},0],[{"ValueNs":"into_iter"},0]],"AssocFn",false]"##, Some(did_142()), ) } fn did_144() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"array"},0],[{"TypeNs":"iter"},0],["Impl",1]],{"Impl":{"of_trait":true}},false]"##, Some(did_137()), ) } fn did_145() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"_internal_loop_decreases"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_146() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"From"},0],[{"ValueNs":"from"},0]],"AssocFn",false]"##, Some(did_132()), ) } fn did_147() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0]],"Mod",false]"##, Some(did_100()), ) } fn did_148() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"Prop"},0]],"Struct",false]"##, Some(did_147()), ) } fn did_149() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",3]],{"Impl":{"of_trait":true}},false]"##, Some(did_147()), ) } fn did_150() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"_internal_while_loop_invariant"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_151() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"function"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_152() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"function"},0],[{"TypeNs":"FnOnce"},0]],"Trait",false]"##, Some(did_151()), ) } fn did_153() -> ExplicitDefId { deserialize( 
r##"["hax_lib",[[{"ValueNs":"_internal_loop_invariant"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_154() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"ValueNs":"assert"},0]],"Fn",false]"##, Some(did_100()), ) } fn did_155() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_156() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialEq"},0]],"Trait",false]"##, Some(did_155()), ) } fn did_157() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialEq"},0],[{"ValueNs":"eq"},0]],"AssocFn",false]"##, Some(did_156()), ) } fn did_158() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_159() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Not"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_160() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Not"},0],[{"ValueNs":"not"},0]],"AssocFn",false]"##, Some(did_159()), ) } fn did_161() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"panicking"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_162() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"panicking"},0],[{"TypeNs":"AssertKind"},0]],"Enum",false]"##, Some(did_161()), ) } fn did_163() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_164() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],[{"TypeNs":"Option"},0]],"Enum",false]"##, Some(did_163()), ) } fn did_165() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"fmt"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_166() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"fmt"},0],[{"TypeNs":"Arguments"},0]],"Struct",false]"##, Some(did_165()), ) } fn did_167() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"fmt"},0],[{"TypeNs":"Debug"},0]],"Trait",false]"##, Some(did_165()), ) } fn did_168() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"fmt"},0],[{"TypeNs":"num"},0]],"Mod",false]"##, Some(did_165()), ) } fn did_169() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"panicking"},0],[{"ValueNs":"assert_failed"},0]],"Fn",false]"##, Some(did_161()), ) } fn did_170() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Never"},0]],"Enum",false]"##, Some(did_1()), ) } fn did_171() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"never_to_any"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_172() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"panicking"},0],[{"ValueNs":"panic"},0]],"Fn",false]"##, Some(did_161()), ) } fn did_173() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],[{"TypeNs":"Result"},0],[{"TypeNs":"Err"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_649()), ) } fn did_174() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_76()), ) } fn did_175() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],["Impl",0],[{"ValueNs":"map_err"},0]],"AssocFn",false]"##, Some(did_174()), ) } fn did_176() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_163()), ) } fn did_177() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],["Impl",0],[{"ValueNs":"is_some"},0]],"AssocFn",false]"##, Some(did_176()), ) } fn did_178() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],[{"TypeNs":"Option"},0],[{"TypeNs":"Some"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_650()), ) } fn did_179() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"boxed"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, 
Some(did_33()), ) } fn did_180() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"boxed"},0],["Impl",0],[{"ValueNs":"new"},0]],"AssocFn",false]"##, Some(did_179()), ) } fn did_181() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"string"},0]],"Mod",false]"##, Some(did_2()), ) } fn did_182() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"string"},0],[{"TypeNs":"String"},0]],"Struct",false]"##, Some(did_181()), ) } fn did_183() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"Infallible"},0]],"Enum",false]"##, Some(did_129()), ) } fn did_184() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_185() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"FromResidual"},0]],"Trait",false]"##, Some(did_184()), ) } fn did_186() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"FromResidual"},0],[{"ValueNs":"from_residual"},0]],"AssocFn",false]"##, Some(did_185()), ) } fn did_187() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],[{"TypeNs":"num"},0],["Impl",88]],{"Impl":{"of_trait":true}},false]"##, Some(did_133()), ) } fn did_188() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],["Impl",28]],{"Impl":{"of_trait":true}},false]"##, Some(did_76()), ) } fn did_189() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"index"},0],["Impl",2]],{"Impl":{"of_trait":true}},false]"##, Some(did_56()), ) } fn did_190() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0]],"Mod",false]"##, Some(did_140()), ) } fn did_191() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0]],"Trait",false]"##, Some(did_190()), ) } 
fn did_192() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0],[{"ValueNs":"next"},0]],"AssocFn",false]"##, Some(did_191()), ) } fn did_193() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0]],"Mod",false]"##, Some(did_22()), ) } fn did_194() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Add"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_195() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Add"},0],[{"ValueNs":"add"},0]],"AssocFn",false]"##, Some(did_194()), ) } fn did_196() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"function"},0],[{"TypeNs":"FnMut"},0]],"Trait",false]"##, Some(did_151()), ) } fn did_197() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0],[{"ValueNs":"fold"},0]],"AssocFn",false]"##, Some(did_191()), ) } fn did_198() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],[{"TypeNs":"Result"},0],[{"TypeNs":"Ok"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_651()), ) } fn did_199() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"convert"},0],["Impl",4]],{"Impl":{"of_trait":true}},false]"##, Some(did_129()), ) } fn did_200() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"ValueNs":"implies"},0]],"Fn",false]"##, Some(did_147()), ) } fn did_201() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"function"},0],[{"TypeNs":"Fn"},0]],"Trait",false]"##, Some(did_151()), ) } fn did_202() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"ValueNs":"exists"},0]],"Fn",false]"##, Some(did_147()), ) } fn did_203() -> ExplicitDefId { deserialize( 
r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"ValueNs":"forall"},0]],"Fn",false]"##, Some(did_147()), ) } fn did_204() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"ToProp"},0]],"Trait",false]"##, Some(did_147()), ) } fn did_205() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"ToProp"},0],[{"ValueNs":"to_prop"},0]],"AssocFn",false]"##, Some(did_204()), ) } fn did_206() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",2]],{"Impl":{"of_trait":true}},false]"##, Some(did_147()), ) } fn did_207() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0]],{"Impl":{"of_trait":false}},false]"##, Some(did_147()), ) } fn did_208() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"implies"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_209() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"ne"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_210() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"eq"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_211() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"not"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_212() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"or"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_213() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"and"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_214() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],["Impl",0],[{"ValueNs":"from_bool"},0]],"AssocFn",false]"##, Some(did_207()), ) } fn did_215() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0]],"Mod",false]"##, 
Some(did_147()), ) } fn did_216() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"exists"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_217() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"forall"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_218() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"implies"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_219() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_220() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_221() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"not"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_222() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"or"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_223() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"and"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_224() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"prop"},0],[{"TypeNs":"constructors"},0],[{"ValueNs":"from_bool"},0]],"Fn",false]"##, Some(did_215()), ) } fn did_225() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"iter"},0]],"Mod",false]"##, Some(did_46()), ) } fn did_226() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"iter"},0],[{"TypeNs":"Iter"},0]],"Struct",false]"##, Some(did_225()), ) } fn did_227() -> ExplicitDefId { deserialize( 
// Generated DefId table (comments only added here; raw strings are data).
// did_228..did_272: core slice/iter adapters (ChunksExact, Enumerate, StepBy),
// the `Try` trait, hax_lib RefineAs/Refinement items, and core::ops bit/arith
// traits plus core::cmp::PartialOrd comparison methods.
r##"["core",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"iter"},0]],"AssocFn",false]"##, Some(did_47()), ) } fn did_228() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],[{"TypeNs":"iter"},0],[{"TypeNs":"ChunksExact"},0]],"Struct",false]"##, Some(did_225()), ) } fn did_229() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"slice"},0],["Impl",0],[{"ValueNs":"chunks_exact"},0]],"AssocFn",false]"##, Some(did_47()), ) } fn did_230() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"adapters"},0]],"Mod",false]"##, Some(did_139()), ) } fn did_231() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"adapters"},0],[{"TypeNs":"enumerate"},0]],"Mod",false]"##, Some(did_230()), ) } fn did_232() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"adapters"},0],[{"TypeNs":"enumerate"},0],[{"TypeNs":"Enumerate"},0]],"Struct",false]"##, Some(did_231()), ) } fn did_233() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0],[{"ValueNs":"enumerate"},0]],"AssocFn",false]"##, Some(did_191()), ) } fn did_234() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"adapters"},0],[{"TypeNs":"step_by"},0]],"Mod",false]"##, Some(did_230()), ) } fn did_235() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"adapters"},0],[{"TypeNs":"step_by"},0],[{"TypeNs":"StepBy"},0]],"Struct",false]"##, Some(did_234()), ) } fn did_236() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0],[{"ValueNs":"step_by"},0]],"AssocFn",false]"##, Some(did_191()), ) } fn did_237() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"Try"},0]],"Trait",false]"##, Some(did_184()), ) } fn did_238() -> ExplicitDefId { 
deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"Try"},0],[{"ValueNs":"branch"},0]],"AssocFn",false]"##, Some(did_237()), ) } fn did_239() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],["Impl",27]],{"Impl":{"of_trait":true}},false]"##, Some(did_76()), ) } fn did_240() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"RefineAs"},0]],"Trait",false]"##, Some(did_100()), ) } fn did_241() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"RefineAs"},0],[{"ValueNs":"into_checked"},0]],"AssocFn",false]"##, Some(did_240()), ) } fn did_242() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"Refinement"},0]],"Trait",false]"##, Some(did_100()), ) } fn did_243() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"Refinement"},0],[{"ValueNs":"get"},0]],"AssocFn",false]"##, Some(did_242()), ) } fn did_244() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"Refinement"},0],[{"TypeNs":"InnerType"},0]],"AssocTy",false]"##, Some(did_242()), ) } fn did_245() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"Refinement"},0],[{"ValueNs":"new"},0]],"AssocFn",false]"##, Some(did_242()), ) } fn did_246() -> ExplicitDefId { deserialize( r##"["hax_lib",[[{"TypeNs":"Refinement"},0],[{"ValueNs":"get_mut"},0]],"AssocFn",false]"##, Some(did_242()), ) } fn did_247() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitAnd"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_248() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitAnd"},0],[{"ValueNs":"bitand"},0]],"AssocFn",false]"##, Some(did_247()), ) } fn did_249() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitXor"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_250() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitXor"},0],[{"ValueNs":"bitxor"},0]],"AssocFn",false]"##, Some(did_249()), ) } fn did_251() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Div"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_252() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Div"},0],[{"ValueNs":"div"},0]],"AssocFn",false]"##, Some(did_251()), ) } fn did_253() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Mul"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_254() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Mul"},0],[{"ValueNs":"mul"},0]],"AssocFn",false]"##, Some(did_253()), ) } fn did_255() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Sub"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_256() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Sub"},0],[{"ValueNs":"sub"},0]],"AssocFn",false]"##, Some(did_255()), ) } fn did_257() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Neg"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_258() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Neg"},0],[{"ValueNs":"neg"},0]],"AssocFn",false]"##, Some(did_257()), ) } fn did_259() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Rem"},0]],"Trait",false]"##, Some(did_193()), ) } fn did_260() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Rem"},0],[{"ValueNs":"rem"},0]],"AssocFn",false]"##, Some(did_259()), ) } fn did_261() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shl"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_262() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shl"},0],[{"ValueNs":"shl"},0]],"AssocFn",false]"##, Some(did_261()), ) } fn did_263() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shr"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_264() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shr"},0],[{"ValueNs":"shr"},0]],"AssocFn",false]"##, Some(did_263()), ) } fn did_265() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitOr"},0]],"Trait",false]"##, Some(did_158()), ) } fn did_266() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitOr"},0],[{"ValueNs":"bitor"},0]],"AssocFn",false]"##, Some(did_265()), ) } fn did_267() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialOrd"},0]],"Trait",false]"##, Some(did_155()), ) } fn did_268() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialOrd"},0],[{"ValueNs":"lt"},0]],"AssocFn",false]"##, Some(did_267()), ) } fn did_269() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialOrd"},0],[{"ValueNs":"gt"},0]],"AssocFn",false]"##, Some(did_267()), ) } fn did_270() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"logical_op_and"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_271() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialOrd"},0],[{"ValueNs":"le"},0]],"AssocFn",false]"##, Some(did_267()), ) } fn did_272() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialOrd"},0],[{"ValueNs":"ge"},0]],"AssocFn",false]"##, Some(did_267()), ) } fn 
// Generated DefId table (comments only added here; raw strings are data).
// did_273..did_321: remaining core cmp/marker/try_trait/deref items, then
// rust_primitives modules: per-width integer helper fns (u128/u16/u64/isize/
// u8/u32/i32), hax::folds, dummy_hax_concrete_ident_wrapper consts, and the
// hax monomorphized_update_at / control_flow_monad module openers.
did_273() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"PartialEq"},0],[{"ValueNs":"ne"},0]],"AssocFn",false]"##, Some(did_156()), ) } fn did_274() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"marker"},0]],"Mod",false]"##, Some(did_7()), ) } fn did_275() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"marker"},0],[{"TypeNs":"Copy"},0]],"Trait",false]"##, Some(did_274()), ) } fn did_276() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"Try"},0],[{"TypeNs":"Residual"},0]],"AssocTy",false]"##, Some(did_237()), ) } fn did_277() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"Try"},0],[{"ValueNs":"from_output"},0]],"AssocFn",false]"##, Some(did_237()), ) } fn did_278() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0],[{"TypeNs":"Deref"},0],[{"TypeNs":"Target"},0]],"AssocTy",false]"##, Some(did_24()), ) } fn did_279() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0],[{"TypeNs":"DerefMut"},0]],"Trait",false]"##, Some(did_23()), ) } fn did_280() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"deref"},0],[{"TypeNs":"DerefMut"},0],[{"ValueNs":"deref_mut"},0]],"AssocFn",false]"##, Some(did_279()), ) } fn did_281() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_282() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_283() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_284() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_285() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"u64"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_286() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_287() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_288() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_289() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_290() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range_step_by"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_291() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0]],"Fn",false]"##, Some(did_0()), ) } fn did_292() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},1]],"Const",false]"##, Some(did_291()), ) } fn did_293() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"crypto_abstractions"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_294() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"dummy"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_295() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_296() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_297() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_298() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_299() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_300() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},0]],"Const",false]"##, Some(did_291()), ) } fn did_301() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_302() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_303() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_304() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_305() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"crypto_abstractions"},0],[{"ValueNs":"crypto_abstractions"},0]],"Fn",false]"##, Some(did_293()), ) } fn did_306() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_307() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_308() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_309() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_310() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_311() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0],[{"ValueNs":"update_at_usize"},0]],"Fn",false]"##, Some(did_310()), ) } fn did_312() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},1],[{"ValueNs":"f"},0]],"Fn",false]"##, Some(did_292()), ) } fn did_313() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_314() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_315() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_316() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_317() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_318() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0],[{"ValueNs":"update_at_range_from"},0]],"Fn",false]"##, Some(did_310()), ) } fn did_319() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_320() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_321() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"moption"},0]],"Mod",false]"##, Some(did_320()), ) } fn did_322() -> ExplicitDefId { deserialize( 
// Generated DefId table (comments only added here; raw strings are data).
// did_322(tail)..did_372: control_flow_monad::moption::run, more rust_primitives
// per-width integer helper fns, hax::machine_int / hax::int entries, the MutRef
// enum and Failure struct, folds helpers, and hax update_at/array_of_list.
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"moption"},0],[{"ValueNs":"run"},0]],"Fn",false]"##, Some(did_321()), ) } fn did_323() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_324() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_325() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_326() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_327() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"not"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_328() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_329() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_330() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_331() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_332() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_333() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_334() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"MutRef"},0]],"Enum",false]"##, Some(did_1()), ) } fn did_335() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_336() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_337() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_338() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_339() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_340() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_341() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_342() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_343() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_344() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_345() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_346() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_347() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_348() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0],[{"ValueNs":"update_at_range"},0]],"Fn",false]"##, Some(did_310()), ) } fn did_349() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_350() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_351() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_352() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_353() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_354() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_355() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_slice_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_356() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_357() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_358() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_359() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_360() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"bitxor"},0]],"Fn",false]"##, 
Some(did_302()), ) } fn did_361() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_362() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_363() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_364() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_365() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_366() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Failure"},0]],"Struct",false]"##, Some(did_1()), ) } fn did_367() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_368() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_369() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_370() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"update_at"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_371() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"array_of_list"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_372() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_373() -> ExplicitDefId { deserialize( 
// Generated DefId table (comments only added here; raw strings are data).
// did_373(tail)..did_422: more rust_primitives integer helpers, hax::int and
// hax::machine_int ops, hax::folds chunked-slice folds, Tuple2 field ids,
// control_flow_monad::mresult::run, and hax repeat / offset helpers.
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_374() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_375() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0],[{"ValueNs":"update_at_range_full"},0]],"Fn",false]"##, Some(did_310()), ) } fn did_376() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_chunked_slice"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_377() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_378() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_379() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_380() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_381() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_382() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_chunked_slice_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_383() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_384() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_385() -> 
ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_386() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_387() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_388() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_389() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"offset"},0]],"Fn",false]"##, Some(did_0()), ) } fn did_390() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Tuple2"},0],[{"ValueNs":"0"},0]],"Field",false]"##, Some(did_643()), ) } fn did_391() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_392() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_393() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_394() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_395() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_396() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_397() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_398() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Tuple2"},0],[{"ValueNs":"1"},0]],"Field",false]"##, Some(did_643()), ) } fn did_399() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_400() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_401() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_402() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_403() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_404() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_405() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_406() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_407() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_408() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_409() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_410() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_411() -> ExplicitDefId { 
deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_412() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_413() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_slice"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_414() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_415() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_416() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_417() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_418() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"mresult"},0]],"Mod",false]"##, Some(did_320()), ) } fn did_419() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"mresult"},0],[{"ValueNs":"run"},0]],"Fn",false]"##, Some(did_418()), ) } fn did_420() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"repeat"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_421() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_422() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_423() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_424() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_425() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_426() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_427() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_428() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_429() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_430() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"refinements"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_431() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_432() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"while_loop"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_433() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_434() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_435() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_436() -> ExplicitDefId 
{ deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_437() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_438() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_439() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_440() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_441() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_442() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_443() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_444() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_445() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_446() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_447() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_chunked_slice"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_448() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_449() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_450() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_451() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"into_machine"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_452() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_453() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_454() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_455() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_456() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_457() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_458() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_459() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_460() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_461() -> 
ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_462() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_463() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_464() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_465() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_466() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_467() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_468() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_469() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_470() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"ControlFlowMonad"},0]],"Trait",false]"##, Some(did_320()), ) } fn did_471() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"ControlFlowMonad"},0],[{"ValueNs":"lift"},0]],"AssocFn",false]"##, Some(did_470()), ) } fn did_472() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_287()), ) } fn 
did_473() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_474() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"iterator_functions"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_475() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_476() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_477() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_478() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_479() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_chunked_slice_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_480() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_481() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_482() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_483() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_484() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_302()), ) } fn 
did_485() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_486() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_487() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_488() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_489() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_490() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_491() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_492() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_493() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_494() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_chunked_slice_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_495() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_496() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_497() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"from_machine"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_498() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_499() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_500() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_501() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_502() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_503() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_504() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_505() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_506() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u8"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_295()), ) } fn did_507() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_508() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range_step_by_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_509() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_510() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_511() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_slice_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_512() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"mexception"},0]],"Mod",false]"##, Some(did_320()), ) } fn did_513() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_514() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_515() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_516() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_517() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_518() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_519() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},0],[{"ValueNs":"arith"},0]],"Fn",false]"##, Some(did_300()), ) } fn did_520() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_521() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_522() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_523() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_524() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"bitand"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_525() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_526() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_527() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_528() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_529() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_enumerated_chunked_slice_return"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_530() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_531() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_532() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_533() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, 
Some(did_352()), ) } fn did_534() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_535() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_536() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_537() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_538() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_539() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"int"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_345()), ) } fn did_540() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"sub"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_541() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"control_flow_monad"},0],[{"TypeNs":"mexception"},0],[{"ValueNs":"run"},0]],"Fn",false]"##, Some(did_512()), ) } fn did_542() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"logical_op_or"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_543() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"folds"},0],[{"ValueNs":"fold_range_step_by_cf"},0]],"Fn",false]"##, Some(did_289()), ) } fn did_544() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"dropped_body"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_545() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_285()), ) } fn 
did_546() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u64"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_285()), ) } fn did_547() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_548() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_549() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_550() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"failure"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_551() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_552() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_553() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"div"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_554() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_555() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"while_loop_cf"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_556() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"question_mark_result"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_557() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_558() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, 
Some(did_297()), ) } fn did_559() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"lt"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_560() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_561() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"shr"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_562() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"shl"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_563() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"gt"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_564() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"props"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_565() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_566() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"usize"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_319()), ) } fn did_567() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_568() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_569() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_570() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u128"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_281()), ) } fn did_571() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_328()), ) 
} fn did_572() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u16"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_283()), ) } fn did_573() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_574() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i16"},0],[{"ValueNs":"bit_xor"},0]],"Fn",false]"##, Some(did_315()), ) } fn did_575() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"bitor"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_576() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"le"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_577() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i8"},0],[{"ValueNs":"add"},0]],"Fn",false]"##, Some(did_324()), ) } fn did_578() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"ne"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_579() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"isize"},0],[{"ValueNs":"rem"},0]],"Fn",false]"##, Some(did_287()), ) } fn did_580() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"eq"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_581() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"monomorphized_update_at"},0],[{"ValueNs":"update_at_range_to"},0]],"Fn",false]"##, Some(did_310()), ) } fn did_582() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_583() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"bit_or"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_584() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"i128"},0],[{"ValueNs":"mul"},0]],"Fn",false]"##, Some(did_352()), ) } fn did_585() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"u32"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_297()), ) } fn did_586() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"ValueNs":"while_loop_return"},0]],"Fn",false]"##, Some(did_1()), ) } fn did_587() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i32"},0],[{"ValueNs":"ge"},0]],"Fn",false]"##, Some(did_308()), ) } fn did_588() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"i64"},0],[{"ValueNs":"bit_and"},0]],"Fn",false]"##, Some(did_328()), ) } fn did_589() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"iter"},0],[{"TypeNs":"traits"},0],[{"TypeNs":"iterator"},0],[{"TypeNs":"Iterator"},0],[{"TypeNs":"Item"},0]],"AssocTy",false]"##, Some(did_191()), ) } fn did_590() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Add"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_194()), ) } fn did_591() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Sub"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_255()), ) } fn did_592() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Mul"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_253()), ) } fn did_593() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Div"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_251()), ) } fn did_594() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Rem"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_259()), ) } fn did_595() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitXor"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_249()), ) } fn did_596() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitAnd"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_247()), ) } fn did_597() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"BitOr"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_265()), ) } fn did_598() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shl"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_261()), ) } fn did_599() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Shr"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_263()), ) } fn did_600() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"arith"},0],[{"TypeNs":"Neg"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_257()), ) } fn did_601() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"bit"},0],[{"TypeNs":"Not"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_159()), ) } fn did_602() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"try_trait"},0],[{"TypeNs":"Try"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_237()), ) } fn did_603() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"explicit_monadic"},0]],"Mod",false]"##, Some(did_1()), ) } fn did_604() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"explicit_monadic"},0],[{"ValueNs":"pure"},0]],"Fn",false]"##, Some(did_603()), ) } fn did_605() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"explicit_monadic"},0],[{"ValueNs":"lift"},0]],"Fn",false]"##, 
Some(did_603()), ) } fn did_606() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"fmt"},0],[{"TypeNs":"num"},0],["Impl",54]],{"Impl":{"of_trait":true}},false]"##, Some(did_168()), ) } fn did_607() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"marker"},0],[{"TypeNs":"Destruct"},0]],"Trait",false]"##, Some(did_274()), ) } fn did_608() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"Eq"},0]],"Trait",false]"##, Some(did_155()), ) } fn did_609() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"cmp"},0],[{"TypeNs":"Ord"},0]],"Trait",false]"##, Some(did_155()), ) } fn did_610() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"marker"},0],[{"TypeNs":"StructuralPartialEq"},0]],"Trait",false]"##, Some(did_274()), ) } fn did_611() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Impl",1]],{"Impl":{"of_trait":true}},false]"##, Some(did_291()), ) } fn did_612() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"TypeNs":"Foo"},0]],"Struct",false]"##, Some(did_291()), ) } fn did_613() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Impl",0]],{"Impl":{"of_trait":true}},false]"##, Some(did_291()), ) } fn did_614() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"add_with_overflow"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_615() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"arithmetic"},0]],"Mod",false]"##, Some(did_0()), ) } fn did_616() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"arithmetic"},0],[{"ValueNs":"neg"},0]],"Fn",false]"##, Some(did_615()), ) } fn did_617() -> ExplicitDefId { deserialize( 
r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"sub_with_overflow"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_618() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"mul_with_overflow"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_619() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"machine_int"},0],[{"ValueNs":"cmp"},0]],"Fn",false]"##, Some(did_302()), ) } fn did_620() -> ExplicitDefId { deserialize( r##"["alloc",[[{"TypeNs":"vec"},0],["Impl",1],[{"ValueNs":"as_slice"},0]],"AssocFn",false]"##, Some(did_28()), ) } fn did_621() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0],[{"TypeNs":"Index"},0],[{"TypeNs":"Output"},0]],"AssocTy",false]"##, Some(did_54()), ) } fn did_622() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0],[{"TypeNs":"IndexMut"},0]],"Trait",false]"##, Some(did_53()), ) } fn did_623() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"index"},0],[{"TypeNs":"IndexMut"},0],[{"ValueNs":"index_mut"},0]],"AssocFn",false]"##, Some(did_622()), ) } fn did_624() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Use",1]],"Use",false]"##, Some(did_291()), ) } fn did_625() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"props"},0],["Use",0]],"Use",false]"##, Some(did_564()), ) } fn did_626() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},1],["Use",0]],"Use",false]"##, Some(did_292()), ) } fn did_627() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},1],["Use",1]],"Use",false]"##, Some(did_292()), ) } fn did_628() -> 
ExplicitDefId { deserialize( r##"["rust_primitives",[["Use",0]],"Use",false]"##, Some(did_0()), ) } fn did_629() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},0],["Use",0]],"Use",false]"##, Some(did_300()), ) } fn did_630() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"MacroNs":"impl_arith"},0]],{"Macro":{"bang":true,"attr":false,"derive":false}},false]"##, Some(did_0()), ) } fn did_631() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},1],[{"ValueNs":"g"},0]],"Fn",false]"##, Some(did_292()), ) } fn did_632() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Use",0]],"Use",false]"##, Some(did_291()), ) } fn did_633() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"alloc"},0]],"ExternCrate",false]"##, Some(did_0()), ) } fn did_634() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Use",2]],"Use",false]"##, Some(did_291()), ) } fn did_635() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"std"},0]],"ExternCrate",false]"##, Some(did_0()), ) } fn did_636() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},0],["Use",1]],"Use",false]"##, Some(did_300()), ) } fn did_637() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Use",4]],"Use",false]"##, Some(did_291()), ) } fn did_638() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"crypto_abstractions"},0],["Use",0]],"Use",false]"##, Some(did_293()), ) } fn did_639() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"index_mut"},0]],"Fn",false]"##, Some(did_291()), ) } fn did_640() -> 
ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"ValueNs":"_"},0],["Use",2]],"Use",false]"##, Some(did_300()), ) } fn did_641() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],["Use",3]],"Use",false]"##, Some(did_291()), ) } fn did_642() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"Range"},0]],"Struct",true]"##, Some(did_49()), ) } fn did_643() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Tuple2"},0]],"Struct",true]"##, Some(did_1()), ) } fn did_644() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0],[{"TypeNs":"ControlFlow"},0],[{"TypeNs":"Break"},0]],"Variant",true]"##, Some(did_121()), ) } fn did_645() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"control_flow"},0],[{"TypeNs":"ControlFlow"},0],[{"TypeNs":"Continue"},0]],"Variant",true]"##, Some(did_121()), ) } fn did_646() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeTo"},0]],"Struct",true]"##, Some(did_49()), ) } fn did_647() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeFull"},0]],"Struct",true]"##, Some(did_49()), ) } fn did_648() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"ops"},0],[{"TypeNs":"range"},0],[{"TypeNs":"RangeFrom"},0]],"Struct",true]"##, Some(did_49()), ) } fn did_649() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"result"},0],[{"TypeNs":"Result"},0],[{"TypeNs":"Err"},0]],"Variant",true]"##, Some(did_77()), ) } fn did_650() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],[{"TypeNs":"Option"},0],[{"TypeNs":"Some"},0]],"Variant",true]"##, Some(did_164()), ) } fn did_651() -> ExplicitDefId { deserialize( 
r##"["core",[[{"TypeNs":"result"},0],[{"TypeNs":"Result"},0],[{"TypeNs":"Ok"},0]],"Variant",true]"##, Some(did_77()), ) } fn did_652() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"TypeNs":"hax"},0],[{"TypeNs":"Failure"},0]],"Struct",true]"##, Some(did_1()), ) } fn did_653() -> ExplicitDefId { deserialize( r##"["rust_primitives",[[{"ValueNs":"dummy_hax_concrete_ident_wrapper"},0],[{"TypeNs":"Foo"},0]],"Struct",true]"##, Some(did_291()), ) } fn did_654() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"HashAlgorithm"},0],[{"TypeNs":"Sha256"},0]],"Variant",true]"##, Some(did_20()), ) } fn did_655() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"HMACAlgorithm"},0],[{"TypeNs":"Sha256"},0]],"Variant",true]"##, Some(did_63()), ) } fn did_656() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"DHGroup"},0],[{"TypeNs":"X25519"},0]],"Variant",true]"##, Some(did_65()), ) } fn did_657() -> ExplicitDefId { deserialize( r##"["hax_lib_protocol",[[{"TypeNs":"crypto"},0],[{"TypeNs":"AEADAlgorithm"},0],[{"TypeNs":"Chacha20Poly1305"},0]],"Variant",true]"##, Some(did_84()), ) } fn did_658() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"option"},0],[{"TypeNs":"Option"},0],[{"TypeNs":"None"},0]],"Variant",true]"##, Some(did_164()), ) } fn did_659() -> ExplicitDefId { deserialize( r##"["core",[[{"TypeNs":"panicking"},0],[{"TypeNs":"AssertKind"},0],[{"TypeNs":"Eq"},0]],"Variant",true]"##, Some(did_162()), ) } [ did_0().into_global_id_inner(), did_1().into_global_id_inner(), did_2().into_global_id_inner(), did_3().into_global_id_inner(), did_4().into_global_id_inner(), did_5().into_global_id_inner(), did_6().into_global_id_inner(), did_7().into_global_id_inner(), did_8().into_global_id_inner(), did_9().into_global_id_inner(), did_10().into_global_id_inner(), did_11().into_global_id_inner(), did_12().into_global_id_inner(), 
did_13().into_global_id_inner(), did_14().into_global_id_inner(), did_15().into_global_id_inner(), did_16().into_global_id_inner(), did_17().into_global_id_inner(), did_18().into_global_id_inner(), did_19().into_global_id_inner(), did_20().into_global_id_inner(), did_21().into_global_id_inner(), did_22().into_global_id_inner(), did_23().into_global_id_inner(), did_24().into_global_id_inner(), did_25().into_global_id_inner(), did_26().into_global_id_inner(), did_27().into_global_id_inner(), did_28().into_global_id_inner(), did_29().into_global_id_inner(), did_30().into_global_id_inner(), did_31().into_global_id_inner(), did_32().into_global_id_inner(), did_33().into_global_id_inner(), did_34().into_global_id_inner(), did_35().into_global_id_inner(), did_36().into_global_id_inner(), did_37().into_global_id_inner(), did_38().into_global_id_inner(), did_39().into_global_id_inner(), did_40().into_global_id_inner(), did_41().into_global_id_inner(), did_42().into_global_id_inner(), did_43().into_global_id_inner(), did_44().into_global_id_inner(), did_45().into_global_id_inner(), did_46().into_global_id_inner(), did_47().into_global_id_inner(), did_48().into_global_id_inner(), did_49().into_global_id_inner(), did_50().into_global_id_inner(), did_51().into_global_id_inner(), did_52().into_global_id_inner(), did_53().into_global_id_inner(), did_54().into_global_id_inner(), did_55().into_global_id_inner(), did_56().into_global_id_inner(), did_57().into_global_id_inner(), did_58().into_global_id_inner(), did_59().into_global_id_inner(), did_60().into_global_id_inner(), did_61().into_global_id_inner(), did_62().into_global_id_inner(), did_63().into_global_id_inner(), did_64().into_global_id_inner(), did_65().into_global_id_inner(), did_66().into_global_id_inner(), did_67().into_global_id_inner(), did_68().into_global_id_inner(), did_69().into_global_id_inner(), did_70().into_global_id_inner(), did_71().into_global_id_inner(), did_72().into_global_id_inner(), 
did_73().into_global_id_inner(), did_74().into_global_id_inner(), did_75().into_global_id_inner(), did_76().into_global_id_inner(), did_77().into_global_id_inner(), did_78().into_global_id_inner(), did_79().into_global_id_inner(), did_80().into_global_id_inner(), did_81().into_global_id_inner(), did_82().into_global_id_inner(), did_83().into_global_id_inner(), did_84().into_global_id_inner(), did_85().into_global_id_inner(), did_86().into_global_id_inner(), did_87().into_global_id_inner(), did_88().into_global_id_inner(), did_89().into_global_id_inner(), did_90().into_global_id_inner(), did_91().into_global_id_inner(), did_92().into_global_id_inner(), did_93().into_global_id_inner(), did_94().into_global_id_inner(), did_95().into_global_id_inner(), did_96().into_global_id_inner(), did_97().into_global_id_inner(), did_98().into_global_id_inner(), did_99().into_global_id_inner(), did_100().into_global_id_inner(), did_101().into_global_id_inner(), did_102().into_global_id_inner(), did_103().into_global_id_inner(), did_104().into_global_id_inner(), did_105().into_global_id_inner(), did_106().into_global_id_inner(), did_107().into_global_id_inner(), did_108().into_global_id_inner(), did_109().into_global_id_inner(), did_110().into_global_id_inner(), did_111().into_global_id_inner(), did_112().into_global_id_inner(), did_113().into_global_id_inner(), did_114().into_global_id_inner(), did_115().into_global_id_inner(), did_116().into_global_id_inner(), did_117().into_global_id_inner(), did_118().into_global_id_inner(), did_119().into_global_id_inner(), did_120().into_global_id_inner(), did_121().into_global_id_inner(), did_122().into_global_id_inner(), did_123().into_global_id_inner(), did_124().into_global_id_inner(), did_125().into_global_id_inner(), did_126().into_global_id_inner(), did_127().into_global_id_inner(), did_128().into_global_id_inner(), did_129().into_global_id_inner(), did_130().into_global_id_inner(), did_131().into_global_id_inner(), 
did_132().into_global_id_inner(), did_133().into_global_id_inner(), did_134().into_global_id_inner(), did_135().into_global_id_inner(), did_136().into_global_id_inner(), did_137().into_global_id_inner(), did_138().into_global_id_inner(), did_139().into_global_id_inner(), did_140().into_global_id_inner(), did_141().into_global_id_inner(), did_142().into_global_id_inner(), did_143().into_global_id_inner(), did_144().into_global_id_inner(), did_145().into_global_id_inner(), did_146().into_global_id_inner(), did_147().into_global_id_inner(), did_148().into_global_id_inner(), did_149().into_global_id_inner(), did_150().into_global_id_inner(), did_151().into_global_id_inner(), did_152().into_global_id_inner(), did_153().into_global_id_inner(), did_154().into_global_id_inner(), did_155().into_global_id_inner(), did_156().into_global_id_inner(), did_157().into_global_id_inner(), did_158().into_global_id_inner(), did_159().into_global_id_inner(), did_160().into_global_id_inner(), did_161().into_global_id_inner(), did_162().into_global_id_inner(), did_163().into_global_id_inner(), did_164().into_global_id_inner(), did_165().into_global_id_inner(), did_166().into_global_id_inner(), did_167().into_global_id_inner(), did_168().into_global_id_inner(), did_169().into_global_id_inner(), did_170().into_global_id_inner(), did_171().into_global_id_inner(), did_172().into_global_id_inner(), did_173().into_global_id_inner(), did_174().into_global_id_inner(), did_175().into_global_id_inner(), did_176().into_global_id_inner(), did_177().into_global_id_inner(), did_178().into_global_id_inner(), did_179().into_global_id_inner(), did_180().into_global_id_inner(), did_181().into_global_id_inner(), did_182().into_global_id_inner(), did_183().into_global_id_inner(), did_184().into_global_id_inner(), did_185().into_global_id_inner(), did_186().into_global_id_inner(), did_187().into_global_id_inner(), did_188().into_global_id_inner(), did_189().into_global_id_inner(), 
did_190().into_global_id_inner(), did_191().into_global_id_inner(), did_192().into_global_id_inner(), did_193().into_global_id_inner(), did_194().into_global_id_inner(), did_195().into_global_id_inner(), did_196().into_global_id_inner(), did_197().into_global_id_inner(), did_198().into_global_id_inner(), did_199().into_global_id_inner(), did_200().into_global_id_inner(), did_201().into_global_id_inner(), did_202().into_global_id_inner(), did_203().into_global_id_inner(), did_204().into_global_id_inner(), did_205().into_global_id_inner(), did_206().into_global_id_inner(), did_207().into_global_id_inner(), did_208().into_global_id_inner(), did_209().into_global_id_inner(), did_210().into_global_id_inner(), did_211().into_global_id_inner(), did_212().into_global_id_inner(), did_213().into_global_id_inner(), did_214().into_global_id_inner(), did_215().into_global_id_inner(), did_216().into_global_id_inner(), did_217().into_global_id_inner(), did_218().into_global_id_inner(), did_219().into_global_id_inner(), did_220().into_global_id_inner(), did_221().into_global_id_inner(), did_222().into_global_id_inner(), did_223().into_global_id_inner(), did_224().into_global_id_inner(), did_225().into_global_id_inner(), did_226().into_global_id_inner(), did_227().into_global_id_inner(), did_228().into_global_id_inner(), did_229().into_global_id_inner(), did_230().into_global_id_inner(), did_231().into_global_id_inner(), did_232().into_global_id_inner(), did_233().into_global_id_inner(), did_234().into_global_id_inner(), did_235().into_global_id_inner(), did_236().into_global_id_inner(), did_237().into_global_id_inner(), did_238().into_global_id_inner(), did_239().into_global_id_inner(), did_240().into_global_id_inner(), did_241().into_global_id_inner(), did_242().into_global_id_inner(), did_243().into_global_id_inner(), did_244().into_global_id_inner(), did_245().into_global_id_inner(), did_246().into_global_id_inner(), did_247().into_global_id_inner(), 
did_248().into_global_id_inner(), did_249().into_global_id_inner(), did_250().into_global_id_inner(), did_251().into_global_id_inner(), did_252().into_global_id_inner(), did_253().into_global_id_inner(), did_254().into_global_id_inner(), did_255().into_global_id_inner(), did_256().into_global_id_inner(), did_257().into_global_id_inner(), did_258().into_global_id_inner(), did_259().into_global_id_inner(), did_260().into_global_id_inner(), did_261().into_global_id_inner(), did_262().into_global_id_inner(), did_263().into_global_id_inner(), did_264().into_global_id_inner(), did_265().into_global_id_inner(), did_266().into_global_id_inner(), did_267().into_global_id_inner(), did_268().into_global_id_inner(), did_269().into_global_id_inner(), did_270().into_global_id_inner(), did_271().into_global_id_inner(), did_272().into_global_id_inner(), did_273().into_global_id_inner(), did_274().into_global_id_inner(), did_275().into_global_id_inner(), did_276().into_global_id_inner(), did_277().into_global_id_inner(), did_278().into_global_id_inner(), did_279().into_global_id_inner(), did_280().into_global_id_inner(), did_281().into_global_id_inner(), did_282().into_global_id_inner(), did_283().into_global_id_inner(), did_284().into_global_id_inner(), did_285().into_global_id_inner(), did_286().into_global_id_inner(), did_287().into_global_id_inner(), did_288().into_global_id_inner(), did_289().into_global_id_inner(), did_290().into_global_id_inner(), did_291().into_global_id_inner(), did_292().into_global_id_inner(), did_293().into_global_id_inner(), did_294().into_global_id_inner(), did_295().into_global_id_inner(), did_296().into_global_id_inner(), did_297().into_global_id_inner(), did_298().into_global_id_inner(), did_299().into_global_id_inner(), did_300().into_global_id_inner(), did_301().into_global_id_inner(), did_302().into_global_id_inner(), did_303().into_global_id_inner(), did_304().into_global_id_inner(), did_305().into_global_id_inner(), 
did_306().into_global_id_inner(), did_307().into_global_id_inner(), did_308().into_global_id_inner(), did_309().into_global_id_inner(), did_310().into_global_id_inner(), did_311().into_global_id_inner(), did_312().into_global_id_inner(), did_313().into_global_id_inner(), did_314().into_global_id_inner(), did_315().into_global_id_inner(), did_316().into_global_id_inner(), did_317().into_global_id_inner(), did_318().into_global_id_inner(), did_319().into_global_id_inner(), did_320().into_global_id_inner(), did_321().into_global_id_inner(), did_322().into_global_id_inner(), did_323().into_global_id_inner(), did_324().into_global_id_inner(), did_325().into_global_id_inner(), did_326().into_global_id_inner(), did_327().into_global_id_inner(), did_328().into_global_id_inner(), did_329().into_global_id_inner(), did_330().into_global_id_inner(), did_331().into_global_id_inner(), did_332().into_global_id_inner(), did_333().into_global_id_inner(), did_334().into_global_id_inner(), did_335().into_global_id_inner(), did_336().into_global_id_inner(), did_337().into_global_id_inner(), did_338().into_global_id_inner(), did_339().into_global_id_inner(), did_340().into_global_id_inner(), did_341().into_global_id_inner(), did_342().into_global_id_inner(), did_343().into_global_id_inner(), did_344().into_global_id_inner(), did_345().into_global_id_inner(), did_346().into_global_id_inner(), did_347().into_global_id_inner(), did_348().into_global_id_inner(), did_349().into_global_id_inner(), did_350().into_global_id_inner(), did_351().into_global_id_inner(), did_352().into_global_id_inner(), did_353().into_global_id_inner(), did_354().into_global_id_inner(), did_355().into_global_id_inner(), did_356().into_global_id_inner(), did_357().into_global_id_inner(), did_358().into_global_id_inner(), did_359().into_global_id_inner(), did_360().into_global_id_inner(), did_361().into_global_id_inner(), did_362().into_global_id_inner(), did_363().into_global_id_inner(), 
did_364().into_global_id_inner(), did_365().into_global_id_inner(), did_366().into_global_id_inner(), did_367().into_global_id_inner(), did_368().into_global_id_inner(), did_369().into_global_id_inner(), did_370().into_global_id_inner(), did_371().into_global_id_inner(), did_372().into_global_id_inner(), did_373().into_global_id_inner(), did_374().into_global_id_inner(), did_375().into_global_id_inner(), did_376().into_global_id_inner(), did_377().into_global_id_inner(), did_378().into_global_id_inner(), did_379().into_global_id_inner(), did_380().into_global_id_inner(), did_381().into_global_id_inner(), did_382().into_global_id_inner(), did_383().into_global_id_inner(), did_384().into_global_id_inner(), did_385().into_global_id_inner(), did_386().into_global_id_inner(), did_387().into_global_id_inner(), did_388().into_global_id_inner(), did_389().into_global_id_inner(), did_390().into_global_id_inner(), did_391().into_global_id_inner(), did_392().into_global_id_inner(), did_393().into_global_id_inner(), did_394().into_global_id_inner(), did_395().into_global_id_inner(), did_396().into_global_id_inner(), did_397().into_global_id_inner(), did_398().into_global_id_inner(), did_399().into_global_id_inner(), did_400().into_global_id_inner(), did_401().into_global_id_inner(), did_402().into_global_id_inner(), did_403().into_global_id_inner(), did_404().into_global_id_inner(), did_405().into_global_id_inner(), did_406().into_global_id_inner(), did_407().into_global_id_inner(), did_408().into_global_id_inner(), did_409().into_global_id_inner(), did_410().into_global_id_inner(), did_411().into_global_id_inner(), did_412().into_global_id_inner(), did_413().into_global_id_inner(), did_414().into_global_id_inner(), did_415().into_global_id_inner(), did_416().into_global_id_inner(), did_417().into_global_id_inner(), did_418().into_global_id_inner(), did_419().into_global_id_inner(), did_420().into_global_id_inner(), did_421().into_global_id_inner(), 
did_422().into_global_id_inner(), did_423().into_global_id_inner(), did_424().into_global_id_inner(), did_425().into_global_id_inner(), did_426().into_global_id_inner(), did_427().into_global_id_inner(), did_428().into_global_id_inner(), did_429().into_global_id_inner(), did_430().into_global_id_inner(), did_431().into_global_id_inner(), did_432().into_global_id_inner(), did_433().into_global_id_inner(), did_434().into_global_id_inner(), did_435().into_global_id_inner(), did_436().into_global_id_inner(), did_437().into_global_id_inner(), did_438().into_global_id_inner(), did_439().into_global_id_inner(), did_440().into_global_id_inner(), did_441().into_global_id_inner(), did_442().into_global_id_inner(), did_443().into_global_id_inner(), did_444().into_global_id_inner(), did_445().into_global_id_inner(), did_446().into_global_id_inner(), did_447().into_global_id_inner(), did_448().into_global_id_inner(), did_449().into_global_id_inner(), did_450().into_global_id_inner(), did_451().into_global_id_inner(), did_452().into_global_id_inner(), did_453().into_global_id_inner(), did_454().into_global_id_inner(), did_455().into_global_id_inner(), did_456().into_global_id_inner(), did_457().into_global_id_inner(), did_458().into_global_id_inner(), did_459().into_global_id_inner(), did_460().into_global_id_inner(), did_461().into_global_id_inner(), did_462().into_global_id_inner(), did_463().into_global_id_inner(), did_464().into_global_id_inner(), did_465().into_global_id_inner(), did_466().into_global_id_inner(), did_467().into_global_id_inner(), did_468().into_global_id_inner(), did_469().into_global_id_inner(), did_470().into_global_id_inner(), did_471().into_global_id_inner(), did_472().into_global_id_inner(), did_473().into_global_id_inner(), did_474().into_global_id_inner(), did_475().into_global_id_inner(), did_476().into_global_id_inner(), did_477().into_global_id_inner(), did_478().into_global_id_inner(), did_479().into_global_id_inner(), 
did_480().into_global_id_inner(), did_481().into_global_id_inner(), did_482().into_global_id_inner(), did_483().into_global_id_inner(), did_484().into_global_id_inner(), did_485().into_global_id_inner(), did_486().into_global_id_inner(), did_487().into_global_id_inner(), did_488().into_global_id_inner(), did_489().into_global_id_inner(), did_490().into_global_id_inner(), did_491().into_global_id_inner(), did_492().into_global_id_inner(), did_493().into_global_id_inner(), did_494().into_global_id_inner(), did_495().into_global_id_inner(), did_496().into_global_id_inner(), did_497().into_global_id_inner(), did_498().into_global_id_inner(), did_499().into_global_id_inner(), did_500().into_global_id_inner(), did_501().into_global_id_inner(), did_502().into_global_id_inner(), did_503().into_global_id_inner(), did_504().into_global_id_inner(), did_505().into_global_id_inner(), did_506().into_global_id_inner(), did_507().into_global_id_inner(), did_508().into_global_id_inner(), did_509().into_global_id_inner(), did_510().into_global_id_inner(), did_511().into_global_id_inner(), did_512().into_global_id_inner(), did_513().into_global_id_inner(), did_514().into_global_id_inner(), did_515().into_global_id_inner(), did_516().into_global_id_inner(), did_517().into_global_id_inner(), did_518().into_global_id_inner(), did_519().into_global_id_inner(), did_520().into_global_id_inner(), did_521().into_global_id_inner(), did_522().into_global_id_inner(), did_523().into_global_id_inner(), did_524().into_global_id_inner(), did_525().into_global_id_inner(), did_526().into_global_id_inner(), did_527().into_global_id_inner(), did_528().into_global_id_inner(), did_529().into_global_id_inner(), did_530().into_global_id_inner(), did_531().into_global_id_inner(), did_532().into_global_id_inner(), did_533().into_global_id_inner(), did_534().into_global_id_inner(), did_535().into_global_id_inner(), did_536().into_global_id_inner(), did_537().into_global_id_inner(), 
did_538().into_global_id_inner(), did_539().into_global_id_inner(), did_540().into_global_id_inner(), did_541().into_global_id_inner(), did_542().into_global_id_inner(), did_543().into_global_id_inner(), did_544().into_global_id_inner(), did_545().into_global_id_inner(), did_546().into_global_id_inner(), did_547().into_global_id_inner(), did_548().into_global_id_inner(), did_549().into_global_id_inner(), did_550().into_global_id_inner(), did_551().into_global_id_inner(), did_552().into_global_id_inner(), did_553().into_global_id_inner(), did_554().into_global_id_inner(), did_555().into_global_id_inner(), did_556().into_global_id_inner(), did_557().into_global_id_inner(), did_558().into_global_id_inner(), did_559().into_global_id_inner(), did_560().into_global_id_inner(), did_561().into_global_id_inner(), did_562().into_global_id_inner(), did_563().into_global_id_inner(), did_564().into_global_id_inner(), did_565().into_global_id_inner(), did_566().into_global_id_inner(), did_567().into_global_id_inner(), did_568().into_global_id_inner(), did_569().into_global_id_inner(), did_570().into_global_id_inner(), did_571().into_global_id_inner(), did_572().into_global_id_inner(), did_573().into_global_id_inner(), did_574().into_global_id_inner(), did_575().into_global_id_inner(), did_576().into_global_id_inner(), did_577().into_global_id_inner(), did_578().into_global_id_inner(), did_579().into_global_id_inner(), did_580().into_global_id_inner(), did_581().into_global_id_inner(), did_582().into_global_id_inner(), did_583().into_global_id_inner(), did_584().into_global_id_inner(), did_585().into_global_id_inner(), did_586().into_global_id_inner(), did_587().into_global_id_inner(), did_588().into_global_id_inner(), did_589().into_global_id_inner(), did_590().into_global_id_inner(), did_591().into_global_id_inner(), did_592().into_global_id_inner(), did_593().into_global_id_inner(), did_594().into_global_id_inner(), did_595().into_global_id_inner(), 
did_596().into_global_id_inner(), did_597().into_global_id_inner(), did_598().into_global_id_inner(), did_599().into_global_id_inner(), did_600().into_global_id_inner(), did_601().into_global_id_inner(), did_602().into_global_id_inner(), did_603().into_global_id_inner(), did_604().into_global_id_inner(), did_605().into_global_id_inner(), did_606().into_global_id_inner(), did_607().into_global_id_inner(), did_608().into_global_id_inner(), did_609().into_global_id_inner(), did_610().into_global_id_inner(), did_611().into_global_id_inner(), did_612().into_global_id_inner(), did_613().into_global_id_inner(), did_614().into_global_id_inner(), did_615().into_global_id_inner(), did_616().into_global_id_inner(), did_617().into_global_id_inner(), did_618().into_global_id_inner(), did_619().into_global_id_inner(), did_620().into_global_id_inner(), did_621().into_global_id_inner(), did_622().into_global_id_inner(), did_623().into_global_id_inner(), did_624().into_global_id_inner(), did_625().into_global_id_inner(), did_626().into_global_id_inner(), did_627().into_global_id_inner(), did_628().into_global_id_inner(), did_629().into_global_id_inner(), did_630().into_global_id_inner(), did_631().into_global_id_inner(), did_632().into_global_id_inner(), did_633().into_global_id_inner(), did_634().into_global_id_inner(), did_635().into_global_id_inner(), did_636().into_global_id_inner(), did_637().into_global_id_inner(), did_638().into_global_id_inner(), did_639().into_global_id_inner(), did_640().into_global_id_inner(), did_641().into_global_id_inner(), did_642().into_global_id_inner(), did_643().into_global_id_inner(), did_644().into_global_id_inner(), did_645().into_global_id_inner(), did_646().into_global_id_inner(), did_647().into_global_id_inner(), did_648().into_global_id_inner(), did_649().into_global_id_inner(), did_650().into_global_id_inner(), did_651().into_global_id_inner(), did_652().into_global_id_inner(), did_653().into_global_id_inner(), 
// NOTE(review): machine-generated file (hax global-id name tables). The
// comments below are review annotations only; every code token is unchanged.
did_654().into_global_id_inner(), did_655().into_global_id_inner(), did_656().into_global_id_inner(), did_657().into_global_id_inner(), did_658().into_global_id_inner(), did_659().into_global_id_inner(), ] }) };
// All 660 pre-interned global-id handles: the second component of the lazily
// built (interning table, ids) pair `TABLE_AND_INTERNED_GLOBAL_IDS` above.
static INTERNED_GLOBAL_IDS: [crate::interning::Interned< crate::ast::identifiers::global_id::GlobalIdInner, >; 660] = TABLE_AND_INTERNED_GLOBAL_IDS.1;
// Hooks `GlobalIdInner` into the interning machinery by exposing the shared
// table (first component of the pair above) behind its Mutex.
// NOTE(review): `std::sync::Mutex>` is missing its generic argument here —
// presumably an extraction artifact; confirm against the generator output.
impl crate::interning::Internable for crate::ast::identifiers::global_id::GlobalIdInner { fn interning_table() -> &'static std::sync::Mutex> { &TABLE_AND_INTERNED_GLOBAL_IDS.0 } }
use super::root;
// The modules below mirror the module hierarchy of `::alloc`; each `pub const`
// is a pre-interned `GlobalId` (an index into `INTERNED_GLOBAL_IDS`) naming
// the item described by its generated `#[doc]` string.
pub mod alloc { #![doc = r##"This is the module [`::alloc`]."##] use super::root; pub mod alloc { #![doc = r##"This is the module [`::alloc::alloc`]."##] use super::root; #[doc = r##"This is the struct [`::alloc::alloc::Global`]."##] pub const Global: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[4]); #[doc = r##"This is an impl block."##] pub const Impl__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[15]); #[doc = r##"This is an impl block."##] pub const Impl__3: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[16]); } pub mod boxed { #![doc = r##"This is the module [`::alloc::boxed`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::alloc::boxed::Impl::new`]."##] pub const new: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[180]); } #[doc = r##"This is the struct [`::alloc::boxed::Box`]."##] pub const Box: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[34]); #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[179]); } pub mod slice { #![doc = r##"This is the module [`::alloc::slice`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::alloc::slice::Impl::concat`]."##] pub const concat: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[44]); #[doc = r##"This is the associated function [`::alloc::slice::Impl::into_vec`]."##] pub const into_vec: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[38]); #[doc = r##"This is the associated function [`::alloc::slice::Impl::to_vec`]."##] pub const to_vec: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[45]); } #[doc = r##"This is the trait [`::alloc::slice::Concat`]."##] pub const Concat: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[39]); #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[37]); #[doc = r##"This is an impl block."##] pub const Impl__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[43]); } pub mod string { #![doc = r##"This is the module [`::alloc::string`]."##] use super::root; #[doc = r##"This is the struct [`::alloc::string::String`]."##] pub const String: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[182]); } pub mod vec { #![doc = r##"This is the module [`::alloc::vec`]."##] use super::root; pub mod Impl__1 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function 
[`::alloc::vec::Impl__1::as_slice`]."##] pub const as_slice: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[620]); #[doc = r##"This is the associated function [`::alloc::vec::Impl__1::truncate`]."##] pub const truncate: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[29]); } pub mod Impl__2 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::alloc::vec::Impl__2::extend_from_slice`]."##] pub const extend_from_slice: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[31]); } #[doc = r##"This is an impl block."##] pub const Impl__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[28]); #[doc = r##"This is an impl block."##] pub const Impl__11: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[17]); #[doc = r##"This is an impl block."##] pub const Impl__13: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[59]); #[doc = r##"This is an impl block."##] pub const Impl__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[30]); #[doc = r##"This is an impl block."##] pub const Impl__8: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[26]); #[doc = r##"This is the struct [`::alloc::vec::Vec`]."##] pub const Vec: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[6]); #[doc = r##"This is the function [`::alloc::vec::from_elem`]."##] pub const from_elem: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[91]); } #[doc = r##"This is the module [`::alloc::alloc`]."##] pub const alloc: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[3]); #[doc = r##"This is the module [`::alloc::boxed`]."##] pub const boxed: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[33]); #[doc = r##"This is the module [`::alloc::slice`]."##] pub const slice: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[36]); #[doc = r##"This is the module [`::alloc::string`]."##] pub const string: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[181]); #[doc = r##"This is the module [`::alloc::vec`]."##] pub const vec: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[5]); }
// Same scheme for `::core` items (this module continues past the excerpt).
pub mod core { #![doc = r##"This is the module [`::core`]."##] use super::root; pub mod alloc { #![doc = r##"This is the module [`::core::alloc`]."##] use super::root; #[doc = r##"This is the trait [`::core::alloc::Allocator`]."##] pub const Allocator: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[14]); } pub mod array { #![doc = r##"This is the module [`::core::array`]."##] use super::root; pub mod iter { #![doc = r##"This is the module [`::core::array::iter`]."##] use super::root; #[doc = r##"This is an impl block."##] pub const Impl__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[144]); #[doc = r##"This is the struct [`::core::array::iter::IntoIter`]."##] pub const IntoIter: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[138]); } #[doc = r##"This is the module [`::core::array::iter`]."##] pub const iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[137]); } pub mod borrow { #![doc = r##"This is the module [`::core::borrow`]."##] use super::root; #[doc = r##"This is the trait [`::core::borrow::Borrow`]."##] pub const Borrow: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[41]); #[doc = r##"This is an impl block."##] pub const Impl__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[42]); } pub mod clone { #![doc = r##"This is the module [`::core::clone`]."##] use super::root; pub mod Clone { #![doc = r##"This is the trait [`::core::clone::Clone`]."##] use super::root; #[doc = r##"This is the associated function [`::core::clone::Clone::clone`]."##] pub const clone: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[10]); } pub mod impls { #![doc = r##"This is the module [`::core::clone::impls`]."##] use super::root; #[doc = r##"This is an impl block."##] pub const Impl__6: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[12]); } #[doc = r##"This is the trait [`::core::clone::Clone`]."##] pub const Clone: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[9]); #[doc = r##"This is the module [`::core::clone::impls`]."##] pub const impls: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[11]); } pub mod cmp { #![doc = r##"This is the module [`::core::cmp`]."##] use super::root; pub mod PartialEq { #![doc = r##"This is the trait 
[`::core::cmp::PartialEq`]."##] use super::root; #[doc = r##"This is the associated function [`::core::cmp::PartialEq::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[157]); #[doc = r##"This is the associated function [`::core::cmp::PartialEq::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[273]); } pub mod PartialOrd { #![doc = r##"This is the trait [`::core::cmp::PartialOrd`]."##] use super::root; #[doc = r##"This is the associated function [`::core::cmp::PartialOrd::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[272]); #[doc = r##"This is the associated function [`::core::cmp::PartialOrd::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[269]); #[doc = r##"This is the associated function [`::core::cmp::PartialOrd::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[271]); #[doc = r##"This is the associated function [`::core::cmp::PartialOrd::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[268]); } #[doc = r##"This is the trait [`::core::cmp::Eq`]."##] pub const Eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[608]); #[doc = r##"This is the trait [`::core::cmp::Ord`]."##] pub const Ord: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[609]); #[doc = r##"This is the trait [`::core::cmp::PartialEq`]."##] pub const PartialEq: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[156]); #[doc = r##"This is the trait [`::core::cmp::PartialOrd`]."##] pub const PartialOrd: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[267]); } pub mod convert { #![doc = r##"This is the module [`::core::convert`]."##] use super::root; pub mod From { #![doc = r##"This is the trait [`::core::convert::From`]."##] use super::root; #[doc = r##"This is the associated function [`::core::convert::From::from`]."##] pub const from: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[146]); } pub mod Into { #![doc = r##"This is the trait [`::core::convert::Into`]."##] use super::root; #[doc = r##"This is the associated function [`::core::convert::Into::into`]."##] pub const into: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[131]); } pub mod num { #![doc = r##"This is the module [`::core::convert::num`]."##] use super::root; #[doc = r##"This is an impl block."##] pub const Impl__64: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[134]); #[doc = r##"This is an impl block."##] pub const Impl__88: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[187]); } #[doc = r##"This is the trait [`::core::convert::From`]."##] pub const From: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[132]); #[doc = r##"This is an impl block."##] pub const Impl__3: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[135]); #[doc = r##"This is an impl block."##] pub const Impl__4: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[199]); #[doc = r##"This is the enum [`::core::convert::Infallible`]."##] pub const Infallible: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[183]); #[doc = r##"This is the trait [`::core::convert::Into`]."##] pub const Into: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[130]); #[doc = r##"This is the module [`::core::convert::num`]."##] pub const num: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[133]); } pub mod fmt { #![doc = r##"This is the module [`::core::fmt`]."##] use super::root; pub mod num { #![doc = r##"This is the module [`::core::fmt::num`]."##] use super::root; #[doc = r##"This is an impl block."##] pub const Impl__54: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[606]); } #[doc = r##"This is the struct [`::core::fmt::Arguments`]."##] pub const Arguments: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[166]); #[doc = r##"This is the trait [`::core::fmt::Debug`]."##] pub const Debug: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[167]); #[doc = r##"This is the module [`::core::fmt::num`]."##] pub const num: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[168]); } pub mod iter { #![doc = r##"This is the module [`::core::iter`]."##] use super::root; pub mod adapters { #![doc = r##"This is the module [`::core::iter::adapters`]."##] use super::root; pub mod enumerate { #![doc = r##"This is the module [`::core::iter::adapters::enumerate`]."##] use super::root; #[doc = r##"This is the struct 
[`::core::iter::adapters::enumerate::Enumerate`]."##] pub const Enumerate: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[232]); } pub mod step_by { #![doc = r##"This is the module [`::core::iter::adapters::step_by`]."##] use super::root; #[doc = r##"This is the struct [`::core::iter::adapters::step_by::StepBy`]."##] pub const StepBy: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[235]); } #[doc = r##"This is the module [`::core::iter::adapters::enumerate`]."##] pub const enumerate: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[231]); #[doc = r##"This is the module [`::core::iter::adapters::step_by`]."##] pub const step_by: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[234]); } pub mod traits { #![doc = r##"This is the module [`::core::iter::traits`]."##] use super::root; pub mod collect { #![doc = r##"This is the module [`::core::iter::traits::collect`]."##] use super::root; pub mod IntoIterator { #![doc = r##"This is the trait [`::core::iter::traits::collect::IntoIterator`]."##] use super::root; #[doc = r##"This is the associated function [`::core::iter::traits::collect::IntoIterator::into_iter`]."##] pub const into_iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[143], ); } #[doc = r##"This is the trait [`::core::iter::traits::collect::IntoIterator`]."##] pub const IntoIterator: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[142]); } pub mod iterator { #![doc = r##"This is the module [`::core::iter::traits::iterator`]."##] use super::root; pub mod Iterator { #![doc = r##"This is the trait [`::core::iter::traits::iterator::Iterator`]."##] 
use super::root; #[doc = r##"This is the associated type [`::core::iter::traits::iterator::Iterator::Item`]."##] pub const Item: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[589], ); #[doc = r##"This is the associated function [`::core::iter::traits::iterator::Iterator::enumerate`]."##] pub const enumerate: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[233], ); #[doc = r##"This is the associated function [`::core::iter::traits::iterator::Iterator::fold`]."##] pub const fold: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[197], ); #[doc = r##"This is the associated function [`::core::iter::traits::iterator::Iterator::next`]."##] pub const next: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[192], ); #[doc = r##"This is the associated function [`::core::iter::traits::iterator::Iterator::step_by`]."##] pub const step_by: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[236], ); } #[doc = r##"This is the trait [`::core::iter::traits::iterator::Iterator`]."##] pub const Iterator: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[191]); } #[doc = r##"This is the module [`::core::iter::traits::collect`]."##] pub const collect: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[141]); #[doc = r##"This is the module [`::core::iter::traits::iterator`]."##] pub const iterator: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[190]); } #[doc = r##"This is the module [`::core::iter::adapters`]."##] pub const adapters: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[230]); #[doc = r##"This is the module [`::core::iter::traits`]."##] pub const traits: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[140]); } pub mod marker { #![doc = r##"This is the module [`::core::marker`]."##] use super::root; #[doc = r##"This is the trait [`::core::marker::Copy`]."##] pub const Copy: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[275]); #[doc = r##"This is the trait [`::core::marker::Destruct`]."##] pub const Destruct: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[607]); #[doc = r##"This is the trait [`::core::marker::StructuralPartialEq`]."##] pub const StructuralPartialEq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[610]); } pub mod num { #![doc = r##"This is the module [`::core::num`]."##] use super::root; pub mod Impl__9 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::num::Impl__9::to_le_bytes`]."##] pub const to_le_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[62]); } #[doc = r##"This is an impl block."##] pub const Impl__9: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[61]); } pub mod ops { #![doc = r##"This is the module [`::core::ops`]."##] use super::root; pub mod arith { #![doc = r##"This is the module [`::core::ops::arith`]."##] use super::root; pub mod Add { #![doc = r##"This is the trait [`::core::ops::arith::Add`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Add::Output`]."##] pub 
const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[590]); #[doc = r##"This is the associated function [`::core::ops::arith::Add::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[195]); } pub mod Div { #![doc = r##"This is the trait [`::core::ops::arith::Div`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Div::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[593]); #[doc = r##"This is the associated function [`::core::ops::arith::Div::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[252]); } pub mod Mul { #![doc = r##"This is the trait [`::core::ops::arith::Mul`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Mul::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[592]); #[doc = r##"This is the associated function [`::core::ops::arith::Mul::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[254]); } pub mod Neg { #![doc = r##"This is the trait [`::core::ops::arith::Neg`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Neg::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[600]); #[doc = r##"This is the associated function [`::core::ops::arith::Neg::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[258]); } pub mod 
Rem { #![doc = r##"This is the trait [`::core::ops::arith::Rem`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Rem::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[594]); #[doc = r##"This is the associated function [`::core::ops::arith::Rem::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[260]); } pub mod Sub { #![doc = r##"This is the trait [`::core::ops::arith::Sub`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::arith::Sub::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[591]); #[doc = r##"This is the associated function [`::core::ops::arith::Sub::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[256]); } #[doc = r##"This is the trait [`::core::ops::arith::Add`]."##] pub const Add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[194]); #[doc = r##"This is the trait [`::core::ops::arith::Div`]."##] pub const Div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[251]); #[doc = r##"This is the trait [`::core::ops::arith::Mul`]."##] pub const Mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[253]); #[doc = r##"This is the trait [`::core::ops::arith::Neg`]."##] pub const Neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[257]); #[doc = r##"This is the trait [`::core::ops::arith::Rem`]."##] pub const Rem: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[259]); #[doc = r##"This is the trait [`::core::ops::arith::Sub`]."##] pub const Sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[255]); } pub mod bit { #![doc = r##"This is the module [`::core::ops::bit`]."##] use super::root; pub mod BitAnd { #![doc = r##"This is the trait [`::core::ops::bit::BitAnd`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::BitAnd::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[596]); #[doc = r##"This is the associated function [`::core::ops::bit::BitAnd::bitand`]."##] pub const bitand: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[248]); } pub mod BitOr { #![doc = r##"This is the trait [`::core::ops::bit::BitOr`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::BitOr::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[597]); #[doc = r##"This is the associated function [`::core::ops::bit::BitOr::bitor`]."##] pub const bitor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[266]); } pub mod BitXor { #![doc = r##"This is the trait [`::core::ops::bit::BitXor`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::BitXor::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[595]); #[doc = r##"This is the associated function [`::core::ops::bit::BitXor::bitxor`]."##] pub const bitxor: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[250]); } pub mod Not { #![doc = r##"This is the trait [`::core::ops::bit::Not`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::Not::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[601]); #[doc = r##"This is the associated function [`::core::ops::bit::Not::not`]."##] pub const not: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[160]); } pub mod Shl { #![doc = r##"This is the trait [`::core::ops::bit::Shl`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::Shl::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[598]); #[doc = r##"This is the associated function [`::core::ops::bit::Shl::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[262]); } pub mod Shr { #![doc = r##"This is the trait [`::core::ops::bit::Shr`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::bit::Shr::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[599]); #[doc = r##"This is the associated function [`::core::ops::bit::Shr::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[264]); } #[doc = r##"This is the trait [`::core::ops::bit::BitAnd`]."##] pub const BitAnd: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[247]); #[doc = r##"This is the trait [`::core::ops::bit::BitOr`]."##] pub const BitOr: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[265]); #[doc = r##"This is the trait [`::core::ops::bit::BitXor`]."##] pub const BitXor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[249]); #[doc = r##"This is the trait [`::core::ops::bit::Not`]."##] pub const Not: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[159]); #[doc = r##"This is the trait [`::core::ops::bit::Shl`]."##] pub const Shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[261]); #[doc = r##"This is the trait [`::core::ops::bit::Shr`]."##] pub const Shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[263]); } pub mod control_flow { #![doc = r##"This is the module [`::core::ops::control_flow`]."##] use super::root; pub mod ControlFlow { #![doc = r##"This is the enum [`::core::ops::control_flow::ControlFlow`]."##] use super::root; pub mod Break { use super::root; #[doc = r##"This is the variant [`::core::ops::control_flow::ControlFlow::Break::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[644], ); #[doc = r##"This is the field [`_0`] from ::core::ops::control_flow::ControlFlow::Break."##] pub const _0: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[122], ); } pub mod Continue { use super::root; #[doc = r##"This is the variant [`::core::ops::control_flow::ControlFlow::Continue::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[645], ); #[doc = r##"This is the 
field [`_0`] from ::core::ops::control_flow::ControlFlow::Continue."##] pub const _0: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId( root::INTERNED_GLOBAL_IDS[123], ); } } #[doc = r##"This is the enum [`::core::ops::control_flow::ControlFlow`]."##] pub const ControlFlow: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[121]); } pub mod deref { #![doc = r##"This is the module [`::core::ops::deref`]."##] use super::root; pub mod Deref { #![doc = r##"This is the trait [`::core::ops::deref::Deref`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::deref::Deref::Target`]."##] pub const Target: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[278]); #[doc = r##"This is the associated function [`::core::ops::deref::Deref::deref`]."##] pub const deref: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[25]); } pub mod DerefMut { #![doc = r##"This is the trait [`::core::ops::deref::DerefMut`]."##] use super::root; #[doc = r##"This is the associated function [`::core::ops::deref::DerefMut::deref_mut`]."##] pub const deref_mut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[280]); } #[doc = r##"This is the trait [`::core::ops::deref::Deref`]."##] pub const Deref: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[24]); #[doc = r##"This is the trait [`::core::ops::deref::DerefMut`]."##] pub const DerefMut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[279]); } pub mod function { #![doc = r##"This is the module [`::core::ops::function`]."##] use super::root; #[doc = r##"This is the trait 
[`::core::ops::function::Fn`]."##] pub const Fn: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[201]); #[doc = r##"This is the trait [`::core::ops::function::FnMut`]."##] pub const FnMut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[196]); #[doc = r##"This is the trait [`::core::ops::function::FnOnce`]."##] pub const FnOnce: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[152]); } pub mod index { #![doc = r##"This is the module [`::core::ops::index`]."##] use super::root; pub mod Index { #![doc = r##"This is the trait [`::core::ops::index::Index`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::index::Index::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[621]); #[doc = r##"This is the associated function [`::core::ops::index::Index::index`]."##] pub const index: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[55]); } pub mod IndexMut { #![doc = r##"This is the trait [`::core::ops::index::IndexMut`]."##] use super::root; #[doc = r##"This is the associated function [`::core::ops::index::IndexMut::index_mut`]."##] pub const index_mut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[623]); } #[doc = r##"This is the trait [`::core::ops::index::Index`]."##] pub const Index: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[54]); #[doc = r##"This is the trait [`::core::ops::index::IndexMut`]."##] pub const IndexMut: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[622]); } pub mod range { #![doc = r##"This is the module [`::core::ops::range`]."##] use super::root; pub mod Range { #![doc = r##"This is the struct [`::core::ops::range::Range`]."##] use super::root; #[doc = r##"This is the struct [`::core::ops::range::Range::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[642]); #[doc = r##"This is the field [`end`] from ::core::ops::range::Range."##] pub const end: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[52]); #[doc = r##"This is the field [`start`] from ::core::ops::range::Range."##] pub const start: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[51]); } pub mod RangeFrom { #![doc = r##"This is the struct [`::core::ops::range::RangeFrom`]."##] use super::root; #[doc = r##"This is the struct [`::core::ops::range::RangeFrom::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[648]); #[doc = r##"This is the field [`start`] from ::core::ops::range::RangeFrom."##] pub const start: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[128]); } pub mod RangeFull { #![doc = r##"This is the struct [`::core::ops::range::RangeFull`]."##] use super::root; #[doc = r##"This is the struct [`::core::ops::range::RangeFull::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[647]); } pub mod RangeTo { #![doc = r##"This is the struct [`::core::ops::range::RangeTo`]."##] use super::root; #[doc = r##"This is the struct 
[`::core::ops::range::RangeTo::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[646]); #[doc = r##"This is the field [`end`] from ::core::ops::range::RangeTo."##] pub const end: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[125]); } #[doc = r##"This is the struct [`::core::ops::range::Range`]."##] pub const Range: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[50]); #[doc = r##"This is the struct [`::core::ops::range::RangeFrom`]."##] pub const RangeFrom: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[127]); #[doc = r##"This is the struct [`::core::ops::range::RangeFull`]."##] pub const RangeFull: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[126]); #[doc = r##"This is the struct [`::core::ops::range::RangeTo`]."##] pub const RangeTo: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[124]); } pub mod try_trait { #![doc = r##"This is the module [`::core::ops::try_trait`]."##] use super::root; pub mod FromResidual { #![doc = r##"This is the trait [`::core::ops::try_trait::FromResidual`]."##] use super::root; #[doc = r##"This is the associated function [`::core::ops::try_trait::FromResidual::from_residual`]."##] pub const from_residual: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[186]); } pub mod Try { #![doc = r##"This is the trait [`::core::ops::try_trait::Try`]."##] use super::root; #[doc = r##"This is the associated type [`::core::ops::try_trait::Try::Output`]."##] pub const Output: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[602]); #[doc = r##"This is the associated type [`::core::ops::try_trait::Try::Residual`]."##] pub const Residual: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[276]); #[doc = r##"This is the associated function [`::core::ops::try_trait::Try::branch`]."##] pub const branch: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[238]); #[doc = r##"This is the associated function [`::core::ops::try_trait::Try::from_output`]."##] pub const from_output: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[277]); } #[doc = r##"This is the trait [`::core::ops::try_trait::FromResidual`]."##] pub const FromResidual: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[185]); #[doc = r##"This is the trait [`::core::ops::try_trait::Try`]."##] pub const Try: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[237]); } #[doc = r##"This is the module [`::core::ops::arith`]."##] pub const arith: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[193]); #[doc = r##"This is the module [`::core::ops::bit`]."##] pub const bit: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[158]); #[doc = r##"This is the module [`::core::ops::control_flow`]."##] pub const control_flow: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[120]); #[doc = r##"This is the module [`::core::ops::deref`]."##] pub const deref: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[23]); #[doc = r##"This is the module [`::core::ops::function`]."##] pub const function: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[151]); #[doc = r##"This is the module [`::core::ops::index`]."##] pub const index: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[53]); #[doc = r##"This is the module [`::core::ops::range`]."##] pub const range: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[49]); #[doc = r##"This is the module [`::core::ops::try_trait`]."##] pub const try_trait: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[184]); } pub mod option { #![doc = r##"This is the module [`::core::option`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::option::Impl::is_some`]."##] pub const is_some: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[177]); } pub mod Option { #![doc = r##"This is the enum [`::core::option::Option`]."##] use super::root; pub mod None { use super::root; #[doc = r##"This is the variant [`::core::option::Option::None::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[658]); } pub mod Some { use super::root; #[doc = r##"This is the variant [`::core::option::Option::Some::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[650]); #[doc = r##"This is the field [`_0`] from ::core::option::Option::Some."##] pub const _0: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[178]); } } #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[176]); #[doc = r##"This is the enum [`::core::option::Option`]."##] pub const Option: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[164]); } pub mod panicking { #![doc = r##"This is the module [`::core::panicking`]."##] use super::root; pub mod AssertKind { #![doc = r##"This is the enum [`::core::panicking::AssertKind`]."##] use super::root; pub mod Eq { use super::root; #[doc = r##"This is the variant [`::core::panicking::AssertKind::Eq::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[659]); } } #[doc = r##"This is the enum [`::core::panicking::AssertKind`]."##] pub const AssertKind: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[162]); #[doc = r##"This is the function [`::core::panicking::assert_failed`]."##] pub const assert_failed: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[169]); #[doc = r##"This is the function [`::core::panicking::panic`]."##] pub const panic: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[172]); } pub mod ptr { #![doc = r##"This is the module [`::core::ptr`]."##] use super::root; pub mod const_ptr { #![doc = r##"This is the module [`::core::ptr::const_ptr`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::ptr::const_ptr::Impl::offset`]."##] pub const 
offset: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[95]); } #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[94]); } #[doc = r##"This is the module [`::core::ptr::const_ptr`]."##] pub const const_ptr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[93]); } pub mod result { #![doc = r##"This is the module [`::core::result`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::result::Impl::map_err`]."##] pub const map_err: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[175]); } pub mod Result { #![doc = r##"This is the enum [`::core::result::Result`]."##] use super::root; pub mod Err { use super::root; #[doc = r##"This is the variant [`::core::result::Result::Err::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[649]); #[doc = r##"This is the field [`_0`] from ::core::result::Result::Err."##] pub const _0: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[173]); } pub mod Ok { use super::root; #[doc = r##"This is the variant [`::core::result::Result::Ok::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[651]); #[doc = r##"This is the field [`_0`] from ::core::result::Result::Ok."##] pub const _0: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[198]); } } #[doc = r##"This is an impl 
block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[174]); #[doc = r##"This is an impl block."##] pub const Impl__27: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[239]); #[doc = r##"This is an impl block."##] pub const Impl__28: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[188]); #[doc = r##"This is the enum [`::core::result::Result`]."##] pub const Result: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[77]); } pub mod slice { #![doc = r##"This is the module [`::core::slice`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::slice::Impl::chunks_exact`]."##] pub const chunks_exact: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[229]); #[doc = r##"This is the associated function [`::core::slice::Impl::iter`]."##] pub const iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[227]); #[doc = r##"This is the associated function [`::core::slice::Impl::len`]."##] pub const len: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[48]); } pub mod index { #![doc = r##"This is the module [`::core::slice::index`]."##] use super::root; #[doc = r##"This is an impl block."##] pub const Impl__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[189]); #[doc = r##"This is an impl block."##] pub const Impl__4: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[58]); #[doc = r##"This is the trait [`::core::slice::index::SliceIndex`]."##] pub const SliceIndex: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[57]); } pub mod iter { #![doc = r##"This is the module [`::core::slice::iter`]."##] use super::root; #[doc = r##"This is the struct [`::core::slice::iter::ChunksExact`]."##] pub const ChunksExact: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[228]); #[doc = r##"This is the struct [`::core::slice::iter::Iter`]."##] pub const Iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[226]); } #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[47]); #[doc = r##"This is the module [`::core::slice::index`]."##] pub const index: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[56]); #[doc = r##"This is the module [`::core::slice::iter`]."##] pub const iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[225]); } pub mod str { #![doc = r##"This is the module [`::core::str`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::core::str::Impl::as_ptr`]."##] pub const as_ptr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[99]); } #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[98]); } #[doc = r##"This is the module 
[`::core::alloc`]."##] pub const alloc: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[13]); #[doc = r##"This is the module [`::core::array`]."##] pub const array: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[136]); #[doc = r##"This is the module [`::core::borrow`]."##] pub const borrow: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[40]); #[doc = r##"This is the module [`::core::clone`]."##] pub const clone: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[8]); #[doc = r##"This is the module [`::core::cmp`]."##] pub const cmp: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[155]); #[doc = r##"This is the module [`::core::convert`]."##] pub const convert: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[129]); #[doc = r##"This is the module [`::core::fmt`]."##] pub const fmt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[165]); #[doc = r##"This is the module [`::core::iter`]."##] pub const iter: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[139]); #[doc = r##"This is the module [`::core::marker`]."##] pub const marker: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[274]); #[doc = r##"This is the module [`::core::num`]."##] pub const num: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[60]); #[doc = r##"This is the module [`::core::ops`]."##] pub const ops: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[22]); #[doc = r##"This is the module [`::core::option`]."##] pub const option: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[163]); #[doc = r##"This is the module [`::core::panicking`]."##] pub const panicking: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[161]); #[doc = r##"This is the module [`::core::ptr`]."##] pub const ptr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[92]); #[doc = r##"This is the module [`::core::result`]."##] pub const result: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[76]); #[doc = r##"This is the module [`::core::slice`]."##] pub const slice: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[46]); #[doc = r##"This is the module [`::core::str`]."##] pub const str: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[97]); } pub mod hax_lib { #![doc = r##"This is the module [`::hax_lib`]."##] use super::root; pub mod RefineAs { #![doc = r##"This is the trait [`::hax_lib::RefineAs`]."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::RefineAs::into_checked`]."##] pub const into_checked: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[241]); } pub mod Refinement { #![doc = r##"This is the trait [`::hax_lib::Refinement`]."##] use super::root; #[doc = r##"This is the associated type [`::hax_lib::Refinement::InnerType`]."##] pub const InnerType: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[244]); #[doc = r##"This is the associated function [`::hax_lib::Refinement::get`]."##] pub const get: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[243]); #[doc = r##"This is the associated function [`::hax_lib::Refinement::get_mut`]."##] pub const get_mut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[246]); #[doc = r##"This is the associated function [`::hax_lib::Refinement::new`]."##] pub const new: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[245]); } pub mod abstraction { #![doc = r##"This is the module [`::hax_lib::abstraction`]."##] use super::root; pub mod Abstraction { #![doc = r##"This is the trait [`::hax_lib::abstraction::Abstraction`]."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::abstraction::Abstraction::lift`]."##] pub const lift: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[118]); } pub mod Concretization { #![doc = r##"This is the trait [`::hax_lib::abstraction::Concretization`]."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::abstraction::Concretization::concretize`]."##] pub const concretize: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[108]); } #[doc = r##"This is the trait [`::hax_lib::abstraction::Abstraction`]."##] pub const Abstraction: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[117]); #[doc = r##"This is the trait [`::hax_lib::abstraction::Concretization`]."##] pub const Concretization: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[107]); } pub mod int { #![doc = r##"This is the module [`::hax_lib::int`]."##] use super::root; pub mod Impl__7 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::int::Impl__7::_unsafe_from_str`]."##] pub const _unsafe_from_str: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[111]); #[doc = r##"This is the associated function [`::hax_lib::int::Impl__7::pow2`]."##] pub const pow2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[113]); } pub mod ToInt { #![doc = r##"This is the trait [`::hax_lib::int::ToInt`]."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::int::ToInt::to_int`]."##] pub const to_int: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[115]); } #[doc = r##"This is an impl block."##] pub const Impl__16: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[119]); #[doc = r##"This is an impl block."##] pub const Impl__17: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[116]); #[doc = r##"This is an impl block."##] pub const Impl__44: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[109]); #[doc = r##"This is an impl block."##] pub const Impl__7: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[110]); #[doc = r##"This is an impl block."##] pub const Impl__9: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[112]); #[doc = r##"This is the struct 
[`::hax_lib::int::Int`]."##] pub const Int: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[105]); #[doc = r##"This is the trait [`::hax_lib::int::ToInt`]."##] pub const ToInt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[114]); } pub mod prop { #![doc = r##"This is the module [`::hax_lib::prop`]."##] use super::root; pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::and`]."##] pub const and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[213]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[210]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::from_bool`]."##] pub const from_bool: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[214]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::implies`]."##] pub const implies: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[208]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[209]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::not`]."##] pub const not: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[211]); #[doc = r##"This is the associated function [`::hax_lib::prop::Impl::or`]."##] pub const or: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[212]); } pub mod ToProp { #![doc = r##"This is the trait [`::hax_lib::prop::ToProp`]."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib::prop::ToProp::to_prop`]."##] pub const to_prop: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[205]); } pub mod constructors { #![doc = r##"This is the module [`::hax_lib::prop::constructors`]."##] use super::root; #[doc = r##"This is the function [`::hax_lib::prop::constructors::and`]."##] pub const and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[223]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[220]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::exists`]."##] pub const exists: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[216]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::forall`]."##] pub const forall: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[217]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::from_bool`]."##] pub const from_bool: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[224]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::implies`]."##] pub const implies: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[218]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::ne`]."##] pub const ne: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[219]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::not`]."##] pub const not: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[221]); #[doc = r##"This is the function [`::hax_lib::prop::constructors::or`]."##] pub const or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[222]); } #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[207]); #[doc = r##"This is an impl block."##] pub const Impl__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[206]); #[doc = r##"This is an impl block."##] pub const Impl__3: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[149]); #[doc = r##"This is the struct [`::hax_lib::prop::Prop`]."##] pub const Prop: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[148]); #[doc = r##"This is the trait [`::hax_lib::prop::ToProp`]."##] pub const ToProp: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[204]); #[doc = r##"This is the module [`::hax_lib::prop::constructors`]."##] pub const constructors: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[215]); #[doc = r##"This is the function [`::hax_lib::prop::exists`]."##] pub const exists: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[202]); #[doc = r##"This is the function 
[`::hax_lib::prop::forall`]."##] pub const forall: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[203]); #[doc = r##"This is the function [`::hax_lib::prop::implies`]."##] pub const implies: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[200]); } #[doc = r##"This is the trait [`::hax_lib::RefineAs`]."##] pub const RefineAs: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[240]); #[doc = r##"This is the trait [`::hax_lib::Refinement`]."##] pub const Refinement: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[242]); #[doc = r##"This is the function [`::hax_lib::_internal_loop_decreases`]."##] pub const _internal_loop_decreases: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[145]); #[doc = r##"This is the function [`::hax_lib::_internal_loop_invariant`]."##] pub const _internal_loop_invariant: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[153]); #[doc = r##"This is the function [`::hax_lib::_internal_while_loop_invariant`]."##] pub const _internal_while_loop_invariant: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[150]); #[doc = r##"This is the module [`::hax_lib::abstraction`]."##] pub const abstraction: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[106]); #[doc = r##"This is the function [`::hax_lib::any_to_unit`]."##] pub const any_to_unit: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[101]); #[doc = r##"This is the function 
[`::hax_lib::assert`]."##] pub const assert: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[154]); #[doc = r##"This is the function [`::hax_lib::inline`]."##] pub const inline: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[103]); #[doc = r##"This is the function [`::hax_lib::inline_unsafe`]."##] pub const inline_unsafe: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[102]); #[doc = r##"This is the module [`::hax_lib::int`]."##] pub const int: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[104]); #[doc = r##"This is the module [`::hax_lib::prop`]."##] pub const prop: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[147]); } pub mod hax_lib_protocol { #![doc = r##"This is the module [`::hax_lib_protocol`]."##] use super::root; pub mod crypto { #![doc = r##"This is the module [`::hax_lib_protocol::crypto`]."##] use super::root; pub mod AEADAlgorithm { #![doc = r##"This is the enum [`::hax_lib_protocol::crypto::AEADAlgorithm`]."##] use super::root; pub mod Chacha20Poly1305 { use super::root; #[doc = r##"This is the variant [`::hax_lib_protocol::crypto::AEADAlgorithm::Chacha20Poly1305::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[657]); } } pub mod DHGroup { #![doc = r##"This is the enum [`::hax_lib_protocol::crypto::DHGroup`]."##] use super::root; pub mod X25519 { use super::root; #[doc = r##"This is the variant [`::hax_lib_protocol::crypto::DHGroup::X25519::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[656]); } } pub mod HMACAlgorithm { #![doc = r##"This is the enum [`::hax_lib_protocol::crypto::HMACAlgorithm`]."##] use super::root; pub mod Sha256 { use super::root; #[doc = r##"This is the variant [`::hax_lib_protocol::crypto::HMACAlgorithm::Sha256::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[655]); } } pub mod HashAlgorithm { #![doc = r##"This is the enum [`::hax_lib_protocol::crypto::HashAlgorithm`]."##] use super::root; pub mod Sha256 { use super::root; #[doc = r##"This is the variant [`::hax_lib_protocol::crypto::HashAlgorithm::Sha256::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[654]); } } pub mod Impl { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib_protocol::crypto::Impl::from_bytes`]."##] pub const from_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[72]); } pub mod Impl__1 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib_protocol::crypto::Impl__1::from_bytes`]."##] pub const from_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[74]); } pub mod Impl__4 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib_protocol::crypto::Impl__4::from_bytes`]."##] pub const from_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[86]); } pub mod Impl__5 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function 
[`::hax_lib_protocol::crypto::Impl__5::from_bytes`]."##] pub const from_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[88]); } pub mod Impl__6 { #![doc = r##"This is an impl block."##] use super::root; #[doc = r##"This is the associated function [`::hax_lib_protocol::crypto::Impl__6::from_bytes`]."##] pub const from_bytes: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[81]); } #[doc = r##"This is the enum [`::hax_lib_protocol::crypto::AEADAlgorithm`]."##] pub const AEADAlgorithm: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[84]); #[doc = r##"This is the struct [`::hax_lib_protocol::crypto::AEADIV`]."##] pub const AEADIV: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[79]); #[doc = r##"This is the struct [`::hax_lib_protocol::crypto::AEADKey`]."##] pub const AEADKey: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[78]); #[doc = r##"This is the struct [`::hax_lib_protocol::crypto::AEADTag`]."##] pub const AEADTag: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[82]); #[doc = r##"This is the struct [`::hax_lib_protocol::crypto::DHElement`]."##] pub const DHElement: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[69]); #[doc = r##"This is the enum [`::hax_lib_protocol::crypto::DHGroup`]."##] pub const DHGroup: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[65]); #[doc = r##"This is the struct [`::hax_lib_protocol::crypto::DHScalar`]."##] pub const DHScalar: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[66]); #[doc = r##"This is the enum [`::hax_lib_protocol::crypto::HMACAlgorithm`]."##] pub const HMACAlgorithm: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[63]); #[doc = r##"This is the enum [`::hax_lib_protocol::crypto::HashAlgorithm`]."##] pub const HashAlgorithm: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[20]); #[doc = r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[71]); #[doc = r##"This is an impl block."##] pub const Impl__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[73]); #[doc = r##"This is an impl block."##] pub const Impl__4: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[85]); #[doc = r##"This is an impl block."##] pub const Impl__5: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[87]); #[doc = r##"This is an impl block."##] pub const Impl__6: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[80]); #[doc = r##"This is an impl block."##] pub const Impl__9: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[68]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::aead_decrypt`]."##] pub const aead_decrypt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[83]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::aead_encrypt`]."##] pub const aead_encrypt: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[90]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::dh_scalar_multiply`]."##] pub const dh_scalar_multiply: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[70]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::dh_scalar_multiply_base`]."##] pub const dh_scalar_multiply_base: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[67]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::hash`]."##] pub const hash: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[27]); #[doc = r##"This is the function [`::hax_lib_protocol::crypto::hmac`]."##] pub const hmac: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[64]); } #[doc = r##"This is the enum [`::hax_lib_protocol::ProtocolError`]."##] pub const ProtocolError: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[75]); #[doc = r##"This is the module [`::hax_lib_protocol::crypto`]."##] pub const crypto: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[19]); } pub mod rust_primitives { #![doc = r##"This is the module [`::rust_primitives`]."##] use super::root; pub mod arithmetic { #![doc = r##"This is the module [`::rust_primitives::arithmetic`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::arithmetic::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[616]); } pub mod crypto_abstractions { #![doc = r##"This is the module 
[`::rust_primitives::crypto_abstractions`]."##] use super::root; #[doc = r##"This is the use item [`::rust_primitives::crypto_abstractions::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[638]); #[doc = r##"This is the function [`::rust_primitives::crypto_abstractions::crypto_abstractions`]."##] pub const crypto_abstractions: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[305]); } pub mod dummy_hax_concrete_ident_wrapper { #![doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper`]."##] use super::root; pub mod Foo { #![doc = r##"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo`]."##] use super::root; #[doc = r##"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[653]); } pub mod ___1 { #![doc = r##"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1`]."##] use super::root; #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[626]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::Use__1`]."##] pub const Use__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[627]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::f`]."##] pub const f: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[312]); #[doc = r##"This is the function 
[`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::g`]."##] pub const g: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[631]); } pub mod _anonymous { #![doc = r##"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous`]."##] use super::root; #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[629]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use__1`]."##] pub const Use__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[636]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use__2`]."##] pub const Use__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[640]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::arith`]."##] pub const arith: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[519]); } pub mod props { #![doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props`]."##] use super::root; #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[625]); } #[doc = r##"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo`]."##] pub const Foo: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[612]); #[doc 
= r##"This is an impl block."##] pub const Impl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[613]); #[doc = r##"This is an impl block."##] pub const Impl__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[611]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[632]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__1`]."##] pub const Use__1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[624]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__2`]."##] pub const Use__2: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[634]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__3`]."##] pub const Use__3: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[641]); #[doc = r##"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__4`]."##] pub const Use__4: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[637]); #[doc = r##"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1`]."##] pub const ___1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[292]); #[doc = r##"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous`]."##] pub const _anonymous: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[300]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::dummy`]."##] pub const dummy: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[294]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::index_mut`]."##] pub const index_mut: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[639]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::iterator_functions`]."##] pub const iterator_functions: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[474]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props`]."##] pub const props: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[564]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::question_mark_result`]."##] pub const question_mark_result: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[556]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::refinements`]."##] pub const refinements: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[430]); } pub mod hax { #![doc = r##"This is the module [`::rust_primitives::hax`]."##] use super::root; pub mod Failure { #![doc = r##"This is the struct [`::rust_primitives::hax::Failure`]."##] use super::root; #[doc = r##"This is the struct [`::rust_primitives::hax::Failure::Constructor`]."##] pub const Constructor: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[652]); } pub(in crate::ast::identifiers::global_id) mod Tuple2 { #![doc = r##"This is the struct [`::rust_primitives::hax::Tuple2`]."##] use super::root; #[doc = r##"This is the struct [`::rust_primitives::hax::Tuple2::Constructor`]."##] pub(in crate::ast::identifiers::global_id) const Constructor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[643]); #[doc = r##"This is the field [`_0`] from ::rust_primitives::hax::Tuple2."##] pub(in crate::ast::identifiers::global_id) const _0: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[390]); #[doc = r##"This is the field [`_1`] from ::rust_primitives::hax::Tuple2."##] pub(in crate::ast::identifiers::global_id) const _1: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[398]); } pub mod control_flow_monad { #![doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad`]."##] use super::root; pub mod ControlFlowMonad { #![doc = r##"This is the trait [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad`]."##] use super::root; #[doc = r##"This is the associated function [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad::lift`]."##] pub const lift: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[471]); } pub mod mexception { #![doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad::mexception`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::hax::control_flow_monad::mexception::run`]."##] pub const run: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[541]); } pub mod moption { #![doc = r##"This is the module 
[`::rust_primitives::hax::control_flow_monad::moption`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::hax::control_flow_monad::moption::run`]."##] pub const run: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[322]); } pub mod mresult { #![doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad::mresult`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::hax::control_flow_monad::mresult::run`]."##] pub const run: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[419]); } #[doc = r##"This is the trait [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad`]."##] pub const ControlFlowMonad: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[470]); #[doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad::mexception`]."##] pub const mexception: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[512]); #[doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad::moption`]."##] pub const moption: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[321]); #[doc = r##"This is the module [`::rust_primitives::hax::control_flow_monad::mresult`]."##] pub const mresult: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[418]); } pub mod explicit_monadic { #![doc = r##"This is the module [`::rust_primitives::hax::explicit_monadic`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::hax::explicit_monadic::lift`]."##] pub const lift: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[605]);
    /// This is the function [`::rust_primitives::hax::explicit_monadic::pure`].
    pub const pure: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[604]);
}
pub mod folds {
    //! This is the module [`::rust_primitives::hax::folds`].
    use super::root;
    /// This is the function [`::rust_primitives::hax::folds::fold_cf`].
    pub const fold_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[465]);
    /// This is the function [`::rust_primitives::hax::folds::fold_chunked_slice`].
    pub const fold_chunked_slice: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[447]);
    /// This is the function [`::rust_primitives::hax::folds::fold_chunked_slice_cf`].
    pub const fold_chunked_slice_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[382]);
    /// This is the function [`::rust_primitives::hax::folds::fold_chunked_slice_return`].
    pub const fold_chunked_slice_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[479]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice`].
    pub const fold_enumerated_chunked_slice: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[376]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice_cf`].
    pub const fold_enumerated_chunked_slice_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[494]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice_return`].
    pub const fold_enumerated_chunked_slice_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[529]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice`].
    pub const fold_enumerated_slice: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[413]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice_cf`].
    pub const fold_enumerated_slice_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[355]);
    /// This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice_return`].
    pub const fold_enumerated_slice_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[511]);
    /// This is the function [`::rust_primitives::hax::folds::fold_range`].
    pub const fold_range: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[367]);
    /// This is the function [`::rust_primitives::hax::folds::fold_range_cf`].
    pub const fold_range_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[439]);
    /// This is the function [`::rust_primitives::hax::folds::fold_range_return`].
    pub const fold_range_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[304]);
    /// This is the function [`::rust_primitives::hax::folds::fold_range_step_by`].
    pub const fold_range_step_by: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[290]);
    #[doc =
r##"This is the function [`::rust_primitives::hax::folds::fold_range_step_by_cf`]."##]
    pub const fold_range_step_by_cf: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[543]);
    /// This is the function [`::rust_primitives::hax::folds::fold_range_step_by_return`].
    pub const fold_range_step_by_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[508]);
    /// This is the function [`::rust_primitives::hax::folds::fold_return`].
    pub const fold_return: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[440]);
}
pub mod int {
    //! This is the module [`::rust_primitives::hax::int`].
    use super::root;
    /// This is the function [`::rust_primitives::hax::int::add`].
    pub const add: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[482]);
    /// This is the function [`::rust_primitives::hax::int::div`].
    pub const div: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[381]);
    /// This is the function [`::rust_primitives::hax::int::eq`].
    pub const eq: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[539]);
    /// This is the function [`::rust_primitives::hax::int::from_machine`].
    pub const from_machine: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[497]);
    /// This is the function [`::rust_primitives::hax::int::ge`].
    pub const ge: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[412]);
    /// This is the function [`::rust_primitives::hax::int::gt`].
    pub const gt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[466]);
    /// This is the function [`::rust_primitives::hax::int::into_machine`].
    pub const into_machine: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[451]);
    /// This is the function [`::rust_primitives::hax::int::le`].
    pub const le: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[363]);
    /// This is the function [`::rust_primitives::hax::int::lt`].
    pub const lt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[513]);
    /// This is the function [`::rust_primitives::hax::int::mul`].
    pub const mul: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[373]);
    /// This is the function [`::rust_primitives::hax::int::ne`].
    pub const ne: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[424]);
    /// This is the function [`::rust_primitives::hax::int::neg`].
    pub const neg: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[490]);
    /// This is the function [`::rust_primitives::hax::int::rem`].
    pub const rem: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[483]);
    /// This is the function [`::rust_primitives::hax::int::sub`].
    pub const sub: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[346]);
}
pub mod machine_int {
    //! This is the module [`::rust_primitives::hax::machine_int`].
    use super::root;
    /// This is the function [`::rust_primitives::hax::machine_int::add`].
    pub const add: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[463]);
    /// This is the function [`::rust_primitives::hax::machine_int::add_with_overflow`].
    pub const add_with_overflow: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[614]);
    /// This is the function [`::rust_primitives::hax::machine_int::bitand`].
    pub const bitand: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[524]);
    /// This is the function [`::rust_primitives::hax::machine_int::bitor`].
    pub const bitor: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[575]);
    /// This is the function [`::rust_primitives::hax::machine_int::bitxor`].
    pub const bitxor: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[360]);
    /// This is the function [`::rust_primitives::hax::machine_int::cmp`].
    pub const cmp: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[619]);
    /// This is the function [`::rust_primitives::hax::machine_int::div`].
    pub const div: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[399]);
    /// This is the function [`::rust_primitives::hax::machine_int::eq`].
    pub const eq: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[383]);
    /// This is the function [`::rust_primitives::hax::machine_int::ge`].
    pub const ge:
crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[303]);
    /// This is the function [`::rust_primitives::hax::machine_int::gt`].
    pub const gt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[504]);
    /// This is the function [`::rust_primitives::hax::machine_int::le`].
    pub const le: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[448]);
    /// This is the function [`::rust_primitives::hax::machine_int::lt`].
    pub const lt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[499]);
    /// This is the function [`::rust_primitives::hax::machine_int::mul`].
    pub const mul: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[582]);
    /// This is the function [`::rust_primitives::hax::machine_int::mul_with_overflow`].
    pub const mul_with_overflow: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[618]);
    /// This is the function [`::rust_primitives::hax::machine_int::ne`].
    pub const ne: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[473]);
    /// This is the function [`::rust_primitives::hax::machine_int::not`].
    pub const not: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[327]);
    /// This is the function [`::rust_primitives::hax::machine_int::rem`].
    pub const rem: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[488]);
    /// This is the function [`::rust_primitives::hax::machine_int::shl`].
    pub const shl: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[502]);
    /// This is the function [`::rust_primitives::hax::machine_int::shr`].
    pub const shr: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[484]);
    /// This is the function [`::rust_primitives::hax::machine_int::sub`].
    pub const sub: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[537]);
    /// This is the function [`::rust_primitives::hax::machine_int::sub_with_overflow`].
    pub const sub_with_overflow: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[617]);
}
pub mod monomorphized_update_at {
    //! This is the module [`::rust_primitives::hax::monomorphized_update_at`].
    use super::root;
    /// This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range`].
    pub const update_at_range: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[348]);
    /// This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_from`].
    pub const update_at_range_from: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[318]);
    /// This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_full`].
    pub const update_at_range_full: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[375]);
    /// This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_to`].
    pub const update_at_range_to:
crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[581]);
    /// This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_usize`].
    pub const update_at_usize: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[311]);
}
/// This is the struct [`::rust_primitives::hax::Failure`].
pub const Failure: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[366]);
/// This is the enum [`::rust_primitives::hax::MutRef`].
pub const MutRef: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[334]);
/// This is the enum [`::rust_primitives::hax::Never`].
pub const Never: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[170]);
/// This is the struct [`::rust_primitives::hax::Tuple2`].
pub(in crate::ast::identifiers::global_id) const Tuple2: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[89]);
/// This is the function [`::rust_primitives::hax::array_of_list`].
pub const array_of_list: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[371]);
/// This is the function [`::rust_primitives::hax::box_new`].
pub const box_new: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[32]);
/// This is the function [`::rust_primitives::hax::cast_op`].
pub const cast_op: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[96]);
/// This is the module [`::rust_primitives::hax::control_flow_monad`].
pub const control_flow_monad: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[320]);
/// This is the function [`::rust_primitives::hax::deref_op`].
pub const deref_op: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[21]);
/// This is the function [`::rust_primitives::hax::dropped_body`].
pub const dropped_body: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[544]);
/// This is the module [`::rust_primitives::hax::explicit_monadic`].
pub const explicit_monadic: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[603]);
/// This is the function [`::rust_primitives::hax::failure`].
pub const failure: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[550]);
/// This is the module [`::rust_primitives::hax::folds`].
pub const folds: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[289]);
/// This is the module [`::rust_primitives::hax::int`].
pub const int: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[345]);
/// This is the function [`::rust_primitives::hax::logical_op_and`].
pub const logical_op_and: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[270]);
/// This is the function [`::rust_primitives::hax::logical_op_or`].
pub const logical_op_or: crate::ast::identifiers::global_id::GlobalId =
    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[542]);
#[doc =
r##"This is the module [`::rust_primitives::hax::machine_int`]."##] pub const machine_int: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[302]); #[doc = r##"This is the module [`::rust_primitives::hax::monomorphized_update_at`]."##] pub const monomorphized_update_at: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[310]); #[doc = r##"This is the function [`::rust_primitives::hax::never_to_any`]."##] pub const never_to_any: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[171]); #[doc = r##"This is the function [`::rust_primitives::hax::repeat`]."##] pub const repeat: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[420]); #[doc = r##"This is the function [`::rust_primitives::hax::update_at`]."##] pub const update_at: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[370]); #[doc = r##"This is the function [`::rust_primitives::hax::while_loop`]."##] pub const while_loop: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[432]); #[doc = r##"This is the function [`::rust_primitives::hax::while_loop_cf`]."##] pub const while_loop_cf: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[555]); #[doc = r##"This is the function [`::rust_primitives::hax::while_loop_return`]."##] pub const while_loop_return: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[586]); } pub mod i128 { #![doc = r##"This is the module [`::rust_primitives::i128`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::i128::add`]."##] pub 
const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[386]); #[doc = r##"This is the function [`::rust_primitives::i128::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[531]); #[doc = r##"This is the function [`::rust_primitives::i128::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[583]); #[doc = r##"This is the function [`::rust_primitives::i128::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[557]); #[doc = r##"This is the function [`::rust_primitives::i128::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[462]); #[doc = r##"This is the function [`::rust_primitives::i128::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[580]); #[doc = r##"This is the function [`::rust_primitives::i128::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[569]); #[doc = r##"This is the function [`::rust_primitives::i128::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[409]); #[doc = r##"This is the function [`::rust_primitives::i128::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[533]); #[doc = r##"This is the function [`::rust_primitives::i128::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[515]); #[doc = r##"This is the function [`::rust_primitives::i128::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[584]); #[doc = r##"This is the function [`::rust_primitives::i128::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[578]); #[doc = r##"This is the function [`::rust_primitives::i128::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[568]); #[doc = r##"This is the function [`::rust_primitives::i128::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[354]); #[doc = r##"This is the function [`::rust_primitives::i128::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[353]); #[doc = r##"This is the function [`::rust_primitives::i128::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[364]); #[doc = r##"This is the function [`::rust_primitives::i128::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[393]); } pub mod i16 { #![doc = r##"This is the module [`::rust_primitives::i16`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::i16::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[336]); #[doc = r##"This is the function [`::rust_primitives::i16::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[523]);
    /// This is the function [`::rust_primitives::i16::bit_or`].
    pub const bit_or: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[331]);
    /// This is the function [`::rust_primitives::i16::bit_xor`].
    pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[574]);
    /// This is the function [`::rust_primitives::i16::div`].
    pub const div: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[553]);
    /// This is the function [`::rust_primitives::i16::eq`].
    pub const eq: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[565]);
    /// This is the function [`::rust_primitives::i16::ge`].
    pub const ge: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[332]);
    /// This is the function [`::rust_primitives::i16::gt`].
    pub const gt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[563]);
    /// This is the function [`::rust_primitives::i16::le`].
    pub const le: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[534]);
    /// This is the function [`::rust_primitives::i16::lt`].
    pub const lt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[532]);
    /// This is the function [`::rust_primitives::i16::mul`].
    pub const mul: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[408]);
    /// This is the function [`::rust_primitives::i16::ne`].
    pub const ne: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[388]);
    /// This is the function [`::rust_primitives::i16::neg`].
    pub const neg: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[356]);
    /// This is the function [`::rust_primitives::i16::rem`].
    pub const rem: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[518]);
    /// This is the function [`::rust_primitives::i16::shl`].
    pub const shl: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[316]);
    /// This is the function [`::rust_primitives::i16::shr`].
    pub const shr: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[358]);
    /// This is the function [`::rust_primitives::i16::sub`].
    pub const sub: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[478]);
}
pub mod i32 {
    //! This is the module [`::rust_primitives::i32`].
    use super::root;
    /// This is the function [`::rust_primitives::i32::add`].
    pub const add: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[361]);
    /// This is the function [`::rust_primitives::i32::bit_and`].
    pub const bit_and: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[464]);
    /// This is the function [`::rust_primitives::i32::bit_or`].
    pub const bit_or: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[489]);
    /// This is the function [`::rust_primitives::i32::bit_xor`].
    pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[501]);
    /// This is the function [`::rust_primitives::i32::div`].
    pub const div: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[423]);
    /// This is the function [`::rust_primitives::i32::eq`].
    pub const eq: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[510]);
    /// This is the function [`::rust_primitives::i32::ge`].
    pub const ge: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[587]);
    /// This is the function [`::rust_primitives::i32::gt`].
    pub const gt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[514]);
    /// This is the function [`::rust_primitives::i32::le`].
    pub const le: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[333]);
    /// This is the function [`::rust_primitives::i32::lt`].
    pub const lt: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[323]);
    /// This is the function [`::rust_primitives::i32::mul`].
    pub const mul: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[457]);
    /// This is the function [`::rust_primitives::i32::ne`].
    pub const ne: crate::ast::identifiers::global_id::GlobalId =
        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[476]);
    /// This is the function [`::rust_primitives::i32::neg`].
    pub const neg: crate::ast::identifiers::global_id::GlobalId =
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[309]); #[doc = r##"This is the function [`::rust_primitives::i32::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[342]); #[doc = r##"This is the function [`::rust_primitives::i32::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[562]); #[doc = r##"This is the function [`::rust_primitives::i32::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[368]); #[doc = r##"This is the function [`::rust_primitives::i32::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[350]); } pub mod i64 { #![doc = r##"This is the module [`::rust_primitives::i64`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::i64::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[498]); #[doc = r##"This is the function [`::rust_primitives::i64::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[588]); #[doc = r##"This is the function [`::rust_primitives::i64::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[485]); #[doc = r##"This is the function [`::rust_primitives::i64::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[329]); #[doc = r##"This is the function [`::rust_primitives::i64::div`]."##] pub const div: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[436]); #[doc = r##"This is the function [`::rust_primitives::i64::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[571]); #[doc = r##"This is the function [`::rust_primitives::i64::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[554]); #[doc = r##"This is the function [`::rust_primitives::i64::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[437]); #[doc = r##"This is the function [`::rust_primitives::i64::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[567]); #[doc = r##"This is the function [`::rust_primitives::i64::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[446]); #[doc = r##"This is the function [`::rust_primitives::i64::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[491]); #[doc = r##"This is the function [`::rust_primitives::i64::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[414]); #[doc = r##"This is the function [`::rust_primitives::i64::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[397]); #[doc = r##"This is the function [`::rust_primitives::i64::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[433]); #[doc = r##"This is the function [`::rust_primitives::i64::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[459]); #[doc = r##"This is the function [`::rust_primitives::i64::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[480]); #[doc = r##"This is the function [`::rust_primitives::i64::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[527]); } pub mod i8 { #![doc = r##"This is the module [`::rust_primitives::i8`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::i8::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[577]); #[doc = r##"This is the function [`::rust_primitives::i8::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[528]); #[doc = r##"This is the function [`::rust_primitives::i8::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[340]); #[doc = r##"This is the function [`::rust_primitives::i8::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[552]); #[doc = r##"This is the function [`::rust_primitives::i8::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[325]); #[doc = r##"This is the function [`::rust_primitives::i8::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[411]); #[doc = r##"This is the function [`::rust_primitives::i8::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[405]); #[doc = r##"This is the function [`::rust_primitives::i8::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[347]); #[doc = r##"This is the function [`::rust_primitives::i8::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[576]); #[doc = r##"This is the function [`::rust_primitives::i8::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[417]); #[doc = r##"This is the function [`::rust_primitives::i8::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[435]); #[doc = r##"This is the function [`::rust_primitives::i8::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[351]); #[doc = r##"This is the function [`::rust_primitives::i8::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[385]); #[doc = r##"This is the function [`::rust_primitives::i8::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[551]); #[doc = r##"This is the function [`::rust_primitives::i8::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[357]); #[doc = r##"This is the function 
[`::rust_primitives::i8::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[429]); #[doc = r##"This is the function [`::rust_primitives::i8::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[401]); } pub mod isize { #![doc = r##"This is the module [`::rust_primitives::isize`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::isize::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[507]); #[doc = r##"This is the function [`::rust_primitives::isize::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[410]); #[doc = r##"This is the function [`::rust_primitives::isize::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[365]); #[doc = r##"This is the function [`::rust_primitives::isize::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[493]); #[doc = r##"This is the function [`::rust_primitives::isize::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[416]); #[doc = r##"This is the function [`::rust_primitives::isize::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[516]); #[doc = r##"This is the function [`::rust_primitives::isize::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[438]); #[doc = 
r##"This is the function [`::rust_primitives::isize::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[530]); #[doc = r##"This is the function [`::rust_primitives::isize::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[573]); #[doc = r##"This is the function [`::rust_primitives::isize::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[288]); #[doc = r##"This is the function [`::rust_primitives::isize::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[344]); #[doc = r##"This is the function [`::rust_primitives::isize::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[387]); #[doc = r##"This is the function [`::rust_primitives::isize::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[549]); #[doc = r##"This is the function [`::rust_primitives::isize::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[579]); #[doc = r##"This is the function [`::rust_primitives::isize::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[317]); #[doc = r##"This is the function [`::rust_primitives::isize::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[500]); #[doc = r##"This is the function [`::rust_primitives::isize::sub`]."##] pub const sub: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[472]); } pub mod u128 { #![doc = r##"This is the module [`::rust_primitives::u128`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::u128::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[509]); #[doc = r##"This is the function [`::rust_primitives::u128::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[427]); #[doc = r##"This is the function [`::rust_primitives::u128::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[526]); #[doc = r##"This is the function [`::rust_primitives::u128::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[548]); #[doc = r##"This is the function [`::rust_primitives::u128::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[521]); #[doc = r##"This is the function [`::rust_primitives::u128::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[560]); #[doc = r##"This is the function [`::rust_primitives::u128::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[496]); #[doc = r##"This is the function [`::rust_primitives::u128::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[445]); #[doc = r##"This is the function [`::rust_primitives::u128::le`]."##] 
pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[338]); #[doc = r##"This is the function [`::rust_primitives::u128::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[547]); #[doc = r##"This is the function [`::rust_primitives::u128::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[384]); #[doc = r##"This is the function [`::rust_primitives::u128::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[450]); #[doc = r##"This is the function [`::rust_primitives::u128::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[570]); #[doc = r##"This is the function [`::rust_primitives::u128::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[282]); #[doc = r##"This is the function [`::rust_primitives::u128::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[326]); #[doc = r##"This is the function [`::rust_primitives::u128::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[395]); #[doc = r##"This is the function [`::rust_primitives::u128::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[406]); } pub mod u16 { #![doc = r##"This is the module [`::rust_primitives::u16`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::u16::add`]."##] pub const 
add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[335]); #[doc = r##"This is the function [`::rust_primitives::u16::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[299]); #[doc = r##"This is the function [`::rust_primitives::u16::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[525]); #[doc = r##"This is the function [`::rust_primitives::u16::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[475]); #[doc = r##"This is the function [`::rust_primitives::u16::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[535]); #[doc = r##"This is the function [`::rust_primitives::u16::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[536]); #[doc = r##"This is the function [`::rust_primitives::u16::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[343]); #[doc = r##"This is the function [`::rust_primitives::u16::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[403]); #[doc = r##"This is the function [`::rust_primitives::u16::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[572]); #[doc = r##"This is the function [`::rust_primitives::u16::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[444]); #[doc = r##"This is the function [`::rust_primitives::u16::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[313]); #[doc = r##"This is the function [`::rust_primitives::u16::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[428]); #[doc = r##"This is the function [`::rust_primitives::u16::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[284]); #[doc = r##"This is the function [`::rust_primitives::u16::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[422]); #[doc = r##"This is the function [`::rust_primitives::u16::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[520]); #[doc = r##"This is the function [`::rust_primitives::u16::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[449]); #[doc = r##"This is the function [`::rust_primitives::u16::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[540]); } pub mod u32 { #![doc = r##"This is the module [`::rust_primitives::u32`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::u32::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[339]); #[doc = r##"This is the function [`::rust_primitives::u32::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[585]); #[doc = r##"This is the function [`::rust_primitives::u32::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[558]); #[doc = r##"This is the function [`::rust_primitives::u32::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[456]); #[doc = r##"This is the function [`::rust_primitives::u32::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[460]); #[doc = r##"This is the function [`::rust_primitives::u32::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[330]); #[doc = r##"This is the function [`::rust_primitives::u32::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[495]); #[doc = r##"This is the function [`::rust_primitives::u32::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[400]); #[doc = r##"This is the function [`::rust_primitives::u32::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[337]); #[doc = r##"This is the function [`::rust_primitives::u32::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[559]); #[doc = r##"This is the function [`::rust_primitives::u32::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[486]); #[doc = r##"This is the 
function [`::rust_primitives::u32::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[307]); #[doc = r##"This is the function [`::rust_primitives::u32::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[314]); #[doc = r##"This is the function [`::rust_primitives::u32::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[298]); #[doc = r##"This is the function [`::rust_primitives::u32::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[454]); #[doc = r##"This is the function [`::rust_primitives::u32::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[561]); #[doc = r##"This is the function [`::rust_primitives::u32::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[378]); } pub mod u64 { #![doc = r##"This is the module [`::rust_primitives::u64`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::u64::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[487]); #[doc = r##"This is the function [`::rust_primitives::u64::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[452]); #[doc = r##"This is the function [`::rust_primitives::u64::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[374]); #[doc = r##"This is the 
function [`::rust_primitives::u64::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[362]); #[doc = r##"This is the function [`::rust_primitives::u64::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[359]); #[doc = r##"This is the function [`::rust_primitives::u64::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[545]); #[doc = r##"This is the function [`::rust_primitives::u64::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[391]); #[doc = r##"This is the function [`::rust_primitives::u64::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[301]); #[doc = r##"This is the function [`::rust_primitives::u64::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[467]); #[doc = r##"This is the function [`::rust_primitives::u64::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[306]); #[doc = r##"This is the function [`::rust_primitives::u64::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[286]); #[doc = r##"This is the function [`::rust_primitives::u64::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[492]); #[doc = r##"This is the function [`::rust_primitives::u64::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = 
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[481]); #[doc = r##"This is the function [`::rust_primitives::u64::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[546]); #[doc = r##"This is the function [`::rust_primitives::u64::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[425]); #[doc = r##"This is the function [`::rust_primitives::u64::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[505]); #[doc = r##"This is the function [`::rust_primitives::u64::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[431]); } pub mod u8 { #![doc = r##"This is the module [`::rust_primitives::u8`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::u8::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[349]); #[doc = r##"This is the function [`::rust_primitives::u8::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[506]); #[doc = r##"This is the function [`::rust_primitives::u8::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[421]); #[doc = r##"This is the function [`::rust_primitives::u8::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[455]); #[doc = r##"This is the function [`::rust_primitives::u8::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId 
= crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[369]); #[doc = r##"This is the function [`::rust_primitives::u8::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[379]); #[doc = r##"This is the function [`::rust_primitives::u8::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[392]); #[doc = r##"This is the function [`::rust_primitives::u8::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[407]); #[doc = r##"This is the function [`::rust_primitives::u8::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[372]); #[doc = r##"This is the function [`::rust_primitives::u8::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[503]); #[doc = r##"This is the function [`::rust_primitives::u8::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[402]); #[doc = r##"This is the function [`::rust_primitives::u8::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[377]); #[doc = r##"This is the function [`::rust_primitives::u8::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[461]); #[doc = r##"This is the function [`::rust_primitives::u8::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[434]); #[doc = r##"This is the function 
[`::rust_primitives::u8::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[469]); #[doc = r##"This is the function [`::rust_primitives::u8::shr`]."##] pub const shr: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[296]); #[doc = r##"This is the function [`::rust_primitives::u8::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[394]); } pub mod usize { #![doc = r##"This is the module [`::rust_primitives::usize`]."##] use super::root; #[doc = r##"This is the function [`::rust_primitives::usize::add`]."##] pub const add: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[380]); #[doc = r##"This is the function [`::rust_primitives::usize::bit_and`]."##] pub const bit_and: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[477]); #[doc = r##"This is the function [`::rust_primitives::usize::bit_or`]."##] pub const bit_or: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[517]); #[doc = r##"This is the function [`::rust_primitives::usize::bit_xor`]."##] pub const bit_xor: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[404]); #[doc = r##"This is the function [`::rust_primitives::usize::div`]."##] pub const div: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[522]); #[doc = r##"This is the function [`::rust_primitives::usize::eq`]."##] pub const eq: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[566]); #[doc = 
r##"This is the function [`::rust_primitives::usize::ge`]."##] pub const ge: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[341]); #[doc = r##"This is the function [`::rust_primitives::usize::gt`]."##] pub const gt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[453]); #[doc = r##"This is the function [`::rust_primitives::usize::le`]."##] pub const le: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[468]); #[doc = r##"This is the function [`::rust_primitives::usize::lt`]."##] pub const lt: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[426]); #[doc = r##"This is the function [`::rust_primitives::usize::mul`]."##] pub const mul: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[458]); #[doc = r##"This is the function [`::rust_primitives::usize::ne`]."##] pub const ne: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[538]); #[doc = r##"This is the function [`::rust_primitives::usize::neg`]."##] pub const neg: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[396]); #[doc = r##"This is the function [`::rust_primitives::usize::rem`]."##] pub const rem: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[443]); #[doc = r##"This is the function [`::rust_primitives::usize::shl`]."##] pub const shl: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[442]); #[doc = r##"This is the function [`::rust_primitives::usize::shr`]."##] pub const shr: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[415]); #[doc = r##"This is the function [`::rust_primitives::usize::sub`]."##] pub const sub: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[441]); } #[doc = r##"This is the use item [`::rust_primitives::Use`]."##] pub const Use: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[628]); #[doc = r##"This is the extern crate [`::rust_primitives::alloc`]."##] pub const alloc: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[633]); #[doc = r##"This is the module [`::rust_primitives::arithmetic`]."##] pub const arithmetic: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[615]); #[doc = r##"This is the module [`::rust_primitives::crypto_abstractions`]."##] pub const crypto_abstractions: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[293]); #[doc = r##"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper`]."##] pub const dummy_hax_concrete_ident_wrapper: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[291]); #[doc = r##"This is the module [`::rust_primitives::hax`]."##] pub const hax: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[1]); #[doc = r##"This is the module [`::rust_primitives::i128`]."##] pub const i128: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[352]); #[doc = r##"This is the module [`::rust_primitives::i16`]."##] pub const i16: 
crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[315]); #[doc = r##"This is the module [`::rust_primitives::i32`]."##] pub const i32: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[308]); #[doc = r##"This is the module [`::rust_primitives::i64`]."##] pub const i64: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[328]); #[doc = r##"This is the module [`::rust_primitives::i8`]."##] pub const i8: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[324]); #[doc = r##"This is the macro [`::rust_primitives::impl_arith`]."##] pub const impl_arith: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[630]); #[doc = r##"This is the module [`::rust_primitives::isize`]."##] pub const isize: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[287]); #[doc = r##"This is the function [`::rust_primitives::offset`]."##] pub const offset: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[389]); #[doc = r##"This is the extern crate [`::rust_primitives::std`]."##] pub const std: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[635]); #[doc = r##"This is the module [`::rust_primitives::u128`]."##] pub const u128: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[281]); #[doc = r##"This is the module [`::rust_primitives::u16`]."##] pub const u16: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[283]); #[doc = r##"This 
is the module [`::rust_primitives::u32`]."##] pub const u32: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[297]); #[doc = r##"This is the module [`::rust_primitives::u64`]."##] pub const u64: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[285]); #[doc = r##"This is the module [`::rust_primitives::u8`]."##] pub const u8: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[295]); #[doc = r##"This is the function [`::rust_primitives::unsize`]."##] pub const unsize: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[35]); #[doc = r##"This is the module [`::rust_primitives::usize`]."##] pub const usize: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[319]); } #[doc = r##"This is the module [`::alloc`]."##] pub const alloc: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[2]); #[doc = r##"This is the module [`::core`]."##] pub const core: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[7]); #[doc = r##"This is the module [`::hax_lib`]."##] pub const hax_lib: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[100]); #[doc = r##"This is the module [`::hax_lib_protocol`]."##] pub const hax_lib_protocol: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[18]); #[doc = r##"This is the module [`::rust_primitives`]."##] pub const rust_primitives: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[0]); 
================================================ FILE: rust-engine/src/ast/identifiers/global_id/generated_names.rs ================================================ /// We allow: /// - `unused`: we don't use all the names present in the `engine/names` crate. /// Filtering which `DefId` should be exposed would be complicated, and /// dependent library may use some names. (for instance, the backend for /// ProVerif may use names from `hax_lib_protocol` that are not needed /// anywhere else in the engine) /// - `non_snake_case`: we produce faithful names with respect to their /// original definitions in Rust. We generate for instance `fn Some() -> /// DefID {...}` that provides the `DefId` for the /// `std::option::Option::Some`. We want the function to be named `Some` /// here, not `some`. /// - `broken_intra_doc_links`: we produce documentation that link the function /// providing the `DefId` of a item to the item itself. Sometimes, we refer /// to private items, to re-exported items or to items that are not in the /// dependency closure of the engine: in such cases, `rustdoc` cannot link /// properly. #[allow( unused, non_snake_case, rustdoc::broken_intra_doc_links, missing_docs, clippy::module_inception, unused_qualifications, non_upper_case_globals )] pub mod root { include!("generated.rs"); } /// Global identifiers are built around `DefId` that comes out of the hax /// frontend. We use the Rust engine itself to produce the names: we run hax on /// the `engine/names` crate, we extract identifiers from the resulting AST, and /// we expose them back as Rust functions here. pub mod codegen { use itertools::*; use std::iter; use crate::ast::Item; use crate::ast::identifiers::{ GlobalId, global_id::{ExplicitDefId, compact_serialization}, }; use hax_frontend_exporter::DefKind; use std::collections::{HashMap, HashSet}; /// Replace the crate name `"hax_engine_names"` with `"rust_primitives"` in the given `DefId`. 
fn rename_krate(def_id: &mut ExplicitDefId) { if def_id.def_id.krate == "hax_engine_names" { def_id.rename_krate("rust_primitives"); } } /// Visit items and collect all the `DefId`s fn collect_def_ids(items: Vec) -> Vec { #[derive(Default)] struct DefIdCollector(HashSet); use crate::ast::visitors::*; impl AstVisitor for DefIdCollector { fn visit_global_id(&mut self, x: &GlobalId) { let mut current = x.0.explicit_def_id(); while let Some(def_id) = current { self.0.insert(def_id.clone()); current = def_id.parent(); } } } // Collect names let mut names: Vec<_> = DefIdCollector::default() .visit_by_val(&items) .0 .into_iter() .collect(); // In the OCaml engine, `hax_engine_names` is renamed to `rust_primitives`. names.iter_mut().for_each(rename_krate); // We consume names after import by the OCaml engine. Thus, the OCaml // engine may have introduced already some hax-specific Rust names, // directly in `rust_primitives`. After renaming from `hax_engine_names` // to `rust_primitives`, such names may be duplicated. For instance, // that's the case of `unsize`: the crate `hax_engine_names` contains // expression with implicit unsize operations, thus the OCaml engine // inserts `rust_primitives::unsize`. In the same time, // `hax_engine_names::unsize` exists and was renamed to // `rust_primitives::unsize`. Whence the need to dedup here. names.sort(); names.dedup(); names } /// Crafts a docstring for a `DefId`, hopefully (rustdoc) linking it back to /// its origin. fn docstring(explicit_id: &ExplicitDefId) -> String { let id = &explicit_id.def_id; let path = path_of_def_id(explicit_id); let (parent_path, def) = match &path[..] 
{ [init @ .., last] => (init, last.clone()), _ => (&[] as &[_], id.krate.to_string()), }; let parent_path_str = format!("::{}", parent_path.join("::")); let path_str = format!("::{}", path_of_def_id(explicit_id).join("::")); let subject = match &id.kind { DefKind::Mod => format!("module [`{path_str}`]"), DefKind::Struct => format!("struct [`{path_str}`]"), DefKind::Union => format!("union [`{path_str}`]"), DefKind::Enum => format!("enum [`{path_str}`]"), DefKind::Variant => format!("variant [`{path_str}`]"), DefKind::Trait => format!("trait [`{path_str}`]"), DefKind::TyAlias => format!("type alias [`{path_str}`]"), DefKind::ForeignTy => format!("foreign type [`{path_str}`]"), DefKind::TraitAlias => format!("trait alias [`{path_str}`]"), DefKind::AssocTy => format!("associated type [`{path_str}`]"), DefKind::TyParam => format!("type parameter from [`{parent_path_str}`]"), DefKind::Fn => format!("function [`{path_str}`]"), DefKind::Const => format!("const [`{path_str}`]"), DefKind::ConstParam => format!("const parameter from [`{parent_path_str}`]"), DefKind::Static { .. } => format!("static [`{path_str}`]"), DefKind::Ctor { .. } => format!("constructor for [`{parent_path_str}`]"), DefKind::AssocFn => format!("associated function [`{path_str}`]"), DefKind::AssocConst => format!("associated constant [`{path_str}`]"), DefKind::Macro { .. 
} => format!("macro [`{path_str}`]"), DefKind::ExternCrate => format!("extern crate [`{path_str}`]"), DefKind::Use => format!("use item [`{path_str}`]"), DefKind::ForeignMod => format!("foreign module [`{path_str}`]"), DefKind::AnonConst => return "This is an anonymous constant.".to_string(), DefKind::PromotedConst | DefKind::InlineConst => { format!("This is an inline const from [`{parent_path_str}`]") } DefKind::OpaqueTy => { return format!("This is an opaque type for [`{parent_path_str}`]"); } DefKind::Field => format!("field [`{def}`] from {parent_path_str}"), DefKind::LifetimeParam => return "This is a lifetime parameter.".to_string(), DefKind::GlobalAsm => return "This is a global ASM block.".to_string(), DefKind::Impl { .. } => return "This is an impl block.".to_string(), DefKind::Closure => return "This is a closure.".to_string(), DefKind::SyntheticCoroutineBody => return "This is a coroutine body.".to_string(), }; format!("This is the {subject}.") } /// Computes a string path for a `DefId`. 
fn path_of_def_id(explicit_id: &ExplicitDefId) -> Vec {
    // NOTE(review): the element type of the returned `Vec` appears to have
    // been lost in this copy (presumably `Vec<String>`, given
    // `name_to_string` below) — confirm against the upstream sources.
    let id = &explicit_id.def_id;
    /// Sanitizes one path chunk so that it can be used as a Rust identifier.
    fn name_to_string(mut s: String) -> String {
        // `_` on its own is not a valid identifier: give anonymous chunks a
        // printable name instead.
        if s == "_" {
            s = "_anonymous".into();
        };
        // Purely numeric chunks (e.g. tuple-field names) get a `_` prefix.
        // NOTE(review): the turbofish type argument of `parse` is missing in
        // this copy — confirm against the upstream sources.
        if s.parse::().is_ok() {
            s = format!("_{s}");
        }
        s
    }
    // A path always begins with the crate name, followed by each `DefPathItem`.
    iter::once(id.krate.to_string())
        .chain(id.path.iter().map(|item| {
            let data = match item.data.clone() {
                hax_frontend_exporter::DefPathItem::CrateRoot { name } => name,
                hax_frontend_exporter::DefPathItem::TypeNs(s)
                | hax_frontend_exporter::DefPathItem::ValueNs(s)
                | hax_frontend_exporter::DefPathItem::MacroNs(s)
                | hax_frontend_exporter::DefPathItem::LifetimeNs(s) => s,
                data => format!("{data:?}"),
            };
            // Homonymous siblings are told apart with a `__<n>` suffix.
            if item.disambiguator == 0 {
                data
            } else {
                format!("{data}__{}", item.disambiguator)
            }
        }))
        .chain(if explicit_id.is_constructor {
            Some("Constructor".to_string())
        } else {
            None
        })
        .chain(if matches!(id.kind, DefKind::Ctor(..)) {
            // TODO: get rid of `ctor` #1657
            Some("ctor".to_string())
        } else {
            None
        })
        .map(name_to_string)
        .collect()
}

/// Given a list of `DefId`, this will create a Rust code source that provides those names.
///
/// For example, given `krate::module::f` and `krate::g`, this will produce something like:
/// ```rust,ignore
/// mod krate {
///     mod module {
///         fn f() -> DefId {...}
///     }
///     fn g() -> DefId {...}
/// }
/// ```
fn generate_names_hierachy(def_ids: Vec) -> String {
    // NOTE(review): `generate_names_hierachy` misspells "hierarchy";
    // renaming it would also touch its caller, so it is left as is here.
    // NOTE(review): several generic arguments in this function (on `Vec`,
    // `Option`, `HashMap` and the `collect::>()` turbofishes) appear to have
    // been stripped in this copy — confirm against the upstream sources.
    /// Helper struct: a graph of module and definitions.
    #[derive(Debug, Default)]
    struct Module {
        // `DefId` of the module itself, when one exists for this node.
        attached_def_id: Option,
        // Child modules, keyed by their (sanitized) name.
        submodules: HashMap,
        // Leaf definitions in this module: `(name, identifier)` pairs.
        definitions: Vec<(String, ExplicitDefId)>,
    }
    impl Module {
        /// Builds the whole tree: first insert every definition, then attach
        /// to each module node the `DefId` of the module itself (if any).
        fn new(def_ids: Vec) -> Self {
            let mut node = Self::default();
            for def_id in &def_ids {
                node.insert(def_id);
            }
            for def_id in def_ids {
                let modpath = path_of_def_id(&def_id);
                if let Some(module) = node.find_module(&modpath) {
                    module.attached_def_id = Some(def_id.clone());
                }
            }
            node
        }
        /// Insert a `DefId` in our module tree
        fn insert(&mut self, def_id: &ExplicitDefId) {
            let fullpath = path_of_def_id(def_id);
            // An empty path cannot be inserted anywhere: skip it.
            let [modpath @ .., def] = &fullpath[..] else {
                return;
            };
            // Walk (and lazily create) the chain of submodules.
            let mut node = self;
            for chunk in modpath {
                node = node.submodules.entry(chunk.clone()).or_default();
            }
            node.definitions.push((def.clone(), def_id.clone()));
        }
        /// Get a mutable borrow to the submodule denoted by `modpath`, if it exists
        fn find_module(&mut self, modpath: &Vec) -> Option<&mut Self> {
            let mut node = self;
            for chunk in modpath {
                node = node.submodules.get_mut(chunk)?;
            }
            Some(node)
        }
        /// Render the module tree as a string
        fn render(self, path: String, indexes: &HashMap) -> String {
            /// Computes the visibility restriction for a given path.
            fn restriction(path: &str) -> &'static str {
                // Tuples are encoded directly in `GlobalIdInner::Tuple`.
                // The names here exist so that tuple identifiers can be handled in the exact same way as other identifiers.
                // But the canonical representation of tuples is not `names::rust_primitives::hax::Tuple*`.
                // Whence this visibility restriction.
                if path.starts_with("::rust_primitives::hax::Tuple") {
                    "(in crate::ast::identifiers::global_id)"
                } else {
                    ""
                }
            }
            let Self {
                submodules,
                definitions,
                attached_def_id,
            } = self;
            // Render submodules recursively, in a stable (sorted) order so
            // that regeneration is deterministic.
            let submodules = submodules
                .into_iter()
                .sorted_by(|(a, _), (b, _)| a.cmp(b))
                .map(|(name, contents)| {
                    let path = format!("{path}::{name}");
                    let restriction = restriction(&path);
                    format!(
                        r###"pub{restriction} mod {name} {{ {} }}"###,
                        contents.render(path, indexes)
                    )
                });
            // Render each leaf definition as a documented `const` holding the
            // interned `GlobalId` at its table index.
            let definitions = definitions
                .into_iter()
                .sorted_by(|(a, _), (b, _)| a.cmp(b))
                .map(|(name, def_id)| {
                    let docstring = docstring(&def_id);
                    let index = indexes.get(&def_id).unwrap();
                    let restriction = restriction(&format!("{path}::{name}"));
                    format!(r###" #[doc = r##"{docstring}"##] pub{restriction} const {name}: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[{index}]); "###)
                });
            // Inner (`#![doc]`) documentation for the module itself, when a
            // `DefId` is attached to this node.
            let docstring = attached_def_id
                .iter()
                .map(docstring)
                .map(|s| format!(r###"#![doc=r##"{s}"##]"###));
            docstring
                .chain(iter::once("use super::root;".to_string()))
                .chain(submodules)
                .chain(definitions)
                .collect::>()
                .join("\n")
        }
    }
    // Number each `DefId`: its index is its slot in the interning table below.
    let enumerated_def_ids = def_ids
        .iter()
        .cloned()
        .enumerate()
        .map(|(n, def_id)| (def_id, n))
        .collect::>();
    let indexes = HashMap::from_iter(enumerated_def_ids.iter().cloned());
    let tree = Module::new(def_ids).render(String::new(), &indexes);
    // One `did_<i>` helper per identifier, deserializing its compact form;
    // parents are wired up by index so that ancestors are shared.
    let functions = {
        enumerated_def_ids.iter().map(|(did, i)| {
            let serialized = compact_serialization::serialize(did);
            let parent = did.parent().as_ref().map(|parent| *indexes.get(parent).unwrap()).map(|parent| format!("Some(did_{parent}())")).unwrap_or("None".into());
            format!(r###"fn did_{i}() -> ExplicitDefId {{deserialize(r##"{serialized}"##, {parent})}}"###)
        }).collect::>().join("\n")
    };
    let array_literal = enumerated_def_ids
        .iter()
        .map(|(_, i)| format!("did_{i}().into_global_id_inner()"))
        .collect::>()
        .join(",");
    let n = indexes.len();
    // NOTE(review): the generated-file template below looks garbled in this
    // copy (missing generic arguments and original line breaks inside the raw
    // string) — confirm against the upstream sources before editing it.
    format!(
        r#"// This file was generated by `cargo hax into generate-rust-engine-names`. // To regenerate it, please use `just regenerate-names`. Under the hood, `cargo // hax into generate-rust-engine-names` runs the Rust engine, which in turn // calls `rust_engine::names::export_def_ids_to_mod`. static TABLE_AND_INTERNED_GLOBAL_IDS: (crate::interning::LazyLockNewWithValue, [crate::interning::Interned; {n}]) = {{ crate::interning::InterningTable::new_with_values(|| {{ use crate::ast::identifiers::global_id::ExplicitDefId; use crate::ast::identifiers::global_id::compact_serialization::deserialize; {functions} [{array_literal}] }}) }}; static INTERNED_GLOBAL_IDS: [crate::interning::Interned; {n}] = TABLE_AND_INTERNED_GLOBAL_IDS.1; impl crate::interning::Internable for crate::ast::identifiers::global_id::GlobalIdInner {{ fn interning_table() -> &'static std::sync::Mutex> {{ &TABLE_AND_INTERNED_GLOBAL_IDS.0 }} }} {tree} "#
    )
}

/// Finds all `DefId`s in `items`, and produce a Rust module exposing them.
pub fn export_def_ids_to_mod(items: Vec) -> String { generate_names_hierachy(collect_def_ids(items)) } } ================================================ FILE: rust-engine/src/ast/identifiers/global_id/view.rs ================================================ //! Helpers to view and reason about **path segments** in Rust items. //! //! This module encodes a number of rustc invariants about which items are named vs. //! unnamed and which items have parents. Those invariants are enforced at runtime and //! will emit diagnostic if the invariants are broken. //! //! # What is a path segment? //! //! In Rust, every item lives inside a path. Every path begins with a crate root //! (the crate where the item is defined). //! //! For example, imagine a crate called `my_crate` with this code: //! //! ```ignore //! mod a { //! mod b { //! fn hello() {} //! } //! } //! ``` //! //! The function `hello` has the full path `my_crate::a::b::hello`. //! This path is made up of segments: `my_crate`, `a`, `b`, and `hello`. //! //! This module represents those segments as typed values, enriched with extra //! information such as: //! - whether the segment is named or unnamed (e.g. anonymous const or impl blocks), //! - what kind of item it points to (a crate, module, struct, field, associated fn, etc.), //! - its parent segment in the hierarchy (e.g. a field belongs to a constructor, //! which belongs to a type, which belongs to a module, which belongs to a crate). //! //! # Hierarchical nature of segments //! //! Segments form a hierarchy of ownership, starting from the crate root. //! For example, in a crate called `my_crate`: //! //! ```ignore //! struct Foo { //! bar: u32, //! } //! ``` //! //! The field `bar` is represented as a `Field` segment. It knows its parent is //! the constructor of `Foo`, which knows its parent is the type definition `Foo`, //! which in turn belongs to the crate `my_crate`. //! //! The hierarchy looks like this: //! //! ```text //! my_crate (crate) //! 
└── Foo (type) //! └── Foo (constructor) //! └── bar (field) //! ``` //! //! Similarly, with associated items in a crate `my_crate`: //! //! ```ignore //! trait T { //! fn f(); //! } //! ``` //! //! The function `f` is represented as an `AssocItem` segment, whose parent is the //! container `T` (a `Trait` segment), and ultimately the crate root: //! //! ```text //! my_crate (crate) //! └── T (trait) //! └── f (assoc fn) //! ``` //! //! This hierarchical model makes it possible to: //! - reliably find the **parent** of any segment (`bar` → constructor → type → crate), //! - disambiguate names in backends (e.g. when two crates define constructors with the //! same name, the crate root keeps them separate), //! - traverse full paths in a strongly-typed way (using [`View`] or [`PathSegment::parents`]). //! //! # Examples //! //! For this Rust code in crate `my_crate`: //! //! ```ignore //! mod a { //! trait Foo { //! fn f() { //! enum T { //! C { field: u8 }, //! } //! } //! } //! } //! ``` //! //! We can represent various identifiers as hierarchical segments: //! //! | Path | Segments | //! |------------------------------------|----------------------------------------------| //! | `my_crate` | `[my_crate]` | //! | `my_crate::a` | `[my_crate], [a]` | //! | `my_crate::a::b::hello` | `[my_crate], [a], [b], [hello]` | //! | `my_crate::a::Foo` | `[my_crate], [a], [Foo]` | //! | `my_crate::a::Foo::f` | `[my_crate], [a], [Foo::f]` | //! | `my_crate::a::Foo::f::T` | `[my_crate], [a], [Foo::f], [T]` | //! | `my_crate::a::Foo::f::T::C` | `[my_crate], [a], [Foo::f], [T::C]` | //! | `my_crate::a::Foo::f::T::C::field` | `[my_crate], [a], [Foo::f], [T::C::field]` | //! use hax_frontend_exporter::{CtorOf, DefKind, DefPathItem, ImplInfos}; use crate::{ ast::identifiers::global_id::{DefId, ExplicitDefId}, symbol::Symbol, }; #[derive(Debug, Clone)] /// The kind of a type definition: `struct`, `enum`, or `union`. pub enum TypeDefKind { /// A `struct` definition. 
// --- variants of `TypeDefKind` (the enum is opened just above) ---
    Struct,
    /// An `enum` definition.
    Enum,
    /// A `union` definition.
    Union,
}

#[derive(Debug, Clone)]
/// The kind of a container for associated items (i.e., a `trait` or an `impl` block).
pub enum AssocItemContainerKind {
    /// An `impl` block.
    Impl {
        /// `true` if this is an inherent `impl` (no trait), `false` if it implements a trait.
        inherent: bool,
        /// Optional extra information about the impl (if available from the frontend).
        ///
        /// `None` when such information is not provided/collected.
        // NOTE(review): the generic argument of `Option` appears stripped in
        // this copy (presumably `Option<ImplInfos>`) — confirm upstream.
        impl_infos: Option,
    },
    /// A `trait` definition.
    Trait {
        /// `true` if this is a trait alias (a type alias to a trait).
        trait_alias: bool,
    },
}

#[derive(Debug, Clone)]
/// The kind of a constructor (tuple struct/variant/struct-ctor function).
pub enum ConstructorKind {
    /// A constructor associated to a concrete type definition `ty`.
    Constructor {
        /// The type constructed.
        // NOTE(review): `PathSegment` seems to be missing its generic
        // argument in this copy — confirm against the upstream sources.
        ty: PathSegment,
    },
}

#[derive(Debug, Clone)]
/// The kind of an associated item within a trait or impl.
pub enum AssocItemKind {
    /// An associated function.
    Fn,
    /// An associated constant.
    Const,
    /// An associated type.
    Ty,
}

#[derive(Debug, Clone)]
/// The kind of any item that can occur as a path segment.
///
/// This is a sum type that makes [`PathSegment`] expressive enough to encode
/// precise parents (e.g., a field always has a constructor parent, an associated item
/// always has a trait/impl container, etc.).
pub enum AnyKind {
    /// A type definition (`struct`, `enum`, or `union`).
    TypeDef(TypeDefKind),
    /// A container of associated items (`trait` or `impl`).
    AssocItemContainer(AssocItemContainerKind),
    /// A constructor (for a struct or enum variant).
    Constructor(ConstructorKind),
    /// An associated item.
    AssocItem {
        /// Which associated item kind this is.
        kind: AssocItemKind,
        /// The parent container (trait or impl) of this associated item.
        container: PathSegment,
    },
    /// A standalone function.
    Fn,
    /// A standalone constant.
    Const,
    /// A `use` item.
    Use,
    /// An anonymous constant (e.g., `const _: T = ...;`).
    AnonConst,
    /// An inline constant (e.g., `let x = { const Y: i32 = 0; Y };`).
    InlineConst,
    /// A trait alias.
    TraitAlias,
    /// A foreign module (`extern "C" { ... }`).
    Foreign,
    /// A foreign type (`extern type T;`).
    ForeignTy,
    /// A type alias (`type Foo = Bar;`).
    TyAlias,
    /// An `extern crate` item.
    ExternCrate,
    /// An opaque item (e.g., `type Foo = impl Trait;`).
    Opaque,
    /// A `static` item.
    Static,
    /// A macro definition or export.
    Macro,
    /// A module or crate.
    Mod,
    /// A global assembly block.
    GlobalAsm,
    /// A field of a struct or a struct-like enum variant.
    Field {
        /// `true` if the field is *named* (e.g., `x` in `struct S { x: u8 }`);
        /// `false` if it is *unnamed* (tuple field like `0` in `struct T(u8)`).
        named: bool,
        /// The parent constructor that owns this field.
        ///
        /// Example: The parent of `x` is the constructor of `Foo` in:
        /// `struct Foo { x: u8 }`.
        parent: PathSegment,
    },
    /// A closure expression item.
    Closure,
}

#[derive(Debug, Clone)]
/// Payloads used when a path segment is **unnamed**.
///
/// These correspond to items that do not contribute a user-facing identifier in the path.
pub enum UnnamedPathSegmentPayload {
    /// An `impl` block.
    Impl,
    /// An anonymous constant.
    AnonConst,
    /// An inline constant.
    InlineConst,
    /// A foreign module or crate.
    Foreign,
    /// A global assembly code block.
    GlobalAsm,
    /// A `use` item.
    Use,
    /// An opaque item (e.g., `type Foo = impl Trait;`).
    Opaque,
    /// A closure.
    Closure,
}

/// Each path segment carries a payload:
/// - [`PathSegmentPayload::Named`] with a user-decided name, or
/// - [`PathSegmentPayload::Unnamed`] for items that are anonymous in the path.
#[derive(Debug, Clone)]
pub enum PathSegmentPayload {
    /// A named segment (holds the name as a [`Symbol`]).
    Named(Symbol),
    /// An unnamed segment with a categorized payload.
    Unnamed(UnnamedPathSegmentPayload),
}

mod rustc_invariant_handling {
    //!
This modules provides the function `error_dummy_value`, which emits errors. use std::any::{Any, type_name}; use std::fmt::Debug; use super::*; use crate::{ ast::{ diagnostics::{Context, DiagnosticInfo}, span::Span, }, names, }; use hax_types::diagnostics::Kind; #[derive(Clone, Copy)] /// Restrict [`ErrorDummyValue`] callers pub struct Permit(()); pub trait ErrorDummyValue { fn error_dummy_value(_: Permit) -> Self; } impl ErrorDummyValue for PathSegmentPayload { fn error_dummy_value(_: Permit) -> Self { Self::Named(Symbol::new("hax_engine_view_fatal_error")) } } impl ErrorDummyValue for TypeDefKind { fn error_dummy_value(_: Permit) -> Self { TypeDefKind::Enum } } impl ErrorDummyValue for ConstructorKind { fn error_dummy_value(permit: Permit) -> Self { ConstructorKind::Constructor { ty: PathSegment::::error_dummy_value(permit), } } } impl ErrorDummyValue for PathSegment { fn error_dummy_value(permit: Permit) -> Self { Self { identifier: DefId::error_dummy_value(permit), payload: PathSegmentPayload::error_dummy_value(permit), disambiguator: 0, kind: K::error_dummy_value(permit), } } } impl ErrorDummyValue for AnyKind { fn error_dummy_value(_: Permit) -> Self { Self::Fn } } impl ErrorDummyValue for DefId { fn error_dummy_value(_: Permit) -> Self { match names::rust_primitives::hax::failure.0.get() { crate::ast::identifiers::global_id::GlobalIdInner::Concrete(concrete_id) => { concrete_id.def_id.def_id } // The error dummy value is generated by hax, with a concrete identifier _ => unreachable!("Hax generated name for failure is concrete"), } } } impl ErrorDummyValue for AssocItemContainerKind { fn error_dummy_value(_: Permit) -> Self { AssocItemContainerKind::Trait { trait_alias: false } } } impl ErrorDummyValue for bool { fn error_dummy_value(_: Permit) -> Self { true } } pub(super) fn error_dummy_value( message: &str, value: &V, ) -> T { let details = format!( "A rustc invariant about `DefId` was violated.\nContext: {message}.\nValue (type {}) is:\n{value:#?}", 
type_name::() ); DiagnosticInfo { context: Context::NameView, span: Span::dummy(), kind: Kind::AssertionFailure { details }, } .emit(); T::error_dummy_value(Permit(())) } } use rustc_invariant_handling::error_dummy_value; impl PathSegmentPayload { /// Constructs a [`PathSegmentPayload`] from an [`ExplicitDefId`], assuming its last /// path segment is named. fn from_named(def_id: &ExplicitDefId) -> Self { Self::Named(match def_id.def_id.path.last() { Some(last) => match &last.data { DefPathItem::TypeNs(s) | DefPathItem::ValueNs(s) | DefPathItem::MacroNs(s) | DefPathItem::LifetimeNs(s) => Symbol::new(s), _ => return error_dummy_value("PathSegmentPayload::from_named", def_id), }, None => Symbol::new(&def_id.def_id.krate), }) } /// Constructs a [`PathSegmentPayload`] from an [`ExplicitDefId`], assuming its last /// path segment is unnamed. fn from_unnamed(def_id: &ExplicitDefId) -> Result { match def_id.def_id.path.last() { Some(last) => match &last.data { DefPathItem::TypeNs(_) | DefPathItem::ValueNs(_) | DefPathItem::MacroNs(_) | DefPathItem::LifetimeNs(_) => { return Err("PathSegmentPayload::from_unnamed, got name"); } _ => (), }, None => return Err("PathSegmentPayload::from_unnamed, got a root crate"), }; Ok(Self::Unnamed(match &def_id.def_id.kind { DefKind::Use => UnnamedPathSegmentPayload::Use, DefKind::ForeignMod => UnnamedPathSegmentPayload::Foreign, DefKind::AnonConst => UnnamedPathSegmentPayload::AnonConst, DefKind::InlineConst => UnnamedPathSegmentPayload::InlineConst, DefKind::OpaqueTy => UnnamedPathSegmentPayload::Opaque, DefKind::GlobalAsm => UnnamedPathSegmentPayload::GlobalAsm, DefKind::Impl { .. } => UnnamedPathSegmentPayload::Impl, DefKind::Closure => UnnamedPathSegmentPayload::Closure, _ => return Err("PathSegmentPayload::from_unnamed, bad kind"), })) } /// Constructs a [`PathSegmentPayload`] from an [`ExplicitDefId`], dispatching to /// `from_named` or `from_unnamed` according to the item's [`DefKind`]. 
/// /// This encodes rustc invariants about which kinds are name-bearing in paths. fn from_def_id(def_id: &ExplicitDefId) -> Self { match &def_id.def_id.kind { DefKind::Mod | DefKind::Struct | DefKind::Union | DefKind::Enum | DefKind::Variant | DefKind::Trait | DefKind::TyAlias | DefKind::ForeignTy | DefKind::TraitAlias | DefKind::AssocTy | DefKind::Fn | DefKind::Const | DefKind::Static { .. } | DefKind::Ctor { .. } | DefKind::AssocFn | DefKind::AssocConst | DefKind::Macro { .. } | DefKind::ExternCrate | DefKind::Field => Self::from_named(def_id), DefKind::Use | DefKind::ForeignMod | DefKind::AnonConst | DefKind::InlineConst | DefKind::OpaqueTy | DefKind::GlobalAsm | DefKind::Impl { .. } | DefKind::Closure => Self::from_unnamed(def_id) .unwrap_or_else(|message| error_dummy_value(message, def_id)), DefKind::TyParam | DefKind::ConstParam | DefKind::PromotedConst | DefKind::LifetimeParam | DefKind::SyntheticCoroutineBody => error_dummy_value( "PathSegmentPayload::from_def_id, kinds should never appear", def_id, ), } } } #[derive(Debug, Clone)] /// A typed path segment: one "piece" of a Rust path, with extra structure. /// /// # What does that mean? /// /// In Rust, every item (function, type, trait, field...) has a path starting at /// its crate root. 
For example, in a crate called `my_crate`: /// /// ```ignore /// mod a { /// mod b { /// fn hello() {} /// } /// trait Foo { /// fn f() { /// enum T { /// C { field: u8 }, /// } /// } /// } /// } /// ``` /// /// Some paths and their **segments** are: /// /// | Path | Segments | /// |------------------------------------|----------------------------------------------| /// | `my_crate` | `[my_crate]` | /// | `my_crate::a` | `[my_crate], [a]` | /// | `my_crate::a::b::hello` | `[my_crate], [a], [b], [hello]` | /// | `my_crate::a::Foo` | `[my_crate], [a], [Foo]` | /// | `my_crate::a::Foo::f` | `[my_crate], [a], [Foo::f]` | /// | `my_crate::a::Foo::f::T` | `[my_crate], [a], [Foo::f], [T]` | /// | `my_crate::a::Foo::f::T::C` | `[my_crate], [a], [Foo::f], [T::C]` | /// | `my_crate::a::Foo::f::T::C::field` | `[my_crate], [a], [Foo::f], [T::C::field]` | /// /// Each `[X]` here is a **path segment**. /// /// # Hierarchy /// /// Path segments form a hierarchy: each one knows its parent. For example, the /// field `my_field` is inside the constructor of `MyVariant`, which is inside /// the enum `MyEnum`, which lives inside the function `f`, and so on -- all the /// way up to the crate root. /// /// This parenthood is important: /// - a field segment always has a constructor parent /// (e.g. `my_field → MyVariant`). /// - an associated item always has a trait/impl container parent /// (e.g. `f → Foo`). /// - everything ultimately has a **crate** as its top parent. /// /// # Why does this matter? /// /// This strong typing of segments lets tools: /// - disambiguate names across contexts (e.g. two types with the same /// constructor name), /// - generate unique, human-readable names in other languages/backends, /// - walk up the chain of parents to reconstruct full paths. /// /// For example, with the F\* backend, constructors are not namespaced under the /// name of their type, but live directly at top-level. Thus, they need to be /// unique. 
Using the hierarchy, we can print them as `Foo_MyVariant` instead of /// `Foo.MyVariant`. pub struct PathSegment { identifier: DefId, payload: PathSegmentPayload, disambiguator: u32, kind: Kind, } impl PathSegment { /// Returns the payload of this path segment (named vs. unnamed and why). pub fn payload(&self) -> PathSegmentPayload { self.payload.clone() } /// Returns the rustc path disambiguator for this segment. pub fn disambiguator(&self) -> u32 { self.disambiguator } /// Returns the kind of this segment as an [`K`]. pub fn kind(&self) -> &K { &self.kind } /// Maps the segment's `kind` while preserving all other fields. fn map(self, f: impl Fn(K, &DefId) -> U) -> PathSegment { let Self { identifier, payload, disambiguator, kind, } = self; let kind = f(kind, &identifier); PathSegment { identifier, payload, disambiguator, kind, } } } impl PathSegment { /// Lift a `PathSegment` of kind `ConstructorKind` to a `PathSegment` of kind `AnyKind`. pub fn lift(&self) -> PathSegment { self.clone().map(|kind, _| AnyKind::Constructor(kind)) } } impl PathSegment { /// Lift a `PathSegment` of kind `TypeDefKind` to a `PathSegment` of kind `AnyKind`. pub fn lift(&self) -> PathSegment { self.clone().map(|kind, _| AnyKind::TypeDef(kind)) } } impl PathSegment { /// Lift a `PathSegment` of kind `AssocItemContainerKind` to a `PathSegment` of kind `AnyKind`. pub fn lift(&self) -> PathSegment { self.clone() .map(|kind, _| AnyKind::AssocItemContainer(kind)) } } impl PartialEq for PathSegment { fn eq(&self, other: &PathSegment) -> bool { self.identifier == other.identifier && self.disambiguator == other.disambiguator } } impl PathSegment { /// Asserts that this segment is a [`TypeDefKind`] and narrows the type. 
/// /// Emits a diagnostic if it doesn fn assert_type_def(self) -> PathSegment { self.map(|kind, did| match kind { AnyKind::TypeDef(inner) => inner, _ => error_dummy_value(&format!("expected TypeDefKind, got {kind:#?}"), did), }) } /// Asserts that this segment is an [`AssocItemContainerKind`] and narrows the type. fn assert_assoc_item_container(self) -> PathSegment { self.map(|kind, did| match kind { AnyKind::AssocItemContainer(inner) => inner, _ => error_dummy_value( &format!("expected AssocItemContainerKind, got {kind:#?}"), did, ), }) } /// Asserts that this segment is a [`ConstructorKind`] and narrows the type. fn assert_constructor(self) -> PathSegment { self.map(|kind, did| match kind { AnyKind::Constructor(inner) => inner, _ => error_dummy_value(&format!("expected ConstructorKind, got {kind:#?}"), did), }) } /// Internal constructor that consumes an iterator of [`ExplicitDefId`]s (from child /// to parents) and builds a single [`PathSegment`] at a time, honoring rustc /// invariants and wiring proper parents for kinds that require them /// (constructors, fields, associated items). /// /// Returns `None` when the iterator is exhausted. fn from_iterator(it: &mut impl Iterator) -> Option { let def_id = it.next()?; let mut from_iterator = |context: &str| match Self::from_iterator(it) { Some(value) => value, None => error_dummy_value( &format!("PathSegment::from_iterator, expected parent for {context}."), &def_id, ), }; let payload = PathSegmentPayload::from_def_id(&def_id); let kind = match &def_id.def_id.kind { // Struct constructor path segment special-casing (struct-as-ctor). 
DefKind::Ctor(CtorOf::Struct, _) | DefKind::Struct if def_id.is_constructor => { let parent_def_id = ExplicitDefId { is_constructor: false, def_id: def_id.def_id, }; let parent = match Self::from_iterator(&mut std::iter::once(parent_def_id)) { Some(value) => value, None => error_dummy_value( "PathSegment::from_iterator, expected parent for Struct/Ctor.", &def_id, ), }; AnyKind::Constructor(ConstructorKind::Constructor { ty: parent.assert_type_def(), }) } // Non-ctor struct item. DefKind::Ctor(CtorOf::Struct, _) => AnyKind::TypeDef(TypeDefKind::Struct), // Enum variants and non-struct ctors. DefKind::Variant | DefKind::Ctor(_, _) => { AnyKind::Constructor(ConstructorKind::Constructor { ty: from_iterator("Variant/Ctor").assert_type_def(), }) } DefKind::Struct => AnyKind::TypeDef(TypeDefKind::Struct), DefKind::Union => AnyKind::TypeDef(TypeDefKind::Union), DefKind::Enum => AnyKind::TypeDef(TypeDefKind::Enum), DefKind::Trait => { AnyKind::AssocItemContainer(AssocItemContainerKind::Trait { trait_alias: false }) } DefKind::Impl { of_trait } => AnyKind::AssocItemContainer( AssocItemContainerKind::Impl { inherent: !of_trait, impl_infos: /* intentionally left None; fill where available */ None }, ), // Simple leaf kinds. DefKind::Mod => AnyKind::Mod, DefKind::Fn => AnyKind::Fn, DefKind::Const => AnyKind::Const, DefKind::Static { .. } => AnyKind::Static, DefKind::Use => AnyKind::Use, DefKind::TyAlias => AnyKind::TyAlias, DefKind::TraitAlias => AnyKind::TraitAlias, DefKind::ForeignTy => AnyKind::ForeignTy, DefKind::ForeignMod => AnyKind::Foreign, DefKind::Macro { .. } => AnyKind::Macro, DefKind::AnonConst => AnyKind::AnonConst, DefKind::OpaqueTy => AnyKind::Opaque, DefKind::GlobalAsm => AnyKind::GlobalAsm, DefKind::Closure => AnyKind::Closure, DefKind::ExternCrate => AnyKind::ExternCrate, // Field: requires a constructor parent and conveys whether it's named. 
DefKind::Field => AnyKind::Field { parent: from_iterator("Field").assert_constructor(), named: match &payload { PathSegmentPayload::Named(symbol) => { // Tuple fields are numbered; parse success => unnamed field. str::parse::(symbol.as_ref()).is_ok() } PathSegmentPayload::Unnamed(_) => { error_dummy_value("Field should carry a ValueNs payload.", &def_id) } }, }, // Associated items: require a container parent. DefKind::AssocTy => AnyKind::AssocItem { container: from_iterator("AssocTy").assert_assoc_item_container(), kind: AssocItemKind::Ty, }, DefKind::AssocFn => AnyKind::AssocItem { container: from_iterator("AssocFn").assert_assoc_item_container(), kind: AssocItemKind::Fn, }, DefKind::AssocConst => AnyKind::AssocItem { container: from_iterator("AssocConst").assert_assoc_item_container(), kind: AssocItemKind::Const, }, _ => error_dummy_value("PathSegment::from_iterator_opt", &def_id), }; let identifier = def_id.def_id; let disambiguator = identifier.path.last().map(|d| d.disambiguator).unwrap_or(0); Some(Self { identifier, payload, disambiguator, kind, }) } } impl PathSegment { /// Returns the parent path segment, if any. /// /// Parents exist only for: /// - [`AnyKind::Constructor`] (parent is its [`TypeDefKind`]), /// - [`AnyKind::AssocItem`] (parent is its container `trait`/`impl`), /// - [`AnyKind::Field`] (parent is its constructor). /// /// All other kinds return `None`. pub fn parent(&self) -> Option { Some(match self.kind.clone() { AnyKind::Constructor(ConstructorKind::Constructor { ty }) => { ty.map(|kind, _| AnyKind::TypeDef(kind)) } AnyKind::AssocItem { container, .. } => { container.map(|kind, _| AnyKind::AssocItemContainer(kind)) } AnyKind::Field { parent, .. } => parent.map(|kind, _| AnyKind::Constructor(kind)), _ => return None, }) } /// Returns an iterator over `self` and all its ancestors, walking up via /// [`Self::parent`] until no parent remains. 
pub fn parents(&self) -> impl Iterator { std::iter::successors(Some(self.clone()), |seg| seg.parent()) } } mod view_encapsulation { //! Encapsulation module to scope [`View`]'s invariants use crate::ast::{ identifiers::global_id::{FreshModule, ReservedSuffix}, span::Span, }; use super::*; /// A view for an [`ExplicitDefId`], materialized as a list of typed /// [`PathSegment`]s ordered from the crate root/module towards the item. pub struct View(Vec, Option); impl View { /// Returns the full list of segments (non-empty). pub fn segments(&self) -> &[PathSegment] { &self.0 } /// Returns the last (most specific) segment. pub fn last(&self) -> &PathSegment { self.0 .last() .expect("Broken invariant: a view always contains at least one path path segments.") } /// Returns the first (outermost) segment. pub fn first(&self) -> &PathSegment { self.0 .first() .expect("Broken invariant: a view always contains at least one path path segments.") } /// Splits the view at the boundary between (Rust) modules and the first non-module /// segment. /// /// Returns `(modules, rest)`, where `modules` is the (non empty) prefix of /// `mod` segments (e.g., the crate/module path), and `rest` is the remaining /// segments starting at the first non-`mod`. pub fn split_at_module(&self) -> (&[PathSegment], &[PathSegment]) { let position = self .segments() .iter() .enumerate() .find(|(_, seg)| !matches!(seg.kind(), AnyKind::Mod)) .map(|(i, _)| i) .unwrap_or(self.segments().len()); self.segments().split_at(position) } /// Get the first parent which is a proper module (all its parent are modules as well). 
pub fn module(&self) -> &PathSegment { self.0 .iter() .take_while(|seg| !matches!(seg.kind(), AnyKind::Mod)) .last() .expect("Broken invariant, a name has at least a crate") } /// Get the optional suffix of this view pub fn suffix(&self) -> &Option { &self.1 } /// Add a suffix to a view pub fn with_suffix(mut self, suffix: Option) -> Self { self.1 = suffix; self } } impl From for View { /// Builds a [`View`] from an [`ExplicitDefId`], reconstructing segments by walking /// up the parent chain and then reversing to obtain the canonical outer→inner order. fn from(value: ExplicitDefId) -> Self { let mut it = value.parents(); let mut inner = std::iter::from_fn(|| PathSegment::from_iterator(&mut it)).collect::>(); inner.reverse(); debug_assert!(!inner.is_empty()); // invariant: non-empty Self(inner, None) } } impl From for View { fn from(value: FreshModule) -> Self { use crate::ast::diagnostics::{Context, DiagnosticInfo}; (DiagnosticInfo { context: Context::NameView, span: Span::dummy(), kind: hax_types::diagnostics::Kind::Unimplemented { issue_id: Some(1779), details: Some( "Fresh modules are not implemented yet in the Rust engine".into(), ), }, }) .emit(); // dummy value value .hints .first() .expect("The list of hints should be non-empty") .clone() .into() } } } pub use view_encapsulation::View; ================================================ FILE: rust-engine/src/ast/identifiers/global_id.rs ================================================ //! The global identifiers of hax. //! //! ## Public API //! The main type provided by this module is `GlobalId`. //! //! A global identifier is either: //! - a concrete identifier, something that could be represented as a Rust path //! - a tuple identifier //! //! To print a global identifier, you have to use the method [`GlobalId::view`], //! which will output a [`view::View`]. //! //! You can also try to interpret a global identifier as a tuple identifier //! ([`TupleId`]) via the method [`GlobalId::expect_tuple`]. //! //! 
## Internal representations //! [`GlobalId`] is a wrapper for an interned [`GlobalIdInner`]. //! //! A [`GlobalIdInner`] is either a [`ConcreteId`] or a [`TupleId`]. A //! [`GlobalId`] can always be turned into a [`ConcreteId`]. //! //! A [`ConcreteId`] is an [`ExplicitDefId`] that can be moved to fresh //! namespaces or suffixed with reserved suffixes. //! //! An [`ExplicitDefId`] is a [`DefId`] that adds one piece of information: is //! the identifier refering to a constructor or not. This information is //! ambiguous in Rust's `DefId`s. //! //! A [`DefId`] is an interned [`DefIdInner`], which in turn is a datatype //! isomorphic to the raw representation of `DefId`s in the frontend. //! //! A [`DefIdInner`] is basically a definition kind, a krate name and a path. use hax_frontend_exporter::{DefKind, DefPathItem, DisambiguatedDefPathItem}; use hax_rust_engine_macros::*; use crate::interning::{Internable, Interned, InterningTable}; mod compact_serialization; pub(crate) mod generated_names; pub mod view; /// A Rust `DefId`: a lighter version of [`hax_frontend_exporter::DefId`]. #[derive_group_for_ast] struct DefIdInner { /// The crate of the definition krate: String, /// The full path for this definition, under the crate `krate` path: Vec, /// The parent `DefId`, if any. /// `parent` if node if and only if `path` is empty parent: Option, /// What kind is this definition? (e.g. an `enum`, a `const`, an assoc. `fn`...) kind: DefKind, } impl From for DefIdInner { fn from(value: hax_frontend_exporter::DefId) -> Self { Self { krate: value.krate.clone(), path: value.path.clone(), parent: value .parent .clone() .map(|def_id| DefIdInner::from(def_id).intern()), kind: value.kind.clone(), } } } impl DefIdInner { /// Change the krate field of `self` and propagate the change into all parents. 
fn rename_krate(&self, name: &str) -> Self { let mut def_id = self.clone(); def_id.krate = name.into(); def_id.parent = def_id.parent.map(|parent: DefId| parent.rename_krate(name)); def_id } fn to_debug_string(&self) -> String { fn disambiguator_suffix(disambiguator: u32) -> String { if disambiguator == 0 { "".into() } else { format!("__{disambiguator}") } } use itertools::Itertools; std::iter::once(self.krate.clone()) .chain(self.path.iter().map(|item| match &item.data { DefPathItem::TypeNs(s) | DefPathItem::ValueNs(s) | DefPathItem::MacroNs(s) | DefPathItem::LifetimeNs(s) => s.clone(), DefPathItem::Impl => "impl".into(), other => format!("{other:?}"), } + &disambiguator_suffix(item.disambiguator))) .join("::") } } use std::{ cell::{LazyCell, RefCell}, collections::HashMap, sync::{LazyLock, Mutex}, }; impl Internable for DefIdInner { fn interning_table() -> &'static Mutex> { static TABLE: LazyLock>> = LazyLock::new(|| Mutex::new(InterningTable::default())); &TABLE } } /// An interned Rust `DefId`: a lighter version of [`hax_frontend_exporter::DefId`]. type DefId = Interned; impl DefId { /// Change the krate name to `name`. fn rename_krate(&self, name: &str) -> Self { (*self).get().rename_krate(name).intern() } } /// An [`ExpliciDefId`] is a Rust [`DefId`] tagged withg some disambiguation metadata. /// /// [`DefId`] can be ambiguous, consider the following Rust code: /// /// ```rust /// struct S; /// fn f() -> S { S } /// ``` /// /// Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refer to the exact same identifier `mycrate::S`. /// Yet, they denote two very different objects: a type versus a constructor. /// /// [`ExplicitDefId`] clears up this ambiguity, making constructors and types two separate things. /// /// Also, an [`ExplicitDefId`] always points to an item: an [`ExplicitDefId`] is never pointing to a crate alone. #[derive_group_for_ast] struct ExplicitDefId { /// Is this `DefId` a constructor? 
is_constructor: bool, /// The `DefId` itself def_id: DefId, } impl ExplicitDefId { /// Get the parent of an `ExplicitDefId`. fn parent(&self) -> Option { let def_id = &self.def_id; let is_constructor = matches!(&def_id.kind, DefKind::Field); Some(Self { is_constructor, def_id: def_id.parent?, }) } /// Returns an iterator that yields `self`, then `self.parent()`, etc. /// This iterator is non-empty. fn parents(&self) -> impl Iterator { std::iter::successors(Some(self.clone()), |id| id.parent()) } /// Change the krate name to `name`. fn rename_krate(&mut self, name: &str) { self.def_id = self.def_id.rename_krate(name); } /// Helper to get a `GlobalIdInner` out of an `ExplicitDefId`. fn into_global_id_inner(self) -> GlobalIdInner { GlobalIdInner::Concrete(ConcreteId { def_id: self, moved: None, suffix: None, }) } } /// Represents a fresh module: a module generated by hax and guaranteed to be fresh. #[derive_group_for_ast] pub struct FreshModule { /// Internal (unique) identifier id: usize, /// Non-empty list of identifiers that will be used to decide the name of the fresh module. hints: Vec, /// A decoration label that will be also used to decide the name of the fresh module. label: String, } impl FreshModule { /// Renders a view of the fresh module identifier. fn view(&self) -> view::View { self.clone().into() } /// Change the krate name in all hints. fn rename_krate(&self, name: &str) -> Self { let hints = self .hints .iter() .map(|hint| { let mut hint = hint.clone(); hint.rename_krate(name); hint }) .collect(); Self { hints, id: self.id, label: self.label.clone(), } } fn to_debug_string(&self) -> String { format!("fresh_module_{}_{}", self.id, self.label) } } /// [`ReservedSuffix`] helps at deriving fresh identifiers out of existing (Rust) ones. #[derive_group_for_ast] pub enum ReservedSuffix { /// Precondition of a function-like item. Pre, /// Postcondition of a function-like item. Post, /// Cast function for an `enum` discriminant. 
Cast, } /// A identifier that we call concrete: it exists concretely somewhere in Rust. #[derive_group_for_ast] pub struct ConcreteId { /// The explicit `def_id`. def_id: ExplicitDefId, /// A fresh module if this definition was moved to a fresh module. moved: Option, /// An optional suffix. suffix: Option, } /// A global identifier in hax. #[derive_group_for_ast] enum GlobalIdInner { /// A concrete identifier that exists in Rust. Concrete(ConcreteId), /// A fresh module introduced by Hax (typically, a bundle) FreshModule(FreshModule), /// A projector. Tuple(TupleId), } #[derive_group_for_ast] #[derive(Copy)] /// Represents tuple-related identifier in Rust. /// /// Since Rust tuples do not have user-defined names, this type is used to /// represent synthesized identifiers for tuple types, their constructors, and /// fields. This is necessary in cases where we need to refer to these /// components in a structured and identifiable way. /// /// For ergnomic purposes, `TupleId` can be transformed into `ConcreteId`s. /// After such a conversion, we loose structure, but we end up with a standard /// concrete identifier, which can be printed in a generic way. /// See [`ConcreteId::from_global_id`]. pub enum TupleId { /// Represents a tuple type with the given number of elements. /// /// For example, a tuple like `(i32, bool, String)` would have `length = 3`. Type { /// Number of elements in the tuple. length: usize, }, /// Represents the constructor function for a tuple with the given arity. /// /// This refers to the tuple expression itself (e.g., `(x, y, z)`), which constructs /// a value of the tuple type. Constructor { /// Number of elements in the tuple. length: usize, }, /// Represents a field within a tuple, addressed by position. /// /// For instance, accessing `.0` or `.1` on a tuple corresponds to a specific field. Field { /// Number of elements in the tuple. length: usize, /// Index of the field (zero-based). 
field: usize, }, } impl From for GlobalId { fn from(tuple_id: TupleId) -> Self { Self(GlobalIdInner::Tuple(tuple_id).intern()) } } impl TupleId { /// Creates a ConcreteId from a TupleId: `Tuple(1)` returns `Tuple1` fn into_owned_concrete_id(self) -> ConcreteId { fn patch_def_id(template: GlobalId, length: usize, field: usize) -> ConcreteId { let GlobalIdInner::Concrete(mut concrete_id) = template.0.get().clone() else { // `patch_def_id` is called with constant values (`hax::Tuple2` // and friends are constants) Those are of the shape // `GlobalIdInner::Concrete(_)`, *not* // `GlobalIdInner::Tuple(_)`. The tuple identifiers we deal with // in this functions are private identifiers used only in this // module, to provide normal concrete identifiers even for // tuples. unreachable!() }; fn inner(did: &mut DefIdInner, length: usize, field: usize) { for DisambiguatedDefPathItem { data, .. } in &mut did.path { // Patch field if let DefPathItem::ValueNs(s) = data && s == "1" { *s = field.to_string() } // Patch constructor / type name if let DefPathItem::TypeNs(s) = data && s.starts_with("Tuple") { *s = format!("Tuple{length}") } } if let Some(parent) = did.parent { let mut parent = parent.get().clone(); inner(&mut parent, length, field); did.parent = Some(parent.intern()); } } let mut did = concrete_id.def_id.def_id.get().clone(); inner(&mut did, length, field); concrete_id.def_id.def_id = did.intern(); concrete_id } use crate::names::rust_primitives::hax; match self { TupleId::Type { length } => patch_def_id(hax::Tuple2, length, 0), TupleId::Constructor { length } => patch_def_id(hax::Tuple2::Constructor, length, 0), TupleId::Field { length, field } => patch_def_id(hax::Tuple2::_1, length, field), } } /// Creates a static [`ConcreteId`] from a [`TupleId`]: `Tuple(1)` returns `Tuple1`. The function is /// memoized (as the same tuple ids may appear a lot in a program), and inserts identifiers in /// the GlobalId table to return a static lifetime. 
pub fn as_concreteid(self) -> &'static ConcreteId { thread_local! { static MEMO: LazyCell>> = LazyCell::new(|| RefCell::new(HashMap::new())); } MEMO.with(|memo| { let mut memo = memo.borrow_mut(); let reference: &'static ConcreteId = memo.entry(self).or_insert_with(|| { match GlobalIdInner::Concrete(self.into_owned_concrete_id()) .intern() .get() { GlobalIdInner::Concrete(concrete_id) => concrete_id, GlobalIdInner::FreshModule(_) | GlobalIdInner::Tuple(_) => { // This is a match on the Id that was just inserted in the table as a // ConcreteId unreachable!() } } }); reference }) } } /// A interned global identifier in hax. #[derive_group_for_ast] #[derive(Copy)] pub struct GlobalId(Interned); impl GlobalId { /// Import a def_id from the frontend pub fn from_frontend(id: hax_frontend_exporter::DefId, is_value: bool) -> Self { let mut def_id: DefIdInner = id.into(); use hax_frontend_exporter::DefKind as DK; let mut popped_ctor = false; if let Some(last) = def_id.path.last() && matches!(&last.data, DefPathItem::Ctor) { def_id.path.pop(); popped_ctor = true; if let Some(parent) = def_id.parent.as_ref() { def_id.parent = parent.parent; } } let is_constructor = is_value && (matches!(&def_id.kind, DK::Variant | DK::Union | DK::Struct) || popped_ctor); let inner = GlobalIdInner::Concrete(ConcreteId { def_id: ExplicitDefId { is_constructor, def_id: def_id.intern(), }, moved: None, suffix: None, }); Self(inner.intern()) } /// Extracts the Crate info pub fn krate(self) -> &'static str { match self.0.get() { GlobalIdInner::FreshModule(fresh_module) => { &fresh_module .hints .first() .expect("The hint list should always be non-empty") .def_id .krate } GlobalIdInner::Concrete(concrete_id) => &concrete_id.def_id.def_id.krate, GlobalIdInner::Tuple(tuple_id) => &tuple_id.as_concreteid().def_id.def_id.krate, } } /// Debug printing of identifiers, for testing purposes only. /// Prints path in a Rust-like way, as a `::` separated dismabiguated path. 
pub fn to_debug_string(self) -> String { match self.0.get() { GlobalIdInner::Concrete(id) => id.to_debug_string(), GlobalIdInner::FreshModule(id) => id.to_debug_string(), GlobalIdInner::Tuple(id) => id.as_concreteid().to_debug_string(), } } /// Returns true if the underlying identifier is a constructor pub fn is_constructor(self) -> bool { self.0.get().is_constructor() } /// Returns true if the underlying identifier is a projector pub fn is_projector(self) -> bool { self.0.get().is_projector() } /// Returns true if the underlying identifier is a precondition (trait/impl item) /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed pub fn is_precondition(self) -> bool { self.0.get().is_precondition() } /// Returns true if the underlying identifier is a postcondition (trait/impl item) /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed pub fn is_postcondition(self) -> bool { self.0.get().is_postcondition() } /// Renders a view of the global identifier. pub fn view(self) -> view::View { match self.0.get() { GlobalIdInner::FreshModule(id) => id.view(), GlobalIdInner::Concrete(id) => id.view(), GlobalIdInner::Tuple(id) => id.as_concreteid().view(), } } /// Returns a tuple identifier if `self` is indeed a tuple. pub fn expect_tuple(self) -> Option { match self.0.get() { GlobalIdInner::Tuple(tuple_id) => Some(*tuple_id), _ => None, } } /// Gets the closest module only parent identifier, that is, the closest parent whose path /// contains only path chunks of kind `DefKind::Mod`. Can be itself (for fresh modules). pub fn mod_only_closest_parent(self) -> Self { match self.0.get() { GlobalIdInner::FreshModule(_) => self, GlobalIdInner::Concrete(concrete_id) => concrete_id.mod_only_closest_parent().into(), GlobalIdInner::Tuple(tuple_id) => { tuple_id.as_concreteid().mod_only_closest_parent().into() } } } /// Change the krate name (the first element of the `GlobalId`) to `name`. 
pub fn rename_krate(self, name: &str) -> Self { match self.0.get() { GlobalIdInner::FreshModule(fresh_module) => { Self(GlobalIdInner::FreshModule(fresh_module.rename_krate(name)).intern()) } GlobalIdInner::Concrete(concrete_id) => { let mut concrete_id = concrete_id.clone(); concrete_id.rename_krate(name); Self(GlobalIdInner::Concrete(concrete_id).intern()) } GlobalIdInner::Tuple(tuple_id) => { let mut concrete_id = tuple_id.as_concreteid().clone(); concrete_id.rename_krate(name); Self(GlobalIdInner::Concrete(concrete_id).intern()) } } } /// Add a suffix to a GlobalId pub fn with_suffix(self, suffix: ReservedSuffix) -> Self { match self.0.get() { GlobalIdInner::Concrete(concrete_id) => Self( GlobalIdInner::Concrete(ConcreteId { suffix: Some(suffix), ..concrete_id.clone() }) .intern(), ), GlobalIdInner::Tuple(_) | GlobalIdInner::FreshModule(_) => self, } } } impl GlobalIdInner { /// Extract the `ExplicitDefId` from a `GlobalId`. fn explicit_def_id(&self) -> Option { match self { GlobalIdInner::Concrete(concrete_id) => Some(concrete_id.def_id.clone()), _ => None, } } /// Returns true if the underlying identifier is a constructor pub fn is_constructor(&self) -> bool { match self { GlobalIdInner::Concrete(concrete_id) => concrete_id.def_id.is_constructor, GlobalIdInner::Tuple(TupleId::Constructor { .. }) => true, _ => false, } } /// Returns true if the underlying identifier is a projector pub fn is_projector(&self) -> bool { match self { GlobalIdInner::Concrete(concrete_id) => { matches!(concrete_id.def_id.def_id.get().kind, DefKind::Field) } GlobalIdInner::Tuple(TupleId::Field { .. 
}) => true, _ => false, } } /// Returns true if the underlying identifier has the precondition suffix /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed pub fn is_precondition(&self) -> bool { matches!(self, GlobalIdInner::Concrete(concrete_id) if matches!(concrete_id.suffix, Some(ReservedSuffix::Pre))) } /// Returns true if the underlying identifier has the postcondition suffix /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed pub fn is_postcondition(&self) -> bool { matches!(self, GlobalIdInner::Concrete(concrete_id) if matches!(concrete_id.suffix, Some(ReservedSuffix::Post))) } } impl From for GlobalId { fn from(concrete_id: ConcreteId) -> Self { Self(GlobalIdInner::Concrete(concrete_id).intern()) } } impl ConcreteId { /// Renders a view of the concrete identifier. fn view(&self) -> view::View { view::View::from(self.def_id.clone()).with_suffix(self.suffix.clone()) } /// Gets the closest module only parent identifier, that is, the closest /// parent whose path contains only path chunks of kind `DefKind::Mod`. 
fn mod_only_closest_parent(&self) -> Self { let mut parents = self.def_id.parents().collect::>(); parents.reverse(); let def_id = parents .into_iter() .take_while(|id| matches!(id.def_id.kind, DefKind::Mod)) .last() .expect("Invariant broken: a DefId must always contain at least on `mod` segment (the crate)"); Self { def_id, moved: self.moved.clone(), suffix: None, } } fn rename_krate(&mut self, name: &str) { self.def_id.rename_krate(name); } fn to_debug_string(&self) -> String { self.def_id.def_id.get().to_debug_string() } } impl PartialEq for GlobalId { fn eq(&self, other: &DefId) -> bool { if let GlobalIdInner::Concrete(concrete) = self.0.get() { &concrete.def_id.def_id == other } else { false } } } impl PartialEq for DefId { fn eq(&self, other: &GlobalId) -> bool { other == self } } impl PartialEq for GlobalId { fn eq(&self, other: &ExplicitDefId) -> bool { self == &other.def_id } } impl PartialEq for ExplicitDefId { fn eq(&self, other: &GlobalId) -> bool { other == &self.def_id } } ================================================ FILE: rust-engine/src/ast/identifiers.rs ================================================ //! Identifier types used throughout the AST. //! //! This module provides two kinds of identifiers: //! - `GlobalId`: fully-qualified paths like `std::mem::drop` //! - `LocalId`: local identifiers use crate::symbol::Symbol; use hax_rust_engine_macros::*; use std::fmt; pub mod global_id; /// Local identifier // TODO(issue #1874): local identifiers should have unique indexes #[derive_group_for_ast] pub struct LocalId(pub Symbol); impl LocalId { /// Returns true if `self` is a local identifier named `self`: the Rust keyword `self`. 
    pub fn is_self(&self) -> bool {
        self.0.as_ref() == "self"
    }
}

impl fmt::Display for LocalId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl From<&hax_frontend_exporter::LocalIdent> for LocalId {
    fn from(value: &hax_frontend_exporter::LocalIdent) -> Self {
        Self(Symbol::new(&value.name))
    }
}

impl From<&str> for LocalId {
    fn from(name: &str) -> Self {
        Self(Symbol::new(name))
    }
}

pub use global_id::GlobalId;

================================================
FILE: rust-engine/src/ast/literals.rs
================================================
//! Literal and numeric type kinds used in constant expressions.

use crate::symbol::Symbol;
use hax_rust_engine_macros::*;

/// Size of an integer type
#[derive_group_for_ast]
pub enum IntSize {
    /// 8 bits integer type
    S8,
    /// 16 bits integer type
    S16,
    /// 32 bits integer type
    S32,
    /// 64 bits integer type
    S64,
    /// 128 bits integer type
    S128,
    /// Pointer-sized integer type
    SSize,
}

use hax_frontend_exporter::{FloatTy, IntTy, UintTy};

// Conversions from the frontend's numeric type descriptors into the engine's
// `IntSize` / `IntKind` / `FloatKind` representations.

impl From<&IntTy> for IntSize {
    fn from(value: &IntTy) -> Self {
        match value {
            IntTy::I128 => Self::S128,
            IntTy::I64 => Self::S64,
            IntTy::I32 => Self::S32,
            IntTy::I16 => Self::S16,
            IntTy::I8 => Self::S8,
            IntTy::Isize => Self::SSize,
        }
    }
}

impl From<&UintTy> for IntSize {
    fn from(value: &UintTy) -> Self {
        match value {
            UintTy::U128 => Self::S128,
            UintTy::U64 => Self::S64,
            UintTy::U32 => Self::S32,
            UintTy::U16 => Self::S16,
            UintTy::U8 => Self::S8,
            UintTy::Usize => Self::SSize,
        }
    }
}

impl From<&IntTy> for IntKind {
    fn from(value: &IntTy) -> Self {
        IntKind {
            size: value.into(),
            signedness: Signedness::Signed,
        }
    }
}

impl From<&UintTy> for IntKind {
    fn from(value: &UintTy) -> Self {
        IntKind {
            size: value.into(),
            signedness: Signedness::Unsigned,
        }
    }
}

impl From<&FloatTy> for FloatKind {
    fn from(value: &FloatTy) -> Self {
        match value {
            FloatTy::F128 => Self::F128,
            FloatTy::F64 => Self::F64,
            FloatTy::F32 => Self::F32,
            FloatTy::F16 => Self::F16,
        }
    }
}

/// Signedness of a numeric type
#[derive_group_for_ast]
pub enum Signedness {
    /// Signed type (`i32`, `i64`, ...)
    Signed,
    /// Unsigned type (`u32`, `u64`, ...)
    Unsigned,
}

/// Describes a Rust integer type (`u64`, `i32`, ...)
#[derive_group_for_ast]
pub struct IntKind {
    /// Size of this integer type
    pub size: IntSize,
    /// Whether this integer type is signed or unsigned
    pub signedness: Signedness,
}

/// Float types
#[derive_group_for_ast]
pub enum FloatKind {
    /// 16 bits float
    F16,
    /// 32 bits float
    F32,
    /// 64 bits float
    F64,
    /// 128 bits float
    F128,
}

/// Rust literal
#[derive_group_for_ast]
pub enum Literal {
    /// String literal
    String(Symbol),
    /// Character literal
    Char(char),
    /// Boolean literal
    Bool(bool),
    /// Integer literal
    Int {
        /// Value as u128
        value: Symbol,
        /// True if `-`
        negative: bool,
        /// Rust int type description (size + signedness)
        kind: IntKind,
    },
    /// Float literal
    Float {
        /// Value as a string
        value: Symbol,
        /// True if `-`
        negative: bool,
        /// Size
        kind: FloatKind,
    },
}

================================================
FILE: rust-engine/src/ast/resugared.rs
================================================
//! This module defines *resugared fragments* for the Hax Rust engine's AST.
//!
//! A resugared fragment is an additional AST node used solely for pretty-printing purposes.
//! These nodes carry no semantic meaning in hax core logic but enable more accurate
//! or backend-specific surface syntax reconstruction.
//!
//! For example, the engine represents the `unit` type as a zero-sized tuple `()`,
//! mirroring Rust's internal representation. However, this may not suit all backends:
//! in F*, `unit` is explicitly written as `unit`, not `()`.
//!
//! To accommodate such differences, we introduce resugared fragments (e.g. `UnitType`) that
//! allow the printer to emit the expected syntax while maintaining the same internal semantics.

use hax_rust_engine_macros::*;

use super::*;

/// Resugared variants for items. These represent extra printing-only items, see [`super::ItemKind::Resugared`].
#[derive_group_for_ast]
pub enum ResugaredItemKind {
    /// A `const` item, for example `const NAME: T = body;`.
    /// The type of the constant is `body.ty`.
    Constant {
        /// The identifier of the constant, for example `krate::module::NAME`.
        name: GlobalId,
        /// The body of the constant, for example `body`.
        body: Expr,
        /// The generic arguments and constraints of the constant.
        /// Note: constant supporting generics is a nightly feature (generic_const_items).
        generics: Generics,
    },
    /// A recursive function definition. Detected by checking whether the function
    /// body contains a reference to its own name.
    RecursiveFn {
        /// The identifier of the function.
        name: GlobalId,
        /// The generic arguments and constraints of the function.
        generics: Generics,
        /// The body of the function.
        body: Expr,
        /// The parameters of the function.
        params: Vec,
        /// The safety of the function.
        safety: SafetyKind,
    },
}

/// Resugared variants for expressions. These represent extra printing-only expressions, see [`super::ExprKind::Resugared`].
#[derive_group_for_ast]
// TODO: drop `clippy::large_enum_variant` when https://github.com/cryspen/hax/issues/1666 is addressed.
#[allow(clippy::large_enum_variant)]
pub enum ResugaredExprKind {
    /// A tuple constructor.
    ///
    /// # Example:
    /// `(a, b)`
    Tuple(Vec),
    /// A let-binding of a "pure" (non-panicking) expression
    ///
    /// # Example:
    /// `let x = 9; x + 0`
    LetPure {
        /// The left-hand side of the `let` expression. (`x` in the example)
        lhs: Pat,
        /// The right-hand side of the `let` expression. (`9` in the example)
        rhs: Expr,
        /// The body of the `let`. (`x + 0` in the example)
        body: Expr,
    },
}

/// Resugared variants for patterns. These represent extra printing-only patterns, see [`super::PatKind::Resugared`].
#[derive_group_for_ast]
pub enum ResugaredPatKind {
    /// A record constructor pattern where wildcard fields are replaced by `..`.
ConstructWithEllipsis { /// The identifier of the constructor we are matching. constructor: GlobalId, /// Is this a struct? (meaning, *not* a variant from an enum) is_struct: bool, /// Only the explicitly-bound (non-wildcard) fields. fields: Vec<(GlobalId, Pat)>, }, } /// Resugared variants for types. This represent extra printing-only types, see [`super::TyKind::Resugared`]. #[derive_group_for_ast] pub enum ResugaredTyKind { /// A tuple tupe. /// /// # Example: /// `(i32, bool)` Tuple(Vec), } /// Resugared variants for impl. items. This represent extra printing-only impl. items, see [`super::ImplItemKind::Resugared`]. #[derive_group_for_ast] pub enum ResugaredImplItemKind { /// An associated `const` impl item, for example `const NAME: T = body;`. /// The type of the constant is `body.ty`. Constant { /// The body of the constant, for example `body`. body: Expr, }, } /// Resugared variants for trait items. This represent extra printing-only trait items, see [`super::TraitItemKind::Resugared`]. #[derive_group_for_ast] pub enum ResugaredTraitItemKind {} /// Marks a type as a resugar fragment of the AST. pub trait ResugaredFragment { /// What fragment of the AST this resugar is extending? type ParentFragment; } /// Convenience macro which implements [`ResugaredFragment`] on `$ty`, setting /// `$parent` as the `ParentFragment`, as well as `From<$ty>` for `$parent`, by /// wrapping the `$ty` in `$parent::Resugared(..)`. macro_rules! 
derive_from { ($($ty:ty => $parent:ty),*) => { $(impl ResugaredFragment for $ty { type ParentFragment = $parent; } impl From<$ty> for <$ty as ResugaredFragment>::ParentFragment { fn from(value: $ty) -> Self { Self::Resugared(value) } })* }; } derive_from!( ResugaredItemKind => ItemKind, ResugaredExprKind => ExprKind, ResugaredPatKind => PatKind, ResugaredTyKind => TyKind, ResugaredImplItemKind => ImplItemKind, ResugaredTraitItemKind => TraitItemKind ); ================================================ FILE: rust-engine/src/ast/span.rs ================================================ //! Source positions. use crate::interning::{Internable, Interned, InterningTable}; use hax_rust_engine_macros::*; use std::sync::{LazyLock, Mutex}; /// Creates a fresh identifier for a span. fn fresh_id() -> u32 { use std::sync::atomic::{AtomicU32 as AtomicInt, Ordering}; static CURRENT_ID: AtomicInt = AtomicInt::new(0); CURRENT_ID.fetch_add(1, Ordering::Relaxed) } /// Position of a Rust source #[derive_group_for_ast] struct SpanData { /// A vector of spans as defined by the frontend. /// This is useful for supporting in a trivial way union of spans. data: Vec, /// A reference to the item in which this span lives. This information is /// used for debugging and profiling purposes, e.g. for `cargo hax into /// --stats backend`. owner_hint: Option>, } impl SpanData { /// Creates a dummy span. fn dummy() -> Self { let lo: hax_frontend_exporter::Loc = hax_frontend_exporter::Loc { line: 0, col: 0 }; let hi = lo.clone(); SpanData { data: vec![hax_frontend_exporter::Span { lo, hi, filename: hax_frontend_exporter::FileName::Custom("dumny".into()), rust_span_data: None, }], owner_hint: None, } } /// Creates a [`Span`] given information from the hax exporter. 
fn from_exporter( span: hax_frontend_exporter::Span, owner_hint: Option<&hax_frontend_exporter::DefId>, ) -> Self { Self { data: vec![span], owner_hint: owner_hint.map(Interned::intern), } } } /// Position of a Rust source #[derive_group_for_ast] #[derive(Copy)] pub struct Span { #[serde(flatten)] data: Interned, /// A unique identifier. Since we store spans almost for every node of the /// AST, having a unique identifier for spans gives us a fine-grained way of /// refering to sub-nodes in debugging context. This id is indeed mostly /// used by the web debugger. id: u32, } impl Internable for SpanData { fn interning_table() -> &'static Mutex> { static TABLE: LazyLock>> = LazyLock::new(|| Mutex::new(InterningTable::default())); &TABLE } } impl Span { /// Creates a dummy span. pub fn dummy() -> Self { static DUMMY_SPAN: LazyLock = LazyLock::new(|| { let data = Interned::intern(&SpanData::dummy()); Span { data, id: fresh_id(), } }); *DUMMY_SPAN } /// Creates a [`Span`] given information from the hax exporter. pub fn from_exporter( span: hax_frontend_exporter::Span, owner_hint: Option<&hax_frontend_exporter::DefId>, ) -> Self { let data = Interned::intern(&SpanData::from_exporter(span, owner_hint)); Self { data, id: fresh_id(), } } /// Get a vector of frontend spans given a [`Span`]. pub fn as_frontend_spans(self) -> &'static [hax_frontend_exporter::Span] { &self.data.get().data } } impl Internable for hax_frontend_exporter::DefId { fn interning_table() -> &'static Mutex> { static TABLE: LazyLock>> = LazyLock::new(|| Mutex::new(InterningTable::default())); &TABLE } } ================================================ FILE: rust-engine/src/ast/utils.rs ================================================ //! This module provides a collection of utilities to work on AST. use super::visitors::*; use super::*; use identifiers::*; use std::collections::HashMap; /// Useful visitor to map AST fragments. 
pub mod mappers {
    use super::*;

    /// Visitor that substitutes local identifiers in ASTs.
    pub struct SubstLocalIds(HashMap);

    impl SubstLocalIds {
        /// Create a substituter given one replacement couple.
        pub fn one(from: LocalId, to: LocalId) -> Self {
            Self::many([(from, to)])
        }

        /// Create a substituter given a bunch of replacement couples.
        pub fn many(replacements: impl IntoIterator) -> Self {
            Self(replacements.into_iter().collect())
        }
    }

    impl AstVisitorMut for SubstLocalIds {
        // Rewrite every visited local identifier that has a registered replacement.
        fn visit_local_id(&mut self, local_id: &mut LocalId) {
            if let Some(replacement) = self.0.get(local_id) {
                *local_id = replacement.clone();
            }
        }
    }
}

impl Expr {
    /// Create a tuple expression out of components.
    /// The resulting type is the tuple of the component types.
    pub fn tuple(components: Vec, span: Span) -> Self {
        let ty = TyKind::tuple(
            components
                .iter()
                .map(Typed::ty)
                .cloned()
                .map(GenericValue::Ty)
                .collect(),
        )
        .promote();
        ExprKind::tuple(components).promote(ty, span)
    }

    /// Create a unit (tuple of size 0) expression.
    pub fn unit(span: Span) -> Self {
        ExprKind::GlobalId(global_id::TupleId::Constructor { length: 0 }.into())
            .promote(TyKind::unit().promote(), span)
    }

    /// Creates a `App` node for a standalone function.
    pub fn standalone_fn_app(
        head: impl Into,
        generic_args: Vec,
        args: Vec,
        output_type: Ty,
        span: Span,
    ) -> Self {
        ExprKind::standalone_fn_app(head, generic_args, args, output_type.clone(), span)
            .promote(output_type, span)
    }

    /// Creates a `App` node.
    pub fn fn_app(
        head: impl Into,
        generic_args: Vec,
        args: Vec,
        output_type: Ty,
        bounds_impls: Vec,
        trait_: Option<(ImplExpr, Vec)>,
        span: Span,
    ) -> Self {
        ExprKind::fn_app(
            head,
            generic_args,
            args,
            output_type.clone(),
            bounds_impls,
            trait_,
            span,
        )
        .promote(output_type, span)
    }

    /// Removes a box
    ///
    /// Returns the boxed inner expression when `self` is a one-argument
    /// application of `Box::new` (or hax's `box_new`); `None` otherwise.
    pub fn unbox_once(&self) -> Option<&Expr> {
        if let ExprKind::App { head, args, .. } = self.kind()
            && let [arg] = &**args
            && let ExprKind::GlobalId(head) = head.kind()
            && let crate::names::alloc::boxed::Impl::new
            | crate::names::rust_primitives::hax::box_new = *head
        {
            Some(arg)
        } else {
            None
        }
    }

    /// Removes a deref
    ///
    /// Returns the dereferenced inner expression when `self` is a one-argument
    /// application of hax's `deref_op`; `None` otherwise.
    pub fn underef_once(&self) -> Option<&Expr> {
        if let ExprKind::App { head, args, .. } = self.kind()
            && let [arg] = &**args
            && let ExprKind::GlobalId(head) = head.kind()
            && let crate::names::rust_primitives::hax::deref_op = *head
        {
            Some(arg)
        } else {
            None
        }
    }

    /// Removes all boxes and derefs wrapping the expression
    pub fn unbox_underef(&self) -> &Expr {
        let mut current = self;
        // Iterate until neither an unbox nor an underef applies.
        while let Some(e) = current.unbox_once().or_else(|| current.underef_once()) {
            current = e
        }
        current
    }
}

impl ExprKind {
    /// Creates a `App` node for a standalone function.
    pub fn standalone_fn_app(
        head: impl Into,
        generic_args: Vec,
        args: Vec,
        output_type: Ty,
        span: Span,
    ) -> Self {
        Self::fn_app(head, generic_args, args, output_type, vec![], None, span)
    }

    /// Creates a `App` node.
    pub fn fn_app(
        head: impl Into,
        generic_args: Vec,
        args: Vec,
        output_type: Ty,
        bounds_impls: Vec,
        trait_: Option<(ImplExpr, Vec)>,
        span: Span,
    ) -> Self {
        let head = 'head: {
            let kind = match head.into() {
                FnAppHead::GlobalId(global_id) => ExprKind::GlobalId(global_id),
                FnAppHead::ExprKind(expr_kind) => expr_kind,
                // A full expression already carries its own type: use it as-is.
                FnAppHead::Expr(expr) => break 'head expr,
            };
            // Synthesize the arrow type of the head from argument and output types.
            let head_ty = TyKind::Arrow {
                inputs: args.iter().map(Typed::ty).cloned().collect(),
                output: output_type.clone(),
            }
            .promote();
            kind.promote(head_ty, span)
        };
        Self::App {
            head,
            args,
            generic_args,
            bounds_impls,
            trait_,
        }
    }

    /// Creates a tuple out of a vector of components.
    pub fn tuple(components: Vec) -> Self {
        let length = components.len();
        ExprKind::Construct {
            constructor: global_id::TupleId::Constructor { length }.into(),
            is_record: false,
            is_struct: true,
            // Field `i` of an `n`-tuple is identified by `TupleId::Field { length: n, field: i }`.
            fields: components
                .into_iter()
                .enumerate()
                .map(|(field, expr)| (global_id::TupleId::Field { length, field }.into(), expr))
                .collect(),
            base: None,
        }
    }

    /// Promote to an `Expr`
    pub fn promote(self, ty: Ty, span: Span) -> Expr {
        Expr {
            kind: Box::new(self),
            ty,
            meta: Metadata {
                span,
                attributes: Vec::new(),
            },
        }
    }
}

impl Metadata {
    /// Get an iterator over hax attributes for this AST fragment.
    pub fn hax_attributes(&self) -> impl Iterator {
        crate::attributes::hax_attributes(&self.attributes)
    }
}

impl Pat {
    /// Expects the pattern to be a simple binding `self`.
    pub fn expect_self(&self) -> Option {
        if let PatKind::Binding { var, .. } = self.kind()
            && var.is_self()
        {
            Some(var.clone())
        } else {
            None
        }
    }
}

/// Helper enum that describes what can serve as function application heads.
/// This is a helper that is useful for [`ExprKind::fn_application`].
pub enum FnAppHead { /// A global identifier GlobalId(GlobalId), /// An expression kind ExprKind(ExprKind), /// A full blown expression Expr(Expr), } impl From for FnAppHead { fn from(value: GlobalId) -> Self { Self::GlobalId(value) } } impl From for FnAppHead { fn from(value: ExprKind) -> Self { Self::ExprKind(value) } } impl From for FnAppHead { fn from(value: Expr) -> Self { Self::Expr(value) } } impl Generics { /// Concatenate two generics pub fn concat(mut self, other: Self) -> Self { self.constraints.extend(other.constraints); self.params.extend(other.params); use std::cmp::Ordering; self.params.sort_by(|a, b| match (a.kind(), b.kind()) { (GenericParamKind::Lifetime, GenericParamKind::Lifetime) => Ordering::Equal, (GenericParamKind::Lifetime, _) => Ordering::Less, (_, GenericParamKind::Lifetime) => Ordering::Greater, _ => Ordering::Equal, }); self } /// Empty generics pub fn empty() -> Self { Self { params: Vec::new(), constraints: Vec::new(), } } } impl Item { /// Returns a `LocalId` named `self` if the item is a standalone function /// whose first argument is the keyword `self`. In other words, this /// function returns a local identifier only for associated methods from /// inherent `impl` blocks. pub fn self_id(&self) -> Option { if let ItemKind::Fn { params, .. } = self.kind() && let [first, ..] = ¶ms[..] && let Some(self_id) = first.pat.expect_self() { Some(self_id.clone()) } else { None } } } impl ItemKind { /// Promote to an item pub fn promote(self, ident: GlobalId, span: Span) -> Item { Item { ident, kind: self, meta: Metadata { span, attributes: Vec::new(), }, } } } impl GenericValue { /// Tries to extract a [`Ty`] out of a [`GenericValue`]. 
pub fn expect_ty(&self) -> Option<&Ty> { let Self::Ty(ty) = self else { return None }; Some(ty) } } impl TyKind { /// Tuple type pub fn tuple(args: Vec) -> Self { let head = global_id::TupleId::Type { length: args.len() }.into(); Self::App { head, args } } /// Unit type pub fn unit() -> Self { Self::tuple(Vec::new()) } /// Promote to a Ty pub fn promote(self) -> Ty { Ty(Box::new(self)) } } impl Arm { /// Create a non-guarded arm pub fn non_guarded(pat: Pat, body: Expr, span: Span) -> Self { Self { pat, body, guard: None, meta: Metadata { span, attributes: Vec::new(), }, } } } impl PatKind { /// Pattern for binding to a single variable pub fn var_pat(var: LocalId) -> Self { Self::Binding { mutable: false, var, mode: BindingMode::ByValue, sub_pat: None, } } /// Promote to a `Pat` pub fn promote(self, ty: Ty, span: Span) -> Pat { Pat { kind: Box::new(self), ty, meta: Metadata { span, attributes: Vec::new(), }, } } } impl Variant { /// Whether a variant has fields or not. /// See https://doc.rust-lang.org/reference/items/enumerations.html#field-less-enum. pub fn is_fieldless(&self) -> bool { self.arguments.is_empty() } } ================================================ FILE: rust-engine/src/ast/visitors.rs ================================================ //! Syntax tree traversals to walk a shared or mutable borrow of the syntax tree //! of Hax. The visitors are generated using the [`derive_generic_visitor`] //! library. //! //! This module provides visitors of different flavors of visitors, and visitor //! wrappers that can enhance the default behavior of a visitor. //! //! We provide four main visitors. //! - [`AstVisitor`] and [`AstVisitorMut`]: visitor that never early exit. //! - [`AstEarlyExitVisitor`] and [`AstEarlyExitVisitorMut`]: visitor that can early exit. //! //! Each trait provides methods `visit_expr`, `visit_ty`, etc. enabling easy AST //! traversal. //! //! Importantly, we also provide visitor wrappers that enhance visitors with //! 
common useful behavior. See the module [`wrappers`] for more information. use super::*; use derive_generic_visitor::*; use hax_lib_macros_types::AttrPayload; pub mod wrappers { //! This module provides a visitor wrappers, or transformer of visitors. //! Such wrappers transform the behavior of a visitor. //! //! For example, [`SpanWrapper`] takes care of keeping track of [`Span`]s //! while travesing an AST. use std::ops::Deref; use super::{infallible::AstVisitable as AstVisitableInfallible, *}; use diagnostics::*; /// A visitor wrapper that tracks span while visiting the AST. Whenever an /// AST node that carries a span is visited, using this wrapper, the ambient /// span is mutated and accessible via the `HasSpan` trait. pub struct SpanWrapper<'a, V>(pub &'a mut V); impl<'a, V: HasSpan> SpanWrapper<'a, V> { /// Performs a spanned action: calls the function `action` on /// `ast_fragment`, with the contextual span information in `self` being /// the span found in `ast_fragment`. fn spanned_action( &mut self, ast_fragment: T, action: impl Fn(&mut Self, T) -> U, ) -> U where T::Target: HasSpan, { let span_before = self.0.span(); *self.0.span_mut() = ast_fragment.span(); // Perform the provided action on `ast_fragment` with `ast_fragment`'s span as contextual span. 
let result = action(self, ast_fragment); *self.0.span_mut() = span_before; result } } impl<'a, V: AstVisitorMut + HasSpan> AstVisitorMut for SpanWrapper<'a, V> { fn visit_inner(&mut self, x: &mut T) where T: AstVisitableInfallible, T: for<'s> DriveMut<'s, AstVisitableInfallibleWrapper>, { x.drive_map(self.0) } fn visit_item(&mut self, x: &mut Item) { self.spanned_action(x, Self::visit_inner) } fn visit_expr(&mut self, x: &mut Expr) { self.spanned_action(x, Self::visit_inner) } fn visit_pat(&mut self, x: &mut Pat) { self.spanned_action(x, Self::visit_inner) } fn visit_guard(&mut self, x: &mut Guard) { self.spanned_action(x, Self::visit_inner) } fn visit_arm(&mut self, x: &mut Arm) { self.spanned_action(x, Self::visit_inner) } fn visit_impl_item(&mut self, x: &mut ImplItem) { self.spanned_action(x, Self::visit_inner) } fn visit_trait_item(&mut self, x: &mut TraitItem) { self.spanned_action(x, Self::visit_inner) } fn visit_generic_param(&mut self, x: &mut GenericParam) { self.spanned_action(x, Self::visit_inner) } fn visit_attribute(&mut self, x: &mut Attribute) { self.spanned_action(x, Self::visit_inner) } fn visit_spanned_ty(&mut self, x: &mut SpannedTy) { self.spanned_action(x, Self::visit_inner) } } /// A visitor wrapper that automatically collects errors in `ErrorNode`s. /// Coupled with the trait `VisitorWithErrors`, this provides an `error` /// method on a visitor that can be used to throw errors, which will be /// automatically inlined in the AST on the closest error-capable node. pub struct ErrorWrapper<'a, V>(pub &'a mut V); /// An opaque error vault. This is the state manipulated by the visitor wrapper [`ErrorWrapper`]. /// It is purposefully not-inspectable. #[derive(Default)] pub struct ErrorVault(Vec); impl ErrorVault { fn add(&mut self, diagnostic: Diagnostic) { self.0.push(diagnostic); } } /// Helper struct that contains error-handling related state information. /// This is used internally by [`setup_error_handling_struct`]. 
pub struct ErrorHandlingState(pub Span, pub ErrorVault); impl Default for ErrorHandlingState { fn default() -> Self { Self(Span::dummy(), Default::default()) } } #[macro_export] /// Use this macro in an implementation of `AstVisitorMut` to get automatic spans and error handling. macro_rules! setup_error_handling_impl { () => { fn visit(&mut self, x: &mut T) { $crate::ast::visitors::wrappers::SpanWrapper( &mut $crate::ast::visitors::wrappers::ErrorWrapper(self), ) .visit(x) } }; } pub use setup_error_handling_impl; /// Mark a visitor with a specific diagnostic context. pub trait VisitorWithContext { /// Returns the diagnostic context for this visitor. fn context(&self) -> Context; } impl HasSpan for ErrorWrapper<'_, T> { fn span(&self) -> Span { self.0.span() } fn span_mut(&mut self) -> &mut Span { self.0.span_mut() } } /// A visitor that can throw errors. It should be used in combination with /// `ErrorWrapper`, which will take care of bubbling error up to the nearest /// parent capable of representing errors. For instance, if you error out in /// a literal, the error will be represented in the parent expression or the /// parent type, as nodes [`ExprKind::Error`] or [`TyKind ::Error`]. pub trait VisitorWithErrors: HasSpan + VisitorWithContext { /// Projects the error vault. fn error_vault(&mut self) -> &mut ErrorVault; /// Send an error. 
fn error(&mut self, node: impl Into, kind: DiagnosticInfoKind) { let context = self.context(); let span = self.span(); self.error_vault().add(Diagnostic::new( node, DiagnosticInfo { context, span, kind, }, )); } } impl<'a, V: VisitorWithErrors> ErrorWrapper<'a, V> { fn error_handled_action< T: FallibleAstNode + Clone + std::fmt::Debug + Into, U, >( &mut self, x: &mut T, action: impl Fn(&mut Self, &mut T) -> U, ) -> U { let diagnostics_snapshot = self.0.error_vault().0.clone(); self.0.error_vault().0.clear(); let result = action(self, x); let diagnostics: Vec<_> = self.0.error_vault().0.drain(..).collect(); if !diagnostics.is_empty() { x.set_error(ErrorNode { fragment: Box::new(x.clone().into()), diagnostics, }); } self.0.error_vault().0 = diagnostics_snapshot; result } } impl<'a, V: AstVisitorMut + VisitorWithErrors> AstVisitorMut for ErrorWrapper<'a, V> { fn visit_inner(&mut self, x: &mut T) where T: AstVisitableInfallible, T: for<'s> DriveMut<'s, AstVisitableInfallibleWrapper>, { x.drive_map(self.0) } fn visit_item(&mut self, x: &mut Item) { self.error_handled_action(x, Self::visit_inner) } fn visit_pat(&mut self, x: &mut Pat) { self.error_handled_action(x, Self::visit_inner) } fn visit_expr(&mut self, x: &mut Expr) { self.error_handled_action(x, Self::visit_inner) } fn visit_ty(&mut self, x: &mut Ty) { self.error_handled_action(x, Self::visit_inner) } } } #[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))] mod replaced { use super::*; pub mod infallible { use super::*; #[visitable_group( visitor(drive_map( /// An mutable visitor that visits the AST for hax. 
/// /// ```rust,ignore /// use crate::ast::{diagnostics::*, visitors::*}; /// #[setup_error_handling_struct] /// #[derive(Default)] /// struct MyVisitor; /// /// impl VisitorWithContext for MyVisitor { /// fn context(&self) -> Context { /// Context::Import /// } /// } /// /// impl AstVisitorMut for MyVisitor { /// setup_error_handling_impl!(); /// } /// /// // MyVisitor::visit(my_ast_node) /// ``` &mut AstVisitorMut ), infallible), visitor(drive( /// An immutable visitor that visits the AST for hax. &AstVisitor ), infallible), skip( String, bool, char, hax_frontend_exporter::Span, for crate::interning::Interned, ), drive( for Box, for Option, for Vec, for (A, B), for (A, B, C), usize ), override(AstNodes), override_skip( Span, Fragment, GlobalId, Diagnostic, AttrPayload, ), )] /// Helper trait to drive visitor. pub trait AstVisitable {} } #[allow(missing_docs)] pub mod fallible { use super::*; #[visitable_group( visitor(drive( /// An immutable visitor that can exit early. &AstEarlyExitVisitor )), visitor(drive_mut( /// An immutable visitor that can exit early and mutate the AST fragments. &mut AstEarlyExitVisitorMut )), skip( String, bool, char, hax_frontend_exporter::Span, for crate::interning::Interned, ), drive( for Box, for Option, for Vec, for (A, B), for (A, B, C), usize ), override(AstNodes), override_skip( Span, Fragment, GlobalId, Diagnostic, AttrPayload, ), )] /// Helper trait to drive visitor. pub trait AstVisitable {} } /// This modules provides `dyn` compatible trait for visitors. pub mod dyn_compatible { use super::*; macro_rules! derive_erased_ast_visitors { ({$($attrs:tt)*}, $name: ident, $helper: ident, $($ty:ty),*) => { $($attrs)* pub trait $name<'a>: $($helper<'a, $ty> + )* {} }; } macro_rules! render_path { ($head:ident) => {stringify!($head)}; ($head:ident $(::$tail:ident)*) => { concat!(stringify!($head), "::", render_path!($($tail)::*)) }; } macro_rules! 
make_dyn_compatible { ($($visitable_trait:ident)::*, $($visitor_trait:ident)::*, $helper_name: ident, $name: ident, mut:{$($mut:tt)?}, super:{$($super:ident)::*}, $ret:ty) => { #[doc = concat!("A [dyn-compatible](https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility) trait similar to [`", render_path!($($visitor_trait)::*),"`].")] #[doc = concat!("This trait provides one `visit` method to visit a given type `T` with a given visitor.")] pub trait $helper_name<'a, T: ?Sized>: $($super)::* { /// Visit a value with the visitor. fn visit(&mut self, _: &'a $($mut)? T) -> $ret; } impl<'a, T: $($visitable_trait)::*, V: $($visitor_trait)::*> $helper_name<'a, T> for V { fn visit(&mut self, e: &'a $($mut)? T) -> $ret { ::visit(self, e) } } derive_erased_ast_visitors!({ #[doc = concat!("A [dyn-compatible](https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility) trait similar to [`", render_path!($($visitor_trait)::*),"`].")] #[doc = concat!("This trait is empty, but it implies a super bound for every type in the AST, so that you can use [`", stringify!($helper_name), "::visit", "`] with the entire AST.")] }, $name, $helper_name, AstNodes); impl<'a, V: $($visitor_trait)::*> $name<'a> for V {} }; } make_dyn_compatible!( infallible::AstVisitable, infallible::AstVisitorMut, AstVisitableMut, AstVisitorMut, mut:{mut}, super:{}, () ); make_dyn_compatible!( infallible::AstVisitable, infallible::AstVisitor, AstVisitable, AstVisitor, mut:{}, super:{}, () ); make_dyn_compatible!( fallible::AstVisitable, fallible::AstEarlyExitVisitorMut, AstEarlyExitVisitableMut, AstEarlyExitVisitorMut, mut:{mut}, super:{Visitor}, ControlFlow<::Break> ); make_dyn_compatible!( fallible::AstVisitable, fallible::AstEarlyExitVisitor, AstEarlyExitVisitable, AstEarlyExitVisitor, mut:{}, super:{Visitor}, ControlFlow<::Break> ); } } pub use replaced::dyn_compatible; use replaced::{fallible, infallible}; pub use fallible::{ AstEarlyExitVisitor, AstEarlyExitVisitorMut, AstVisitable 
as AstVisitableFallible, AstVisitableWrapper, }; pub use hax_rust_engine_macros::setup_error_handling_struct; pub use infallible::{ AstVisitable as AstVisitableInfallible, AstVisitableInfallibleWrapper, AstVisitor, AstVisitorMut, }; pub use wrappers::{VisitorWithContext, VisitorWithErrors, setup_error_handling_impl}; #[test] fn double_literals_in_ast() { use crate::ast::diagnostics::*; #[setup_error_handling_struct] #[derive(Default)] struct DoubleU8Literals; impl VisitorWithContext for DoubleU8Literals { fn context(&self) -> Context { Context::Import } } impl AstVisitorMut for DoubleU8Literals { setup_error_handling_impl!(); fn visit_literal(&mut self, x: &mut Literal) { let Literal::Int { value, .. } = x else { return; }; let Ok(n): Result = str::parse(value) else { return self.error( x.clone(), DiagnosticInfoKind::AssertionFailure { details: "Bad literal".into(), }, ); }; let n = (n as u16) * 2; if n >= u8::MAX as u16 { return self.error( x.clone(), DiagnosticInfoKind::AssertionFailure { details: "Literal too big".into(), }, ); } *value = Symbol::new(&format!("{}", n)); } } // Syntax helpers let int_kind = IntKind { size: IntSize::S8, signedness: Signedness::Signed, }; let mk_lit = |n: isize| Literal::Int { value: Symbol::new(&format!("{}", n)), negative: false, kind: int_kind.clone(), }; let meta = Metadata { span: Span::dummy(), attributes: vec![], }; let mk_lit_expr = |n| Expr { kind: Box::new(ExprKind::Literal(mk_lit(n))), ty: Ty(Box::new(TyKind::Primitive(PrimitiveTy::Int( int_kind.clone(), )))), meta: meta.clone(), }; let mk_array = |exprs| Expr { kind: Box::new(ExprKind::Array(exprs)), ty: Ty(Box::new(TyKind::RawPointer)), // wrong type, but this is not important for this test. meta: meta.clone(), }; let mut lit_expr_200 = mk_lit_expr(200); // Creates the expression `[50u8, 100u8, 200u8]`: the last one cannot be doubled, and will cause an error. 
let mut e = mk_array(vec![ mk_lit_expr(50), mk_lit_expr(100), lit_expr_200.clone(), ]); // Visit the expression. DoubleU8Literals::default().visit(&mut e); // Transform `lit_expr_200` into the error `DoubleU8Literal` should produce lit_expr_200.set_error(ErrorNode { fragment: Box::new(lit_expr_200.clone().into()), diagnostics: vec![Diagnostic::new( mk_lit(200), DiagnosticInfo { span: lit_expr_200.span(), context: Context::Import, kind: DiagnosticInfoKind::AssertionFailure { details: "Literal too big".into(), }, }, )], }); // Check that the visitor works as expected assert_eq!( e, mk_array(vec![mk_lit_expr(100), mk_lit_expr(200), lit_expr_200]) ); } ================================================ FILE: rust-engine/src/ast.rs ================================================ //! The core abstract syntax tree (AST) representation for hax. //! //! This module defines the primary data structures used to represent //! typed syntax. //! //! The design of this AST is designed under the following constraints: //! 1. Valid (cargo check) pretty-printed Rust can be produced out of it. //! 2. The Rust THIR AST from the frontend can be imported into this AST. //! 3. The AST defined in the OCaml engine can be imported into this AST. //! 4. This AST can be exported to the OCaml engine. //! 5. This AST should be suitable for AST transformations. pub mod diagnostics; pub mod fragment; pub mod identifiers; pub mod literals; pub mod resugared; pub mod span; pub mod utils; pub mod visitors; use crate::{ast::diagnostics::Context, symbol::Symbol}; use diagnostics::Diagnostic; use fragment::Fragment; use hax_rust_engine_macros::*; pub use identifiers::*; use literals::*; use resugared::*; use span::Span; /// Represents a generic value used in type applications (e.g., `T` in `Vec`). #[derive_group_for_ast] pub enum GenericValue { /// A type-level generic value. /// /// # Example: /// `i32` in `Vec` Ty(Ty), /// A const-level generic value. 
/// /// # Example: /// `12` in `Foo<12>` Expr(Expr), /// A lifetime. /// /// # Example: /// `'a` in `foo<'a>` Lifetime, } /// Built-in primitive types. #[derive_group_for_ast] pub enum PrimitiveTy { /// The `bool` type. Bool, /// An integer type (e.g., `i32`, `u8`). Int(IntKind), /// A float type (e.g. `f32`) Float(FloatKind), /// The `char` type Char, /// The `str` type Str, } /// Represent a Rust lifetime region. #[derive_group_for_ast] pub struct Region; /// A indirection for the representation of types. #[derive_group_for_ast] pub struct Ty(pub(crate) Box); impl Ty { /// The type `bool` pub fn bool() -> Self { Self(Box::new(TyKind::Primitive(PrimitiveTy::Bool))) } /// The type `int` pub fn int(size: IntSize, signedness: Signedness) -> Self { Self(Box::new(TyKind::Primitive(PrimitiveTy::Int(IntKind { size, signedness, })))) } /// The `int` check pub fn is_int(&self) -> bool { let Self(b) = self; matches!( &**b, TyKind::Primitive(PrimitiveTy::Int(IntKind { size: _, signedness: _, })) ) } /// The (hax) type `Prop` pub fn prop() -> Self { Self(Box::new(TyKind::App { head: crate::names::hax_lib::prop::Prop, args: vec![], })) } } /// Describes any Rust type (e.g., `i32`, `Vec`, `fn(i32) -> bool`). #[derive_group_for_ast] pub enum TyKind { /// A primitive type. /// /// # Example: /// `i32`, `bool` Primitive(PrimitiveTy), /// A type application (generic type). /// /// # Example: /// `Vec` App { /// The type being applied (`Vec` in the example). head: GlobalId, /// The arguments (`[i32]` in the example). args: Vec, }, /// A function or closure type. /// /// # Example: /// `fn(i32) -> bool` or `Fn(i32) -> bool` Arrow { /// `i32` in the example inputs: Vec, /// `bool` in the example output: Ty, }, // TODO: Should we keep this type? /// A reference type. /// /// # Example: /// `&i32`, `&mut i32` Ref { /// The type inside the reference inner: Ty, /// Is the reference mutable? 
mutable: bool, /// The region of this reference region: Region, }, /// A parameter type Param(LocalId), // TODO: Should we keep this type? /// A slice type. /// /// # Example: /// `&[i32]` Slice(Ty), /// An array type. /// /// # Example: /// `&[i32; 10]` Array { /// The type of the items of the array ty: Ty, /// The length of the array length: Box, }, /// A raw pointer type RawPointer, /// An associated type /// /// # Example: /// ```rust,ignore /// fn f() -> T::A {...} /// ``` AssociatedType { /// Impl expr for `Tr` in the example impl_: ImplExpr, /// `Tr::A` in the example item: GlobalId, }, /// An opaque type /// /// # Example: /// ```rust,ignore /// type Foo = impl Bar; /// ``` Opaque(GlobalId), /// A `dyn` type /// /// # Example: /// ```rust,ignore /// dyn Tr /// ``` Dyn(Vec), /// A resugared type. /// This variant is introduced before printing only. /// Phases must not produce this variant. Resugared(ResugaredTyKind), /// Fallback constructor to carry errors. Error(ErrorNode), } #[derive_group_for_ast] /// Represent a node of the AST where an error occurred. pub struct ErrorNode { /// The node from the AST at the time something failed pub fragment: Box, /// The error(s) encountered. pub diagnostics: Vec, } impl ErrorNode { /// Creates an assertion failure out of an AST fragment and a message. pub fn assertion_failure( fragment: impl Into + HasMetadata, context: Context, message: impl Into, ) -> Self { let span = fragment.span(); let fragment = fragment.into(); ErrorNode { diagnostics: vec![Diagnostic::new( fragment.clone(), diagnostics::DiagnosticInfo { context, span, kind: hax_types::diagnostics::Kind::AssertionFailure { details: message.into(), }, }, )], fragment: Box::new(fragment), } } } /// A `dyn` trait. The generic arguments are known but the actual type /// implementing the trait is known dynamically. 
///
/// # Example:
/// ```rust,ignore
/// dyn Tr<A, B>
/// ```
#[derive_group_for_ast]
pub struct DynTraitGoal {
    /// `Tr` in the example above
    pub trait_: GlobalId,
    /// `A, B` in the example above
    pub non_self_args: Vec<GenericValue>,
}

/// Extra information attached to syntax nodes.
#[derive_group_for_ast]
pub struct Metadata {
    /// The location in the source code.
    pub span: Span,
    /// Rust attributes.
    pub attributes: Attributes,
    // TODO: add phase/desugar informations
}

/// A typed expression with metadata.
#[derive_group_for_ast]
pub struct Expr {
    /// The kind of expression.
    pub kind: Box<ExprKind>,
    /// The type of this expression.
    pub ty: Ty,
    /// Source span and attributes.
    pub meta: Metadata,
}

/// A typed pattern with metadata.
#[derive_group_for_ast]
pub struct Pat {
    /// The kind of pattern.
    pub kind: Box<PatKind>,
    /// The type of this pattern.
    pub ty: Ty,
    /// Source span and attributes.
    pub meta: Metadata,
}

/// A pattern matching arm with metadata.
#[derive_group_for_ast]
pub struct Arm {
    /// The pattern of the arm.
    pub pat: Pat,
    /// The body of the arm.
    pub body: Expr,
    /// The optional guard of the arm.
    pub guard: Option<Guard>,
    /// Source span and attributes.
    pub meta: Metadata,
}

/// A pattern matching arm guard with metadata.
#[derive_group_for_ast]
pub struct Guard {
    /// The kind of guard.
    pub kind: GuardKind,
    /// Source span and attributes.
    pub meta: Metadata,
}

/// Represents different levels of borrowing.
#[derive_group_for_ast]
pub enum BorrowKind {
    /// Shared reference
    ///
    /// # Example:
    /// `&x`
    Shared,
    /// Unique reference: this is internal to rustc
    Unique,
    /// Mutable reference
    ///
    /// # Example:
    /// `&mut x`
    Mut,
}

/// Binding modes used in patterns.
#[derive_group_for_ast]
pub enum BindingMode {
    /// Binding by value
    ///
    /// # Example:
    /// `x`
    ByValue,
    /// Binding by reference
    ///
    /// # Example:
    /// `ref x`, `ref mut x`
    ByRef(BorrowKind),
}

/// Represents the various kinds of patterns.
#[derive_group_for_ast] pub enum PatKind { /// Wildcard pattern /// /// # Example: /// `_` Wild, /// An ascription pattern /// /// # Example: /// `p : ty` Ascription { /// The inner pattern (`p` in the example) pat: Pat, /// The (spanned) type ascription (`ty` in the example) ty: SpannedTy, }, /// An or pattern /// /// # Example: /// `p | q` /// Always contains at least 2 sub-patterns Or { /// A vector of sub-patterns sub_pats: Vec, }, /// An array pattern /// /// # Example: /// `[p, q]` Array { /// A vector of patterns args: Vec, }, /// A dereference pattern /// /// # Example: /// `&p` Deref { /// The inner pattern sub_pat: Pat, }, /// A constant pattern /// /// # Example: /// `1` Constant { /// The literal lit: Literal, }, /// A variable binding. /// /// # Examples: /// - `x` → `mutable: false` /// - `mut x` → `mutable: true` /// - `ref x` → `mode: ByRef(Shared)` Binding { /// Is the binding mutable? E.g. `x` is not mutable, `mut x` is. mutable: bool, /// The variable introduced by the binding pattern. var: LocalId, /// The binding mode, e.g. [`BindingMode::Shared`] for `ref x`. mode: BindingMode, /// The sub-pattern, if any. /// For example, this is `Some(inner_pat)` for the pattern `variable @ inner_pat`. sub_pat: Option, }, /// A constructor pattern /// /// # Example: /// ```rust,ignore /// Foo(x) /// ``` Construct { /// The identifier of the constructor we are matching constructor: GlobalId, /// Are we constructing a record? E.g. a struct or a variant with named fields. is_record: bool, /// Is this a struct? (meaning, *not* a variant from an enum) is_struct: bool, /// A list of fields. fields: Vec<(GlobalId, Pat)>, }, /// A resugared pattern. /// This variant is introduced before printing only. /// Phases must not produce this variant. Resugared(ResugaredPatKind), /// Fallback constructor to carry errors. Error(ErrorNode), } /// Represents the various kinds of pattern guards. #[derive_group_for_ast] pub enum GuardKind { /// An `if let` guard. 
/// /// # Example: /// ```rust,ignore /// match x { /// Some(value) if let Some(x) = f(value) => x, /// _ => ..., /// } /// ``` IfLet { /// The left-hand side of the guard. `Some(x)` in the example. lhs: Pat, /// The right-hand side of the guard. `f(value)` in the example. rhs: Expr, }, } // TODO: Replace by places, or just expressions /// The left-hand side of an assignment. #[derive_group_for_ast] #[allow(missing_docs)] pub enum Lhs { LocalVar { var: LocalId, ty: Ty, }, VecRef { e: Box, ty: Ty, }, ArbitraryExpr(Box), FieldAccessor { e: Box, ty: Ty, field: GlobalId, }, ArrayAccessor { e: Box, ty: Ty, index: Expr, }, } /// An `ImplExpr` describes the full data of a trait implementation. Because of /// generics, this may need to combine several concrete trait implementation /// items. For example, `((1u8, 2u8), "hello").clone()` combines the generic /// implementation of `Clone` for `(A, B)` with the concrete implementations for /// `u8` and `&str`, represented as a tree. #[derive_group_for_ast] pub struct ImplExpr { /// The impl. expression itself. pub kind: Box, /// The trait being implemented. pub goal: TraitGoal, } /// Represents all the kinds of impl expr. /// /// # Example: /// In the snippet below, the `clone` method on `x` corresponds to the implementation /// of `Clone` derived for `Vec` (`ImplApp`) given the `LocalBound` on `T`. /// ```rust,ignore /// fn f(x: Vec) -> Vec { /// x.clone() /// } /// ``` #[derive_group_for_ast] pub enum ImplExprKind { /// The trait implementation being defined. /// /// # Example: /// The impl expr for `Type: Trait` used in `self.f()` is `Self_`. /// ```rust,ignore /// impl Trait for Type { /// fn f(&self) {...} /// fn g(&self) {self.f()} /// } /// ``` Self_, /// A concrete `impl` block. /// /// # Example /// ```rust,ignore /// impl Clone for Type { // Consider this `impl` is called `impl0` /// ... 
/// } /// fn f(x: Type) { /// x.clone() // Here `clone` comes from `Concrete(impl0)` /// } /// ``` Concrete(TraitGoal), /// A bound introduced by a generic clause. /// /// # Example: /// ```rust,ignore /// fn f(x: T) -> T { /// x.clone() // Here the method comes from the bound `T: Clone` /// } /// ``` LocalBound { /// Local identifier to a bound. id: Symbol, }, /// A parent implementation. /// /// # Example: /// ```rust,ignore /// trait SubTrait: Clone {} /// fn f(x: T) -> T { /// x.clone() // Here the method comes from the parent of the bound `T: SubTrait` /// } /// ``` Parent { /// Parent implementation impl_: ImplExpr, /// Which implementation to pick in the parent ident: ImplIdent, }, /// A projected associated implementation. /// /// # Example: /// In this snippet, `T::Item` is an `AssociatedType` where the subsequent `ImplExpr` /// is a type projection of `ITerator`. /// ```rust,ignore /// fn f(x: T) -> Option { /// x.next() /// } /// ``` Projection { /// The base implementation from which we project impl_: ImplExpr, /// The item in the trait implemented by `impl_` item: GlobalId, /// Which implementation to pick on the item ident: ImplIdent, }, /// An instantiation of a generic implementation. /// /// # Example: /// ```rust,ignore /// fn f(x: Vec) -> Vec { /// x.clone() // The `Clone` implementation for `Vec` is instantiated with the local bound `T: Clone` /// } /// ``` ImplApp { /// The head of the application impl_: ImplExpr, /// The arguments of the application args: Vec, }, /// The implementation provided by a dyn. Dyn, /// A trait implemented natively by rust. Builtin(TraitGoal), /// Fallback constructor to carry errors. Error(ErrorNode), } /// Represents an impl item (associated type or function) /// /// # Example: /// ```rust,ignore /// impl ... { /// fn assoc_fn(...) {...} /// } /// ``` #[derive_group_for_ast] pub struct ImplItem { /// Metadata (span and attributes) for the impl item. pub meta: Metadata, /// Generics for this associated item. 
`T` in the example. pub generics: Generics, /// The associated item itself. pub kind: ImplItemKind, /// The unique identifier for this associated item. pub ident: GlobalId, } /// Represents the kinds of impl items #[derive_group_for_ast] pub enum ImplItemKind { /// An instantiation of associated type /// /// # Example: /// The associated type `Error` in the following example. /// ```rust,ignore /// impl TryInto for ... { /// type Error = u8; /// } /// ``` Type { /// The type expression, `u8` in the example. ty: Ty, /// The parent bounds. In the example, there are none (in the definition /// of `TryInto`, there is no `Error: Something` in the associated type /// definition). parent_bounds: Vec<(ImplExpr, ImplIdent)>, }, /// A definition for a trait function /// /// # Example: /// The associated function `into` in the following example. /// ```rust,ignore /// impl Into for T { /// fn into(&self) -> T {...} /// } /// ``` Fn { /// The body of the associated function (`...` in the example) body: Expr, /// The list of the argument for the associated function (`&self` in the example). params: Vec, }, /// A resugared impl item. /// This variant is introduced before printing only. /// Phases must not produce this variant. Resugared(ResugaredImplItemKind), /// Fallback constructor to carry errors. Error(ErrorNode), } /// Represents a trait item (associated type, fn, or default) #[derive_group_for_ast] pub struct TraitItem { /// Source span and attributes. pub meta: Metadata, /// The kind of trait item we are dealing with (an associated type or function). pub kind: TraitItemKind, /// The generics this associated item carries. /// /// # Example: /// The generics `` on `f`, **not** ``. /// ```rust,ignore /// trait ... { /// fn f(){} /// } /// ``` pub generics: Generics, /// The identifier of the associateed item. 
pub ident: GlobalId,
}

/// Represents the kinds of trait items
#[derive_group_for_ast]
pub enum TraitItemKind {
    /// An associated type
    Type(Vec<ImplIdent>),
    /// An associated function
    Fn(Ty),
    /// An associated function with a default body.
    /// An arrow type (like what is given in `TraitItemKind::Fn`) can be
    /// reconstructed using the types of the parameters and of the body.
    ///
    /// # Example:
    /// ```rust,ignore
    /// impl ... {
    ///     fn f(x: u8) -> u8 { x + 2 }
    /// }
    /// ```
    Default {
        /// The parameters of the associated function (`[x: u8]` in the example).
        params: Vec<Param>,
        /// The default body of the associated function (`x + 2` in the example).
        body: Expr,
    },
    /// A resugared trait item.
    /// This variant is introduced before printing only.
    /// Phases must not produce this variant.
    Resugared(ResugaredTraitItemKind),
    /// Fallback constructor to carry errors.
    Error(ErrorNode),
}

/// A QuoteContent is a component of a quote: it can be a verbatim string, a Rust expression to embed in the quote, a pattern etc.
///
/// # Example:
/// ```rust,ignore
/// fstar!("f ${x + 3} + 10")
/// ```
/// results in `[Verbatim("f"), Expr([[x + 3]]), Verbatim(" + 10")]`
#[derive_group_for_ast]
pub enum QuoteContent {
    /// A verbatim chunk of backend code.
    Verbatim(String),
    /// A Rust expression to inject in the quote.
    Expr(Expr),
    /// A Rust pattern to inject in the quote.
    Pattern(Pat),
    /// A Rust type to inject in the quote.
    Ty(Ty),
}

/// Represents an inlined piece of backend code
#[derive_group_for_ast]
pub struct Quote(pub Vec<QuoteContent>);

/// The origin of a quote item.
#[derive_group_for_ast]
pub struct ItemQuoteOrigin {
    /// From which kind of item this quote was placed on?
    pub item_kind: ItemQuoteOriginKind,
    /// From what item this quote was placed on?
    pub item_ident: GlobalId,
    /// What was the position of the quote?
pub position: ItemQuoteOriginPosition,
}

/// The kind of a quote item's origin
#[derive_group_for_ast]
pub enum ItemQuoteOriginKind {
    /// A function
    Fn,
    /// A type alias
    TyAlias,
    /// A type definition (`enum`, `union`, `struct`)
    Type,
    /// A macro invocation
    /// TODO: drop
    MacroInvocation,
    /// A trait definition
    Trait,
    /// An `impl` block
    Impl,
    /// An alias
    Alias,
    /// A `use`
    Use,
    /// A quote
    Quote,
    /// An error
    HaxError,
    /// Something unknown
    NotImplementedYet,
}

/// The position of a quote item relative to its origin
#[derive_group_for_ast]
pub enum ItemQuoteOriginPosition {
    /// The quote was placed before an item
    Before,
    /// The quote was placed after an item
    After,
    /// The quote replaces an item
    Replace,
}

/// The kind of a loop (resugared by respective `Reconstruct...Loops` phases).
/// Useful for `FunctionalizeLoops`.
#[derive_group_for_ast]
pub enum LoopKind {
    /// An unconditional loop.
    ///
    /// # Example:
    /// `loop { ... }`
    UnconditionalLoop,
    /// A while loop.
    ///
    /// # Example:
    /// ```rust,ignore
    /// while(condition) { ... }
    /// ```
    WhileLoop {
        /// The boolean condition
        condition: Expr,
    },
    /// A for loop.
    ///
    /// # Example:
    /// ```rust,ignore
    /// for i in iterator { ... }
    /// ```
    ForLoop {
        /// The pattern of the for loop (`i` in the example).
        pat: Pat,
        /// The iterator we're looping on (`iterator` in the example).
        iterator: Expr,
    },
    /// A specialized for loop on a range.
    ///
    /// # Example:
    /// ```rust,ignore
    /// for i in start..end {
    ///     ...
    /// }
    /// ```
    ForIndexLoop {
        /// Where the range begins (`start` in the example).
        start: Expr,
        /// Where the range ends (`end` in the example).
        end: Expr,
        /// The binding used for the iteration.
        var: LocalId,
        /// The type of the binding `var`.
        var_ty: Ty,
    },
}

/// This is a marker to describe what control flow is present in a loop.
/// It is added by phase `DropReturnBreakContinue` and the information is used in
/// `FunctionalizeLoops`.
We need it to replace the control flow nodes of the AST /// by an encoding in the `ControlFlow` enum. #[derive_group_for_ast] pub enum ControlFlowKind { /// Contains no `return`, maybe some `break`s BreakOnly, /// Contains both at least one `return` and maybe some `break`s BreakOrReturn, } /// Represent explicit mutation context for a loop. /// This is useful to make loops pure. #[derive_group_for_ast] pub struct LoopState { /// The initial state of the loop. pub init: Expr, /// The pattern that destructures the state of the loop. pub body_pat: Pat, } // TODO: Kill some nodes (e.g. `Array`)? /// Describes the shape of an expression. #[derive_group_for_ast] pub enum ExprKind { /// If expression. /// /// # Example: /// `if x > 0 { 1 } else { 2 }` If { /// The boolean condition (`x > 0` in the example). condition: Expr, /// The then branch (`1` in the example). then: Expr, /// An optional else branch (`Some(2)`in the example). else_: Option, }, /// Function application. /// /// # Example: /// `f(x, y)` App { /// The head of the function application (or, which function do we apply?). head: Expr, /// The arguments applied to the function. args: Vec, /// The generic arguments applied to the function. generic_args: Vec, /// If the function requires generic bounds to be called, `bounds_impls` /// is a vector of impl. expressions for those bounds. bounds_impls: Vec, /// If we apply an associated function, contains the impl. expr used. trait_: Option<(ImplExpr, Vec)>, }, /// A literal value. /// /// # Example: /// `42`, `"hello"` Literal(Literal), /// An array literal. /// /// # Example: /// `[1, 2, 3]` Array(Vec), /// A constructor application /// /// # Example: /// ```rust,ignore /// MyEnum::MyVariant { x : 1, ...base } /// `````` Construct { /// The identifier of the constructor we are building (`MyEnum::MyVariant` in the example). constructor: GlobalId, /// Are we constructing a record? E.g. a struct or a variant with named fields. 
(`true` in the example) is_record: bool, /// Is this a struct? Neaning, *not* a variant from an enum. (`false` in the example) is_struct: bool, /// A list of fields (`[(x, 1)]` in the example). fields: Vec<(GlobalId, Expr)>, /// The base expression, if any. (`Some(base)` in the example) base: Option, }, /// A `match`` expression. /// /// # Example: /// ```rust,ignore /// match x { /// pat1 => expr1, /// pat2 => expr2, /// } /// ``` Match { /// The expression on which we are matching. (`x` in the example) scrutinee: Expr, /// The arms of the match. (`pat1 => expr1` and `pat2 => expr2` in the example) arms: Vec, }, /// A reference expression. /// /// # Examples: /// - `&x` → `mutable: false` /// - `&mut x` → `mutable: true` Borrow { /// Is the borrow mutable? mutable: bool, /// The expression we are borrowing inner: Expr, }, /// Raw borrow /// /// # Example: /// `*const u8` AddressOf { /// Is the raw pointer mutable? mutable: bool, /// The expression on which we take a pointer inner: Expr, }, /// A `let` expression used in expressions. /// /// # Example: /// `let x = 1; x + 1` Let { /// The left-hand side of the `let` expression. (`x` in the example) lhs: Pat, /// The right-hand side of the `let` expression. (`1` in the example) rhs: Expr, /// The body of the `let`. (`x + 1` in the example) body: Expr, }, /// A global identifier. /// /// # Example: /// `std::mem::drop` GlobalId(GlobalId), /// A local variable. /// /// # Example: /// `x` LocalId(LocalId), /// Type ascription Ascription { /// The expression being ascribed. e: Expr, /// The type ty: Ty, }, /// Variable mutation /// /// # Example: /// `x = 1` Assign { /// the left-hand side (place) of the assign lhs: Lhs, /// The value we are assigning value: Expr, }, /// Loop /// /// # Example: /// `'label: loop { body }` Loop { /// The body of the loop. body: Expr, /// The kind of loop (e.g. `while`, `loop`, `for`...). kind: Box, /// An optional loop state, that makes explicit the state mutated by the /// loop. 
state: Option, /// What kind of control flow is performed by this loop? control_flow: Option, /// Optional loop label. label: Option, }, /// The `break` exppression, that breaks out of a loop. /// /// # Example: /// `break 'label 3` Break { /// The value we break with. By default, this is `()`. /// /// # Example: /// ```rust,ignore /// loop { break 3; } + 3 /// ``` value: Expr, /// What loop shall we break? By default, the parent enclosing loop. label: Option, /// When a loop has a state (see [`ExprKind::Loop::state`]), this field /// `state` is `Some(_)`. This carries the updated state for the loop. state: Option, }, /// Return from a function. /// /// # Example: /// `return 1` Return { /// The expression we return (`1` in the example). value: Expr, }, /// Continue (go to next loop iteration) /// /// # Example: /// `continue 'label` Continue { /// The loop we continue. label: Option, /// When a loop has a state (see [`ExprKind::Loop::state`]), this field /// `state` is `Some(_)`. This carries the updated state for the loop. state: Option, }, /// Closure (anonymous function) /// /// # Example: /// `|x| x` Closure { /// The parameters of the closure params: Vec, /// The body of the closure body: Expr, /// The captured expressions captures: Vec, }, /// Block of safe or unsafe expression /// /// # Example: /// `unsafe { ... }` Block { /// The body of the block. body: Expr, /// The safety of the block. safety_mode: SafetyKind, }, /// A quote is an inlined piece of backend code. Quote { /// The contents of the quote. contents: Quote, }, /// A resugared expression. /// This variant is introduced before printing only. /// Phases must not produce this variant. Resugared(ResugaredExprKind), /// Fallback constructor to carry errors. 
Error(ErrorNode), } /// Represents the kinds of generic parameters #[derive_group_for_ast] pub enum GenericParamKind { /// A generic lifetime Lifetime, /// A generic type Type, /// A generic constant Const { /// The type of the generic constant ty: Ty, }, } /// Represents an instantiated trait that needs to be implemented. /// /// # Example: /// A bound `_: std::ops::Add` #[derive_group_for_ast] pub struct TraitGoal { /// `std::ops::Add` in the example. pub trait_: GlobalId, /// `[u8]` in the example. pub args: Vec, } /// Represents a trait bound in a generic constraint #[derive_group_for_ast] pub struct ImplIdent { /// The trait goal of this impl identifier pub goal: TraitGoal, /// The name itself pub name: Symbol, } /// A projection predicate expresses a constraint over an associated type: /// ```rust,ignore /// fn f>(...) /// ``` /// In this example `Foo` has an associated type `S`. #[derive_group_for_ast] pub struct ProjectionPredicate { /// The impl expression we project from pub impl_: ImplExpr, /// The associated type being projected pub assoc_item: GlobalId, /// The equality constraint on the associated type pub ty: Ty, } /// A generic constraint (lifetime, type-class or equality) #[derive_group_for_ast] pub enum GenericConstraint { /// A lifetime Lifetime(String), // TODO: Remove `String` /// A type-class constraint (e.g. `T: Foo`) TypeClass(ImplIdent), /// An equality constraint on an associated type (e.g. `T::Assoc = u8`) Equality(ProjectionPredicate), } /// A generic parameter (lifetime, type parameter or const parameter) #[derive_group_for_ast] pub struct GenericParam { /// The local identifier for the generic parameter pub ident: LocalId, /// Metadata (span and attributes) for the generic parameter. pub meta: Metadata, /// The kind of generic parameter. pub kind: GenericParamKind, } /// Generic parameters and constraints (contained between `<>` in function declarations) #[derive_group_for_ast] pub struct Generics { /// A vector of generic parameters. 
pub params: Vec, /// A vector of generic constraints. pub constraints: Vec, } /// Safety level of a function. #[derive_group_for_ast] pub enum SafetyKind { /// Safe function (default). Safe, /// Unsafe function. Unsafe, } /// Represents a single attribute. #[derive_group_for_ast] pub struct Attribute { /// The kind of attribute (a comment, a tool attribute?). pub kind: AttributeKind, /// The span of the attribute. pub span: Span, } /// Represents the kind of an attribute. #[derive_group_for_ast] pub enum AttributeKind { /// A tool attribute `#[path(tokens)]` Tool { /// The path to the tool path: String, /// The payload tokens: String, }, /// A doc comment DocComment { /// What kind of comment? (single lines, block) kind: DocCommentKind, /// The contents of the comment body: String, }, /// Hax attribute Hax(hax_lib_macros_types::AttrPayload), } /// Represents the kind of a doc comment. #[derive_group_for_ast] pub enum DocCommentKind { /// Single line comment (`//...`) Line, /// Block comment (`/*...*/`) Block, } /// A list of attributes. pub type Attributes = Vec; /// A type with its associated span. #[derive_group_for_ast] pub struct SpannedTy { /// The span of the type pub span: Span, /// The type itself pub ty: Ty, } /// A function or closure parameter. /// /// # Example: /// ```rust,ignore /// (mut x, y): (T, u8) /// ``` #[derive_group_for_ast] pub struct Param { /// The pattern part (left-hand side) of a parameter (`(mut x, y)` in the example). pub pat: Pat, /// The type part (right-rand side) of a parameter (`(T, u8)` in the example). pub ty: Ty, /// The span of the type part (if available). pub ty_span: Option, /// Optionally, some attributes present on the parameter. pub attributes: Attributes, } /// A variant of an enum or struct. /// In our representation structs always have one variant with an argument for each field. 
#[derive_group_for_ast] pub struct Variant { /// Name of the variant pub name: GlobalId, /// Fields of this variant (named or anonymous) pub arguments: Vec<(GlobalId, Ty, Attributes)>, /// True if fields are named pub is_record: bool, // TODO Missing span /// Attributes of the variant pub attributes: Attributes, } /// A top-level item in the module. #[derive_group_for_ast] pub enum ItemKind { /// A function or constant item. /// /// # Example: /// ```rust,ignore /// fn add(x: i32, y: i32) -> i32 { /// x + y /// } /// ``` /// Constants are represented as functions of arity zero, while functions always have a non-zero arity. Fn { /// The identifier of the function. /// /// # Example: /// `add` name: GlobalId, /// The generic arguments and constraints of the function. /// /// # Example: /// the generic type `T` and the constraint `T: Clone` generics: Generics, /// The body of the function /// /// # Example: /// `x + y` body: Expr, /// The parameters of the function. /// /// # Example: /// `x: i32, y: i32` params: Vec, /// The safety of the function. safety: SafetyKind, }, /// A type alias. /// /// # Example: /// ```rust,ignore /// type A = u8; /// ``` TyAlias { /// Name of the alias /// /// # Example: /// `A` name: GlobalId, /// Generic arguments and constraints generics: Generics, /// Original type /// /// # Example: /// `u8` ty: Ty, }, /// A type definition (struct or enum) /// /// # Example: /// ```rust,ignore /// enum A {B, C} /// struct S {f: u8} /// ``` Type { /// Name of this type /// /// # Example: /// `A`, `S` name: GlobalId, /// Generic parameters and constraints generics: Generics, /// Variants /// /// # Example: /// `{B, C}` variants: Vec, /// Is this a struct (or an enum) is_struct: bool, }, /// A trait definition. 
/// /// # Example: /// ```rust,ignore /// trait T { /// type Assoc; /// fn m(x: Self::Assoc, y: Self) -> A; /// } /// ``` Trait { /// Name of this trait /// /// # Example: /// `T` name: GlobalId, /// Generic parameters and constraints /// /// # Example: /// `` generics: Generics, /// Items required to implement the trait /// /// # Example: /// `type Assoc;`, `fn m ...;` items: Vec, /// Safe or unsafe safety: SafetyKind, }, /// A trait implementation. /// /// # Example: /// ```rust,ignore /// impl T for u16 { /// type Assoc = u32; /// fn m(x: u32, y: u16) -> u8 { /// (x as u8) + (y as u8) /// } /// } /// ``` Impl { /// Generic arguments and constraints generics: Generics, /// The type we implement the trait for /// /// # Example: /// `u16` self_ty: Ty, /// Instantiated trait that is being implemented /// /// # Example: /// `T` of_trait: (GlobalId, Vec), /// Items in this impl /// /// # Example: /// `fn m ...`, `type Assoc ...` items: Vec, /// Implementations of traits required for this impl parent_bounds: Vec<(ImplExpr, ImplIdent)>, }, /// Internal node introduced by phases, corresponds to an alias to any item. Alias { /// New name name: GlobalId, /// Original name item: GlobalId, }, // TODO: Should we keep `Use`? /// A `use` statement Use { /// Path to used item(s) path: Vec, /// Comes from external crate is_external: bool, /// Optional `as` rename: Option, }, /// A `Quote` node is inserted by phase TransformHaxLibInline to deal with some `hax_lib` features. /// For example insertion of verbatim backend code. Quote { /// Content of the quote quote: Quote, /// Description of the quote target position origin: ItemQuoteOrigin, }, /// A Rust module (`mod`, inline or not). /// This exists solely because modules can have attributes relevant to the hax engine. RustModule, /// Fallback constructor to carry errors. Error(ErrorNode), /// A resugared item. /// This variant is introduced before printing only. /// Phases must not produce this variant. 
Resugared(ResugaredItemKind), /// Item that is not implemented yet NotImplementedYet, } /// A top-level item with metadata. #[derive_group_for_ast] pub struct Item { /// The global identifier of the item. pub ident: GlobalId, /// The kind of the item. pub kind: ItemKind, /// Source span and attributes. pub meta: Metadata, } impl Item { /// Checks whether the item was marked opaque using `hax_lib::opaque` pub fn is_opaque(&self) -> bool { self.meta.attributes.iter().any(|a| { matches!( a.kind, AttributeKind::Hax(hax_lib_macros_types::AttrPayload::Erased) ) }) } } /// A "flat" module: this contains only non-module items. #[derive_group_for_ast] pub struct Module { /// The global identifier of the module. pub ident: GlobalId, /// The list of items that belongs to this module. pub items: Vec, /// Source span and attributes. pub meta: Metadata, } impl Generics { /// Returns Iterator over all type-class constraints (`GenericConstraint::TypeClass`) pub fn type_class_constraints(&self) -> impl Iterator { self.constraints.iter().filter_map(|c| match c { GenericConstraint::TypeClass(impl_id) => Some(impl_id), _ => None, }) } /// Returns Iterator over all equality constraints (`GenericConstraint::Equality`) pub fn equality_constraints(&self) -> impl Iterator { self.constraints.iter().filter_map(|c| match c { GenericConstraint::Equality(pp) => Some(pp), _ => None, }) } } /// Traits for utilities on AST data types pub mod traits { use super::*; /// Marks AST data types that carry metadata (span + attributes) pub trait HasMetadata { /// Get metadata fn metadata(&self) -> &Metadata; /// Get mutable borrow on metadata fn metadata_mut(&mut self) -> &mut Metadata; } /// Marks AST data types that carry a span pub trait HasSpan { /// Get span fn span(&self) -> Span; /// Mutable borrow on the span fn span_mut(&mut self) -> &mut Span; } /// Marks AST data types that carry a Type pub trait Typed { /// Get type fn ty(&self) -> &Ty; } impl HasSpan for T { fn span(&self) -> Span { 
self.metadata().span } fn span_mut(&mut self) -> &mut Span { &mut self.metadata_mut().span } } /// Marks types of the AST that carry a kind (an enum for the actual content) pub trait HasKind { /// Type carrying the kind, should be named `Kind` type Kind; /// Get kind fn kind(&self) -> &Self::Kind; /// Get mutable borrow on kind fn kind_mut(&mut self) -> &mut Self::Kind; } macro_rules! derive_has_metadata { ($($ty:ty),*) => { $(impl HasMetadata for $ty { fn metadata(&self) -> &Metadata { &self.meta } fn metadata_mut(&mut self) -> &mut Metadata { &mut self.meta } })* }; } macro_rules! derive_has_kind { ($($ty:ty => $kind:ty),*) => { $(impl HasKind for $ty { type Kind = $kind; fn kind(&self) -> &Self::Kind { &self.kind } fn kind_mut(&mut self) -> &mut Self::Kind { &mut self.kind } })* }; } derive_has_metadata!( Item, Expr, Pat, Guard, Arm, ImplItem, TraitItem, GenericParam ); derive_has_kind!( Item => ItemKind, Expr => ExprKind, Pat => PatKind, Guard => GuardKind, GenericParam => GenericParamKind, ImplItem => ImplItemKind, TraitItem => TraitItemKind, ImplExpr => ImplExprKind ); impl HasSpan for Attribute { fn span(&self) -> Span { self.span } fn span_mut(&mut self) -> &mut Span { &mut self.span } } impl Typed for Expr { fn ty(&self) -> &Ty { &self.ty } } impl Typed for Pat { fn ty(&self) -> &Ty { &self.ty } } impl Typed for SpannedTy { fn ty(&self) -> &Ty { &self.ty } } impl HasSpan for SpannedTy { fn span(&self) -> Span { self.span } fn span_mut(&mut self) -> &mut Span { &mut self.span } } impl ExprKind { /// Convert to full `Expr` with type, span and attributes pub fn into_expr(self, span: Span, ty: Ty, attributes: Vec) -> Expr { Expr { kind: Box::new(self), ty, meta: Metadata { span, attributes }, } } } /// Manual implementation of HasKind as the Ty struct contains a Box /// instead of a TyKind directly. 
impl HasKind for Ty { type Kind = TyKind; fn kind(&self) -> &Self::Kind { &self.0 } fn kind_mut(&mut self) -> &mut Self::Kind { &mut self.0 } } /// Fragments of the AST on which we can store an `ErrorNode`. pub trait FallibleAstNode { /// Replace the current node with an error. fn set_error(&mut self, error_node: ErrorNode); /// Extract an error if any. fn get_error(&self) -> Option<&ErrorNode>; } macro_rules! derive_error_node { ($($ty:ident => $kind:ident),*) => {$( impl FallibleAstNode for $ty { fn set_error(&mut self, mut error_node: ErrorNode) { if let Some(base) = self.get_error().cloned() { error_node.diagnostics.extend_from_slice(&base.diagnostics); } *self.kind_mut() = $kind::Error(error_node) } fn get_error(&self) -> Option<&ErrorNode> { match &self.kind() { $kind::Error(error_node) => Some(error_node), _ => None, } } } )*}; } derive_error_node!(Item => ItemKind, Pat => PatKind, Expr => ExprKind, Ty => TyKind); } pub use traits::*; ================================================ FILE: rust-engine/src/attributes.rs ================================================ //! Work with hax attributes. use std::collections::HashMap; use hax_lib_macros_types::{AssociationRole, AttrPayload, ItemUid, ProofMethod}; use crate::ast::diagnostics::{Context, DiagnosticInfo, DiagnosticInfoKind}; use super::ast::*; use visitors::AstVisitorMut; /// A graph of items connected via the hax attribute [`AttrPayload::AssociatedItem`] and UUIDs. #[derive(Clone)] pub struct LinkedItemGraph { items: HashMap, context: Context, } impl Default for LinkedItemGraph { fn default() -> Self { Self { items: Default::default(), context: Context::Unknown, } } } /// Get an iterator over hax attributes contained in the given attributes. 
pub fn hax_attributes(attrs: &Attributes) -> impl Iterator { attrs.iter().flat_map(|attr| match &attr.kind { AttributeKind::Hax(attr_payload) => Some(attr_payload), _ => None, }) } /// Get proof attributes attached to the item pub fn hax_proof_attributes(item: &Item) -> Result { let mut proofs = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr { AttrPayload::Proof(proof) => Some(proof.clone()), _ => None, }); let proof = proofs.next(); if proofs.next().is_some() { return Err("At most one `proof` attribute per item is allowed.".into()); } let mut pure_requires_proofs = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr { AttrPayload::PureRequiresProof(proof) => Some(proof.clone()), _ => None, }); let pure_requires_proof = pure_requires_proofs.next(); if pure_requires_proofs.next().is_some() { return Err("At most one `pure_requires_proof` attribute per item is allowed.".into()); } let mut pure_ensures_proofs = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr { AttrPayload::PureEnsuresProof(proof) => Some(proof.clone()), _ => None, }); let pure_ensures_proof = pure_ensures_proofs.next(); if pure_ensures_proofs.next().is_some() { return Err("At most one `pure_ensures_proof` attribute per item is allowed.".into()); } let mut proof_methods = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr { AttrPayload::ProofMethod(method) => Some(*method), _ => None, }); let proof_method = proof_methods.next(); if proof_methods.next().is_some() { return Err("At most one `proof_method` attribute per item is allowed.".into()); } Ok(ProofAttributes { proof, pure_requires_proof, pure_ensures_proof, proof_method, }) } fn uuid(context: Context, item: &Item) -> Option { let mut uuids = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr { AttrPayload::Uid(item_uid) => Some(item_uid), _ => None, }); let uuid = uuids.next()?; if let Some(other) = uuids.next() { emit_assertion_failure( context, item.span(), format!( 
"Found more than one UUID hax attribute on this item. The two first UUIDs are {uuid} and {other}." ), ); None } else { Some(uuid.clone()) } } fn emit_assertion_failure(context: Context, span: span::Span, message: impl Into) { DiagnosticInfo { context, span, kind: DiagnosticInfoKind::AssertionFailure { details: message.into(), }, } .emit(); } impl std::fmt::Debug for LinkedItemGraph { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("LinkedItemGraph") .field( "items", &self .items .iter() .map(|(id, item)| (id.to_string(), item.ident.to_debug_string())) .collect::>(), ) .field("context", &self.context) .finish() } } impl LinkedItemGraph { /// Clone items marked with UUIDs attributes to build a graph of linked items. /// This graph clones the items that represent linked items: e.g. pre and post conditions. pub fn new(items: &[Item], context: Context) -> Self { Self { items: HashMap::from_iter( items .iter() .filter_map(|item| Some((uuid(context.clone(), item)?, item.clone()))), ), context, } } fn emit_assertion_failure(&self, span: span::Span, message: impl Into) { emit_assertion_failure(self.context.clone(), span, message) } fn emit_unimplemented(&self, span: span::Span, issue_id: u32, message: impl Into) { DiagnosticInfo { context: self.context.clone(), span, kind: DiagnosticInfoKind::Unimplemented { issue_id: Some(issue_id), details: Some(message.into()), }, } .emit(); } /// Given a graph and an item `item`, returns an iterator of the various items that are linked with `item`. 
pub fn linked_items_iter( &self, item: &impl HasMetadata, ) -> impl Iterator)> { let item_attributes = &item.metadata().attributes; hax_attributes(item_attributes).flat_map(move |attr| match attr { AttrPayload::AssociatedItem { role, item: target } => { let target = self.items.get(target).map(Ok).unwrap_or_else(|| { Err(DiagnosticInfo { context: self.context.clone(), span: item.span(), kind: DiagnosticInfoKind::AssertionFailure { details: format!("An item linked via hax attributes could not be found. The UUID is {target:?}. The graph is {:#?}.", self), }, }) }); Some((*role, target)) } _ => None, }) } /// Returns the items linked to a given item. pub fn linked_items( &self, item: &impl HasMetadata, ) -> HashMap>> { let mut map: HashMap> = HashMap::new(); for (role, item) in self.linked_items_iter(item) { map.entry(role).or_default().push(item); } map } /// Returns the precondition, postcondition and decreases clause, if any, for a given item. /// When operating on a linked function, `self_id` is the local identifier of `self`. 
pub fn fn_like_linked_expressions( &self, item: &impl HasMetadata, self_id: Option, ) -> FnLikeAssocatedExpressions { let assoc_items = self.linked_items(item); let get = |role| { assoc_items .get(&role) .iter() .flat_map(|vec| vec.iter()) .flat_map(|item| match item { Ok(item) => Some(item), Err(err) => { err.emit(); None } }) .map(|item| extract_expr(&self.context, item, self_id.clone())) .collect::>() }; let precondition = { let mut preconditions = get(AssociationRole::Requires).into_iter(); preconditions.next().map(|(e, _)| { for extra in preconditions { self.emit_unimplemented(extra.0.span(), 1270, "multiple pre-conditions"); } e }) }; let decreases = { let mut decreases = get(AssociationRole::Decreases).into_iter(); decreases.next().map(|(e, _)| { for extra in decreases { self.emit_unimplemented(extra.0.span(), 1270, "multiple decreases"); } e }) }; let postcondition = { let mut postconditions = get(AssociationRole::Ensures).into_iter(); postconditions.next().and_then(|(e, params)| { for extra in postconditions { self.emit_unimplemented(extra.0.span(), 1270, "multiple post-conditions"); } if let Some(last_param) = params.last() { Some(Postcondition { result_binder: last_param.pat.clone(), body: e.clone(), }) } else { self.emit_assertion_failure( e.span(), "hax ensures attribute: could not find output binder", ); None } }) }; FnLikeAssocatedExpressions { decreases, precondition, postcondition, } } /// Is there a specification that we should prove for this item? pub fn has_spec(&self, item: &Item) -> bool { let spec = self.fn_like_linked_expressions(item, item.self_id()); spec.precondition.is_some() || spec.postcondition.is_some() } } fn extract_expr<'a>( context: &Context, item: &'a Item, self_id: Option, ) -> (Expr, Vec<&'a Param>) { let ItemKind::Fn { body, params, .. 
} = item.kind() else { return ( ExprKind::Error(ErrorNode::assertion_failure( item.clone(), context.clone(), "Expected an function", )) .into_expr(item.span(), Ty::prop(), vec![]), vec![], ); }; let mut body = body.clone(); if let Some(self_id) = self_id && let [maybe_self, ..] = params.as_slice() && let PatKind::Binding { var, sub_pat: None, .. } = &*maybe_self.pat.kind { // Here, we expect `self_id` is `self`, thus we cannot have any shadowing. utils::mappers::SubstLocalIds::one(var.clone(), self_id.clone()).visit(&mut body) } (body, params.iter().collect()) } /// A postcondition. /// /// ## Example /// The expression `result != x` in the following is a postcondition. /// Note that `result` is an extra binder that represent the result of `f`, whose type is `u8` in this case: the return type of `f`. /// /// ```rust /// #[hax_lib::ensures(|result| result != x)] /// fn f(x: u8) -> u8 { x.wrapping_add(1) } /// ``` pub struct Postcondition { /// In the example, this is `|result|`. pub result_binder: Pat, /// The formula of the postcondition, `result != x` in the example. pub body: Expr, } /// The various linked expressions one can usually find on a (linked or not) function. pub struct FnLikeAssocatedExpressions { /// A decreases clause, see [`hax_lib::decreases`] pub decreases: Option, /// A precondition, see [`hax_lib::requires`] pub precondition: Option, /// A postcondition, see [`hax_lib::ensures`] pub postcondition: Option, } /// The various linked expressions one can usually find on a (linked or not) function. 
pub struct ProofAttributes { /// A custom proof, see [`hax_lib::lean::proof`] pub proof: Option, /// A proof that the precondition is pure, see [`hax_lib::lean::pure_requires_proof`] pub pure_requires_proof: Option, /// A proof that the postcondition is pure, see [`hax_lib::lean::pure_ensures_proof`] pub pure_ensures_proof: Option, /// A proof method, see [`hax_lib::lean::proof_method`] pub proof_method: Option, } ================================================ FILE: rust-engine/src/backends/fstar.rs ================================================ //! The F* backend. The F* printer is still implemented in Ocaml but the phase driver uses this infrastructure /// The F* backend pub struct FStarBackend; impl super::Backend for FStarBackend { // TODO Replace by an empty printer // This is a dummy value. The fstar backend's printer is implemented in OCaml type Printer = super::lean::LeanPrinter; fn module_path(&self, _module: &super::Module) -> camino::Utf8PathBuf { todo!("The fstar backend's printer is implemented in OCaml") } fn phases(&self) -> Vec { use crate::phase::legacy::LegacyOCamlPhase::*; vec![ RejectRawOrMutPointer.into(), RewriteLocalSelf.into(), TransformHaxLibInline.into(), Specialize.into(), DropSizedTrait.into(), SimplifyQuestionMarks.into(), AndMutDefsite.into(), ReconstructAsserts.into(), ReconstructForLoops.into(), ReconstructWhileLoops.into(), DirectAndMut.into(), RejectArbitraryLhs.into(), DropBlocks.into(), DropMatchGuards.into(), DropReferences.into(), ExplicitConversions.into(), TrivializeAssignLhs.into(), HoistSideEffects.into(), HoistDisjunctivePatterns.into(), SimplifyMatchReturn.into(), LocalMutation.into(), RewriteControlFlow.into(), DropReturnBreakContinue.into(), FunctionalizeLoops.into(), RejectQuestionMark.into(), RejectAsPattern.into(), TraitsSpecs.into(), SimplifyHoisting.into(), NewtypeAsRefinement.into(), RejectTraitItemDefault.into(), BundleCycles.into(), ReorderFields.into(), SortItems.into(), ] } } 
================================================ FILE: rust-engine/src/backends/lean.rs ================================================ //! The Lean backend //! //! This module defines the trait implementations to export the rust ast to //! Pretty::Doc type, which can in turn be exported to string (or, eventually, //! source maps). use std::collections::HashSet; use std::sync::OnceLock; use super::prelude::*; use crate::{ ast::{ identifiers::global_id::view::{ConstructorKind, PathSegment, TypeDefKind}, span::Span, }, attributes::hax_proof_attributes, names::rust_primitives::hax::{ cast_op, explicit_monadic::{lift, pure}, }, phase::*, }; use camino::Utf8PathBuf; use hax_lib_macros_types::ProofMethod; use hax_types::engine_api::File; mod binops { pub use crate::names::core::cmp::PartialEq; pub use crate::names::core::ops::bit::*; pub use crate::names::core::ops::index::*; pub use crate::names::rust_primitives::arithmetic::neg; pub use crate::names::rust_primitives::hax::machine_int::*; pub use crate::names::rust_primitives::hax::{logical_op_and, logical_op_or}; } const LIFT: GlobalId = lift; const PURE: GlobalId = pure; const CAST_OP: GlobalId = cast_op; /// The Lean printer #[setup_printer_struct] #[derive(Default, Clone)] pub struct LeanPrinter { current_namespace: Option, } const INDENT: isize = 2; const HEADER: &str = " -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false "; impl RenderView for LeanPrinter { fn reserved_keywords() -> &'static HashSet { static SET: OnceLock> = OnceLock::new(); SET.get_or_init(|| { [ // reserved for Lean: "end", "def", "abbrev", "theorem", "example", "inductive", "structure", "from", // reserved for hax encoding: "associatedTypes", "AssociatedTypes", ] .into_iter() .map(|s| s.to_string()) .collect() }) } fn 
should_escape(id: &str) -> bool { Self::is_reserved_keyword(id) || id.starts_with(|c: char| c.is_ascii_digit()) || id.starts_with("trait_constr_") } fn separator(&self) -> &str { "." } fn relativize_module_path<'a>(&self, module_path: &'a [PathSegment]) -> &'a [PathSegment] { if let Some(namespace) = self.current_namespace && namespace.view().segments() == module_path { &[] } else { module_path } } fn render_path_segment(&self, chunk: &PathSegment) -> Vec { // Returning None indicates that the default rendering should be used (match chunk.kind() { AnyKind::Constructor(ConstructorKind::Constructor { ty }) if matches!(ty.kind(), TypeDefKind::Struct) => { Some(vec![ Self::escape(&self.render_path_segment_payload(chunk.payload())), "mk".to_string(), ]) } AnyKind::Field { named: _, parent } => match parent.kind() { ConstructorKind::Constructor { ty } if matches!(&ty.kind(), TypeDefKind::Struct) => { chunk.parent().map(|parent| { vec![ Self::escape(&self.render_path_segment_payload(parent.payload())), Self::escape(&self.render_path_segment_payload(chunk.payload())), ] }) } _ => None, }, _ => None, }) .unwrap_or(default::render_path_segment(self, chunk)) } } impl Printer for LeanPrinter {} /// The Lean backend pub struct LeanBackend; impl Backend for LeanBackend { type Printer = LeanPrinter; fn module_path(&self, module: &Module) -> Utf8PathBuf { let krate = module.ident.krate(); Utf8PathBuf::from(krate).with_extension("lean") } fn phases(&self) -> Vec { use crate::phase::{PhaseKind::*, legacy::LegacyOCamlPhase::*}; vec![ RejectRawOrMutPointer.into(), RejectImplTypeMethod.into(), RewriteLocalSelf.into(), TransformHaxLibInline.into(), Specialize.into(), DropSizedTrait.into(), SimplifyQuestionMarks.into(), AndMutDefsite.into(), ReconstructAsserts.into(), ReconstructForLoops.into(), ReconstructWhileLoops.into(), DirectAndMut.into(), RejectArbitraryLhs.into(), DropBlocks.into(), DropMatchGuards.into(), DropReferences.into(), TrivializeAssignLhs.into(), 
HoistSideEffects.into(), HoistDisjunctivePatterns.into(), SimplifyMatchReturn.into(), LocalMutation.into(), RewriteControlFlow.into(), DropReturnBreakContinue.into(), FunctionalizeLoops.into(), RejectQuestionMark.into(), TraitsSpecs.into(), SimplifyHoisting.into(), NewtypeAsRefinement.into(), ReorderFields.into(), SortItems.into(), FilterUnprintableItems, ExplicitMonadic, ] } fn resugaring_phases() -> Vec> { vec![ Box::new(RecursiveFunctions), Box::new(FunctionsToConstants), Box::new(LetPure), Box::new(RecordEllipsis), ] } fn items_to_module(&self, items: Vec) -> Vec { let mut modules: Vec = Vec::new(); for item in items { let module_ident = item.ident.mod_only_closest_parent(); if let Some(last_module) = modules.last_mut() && last_module.ident == module_ident { last_module.items.push(item); } else { modules.push(Module { ident: module_ident, items: vec![item], meta: Metadata { span: Span::dummy(), attributes: vec![], }, }); } } modules } fn modules_to_files(&self, modules: Vec, mut printer: Self::Printer) -> Vec { if modules.is_empty() { return vec![]; } let path = self.module_path(modules.first().unwrap()).to_string(); let contents = modules .into_iter() .map(|module: Module| { let (c, _) = printer.print(module); c }) .collect::>() .join("\n"); vec![File { path, contents: format!("{}{}", HEADER, contents), sourcemap: None, }] } } impl LeanPrinter { /// Checks if we are extracting core models to be able to use different namespeacing when /// referring to core. pub fn is_hax_core_models_extraction_mode(&self) -> bool { std::env::var("HAX_CORE_MODELS_EXTRACTION_MODE") .map(|v| v == "on") .unwrap_or(false) } /// Render a global id using the Rendering strategy of the Lean printer. Works for both concrete /// and projector ids. 
TODO: https://github.com/cryspen/hax/issues/1660 pub fn render_id(&self, id: &GlobalId) -> String { let id = if !self.is_hax_core_models_extraction_mode() && id.krate() == "core" { id.rename_krate("core_models") } else { *id }; self.render_string(&id.view()) } /// Renders the last, most local part of an id. Used for named arguments of constructors. pub fn render_last(&self, id: &GlobalId) -> String { self.render(&id.view()) .path .last() // TODO: Should be ensured by the rendering engine; see // https://github.com/cryspen/hax/issues/1660 .expect("Segments should always be non-empty") .clone() } /// Inject an identifier in before-last position while rendering /// TODO: use `DefIdInner::kind` for this instead (https://github.com/cryspen/hax/issues/1877) pub fn render_with_injection(&self, id: &GlobalId, injection: &String) -> String { let rendered = self.render(&id.view()); let (last, butlast) = rendered .path .split_last() // TODO: Should be ensured by the rendering engine; see // https://github.com/cryspen/hax/issues/1660 .expect("Segments should always be non-empty"); let path: Vec = butlast .iter() .chain(std::iter::once(injection)) .chain(std::iter::once(last)) .map(String::clone) .collect(); self.rendered_to_string(Rendered { module: rendered.module, path, }) } /// Escape a string for use in Lean string literals. /// Handles newlines, quotes, backslashes, and other special characters. 
fn escape_string(&self, s: &str) -> String { let mut result = String::with_capacity(s.len()); for c in s.chars() { match c { '"' => result.push_str("\\\""), '\'' => result.push_str("\\'"), '\\' => result.push_str("\\\\"), '\n' => result.push_str("\\n"), '\r' => result.push_str("\\r"), '\t' => result.push_str("\\t"), c if c.is_ascii_control() => { result.push_str(&format!("\\x{:02x}", c as u8)); } c => result.push(c), } } result } } /// Render parameters, adding a line after each parameter impl ToDocument for Vec { fn to_document(&self, printer: &LeanPrinter) -> DocBuilder { printer.params(self) } } #[prepend_associated_functions_with(install_pretty_helpers!(self: Self))] const _: () = { // Emits a CLI error with a github issue number, and prints "sorry" in the lean output macro_rules! emit_error {($($tt:tt)*) => {disambiguated_todo!($($tt)*)};} // Insert a new line in a doc (pretty) macro_rules! line {($($tt:tt)*) => {disambiguated_line!($($tt)*)};} // Concatenate docs (pretty ) macro_rules! concat {($($tt:tt)*) => {disambiguated_concat!($($tt)*)};} // Given an iterable `[A,B, ... , C]` and a separator `S`, create the doc `ASBS...CS` macro_rules! zip_right { ($a:expr, $sep:expr) => { docs![concat!($a.into_iter().map(|a| docs![a, $sep]))] }; } // Given an iterable `[A,B, ... , C]` and a separator `S`, create the doc `SASB...SC` macro_rules! zip_left { ($sep:expr, $a:expr) => { docs![concat!($a.into_iter().map(|a| docs![$sep, a]))] }; } // Prints a one-line comment macro_rules! comment { ($e:expr) => { docs!["-- ", $e] }; } // Extra methods, specific to the LeanPrinter impl LeanPrinter { /// Prints arguments a variant or constructor of struct, using named or unamed arguments based /// on the `is_record` flag. 
Used for both expressions and patterns pub fn arguments( &self, fields: &[(GlobalId, D)], is_record: &bool, ) -> DocBuilder where D: ToDocument, { if *is_record { self.named_arguments(fields) } else { self.positional_arguments(fields) } } /// Prints fields of structures (when in braced notation) fn struct_fields(&self, fields: &[(GlobalId, D)]) -> DocBuilder where D: ToDocument, { docs![intersperse!( fields .iter() .map(|(id, e)| { docs![self.render_last(id), reflow!(" := "), e].group() }), docs![",", line!()] )] .group() } /// Prints named arguments (record) of a variant or constructor of struct fn named_arguments(&self, fields: &[(GlobalId, D)]) -> DocBuilder where D: ToDocument, { docs![zip_left!( line!(), fields.iter().map(|(id, e)| { docs![self.render_last(id), reflow!(" := "), e] .parens() .group() }) )] .group() } /// Prints positional arguments (tuple) of a variant or constructor of struct fn positional_arguments( &self, fields: &[(GlobalId, D)], ) -> DocBuilder where D: ToDocument, { docs![zip_left!(line!(), fields.iter().map(|(_, e)| e))].group() } /// Prints parameters of functions (items, trait items, impl items) fn params(&self, params: &Vec) -> DocBuilder { zip_left!(line!(), params) } /// Print parameters as function arguments fn params_as_args(&self, params: &[Param]) -> DocBuilder { zip_left!( line!(), params.iter().map(|param| { let Ty(ty_kind) = ¶m.ty; // We need to print arguments of type `Tuple0` as `⟨⟩` instead of `_` // https://github.com/cryspen/hax/issues/1856 if let TyKind::App { head, .. } = **ty_kind && let Some(global_id::TupleId::Type { length: 0 }) = head.expect_tuple() { docs!["⟨⟩"] } else { docs![param] } }) ) } /// Renders expressions with an explicit ascription `(e : RustM ty)`. Used for the body of closure, for /// numeric literals, etc. 
fn expr_typed_result(&self, expr: &Expr) -> DocBuilder { docs![ expr, softline!(), ":", line!(), docs!["RustM", line!(), &expr.ty].group() ] .group() } fn pat_typed(&self, pat: &Pat) -> DocBuilder { docs![pat, reflow!(" :"), line!(), &pat.ty].parens().group() } fn do_block>(&self, body: D) -> DocBuilder { docs!["do", line!(), body].group() } /// Produces a name for a constraint on an trait-level constraint, or an associated /// type. The name is obtained by combining the type it applies to and the name of the /// constraint (and should be unique) fn constraint_name(&self, type_name: &String, constraint: &ImplIdent) -> String { format!("trait_constr_{}_{}", type_name, constraint.name) } /// Renders a named argument for associated types with equality constraints /// (aka projections). If there are no equality constraints, returns None. fn associated_type_projections( &self, impl_ident: &ImplIdent, projections: Vec>, ) -> Option> { (!projections.is_empty()).then_some( docs![ "(associatedTypes := {", line!(), docs![ "show", line!(), impl_ident.goal.trait_, ".AssociatedTypes", zip_left!(line!(), impl_ident.goal.args.iter()), ] .group() .nest(INDENT), line!(), reflow!("by infer_instance"), line!(), docs![ "with", line!(), intersperse!(projections, docs![",", line!()]), ] .group() .nest(INDENT), "})" ] .group() .nest(INDENT), ) } /// Turns an expression of type `RustM T` into one of type `T` (out of the monad), providing /// reflexivity as a proof witness. fn monad_extract(&self, expr: &Expr) -> DocBuilder { if let ExprKind::App { head, args, .. } = expr.kind() && let ExprKind::GlobalId(PURE) = head.kind() && let [pure_expr] = &args[..] && let ExprKind::Literal(_) | ExprKind::GlobalId(_) | ExprKind::LocalId(_) = pure_expr.kind() { // Pure values are displayed directly. 
Note that constructors, while pure, may // contain sub-expressions that are not, so they must be wrapped in a do-block docs![pure_expr] } else { // All other expressions are wrapped in a do-block, and extracted out of the monad docs![ "RustM.of_isOk", line!(), self.do_block(expr).parens(), line!(), "(by rfl)" ] .group() .nest(INDENT) } } /// Print trait items, adding trait-level params as extra arguments fn trait_item_with_trait_params( &self, trait_generics: &[GenericParam], TraitItem { meta: _, kind, generics: item_generics, ident, }: &TraitItem, ) -> DocBuilder { { let name = self.render_last(ident); let trait_generics = zip_left!( softline!(), trait_generics .iter() .map(|GenericParam { ident, .. }| docs![ident].parens()) ); docs![match kind { TraitItemKind::Fn(ty) => { docs![ name, trait_generics, self.generics(item_generics, &self.render_last(ident)), softline!(), ":", line!(), ty ] .group() .nest(INDENT) } TraitItemKind::Type(_) => { docs![name, softline!(), ":", line!(), "Type"] .group() .nest(INDENT) } TraitItemKind::Default { params, body } => docs![ docs![ name, trait_generics, self.generics(item_generics, &self.render_last(ident)), zip_left!(line!(), params).group(), softline!(), ":", if params.is_empty() { docs![body.ty, softline!(), reflow!(":=")] } else { docs!["RustM", softline!(), body.ty, softline!(), reflow!(":= do")] .group() } ] .group(), line!(), if params.is_empty() { self.monad_extract(body) } else { docs![body] }, ] .group() .nest(INDENT), TraitItemKind::Resugared(_) => { unreachable!("This backend has no resugaring for trait items") } TraitItemKind::Error(e) => docs![e], }] } } // Print generics, using `name` as a prefix for constraint names fn generics( &self, generics: &Generics, name: &String, ) -> DocBuilder { docs![ zip_left!(line!(), &generics.params), zip_left!( line!(), generics.type_class_constraints().map(|impl_ident| { let projections = generics .equality_constraints() .filter(|p| !matches!(&*p.impl_.kind, 
ImplExprKind::LocalBound { id } if *id != impl_ident.name )) .map(|p| { if let ImplExprKind::LocalBound { .. } = &*p.impl_.kind { docs![p] } else if let ImplExprKind::Parent { .. } = &*p.impl_.kind { emit_error!(issue 1923, "Unsupported equality constraints on associated types of parent trait") } else { emit_error!(issue 1924, "Unsupported variant of associated type projection") } }) .collect::>(); docs![ docs![ self.constraint_name(&format!("{}_associated_type", name), impl_ident), reflow!(" : "), impl_ident.goal.trait_, ".AssociatedTypes", concat!( impl_ident.goal.args.iter().map(|arg| docs![line!(), arg]) ) ] .brackets() .group() .nest(INDENT), line!(), docs![ self.constraint_name(name, impl_ident), reflow!(" : "), impl_ident.goal.trait_, concat!( impl_ident.goal.args.iter().map(|arg| docs![line!(), arg]) ), line!(), self.associated_type_projections(impl_ident, projections) ] .brackets() .nest(INDENT) .group() ] .group() }) ), ] .group() } /// Print spec of an item fn spec( &self, item: &Item, name: &GlobalId, generics: &Generics, params: &Vec, ) -> DocBuilder { let linked_items = HasLinkedItemGraph::linked_item_graph(self); let spec = linked_items.fn_like_linked_expressions(item, item.self_id()); if !linked_items.has_spec(item) { nil!() } else { match hax_proof_attributes(item) { Err(message) => emit_error!("{message}"), Ok(proof_attributes) => { let (tactic, specset) = match proof_attributes.proof_method { Some(ProofMethod::Grind) => ("grind", "int"), Some(ProofMethod::BvDecide) | None => ("bv_decide", "bv"), }; let pure_requires_proof = proof_attributes .pure_requires_proof .unwrap_or(format!("by hax_construct_pure <;> {tactic}")); let pure_ensures_proof = proof_attributes .pure_ensures_proof .unwrap_or(format!("by hax_construct_pure <;> {tactic}")); let proof = proof_attributes.proof.map(|s| docs![s]).unwrap_or(docs![ "by hax_mvcgen [", name, "] <;> ", tactic ]); { docs![ hardline!(), hardline!(), docs!["set_option hax_mvcgen.specset \"", specset, "\" in"], 
hardline!(), "@[hax_spec]", hardline!(), docs![ docs![ "def", line!(), name, ".spec", self.generics(generics, &self.render_last(name)), params, softline!(), ":" ] .group() .nest(INDENT), line!(), docs![ "Spec", line!(), docs![ "requires", softline!(), ":= do", line!(), spec.precondition .map_or(reflow!("pure True"), |p| docs![p]) ] .parens() .group() .nest(INDENT), line!(), docs![ "ensures := ", spec.postcondition.map_or( reflow!("fun _ => pure True"), |p| docs![ "fun", line!(), p.result_binder, softline!(), "=> do", line!(), p.body, ] .group() .nest(INDENT) ), ] .parens() .group() .nest(INDENT), line!(), docs![ name, zip_left!(line!(), &generics.params), self.params_as_args(params) ] .parens() .group() .nest(INDENT) ] .group() .nest(INDENT), softline!(), ":=", ] .group() .nest(2 * INDENT), softline!(), docs![ hardline!(), docs!["pureRequires :=", softline!(), pure_requires_proof], hardline!(), docs!["pureEnsures :=", softline!(), pure_ensures_proof], hardline!(), docs!["contract :=", softline!(), proof] .group() .nest(INDENT), hardline!(), ] .nest(INDENT) .braces(), ] } } } } } } impl ToDocument for (Vec, &TraitItem) { fn to_document(&self, printer: &LeanPrinter) -> DocBuilder { printer.trait_item_with_trait_params(&self.0, self.1) } } impl PrettyAst for LeanPrinter { const NAME: &'static str = "Lean"; /// Produce a non-panicking placeholder document. In general, prefer the use of the helper macro [`todo_document!`]. 
fn todo_document(&self, message: &str, issue_id: Option) -> DocBuilder { >::emit_diagnostic( self, hax_types::diagnostics::Kind::Unimplemented { issue_id, details: Some(message.into()), }, ); text!("sorry") } fn module(&self, module: &Module) -> DocBuilder { let current_namespace = module.ident; let new_printer = LeanPrinter { current_namespace: Some(current_namespace), ..self.clone() }; let items = &module.items; docs![ "namespace ", current_namespace, hardline!(), hardline!(), intersperse!( items.iter().map(|item| { item.to_document(&new_printer) }), docs![hardline!(), hardline!()] ), hardline!(), hardline!(), "end ", current_namespace, hardline!(), hardline!(), ] } fn global_id(&self, global_id: &GlobalId) -> DocBuilder { docs![self.render_id(global_id)] } fn generics(&self, generics: &Generics) -> DocBuilder { self.generics(generics, &String::new()) } fn generic_constraint(&self, _: &GenericConstraint) -> DocBuilder { unreachable!( "Generic constraints are rendered inline because they must contain associated type projections." 
) } fn generic_param(&self, generic_param: &GenericParam) -> DocBuilder { match generic_param.kind() { GenericParamKind::Type => docs![&generic_param.ident, reflow!(" : Type")] .parens() .group(), GenericParamKind::Lifetime => unreachable_by_invariant!(Drop_references), GenericParamKind::Const { ty } => docs![&generic_param.ident, reflow!(" : "), ty] .parens() .group(), } } fn generic_value(&self, generic_value: &GenericValue) -> DocBuilder { match generic_value { GenericValue::Ty(ty) => docs![ty], GenericValue::Expr(expr) => docs![expr].parens(), GenericValue::Lifetime => unreachable_by_invariant!(Drop_references), } } fn expr(&self, Expr { kind, ty, meta: _ }: &Expr) -> DocBuilder { match &**kind { ExprKind::If { condition, then, else_, } => { if let Some(else_branch) = else_ { docs![ docs!["if", line!(), condition, reflow!(" then do")].group(), docs![line!(), then].nest(INDENT), line!(), reflow!("else do"), docs![line!(), else_branch].nest(INDENT) ] .group() } else { unreachable_by_invariant!(Local_mutation) } } ExprKind::App { head, args, generic_args, bounds_impls: _, trait_, } => { match (&args[..], &generic_args[..], head.kind()) { ([arg], [], ExprKind::GlobalId(LIFT)) => docs![reflow!("← "), arg].parens(), ([arg], [], ExprKind::GlobalId(PURE)) => { docs![reflow!("pure "), arg].parens() } ([arg], [], ExprKind::GlobalId(CAST_OP)) => docs![ // Add type annotation for `cast_op`: docs![head, line!(), arg], softline!(), ":", line!(), "RustM", softline!(), ty ] .parens() .group() .nest(INDENT), // TODO: Replace this match pattern with an `if let` guard when the feature stabilizes // Tracking PR: https://github.com/rust-lang/rust/pull/141295 ( [arg], [], ExprKind::GlobalId(op @ (binops::neg | binops::not | binops::Not::not)), ) if arg.ty == Ty::bool() || arg.ty.is_int() => { let symbol = match *op { binops::neg => "-?", binops::not => "~?", binops::Not::not => "!?", _ => unreachable!(), }; docs![symbol, softline!(), arg].parens() } ([lhs, rhs], [], 
ExprKind::GlobalId(binops::Index::index)) => {
    // Indexing `lhs[rhs]`: rendered with the hax-lean panicking index
    // notation `x[i]_?` (may fail at runtime, hence the `?` suffix).
    docs![lhs, "[", line_!(), rhs, line_!(), "]_?"]
        .nest(INDENT)
        .group()
}
// TODO: Replace this match pattern with an `if let` guard when the feature stabilizes
// Tracking PR: https://github.com/rust-lang/rust/pull/141295
(
    [lhs, rhs],
    [],
    ExprKind::GlobalId(
        op @ (binops::add
        | binops::sub
        | binops::mul
        | binops::div
        | binops::rem
        | binops::shr
        | binops::shl
        | binops::bitand
        | binops::BitAnd::bitand
        | binops::bitor
        | binops::BitOr::bitor
        | binops::bitxor
        | binops::BitXor::bitxor
        | binops::logical_op_and
        | binops::logical_op_or
        | binops::eq
        | binops::PartialEq::eq
        | binops::lt
        | binops::le
        | binops::gt
        | binops::ge
        | binops::ne
        | binops::PartialEq::ne),
    ),
) if (lhs.ty == Ty::bool() && rhs.ty == Ty::bool())
    || (rhs.ty.is_int() && lhs.ty.is_int()) =>
{
    // Map each Rust binary operator onto its hax-lean "panicking"
    // counterpart (suffixed with `?`). Bitwise operators use Lean's
    // triple-symbol spellings (`&&&`, `|||`, `^^^`, `<<<`, `>>>`);
    // the boolean `logical_op_*` / `BitAnd`-on-`Bool` forms use the
    // plain `&&` / `||` spellings.
    let symbol = match *op {
        binops::add => "+?",
        binops::sub => "-?",
        binops::mul => "*?",
        binops::div => "/?",
        binops::rem => "%?",
        binops::shr => ">>>?",
        binops::shl => "<<<?",
        // Bitwise `&` on integers vs. `&` on `Bool`:
        binops::bitand => "&&&?",
        binops::BitAnd::bitand => "&&?",
        binops::bitor => "|||?",
        binops::BitOr::bitor => "||?",
        binops::bitxor => "^^^?",
        binops::BitXor::bitxor => "^^?",
        binops::logical_op_and => "&&?",
        binops::logical_op_or => "||?",
        binops::eq => "==?",
        binops::PartialEq::eq => "==?",
        binops::lt => "<?",
        binops::le => "<=?",
        binops::gt => ">?",
        binops::ge => ">=?",
        binops::ne => "!=?",
        binops::PartialEq::ne => "!=?",
        // The guard pattern above is exhaustive over the ops we match.
        _ => unreachable!(),
    };
    docs![lhs, line!(), docs![symbol, softline!(), rhs].group()]
        .group()
        .nest(INDENT)
        .parens()
}
_ => {
    // Fallback for any application
    docs![
        head,
        trait_
            .as_ref()
            .map(|(impl_expr, _)| zip_left!(line!(), &impl_expr.goal.args)),
        zip_left!(line!(), generic_args).group(),
        zip_left!(line!(), args).group(),
    ]
    .parens()
    .nest(INDENT)
    .group()
}
}
}
ExprKind::Literal(numeric_lit @ (Literal::Float { .. } | Literal::Int { ..
})) => { docs![numeric_lit, reflow!(" : "), ty].parens().group() } ExprKind::Literal(literal) => docs![literal], ExprKind::Array(exprs) => docs![ "RustArray.ofVec #v[", intersperse!(exprs, docs![",", line!()]) .nest(INDENT) .group() .align(), "]" ] .parens() .group(), ExprKind::Construct { constructor, is_record, is_struct, fields, base, } => { if fields.is_empty() && base.is_none() { docs![constructor] } else if let Some(base) = base { if !(*is_record && *is_struct) { unreachable!( "Constructors with base expressions are necessarily structs with record-like arguments" ) } docs![base, line!(), reflow!("with "), self.struct_fields(fields)] .braces() .group() } else { docs![constructor, self.arguments(fields, is_record)] .nest(INDENT) .parens() .group() } } ExprKind::Let { lhs, rhs, body } | ExprKind::Resugared(ResugaredExprKind::LetPure { lhs, rhs, body }) => { let binder = if matches!(**kind, ExprKind::Let { .. }) { " ←" } else { " :=" }; docs![ docs![ docs![ "let", line!(), // TODO: Improve treatment of patterns in general. see // https://github.com/cryspen/hax/issues/1712 match *lhs.kind.clone() { PatKind::Ascription { .. } => docs![lhs, reflow!(" : "), &lhs.ty], PatKind::Binding { mutable: false, var, mode: BindingMode::ByValue, sub_pat: None, } => docs![&var, reflow!(" : "), &lhs.ty], _ => docs![lhs], }, ] .group(), binder, line!(), rhs, ";" ] .nest(INDENT) .group(), line!(), body, ] } ExprKind::GlobalId(global_id) => docs![global_id], ExprKind::LocalId(local_id) => docs![local_id], ExprKind::Ascription { e, ty } => docs![e, reflow!(" : "), ty].parens().group(), ExprKind::Closure { params, body, captures: _, } => docs![ docs![ reflow!("fun"), zip_left!(line!(), params), softline!(), "=>" ] .group(), line!(), self.do_block(self.expr_typed_result(body)).parens() ] .parens() .group() .nest(INDENT), ExprKind::Resugared(ResugaredExprKind::Tuple { .. 
}) => { unreachable!("This printer doesn't use the tuple resugaring") } ExprKind::Match { scrutinee, arms } => docs![ docs![ "match", docs![line!(), scrutinee].nest(INDENT), line!(), "with" ] .group(), docs![line!(), intersperse!(arms, line!())] .group() .nest(INDENT), ] .group(), ExprKind::Borrow { .. } => { unreachable_by_invariant!(Drop_references) } ExprKind::AddressOf { .. } => unreachable_by_invariant!(Reject_raw_or_mut_pointer), ExprKind::Assign { .. } => unreachable_by_invariant!(Local_mutation), ExprKind::Loop { .. } => unreachable_by_invariant!(Functionalize_loops), ExprKind::Break { .. } | ExprKind::Return { .. } | ExprKind::Continue { .. } => { unreachable_by_invariant!(Drop_break_continue_return) } ExprKind::Block { .. } => unreachable_by_invariant!(Drop_blocks), ExprKind::Quote { contents } => docs![contents], ExprKind::Error(error_node) => docs![error_node], } } fn arm(&self, arm: &Arm) -> DocBuilder { if let Some(_guard) = &arm.guard { unreachable_by_invariant!(Drop_match_guards) } else { docs![ reflow!("| "), &arm.pat, softline!(), "=>", softline!(), "do", line!(), &arm.body ] .nest(INDENT) .group() } } fn pat(&self, pat: &Pat) -> DocBuilder { match &*pat.kind { PatKind::Wild => docs!["_"], PatKind::Ascription { pat, ty: _ } => docs![pat], PatKind::Binding { mutable, var, mode, sub_pat, } => match (mutable, mode, sub_pat) { (true, _, _) => unreachable_by_invariant!(Local_mutation), (false, BindingMode::ByRef(_), _) => unreachable_by_invariant!(Drop_references), (false, BindingMode::ByValue, None) => docs![var], (false, BindingMode::ByValue, Some(pat)) => { docs![var, "@", softline_!(), pat].group() } }, PatKind::Or { sub_pats } => docs![intersperse!(sub_pats, reflow!(" | "))].group(), PatKind::Array { .. } => { emit_error!(issue 1712, "Unsupported pattern-matching on arrays") } PatKind::Deref { .. } => unreachable_by_invariant!(Drop_references), PatKind::Constant { lit: Literal::Float { .. 
}, } => { emit_error!(issue 1788, "Unsupported pattern-matching on floats") } PatKind::Constant { lit } => docs![lit], PatKind::Construct { constructor, is_record, is_struct, fields, } => { if *is_struct { if !*is_record { // Tuple-like structure, using positional arguments docs![ "⟨", intersperse!( fields.iter().map(|field| { docs![&field.1] }), docs![",", line!()] ) .align() .group(), "⟩" ] .align() .group() } else { // Record-like structure, using named arguments docs![intersperse!( fields.iter().map(|(id, pat)| { docs![self.render_last(id), reflow!(" :="), line!(), pat] .group() }), docs![",", line!()] )] .align() .braces() .group() } } else { // Variant docs![ constructor, line!(), self.arguments(fields, is_record).align() ] .parens() .group() .nest(INDENT) } } PatKind::Resugared(ResugaredPatKind::ConstructWithEllipsis { constructor, is_struct, fields, }) => { if *is_struct { // Struct: render as `{f1 := pat, f2 := pat, ..}` or `_` if fields.is_empty() { docs!["_"] } else { docs![intersperse!( fields .iter() .map(|(id, pat)| { docs![self.render_last(id), reflow!(" :="), line!(), pat] .group() }) .chain(std::iter::once(docs![".."])), docs![",", line!()] )] .align() .braces() .group() } } else { // Enum variant with named fields: (f1 := pat) (f2 := pat) .. 
                    let record_part = if fields.is_empty() {
                        docs!["_"]
                    } else {
                        docs![intersperse!(
                            fields.iter().map(|(id, pat)| {
                                docs![self.render_last(id), reflow!(" :="), line!(), pat]
                                    .group()
                                    .parens()
                            }),
                            line!()
                        )]
                        .align()
                        .group()
                    };
                    docs![constructor, line!(), record_part, " .."]
                        .parens()
                        .group()
                        .nest(INDENT)
                }
            }
            PatKind::Error(_) => {
                // TODO : Should be made unreachable by https://github.com/cryspen/hax/pull/1672
                text!("sorry")
            }
        }
    }

    /// Prints a type.
    fn ty(&self, ty: &Ty) -> DocBuilder {
        match ty.kind() {
            TyKind::Primitive(primitive_ty) => docs![primitive_ty],
            TyKind::App { head, args } => {
                if args.is_empty() {
                    docs![head]
                } else {
                    docs![head, zip_left!(line!(), args)]
                        .parens()
                        .group()
                        .nest(INDENT)
                }
            }
            // Function types: the output lives in the `RustM` monad.
            TyKind::Arrow { inputs, output } => docs![
                zip_right!(inputs, docs![softline!(), "->", line!()]),
                "RustM",
                softline!(),
                output
            ]
            .parens()
            .group(),
            TyKind::Param(local_id) => docs![local_id],
            TyKind::Slice(ty) => docs!["RustSlice", line!(), ty].parens().group(),
            TyKind::Array { ty, length } => docs!["RustArray", line!(), ty, line!(), {
                // Array lengths must be either an integer literal or a const
                // generic parameter; anything else is rejected.
                if let ExprKind::Literal(int_lit @ Literal::Int { .. }) = length.kind() {
                    docs![int_lit]
                } else if let ExprKind::LocalId(local_id) = length.kind() {
                    docs![local_id]
                } else {
                    unreachable!(
                        "Only arrays with integer literal or const param size are supported"
                    )
                }
            }]
            .parens()
            .group(),
            TyKind::AssociatedType { impl_, item } => {
                let kind = impl_.kind();
                match &kind {
                    ImplExprKind::Self_ => docs!["associatedTypes.", self.render_last(item)],
                    ImplExprKind::Parent { ident, .. }
                    | ImplExprKind::Projection { ident, .. } => {
                        docs![item, zip_left!(line!(), ident.goal.args.iter())]
                            .parens()
                            .group()
                            .nest(INDENT)
                    }
                    ImplExprKind::LocalBound { .. } => {
                        docs![item, zip_left!(line!(), impl_.goal.args.iter())]
                            .parens()
                            .group()
                            .nest(INDENT)
                    }
                    _ => {
                        emit_error!(issue 1922, "Unsupported variant of associated type")
                    }
                }
            }
            TyKind::Ref { ..
            } => unreachable_by_invariant!(Drop_references),
            TyKind::RawPointer => unreachable_by_invariant!(Reject_raw_or_mut_pointer),
            TyKind::Opaque(_) => emit_error!(issue 1714, "Unsupported opaque type definitions"),
            TyKind::Dyn(_) => emit_error!(issue 1708, "Unsupported `dyn` traits"),
            TyKind::Resugared(resugared_ty_kind) => match resugared_ty_kind {
                ResugaredTyKind::Tuple(_) => {
                    unreachable!("This backend does not use tuple resugaring (yet)")
                }
            },
            TyKind::Error(e) => docs![e],
        }
    }

    /// Prints a literal; strings are escaped and negative numbers get a
    /// leading `-`.
    fn literal(&self, literal: &Literal) -> DocBuilder {
        docs![match literal {
            Literal::String(symbol) => format!("\"{}\"", self.escape_string(symbol)),
            Literal::Char(c) => format!("'{c}'"),
            Literal::Bool(b) => format!("{b}"),
            Literal::Int {
                value,
                negative,
                kind: _,
            } => format!("{}{value}", if *negative { "-" } else { "" }),
            Literal::Float {
                value,
                negative,
                kind: _,
            } => format!("{}{value}", if *negative { "-" } else { "" }),
        }]
    }

    fn local_id(&self, local_id: &LocalId) -> DocBuilder {
        // TODO: should be done by name rendering, see https://github.com/cryspen/hax/issues/1630
        docs![Self::escape(&local_id.0)]
    }

    /// The span is irrelevant for printing: delegate to the inner type.
    fn spanned_ty(&self, spanned_ty: &SpannedTy) -> DocBuilder {
        docs![&spanned_ty.ty]
    }

    /// Prints a primitive type with its Lean spelling.
    fn primitive_ty(&self, primitive_ty: &PrimitiveTy) -> DocBuilder {
        match primitive_ty {
            PrimitiveTy::Bool => docs!["Bool"],
            PrimitiveTy::Int(int_kind) => docs![int_kind],
            PrimitiveTy::Float(float_kind) => docs![float_kind],
            PrimitiveTy::Char => docs!["Char"],
            PrimitiveTy::Str => docs!["String"],
        }
    }

    /// Prints an integer type using its Rust spelling (e.g. `u32`).
    fn int_kind(&self, int_kind: &IntKind) -> DocBuilder {
        docs![match (&int_kind.signedness, &int_kind.size) {
            (Signedness::Signed, IntSize::S8) => "i8",
            (Signedness::Signed, IntSize::S16) => "i16",
            (Signedness::Signed, IntSize::S32) => "i32",
            (Signedness::Signed, IntSize::S64) => "i64",
            (Signedness::Signed, IntSize::S128) => "i128",
            (Signedness::Signed, IntSize::SSize) => "isize",
            (Signedness::Unsigned, IntSize::S8) => "u8",
            (Signedness::Unsigned, IntSize::S16) => "u16",
            (Signedness::Unsigned, IntSize::S32) =>
"u32", (Signedness::Unsigned, IntSize::S64) => "u64", (Signedness::Unsigned, IntSize::S128) => "u128", (Signedness::Unsigned, IntSize::SSize) => "usize", }] } fn float_kind(&self, float_kind: &FloatKind) -> DocBuilder { docs![match float_kind { FloatKind::F32 => "f32", FloatKind::F64 => "f64", _ => emit_error!(issue 1787, "The only supported float types are `f32` and `f64`."), }] } fn quote_content(&self, quote_content: &QuoteContent) -> DocBuilder { match quote_content { QuoteContent::Verbatim(s) => { intersperse!(s.lines().map(|x| x.to_string()), hardline!()) } QuoteContent::Expr(expr) => docs![expr], QuoteContent::Pattern(pat) => docs![pat], QuoteContent::Ty(ty) => docs![ty], } } fn quote(&self, quote: &Quote) -> DocBuilder { concat!["e.0] } fn param(&self, param: &Param) -> DocBuilder { if matches!( *param.pat.kind, PatKind::Wild | PatKind::Ascription { .. } | PatKind::Binding { sub_pat: None, .. } ) { self.pat_typed(¶m.pat) } else { emit_error!(issue 1791, "Function parameters must not contain patterns") } } fn item(&self, item @ Item { ident, kind, meta }: &Item) -> DocBuilder { let body = match kind { ItemKind::Fn { name, generics, body, params, safety: _, } => { let opaque = item.is_opaque(); let linked_items = HasLinkedItemGraph::linked_item_graph(self); docs![ if opaque || linked_items.has_spec(item) { nil!() } else { // Function should be unfolded by `mvcgen` docs!["@[spec]", hardline!()] }, docs![ docs![ docs![ docs![if opaque { "opaque" } else { "def" }, line!(), name] .group(), self.generics(generics, &self.render_last(name)), params, softline!(), ":" ] .group(), line!(), docs![ "RustM", line!(), &body.ty, if opaque { nil!() } else { docs![line!(), ":= do"] } ] .group(), ] .group() .nest(INDENT), if opaque { nil!() } else { docs![line!(), body] } ] .group() .nest(INDENT), if opaque { nil!() } else { docs![&self.spec(item, name, generics, params)] } ] } ItemKind::TyAlias { name, generics, ty } => docs![ "abbrev ", name, self.generics(generics, 
                &self.render_last(name)),
                softline!(),
                ":",
                line!(),
                "Type",
                softline!(),
                ":=",
                line!(),
                ty
            ]
            .nest(INDENT)
            .group(),
            // Modules and `use` items have no Lean counterpart.
            ItemKind::RustModule | ItemKind::Use { .. } => nil!(),
            ItemKind::Quote { quote, origin: _ } => docs![quote],
            ItemKind::NotImplementedYet => {
                emit_error!(issue 1706, "Item unsupported by the Hax engine (unimplemented yet)")
            }
            ItemKind::Type {
                name,
                generics,
                variants,
                is_struct,
            } => {
                if item.is_opaque() {
                    // Opaque types become `opaque Name … : Type`.
                    docs![
                        reflow!("opaque "),
                        name,
                        self.generics(generics, &self.render_last(name)),
                        softline!(),
                        ":",
                        line!(),
                        "Type"
                    ]
                    .group()
                    .nest(INDENT)
                }
                // TODO: use a resugaring, see https://github.com/cryspen/hax/issues/1668
                else if *is_struct {
                    // Structures
                    let Some(variant) = variants.first() else {
                        unreachable!(
                            "Structures should always have a constructor (even empty ones)"
                        )
                    };
                    let args = if variant.arguments.is_empty() {
                        comment!["no fields"]
                    } else if !variant.is_record {
                        // Tuple-like structure, using positional arguments
                        intersperse!(
                            variant.arguments.iter().enumerate().map(|(i, (_, ty, _))| {
                                docs![format!("_{i} :"), line!(), ty].group().nest(INDENT)
                            }),
                            hardline!()
                        )
                    } else {
                        // Structure-like structure, using named arguments
                        intersperse!(
                            variant.arguments.iter().map(|(id, ty, _)| {
                                docs![self.render_last(id), reflow!(" : "), ty]
                                    .group()
                                    .nest(INDENT)
                            }),
                            hardline!()
                        )
                    };
                    docs![
                        docs![
                            reflow!("structure "),
                            name,
                            self.generics(generics, &self.render_last(name)),
                            line!(),
                            "where"
                        ]
                        .group(),
                        docs![hardline!(), args],
                    ]
                    .nest(INDENT)
                    .group()
                } else {
                    // Enums
                    // Each constructor must mention the fully applied type.
                    let applied_name: DocBuilder =
                        if generics.params.is_empty() && generics.constraints.is_empty() {
                            docs![name]
                        } else {
                            docs![name, self.generics(generics, &self.render_last(name))].group()
                        };
                    docs![
                        docs![
                            "inductive ",
                            name,
                            self.generics(generics, &self.render_last(name)),
                            softline!(),
                            ":",
                            line!(),
                            "Type"
                        ]
                        .group(),
                        hardline!(),
                        intersperse!(
                            variants.iter().map(|variant| docs![
                                "| ",
                                variant,
                                applied_name.clone()
                            ]
                            .group()
                            .nest(INDENT)),
                            hardline!()
                        ),
                    ]
                }
            }
            ItemKind::Trait {
name, generics, items, safety: _, } => { let generic_types = generics.type_class_constraints().collect::>(); if generic_types.len() < generics.constraints.len() { emit_error!(issue 1921, "Unsupported equality constraints on associated types") } docs![ // A trait is encoded as two Lean type classes: one holding the associated types, // and one holding all other fields. // This is the type class holding the associated types: docs![ docs![ docs![reflow!("class "), name, ".AssociatedTypes"], zip_left!(line!(), &generics.params).group(), line!(), "where" ] .group(), zip_left!( hardline!(), generic_types.iter().map(|impl_ident| docs![ self.constraint_name(&self.render_last(name), impl_ident), " :", line!(), &impl_ident.goal.trait_, ".AssociatedTypes", line!(), intersperse!(&impl_ident.goal.args, line!()) ] .group() .brackets()) ), zip_left!( hardline!(), items .iter() .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) }) .map(|item| docs![(generics.params.clone(), item)]) ), ] .nest(INDENT), // We add the `[instance]` attribute to the contained constraints to make // them available for type inference: zip_left!( docs![hardline!(), hardline!()], generic_types.iter().map(|impl_ident| docs![ "attribute [instance_reducible, instance]", line!(), name, ".AssociatedTypes.", self.constraint_name(&self.render_last(name), impl_ident), ] .group() .nest(INDENT)) ), zip_left!( docs![hardline!(), hardline!()], items .iter() .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) }) .map(|item| docs![ "attribute [reducible]", line!(), self.render_with_injection( &item.ident, &"AssociatedTypes".to_string() ) ] .group() .nest(INDENT)) ), // When referencing associated types, we would like to refer to them as // `TraitName.TypeName` instead of `TraitName.AssociatedTypes.TypeName`: zip_left!( docs![hardline!(), hardline!()], items .iter() .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) }) .map(|item| { docs![ "abbrev ", name, ".", self.render_last(&item.ident), 
" :=", line!(), name, ".AssociatedTypes", ".", self.render_last(&item.ident), ] .nest(INDENT) }) ), hardline!(), hardline!(), // This is the type class holding all other fields: docs![ docs![ docs![reflow!("class "), name], docs![ // Type parameters are also parameters of the class, but constraints are fields of the class docs![zip_left!(line!(), &generics.params)].group(), line!(), // The collection of associated types is an extra parameter so that we can encode // equality constraints on associated types. docs![ reflow!("associatedTypes :"), softline!(), "outParam", softline!(), docs![ name, ".AssociatedTypes", softline!(), intersperse!(&generics.params, softline!()), ] .parens() .nest(INDENT) ] .brackets() .nest(INDENT) ] .group(), line!(), "where" ] .group(), // Lean's `extends` does not work for us because one cannot implement // different functions of the same name on the super- and on the // subclass. So we treat supertraits like any other constraint: zip_left!( hardline!(), generic_types.iter().map(|impl_ident| docs![ self.constraint_name(&self.render_last(name), impl_ident), softline!(), ":", line!(), impl_ident.goal.trait_, zip_left!(line!(), impl_ident.goal.args.iter()) ] .group() .brackets()) ), // We also add constraints on associated types here: concat!( items .iter() .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) }) .map(|item| docs![ self.generics( &item.generics, &self.render_last(&item.ident) ) ]) ), // Finally the regular trait items: zip_left!( hardline!(), items.iter().filter(|item| {!( // TODO: should be treated directly by name rendering, see : // https://github.com/cryspen/hax/issues/1646 item.ident.is_precondition() || item.ident.is_postcondition() || // Associated types are encoded in a separate type class. 
                                    || matches!(item.kind, TraitItemKind::Type(_))
                            )}).map(|item| docs![(generics.params.clone(), item)]
                            )
                        ),
                    ]
                    .nest(INDENT),
                    // We add the `[instance]` attribute to the contained constraints to make
                    // them available for type inference:
                    zip_left!(
                        docs![hardline!(), hardline!()],
                        generic_types.iter().map(|impl_ident| docs![
                            "attribute [instance_reducible, instance]",
                            line!(),
                            name,
                            ".",
                            self.constraint_name(&self.render_last(name), impl_ident),
                        ]
                        .group()
                        .nest(INDENT))
                    ),
                ]
            }
            ItemKind::Impl {
                generics,
                self_ty: _,
                of_trait: (trait_, args),
                items,
                parent_bounds: _,
            } => {
                let opaque = item.is_opaque();
                docs![
                    // An impl is encoded as two Lean instances:
                    // One for the associated types...
                    docs![
                        docs![
                            if opaque {
                                reflow!("@[instance] opaque ")
                            } else {
                                reflow!("@[reducible] instance ")
                            },
                            ident,
                            ".AssociatedTypes",
                            self.generics(generics, &self.render_last(ident)),
                            softline!(),
                            ":"
                        ]
                        .group(),
                        line!(),
                        docs![trait_, ".AssociatedTypes", zip_left!(line!(), args)].group(),
                        // Opaque impls get a default body instead of fields.
                        if opaque {
                            docs![
                                softline!(),
                                ":=",
                                line!(),
                                reflow!("by constructor <;> exact Inhabited.default")
                            ]
                        } else {
                            docs![line!(), "where"]
                        },
                    ]
                    .group()
                    .nest(INDENT),
                    if opaque {
                        nil!()
                    } else {
                        docs![zip_left!(
                            hardline!(),
                            items.iter().filter(|item| {
                                matches!(item.kind, ImplItemKind::Type { ..
                                })
                            })
                        )]
                        .nest(INDENT)
                    },
                    hardline!(),
                    hardline!(),
                    // ...and one for all other fields:
                    docs![
                        docs![
                            if opaque {
                                reflow!("@[instance] opaque ")
                            } else {
                                reflow!("instance ")
                            },
                            ident,
                            self.generics(generics, &self.render_last(ident)),
                            softline!(),
                            ":"
                        ]
                        .group(),
                        line!(),
                        docs![trait_, zip_left!(line!(), args.iter())].group(),
                        if opaque {
                            docs![
                                softline!(),
                                ":=",
                                line!(),
                                reflow!("by constructor <;> exact Inhabited.default")
                            ]
                        } else {
                            docs![line!(), "where"]
                        },
                    ]
                    .group()
                    .nest(INDENT),
                    if opaque {
                        nil!()
                    } else {
                        docs![zip_left!(
                            hardline!(),
                            items.iter().filter(|item| {
                                !(
                                    // TODO: should be treated directly by name rendering, see :
                                    // https://github.com/cryspen/hax/issues/1646
                                    item.ident.is_precondition()
                                        || item.ident.is_postcondition()
                                        // Associated types are encoded into a separate type class
                                        || matches!(item.kind, ImplItemKind::Type { .. })
                                )
                            })
                        )]
                        .nest(INDENT)
                    },
                ]
            }
            ItemKind::Resugared(resugared_item_kind) => match resugared_item_kind {
                // Constants become plain `def`s whose body is extracted out of
                // the monad.
                ResugaredItemKind::Constant {
                    name,
                    body,
                    generics,
                } => docs![
                    docs![
                        docs![
                            docs!["def", line!(), name].group(),
                            self.generics(generics, &self.render_last(ident)),
                            softline!(),
                            ":",
                        ]
                        .group(),
                        line!(),
                        &body.ty,
                        line!(),
                        ":="
                    ]
                    .group(),
                    line!(),
                    self.monad_extract(body),
                ]
                .group()
                .nest(INDENT),
                ResugaredItemKind::RecursiveFn {
                    name,
                    generics,
                    body,
                    params,
                    safety,
                } => {
                    // Render the item with an appended `partial_fixpoint`:
                    let item = Item {
                        ident: item.ident,
                        kind: ItemKind::Fn {
                            name: *name,
                            generics: generics.clone(),
                            body: body.clone(),
                            params: params.clone(),
                            safety: safety.clone(),
                        },
                        meta: item.meta.clone(),
                    };
                    return docs![item, hardline!(), "partial_fixpoint"];
                }
            },
            ItemKind::Alias { .. } => {
                // aliases are introduced when creating bundles. Those should not appear in
                // Lean, as items can be named correctly in any file.
                emit_error!(issue 1658, "Unsupported alias item")
            }
            ItemKind::Error(e) => docs![e],
        };
        // Prepend the item's attributes (doc comments etc.) to its body.
        docs![meta, body]
    }

    /// Prints an item belonging to an `instance` body.
    fn impl_item(
        &self,
        ImplItem {
            meta: _,
            generics,
            kind,
            ident,
        }: &ImplItem,
    ) -> DocBuilder {
        let name = self.render_last(ident);
        match kind {
            // Associated type bindings: `Name := ty`.
            ImplItemKind::Type {
                ty,
                parent_bounds: _,
            } => docs![name, reflow!(" := "), ty],
            // Methods: `name := fun generics params => do body`.
            ImplItemKind::Fn { body, params } => docs![
                docs![
                    name,
                    softline!(),
                    ":=",
                    line!(),
                    docs![
                        "fun",
                        self.generics(generics, &self.render_last(ident)),
                        zip_left!(line!(), params).group(),
                        softline!(),
                        "=>",
                        softline!(),
                        "do"
                    ]
                    .group()
                    .nest(INDENT)
                ]
                .group(),
                line!(),
                body
            ]
            .group()
            .nest(INDENT),
            ImplItemKind::Resugared(ResugaredImplItemKind::Constant { body }) => {
                docs![
                    name,
                    softline!(),
                    ":=",
                    softline!(),
                    self.monad_extract(body)
                ]
            }
            ImplItemKind::Error(err) => docs!(err),
        }
    }

    fn impl_ident(&self, ImplIdent { .. }: &ImplIdent) -> DocBuilder {
        unreachable!(
            "`ImplIdent`s are rendered inline because we have multiple variants of how they must be rendered."
        )
    }

    fn trait_goal(&self, TraitGoal { .. }: &TraitGoal) -> DocBuilder {
        unreachable!(
            "`TraitGoal`s are rendered inline because we have multiple variants of how they must be rendered."
        )
    }

    /// Prints one enum/struct constructor, as `Name (f : T) … :` for records
    /// or `Name : T1 -> T2 -> …` for positional arguments.
    fn variant(
        &self,
        Variant {
            name,
            arguments,
            is_record,
            attributes,
        }: &Variant,
    ) -> DocBuilder {
        docs![
            concat!(attributes),
            self.render_last(name),
            softline!(),
            // args
            if *is_record {
                // Use the named arguments, keeping only the head of the identifier
                docs![
                    intersperse!(
                        arguments.iter().map(|(id, ty, _)| {
                            docs![self.render_last(id), reflow!(" : "), ty]
                                .parens()
                                .group()
                        }),
                        line!()
                    )
                    .align()
                    .nest(INDENT),
                    line!(),
                    reflow!(": "),
                ]
                .group()
            } else {
                // Use anonymous arguments
                docs![
                    reflow!(": "),
                    concat!(
                        arguments
                            .iter()
                            .map(|(_, ty, _)| { docs![ty, reflow!(" -> ")] })
                    )
                ]
            }
        ]
        .group()
        .nest(INDENT)
    }

    fn symbol(&self, symbol: &Symbol) -> DocBuilder {
        docs![Self::escape(symbol)]
    }

    /// Metadata prints as its attributes only; the span is not rendered.
    fn metadata(
        &self,
        Metadata {
            span: _,
            attributes,
        }: &Metadata,
    ) -> DocBuilder {
        concat!(attributes)
    }

    fn lhs(&self, _lhs: &Lhs) -> DocBuilder {
        unreachable_by_invariant!(Local_mutation)
    }

    // Safety annotations have no Lean counterpart.
    fn safety_kind(&self, _safety_kind: &SafetyKind) -> DocBuilder {
        nil!()
    }

    fn binding_mode(&self, _binding_mode: &BindingMode) -> DocBuilder {
        unreachable!("This backend handle binding modes directly inside patterns")
    }

    fn region(&self, _region: &Region) -> DocBuilder {
        unreachable_by_invariant!(Drop_references)
    }

    fn dyn_trait_goal(&self, _dyn_trait_goal: &DynTraitGoal) -> DocBuilder {
        emit_error!(issue 1708, "`dyn` traits are unsupported")
    }

    /// Prints an attribute: tool/hax attributes are dropped, doc comments are
    /// turned into Lean `/-- … -/` doc comments.
    fn attribute(&self, Attribute { kind, span: _ }: &Attribute) -> DocBuilder {
        match kind {
            AttributeKind::Tool { .. } | AttributeKind::Hax { ..
            } => {
                nil!()
            }
            // Line doc comments become single-line Lean comments.
            AttributeKind::DocComment {
                kind: DocCommentKind::Line,
                body,
            } => comment!(body.clone()).append(hardline!()),
            // Block doc comments become Lean `/-- … -/` blocks.
            AttributeKind::DocComment {
                kind: DocCommentKind::Block,
                body,
            } => docs![
                "/--",
                line!(),
                intersperse!(body.lines().map(|line| line.to_string()), line!()),
                line!(),
                "-/"
            ]
            .nest(INDENT)
            .group()
            .append(hardline!()),
        }
    }

    fn borrow_kind(&self, _borrow_kind: &BorrowKind) -> DocBuilder {
        unreachable_by_invariant!(Drop_references)
    }

    fn guard(&self, _guard: &Guard) -> DocBuilder {
        unreachable_by_invariant!(Drop_match_guards)
    }

    /// Prints an associated-type equality constraint as `item := ty`.
    fn projection_predicate(
        &self,
        projection_predicate: &ProjectionPredicate,
    ) -> DocBuilder {
        docs![
            self.render_last(&projection_predicate.assoc_item),
            softline!(),
            ":=",
            line!(),
            projection_predicate.ty,
        ]
        .group()
        .nest(INDENT)
    }

    fn error_node(&self, _error_node: &ErrorNode) -> DocBuilder {
        // TODO : Should be made unreachable by https://github.com/cryspen/hax/pull/1672
        text!("sorry")
    }

    // Impl expressions
    fn impl_expr(&self, _impl_expr: &ImplExpr) -> DocBuilder {
        emit_error!(issue 1716, "Explicit impl expressions are unsupported")
    }
} };
================================================ FILE: rust-engine/src/backends/rust/renamings ================================================ core:alloc:global:GlobalAlloc core:alloc:GlobalAlloc core:marker:Sync core:prelude:v1:Sync core:iter:adapters:SourceIter core:iter:SourceIter core:ops:arith:Sub core:ops:Sub core:slice:index:range core:slice:range core:sync:exclusive:Exclusive core:sync:Exclusive core:ops:bit:Shr core:ops:Shr core:ffi:primitives:c_uchar core:ffi:c_uchar core:range:iter:IterRangeFrom core:range:IterRangeFrom core:fmt:builders:DebugStruct core:fmt:DebugStruct core:macros:builtin:trace_macros core:prelude:v1:trace_macros core:fmt:builders:from_fn core:fmt:from_fn core:macros:builtin:assert core:prelude:v1:assert core:macros:builtin:bench core:prelude:v1:bench core:iter:adapters:chain:Chain core:iter:Chain core:iter:traits:marker:TrustedStep core:iter:TrustedStep
core:slice:iter:RSplitMut core:slice:RSplitMut core:iter:traits:collect:Extend core:iter:Extend core:str:error:Utf8Error core:str:Utf8Error core:ptr:metadata:Thin core:ptr:Thin core:ops:arith:AddAssign core:ops:AddAssign core:mem:drop core:prelude:v1:drop core:async_iter:async_iter:IntoAsyncIterator core:async_iter:IntoAsyncIterator core:iter:adapters:flatten:Flatten core:iter:Flatten core:ops:unsize:DispatchFromDyn core:ops:DispatchFromDyn core:marker:Send core:prelude:v1:Send core:iter:traits:marker:InPlaceIterable core:iter:InPlaceIterable core:panic:unwind_safe:UnwindSafe core:panic:UnwindSafe core:hash:sip:SipHasher core:hash:SipHasher core:slice:iter:ArrayChunks core:slice:ArrayChunks core:slice:iter:SplitMut core:slice:SplitMut core:num:nonzero:NonZeroUsize core:num:NonZeroUsize core:slice:index:try_range core:slice:try_range core:task:wake:RawWaker core:task:RawWaker core:iter:adapters:rev:Rev core:iter:Rev core:ffi:primitives:c_float core:ffi:c_float core:convert:num:FloatToInt core:convert:FloatToInt core:async_iter:from_iter:from_iter core:async_iter:from_iter core:pin:unsafe_pinned:UnsafePinned core:pin:UnsafePinned core:slice:iter:ChunksExact core:slice:ChunksExact core:ffi:primitives:c_short core:ffi:c_short core:iter:adapters:skip_while:SkipWhile core:iter:SkipWhile core:macros:builtin:alloc_error_handler core:prelude:v1:alloc_error_handler core:ops:arith:Rem core:ops:Rem core:ops:range:RangeToInclusive core:range:RangeToInclusive core:slice:iter:SplitN core:slice:SplitN core:num:nonzero:NonZeroU32 core:num:NonZeroU32 core:convert:From core:prelude:v1:From core:ops:async_function:AsyncFnOnce core:ops:AsyncFnOnce core:marker:variance:PhantomInvariant core:marker:PhantomInvariant core:io:borrowed_buf:BorrowedBuf core:io:BorrowedBuf core:iter:sources:repeat_n:RepeatN core:iter:RepeatN core:marker:Copy core:prelude:v1:Copy core:ops:function:FnOnce core:ops:FnOnce core:ffi:primitives:c_ulong core:ffi:c_ulong core:iter:sources:successors:Successors 
core:iter:Successors core:ptr:non_null:NonNull core:ptr:NonNull core:iter:adapters:fuse:Fuse core:iter:Fuse core:macros:builtin:line core:prelude:v1:line core:panic:panic_info:PanicMessage core:panic:PanicMessage core:slice:iter:Chunks core:slice:Chunks core:slice:iter:ChunksMut core:slice:ChunksMut core:ffi:primitives:c_size_t core:ffi:c_size_t core:slice:iter:RChunksExactMut core:slice:RChunksExactMut core:slice:iter:RChunksMut core:slice:RChunksMut core:future:join:join core:future:join core:iter:adapters:chain:chain core:iter:chain core:ub_checks:assert_unsafe_precondition core:ub_checks:assert_unsafe_precondition core:ops:bit:BitOr core:ops:BitOr core:convert:AsMut core:prelude:v1:AsMut core:mem:transmutability:TransmuteFrom core:mem:TransmuteFrom core:cell:lazy:LazyCell core:cell:LazyCell core:macros:builtin:stringify core:prelude:v1:stringify core:iter:adapters:map_windows:MapWindows core:iter:MapWindows core:net:ip_addr:Ipv4Addr core:net:Ipv4Addr core:ffi:va_list:VaListImpl core:ffi:VaListImpl core:iter:adapters:take_while:TakeWhile core:iter:TakeWhile core:slice:iter:RChunks core:slice:RChunks core:slice:raw:from_raw_parts_mut core:slice:from_raw_parts_mut core:str:converts:from_raw_parts core:str:from_raw_parts core:ops:try_trait:Residual core:ops:Residual core:iter:adapters:cycle:Cycle core:iter:Cycle core:fmt:builders:DebugSet core:fmt:DebugSet core:ops:range:RangeTo core:range:RangeTo core:ops:bit:ShlAssign core:ops:ShlAssign core:ops:function:FnMut core:ops:FnMut core:str:iter:EscapeDebug core:str:EscapeDebug core:ffi:c_str:CStr core:ffi:CStr core:ops:deref:Receiver core:ops:Receiver core:ffi:primitives:c_int core:ffi:c_int core:iter:traits:iterator:Iterator core:iter:Iterator core:ops:coroutine:CoroutineState core:ops:CoroutineState core:macros:builtin:concat_bytes core:prelude:v1:concat_bytes core:mem:transmutability:Assume core:mem:Assume core:option:Option core:prelude:v1:Option core:ops:range:Bound core:range:Bound core:ffi:primitives:c_double 
core:ffi:c_double core:macros:builtin:include_str core:prelude:v1:include_str core:bstr:traits:impl_partial_eq core:bstr:impl_partial_eq core:ops:range:RangeBounds core:range:RangeBounds core:future:poll_fn:PollFn core:future:PollFn core:slice:iter:SplitInclusiveMut core:slice:SplitInclusiveMut core:hash:sip:SipHasher13 core:hash:SipHasher13 core:macros:builtin:autodiff_forward core:autodiff:autodiff_forward core:convert:TryFrom core:prelude:rust_future:TryFrom core:slice:iter:RSplit core:slice:RSplit core:iter:sources:repeat:repeat core:iter:repeat core:future:pending:pending core:future:pending core:slice:index:SliceIndex core:slice:SliceIndex core:macros:cfg_select core:cfg_select core:ptr:metadata:from_raw_parts_mut core:ptr:from_raw_parts_mut core:char:decode:DecodeUtf16Error core:char:DecodeUtf16Error core:ops:arith:MulAssign core:ops:MulAssign core:future:async_drop:async_drop_in_place core:future:async_drop_in_place core:num:nonzero:NonZeroI8 core:num:NonZeroI8 core:ops:deref:DerefMut core:ops:DerefMut core:iter:traits:marker:TrustedLen core:iter:TrustedLen core:num:nonzero:NonZeroU128 core:num:NonZeroU128 core:ptr:unique:Unique core:ptr:Unique core:marker:variance:PhantomCovariant core:marker:PhantomCovariant core:ops:arith:Div core:ops:Div core:iter:adapters:map_while:MapWhile core:iter:MapWhile core:net:ip_addr:Ipv6Addr core:net:Ipv6Addr core:slice:iter:ArrayWindows core:slice:ArrayWindows core:iter:adapters:zip:TrustedRandomAccess core:iter:TrustedRandomAccess core:ptr:metadata:Pointee core:ptr:Pointee core:ops:range:IntoBounds core:range:IntoBounds core:ops:control_flow:ControlFlow core:ops:ControlFlow core:ops:coroutine:Coroutine core:ops:Coroutine core:macros:builtin:global_allocator core:prelude:v1:global_allocator core:macros:builtin:define_opaque core:prelude:v1:define_opaque core:macros:builtin:option_env core:prelude:v1:option_env core:ptr:alignment:Alignment core:ptr:Alignment core:ops:bit:BitAndAssign core:ops:BitAndAssign 
core:io:borrowed_buf:BorrowedCursor core:io:BorrowedCursor core:iter:adapters:intersperse:IntersperseWith core:iter:IntersperseWith core:iter:sources:from_fn:FromFn core:iter:FromFn core:intrinsics:transmute core:mem:transmute core:ffi:primitives:c_long core:ffi:c_long core:iter:sources:repeat_n:repeat_n core:iter:repeat_n core:num:flt2dec:decoder:DecodableFloat core:num:flt2dec:DecodableFloat core:iter:adapters:copied:Copied core:iter:Copied core:ops:range:RangeFrom core:range:legacy:RangeFrom core:num:flt2dec:decoder:FullDecoded core:num:flt2dec:FullDecoded core:bstr:traits:impl_partial_eq_n core:bstr:impl_partial_eq_n core:iter:adapters:step_by:StepBy core:iter:StepBy core:slice:iter:Iter core:slice:Iter core:slice:raw:from_mut core:slice:from_mut core:unicode:unicode_data:conversions core:unicode:conversions core:future:poll_fn:poll_fn core:future:poll_fn core:ascii:ascii_char:AsciiChar core:ascii:AsciiChar core:panic:location:Location core:panic:Location core:macros:builtin:compile_error core:prelude:v1:compile_error core:future:async_drop:AsyncDrop core:future:AsyncDrop core:default:Default core:prelude:v1:Default core:ops:arith:Mul core:ops:Mul core:ffi:primitives:c_ptrdiff_t core:ffi:c_ptrdiff_t core:iter:sources:successors:successors core:iter:successors core:net:socket_addr:SocketAddrV4 core:net:SocketAddrV4 core:alloc:layout:Layout core:alloc:Layout core:iter:adapters:skip:Skip core:iter:Skip core:future:ready:ready core:future:ready core:str:converts:from_utf8 core:str:from_utf8 core:num:nonzero:NonZeroI128 core:num:NonZeroI128 core:iter:sources:from_fn:from_fn core:iter:from_fn core:ops:try_trait:FromResidual core:ops:FromResidual core:iter:range:Step core:range:Step core:macros:builtin:env core:prelude:v1:env core:str:iter:SplitAsciiWhitespace core:str:SplitAsciiWhitespace core:core_simd:simd core:simd:simd core:marker:variance:PhantomInvariantLifetime core:marker:PhantomInvariantLifetime core:ops:arith:Add core:ops:Add 
core:marker:variance:PhantomContravariantLifetime core:marker:PhantomContravariantLifetime core:num:flt2dec:decoder:Decoded core:num:flt2dec:Decoded core:ops:bit:BitOrAssign core:ops:BitOrAssign core:async_iter:async_iter:AsyncIterator core:async_iter:AsyncIterator core:slice:iter:RSplitNMut core:slice:RSplitNMut core:mem:manually_drop:ManuallyDrop core:mem:ManuallyDrop core:ops:arith:Neg core:ops:Neg core:ops:deref:LegacyReceiver core:ops:LegacyReceiver core:str:iter:EncodeUtf16 core:str:EncodeUtf16 core:num:error:ParseIntError core:num:ParseIntError core:ops:async_function:AsyncFnMut core:ops:AsyncFnMut core:macros:builtin:cfg_accessible core:prelude:v1:cfg_accessible core:ops:arith:RemAssign core:ops:RemAssign core:iter:sources:from_coroutine:from_coroutine core:iter:from_coroutine core:slice:ascii:is_ascii_simple core:slice:is_ascii_simple core:ops:arith:SubAssign core:ops:SubAssign core:mem:size_of core:prelude:v1:size_of core:task:poll:Poll core:task:Poll core:ops:unsize:CoerceUnsized core:ops:CoerceUnsized core:char:methods:encode_utf8_raw core:char:encode_utf8_raw core:fmt:macros:Debug core:fmt:Debug core:future:into_future:IntoFuture core:future:IntoFuture core:ffi:primitives:c_uint core:ffi:c_uint core:iter:sources:generator:iter core:iter:iter core:net:ip_addr:Ipv6MulticastScope core:net:Ipv6MulticastScope core:panic:unwind_safe:RefUnwindSafe core:panic:RefUnwindSafe core:fmt:num_buffer:NumBuffer core:fmt:NumBuffer core:slice:iter:ChunksExactMut core:slice:ChunksExactMut core:marker:Unpin core:prelude:v1:Unpin core:ops:deref:Deref core:ops:Deref core:num:nonzero:NonZeroU64 core:num:NonZeroU64 core:iter:traits:double_ended:DoubleEndedIterator core:iter:DoubleEndedIterator core:ops:index:Index core:ops:Index core:ops:range:Range core:range:legacy:Range core:str:validations:utf8_char_width core:str:utf8_char_width core:convert:TryInto core:prelude:rust_future:TryInto core:fmt:builders:DebugList core:fmt:DebugList core:ffi:c_str:FromBytesUntilNulError 
core:ffi:FromBytesUntilNulError core:slice:iter:SplitNMut core:slice:SplitNMut core:slice:ascii:EscapeAscii core:slice:EscapeAscii core:iter:sources:once_with:once_with core:iter:once_with core:str:iter:SplitWhitespace core:str:SplitWhitespace core:ops:range:OneSidedRangeBound core:ops:OneSidedRangeBound core:iter:traits:accum:Product core:iter:Product core:async_iter:from_iter:FromIter core:async_iter:FromIter core:future:ready:Ready core:future:Ready core:mem:align_of_val core:prelude:v1:align_of_val core:macros:builtin:deref core:prelude:v1:deref core:ops:bit:BitXor core:ops:BitXor core:clone:Clone core:prelude:v1:Clone core:ops:bit:Not core:ops:Not core:marker:Sized core:prelude:v1:Sized core:ops:index:IndexMut core:ops:IndexMut core:macros:builtin:format_args_nl core:prelude:v1:format_args_nl core:ffi:primitives:c_ushort core:ffi:c_ushort core:iter:adapters:scan:Scan core:iter:Scan core:fmt:builders:DebugTuple core:fmt:DebugTuple core:iter:sources:once:once core:iter:once core:ptr:metadata:DynMetadata core:ptr:DynMetadata core:slice:iter:Split core:slice:Split core:slice:iter:ChunkBy core:slice:ChunkBy core:ffi:primitives:c_char core:ffi:c_char core:iter:sources:once_with:OnceWith core:iter:OnceWith core:iter:sources:repeat_with:repeat_with core:iter:repeat_with core:str:converts:from_utf8_unchecked_mut core:str:from_utf8_unchecked_mut core:task:wake:LocalWaker core:task:LocalWaker core:panic:panic_info:PanicInfo core:panic:PanicInfo core:marker:variance:PhantomContravariant core:marker:PhantomContravariant core:iter:adapters:cloned:Cloned core:iter:Cloned core:task:wake:Waker core:task:Waker core:iter:traits:collect:FromIterator core:iter:FromIterator core:num:nonzero:NonZeroU16 core:num:NonZeroU16 core:future:pending:Pending core:future:Pending core:ops:function:Fn core:ops:Fn core:macros:builtin:file core:prelude:v1:file core:intrinsics:ub_checks core:ub_checks:ub_checks core:iter:adapters:filter:Filter core:iter:Filter 
core:iter:traits:exact_size:ExactSizeIterator core:iter:ExactSizeIterator core:marker:variance:variance core:marker:variance core:iter:sources:repeat_with:RepeatWith core:iter:RepeatWith core:char:methods:encode_utf16_raw core:char:encode_utf16_raw core:iter:adapters:intersperse:Intersperse core:iter:Intersperse core:iter:traits:accum:Sum core:iter:Sum core:str:iter:CharIndices core:str:CharIndices core:task:ready:ready core:task:ready core:cmp:Eq core:prelude:v1:Eq core:iter:sources:repeat:Repeat core:iter:Repeat core:macros:builtin:format_args core:prelude:v1:format_args core:ops:try_trait:Try core:ops:Try core:ops:arith:DivAssign core:ops:DivAssign core:result:Result core:prelude:v1:Result core:iter:adapters:enumerate:Enumerate core:iter:Enumerate core:net:ip_addr:IpAddr core:net:IpAddr core:iter:traits:marker:FusedIterator core:iter:FusedIterator core:convert:AsRef core:prelude:v1:AsRef core:macros:builtin:test core:prelude:v1:test core:iter:sources:empty:Empty core:iter:Empty core:future:future:Future core:future:Future core:fmt:builders:DebugMap core:fmt:DebugMap core:str:lossy:Utf8Chunk core:str:Utf8Chunk core:task:wake:RawWakerVTable core:task:RawWakerVTable core:iter:adapters:zip:zip core:iter:zip core:ptr:metadata:from_raw_parts core:ptr:from_raw_parts core:ptr:metadata:metadata core:ptr:metadata core:net:socket_addr:SocketAddr core:net:SocketAddr core:slice:iter:SplitInclusive core:slice:SplitInclusive core:ops:bit:BitAnd core:ops:BitAnd core:iter:adapters:zip:TrustedRandomAccessNoCoerce core:iter:TrustedRandomAccessNoCoerce core:iter:adapters:filter_map:FilterMap core:iter:FilterMap core:slice:raw:from_ref core:slice:from_ref core:core_arch:arch core:arch:arch core:iter:adapters:peekable:Peekable core:iter:Peekable core:cmp:Ord core:prelude:v1:Ord core:ffi:primitives:c_longlong core:ffi:c_longlong core:num:nonzero:ZeroablePrimitive core:num:ZeroablePrimitive core:slice:iter:IterMut core:slice:IterMut core:str:iter:Chars core:str:Chars 
core:macros:builtin:cfg_eval core:prelude:v1:cfg_eval core:range:iter:IterRangeInclusive core:range:IterRangeInclusive core:macros:builtin:include core:prelude:v1:include core:cell:once:OnceCell core:cell:OnceCell core:macros:builtin:autodiff_reverse core:autodiff:autodiff_reverse core:mem:size_of_val core:prelude:v1:size_of_val core:marker:variance:PhantomCovariantLifetime core:marker:PhantomCovariantLifetime core:macros:assert_matches core:assert_matches:assert_matches core:ops:bit:ShrAssign core:ops:ShrAssign core:macros:debug_assert_matches core:assert_matches:debug_assert_matches core:ffi:va_list:VaList core:ffi:VaList core:iter:sources:once:Once core:iter:Once core:char:methods:encode_utf8_raw_unchecked core:char:encode_utf8_raw_unchecked core:iter:sources:empty:empty core:iter:empty core:iter:adapters:array_chunks:ArrayChunks core:iter:ArrayChunks core:iter:traits:marker:TrustedFused core:iter:TrustedFused core:array:iter:IntoIter core:array:IntoIter core:ops:range:RangeInclusive core:range:legacy:RangeInclusive core:slice:iter:RChunksExact core:slice:RChunksExact core:macros:builtin:test_case core:prelude:v1:test_case core:slice:raw:from_ptr_range core:slice:from_ptr_range core:char:decode:DecodeUtf16 core:char:DecodeUtf16 core:marker:variance:Variance core:marker:Variance core:range:iter:IterRange core:range:IterRange core:macros:builtin:type_ascribe core:prelude:v1:type_ascribe core:net:parser:AddrParseError core:net:AddrParseError core:str:converts:from_raw_parts_mut core:str:from_raw_parts_mut core:str:converts:from_utf8_mut core:str:from_utf8_mut core:fmt:num_buffer:NumBufferTrait core:fmt:NumBufferTrait core:ffi:primitives:c_ulonglong core:ffi:c_ulonglong core:prelude:v1 core:prelude:rust_future:v1 core:mem:maybe_uninit:MaybeUninit core:mem:MaybeUninit core:ops:bit:Shl core:ops:Shl core:ops:range:OneSidedRange core:range:OneSidedRange core:iter:traits:collect:IntoIterator core:iter:IntoIterator core:slice:raw:from_raw_parts core:slice:from_raw_parts 
core:str:iter:EscapeDefault core:str:EscapeDefault core:macros:builtin:contracts_ensures core:contracts:contracts_ensures core:slice:iter:ChunkByMut core:slice:ChunkByMut core:str:validations:next_code_point core:str:next_code_point core:num:error:IntErrorKind core:num:IntErrorKind core:num:nonzero:NonZeroI16 core:num:NonZeroI16 core:num:nonzero:NonZeroI64 core:num:NonZeroI64 core:ops:async_function:AsyncFn core:ops:AsyncFn core:char:convert:ParseCharError core:char:ParseCharError core:num:nonzero:NonZeroIsize core:num:NonZeroIsize core:ops:drop:Drop core:ops:Drop core:char:convert:CharTryFromError core:char:CharTryFromError core:ffi:va_list:VaArgSafe core:ffi:VaArgSafe core:iter:sources:from_coroutine:FromCoroutine core:iter:FromCoroutine core:slice:iter:ArrayChunksMut core:slice:ArrayChunksMut core:bstr:traits:impl_partial_eq_ord core:bstr:impl_partial_eq_ord core:str:converts:from_utf8_unchecked core:str:from_utf8_unchecked core:str:iter:EscapeUnicode core:str:EscapeUnicode core:str:traits:FromStr core:str:FromStr core:task:wake:Context core:task:Context core:ffi:primitives:c_ssize_t core:ffi:c_ssize_t core:macros:builtin:derive_const core:prelude:v1:derive_const core:alloc:layout:LayoutErr core:alloc:LayoutErr core:fmt:builders:FromFn core:fmt:FromFn core:slice:iter:Windows core:slice:Windows core:ffi:primitives:c_schar core:ffi:c_schar core:num:nonzero:NonZeroU8 core:num:NonZeroU8 core:num:dec2flt:ParseFloatError core:num:ParseFloatError core:unicode:unicode_data:case_ignorable:lookup core:unicode:lookup core:cmp:PartialEq core:prelude:v1:PartialEq core:iter:adapters:flatten:FlatMap core:iter:FlatMap core:ops:bit:BitXorAssign core:ops:BitXorAssign core:macros:builtin:cfg core:prelude:v1:cfg core:iter:adapters:zip:Zip core:iter:Zip core:num:nonzero:NonZeroI32 core:num:NonZeroI32 core:convert:Into core:prelude:v1:Into core:macros:builtin:concat core:prelude:v1:concat core:mem:align_of core:prelude:v1:align_of core:str:lossy:Utf8Chunks core:str:Utf8Chunks 
core:macros:builtin:column core:prelude:v1:column core:hash:macros:Hash core:hash:Hash core:net:socket_addr:SocketAddrV6 core:net:SocketAddrV6 core:macros:builtin:derive core:prelude:v1:derive core:num:flt2dec:decoder:decode core:num:flt2dec:decode core:macros:builtin:contracts_requires core:contracts:contracts_requires core:slice:raw:from_mut_ptr_range core:slice:from_mut_ptr_range core:task:wake:ContextBuilder core:task:ContextBuilder core:num:nonzero:NonZero core:num:NonZero core:ffi:c_str:FromBytesWithNulError core:ffi:FromBytesWithNulError core:macros:builtin:log_syntax core:prelude:v1:log_syntax core:ops:deref:DerefPure core:ops:DerefPure core:cmp:PartialOrd core:prelude:v1:PartialOrd core:iter:adapters:inspect:Inspect core:iter:Inspect core:macros:builtin:module_path core:prelude:v1:module_path core:iter:adapters:map:Map core:iter:Map core:slice:iter:RSplitN core:slice:RSplitN core:iter:adapters:take:Take core:iter:Take core:macros:builtin:include_bytes core:prelude:v1:include_bytes core:unicode:unicode_data:cased:lookup core:unicode:lookup ================================================ FILE: rust-engine/src/backends/rust/renamings.rs ================================================ use std::{collections::HashMap, fmt::Debug, hash::Hash, sync::LazyLock}; use crate::backends::prelude::Rendered; #[derive(Debug)] struct Graph { node: Option, subtree: HashMap>>, } impl Default for Graph { fn default() -> Self { Self { node: Default::default(), subtree: Default::default(), } } } impl Graph { fn create_path(&mut self, path: &[K]) -> &mut Graph { let mut current = self; for chunk in path { current = current.subtree.entry(chunk.clone()).or_default(); } current } fn get_longest(&self, path: impl Iterator) -> Option<(Vec, &T)> { let mut current = self; let mut subpath = vec![]; let mut results = vec![]; for chunk in path { if let Some(sub) = current.subtree.get(&chunk) { current = sub; subpath.push(chunk.clone()); if let Some(node) = ¤t.node { 
results.push((subpath.clone(), node)); } } else { break; } } results.pop() } fn from_iter(it: impl Iterator, T)>) -> Self { let mut root = Self::default(); for (path, value) in it { root.create_path(&path).node = Some(value); } root } } static RENAMINGS: LazyLock>> = LazyLock::new(|| { let str = include_str!("renamings"); Graph::from_iter(str.lines().map(|line| { let (l, r) = line.split_once(" ").unwrap(); ( l.split(":").map(|s| s.to_string()).collect(), r.split(":").collect(), ) })) }); /// Rename a `Rendered` name according, so that we refer to public names of core, not private names. pub(super) fn rename_rendered(rendered: &mut Rendered) { let chunks = rendered .module .clone() .into_iter() .chain(rendered.path.clone()); if let Some((chunks_slice, rename)) = RENAMINGS.get_longest(chunks) { let rename: Vec = rename.iter().map(|s| s.to_string()).collect(); if chunks_slice.len() >= rendered.module.len() { let remainings = chunks_slice.len() - rendered.module.len(); let (mod_part, path_part) = rename.split_at((rename.len() - remainings).max(1)); rendered.module = mod_part.to_vec(); rendered.path.splice(0..remainings, path_part.to_vec()); } else { rendered.module.splice(0..chunks_slice.len(), rename); } } } ================================================ FILE: rust-engine/src/backends/rust.rs ================================================ //! A Rust backend (and printer) for hax. use super::prelude::*; use crate::ast::identifiers::global_id::view::{PathSegment, View}; use std::cell::RefCell; mod renamings; /// The Rust printer. 
#[setup_printer_struct] #[derive(Default, Clone)] pub struct RustPrinter { current_namespace: RefCell>>, } impl Printer for RustPrinter { const NAME: &str = "Rust"; } impl RenderView for RustPrinter { fn render_path_segment(&self, seg: &PathSegment) -> Vec { if let AnyKind::Constructor(constructor_kind) = seg.kind() { match constructor_kind { global_id::view::ConstructorKind::Constructor { ty } => { if let global_id::view::TypeDefKind::Struct = ty.kind() { return vec![ self.render_path_segment_payload(ty.lift().payload()) .to_string(), ]; } } } }; default::render_path_segment(self, seg) } fn render(&self, view: &View) -> Rendered { let (module_path, relative_path) = view.split_at_module(); let path_segment = |seg| self.render_path_segment(seg); let mut rendered = Rendered { module: module_path.iter().flat_map(path_segment).collect(), path: relative_path.iter().flat_map(path_segment).collect(), }; renamings::rename_rendered(&mut rendered); rendered } } /// The Rust backend. pub struct RustBackend; impl Backend for RustBackend { type Printer = RustPrinter; fn resugaring_phases() -> Vec> { vec![Box::new(FunctionsToConstants), Box::new(Tuples)] } fn module_path(&self, module: &Module) -> camino::Utf8PathBuf { let printer = RustPrinter::default(); let path = ::module(&printer, &module.ident.view()); camino::Utf8PathBuf::from_iter(path).with_extension("rs") } } const INDENT: isize = 4; #[prepend_associated_functions_with(install_pretty_helpers!(self: Self))] // Note: the `const` wrapping makes my IDE and LSP happy. Otherwise, I don't get // autocompletion of methods in the impl block below. const _: () = { macro_rules! todo { ($($tt:tt)*) => { disambiguated_todo!($($tt)*) }; } macro_rules! line { ($($tt:tt)*) => { disambiguated_line!($($tt)*) }; } macro_rules! concat { ($($tt:tt)*) => { disambiguated_concat!($($tt)*) }; } macro_rules! sep { ($l:expr, $it:expr, $r:expr, $sep:expr$(,)?) 
=> { docs![ intersperse!($it, docs![$sep, line!()]), docs![","].flat_alt(nil!()) ] .enclose(line_!(), line_!()) .nest(INDENT) .enclose($l, $r) .group() }; ($l:expr, $it:expr, $r:expr$(,)?) => { sep!($l, $it, $r, ",") }; } macro_rules! print_tuple { ($into_docs:ident) => {{ let mut docs: Vec<_> = $into_docs.iter().map(|typ| docs![typ]).collect(); if docs.len() == 1 { docs.push(nil![]) } sep!("(", docs, ")") }}; } macro_rules! sep_opt { (@$l:expr, $it:expr, $($rest:tt)*) => { { let mut it = $it.into_iter().peekable(); if it.peek().is_some() { sep!($l, it, $($rest)*) } else { nil!() } } }; ($l:expr, $it:expr, $($rest:tt)*) => { sep_opt!(@$l, $it, $($rest)*) }; } macro_rules! block { ($body:expr) => { docs![line!(), $body, line!()].group().nest(INDENT).braces() }; } impl<'a, 'b> RustPrinter { fn generic_params(&'a self, generic_params: &'b [GenericParam]) -> DocBuilder { let generic_params = generic_params .iter() .filter(|p| !matches!(&p.kind, GenericParamKind::Lifetime if p.ident.0.to_string() == "_")) .collect::>(); sep_opt!("<", generic_params, ">") } fn where_clause(&'a self, constraints: &'b [GenericConstraint]) -> DocBuilder { if constraints.is_empty() { return nil!(); } docs![ line!(), "where", line!(), intersperse!(constraints, docs![",", line!()]) .nest(INDENT) .group(), line!(), ] .nest(INDENT) .group() } fn attributes(&'a self, attrs: &'b [Attribute]) -> DocBuilder { concat!( attrs .iter() .filter(|attr| match &attr.kind { AttributeKind::Tool { .. } | AttributeKind::Hax(_) => false, AttributeKind::DocComment { .. 
} => true, }) .map(|attr| docs![attr, hardline!()]) ) } fn id_name(&'a self, id: GlobalId) -> DocBuilder { let view = id.view(); let path = ::render_strings(self, &view); let name = path.last().unwrap().clone(); docs![if name == "_" { "___empty_name".into() } else { name }] } } impl PrettyAst for RustPrinter { const NAME: &'static str = "Rust"; fn module(&self, module: &Module) -> DocBuilder { let previous = self.current_namespace.borrow().clone(); let view = module.ident.view(); let module_path = ::module(self, &view); *self.current_namespace.borrow_mut() = Some(module_path); let doc = intersperse!(&module.items, docs![hardline!(), hardline!()]); *self.current_namespace.borrow_mut() = previous; doc } fn safety_kind(&self, safety_kind: &SafetyKind) -> DocBuilder { match safety_kind { SafetyKind::Safe => nil!(), SafetyKind::Unsafe => docs![text!("unsafe"), space!()], } } fn param(&self, param: &Param) -> DocBuilder { docs![¶m.pat, ":", space!(), ¶m.ty] } fn binding_mode(&self, binding_mode: &BindingMode) -> DocBuilder { match binding_mode { BindingMode::ByRef(BorrowKind::Mut) => docs!["ref mut", space!()], BindingMode::ByRef(_) => docs!["ref", space!()], _ => nil!(), } } fn pat(&self, pat: &Pat) -> DocBuilder { match &*pat.kind { PatKind::Wild => docs!["_"], PatKind::Ascription { pat, ty } => docs![pat, ":", space!(), ty], PatKind::Or { sub_pats } => { intersperse!(sub_pats, docs![line!(), "|", line!()]) } PatKind::Array { args } => sep!("[", args, "]", "|"), PatKind::Deref { sub_pat } => docs!["&", sub_pat], PatKind::Constant { lit } => docs![lit], PatKind::Binding { mutable, var, mode, sub_pat, } => { docs![ if *mutable { docs!["mut", space!()] } else { nil!() }, mode, var, sub_pat.as_ref().map(|pat| docs!["@", docs![pat]]), ] } PatKind::Construct { .. 
} => todo!("resugaring"), PatKind::Resugared(resugared_pat_kind) => docs![resugared_pat_kind], PatKind::Error(_) => todo!("resugaring"), } } fn primitive_ty(&self, primitive_ty: &PrimitiveTy) -> DocBuilder { match primitive_ty { PrimitiveTy::Bool => docs!["bool"], PrimitiveTy::Int(int_kind) => docs![int_kind], PrimitiveTy::Float(float_kind) => docs![float_kind], PrimitiveTy::Char => docs!["char"], PrimitiveTy::Str => docs!["str"], } } fn int_kind(&self, int_kind: &IntKind) -> DocBuilder { docs![match (&int_kind.signedness, &int_kind.size) { (Signedness::Signed, IntSize::S8) => "i8", (Signedness::Signed, IntSize::S16) => "i16", (Signedness::Signed, IntSize::S32) => "i32", (Signedness::Signed, IntSize::S64) => "i64", (Signedness::Signed, IntSize::S128) => "i128", (Signedness::Signed, IntSize::SSize) => "isize", (Signedness::Unsigned, IntSize::S8) => "u8", (Signedness::Unsigned, IntSize::S16) => "u16", (Signedness::Unsigned, IntSize::S32) => "u32", (Signedness::Unsigned, IntSize::S64) => "u64", (Signedness::Unsigned, IntSize::S128) => "u128", (Signedness::Unsigned, IntSize::SSize) => "usize", }] } fn generic_param(&self, generic_param: &GenericParam) -> DocBuilder { docs![ match &generic_param.kind { GenericParamKind::Const { .. } => docs!["const", space!()], _ => nil!(), }, &generic_param.ident, match &generic_param.kind { GenericParamKind::Const { ty } => docs![":", space!(), ty], _ => nil!(), } ] } fn generic_constraint(&self, generic_constraint: &GenericConstraint) -> DocBuilder { match generic_constraint { GenericConstraint::Lifetime(s) => docs![s.clone()], GenericConstraint::TypeClass(impl_ident) => docs![impl_ident], GenericConstraint::Equality(projection_predicate) => docs![projection_predicate], } } fn impl_ident(&self, impl_ident: &ImplIdent) -> DocBuilder { let trait_goal = &impl_ident.goal; let [self_ty, args @ ..] = &trait_goal.args[..] 
else { panic!() }; docs![ self_ty, space!(), ":", space!(), &trait_goal.trait_, sep_opt!("<", args, ">"), ] } fn ty(&self, ty: &Ty) -> DocBuilder { match ty.kind() { TyKind::Primitive(primitive_ty) => docs![primitive_ty], // TyKind::Tuple(items) => intersperse!(items, docs![",", line!()]) // .nest(INDENT) // .group(), TyKind::App { head, args } => docs![head, sep_opt!("<", args, ">")], TyKind::Arrow { inputs, output } => { docs!["fn", sep!("(", inputs, ")"), reflow!(" -> "), output] } TyKind::Ref { inner, mutable, region: _, } => docs![ "&", if *mutable { docs!["mut", space!()] } else { nil!() }, inner ], TyKind::Param(local_id) => docs![local_id], TyKind::Slice(ty) => docs![ty].brackets(), TyKind::Array { ty, length } => { docs![ty, ";", space!(), length.as_ref()].brackets() } TyKind::RawPointer => todo!(), TyKind::AssociatedType { impl_, item } => docs![impl_, "::", item], TyKind::Opaque(global_id) => docs![global_id], TyKind::Dyn(dyn_trait_goals) => docs![ "dyn", docs![ line!(), intersperse!(dyn_trait_goals, docs![line!(), "+", space!()]) ] .group() .hang(0) ], TyKind::Resugared(resugared_ty_kind) => docs![resugared_ty_kind], TyKind::Error(_) => todo!("resugaring"), } } fn resugared_ty_kind(&self, resugared_ty_kind: &ResugaredTyKind) -> DocBuilder { match resugared_ty_kind { ResugaredTyKind::Tuple(types) => print_tuple!(types), } } fn literal(&self, literal: &Literal) -> DocBuilder { match literal { Literal::String(symbol) => docs![symbol], Literal::Char(ch) => text!(format!("{}", ch)), Literal::Bool(b) => text!(format!("{}", b)), Literal::Int { value, negative, kind, } => docs![if *negative { docs!["-"] } else { nil!() }, value, kind], Literal::Float { value, negative, kind, } => docs![if *negative { docs!["-"] } else { nil!() }, value, kind], } } fn trait_goal(&self, trait_goal: &TraitGoal) -> DocBuilder { let [self_ty, args @ ..] = &trait_goal.args[..] 
else { panic!() }; docs![ self_ty, space!(), "as", space!(), &trait_goal.trait_, sep_opt!("<", args, ">"), ] .enclose("<", ">") } fn generic_value(&self, generic_value: &GenericValue) -> DocBuilder { match generic_value { GenericValue::Ty(ty) => docs![ty], GenericValue::Expr(expr) => docs![expr], GenericValue::Lifetime => docs!["'_"], } } fn arm(&self, arm: &Arm) -> DocBuilder { docs![ &arm.pat, arm.guard.as_ref().map(|guard| docs!["if", space!(), guard]), reflow!(" => "), block![&arm.body], ] } fn expr(&self, expr: &Expr) -> DocBuilder { match &*expr.kind { ExprKind::If { condition, then, else_, } => docs![ "if", space!(), docs![condition].parens(), space!(), block![then], else_ .as_ref() .map(|doc| docs![reflow!(" else "), block![doc]]) .unwrap_or(nil!()) ], ExprKind::App { head, args, generic_args, bounds_impls: _, // this is implicit in Rust trait_, } => { mod names { pub use crate::names::rust_primitives::hax::{ cast_op, deref_op, logical_op_and, logical_op_or, }; } use ExprKind::GlobalId; match (&*head.kind, &args[..]) { (GlobalId(names::deref_op), [reference]) => { Some(docs!["*", docs![reference].parens()]) } (GlobalId(names::cast_op), [value]) => { Some(docs![docs![value].parens(), reflow!(" as "), &expr.ty]) } (GlobalId(names::logical_op_and), [lhs, rhs]) => Some(docs![ docs![lhs].parens(), reflow!(" && "), docs![rhs].parens() ]), (GlobalId(names::logical_op_or), [lhs, rhs]) => Some(docs![ docs![lhs].parens(), reflow!(" || "), docs![rhs].parens() ]), _ => None, } .unwrap_or_else(|| match (trait_, &*head.kind) { (Some((trait_impl_expr, _trait_args)), GlobalId(head)) => { docs![ &trait_impl_expr.goal, "::", self.id_name(*head), sep_opt!("::<", generic_args, ">"), sep!("(", args, ")") ] } _ => docs![ head, sep_opt!("::<", generic_args, ">"), sep!("(", args, ")") ], }) } ExprKind::Literal(literal) => docs![literal], ExprKind::Array(exprs) => sep!("[", exprs, "]"), ExprKind::Construct { constructor, is_record, fields, // TODO: complete constructors with base 
.. } => { let payload = fields.iter().map(|(id, value)| { docs![ if *is_record { docs![id, ":", space!()] } else { nil!() }, value ] }); docs![ constructor, if *is_record { sep!("{", payload, "}") } else { sep!("(", payload, ")") } ] } ExprKind::Match { scrutinee, arms } => { docs![ "match", space!(), scrutinee, space!(), block!(intersperse!(arms, hardline!())), ] } ExprKind::Borrow { mutable, inner } => { docs!["&", if *mutable { reflow!["mut "] } else { nil!() }, inner] } ExprKind::AddressOf { mutable, inner } => docs![ inner, reflow!(" as *"), if *mutable { reflow!["mut "] } else { nil!() }, docs![&expr.ty] ] .parens(), ExprKind::Let { lhs, rhs, body } => docs![ "let", space!(), lhs, space!(), "=", docs![line!(), rhs].group().nest(INDENT), ";", hardline!(), body ], ExprKind::GlobalId(global_id) => docs![global_id], ExprKind::LocalId(local_id) => docs![local_id], ExprKind::Ascription { e, ty } => docs![e, ":", space!(), ty].parens(), ExprKind::Assign { lhs, value } => docs![lhs, space!(), "=", space!(), value], ExprKind::Loop { body, kind, state: None, control_flow: None, label: None, } => match &**kind { LoopKind::UnconditionalLoop => docs!["loop", space!(), block![body]], LoopKind::WhileLoop { condition } => { docs!["while", space!(), condition, space!(), block![body]] } LoopKind::ForLoop { pat, iterator } => { docs![ "for", space!(), pat, reflow!(" in "), iterator, space!(), block![body] ] } LoopKind::ForIndexLoop { start, end, var, var_ty: _, } => docs![ "for", space!(), var, reflow!(" in "), start, "..", end, space!(), block![body] ], }, ExprKind::Loop { .. } => { todo!("loop with explicit state or with a label") } ExprKind::Break { value, label: None, .. } => docs!["break", space!(), value], ExprKind::Break { .. } => todo!("break with a label"), ExprKind::Return { value } => docs!["return", space!(), value], ExprKind::Continue { label: None, .. } => docs!["continue"], ExprKind::Continue { .. 
} => todo!("continue with a label"), ExprKind::Closure { params, body, captures: _, } => docs![ intersperse!(params, docs![",", space!()]).enclose("|", "|"), body ], ExprKind::Block { body, safety_mode } => { docs![safety_mode, block![body]] } ExprKind::Quote { contents } => docs![contents], ExprKind::Resugared(resugared_expr_kind) => docs![resugared_expr_kind], ExprKind::Error { .. } => todo!("resugaring"), } } fn resugared_expr_kind(&self, resugared_expr_kind: &ResugaredExprKind) -> DocBuilder { match resugared_expr_kind { ResugaredExprKind::Tuple(values) => print_tuple!(values), ResugaredExprKind::LetPure { .. } => unreachable!("LetPure resugaring not active"), } } fn lhs(&self, lhs: &Lhs) -> DocBuilder { match lhs { Lhs::LocalVar { var, ty: _ } => docs![var], Lhs::VecRef { e, .. } => docs![e], Lhs::ArbitraryExpr(expr) => docs![std::ops::Deref::deref(expr)], Lhs::FieldAccessor { e, ty: _, field } => { docs![std::ops::Deref::deref(e), ".", field] } Lhs::ArrayAccessor { e, ty: _, index } => { docs![std::ops::Deref::deref(e), docs!(index).brackets()] } } } fn global_id(&self, global_id: &GlobalId) -> DocBuilder { let view = global_id.view(); let module = ::module(self, &view); if Some(module) == *self.current_namespace.borrow() { let rendered = self.render(&view); docs![rendered.path.join("::")] } else { docs![self.render_string(&view)] } } fn variant(&self, variant: &Variant) -> DocBuilder { let payload = variant.arguments.iter().map(|(id, ty, attrs)| { docs![ self.attributes(attrs), if variant.is_record { docs![id, ":", space!()] } else { nil!() }, ty ] }); if variant.is_record { sep!("{", payload, "}") } else { sep!("(", payload, ")") } } fn item(&self, item: &Item) -> DocBuilder { docs![&item.meta, item.kind()] } fn resugared_item_kind(&self, resugared_item_kind: &ResugaredItemKind) -> DocBuilder { match resugared_item_kind { ResugaredItemKind::Constant { name, body, .. 
} => { docs![ "const", space!(), self.id_name(*name), ":", space!(), &body.ty, reflow!(" = "), docs![body].braces(), ";" ] } ResugaredItemKind::RecursiveFn { .. } => { unreachable!("The Rust backend does not use the RecursiveFn resugaring") } } } fn item_kind(&self, item_kind: &ItemKind) -> DocBuilder { match item_kind { ItemKind::Fn { name, generics, body, params, safety, } => { docs![ safety, text!("fn"), space!(), self.id_name(*name), self.generic_params(&generics.params), sep!("(", params, ")"), reflow!(" -> "), &body.ty, space!(), self.where_clause(&generics.constraints), block![body] ] } ItemKind::TyAlias { name, generics: _, ty, } => docs!["type", space!(), name, space!(), "=", space!(), ty, ";"], ItemKind::Type { name, generics, variants, is_struct, } => match &variants[..] { [variant] if *is_struct => { docs![ "struct", space!(), self.id_name(*name), self.generic_params(&generics.params), variant, if variant.is_record { nil!() } else { docs![";"] } ] } _ => { docs![ "enum", space!(), self.id_name(*name), self.generic_params(&generics.params), sep!( "{", variants.iter().map(|variant| docs![ &variant.name, space!(), variant ]), "}", ), self.where_clause(&generics.constraints), ] } }, ItemKind::Trait { name, generics, items, safety: _, } => docs![ "trait", space!(), self.id_name(*name), self.generic_params(&generics.params), self.where_clause(&generics.constraints), sep!("{", items, "}", nil!()), ], ItemKind::Impl { generics, self_ty, of_trait: (trait_, trait_args), items, parent_bounds: _, } => docs![ "impl", self.generic_params(&generics.params), space!(), trait_, sep_opt!("<", trait_args[1..], ">"), space!(), "for", space!(), self_ty, self.where_clause(&generics.constraints), sep!("{", items, "}", nil!()), ], ItemKind::Alias { name, item } => { docs!["type", self.id_name(*name), reflow!(" = "), item, ";"] } ItemKind::RustModule | ItemKind::Use { .. } => nil!(), ItemKind::Quote { quote, .. } => docs![quote], ItemKind::Error { .. 
} => todo!("resugaring"), ItemKind::Resugared(resugared_item_kind) => docs![resugared_item_kind], ItemKind::NotImplementedYet => docs!["/* `NotImplementedYet` item */"], } } fn impl_item(&self, impl_item: &ImplItem) -> DocBuilder { match &impl_item.kind { ImplItemKind::Type { ty, parent_bounds: _, } => docs![ &impl_item.meta, reflow!("type "), self.id_name(impl_item.ident), reflow!(" = "), ty, ";" ], ImplItemKind::Fn { body, params } => docs![ &impl_item.meta, text!("fn"), space!(), self.id_name(impl_item.ident), self.generic_params(&impl_item.generics.params), sep!("(", params, ")"), reflow!(" -> "), &body.ty, space!(), self.where_clause(&impl_item.generics.constraints), docs![line_!(), body, line_!(),].nest(INDENT).braces() ], ImplItemKind::Resugared(_resugared_impl_item_kind) => todo!(), ImplItemKind::Error(_) => todo!(), } } fn metadata(&self, metadata: &Metadata) -> DocBuilder { self.attributes(&metadata.attributes) } fn attribute(&self, attribute: &Attribute) -> DocBuilder { match &attribute.kind { AttributeKind::Tool { .. } | AttributeKind::Hax(_) => nil!(), AttributeKind::DocComment { kind, body } => match kind { DocCommentKind::Line => { intersperse!( body.lines().map(|line| docs![format!("/// {line}")]), hardline!() ) } DocCommentKind::Block => { docs![ "/**", intersperse!(body.lines().map(|line| line.to_string()), hardline!()), "*/" ] } }, } } } }; ================================================ FILE: rust-engine/src/backends.rs ================================================ //! Code generation backends. //! //! A backend is consititued of: //! - a list of AST transformations to apply, those are called phases. //! - and a printer. //! //! This top-level module is mostly an index of available backends and a //! small prelude to make backend modules concise. //! //! # Adding a new backend //! 1. Create a submodule under `src/backends/`, e.g. `foo.rs`. //! 2. Put your printer and backend there. //! 3. Re-export it here with `pub mod foo;`. //! //! 
See [`rust`] for an example implementation. pub mod fstar; pub mod lean; pub mod rust; use std::{collections::HashMap, rc::Rc}; use crate::{ ast::{Item, Metadata, Module, span::Span}, attributes::LinkedItemGraph, phase::legacy::group_consecutive_ocaml_phases, printer::{HasLinkedItemGraph, Print, Printer}, }; use camino::Utf8PathBuf; use hax_types::engine_api::File; /// A hax backend. /// /// A backend is responsible for turning the hax AST into sources of a target language. /// It combines: /// - a sequence of AST transformation phases, and /// - a printer that generates textual output. /// /// For example, we have F\*, Coq, and Lean backends. /// Some are still in the old OCaml engine. pub trait Backend { /// The printer type used by this backend. type Printer: Printer; /// Construct a new printer instance. /// /// By default this calls `Default::default` on the printer type. fn printer(&self, linked_item_graph: Rc) -> Self::Printer { Self::Printer::default().with_linked_item_graph(linked_item_graph) } /// A short name identifying the backend. /// /// By default, this is delegated to the associated printer's [`Printer::NAME`]. const NAME: &'static str = Self::Printer::NAME; /// The AST phases to apply before printing. /// /// Backends can override this to add transformations. /// The default is an empty list (no transformations). fn phases(&self) -> Vec { vec![] } /// A list of resugaring phases. fn resugaring_phases() -> Vec> { vec![] } /// Group a flat list of items into modules. 
fn items_to_module(&self, items: Vec) -> Vec { let mut modules: HashMap<_, Vec<_>> = HashMap::new(); for item in items { let module_ident = item.ident.mod_only_closest_parent(); modules.entry(module_ident).or_default().push(item); } modules .into_iter() .map(|(ident, items)| Module { ident, items, meta: Metadata { span: Span::dummy(), attributes: vec![], }, }) .collect() } /// Print a list of modules into files fn modules_to_files(&self, modules: Vec, mut printer: Self::Printer) -> Vec { modules .into_iter() .map(|module: Module| { let path = self.module_path(&module).into_string(); let (contents, _) = printer.print(module); File { path, contents, sourcemap: None, } }) .collect() } /// Compute the relative filesystem path where a given module should be written. fn module_path(&self, module: &Module) -> Utf8PathBuf; } /// A backend can be interpreted as a phase impl crate::phase::Phase for B { fn apply(&self, items: &mut Vec) { for phase in group_consecutive_ocaml_phases(self.phases()) { phase.apply(items); } } } /// Apply a backend to a collection of AST items, producing output files. /// /// This runs all of the backend's [`Backend::phases`], groups the items into /// modules via [`Backend::items_to_module`], and then uses the backend's printer /// to generate source files with paths determined by [`Backend::module_path`]. pub fn apply_backend(backend: B, mut items: Vec) -> Vec { crate::phase::Phase::apply(&backend, &mut items); for mut resugaring_phase in B::resugaring_phases() { for item in &mut items { resugaring_phase.visit(item) } } let linked_items_graph = Rc::new(LinkedItemGraph::new( &items, prelude::diagnostics::Context::Printer(B::NAME.into()), )); /// Drop any item marked with a hax attribute whose payload deserializes to /// `AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true })`. /// /// Items with such a "late-skip" attribute are typically generated by hax /// attributes. 
fn drop_skip_late_items(items: &mut Vec) { items.retain_mut(|item| { use hax_lib_macros_types::{AttrPayload, ItemStatus}; !item.meta.hax_attributes().any(|attr| { matches!( attr, AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true }) ) }) }); } drop_skip_late_items(&mut items); let modules = backend.items_to_module(items); let printer = backend.printer(linked_items_graph.clone()); backend.modules_to_files(modules, printer) } mod prelude { //! Small "bring-into-scope" set used by backend modules. //! //! Importing this prelude saves repetitive `use` lists in per-backend //! modules without forcing these names on downstream users. pub use super::Backend; pub use crate::ast::{identifiers::global_id::view::AnyKind, literals::*, resugared::*, *}; pub use crate::printer::{ pretty_ast::{DocBuilder, PrettyAst, ToDocument, install_pretty_helpers}, render_view::*, *, }; pub use crate::resugarings::*; pub use crate::symbol::Symbol; pub use hax_rust_engine_macros::{prepend_associated_functions_with, setup_printer_struct}; } ================================================ FILE: rust-engine/src/debugger.rs ================================================ //! An interactive debugger server for the rust engine. use crate::ast::span::Span; use crate::ast::*; use crate::phase::Phase as _; use crate::phase::PhaseKind; use crate::printer::SourceMap; macro_rules! declare_printers { {$($name:ident = $printer:expr),*$(,)?} => { /// Enumeration of all declared printers. #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)] pub enum Printer { $($name,)* /// The printer of a backend Backend(Backend), } impl Printer { fn print_items(self, items: Vec) -> (String, SourceMap) { let module = Module { ident: crate::names::rust_primitives::hax, items, meta: Metadata { span: Span::dummy(), attributes: vec![], }, }; match self { $(Self::$name => { $printer.print(module) }),* Self::Backend(backend) => backend.print_module(module), } } } }; } macro_rules! 
declare_backends { {$($name:ident = $backend:expr),*$(,)?} => { /// Enumeration of all declared backends. #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)] pub enum Backend { $( #[doc = concat!("The ", stringify!($name), " backend.")] $name, )* } impl Backend { fn phases(self) -> Vec { use crate::backends::Backend; match self { $( Self::$name => $backend.phases(), )* } } fn print_module(self, module: Module) -> (String, SourceMap) { use crate::backends::Backend; use crate::printer::Print; let item_graph = crate::attributes::LinkedItemGraph::new(&module.items, crate::ast::diagnostics::Context::Debugger) ; match self { $( Self::$name => $backend.printer(std::rc::Rc::new(item_graph)).print(module), )* } } } }; } declare_backends! { Lean = crate::backends::lean::LeanBackend, } declare_printers! {} /// A request to send to the debugger. #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] pub enum Request { /// Apply a given phase to the current items. ApplyPhase(PhaseKind), /// List the phases applied by a backend. ListPhases(Backend), /// Print the items with a given printer. Print(Printer), /// Dump the AST of the current items. DumpAst(DumpAstOptions), } /// Options one can set when dumping ASTs. #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] pub struct DumpAstOptions { /// Sort the items via their global id. The order is not alphabetical, it is just deterministic. pub sort_items_by_global_id: bool, /// Drop `Use` items. pub drop_use_items: bool, /// Drop `RustModule` items. pub drop_rust_modules_items: bool, /// Drop `NotImplementedYet` items. pub drop_not_implemented_yet_items: bool, /// Drop every attributes in the AST. pub drop_attributes: bool, /// Erases spans, replacing them by `"erased"`. /// Setting this to true will return untyped JSON. pub erase_spans: bool, /// Erases indices (e.g. local variable indices). /// Setting this to true will return untyped JSON. 
pub erase_indices: bool,
}

impl Default for DumpAstOptions {
    // Defaults favor a minimal, deterministic dump: use items, rust modules,
    // unimplemented items, attributes, spans and indices are all dropped or
    // erased unless explicitly requested.
    fn default() -> Self {
        Self {
            sort_items_by_global_id: true,
            drop_use_items: true,
            drop_rust_modules_items: true,
            drop_not_implemented_yet_items: true,
            drop_attributes: true,
            erase_spans: true,
            erase_indices: true,
        }
    }
}

/// Response given by the debugger.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub enum Response {
    /// Response for `Request::ApplyPhase`: a phase has been applied.
    PhaseApplied(PhaseKind),
    /// Response for `Request::ListPhase`: list the phases for a backend.
    ListedPhases(Vec),
    /// Response for `Request::Print`: items have been printed.
    Printed {
        /// The rendered printed items.
        rendered: String,
        /// The sourcemap.
        source_map: SourceMap,
    },
    /// One of the possible responses for `Request::DumpAst`. An AST was dumped as a typed JSON.
    TypedDumpedAst(Vec),
    /// One of the possible responses for `Request::DumpAst`. An AST was dumped as an untyped JSON.
    DumpedAst(serde_json::Value),
    /// An error occurred.
    Error(String),
}

/// The state against which the debugger is working.
pub struct State {
    /// An immutable vector of items.
    pub initial_items: Vec,
    /// A sequence of requests.
    pub requests: Vec,
}

impl State {
    /// Compute the items at the current state
    fn items(&self) -> Vec {
        // Replay every `ApplyPhase` request (other request kinds are
        // ignored) on a fresh clone of the initial items.
        let mut items = self.initial_items.clone();
        let phases = self.requests.iter().flat_map(|msg| match msg {
            Request::ApplyPhase(phase) => Some(*phase),
            _ => None,
        });
        for phase in phases {
            phase.apply(&mut items);
        }
        items
    }
    /// Apply the request on a state.
    pub fn apply(&mut self, req: Request) -> Response {
        let mut items = self.items();
        match req {
            Request::ApplyPhase(phase) => {
                phase.apply(&mut items);
                Response::PhaseApplied(phase)
            }
            Request::Print(printer) => {
                let (rendered, source_map) = printer.print_items(items);
                Response::Printed {
                    rendered,
                    source_map,
                }
            }
            Request::DumpAst(options) => {
                // Filter out item kinds according to the dump options.
                let mut items: Vec<_> = items
                    .into_iter()
                    .filter(|it| {
                        let drop = match &it.kind {
                            ItemKind::Use { ..
} => options.drop_use_items,
                            ItemKind::RustModule => options.drop_rust_modules_items,
                            ItemKind::NotImplementedYet => options.drop_not_implemented_yet_items,
                            _ => false,
                        };
                        !drop
                    })
                    .collect();
                if options.sort_items_by_global_id {
                    // Sort key is the pretty-printed JSON of the global id:
                    // not alphabetical, but deterministic across runs.
                    items.sort_by_key(|item| serde_json::to_string_pretty(&item.ident).ok());
                }
                if options.drop_attributes {
                    // Erase attributes everywhere they can occur in the AST:
                    // item metadata, parameters, and variants.
                    struct DropAttributes;
                    use crate::ast::visitors::AstVisitorMut;
                    impl AstVisitorMut for DropAttributes {
                        fn visit_metadata(&mut self, x: &mut Metadata) {
                            x.attributes = vec![];
                        }
                        fn visit_param(&mut self, x: &mut Param) {
                            x.attributes = vec![];
                        }
                        fn visit_variant(&mut self, x: &mut Variant) {
                            x.attributes = vec![];
                        }
                    }
                    DropAttributes.visit(&mut items);
                }
                if options.erase_indices || options.erase_spans {
                    // Fall back to an untyped JSON tree so fields can be
                    // erased structurally (the result is no longer a typed AST).
                    let mut items = match serde_json::to_value(items) {
                        Ok(value) => value,
                        Err(err) => return Response::Error(err.to_string()),
                    };
                    use serde_json::Value;
                    // Apply `f` to every node of a JSON tree, parent first.
                    fn visit_json(value: &mut Value, f: &F)
                    where
                        F: Fn(&mut Value),
                    {
                        f(value);
                        match value {
                            Value::Array(arr) => {
                                for v in arr {
                                    visit_json(v, f);
                                }
                            }
                            Value::Object(map) => {
                                for (_k, v) in map {
                                    visit_json(v, f);
                                }
                            }
                            Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => {}
                        }
                    }
                    let erased = || Value::String("".to_string());
                    visit_json(&mut items, &|value| {
                        let Value::Object(map) = value else { return };
                        if options.erase_indices {
                            // Any object field literally named `id` or `index`
                            // is erased.
                            map.iter_mut()
                                .filter(|(k, _)| matches!(k.as_str(), "id" | "index"))
                                .for_each(|(_, value)| {
                                    *value = erased();
                                });
                        }
                        // A span is recognized structurally by the presence of
                        // its three fields: `data`, `owner_hint` and `id`.
                        if options.erase_spans
                            && map.contains_key("data")
                            && map.contains_key("owner_hint")
                            && map.contains_key("id")
                        {
                            *value = erased();
                        }
                    });
                    Response::DumpedAst(items)
                } else {
                    Response::TypedDumpedAst(items)
                }
            }
            Request::ListPhases(backend) => Response::ListedPhases(backend.phases()),
        }
    }
}

/// Entrypoint for the interactive HTTP debugger.
pub fn http_interactive_debugger(items: Vec) {
    use axum::{Json, Router, extract, routing::post};
    use std::sync::Arc;
    // Handler: the client resends its full request history plus one new
    // request; the server rebuilds a `State` from scratch each time, so the
    // server itself is stateless.
    async fn process(
        extract::State(items): extract::State>>,
        Json((messages, message)): Json<(Vec, Request)>,
    ) -> Json {
        let mut state = State {
            initial_items: items.to_vec(),
            requests: messages,
        };
        Json(state.apply(message))
    }
    async fn serve(items: Vec) {
        let state = Arc::new(items);
        let app = Router::new()
            .route("/process", post(process))
            .with_state(state);
        // Bind to an ephemeral port on loopback and report the chosen address.
        let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
        let addr: std::net::SocketAddr = listener.local_addr().unwrap();
        eprintln!("Listening on http://{addr}");
        axum::serve(listener, app).await.unwrap();
    }
    // A single-threaded runtime suffices: requests are handled one at a time.
    let rt = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .unwrap();
    rt.block_on(serve(items));
}

================================================
FILE: rust-engine/src/hax_io.rs
================================================
//! This module helps communicating with `cargo-hax`.

use hax_types::engine_api::protocol::FromEngine;
use serde::Deserialize;
use serde::de::DeserializeOwned;
use std::io::{BufRead, BufReader, Stdin, stdin, stdout};
use std::sync::{LazyLock, Mutex};

use hax_frontend_exporter::id_table::WithTable;
use hax_types::engine_api::{EngineOptions, protocol::ToEngine};

// Stdin is wrapped once, globally, so every reader shares one buffer and the
// newline-delimited protocol stays in sync across calls.
static STDIN: LazyLock>> = LazyLock::new(|| Mutex::new(BufReader::new(stdin())));

/// Reads a message of any type from stdin
fn read() -> T {
    let mut stdin = STDIN.lock().unwrap();
    let mut slice = Vec::new();
    // Messages are newline-delimited JSON; read exactly one line.
    stdin
        .read_until(b'\n', &mut slice)
        .expect("No message left! Did the engine crash?");
    let mut de = serde_json::Deserializer::from_slice(&slice);
    // Deep ASTs can exceed serde_json's default recursion limit; delegate
    // stack management to serde_stacker instead.
    de.disable_recursion_limit();
    T::deserialize(serde_stacker::Deserializer::new(&mut de)).unwrap_or_else(|err| {
        panic!(
            "Could not parse as a `{}` message!
Error: {err}",
            std::any::type_name::()
        )
    })
}

/// Reads a `ToEngine` message from the engine
pub fn read_to_engine_message() -> ToEngine {
    read()
}

/// Reads the engine input JSON payload.
pub fn read_engine_input_message() -> WithTable {
    read()
}

/// Reads a table of `EngineOptions`
pub fn read_query() -> WithTable {
    // Same newline-delimited JSON protocol as `read`, duplicated here so the
    // initial options payload gets its own dedicated error message.
    let mut stdin = STDIN.lock().unwrap();
    let mut slice = Vec::new();
    stdin
        .read_until(b'\n', &mut slice)
        .expect("No message left! Did the engine crash?");
    let mut de = serde_json::Deserializer::from_slice(&slice);
    de.disable_recursion_limit();
    WithTable::deserialize(serde_stacker::Deserializer::new(&mut de))
        .expect("Could not parse as a table of EngineOptions!")
}

/// Writes a `ExtendedFromEngine` message
pub fn write(message: &FromEngine) {
    use std::io::Write;
    let mut stdout = stdout();
    serde_json::to_writer(&mut stdout, message).unwrap();
    // Terminate the message with a newline and flush so `cargo-hax` sees it
    // immediately (the protocol is line-delimited).
    stdout.write_all(b"\n").unwrap();
    stdout.flush().unwrap();
}

================================================
FILE: rust-engine/src/import_thir.rs
================================================
//!
This module allows importing the THIR AST produced by the frontend and converting it to the engine's internal AST

use crate::ast;
use crate::ast::HasKind as _;
use crate::ast::identifiers::global_id::ReservedSuffix;
use crate::ast::identifiers::global_id::TupleId;
use crate::symbol::Symbol;
use hax_frontend_exporter as frontend;

/// Build an `ErrorNode` for a construct the importer does not support yet.
/// `issue_id` is the number of the tracking issue for this limitation.
fn unsupported(msg: &str, issue_id: u32, span: ast::span::Span) -> ast::ErrorNode {
    let fragment = ast::fragment::Fragment::Unknown(msg.to_owned());
    let diagnostic = ast::diagnostics::Diagnostic::new(
        fragment.clone(),
        ast::diagnostics::DiagnosticInfo {
            context: ast::diagnostics::Context::Import,
            span,
            kind: hax_types::diagnostics::Kind::Unimplemented {
                issue_id: Some(issue_id),
                details: Some(msg.to_owned()),
            },
        },
    );
    ast::ErrorNode {
        fragment: Box::new(fragment),
        diagnostics: vec![diagnostic],
    }
}

/// Build an `ErrorNode` for an internal invariant violation during import
/// (something the importer assumed the frontend would never produce).
fn assertion_failure(msg: &str, span: ast::span::Span) -> ast::ErrorNode {
    let fragment = ast::fragment::Fragment::Unknown(msg.to_owned());
    let diagnostic = ast::diagnostics::Diagnostic::new(
        fragment.clone(),
        ast::diagnostics::DiagnosticInfo {
            context: ast::diagnostics::Context::Import,
            span,
            kind: hax_types::diagnostics::Kind::AssertionFailure {
                details: msg.to_owned(),
            },
        },
    );
    ast::ErrorNode {
        fragment: Box::new(fragment),
        diagnostics: vec![diagnostic],
    }
}

// Import context threaded through the whole translation. `owner_hint`
// presumably identifies the item owning the spans being imported — confirm
// against `Span::from_exporter`.
struct Context {
    owner_hint: Option,
}

/// Is this constraint a type-class constraint whose first goal argument is
/// the `Self` type parameter?
fn is_self_type_constraint(gc: &ast::GenericConstraint) -> bool {
    match gc {
        ast::GenericConstraint::TypeClass(ast::ImplIdent { goal, .. }) => goal
            .args
            .first()
            .and_then(ast::GenericValue::expect_ty)
            .map(|ty| matches!(ty.0.as_ref(), ast::TyKind::Param(local) if local.0 == Symbol::new("Self")))
            .unwrap_or(false),
        _ => false,
    }
}

/// Is this constraint a type-class constraint whose first goal argument is
/// exactly the type `ty`?
fn is_constraint_on_ty(gc: &ast::GenericConstraint, ty: &ast::Ty) -> bool {
    match gc {
        ast::GenericConstraint::TypeClass(ast::ImplIdent { goal, ..
}) => goal
            .args
            .first()
            .and_then(ast::GenericValue::expect_ty)
            .map(|arg| arg == ty)
            .unwrap_or(false),
        _ => false,
    }
}

/// Recognize `IndexMut::index_mut(&mut lhs, i)` and `Index::index(lhs, i)`
/// applications, returning `(lhs, index)` when the expression matches.
fn resugar_index_mut(expr: &ast::Expr) -> Option<(&ast::Expr, &ast::Expr)> {
    if let ast::ExprKind::App { head, args, .. } = expr.kind()
        && let ast::ExprKind::GlobalId(method) = head.kind()
        && let [lhs, index] = args.as_slice()
    {
        use crate::names::core::ops::index::*;
        match (*method, lhs.kind()) {
            // `index_mut` receives `&mut lhs`: peel the borrow off.
            (IndexMut::index_mut, ast::ExprKind::Borrow { inner: lhs, .. }) => Some((lhs, index)),
            (Index::index, _) => Some((lhs, index)),
            _ => None,
        }
    } else {
        None
    }
}

/// Translate an expression appearing in assignment position into an
/// `ast::Lhs`, recursing through index and field accesses.
fn lhs_from_expr(expr: &ast::Expr) -> ast::Lhs {
    // Simple case: assignment to a local variable.
    if let ast::ExprKind::LocalId(var) = expr.kind() {
        return ast::Lhs::LocalVar {
            var: var.clone(),
            ty: expr.ty.clone(),
        };
    }
    let expr = expr.unbox_underef();
    if let Some((e, index)) = resugar_index_mut(expr) {
        // `lhs[i] = …`: recurse on the indexed expression.
        ast::Lhs::ArrayAccessor {
            e: Box::new(lhs_from_expr(e)),
            ty: expr.ty.clone(),
            index: index.clone(),
        }
    } else if let ast::ExprKind::App { head, args, .. } = expr.kind()
        && let [arg] = args.as_slice()
        && let ast::ExprKind::GlobalId(field) = head.kind()
        && field.is_projector()
    {
        // `lhs.field = …`: a projector applied to a single argument.
        ast::Lhs::FieldAccessor {
            e: Box::new(lhs_from_expr(arg)),
            ty: expr.ty.clone(),
            field: *field,
        }
    } else {
        // Fallback: keep the whole expression opaque.
        ast::Lhs::ArbitraryExpr(Box::new(expr.clone()))
    }
}

// Import with an explicit span: used where the frontend value carries no span
// of its own and must inherit one from its context.
trait SpannedImport {
    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> Out;
}
// Import without an extra span argument.
trait Import {
    fn import(&self, context: &Context) -> Out;
}
// Blanket impls: importing a `Vec` imports each element.
impl, Out> Import> for Vec {
    fn import(&self, context: &Context) -> Vec {
        self.iter()
            .map(|value| Import::import(value, context))
            .collect()
    }
}
impl, Out> SpannedImport> for Vec {
    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> Vec {
        self.iter()
            .map(|value| value.spanned_import(context, span))
            .collect()
    }
}
// Import a `DefId` either as a name in the value namespace (functions,
// constructors, constants) or not (types, traits, modules).
trait DefIdImportHelpers {
    fn import_as_value(&self) -> ast::GlobalId;
    fn import_as_nonvalue(&self) -> ast::GlobalId;
}
impl DefIdImportHelpers for frontend::DefId {
    fn import_as_value(&self) -> ast::GlobalId {
ast::GlobalId::from_frontend(self.clone(), true) } fn import_as_nonvalue(&self) -> ast::GlobalId { ast::GlobalId::from_frontend(self.clone(), false) } } impl Import for frontend::Span { fn import(&self, context: &Context) -> ast::span::Span { ast::span::Span::from_exporter(self.clone(), context.owner_hint.as_ref()) } } fn import_attributes(context: &Context, attrs: &[frontend::Attribute]) -> ast::Attributes { attrs.iter().flat_map(|attr| attr.import(context)).collect() } fn has_automatically_derived(attrs: &ast::Attributes) -> bool { attrs.iter().any(|attr| { matches!( attr.kind, ast::AttributeKind::Tool { ref path, .. } if path == "automatically_derived" ) }) } impl Import> for frontend::Attribute { fn import(&self, context: &Context) -> Option { match self { frontend::Attribute::Parsed(frontend::AttributeKind::DocComment { kind, span, comment, .. }) => { let kind = match kind { frontend::CommentKind::Block => ast::DocCommentKind::Block, frontend::CommentKind::Line => ast::DocCommentKind::Line, }; Some(ast::Attribute { kind: ast::AttributeKind::DocComment { kind, body: comment.clone(), }, span: span.import(context), }) } frontend::Attribute::Parsed(frontend::AttributeKind::AutomaticallyDerived(span)) => { let kind = ast::AttributeKind::Tool { path: "automatically_derived".to_owned(), tokens: String::new(), }; Some(ast::Attribute { kind, span: span.import(context), }) } frontend::Attribute::Unparsed(frontend::AttrItem { path, args: frontend::AttrArgs::Eq { expr: frontend::MetaItemLit { symbol, .. }, .. }, span, }) if path == "doc" => { let kind = ast::AttributeKind::DocComment { kind: ast::DocCommentKind::Line, body: symbol.clone(), }; Some(ast::Attribute { kind, span: span.import(context), }) } frontend::Attribute::Unparsed(frontend::AttrItem { path, args, span }) => { let tokens = if let frontend::AttrArgs::Delimited(frontend::DelimArgs { tokens, .. 
}) = args { tokens.clone() } else { String::new() }; Some(ast::Attribute { kind: ast::AttributeKind::Tool { path: path.clone(), tokens, }, span: span.import(context), }) } _ => None, } } } impl Import for Vec { fn import(&self, context: &Context) -> ast::Attributes { self.iter() .filter_map(|value| value.import(context)) .collect() } } impl Import for frontend::GenericParamDef { fn import(&self, context: &Context) -> ast::GenericParam { let span = self.span.import(context); let frontend::GenericParamDef { name, kind, .. } = self; let kind = match kind { frontend::GenericParamDefKind::Lifetime => ast::GenericParamKind::Lifetime, frontend::GenericParamDefKind::Type { .. } => ast::GenericParamKind::Type, frontend::GenericParamDefKind::Const { ty, .. } => ast::GenericParamKind::Const { ty: ty.spanned_import(context, span), }, }; ast::GenericParam { ident: ast::LocalId(Symbol::new(name.clone())), meta: ast::Metadata { span, attributes: self.attributes.import(context), }, kind, } } } impl SpannedImport for frontend::GenericArg { fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::GenericValue { match self { frontend::GenericArg::Lifetime(_) => ast::GenericValue::Lifetime, frontend::GenericArg::Type(ty) => { ast::GenericValue::Ty(ty.spanned_import(context, span)) } frontend::GenericArg::Const(decorated) => { ast::GenericValue::Expr(frontend::Expr::from(decorated.clone()).import(context)) } } } } impl Import> for frontend::TyGenerics { fn import(&self, context: &Context) -> Vec { self.params .iter() .map(|value| value.import(context)) .collect() } } impl SpannedImport> for Vec { fn spanned_import( &self, context: &Context, span: ast::span::Span, ) -> Vec<(ast::ImplExpr, ast::ImplIdent)> { let impl_exprs: Vec = self.spanned_import(context, span); impl_exprs .into_iter() .enumerate() .map(|(i, ie)| { let impl_ident = ast::ImplIdent { goal: ie.goal.clone(), name: impl_expr_name(i as u64), }; (ie, impl_ident) }) .collect() } } impl SpannedImport> for 
frontend::Clause { fn spanned_import( &self, context: &Context, span: ast::span::Span, ) -> Option { match &self.kind.value { frontend::ClauseKind::Trait(trait_predicate) => { let args = trait_predicate .trait_ref .generic_args .spanned_import(context, span); let trait_ = trait_predicate.trait_ref.def_id.import_as_nonvalue(); let goal = ast::TraitGoal { trait_, args }; Some(ast::GenericConstraint::TypeClass(ast::ImplIdent { goal, name: impl_expr_name(self.id.0), })) } frontend::ClauseKind::Projection(frontend::ProjectionPredicate { impl_expr, assoc_item, ty, }) => { let impl_ = impl_expr.spanned_import(context, span); let assoc_item = assoc_item.def_id.import_as_nonvalue(); let ty = ty.spanned_import(context, span); Some(ast::GenericConstraint::Equality(ast::ProjectionPredicate { impl_, assoc_item, ty, })) } _ => None, } } } impl Import> for frontend::GenericPredicates { fn import(&self, context: &Context) -> Vec { let mut type_idx: u64 = 0; self.predicates .iter() .filter_map(|(clause, span)| { let span = span.import(context); let mut gc = clause.spanned_import(context, span)?; if let ast::GenericConstraint::TypeClass(impl_ident) = &mut gc { impl_ident.name = impl_expr_name(type_idx); type_idx += 1; } Some(gc) }) .collect() } } impl Import for frontend::ParamEnv { fn import(&self, context: &Context) -> ast::Generics { ast::Generics { params: self.generics.import(context), constraints: self.predicates.import(context), } } } impl Import for frontend::Safety { fn import(&self, _context: &Context) -> ast::SafetyKind { match self { frontend::Safety::Unsafe => ast::SafetyKind::Unsafe, frontend::Safety::Safe => ast::SafetyKind::Safe, } } } fn import_fn_sig( context: &Context, fn_sig: &frontend::TyFnSig, span: ast::span::Span, ) -> ast::TyKind { let inputs = if fn_sig.inputs.is_empty() { vec![ast::TyKind::unit().promote()] } else { fn_sig .inputs .iter() .map(|t| t.spanned_import(context, span)) .collect() }; ast::TyKind::Arrow { inputs, output: 
fn_sig.output.spanned_import(context, span), } } impl SpannedImport for frontend::Ty { fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::Ty { let kind = match self.kind() { frontend::TyKind::Bool => ast::TyKind::Primitive(ast::PrimitiveTy::Bool), frontend::TyKind::Char => ast::TyKind::Primitive(ast::PrimitiveTy::Char), frontend::TyKind::Int(int_ty) => { ast::TyKind::Primitive(ast::PrimitiveTy::Int(int_ty.into())) } frontend::TyKind::Uint(uint_ty) => { ast::TyKind::Primitive(ast::PrimitiveTy::Int(uint_ty.into())) } frontend::TyKind::Float(float_ty) => { ast::TyKind::Primitive(ast::PrimitiveTy::Float(float_ty.into())) } frontend::TyKind::FnDef { fn_sig, .. } | frontend::TyKind::Arrow(fn_sig) => { import_fn_sig(context, &fn_sig.as_ref().value, span) } frontend::TyKind::Closure(frontend::ClosureArgs { fn_sig, .. }) => { import_fn_sig(context, &fn_sig.value, span) } frontend::TyKind::Adt(item_ref) => { let head = item_ref.def_id.import_as_nonvalue(); let args = item_ref.generic_args.spanned_import(context, span); ast::TyKind::App { head, args } } frontend::TyKind::Foreign(..) => { ast::TyKind::Error(unsupported("Foreign type", 928, span)) } frontend::TyKind::Str => ast::TyKind::Primitive(ast::PrimitiveTy::Str), frontend::TyKind::Array(item_ref) => { if let [ frontend::GenericArg::Type(ty), frontend::GenericArg::Const(length), ] = &item_ref.generic_args[..] { ast::TyKind::Array { ty: ty.spanned_import(context, span), length: Box::new(length.import(context)), } } else { ast::TyKind::Error(assertion_failure( "Wrong generics for array: expected a type and a constant. See synthetic_items in hax frontend.", span, )) } } frontend::TyKind::Slice(ty) => { if let [frontend::GenericArg::Type(ty)] = &ty.generic_args[..] { ast::TyKind::Slice(ty.spanned_import(context, span)) } else { ast::TyKind::Error(assertion_failure( "Wrong generics for slice: expected a type. See synthetic_items in hax frontend.", span, )) } } frontend::TyKind::RawPtr(..) 
=> ast::TyKind::RawPointer, frontend::TyKind::Ref(_region, ty, mutable) => ast::TyKind::Ref { inner: ty.as_ref().spanned_import(context, span), mutable: *mutable, region: ast::Region, }, frontend::TyKind::Dynamic(_, generic_predicates, _region) => { let goals = generic_predicates .predicates .iter() .map(|(clause, _span)| match &clause.kind.value { frontend::ClauseKind::Trait(frontend::TraitPredicate { trait_ref, .. }) => Ok(ast::DynTraitGoal { trait_: trait_ref.def_id.import_as_nonvalue(), non_self_args: trait_ref.generic_args.spanned_import(context, span) [1..] .to_vec(), }), _ => Err(assertion_failure("type Dyn with non trait predicate", span)), }) .collect::, _>>(); match goals { Ok(goals) => ast::TyKind::Dyn(goals), Err(e) => ast::TyKind::Error(e), } } frontend::TyKind::Coroutine(_) => { ast::TyKind::Error(unsupported("Coroutine type", 924, span)) } frontend::TyKind::Never => ast::TyKind::App { head: crate::names::rust_primitives::hax::Never, args: Vec::new(), }, frontend::TyKind::Tuple(items) => { let args = items.generic_args.spanned_import(context, span); ast::TyKind::tuple(args) } frontend::TyKind::Alias(frontend::Alias { kind: frontend::AliasKind::Projection { impl_expr, .. }, def_id, .. }) => ast::TyKind::AssociatedType { impl_: impl_expr.spanned_import(context, span), item: def_id.import_as_nonvalue(), }, frontend::TyKind::Alias(frontend::Alias { kind: frontend::AliasKind::Opaque { .. }, def_id, .. }) => ast::TyKind::Opaque(def_id.import_as_nonvalue()), frontend::TyKind::Alias(frontend::Alias { kind: frontend::AliasKind::Inherent, .. }) => ast::TyKind::Error(assertion_failure( "Ty::Alias with AliasTyKind::Inherent", span, )), frontend::TyKind::Alias(frontend::Alias { kind: frontend::AliasKind::Free, .. }) => ast::TyKind::Error(assertion_failure("Ty::Alias with AliasTyKind::Free", span)), frontend::TyKind::Param(frontend::ParamTy { name, .. }) => { ast::TyKind::Param(ast::LocalId(Symbol::new(name.clone()))) } frontend::TyKind::Bound(..) 
=> ast::TyKind::Error(assertion_failure( "type Bound: should be gone after typechecking", span, )), frontend::TyKind::Placeholder(..) => ast::TyKind::Error(assertion_failure( "type Placeholder: should be gone after typechecking", span, )), frontend::TyKind::Infer(..) => ast::TyKind::Error(assertion_failure( "type Infer: should be gone after typechecking", span, )), frontend::TyKind::Error => ast::TyKind::Error(assertion_failure( "got type `Error`: Rust compilation probably failed.", span, )), frontend::TyKind::Todo(_) => ast::TyKind::Error(assertion_failure("type Todo", span)), }; kind.promote() } } impl SpannedImport for frontend::ConstantLiteral { fn spanned_import(&self, _context: &Context, _span: ast::span::Span) -> ast::literals::Literal { match self { frontend::ConstantLiteral::Bool(b) => ast::literals::Literal::Bool(*b), frontend::ConstantLiteral::Char(c) => ast::literals::Literal::Char(*c), frontend::ConstantLiteral::Float(f, float_ty) => match f.strip_prefix("-") { Some(f) => ast::literals::Literal::Float { value: Symbol::new(f), negative: true, kind: float_ty.into(), }, None => ast::literals::Literal::Float { value: Symbol::new(f), negative: false, kind: float_ty.into(), }, }, frontend::ConstantLiteral::Int(frontend::ConstantInt::Int(v, ty)) => { ast::literals::Literal::Int { value: Symbol::new(v.abs().to_string()), negative: *v < 0, kind: ty.into(), } } frontend::ConstantLiteral::Int(frontend::ConstantInt::Uint(v, ty)) => { ast::literals::Literal::Int { value: Symbol::new(v.to_string()), negative: false, kind: ty.into(), } } frontend::ConstantLiteral::PtrNoProvenance(_) => { panic!("constant literal: PtrNoProvenance") } frontend::ConstantLiteral::Str(s) => ast::literals::Literal::String(Symbol::new(s)), frontend::ConstantLiteral::ByteStr(items) => { // Represent a byte string as an array of u8 literals, like the OCaml importer. 
let s = String::from_utf8_lossy(items).to_string(); ast::literals::Literal::String(Symbol::new(&s)) } } } } impl Import for frontend::ConstantExpr { fn import(&self, context: &Context) -> ast::Expr { let Self { ty, span, contents, attributes, .. } = self; let span = span.import(context); let kind = match contents.as_ref() { frontend::ConstantExprKind::Literal(constant_literal) => match constant_literal { frontend::ConstantLiteral::ByteStr(items) => { let elems: Vec = items .iter() .map(|b| { let ty = ast::TyKind::Primitive(ast::PrimitiveTy::Int( (&frontend::UintTy::U8).into(), )); ast::ExprKind::Literal(ast::literals::Literal::Int { value: Symbol::new(b.to_string()), negative: false, kind: (&frontend::UintTy::U8).into(), }) .promote(ty.promote(), span) }) .collect(); ast::ExprKind::Array(elems) } _ => ast::ExprKind::Literal(constant_literal.spanned_import(context, span)), }, frontend::ConstantExprKind::Adt { info, fields } => { let (is_struct, is_record) = match info.kind { frontend::VariantKind::Struct { named } => (true, named), frontend::VariantKind::Enum { named, .. 
} => (false, named), frontend::VariantKind::Union => (false, false), }; let constructor = info.variant.import_as_value(); let fields = fields .iter() .map(|f| (f.field.import_as_value(), f.value.import(context))) .collect(); ast::ExprKind::Construct { constructor, is_record, is_struct, fields, base: None, } } frontend::ConstantExprKind::Array { fields } => { ast::ExprKind::Array(fields.import(context)) } frontend::ConstantExprKind::Tuple { fields } => { let length = fields.len(); let constructor: ast::GlobalId = TupleId::Constructor { length }.into(); let fields = fields .iter() .enumerate() .map(|(idx, value)| { let field: ast::GlobalId = TupleId::Field { length, field: idx }.into(); (field, value.import(context)) }) .collect(); ast::ExprKind::Construct { constructor, is_record: false, is_struct: true, fields, base: None, } } frontend::ConstantExprKind::GlobalName(item_ref) => { ast::ExprKind::GlobalId(item_ref.contents().def_id.import_as_value()) } frontend::ConstantExprKind::Borrow(inner) => ast::ExprKind::Borrow { mutable: false, inner: inner.import(context), }, frontend::ConstantExprKind::ConstRef { id } => { ast::ExprKind::LocalId(ast::LocalId(Symbol::new(id.name.clone()))) } frontend::ConstantExprKind::TraitConst { .. } | frontend::ConstantExprKind::RawBorrow { .. } | frontend::ConstantExprKind::Cast { .. 
} | frontend::ConstantExprKind::FnPtr(_) | frontend::ConstantExprKind::Memory(_) | frontend::ConstantExprKind::Todo(_) => ast::ExprKind::Error(assertion_failure( "constant_lit_to_lit: TraitConst | FnPtr | RawBorrow | Cast | Memory", span, )), }; ast::Expr { kind: Box::new(kind), ty: ty.spanned_import(context, span), meta: ast::Metadata { span, attributes: import_attributes(context, attributes), }, } } } fn import_block_expr( context: &Context, block: &frontend::Block, ty: &frontend::Ty, full_span: ast::span::Span, attributes: Vec, ) -> ast::Expr { let typ = ty.spanned_import(context, full_span); let safety_mode = match block.safety_mode { frontend::BlockSafety::Safe => ast::SafetyKind::Safe, frontend::BlockSafety::BuiltinUnsafe | frontend::BlockSafety::ExplicitUnsafe => { ast::SafetyKind::Unsafe } }; let mut stmts = block.stmts.clone(); let mut tail_expr: Option = block.expr.clone(); if tail_expr.is_none() && matches!(ty.kind(), frontend::TyKind::Never) && let Some(frontend::Stmt { kind: frontend::StmtKind::Expr { expr, .. }, }) = stmts.pop() { tail_expr = Some(expr); } let mut acc = if let Some(expr) = tail_expr { let body = expr.import(context); ast::ExprKind::Block { body, safety_mode }.promote(typ.clone(), full_span) } else { ast::Expr::unit(full_span) }; for stmt in stmts.into_iter().rev() { match stmt.kind { frontend::StmtKind::Expr { expr, .. } => { let rhs = expr.import(context); let lhs = ast::PatKind::Wild.promote(rhs.ty.clone(), rhs.meta.span); acc = ast::ExprKind::Let { lhs, rhs, body: acc, } .promote(typ.clone(), full_span); } frontend::StmtKind::Let { pattern, initializer, else_block, .. 
} => { let Some(init) = initializer else { return ast::Expr { kind: Box::new(ast::ExprKind::Error(unsupported( "Sorry, Hax does not support declare-first let bindings (see https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) for now.", 156, full_span, ))), ty: typ, meta: ast::Metadata { span: full_span, attributes, }, }; }; let lhs = pattern.import(context); let rhs = init.import(context); let body = acc; if let Some(else_block) = else_block { let else_span = else_block.span.import(context); let mut else_expr = import_block_expr(context, &else_block, ty, else_span, Vec::new()); else_expr.ty = body.ty.clone(); let arm_then = ast::Arm { pat: lhs, body, guard: None, meta: ast::Metadata { span: full_span, attributes: Vec::new(), }, }; let arm_else = ast::Arm { pat: ast::PatKind::Wild.promote(arm_then.pat.ty.clone(), else_span), body: else_expr, guard: None, meta: ast::Metadata { span: full_span, attributes: Vec::new(), }, }; acc = ast::ExprKind::Match { scrutinee: rhs, arms: vec![arm_then, arm_else], } .promote(typ.clone(), full_span) } else { acc = ast::ExprKind::Let { lhs, rhs, body }.promote(typ.clone(), full_span) } } } } ast::Expr { ty: typ, meta: ast::Metadata { span: full_span, attributes, }, ..acc } } impl Import for frontend::Expr { fn import(&self, context: &Context) -> ast::Expr { let Self { ty, span, contents, attributes, .. 
} = self; let span = span.import(context); let raw_attributes: Vec> = attributes.import(context); let attributes: Vec = raw_attributes.into_iter().flatten().collect(); let binop_id = |op| { use crate::names::core::cmp::*; use crate::names::core::ops::{arith::*, bit::*}; use crate::names::rust_primitives::hax::machine_int as hax_machine_int; use frontend::BinOp as Op; match op { Op::Add | Op::AddUnchecked => Add::add, Op::Sub | Op::SubUnchecked => Sub::sub, Op::Mul | Op::MulUnchecked => Mul::mul, Op::Div => Div::div, Op::Rem => Rem::rem, Op::BitXor => BitXor::bitxor, Op::BitAnd => BitAnd::bitand, Op::BitOr => BitOr::bitor, Op::Shl | Op::ShlUnchecked => Shl::shl, Op::Shr | Op::ShrUnchecked => Shr::shr, Op::Lt => PartialOrd::lt, Op::Le => PartialOrd::le, Op::Ne => PartialEq::ne, Op::Ge => PartialOrd::ge, Op::Gt => PartialOrd::gt, Op::Eq => PartialEq::eq, Op::Offset => crate::names::core::ptr::const_ptr::Impl::offset, Op::Cmp => hax_machine_int::cmp, Op::AddWithOverflow => hax_machine_int::add_with_overflow, Op::SubWithOverflow => hax_machine_int::sub_with_overflow, Op::MulWithOverflow => hax_machine_int::mul_with_overflow, } }; let binop_call = |op, x, y, out_type| -> _ { use frontend::BinOp as Op; let needs_borrow = matches!(op, Op::Lt | Op::Le | Op::Ne | Op::Ge | Op::Gt | Op::Eq); let borrow_if_needed = if needs_borrow { |e: ast::Expr| { use crate::ast::traits::HasKind; if matches!(e.ty.kind(), ast::TyKind::Ref { .. }) { e } else { let meta = e.meta.clone(); let ty = ast::TyKind::Ref { inner: e.ty.clone(), mutable: false, region: ast::Region, }; let kind = ast::ExprKind::Borrow { mutable: false, inner: e, }; ast::Expr { kind: Box::new(kind), ty: ty.promote(), meta, } } } } else { |e: ast::Expr| e }; let (bounds_impls, trait_, generic_args) = { // TODO: we pretend the call is a standalone funtion call. // This is not true, here we're calling methods. // This should be fixed. 
(vec![], None, vec![]) }; ast::ExprKind::fn_app( binop_id(op), generic_args, vec![borrow_if_needed(x), borrow_if_needed(y)], out_type, bounds_impls, trait_, span, ) }; let assign_binop = |op: frontend::AssignOp| match op { frontend::AssignOp::AddAssign => frontend::BinOp::Add, frontend::AssignOp::SubAssign => frontend::BinOp::Sub, frontend::AssignOp::MulAssign => frontend::BinOp::Mul, frontend::AssignOp::DivAssign => frontend::BinOp::Div, frontend::AssignOp::RemAssign => frontend::BinOp::Rem, frontend::AssignOp::BitXorAssign => frontend::BinOp::BitXor, frontend::AssignOp::BitAndAssign => frontend::BinOp::BitAnd, frontend::AssignOp::BitOrAssign => frontend::BinOp::BitOr, frontend::AssignOp::ShlAssign => frontend::BinOp::Shl, frontend::AssignOp::ShrAssign => frontend::BinOp::Shr, }; let kind = match contents.as_ref() { frontend::ExprKind::Box { value } => { let value = value.import(context); let ty = ty.spanned_import(context, span); let id = crate::names::rust_primitives::hax::box_new; ast::ExprKind::standalone_fn_app(id, vec![], vec![value], ty, span) } frontend::ExprKind::If { if_then_scope: _, cond, then, else_opt, } => { if let frontend::ExprKind::Let { expr, pat } = cond.contents.as_ref() { let scrutinee = expr.import(context); let pat = pat.import(context); let then_expr = then.import(context); let else_expr = else_opt .as_ref() .map(|value| value.import(context)) .unwrap_or_else(|| ast::Expr::unit(span)); let arm_then = ast::Arm { pat, body: then_expr, guard: None, meta: ast::Metadata { span, attributes: Vec::new(), }, }; let wildcard_pat = ast::Pat { kind: Box::new(ast::PatKind::Wild), ..arm_then.pat.clone() }; let arm_else = ast::Arm { pat: wildcard_pat, body: else_expr, guard: None, meta: ast::Metadata { span, attributes: Vec::new(), }, }; ast::ExprKind::Match { scrutinee, arms: vec![arm_then, arm_else], } } else { ast::ExprKind::If { condition: cond.import(context), then: then.import(context), else_: else_opt.as_ref().map(|value| value.import(context)), } 
} } frontend::ExprKind::Call { ty: _, fun, args, from_hir_call: _, fn_span: _, } => { let mut args = args.import(context); if args.is_empty() { args.push(ast::Expr::unit(span)); } if let frontend::ExprKind::GlobalName { item, .. } = fun.contents.as_ref() { let mut head = fun.import(context); *head.kind = ast::ExprKind::GlobalId(item.contents().def_id.import_as_value()); let generic_args = item.contents().generic_args.spanned_import(context, span); let bounds_impls = item .contents() .impl_exprs .iter() .map(|ie| ie.spanned_import(context, span)) .collect(); let trait_ = item.contents().in_trait.as_ref().map(|ie| { let impl_expr = ie.spanned_import(context, span); let args = impl_expr.goal.args.clone(); (impl_expr, args) }); ast::ExprKind::App { head, args, generic_args, bounds_impls, trait_, } } else { let head = fun.import(context); ast::ExprKind::App { head, args, generic_args: Vec::new(), bounds_impls: Vec::new(), trait_: None, } } } frontend::ExprKind::Deref { arg } => { let result_ty = ty.spanned_import(context, span); ast::ExprKind::standalone_fn_app( crate::names::rust_primitives::hax::deref_op, vec![], vec![arg.import(context)], result_ty, span, ) } frontend::ExprKind::Binary { op, lhs, rhs } => { let result_ty = ty.spanned_import(context, span); binop_call(*op, lhs.import(context), rhs.import(context), result_ty) } frontend::ExprKind::LogicalOp { op, lhs, rhs } => { let result_ty = ty.spanned_import(context, span); let id = match op { frontend::LogicalOp::And => crate::names::rust_primitives::hax::logical_op_and, frontend::LogicalOp::Or => crate::names::rust_primitives::hax::logical_op_or, }; ast::ExprKind::standalone_fn_app( id, vec![], vec![lhs.import(context), rhs.import(context)], result_ty, span, ) } frontend::ExprKind::Unary { op, arg } => { let result_ty = ty.spanned_import(context, span); let id = match op { frontend::UnOp::Not => crate::names::core::ops::bit::Not::not, frontend::UnOp::Neg => crate::names::core::ops::arith::Neg::neg, 
frontend::UnOp::PtrMetadata => crate::names::rust_primitives::hax::cast_op, }; ast::ExprKind::standalone_fn_app( id, vec![], vec![arg.import(context)], result_ty, span, ) } frontend::ExprKind::Cast { source } => { let source_ty = source.ty.spanned_import(context, span); let result_ty = ty.spanned_import(context, span); let cast_id = if let ast::TyKind::App { head, .. } = source_ty.0.as_ref() { if head.expect_tuple().is_none() { Some(head.with_suffix(ReservedSuffix::Cast)) } else { None } } else { None }; let id = cast_id.unwrap_or(crate::names::rust_primitives::hax::cast_op); ast::ExprKind::standalone_fn_app( id, vec![], vec![source.import(context)], result_ty, span, ) } frontend::ExprKind::Use { source } => return source.import(context), frontend::ExprKind::NeverToAny { source } => ast::ExprKind::standalone_fn_app( crate::names::rust_primitives::hax::never_to_any, vec![], vec![source.import(context)], ty.spanned_import(context, span), span, ), frontend::ExprKind::PointerCoercion { cast, source } => { let result_ty = ty.spanned_import(context, span); match cast { frontend::PointerCoercion::ClosureFnPointer(frontend::Safety::Safe) | frontend::PointerCoercion::ReifyFnPointer => return source.import(context), frontend::PointerCoercion::Unsize(_) => ast::ExprKind::standalone_fn_app( crate::names::rust_primitives::unsize, vec![], vec![source.import(context)], result_ty, span, ), _ => ast::ExprKind::Error(assertion_failure( &format!("Pointer, with [cast] being {:?}", cast), span, )), } } frontend::ExprKind::Loop { body } => ast::ExprKind::Loop { body: body.import(context), kind: Box::new(ast::LoopKind::UnconditionalLoop), state: None, control_flow: None, label: None, }, frontend::ExprKind::Match { scrutinee, arms } => ast::ExprKind::Match { scrutinee: scrutinee.import(context), arms: arms .iter() .map(|arm| ast::Arm { pat: arm.pattern.import(context), body: arm.body.import(context), guard: arm.guard.as_ref().map(|g| ast::Guard { kind: match g.contents.as_ref() { 
frontend::ExprKind::Let { expr, pat } => ast::GuardKind::IfLet { lhs: pat.import(context), rhs: expr.import(context), }, _ => ast::GuardKind::IfLet { lhs: ast::Pat { kind: Box::new(ast::PatKind::Constant { lit: ast::literals::Literal::Bool(true), }), ty: ast::TyKind::Primitive(ast::PrimitiveTy::Bool) .promote(), meta: ast::Metadata { span, attributes: Vec::new(), }, }, rhs: g.import(context), }, }, meta: ast::Metadata { span: g.span.import(context), attributes: g.attributes.import(context), }, }), meta: ast::Metadata { span: arm.span.import(context), attributes: Vec::new(), }, }) .collect(), }, frontend::ExprKind::Let { expr: _, pat: _ } => ast::ExprKind::Error(unsupported( "Let-chains (e.g. `if let .. && let ..`) are not supported.", 2018, span, )), frontend::ExprKind::Block { block } => { return import_block_expr(context, block, ty, span, attributes.clone()); } frontend::ExprKind::Assign { lhs, rhs } => { let lhs = lhs.import(context); let rhs = rhs.import(context); ast::ExprKind::Assign { lhs: lhs_from_expr(&lhs), value: rhs, } } frontend::ExprKind::AssignOp { op, lhs, rhs } => { let bin_op = assign_binop(*op); let lhs = lhs.import(context); let rhs = rhs.import(context); let result_ty = lhs.ty.clone(); let op_expr = binop_call(bin_op, lhs.clone(), rhs, result_ty.clone()) .promote(result_ty, span); ast::ExprKind::Assign { lhs: lhs_from_expr(&lhs), value: op_expr, } } frontend::ExprKind::Field { field, lhs } => ast::ExprKind::standalone_fn_app( field.import_as_value(), vec![], vec![lhs.import(context)], ty.spanned_import(context, span), span, ), frontend::ExprKind::TupleField { field, lhs } => { let length = match lhs.ty.kind() { frontend::TyKind::Tuple(item_ref) => item_ref.generic_args.len(), _ => panic!("TupleField on non-tuple type"), }; let projector: ast::GlobalId = TupleId::Field { length, field: *field, } .into(); ast::ExprKind::standalone_fn_app( projector, vec![], vec![lhs.import(context)], ty.spanned_import(context, span), span, ) } 
frontend::ExprKind::Index { lhs, index } => { let result_ty = ty.spanned_import(context, span); let id = crate::names::core::ops::index::Index::index; ast::ExprKind::standalone_fn_app( id, vec![], vec![lhs.import(context), index.import(context)], result_ty, span, ) } frontend::ExprKind::VarRef { id } => ast::ExprKind::LocalId(ast::LocalId::from(id)), frontend::ExprKind::ConstRef { id } => { ast::ExprKind::LocalId(ast::LocalId(Symbol::new(id.name.clone()))) } frontend::ExprKind::GlobalName { item, constructor: _, } => { let ident = item.contents().def_id.import_as_value(); ast::ExprKind::GlobalId(ident) } frontend::ExprKind::UpvarRef { closure_def_id: _, var_hir_id, } => ast::ExprKind::LocalId(ast::LocalId::from(var_hir_id)), frontend::ExprKind::Borrow { borrow_kind, arg } => { let inner = arg.import(context); let mutable = matches!(borrow_kind, frontend::BorrowKind::Mut { .. }); ast::ExprKind::Borrow { mutable, inner } } frontend::ExprKind::RawBorrow { mutability, arg } => ast::ExprKind::AddressOf { mutable: *mutability, inner: arg.import(context), }, frontend::ExprKind::Break { label: _, value } => { let value = value .as_ref() .map(|value| value.import(context)) .unwrap_or_else(|| ast::Expr::unit(span)); ast::ExprKind::Break { value, label: None, // TODO: honour the label (issue #1800) state: None, } } frontend::ExprKind::Continue { label: _ } => ast::ExprKind::Continue { label: None, // TODO: honour the label (issue #1800) state: None, }, frontend::ExprKind::Return { value } => { let value = value .as_ref() .map(|value| value.import(context)) .unwrap_or_else(|| ast::Expr::unit(span)); ast::ExprKind::Return { value } } frontend::ExprKind::ConstBlock(_item_ref) => { ast::ExprKind::Error(unsupported("ConstBlock", 923, span)) } frontend::ExprKind::Repeat { value, count } => { let value_expr: ast::Expr = value.import(context); let count_expr = count.import(context); let repeated = ast::Expr::standalone_fn_app( crate::names::rust_primitives::hax::repeat, vec![], 
vec![value_expr, count_expr], ty.spanned_import(context, span), span, ); ast::ExprKind::standalone_fn_app( crate::names::alloc::boxed::Impl::new, vec![], vec![repeated], ty.spanned_import(context, span), span, ) } frontend::ExprKind::Array { fields } => ast::ExprKind::Array(fields.import(context)), frontend::ExprKind::Tuple { fields } => { let length = fields.len(); let constructor: ast::GlobalId = TupleId::Constructor { length }.into(); let fields = fields .iter() .enumerate() .map(|(idx, value)| { let field: ast::GlobalId = TupleId::Field { length, field: idx }.into(); (field, value.import(context)) }) .collect(); ast::ExprKind::Construct { constructor, is_record: false, is_struct: true, fields, base: None, } } frontend::ExprKind::Adt(adt_expr) => { let (is_struct, is_record) = match adt_expr.info.kind { frontend::VariantKind::Struct { named } => (true, named), frontend::VariantKind::Enum { named, .. } => (false, named), frontend::VariantKind::Union => (false, false), }; let constructor = adt_expr.info.variant.import_as_value(); let base = match &adt_expr.base { frontend::AdtExprBase::None => None, frontend::AdtExprBase::Base(info) => Some(info.base.import(context)), frontend::AdtExprBase::DefaultFields(_) => { return ast::ExprKind::Error(unsupported( "Default field values: not supported", 1386, span, )) .promote(ty.spanned_import(context, span), span); } }; let fields = adt_expr .fields .iter() .map(|f| (f.field.import_as_value(), f.value.import(context))) .collect(); ast::ExprKind::Construct { constructor, is_record, is_struct, fields, base, } } frontend::ExprKind::PlaceTypeAscription { source: _, .. } => { ast::ExprKind::Error(assertion_failure( "Got a unexpected node `PlaceTypeAscription`. Please report, we were not able to figure out an expression yielding that node: a bug report would be very valuable here!", span, )) } frontend::ExprKind::ValueTypeAscription { source, .. 
} => { return source.import(context); } frontend::ExprKind::Closure { params, body, upvars, .. } => { let mut params: Vec = params .iter() .filter_map(|param| param.pat.as_ref().map(|pat| pat.import(context))) .collect(); if params.is_empty() { let ty = ast::TyKind::unit().promote(); params.push(ast::PatKind::Wild.promote(ty, span)); } ast::ExprKind::Closure { params, body: body.import(context), captures: upvars.import(context), } } frontend::ExprKind::Literal { lit, neg } => { let mut literal = match &lit.node { frontend::LitKind::Bool(b) => ast::literals::Literal::Bool(*b), frontend::LitKind::Char(c) => ast::literals::Literal::Char(*c), frontend::LitKind::Byte(b) => ast::literals::Literal::Int { value: Symbol::new(b.to_string()), negative: false, kind: (&frontend::UintTy::U8).into(), }, frontend::LitKind::Str(s, _) => ast::literals::Literal::String(Symbol::new(s)), frontend::LitKind::Int(value, kind) => { use frontend::LitIntType::*; let kind = match (kind, ty.kind()) { (Signed(int_ty), _) => ast::literals::IntKind::from(int_ty), (Unsigned(uint_ty), _) => ast::literals::IntKind::from(uint_ty), (Unsuffixed, frontend::TyKind::Int(int_ty)) => { ast::literals::IntKind::from(int_ty) } (Unsuffixed, frontend::TyKind::Uint(uint_ty)) => { ast::literals::IntKind::from(uint_ty) } _ => panic!("Unsuffixed int literal without int/uint type"), }; ast::literals::Literal::Int { value: Symbol::new(value.to_string()), negative: false, kind, } } frontend::LitKind::Float(value, float_ty) => ast::literals::Literal::Float { value: Symbol::new(value), negative: false, kind: match (float_ty, ty.kind()) { (frontend::LitFloatType::Suffixed(k), _) => { ast::literals::FloatKind::from(k) } (frontend::LitFloatType::Unsuffixed, frontend::TyKind::Float(k)) => { ast::literals::FloatKind::from(k) } _ => panic!("Unsuffixed float literal without float type"), }, }, frontend::LitKind::CStr(bytes, _) | frontend::LitKind::ByteStr(bytes, _) => { let elems: Vec = bytes .iter() .map(|b| { 
ast::ExprKind::Literal(ast::literals::Literal::Int { value: Symbol::new(b.to_string()), negative: false, kind: (&frontend::UintTy::U8).into(), }) .promote( ast::TyKind::Primitive(ast::PrimitiveTy::Int( (&frontend::UintTy::U8).into(), )) .promote(), span, ) }) .collect(); return ast::ExprKind::Array(elems) .promote(ty.spanned_import(context, span), span); } frontend::LitKind::Err(_) => { return ast::ExprKind::Error(assertion_failure( "[import_thir:literal] got an error literal: this means the Rust compiler or Hax's frontend probably reported errors above.", span, )).promote(ty.spanned_import(context, span), span); } }; if *neg { match &mut literal { ast::literals::Literal::Int { negative, .. } | ast::literals::Literal::Float { negative, .. } => { *negative = true; } _ => { return ast::ExprKind::Error(assertion_failure( "Unexpected negation on non-numeric literal", span, )) .promote(ty.spanned_import(context, span), span); } } } ast::ExprKind::Literal(literal) } frontend::ExprKind::ZstLiteral { .. } => ast::ExprKind::Error(assertion_failure( "`ZstLiteral` are expected to be handled before-hand", span, )), frontend::ExprKind::NamedConst { item, user_ty: _ } => { let generic_args: Vec = item.contents().generic_args.spanned_import(context, span); let const_args: Vec = generic_args .iter() .filter_map(|gv| match gv { ast::GenericValue::Expr(e) => Some(e.clone()), _ => None, }) .collect(); let def_id = item.contents().def_id.import_as_value(); if const_args.is_empty() && item.contents().in_trait.is_none() { ast::ExprKind::GlobalId(def_id) } else { ast::ExprKind::fn_app( def_id, vec![], const_args, ty.spanned_import(context, span), vec![], item.contents().in_trait.as_ref().map(|impl_expr| { ( impl_expr.spanned_import(context, span), generic_args.clone(), ) }), span, ) } } frontend::ExprKind::ConstParam { param, def_id: _ } => { ast::ExprKind::LocalId(ast::LocalId(Symbol::new(param.name.clone()))) } frontend::ExprKind::StaticRef { def_id, .. 
} => { ast::ExprKind::GlobalId(def_id.import_as_value()) } frontend::ExprKind::Yield { value: _ } => ast::ExprKind::Error(unsupported( "Got expression `Yield`: coroutines are not supported by hax", 924, span, )), frontend::ExprKind::Todo(payload) => ast::ExprKind::Error(assertion_failure( &format!("expression Todo\n{}", payload), span, )), }; ast::Expr { kind: Box::new(kind), ty: ty.spanned_import(context, span), meta: ast::Metadata { span, attributes }, } } } impl Import<(ast::GlobalId, ast::Ty, Vec)> for frontend::FieldDef { fn import(&self, context: &Context) -> (ast::GlobalId, ast::Ty, Vec) { ( self.did.import_as_value(), self.ty.spanned_import(context, self.span.import(context)), self.attributes.import(context), ) } } impl Import for frontend::ThirBody { fn import(&self, context: &Context) -> ast::Expr { self.expr.import(context) } } impl Import for frontend::PatKind { fn import(&self, context: &Context) -> ast::PatKind { match self { frontend::PatKind::Wild | frontend::PatKind::Missing => ast::PatKind::Wild, frontend::PatKind::AscribeUserType { subpattern, .. } => ast::PatKind::Ascription { pat: subpattern.import(context), ty: ast::SpannedTy { span: ast::span::Span::dummy(), ty: subpattern .ty .spanned_import(context, ast::span::Span::dummy()), }, }, frontend::PatKind::Binding { mode, var, subpattern, .. } => { let mutable = mode.mutability; let mode = match mode.by_ref { frontend::ByRef::Yes(_, mutability) => ast::BindingMode::ByRef(if mutability { ast::BorrowKind::Mut } else { ast::BorrowKind::Shared }), frontend::ByRef::No => ast::BindingMode::ByValue, }; ast::PatKind::Binding { mutable, var: ast::LocalId::from(var), mode, sub_pat: subpattern.as_ref().map(|value| value.import(context)), } } frontend::PatKind::Variant { info, subpatterns, .. } => { let (is_struct, is_record) = match info.kind { frontend::VariantKind::Struct { named } => (true, named), frontend::VariantKind::Enum { named, .. 
// (continued from the `Variant` arm) `named` says whether the variant's
// fields use record syntax; structs are flagged via the first component.
} => (false, named),
                    frontend::VariantKind::Union => (false, false),
                };
                let constructor = info.variant.import_as_value();
                // Pair every sub-pattern with the global id of the field it matches.
                let fields = subpatterns
                    .iter()
                    .map(|f| (f.field.import_as_value(), f.pattern.import(context)))
                    .collect();
                ast::PatKind::Construct {
                    constructor,
                    is_record,
                    is_struct,
                    fields,
                }
            }
            frontend::PatKind::Tuple { subpatterns } => {
                // Tuples become constructor applications of the dedicated
                // tuple ids: one constructor plus one field id per position.
                let length = subpatterns.len();
                let constructor: ast::GlobalId = TupleId::Constructor { length }.into();
                let fields = subpatterns
                    .iter()
                    .enumerate()
                    .map(|(idx, pat)| {
                        let field: ast::GlobalId = TupleId::Field { length, field: idx }.into();
                        (field, pat.import(context))
                    })
                    .collect();
                ast::PatKind::Construct {
                    constructor,
                    is_record: false,
                    is_struct: true,
                    fields,
                }
            }
            frontend::PatKind::Deref { subpattern } => ast::PatKind::Deref {
                sub_pat: subpattern.import(context),
            },
            // Deref patterns (unstable feature) are not supported: issue 926.
            frontend::PatKind::DerefPattern { .. } => ast::PatKind::Error(unsupported(
                "pat DerefPattern",
                926,
                ast::span::Span::dummy(),
            )),
            frontend::PatKind::Constant { value } => {
                use ast::*;
                // A constant in pattern position is imported as an expression
                // first, then structurally converted back into a pattern.
                fn expr_to_pat(expr: Expr) -> Pat {
                    let Expr { kind, ty, meta } = expr;
                    let kind = match *kind {
                        ExprKind::Literal(lit) => PatKind::Constant { lit },
                        ExprKind::Array(args) => PatKind::Array {
                            args: args.into_iter().map(expr_to_pat).collect(),
                        },
                        // A borrow in the constant corresponds to a deref pattern.
                        ExprKind::Borrow { mutable: _, inner } => PatKind::Deref {
                            sub_pat: expr_to_pat(inner),
                        },
                        // Any other expression shape has no pattern counterpart:
                        // report an error node instead of panicking.
                        kind => PatKind::Error(assertion_failure(
                            &format!(
                                "expr_to_pat: the given expression could not be interpreted as a pattern. kind={kind:#?}"
                            ),
                            meta.span,
                        )),
                    };
                    let kind = Box::new(kind);
                    Pat { kind, ty, meta }
                }
                *expr_to_pat(value.import(context)).kind
            }
            frontend::PatKind::ExpandedConstant { subpattern, .. } => {
                // The expansion already carries the pattern we need; unwrap its kind.
                *subpattern.import(context).kind
            }
            // Range patterns unsupported: issue 925.
            frontend::PatKind::Range(_) => {
                ast::PatKind::Error(unsupported("pat Range", 925, ast::span::Span::dummy()))
            }
            // Slice/array patterns unsupported (issue 804); arm body continues
            // on the next source line.
            frontend::PatKind::Slice { .. } | frontend::PatKind::Array { ..
} => {
                ast::PatKind::Error(unsupported(
                    "Pat:Array or Pat:Slice",
                    804,
                    ast::span::Span::dummy(),
                ))
            }
            frontend::PatKind::Or { pats } => ast::PatKind::Or {
                sub_pats: pats.import(context),
            },
            // `!` (never) patterns unsupported: issue 927.
            frontend::PatKind::Never => {
                ast::PatKind::Error(unsupported("pat Never", 927, ast::span::Span::dummy()))
            }
            frontend::PatKind::Error(_) => ast::PatKind::Error(assertion_failure(
                "`Error` node: Rust compilation failed. If Rust compilation was fine, please file an issue.",
                ast::span::Span::dummy(),
            )),
        }
    }
}

/// Imports a full frontend pattern (kind + type + span + attributes) into an
/// `ast::Pat`.
impl Import for frontend::Pat {
    fn import(&self, context: &Context) -> ast::Pat {
        let Self {
            ty,
            span,
            contents,
            hir_id: _,
            attributes,
        } = self;
        let span = span.import(context);
        let kind = match contents.as_ref() {
            // Type ascriptions are handled here rather than in
            // `PatKind::import` because building the `SpannedTy` needs this
            // pattern's own span and type.
            frontend::PatKind::AscribeUserType {
                ascription: _,
                subpattern,
            } => ast::PatKind::Ascription {
                pat: subpattern.import(context),
                ty: ast::SpannedTy {
                    span,
                    ty: ty.spanned_import(context, span),
                },
            },
            other => other.import(context),
        };
        ast::Pat {
            kind: Box::new(kind),
            ty: ty.spanned_import(context, span),
            meta: ast::Metadata {
                span,
                attributes: attributes.import(context),
            },
        }
    }
}

/// Imports a parameter list; an empty list is replaced by a single wildcard
/// parameter of unit type, so every imported function takes at least one
/// argument.
// NOTE(review): the `Vec` type arguments look stripped here (extraction
// artifact) — presumably `&Vec<frontend::Param>` / `Vec<ast::Param>`; confirm
// against the repository.
fn import_params(
    context: &Context,
    params: &Vec,
    span: ast::span::Span,
) -> Vec {
    let params: Vec = params.spanned_import(context, span);
    if params.is_empty() {
        let ty = ast::TyKind::unit().promote();
        vec![ast::Param {
            pat: ast::PatKind::Wild.promote(ty.clone(), span),
            ty,
            ty_span: None,
            attributes: vec![],
        }]
    } else {
        params
    }
}

/// Imports one function parameter; continues on the next source line.
impl SpannedImport for frontend::Param {
    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::Param {
        let frontend::Param {
            pat,
            ty,
            ty_span,
            attributes,
            ..
} = self;
        // Use the dedicated type-annotation span when the frontend provides
        // one; otherwise fall back to the parameter's span.
        let ty_span = ty_span.as_ref().map(|value| value.import(context));
        let ty = ty.spanned_import(context, ty_span.unwrap_or(span));
        ast::Param {
            // A parameter may come without a pattern; default to a wildcard
            // of the parameter's type.
            pat: pat
                .as_ref()
                .map(|value| value.import(context))
                .unwrap_or_else(|| ast::PatKind::Wild.promote(ty.clone(), span)),
            ty,
            ty_span,
            attributes: attributes.import(context),
        }
    }
}

/// Imports an enum/struct variant definition: constructor name, fields,
/// record-ness, attributes.
impl Import for frontend::VariantDef {
    fn import(&self, context: &Context) -> ast::Variant {
        ast::Variant {
            name: self.def_id.import_as_value(),
            arguments: self.fields.import(context),
            // Treated as a record when the first field carries a name.
            is_record: self.fields.raw.first().is_some_and(|fd| fd.name.is_some()),
            attributes: self.attributes.import(context),
        }
    }
}

/// Imports every element of a frontend `IndexVec` into a plain `Vec`.
// NOTE(review): the generic parameters of this impl look stripped (extraction
// artifact) — likely `impl<A: Import<B>, B> Import<Vec<B>> for
// frontend::IndexVec<…, A>`; confirm against the repository.
impl, B> Import> for frontend::IndexVec {
    fn import(&self, context: &Context) -> Vec {
        self.raw.iter().map(|value| value.import(context)).collect()
    }
}

/// Imports one associated item of a trait declaration (const, fn or type)
/// into an `ast::TraitItem`.
fn import_trait_item(
    context: &Context,
    item: &frontend::FullDef,
) -> ast::TraitItem {
    let span = item.span.import(context);
    let attributes = item.attributes.import(context);
    let meta = ast::Metadata { span, attributes };
    // Every supported associated-item kind carries a `param_env`; any other
    // kind reaching this function is a bug.
    let (frontend::FullDefKind::AssocConst { param_env, .. }
    | frontend::FullDefKind::AssocFn { param_env, .. }
    | frontend::FullDefKind::AssocTy { param_env, .. }) = &item.kind
    else {
        unreachable!("Found associated item of an unknown kind.")
    };
    let mut generics = param_env.import(context);
    let mut imported_constraints: Vec = Vec::new();
    let mut is_assoc_ty = false;
    let kind = match &item.kind {
        // Associated const with a default value: a defaulted item with no
        // parameters.
        frontend::FullDefKind::AssocConst {
            body: Some(default),
            ..
        } => ast::TraitItemKind::Default {
            params: Vec::new(),
            body: default.import(context),
        },
        // Associated const without a default: only its type is recorded.
        frontend::FullDefKind::AssocConst { ty, .. } => {
            ast::TraitItemKind::Fn(ty.spanned_import(context, span))
        }
        // Associated fn with a default body: import its signature's binders
        // into the generics and keep the body.
        frontend::FullDefKind::AssocFn {
            body: Some(default),
            sig,
            param_env,
            ..
        } => {
            generics = import_generics(context, &sig.bound_vars, param_env);
            ast::TraitItemKind::Default {
                params: import_params(context, &default.params, span),
                body: default.import(context),
            }
        }
        // Associated fn without a body; arm continues on the next source line.
        frontend::FullDefKind::AssocFn {
            sig,
            param_env,
            ..
} => {
            generics = import_generics(context, &sig.bound_vars, param_env);
            // Signature-only method: represented as an arrow (function) type.
            let inputs = sig
                .value
                .inputs
                .iter()
                .map(|ty| ty.spanned_import(context, span))
                .collect();
            let output = sig.value.output.spanned_import(context, span);
            ast::TraitItemKind::Fn(ast::TyKind::Arrow { inputs, output }.promote())
        }
        frontend::FullDefKind::AssocTy { value: Some(..), .. } => ast::TraitItemKind::Error(assertion_failure(
            "Associate types defaults are not supported by hax yet (it is a nightly feature)",
            span,
        )),
        frontend::FullDefKind::AssocTy {
            implied_predicates,
            ..
        } => {
            is_assoc_ty = true;
            imported_constraints = implied_predicates.import(context);
            // Only the trait-class constraints go into the item kind itself.
            let type_constraints = imported_constraints
                .iter()
                .filter_map(|gc| match gc {
                    ast::GenericConstraint::TypeClass(t) => Some(t.clone()),
                    _ => None,
                })
                .collect();
            ast::TraitItemKind::Type(type_constraints)
        }
        _ => ast::TraitItemKind::Error(assertion_failure(
            "Found associated item of an unknown kind.",
            span,
        )),
    };
    // For associated types, the generics carry the full unfiltered set of
    // imported constraints.
    if is_assoc_ty {
        generics.constraints = imported_constraints;
    }
    // Drop constraints on `Self`, then rename the surviving type-class
    // constraints to positional binder names (`i0`, `i1`, ...).
    generics
        .constraints
        .retain(|gc| !is_self_type_constraint(gc));
    for (idx, gc) in generics.constraints.iter_mut().enumerate() {
        if let ast::GenericConstraint::TypeClass(impl_ident) = gc {
            impl_ident.name = impl_expr_name(idx as u64);
        }
    }
    ast::TraitItem {
        meta,
        kind,
        generics,
        ident: item.def_id().import_as_nonvalue(),
    }
}

/// Imports a trait reference (trait id + generic arguments) as a trait goal.
impl SpannedImport for frontend::TraitRef {
    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::TraitGoal {
        let trait_ = self.def_id.import_as_nonvalue();
        let args = self.generic_args.spanned_import(context, span);
        ast::TraitGoal { trait_, args }
    }
}

/// Canonical name of the `index`-th impl-expression binder: `i0`, `i1`, ...
fn impl_expr_name(index: u64) -> Symbol {
    Symbol::new(format!("i{}", index))
}

/// Wraps `item_kind` in one projection/parent step described by `chunk`,
/// building up an impl-expression path; continues on the next source line.
fn browse_path(
    context: &Context,
    item_kind: ast::ImplExprKind,
    chunk: &frontend::ImplExprPathChunk,
    span: ast::span::Span,
    idx: usize,
) -> ast::ImplExprKind {
    match chunk {
        frontend::ImplExprPathChunk::AssocItem {
            item,
            predicate:
                frontend::Binder {
                    value:
                        frontend::TraitPredicate {
                            trait_ref,
                            ..
}, .. }, .. } => { let ident = ast::ImplIdent { goal: trait_ref.spanned_import(context, span), name: impl_expr_name(idx as u64), }; let item = item.contents().def_id.import_as_nonvalue(); ast::ImplExprKind::Projection { impl_: ast::ImplExpr { kind: Box::new(item_kind), goal: trait_ref.spanned_import(context, span), }, item, ident, } } frontend::ImplExprPathChunk::Parent { predicate: frontend::Binder { value: frontend::TraitPredicate { trait_ref, .. }, .. }, .. } => { let ident = ast::ImplIdent { goal: trait_ref.spanned_import(context, span), name: impl_expr_name(idx as u64), }; ast::ImplExprKind::Parent { impl_: ast::ImplExpr { kind: Box::new(item_kind), goal: trait_ref.spanned_import(context, span), }, ident, } } } } fn import_impl_expr_atom( context: &Context, ie: &frontend::ImplExprAtom, span: ast::span::Span, goal: ast::TraitGoal, ) -> ast::ImplExprKind { match ie { frontend::ImplExprAtom::Concrete(item_ref) => { ast::ImplExprKind::Concrete(item_ref.spanned_import(context, span)) } frontend::ImplExprAtom::LocalBound { index, path, .. } => { let mut kind = ast::ImplExprKind::LocalBound { id: impl_expr_name(*index as u64), }; for (i, chunk) in path.iter().enumerate() { kind = browse_path(context, kind, chunk, span, i) } kind } frontend::ImplExprAtom::SelfImpl { path, .. } => { let mut kind = ast::ImplExprKind::Self_; for (i, chunk) in path.iter().enumerate() { kind = browse_path(context, kind, chunk, span, i) } kind } frontend::ImplExprAtom::Dyn => ast::ImplExprKind::Dyn, frontend::ImplExprAtom::Builtin { .. 
} => ast::ImplExprKind::Builtin(goal), frontend::ImplExprAtom::Error(msg) => ast::ImplExprKind::Error(unsupported(msg, 707, span)), } } impl SpannedImport for frontend::ImplExpr { fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::ImplExpr { let goal = self.r#trait.value.spanned_import(context, span); let impl_ = ast::ImplExpr { kind: Box::new(import_impl_expr_atom( context, &self.r#impl, span, goal.clone(), )), goal: goal.clone(), }; match &self.r#impl { frontend::ImplExprAtom::Concrete(item_ref) if !item_ref.impl_exprs.is_empty() => { let args = item_ref .impl_exprs .iter() .map(|ie| ie.spanned_import(context, span)) .collect(); ast::ImplExpr { kind: Box::new(ast::ImplExprKind::ImplApp { impl_, args }), goal, } } _ => impl_, } } } fn generic_param_to_value(p: &ast::GenericParam) -> ast::GenericValue { match &p.kind { ast::GenericParamKind::Lifetime => ast::GenericValue::Lifetime, ast::GenericParamKind::Type => { ast::GenericValue::Ty(ast::TyKind::Param(p.ident.clone()).promote()) } ast::GenericParamKind::Const { ty } => ast::GenericValue::Expr( ast::ExprKind::LocalId(p.ident.clone()).promote(ty.clone(), p.meta.span), ), } } fn import_generics( context: &Context, bound_var_kinds: &[frontend::BoundVariableKind], param_env: &frontend::ParamEnv, ) -> ast::Generics { let mut generics: ast::Generics = param_env.import(context); bound_var_kinds .iter() .flat_map(|var| match var { frontend::BoundVariableKind::Region(frontend::BoundRegionKind::Named { def_id: _, name, span, attributes, }) => { let name = name.strip_prefix("'").unwrap_or(name); Some(ast::GenericParam { ident: ast::identifiers::LocalId(Symbol::new(name)), meta: ast::Metadata { span: span.import(context), attributes: import_attributes(context, attributes), }, kind: ast::GenericParamKind::Lifetime, }) } _ => None, }) .for_each(|var| generics.params.push(var)); generics } fn cast_of_enum( context: &Context, type_id: ast::GlobalId, generics: &ast::Generics, ty: ast::Ty, span: 
ast::span::Span, variants: impl Iterator, ) -> ast::Item { let name = ast::GlobalId::with_suffix(type_id, ReservedSuffix::Cast); let arms = { let ast::TyKind::Primitive(ast::PrimitiveTy::Int(int_kind)) = &*ty.0 else { return ast::ItemKind::Error(assertion_failure( &format!("cast_of_enum: expected int type, got {:?}", ty), span, )) .promote(name, span); }; let mut previous_explicit_determinator: Option = None; variants .map(|(variant, variant_def)| { // Each variant comes with a `rustc_middle::ty::VariantDiscr`. Some variant have `Explicit` discr (i.e. an expression) // while other have `Relative` discr (the distance to the previous last explicit discr). let body = match &variant_def.discr_def { frontend::DiscriminantDefinition::Relative(m) => { let relative = ast::ExprKind::Literal(ast::literals::Literal::Int { value: Symbol::new(m.to_string()), negative: false, kind: int_kind.clone(), }) .promote(ty.clone(), span); if let Some(base) = &previous_explicit_determinator { ast::ExprKind::fn_app( crate::names::core::ops::arith::Add::add, vec![], vec![base.clone(), relative], ty.clone(), vec![], None, span, ) .promote(ty.clone(), span) } else { relative } } frontend::DiscriminantDefinition::Explicit { def_id, span } => { let e = ast::ExprKind::GlobalId(def_id.import_as_value()) .promote(ty.clone(), span.import(context)); previous_explicit_determinator = Some(e.clone()); e } }; let pat = ast::PatKind::Construct { constructor: variant.name, is_record: variant.is_record, is_struct: false, fields: variant .arguments .iter() .map(|(cid, ty, _)| (*cid, ast::PatKind::Wild.promote(ty.clone(), span))) .collect(), } .promote(ty.clone(), span); ast::Arm::non_guarded(pat, body, span) }) .collect() }; let type_ref = ast::TyKind::App { head: type_id, args: generics.params.iter().map(generic_param_to_value).collect(), } .promote(); let scrutinee_var = ast::LocalId(Symbol::new("x")); let params = vec![ast::Param { pat: 
ast::PatKind::var_pat(scrutinee_var.clone()).promote(type_ref.clone(), span), ty: type_ref.clone(), ty_span: None, attributes: Vec::new(), }]; let scrutinee = ast::ExprKind::LocalId(scrutinee_var).promote(type_ref.clone(), span); ast::ItemKind::Fn { name, generics: generics.clone(), body: ast::ExprKind::Match { scrutinee, arms }.promote(ty, span), params, safety: ast::SafetyKind::Safe, } .promote(name, span) } fn expect_body<'a, Body>( optional: &'a Option, span: ast::span::Span, label: &str, ) -> Result<&'a Body, ast::ErrorNode> { optional .as_ref() .ok_or_else(|| assertion_failure(&format!("Expected body at {label}"), span)) } fn missing_associated_item() -> core::convert::Infallible { panic!("All assoc items should be included in the list of items produced by the frontend.") } use std::collections::HashMap; /// Import a `FullDef` item produced by the frontend, and produce the corresponding item /// (or items for inherent impls) pub fn import_item( item: &frontend::FullDef, all_items: &HashMap>, ) -> Vec { let frontend::FullDef { this, span, attributes, kind, .. } = item; let context = &Context { owner_hint: Some(this.contents().def_id.clone()), }; let ident = this.contents().def_id.clone().import_as_nonvalue(); let span = span.import(context); let attributes = attributes.import(context); let has_auto = has_automatically_derived(&attributes); let mut items = Vec::new(); let kind = match kind { frontend::FullDefKind::Adt { param_env, adt_kind, variants: frontend_variants, repr, .. 
} => { let generics = param_env.import(context); let frontend_variants = || frontend_variants.clone().into_iter(); let variants: Vec = frontend_variants().map(|v| v.import(context)).collect(); use frontend::{AdtKind, DiscriminantDefinition}; let adt_item_kind = { let make_type = |is_struct| ast::ItemKind::Type { name: ident, generics: generics.clone(), variants: variants.clone(), is_struct, }; match adt_kind { AdtKind::Enum => make_type(false), AdtKind::Struct => make_type(true), AdtKind::Union => ast::ItemKind::Error(unsupported("Union type", 998, span)), AdtKind::Array | AdtKind::Slice | AdtKind::Tuple => { ast::ItemKind::Error(assertion_failure( &format!( "While translating a item, we got an ADT of kind {adt_kind:#?}. This is not supposed to be ever produced." ), span, )) } } }; // For enums that are fieldless (see https://doc.rust-lang.org/reference/items/enumerations.html#casting), // we produce a cast function. if matches!(adt_kind, AdtKind::Enum) && variants.iter().all(ast::Variant::is_fieldless) { // Each variant might introduce a anonymous constant defining its discriminant integer let discriminant_const_items = frontend_variants().filter_map(|v| { let DiscriminantDefinition::Explicit { def_id, span } = &v.discr_def else { return None; }; let span = span.import(context); let name = def_id.import_as_value(); let value = v.discr_val.val; let (value, kind) = match v.discr_val.ty.kind() { frontend::TyKind::Int(int_ty) => (value.to_string(), int_ty.into()), frontend::TyKind::Uint(int_ty) => { ((value as i128).to_string(), int_ty.into()) } _ => { return Some( ast::ItemKind::Error(assertion_failure("", span)) .promote(name, span), ); } }; Some( ast::ItemKind::Fn { name, generics: ast::Generics::empty(), body: ast::ExprKind::Literal(ast::literals::Literal::Int { value: Symbol::new(value), negative: false, kind, }) .promote(v.discr_val.ty.spanned_import(context, span), span), params: Vec::new(), safety: ast::SafetyKind::Safe, } .promote(name, span), ) }); let 
cast_item = cast_of_enum( context, ident, &generics, repr.typ.spanned_import(context, span), span, variants.into_iter().zip(frontend_variants()), ); return std::iter::once(adt_item_kind.promote(ident, span)) .chain(discriminant_const_items) .chain(std::iter::once(cast_item)) .collect(); } else { adt_item_kind } } frontend::FullDefKind::TyAlias { param_env, ty } => ast::ItemKind::TyAlias { name: ident, generics: param_env.import(context), ty: ty.spanned_import(context, span), }, frontend::FullDefKind::ForeignTy => { ast::ItemKind::Error(unsupported("Foreign type", 928, span)) } frontend::FullDefKind::OpaqueTy => ast::ItemKind::Error(assertion_failure( "OpaqueTy should be replaced by Alias in the frontend", span, )), frontend::FullDefKind::Trait { param_env, implied_predicates, items, safety, .. } => { let mut generics = param_env.import(context); generics.constraints = implied_predicates.import(context); ast::ItemKind::Trait { name: ident, generics, items: items .iter() .map(|assoc_item| { let item = all_items .get(&assoc_item.def_id) .expect("Could not find definition for associated item"); import_trait_item(context, item) }) .collect(), safety: safety.import(context), } } frontend::FullDefKind::TraitAlias { .. } => { ast::ItemKind::Error(assertion_failure("Trait Alias", span)) } frontend::FullDefKind::TraitImpl { param_env, trait_pred, implied_impl_exprs, items, .. } => { let mut generics = param_env.import(context); let trait_ref = trait_pred.trait_ref.contents(); let of_trait: (ast::GlobalId, Vec) = ( trait_ref.def_id.import_as_nonvalue(), trait_ref .generic_args .iter() .map(|ga| ga.spanned_import(context, span)) .collect(), ); let mut parent_bounds: Vec<(ast::ImplExpr, ast::ImplIdent)> = implied_impl_exprs.spanned_import(context, span); let items: Vec = if has_auto { Vec::new() } else { items .iter() .flat_map(|assoc_item| { // The DefId for this very specific impl associated item. 
// The DefId of the original associated item on the trait is // `assoc_item.decl_def_id`, here we discard it, but it may // be useful in the future (for e.g. for // https://github.com/cryspen/hax-evit/issues/24). let method_def_id_impl = match &assoc_item.value { hax_frontend_exporter::ImplAssocItemValue::Provided { def_id, .. } => def_id, _ => { // TODO: Here, we skip defaulted associated items. return None; } }; let ident = method_def_id_impl.import_as_nonvalue(); let assoc_item_def = all_items.get(method_def_id_impl).unwrap_or_else( #[allow(unreachable_code)] || match missing_associated_item() {}, ); let span = assoc_item_def.span.import(context); let attributes = assoc_item_def.attributes.import(context); let (generics, kind) = match assoc_item_def.kind() { frontend::FullDefKind::AssocTy { param_env, value, .. } => ( param_env.import(context), match expect_body(value, span, "import_item/TraitImpl/AssocTy") { Ok(body) => ast::ImplItemKind::Type { ty: body.spanned_import(context, span), parent_bounds: assoc_item .required_impl_exprs .spanned_import(context, span), }, Err(error) => ast::ImplItemKind::Error(error), }, ), frontend::FullDefKind::AssocFn { param_env, body, sig, .. } => ( import_generics(context, &sig.bound_vars, param_env), match expect_body(body, span, "import_item/TraitImpl/AssocFn") { Ok(body) => ast::ImplItemKind::Fn { body: body.import(context), params: import_params(context, &body.params, span), }, Err(error) => ast::ImplItemKind::Error(error), }, ), frontend::FullDefKind::AssocConst { param_env, body, .. 
} => ( param_env.import(context), match expect_body(body, span, "import_item/TraitImpl/AssocConst") { Ok(body) => ast::ImplItemKind::Fn { body: body.import(context), params: Vec::new(), }, Err(error) => ast::ImplItemKind::Error(error), }, ), #[allow(unreachable_code)] _ => match missing_associated_item() {}, }; Some(ast::ImplItem { meta: ast::Metadata { span, attributes }, generics, kind, ident, }) }) .collect() }; if let [ast::GenericValue::Ty(self_ty), ..] = &of_trait.1[..] { parent_bounds.retain(|(impl_expr, _)| { matches!(impl_expr.goal.args.first(), Some(ast::GenericValue::Ty(arg_ty)) if arg_ty == self_ty) }); generics .constraints .retain(|gc| !is_constraint_on_ty(gc, self_ty)); if generics.constraints.len() > 1 { generics.constraints.truncate(1); } ast::ItemKind::Impl { generics, self_ty: self_ty.clone(), of_trait, items, parent_bounds, } } else { ast::ItemKind::Error(assertion_failure( "Self should always be the first generic argument of a trait application.", span, )) } } frontend::FullDefKind::InherentImpl { param_env, items, .. } => { if has_auto { return Vec::new(); } return items .iter() .map(|assoc_item| { let ident = assoc_item.def_id.import_as_nonvalue(); let assoc_item = all_items.get(&assoc_item.def_id).unwrap_or_else( #[allow(unused)] || match missing_associated_item() {}, ); let span = assoc_item.span.import(context); let attributes = assoc_item.attributes.import(context); let impl_generics = param_env.import(context); let kind = match assoc_item.kind() { frontend::FullDefKind::AssocTy { param_env, value, .. } => { let generics = impl_generics.clone().concat(param_env.import(context)); match expect_body(value, span, "import_item/InherentImpl/AssocTy") { Ok(body) => ast::ItemKind::TyAlias { name: ident, generics, ty: body.spanned_import(context, span), }, Err(err) => ast::ItemKind::Error(err), } } frontend::FullDefKind::AssocFn { param_env, sig, body, .. 
} => { let generics = impl_generics.clone().concat(import_generics( context, &sig.bound_vars, param_env, )); match expect_body(body, span, "import_item/InherentImpl/AssocFn") { Ok(body) => ast::ItemKind::Fn { name: ident, generics, body: body.import(context), params: import_params(context, &body.params, span), safety: sig.value.safety.import(context), }, Err(err) => ast::ItemKind::Error(err), } } frontend::FullDefKind::AssocConst { param_env, body, .. } => { let generics = impl_generics.clone().concat(param_env.import(context)); match expect_body(body, span, "import_item/InherentImpl/AssocConst") { Ok(body) => ast::ItemKind::Fn { name: ident, generics, body: body.import(context), params: Vec::new(), safety: ast::SafetyKind::Safe, }, Err(err) => ast::ItemKind::Error(err), } } _ => { #[allow(unused)] match missing_associated_item() {} } }; ast::Item { ident, kind, meta: ast::Metadata { span, attributes }, } }) .collect(); } frontend::FullDefKind::Fn { param_env, sig, body, .. } => match expect_body(body, span, "import_item/Fn") { Ok(body) => ast::ItemKind::Fn { name: ident, generics: import_generics(context, &sig.bound_vars, param_env), body: body.import(context), params: import_params(context, &body.params, span), safety: sig.value.safety.import(context), }, Err(err) => ast::ItemKind::Error(err), }, frontend::FullDefKind::Closure { .. } => { ast::ItemKind::Error(assertion_failure("Closure item", span)) } frontend::FullDefKind::Const { param_env, body, .. } => match expect_body(body, span, "import_item/Const") { Ok(body) => ast::ItemKind::Fn { name: ident, generics: param_env.import(context), body: body.import(context), params: Vec::new(), safety: ast::SafetyKind::Safe, }, Err(err) => ast::ItemKind::Error(err), }, frontend::FullDefKind::Static { mutability: true, .. } => ast::ItemKind::Error(unsupported("Mutable static item", 1343, span)), frontend::FullDefKind::Static { mutability: false, body, .. 
} => match expect_body(body, span, "import_item/Static") { Ok(body) => ast::ItemKind::Fn { name: ident, generics: ast::Generics { params: Vec::new(), constraints: Vec::new(), }, body: body.import(context), params: Vec::new(), safety: ast::SafetyKind::Safe, }, Err(err) => ast::ItemKind::Error(err), }, frontend::FullDefKind::Use(Some(( frontend::UsePath { res, segments, rename, .. }, _, ))) => ast::ItemKind::Use { path: segments .iter() .map(|segment| &segment.ident.0) .cloned() .collect(), is_external: res .iter() .any(|x| matches!(x, None | Some(frontend::Res::Err))), rename: rename.clone(), }, frontend::FullDefKind::Mod { .. } => ast::ItemKind::RustModule, frontend::FullDefKind::ExternCrate | frontend::FullDefKind::Use { .. } | frontend::FullDefKind::TyParam | frontend::FullDefKind::ConstParam | frontend::FullDefKind::LifetimeParam | frontend::FullDefKind::Variant | frontend::FullDefKind::Ctor { .. } | frontend::FullDefKind::Field | frontend::FullDefKind::Macro(_) | frontend::FullDefKind::ForeignMod { .. } | frontend::FullDefKind::SyntheticCoroutineBody => return Vec::new(), frontend::FullDefKind::GlobalAsm => { ast::ItemKind::Error(unsupported("Inline assembly item", 1344, span)) } frontend::FullDefKind::AssocConst { .. } | frontend::FullDefKind::AssocFn { .. } | frontend::FullDefKind::AssocTy { .. } => return Vec::new(), // These item kinds are handled by the case of Impl }; items.push(ast::Item { ident, kind, meta: ast::Metadata { span, attributes }, }); items } ================================================ FILE: rust-engine/src/interning.rs ================================================ //! # Interning System //! //! This module provides a minimal system for **global interning** of values in //! Rust. Interning allows you to deduplicate equal values and replace them with //! cheap, copyable handles (`Interned`) that support **O(1) equality**, //! hashing, and compact storage. //! //! ## Core Concepts //! //! 
- [`Interned`]: A compact, copyable handle to a deduplicated value. //! - [`InterningTable`]: Stores interned values and manages uniqueness. //! - [`Internable`]: A trait for types that can be interned. //! //! ## Safety Note //! //! The `.get()` method on `Interned` returns a `&'static T` using an //! internal `transmute`, assuming the backing storage (interning table) never //! remove items from its table. This is guaranteed by the implementation of //! `InterningTable`. use std::{ collections::{HashMap, HashSet}, fmt::Debug, hash::Hash, marker::PhantomData, ops::Deref, sync::{LazyLock, Mutex}, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; /// An interning table storing unique values of `T` and assigning them stable indices. /// /// This type is primarily an implementation detail behind [`Interned`] and /// the [`Internable`] trait. You typically won't use it directly unless you're /// wiring up a new globally‑interned type. pub struct InterningTable { /// The raw items: item at index `n` will be an `Interned { index: n }`. /// Fast lookup. items: Vec, /// A map from `T`s to indexes, for fast interning of existing values. ids: HashMap>, } impl Default for InterningTable { fn default() -> Self { Self { items: Default::default(), ids: Default::default(), } } } /// A statically interned value of type `T`. /// /// An `Interned` is a compact, copyable handle that deduplicates equal values /// and compares in **O(1)** using its index. It behaves like `&'static T` via /// [`Deref`], and can be obtained with [`InternExtTrait::intern`] or /// [`Interned::intern`]. // Note: `Interned` has `PartialEq` only if `T` has `PartialEq`. If we // implement `PartialEq` manually, we loose the ability to pattern match on // constant of this type. This is because of structural equality (see // https://doc.rust-lang.org/stable/std/marker/trait.StructuralPartialEq.html). 
#[derive(Hash, Eq, PartialEq)]
pub struct Interned<T: Internable> {
    // Zero-sized marker tying the handle to its value type.
    phantom: PhantomData<T>,
    // Index into the global `InterningTable<T>` — the whole identity of the handle.
    index: u32,
}

impl<T: Internable> PartialOrd for Interned<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: Internable> Ord for Interned<T> {
    /// Orders handles by interning index (i.e. by first-interned order),
    /// not by the underlying value.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.index.cmp(&other.index)
    }
}

impl<T: Serialize + Internable> Serialize for Interned<T> {
    /// Serializes transparently as the underlying value, not as an index.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.get().serialize(serializer)
    }
}

impl<T: Internable> AsRef<T> for Interned<T> {
    fn as_ref(&self) -> &T {
        (*self).get()
    }
}

impl<'a, T: Deserialize<'a> + Internable> Deserialize<'a> for Interned<T> {
    /// Deserializes the underlying value and (re-)interns it.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'a>,
    {
        Ok(Interned::intern(&T::deserialize(deserializer)?))
    }
}

impl<T: JsonSchema + Internable> JsonSchema for Interned<T> {
    /// Schema is fully transparent: an `Interned<T>` looks exactly like a `T`.
    fn schema_name() -> String {
        T::schema_name()
    }
    fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        T::json_schema(generator)
    }
}

impl<T: Debug + Internable> Debug for Interned<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Interned")
            .field("index", &self.index)
            .field("value", self.get())
            .finish()
    }
}

// Manual `Clone`/`Copy`: the derives would wrongly require `T: Clone`/`T: Copy`,
// while the handle itself is always a plain `u32` copy.
impl<T: Internable> Clone for Interned<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: Internable> Copy for Interned<T> {}

/// A tiny, `FnOnce`-compatible wrapper used to initialize a `LazyLock` with a
/// captured value.
///
/// This is a utility to build `LazyLock` where the initializer needs to own
/// some value prepared in a `const` context.
///
/// This is required since we need an explicit concrete type for the
/// initialization function given to `LazyLock::new`.
///
/// You usually don't need this directly unless you're calling
/// [`InterningTable::new_with_values`].
pub struct ExplicitClosure(T, fn(T) -> R); impl FnOnce<()> for ExplicitClosure { type Output = R; extern "rust-call" fn call_once(self, _: ()) -> Self::Output { let Self(input, function) = self; function(input) } } impl InterningTable { fn try_intern(&mut self, value: &T) -> Option> { Some(if let Some(interned) = self.ids.get(value) { *interned } else { let index = self.items.len(); self.items.push(value.clone()); let handle = Interned { phantom: PhantomData, index: index.try_into().ok()?, }; self.ids.insert(value.clone(), handle); handle }) } fn get(&self, interned: Interned) -> &T { &self.items[interned.index as usize] } /// Creates a global `LazyLock` interning table prepopulated with `values`, /// and returns both the lock and the corresponding `Interned` handles. /// /// # Panics /// /// Panics if `values` contains duplicates (by `Eq`) or if `N` is greater /// than `u32::MAX`. pub const fn new_with_values( values: fn() -> [T; N], ) -> (LazyLockNewWithValue, [Interned; N]) { assert!(N < u32::MAX as usize); let mut i = 0; let mut interned_values: [Interned; N] = [Interned { phantom: PhantomData, index: 0, }; N]; while i < N { interned_values[i].index = i as u32; i += 1; } let lazy_lock = LazyLock::new(ExplicitClosure(values, |values| { let values = values(); { // Ensure `value` has no duplicate. let set: HashSet<_> = values.iter().collect(); if set.len() != values.len() { panic!("new_with_values: the input has duplicates"); } } let mut table = InterningTable::default(); for value in values { if table.try_intern(&value).is_none() { unreachable!( "we asserted `N < u32::MAX`, the length of the internal vector `table` should be less than `u32::MAX`" ) } } Mutex::new(table) })); (lazy_lock, interned_values) } } /// A type alias representing a lazily initialized `Mutex>` /// backed by a fixed-size array initializer. /// /// This is the return type of [`InterningTable::new_with_values`]. 
pub type LazyLockNewWithValue = LazyLock>, ExplicitClosure [T; N], Mutex>>>; /// Types that have a single, process‑global interning table. /// /// Implement this for your type to opt in to interning: /// provide a `static` (usually a `LazyLock>>`) /// and return a reference to it. pub trait Internable: Sized + Hash + Eq + Clone + Send + 'static { /// Returns the global interning table for `Self`. fn interning_table() -> &'static Mutex>; /// Interns a `value` and returns its compact handle. /// /// If an equal value has been interned before, this returns the existing /// handle; otherwise it inserts the value into the global table. fn intern(&self) -> Interned { Interned::intern(self) } } impl Interned { /// Interns a `value` and returns its compact handle. /// /// If an equal value has been interned before, this returns the existing /// handle; otherwise it inserts the value into the global table. pub fn intern(value: &T) -> Self { { // Invariant: the interning mutex is only locked here, and InterningTable::try_intern // is panic-free (and does not invoke user code that may panic). Therefore, no // panic can occur while the mutex is held, so the mutex cannot be poisoned. // If this ever panics, our invariant was broken elsewhere. let mut table = T::interning_table() .lock() .expect("interning table mutex poisoned"); table.try_intern(value) } .unwrap_or_else(|| { panic!( "more than `u32::MAX` values have been interned for type `{}`", std::any::type_name::() ) }) } /// Returns a `&'static T` for this handle. /// /// # Safety & Lifetimes /// /// This method relies on the fact that the backing storage lives for the /// entire program (it is kept in a `static` global table). The `'static` /// reference is sound as long as values are never removed from that table. /// This implementation uses `transmute` internally for that reason. 
pub fn get(self) -> &'static T { let table = T::interning_table().lock().unwrap(); let local_reference = table.get(self); let static_reference: &'static T = unsafe { std::mem::transmute(local_reference) }; static_reference } } impl Deref for Interned { type Target = T; /// Dereferences to the underlying value (`&'static T`). /// /// Equivalent to calling [`Interned::get`]. fn deref(&self) -> &Self::Target { self.get() } } ================================================ FILE: rust-engine/src/lib.rs ================================================ //! The Rust engine of hax. #![feature(rustc_private)] #![feature(fn_traits, unboxed_closures)] #![warn( rustdoc::broken_intra_doc_links, missing_docs, unused_qualifications, unused_crate_dependencies )] pub mod ast; pub mod attributes; pub mod backends; pub mod debugger; pub mod hax_io; pub mod import_thir; pub mod interning; pub mod names; pub mod ocaml_engine; pub mod phase; pub mod printer; pub mod resugarings; pub mod symbol; ================================================ FILE: rust-engine/src/main.rs ================================================ use hax_rust_engine::{ backends, ocaml_engine::{self, Response}, }; use hax_types::{cli_options::Backend, engine_api::File}; use std::collections::HashMap; fn main() { let (value, table) = hax_rust_engine::hax_io::read_engine_input_message().destruct(); ocaml_engine::initialize(ocaml_engine::Meta { hax_version: value.hax_version, impl_infos: value.impl_infos, debug_bind_phase: value.backend.debug_engine.is_some(), profiling: value.backend.profile, }); let items = match value.input { hax_types::driver_api::Items::Legacy(input) => { let query = hax_rust_engine::ocaml_engine::QueryKind::ImportThir { input, translation_options: value.backend.translation_options, }; let Some(Response::ImportThir { output }) = query.execute(Some(table)) else { panic!() }; output } hax_types::driver_api::Items::FullDef(items) => { let items: Vec<_> = items .into_iter() .filter(|item| { 
!matches!( item.kind, hax_frontend_exporter::FullDefKind::Use(_) | hax_frontend_exporter::FullDefKind::ExternCrate ) }) .collect(); let items_by_def_id = HashMap::from_iter( items .iter() .map(|item| (item.this.contents().def_id.clone(), item)), ); items .iter() .flat_map(|item| hax_rust_engine::import_thir::import_item(item, &items_by_def_id)) .collect() } }; let files = match &value.backend.backend { Backend::Coq | Backend::Ssprove | Backend::Easycrypt | Backend::ProVerif { .. } => panic!( "The Rust engine cannot be called with backend {}.", value.backend.backend ), Backend::Fstar(_) => { let mut items = items; hax_rust_engine::phase::Phase::apply(&backends::fstar::FStarBackend, &mut items); let query = hax_rust_engine::ocaml_engine::QueryKind::Print { printer: value.backend.backend, input: items, }; let Some(Response::PrintOk) = query.execute(None) else { panic!() }; return; } Backend::Lean => backends::apply_backend(backends::lean::LeanBackend, items), Backend::Rust => backends::apply_backend(backends::rust::RustBackend, items), Backend::Debugger { interactive } => { use hax_rust_engine::debugger::*; if *interactive { http_interactive_debugger(items); vec![] } else { let mut state = State { initial_items: items, requests: vec![], }; let contents = match state.apply(Request::DumpAst(DumpAstOptions::default())) { Response::TypedDumpedAst(items) => { serde_json::to_string_pretty(&items).unwrap() } Response::DumpedAst(value) => serde_json::to_string_pretty(&value).unwrap(), _ => todo!(), }; vec![File { path: "ast.json".into(), contents, sourcemap: None, }] } } Backend::GenerateRustEngineNames => vec![File { path: "generated.rs".into(), contents: hax_rust_engine::names::codegen::export_def_ids_to_mod(items), sourcemap: None, }], }; for file in files { hax_rust_engine::hax_io::write(&hax_types::engine_api::protocol::FromEngine::File(file)); } } ================================================ FILE: rust-engine/src/names.rs 
================================================ //! This module provides a list of handy `DefId` for the engine. //! The list of `DefId`s comes from the crate `/engine/names`: any name mentionned //! in that crate will be provided here automatically. //! //! For example, to be able to resugar `std::ops::Add::add(x, y)` into `x + y`, //! we need to: //! 1. match on the expression `std::ops::Add::add(x, y)`, figure out it is the //! application of the function denoted by the global identifier //! `std::ops::Add::add` with arguments `x` and `y`. //! 2. check that global identifier `id: GlobalId` `std::ops::Add::add` is //! indeed `std::ops::Add::add`. //! //! Point (2.) seems a bit tautological, but we need to write a comparison like //! `some_id == the_function_add`. This module basically provides such //! `the_function_add` symbols. //! //! As an example, the names `std::option::Option::Some` and `None` will be provided by this module as: //! ```rust,ignore //! mod std { //! mod option { //! mod Option { //! fn Some() -> DefId { ... } //! fn None() -> DefId { ... } //! } //! } //! } //! ``` pub use crate::ast::identifiers::global_id::generated_names::{codegen, root::*}; ================================================ FILE: rust-engine/src/ocaml_engine.rs ================================================ //! This module implements an interface to the OCaml hax engine. Via this //! interface, the rust engine can communicate with the OCaml engine, and reuse //! some of its components. use std::{io::BufRead, sync::OnceLock}; use hax_frontend_exporter::{ ThirBody, id_table::{Table, WithTable}, }; use hax_types::engine_api::protocol::{FromEngine, ToEngine}; use serde::Deserialize; /// A query for the OCaml engine #[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)] pub struct Query { #[serde(flatten)] meta: Meta, /// The kind of query we want to send to the engine kind: QueryKind, } /// The metadata required to perform a query. 
#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)] pub struct Meta { /// The version of hax currently used pub hax_version: String, /// Dictionary from `DefId`s to `impl_infos` pub impl_infos: Vec<( hax_frontend_exporter::DefId, hax_frontend_exporter::ImplInfos, )>, /// Enable debugging of phases in the OCaml engine pub debug_bind_phase: bool, /// Enable profiling in the OCaml engine pub profiling: bool, } static STATE: OnceLock = OnceLock::new(); /// Initialize query metadata. pub fn initialize(meta: Meta) { STATE .set(meta) .expect("`ocaml_engine::initialize` was called more than once") } /// The payload of the query. [`Response`] below mirrors this enum to represent /// the response from the engine. #[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)] pub enum QueryKind { /// Ask the OCaml engine to import the given THIR from the frontend ImportThir { /// The input THIR items input: Vec>, /// Translation options which contains include clauses (items filtering) translation_options: hax_types::cli_options::TranslationOptions, }, /// Ask the OCaml engine to run given phases on given items ApplyPhases { /// The phases to run. See `untyped_phases.ml`. phases: Vec, /// The items on which the phases will be applied. input: Vec, }, /// Ask the OCaml engine to call an OCaml printer Print { /// Which printer to use printer: hax_types::cli_options::Backend<()>, /// The items after applying the phases. 
input: Vec, }, } /// A Response after a [`Query`] #[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)] pub enum Response { /// Return imported THIR as an internal AST from Rust engine ImportThir { /// The output Rust AST items output: Vec, }, /// Return items after phase application ApplyPhases { /// The output Rust AST items after phases output: Vec, }, /// Printing was done successfully PrintOk, } /// Extends the common `FromEngine` messages with one extra case: `Response`. #[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)] #[serde(untagged)] pub enum ExtendedFromEngine { /// A standard `FromEngine` message FromEngine(FromEngine), /// A `Response` Response(Response), } impl QueryKind { /// Execute the query synchronously. pub fn execute(self, table: Option) -> Option { let query = Query { meta: STATE .get() .expect("`ocaml_engine::initialize` should be called first") .clone(), kind: self, }; use std::io::Write; use std::process::Command; macro_rules! send { ($where: expr, $value:expr) => { serde_json::to_writer(&mut $where, $value).unwrap(); $where.write_all(b"\n").unwrap(); $where.flush().unwrap(); }; } let mut engine_subprocess = Command::new(std::env::var("HAX_ENGINE_BINARY").unwrap_or("hax-engine".into())) .arg("driver_rust_engine") .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) .spawn() .unwrap(); let mut stdin = std::io::BufWriter::new( engine_subprocess .stdin .as_mut() .expect("Could not write on stdin"), ); if let Some(table) = table { WithTable::run(table, query, |with_table| { send!(stdin, with_table); }); } else { send!(stdin, &(vec![] as Vec<()>, query)); } let mut response = None; let stdout = std::io::BufReader::new(engine_subprocess.stdout.take().unwrap()); // TODO: this should be streaming (i.e. use a `LineAsEOF` reader wrapper that consumes a reader until `\n` occurs) // See https://github.com/cryspen/hax/issues/1537. 
for slice in stdout.split(b'\n') { let msg = (|| { let slice = slice.ok()?; let mut de = serde_json::Deserializer::from_slice(&slice); de.disable_recursion_limit(); let de = serde_stacker::Deserializer::new(&mut de); let msg = ExtendedFromEngine::deserialize(de); msg.ok() })() .expect( "Hax engine sent an invalid json value. \ This might be caused by debug messages on stdout, \ which is reserved for JSON communication with cargo-hax", ); match msg { ExtendedFromEngine::Response(res) => response = Some(res), ExtendedFromEngine::FromEngine(FromEngine::Exit) => break, // Proxy messages from the OCaml engine ExtendedFromEngine::FromEngine(from_engine) => { crate::hax_io::write(&from_engine); if from_engine.requires_response() { let response: ToEngine = crate::hax_io::read_to_engine_message(); send!(stdin, &response); } } } } drop(stdin); let exit_status = engine_subprocess.wait().unwrap(); if !exit_status.success() { panic!("ocaml engine crashed"); } response } } ================================================ FILE: rust-engine/src/phase/explicit_monadic.rs ================================================ use std::fmt::Debug; use crate::ast::identifiers::GlobalId; use crate::ast::*; use crate::ast::{diagnostics::*, visitors::*}; use crate::phase::Phase; use crate::names::rust_primitives::hax::explicit_monadic::*; /// Monadic Phase /// /// This module defines a phase that makes the monadic encoding explicit by introducing calls to hax /// primitives (`pure` and `lift`) when necessary. /// /// # Details /// /// In backends with a monadic encoding (Lean for instance), rust computations that can *crash* are /// wrapped in an error Monad (say `RustM`): a function `fn f(x:u32) -> u32` will be extracted to /// something like `def f (x:u32) : RustM u32`. There are two challenges in this encoding : /// /// 1. Some expressions cannot panic (literals, consts, constructors for enums, etc) and should be /// wrapped in the monad[^coe]. 
This phase inserts explicit calls to `pure` to that aim. /// /// 2. Language constructs (if-then-else, `match`, etc.) and rust functions still expect rust values /// as input, not monadic ones. This phase inserts explicit calls to `lift` to materialize the /// sub-expressions that return a monadic result where a value is expected. The Lean backend turns /// them into explicit lifts `(← ..)`, which implicitly introduces a monadic bind /// /// This phase expects all function and closure bodies to be monadic computations by default. /// /// [^coe]: While implicit coercions can sometime be enough, they can also badly interact with /// inference, typically when dealing with branches (like if-then-else) where some branches are /// pure and some are not. #[derive(Default, Debug)] pub struct ExplicitMonadic; /// Stateless visitor #[setup_error_handling_struct] #[derive(Default)] struct ExplicitMonadicVisitor; /// Status of a rust expression. Computations are possibly panicking, while values are pure #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)] enum MonadicStatus { Computation, Value, } impl Phase for ExplicitMonadic { fn apply(&self, items: &mut Vec) { ExplicitMonadicVisitor::default().visit(items) } } impl ExplicitMonadicVisitor { /// Helper while waiting for a proper ast API. Wraps an expression in an application node, where /// the head is a global id fn wrap_app(expr: &Expr, head_id: GlobalId) -> Box { let expr = expr.clone(); Box::new(ExprKind::App { head: Expr { kind: Box::new(ExprKind::GlobalId(head_id)), ty: Ty(Box::new(TyKind::Arrow { inputs: vec![expr.ty.clone()], output: expr.ty.clone(), })), meta: Metadata { span: expr.meta.span, attributes: vec![], }, }, args: vec![expr], generic_args: vec![], bounds_impls: vec![], trait_: None, }) } /// Helper to coerce a expression into a given status. 
`from` should be the status of `expr` fn coerce(&mut self, expr: &mut Expr, from: MonadicStatus, to: MonadicStatus) { // If the status is already correct, nothing to do. if from == to { return; } expr.kind = ExplicitMonadicVisitor::wrap_app( expr, match to { // from = Value, to = Computation : we insert `pure` MonadicStatus::Computation => pure, // from = Computation, to = Value : we insert `lift` MonadicStatus::Value => lift, }, ); } } impl VisitorWithContext for ExplicitMonadicVisitor { fn context(&self) -> Context { Context::Phase(stringify!(ExplicitMonadic).into()) } } impl ExplicitMonadicVisitor { fn visit_expr_coerce(&mut self, constraint: MonadicStatus, expr: &mut Expr) { // Expression can force a status (returned as `Some(...)`), or be "transparent" (typically // for control-flow) and just propagate the constraint. let opt_status = match &mut *expr.kind { // Control flow nodes ExprKind::If { condition, then, else_, } => { self.visit_expr_coerce(MonadicStatus::Value, condition); [Some(then), else_.as_mut()] .into_iter() .flatten() .for_each(|branch| self.visit_expr_coerce(MonadicStatus::Computation, branch)); Some(MonadicStatus::Computation) } ExprKind::Match { scrutinee, arms } => { self.visit_expr_coerce(MonadicStatus::Value, scrutinee); arms.iter_mut().for_each(|arm| { if let Some(Guard { kind: GuardKind::IfLet { rhs, .. }, .. }) = &mut arm.guard { self.visit_expr_coerce(MonadicStatus::Value, rhs); }; self.visit_expr_coerce(MonadicStatus::Computation, &mut arm.body) }); Some(MonadicStatus::Computation) } ExprKind::Block { body, .. } => { self.visit_expr_coerce(constraint, body); None } ExprKind::Break { .. } | ExprKind::Return { .. } | ExprKind::Continue { .. } | ExprKind::Loop { .. 
} => { unreachable_by_invariant!(Functionalize_loops) } // Opaque nodes ExprKind::Let { lhs: _, rhs, body } => { self.visit_expr_coerce(MonadicStatus::Computation, rhs); self.visit_expr_coerce(MonadicStatus::Computation, body); Some(MonadicStatus::Computation) } ExprKind::App { head, args, .. } => { self.visit_expr_coerce(MonadicStatus::Value, head); args.iter_mut() .for_each(|arg| self.visit_expr_coerce(MonadicStatus::Value, arg)); if let ExprKind::GlobalId(head) = &*head.kind && head.is_projector() { // Constructors for structures and enums are values Some(MonadicStatus::Value) } else if args.is_empty() { // Constants are values Some(MonadicStatus::Value) } else { // Other function calls are computations Some(MonadicStatus::Computation) } } ExprKind::Array(exprs) => { exprs .iter_mut() .for_each(|expr| self.visit_expr_coerce(MonadicStatus::Value, expr)); Some(MonadicStatus::Value) } ExprKind::Construct { fields, base, .. } => { fields .iter_mut() .map(|(_, e)| e) .chain(base.iter_mut()) .for_each(|expr| self.visit_expr_coerce(MonadicStatus::Value, expr)); Some(MonadicStatus::Value) } ExprKind::Assign { value: inner, .. } | ExprKind::Borrow { inner, .. } | ExprKind::AddressOf { inner, .. } => { self.visit_expr_coerce(MonadicStatus::Value, inner); Some(MonadicStatus::Value) } ExprKind::Ascription { e, ty } => { self.visit_expr_coerce(MonadicStatus::Value, e); self.visit(ty); Some(MonadicStatus::Value) } ExprKind::Closure { params: _, body, captures, } => { captures .iter_mut() .for_each(|capture| self.visit_expr_coerce(MonadicStatus::Value, capture)); self.visit_expr_coerce(MonadicStatus::Computation, body); Some(MonadicStatus::Value) } ExprKind::Literal(_) | ExprKind::GlobalId(_) | ExprKind::LocalId(_) | ExprKind::Quote { .. 
} | ExprKind::Error(_) => Some(MonadicStatus::Value), ExprKind::Resugared(_) => { unreachable!("Resugarings should happen after phases") } }; if let Some(status) = opt_status { self.coerce(expr, status, constraint) } } } impl AstVisitorMut for ExplicitMonadicVisitor { setup_error_handling_impl!(); fn visit_expr(&mut self, x: &mut Expr) { // Entry points are functions (items and impl items), which start with a `do` block, // therefore a monadic computation self.visit_expr_coerce(MonadicStatus::Computation, x) } fn visit_ty(&mut self, x: &mut Ty) { if let TyKind::Array { length, .. } = x.kind_mut() { self.visit_expr_coerce(MonadicStatus::Value, length); }; } fn visit_generic_value(&mut self, x: &mut GenericValue) { if let GenericValue::Expr(expr) = x { self.visit_expr_coerce(MonadicStatus::Value, expr); }; } } ================================================ FILE: rust-engine/src/phase/filter_unprintable_items.rs ================================================ use crate::ast::*; use crate::phase::Phase; /// Phase to filter unprintable items /// /// This phase filters out items that are not printable (Error, NotImplementedYet, Use). #[derive(Default, Debug)] pub struct FilterUnprintableItems; impl Phase for FilterUnprintableItems { fn apply(&self, items: &mut Vec) { items.retain(|item| match &item.kind { // Items to remove: ItemKind::Error(_) | ItemKind::NotImplementedYet | ItemKind::Use { .. } | ItemKind::RustModule => false, // Items to keep: ItemKind::Fn { .. } | ItemKind::TyAlias { .. } | ItemKind::Type { .. } | ItemKind::Trait { .. } | ItemKind::Impl { .. } | ItemKind::Alias { .. } | ItemKind::Resugared(_) | ItemKind::Quote { .. } => true, }); } } ================================================ FILE: rust-engine/src/phase/legacy.rs ================================================ //! This module exposes the legacy phases written in OCaml in the OCaml engine. 
use crate::{ ast::Item, phase::{Phase, PhaseKind}, }; /// Group consecutive ocaml phases as one monolithic phase, so that we avoid extra roundtrips to the OCaml engine. pub fn group_consecutive_ocaml_phases(phases: Vec) -> Vec> { let mut output: Vec> = vec![]; let mut ocaml_phases = vec![]; let mut phases = phases.into_iter(); struct LegacyOCamlPhases { phases: Vec, } impl Phase for LegacyOCamlPhases { fn apply(&self, items: &mut Vec) { apply_legacy_phases(&self.phases, items); } } loop { let phase = phases.next(); if let Some(PhaseKind::Legacy(ocaml_phase)) = phase { ocaml_phases.push(ocaml_phase) } else { if !ocaml_phases.is_empty() { output.push(Box::new(LegacyOCamlPhases { phases: std::mem::take(&mut ocaml_phases), })); } if let Some(phase) = phase { output.push(Box::new(phase)); } else { break; } } } output } fn apply_legacy_phases(phases: &[LegacyOCamlPhase], items: &mut Vec) { use crate::ocaml_engine::Response; let query = crate::ocaml_engine::QueryKind::ApplyPhases { input: std::mem::take(items), phases: phases.iter().map(ToString::to_string).collect(), }; let Some(Response::ApplyPhases { output }) = query.execute(None) else { panic!() }; *items = output; } macro_rules! make_ocaml_legacy_phase { ($($name:ident),*) => { pastey::paste!{ /// The list of exposed OCaml phases. 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)] pub enum LegacyOCamlPhase { $( #[doc = concat!("The phase ", stringify!($name), " from the OCaml engine.")] [< $name:camel >] ),* } impl std::fmt::Display for LegacyOCamlPhase { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { $(Self::[< $name:camel >] => stringify!($name).fmt(f)),* } } } impl Phase for LegacyOCamlPhase { fn apply(&self, items: &mut Vec) { apply_legacy_phases(&[*self], items); } } } }; } impl From for PhaseKind { fn from(legacy_phase: LegacyOCamlPhase) -> Self { Self::Legacy(legacy_phase) } } make_ocaml_legacy_phase!( and_mut_defsite, bundle_cycles, cf_into_monads, direct_and_mut, drop_blocks, drop_match_guards, drop_references, drop_return_break_continue, drop_sized_trait, explicit_conversions, functionalize_loops, hoist_disjunctive_patterns, local_mutation, newtype_as_refinement, reconstruct_asserts, reconstruct_for_index_loops, reconstruct_for_loops, reconstruct_question_marks, reconstruct_while_loops, reorder_fields, rewrite_control_flow, rewrite_local_self, simplify_hoisting, simplify_match_return, simplify_question_marks, sort_items, specialize, traits_specs, transform_hax_lib_inline, trivialize_assign_lhs, reject_arbitrary_lhs, reject_continue, reject_question_mark, reject_raw_or_mut_pointer, reject_early_exit, reject_as_pattern, reject_dyn, reject_trait_item_default, reject_unsafe, reject_impl_type_method, hoist_side_effects ); ================================================ FILE: rust-engine/src/phase/reject_not_do_lean_dsl.rs ================================================ use crate::ast::*; use crate::ast::{diagnostics::*, visitors::*}; use crate::phase::Phase; /// Rejection Phase for patterns unsupported by Lean's do-notation DSL /// /// This phase rejects unsupported interleavings of expressions and statements. /// It is built as a visitor. 
#[derive(Default)] pub struct RejectNotDoLeanDSL; /// Expressions are either do-statements or do-expressions. The former can be downgraded into the /// latter. #[derive(Clone, Copy, Debug)] enum DoDSLExprKind { Statement, Expression, } /// Gives the "kind" of an expression in the do-notation DSL fn dsl_expr_kind(expr_kind: &ExprKind) -> DoDSLExprKind { match expr_kind { ExprKind::If { .. } | ExprKind::Match { .. } | ExprKind::Let { .. } => { DoDSLExprKind::Statement } _ => DoDSLExprKind::Expression, } } /// The default value for entry points of expression (function items, function impl items) impl Default for DoDSLExprKind { fn default() -> Self { Self::Statement } } /// Visitor internal state #[setup_error_handling_struct] #[derive(Default)] pub struct RejectNotDoLeanDSLVisitor { /// Expected kind for the visited expression. Used by `visit_expr`, ignored by other methods dsl_expr_kind: DoDSLExprKind, } impl VisitorWithContext for RejectNotDoLeanDSLVisitor { fn context(&self) -> Context { Context::Phase(stringify!(RejectNotDoLeanDSL).to_string()) } } impl AstVisitorMut for RejectNotDoLeanDSLVisitor { setup_error_handling_impl!(); fn visit_expr(&mut self, expr: &mut Expr) { use DoDSLExprKind::*; let parent_dsl_expr_kind = self.dsl_expr_kind; self.dsl_expr_kind = match (self.dsl_expr_kind, dsl_expr_kind(&expr.kind)) { // A do-expression cannot be upgraded to a do-statement, we throw an error (Expression, Statement) => { self.error( expr.clone(), DiagnosticInfoKind::ExplicitRejection { reason: "This interleaving of expression and statements does not fit in Lean's do-notation DSL.\ \nYou may try hoisting out let-bindings and control-flow.".to_string(), issue_id: Some(1741), }, ); Statement } // Closures body are do-statement, as a `do` keyword is introduced (_, _) if matches!(&*expr.kind, ExprKind::Closure { .. 
}) => Statement, // In other cases, we keep the computed kind (_, kind) => kind, }; self.visit_inner(expr); self.dsl_expr_kind = parent_dsl_expr_kind; } /// Visitor for types. Array lengths can be any (const) expression, so they are checked for dsl /// patterns (as DoDSL-expressions) fn visit_ty(&mut self, ty: &mut Ty) { if let TyKind::Array { length, .. } = ty.kind_mut() { // The Lean Backend does not support computation in array lengths yet. It should be // possible to have do-blocks, and treat them like constants. See // https://github.com/cryspen/hax/issues/1713 let parent_dsl_expr_kind = self.dsl_expr_kind; self.dsl_expr_kind = DoDSLExprKind::Expression; self.visit_inner(&mut *length); self.dsl_expr_kind = parent_dsl_expr_kind; } } } impl Phase for RejectNotDoLeanDSL { fn apply(&self, items: &mut Vec) { // Entry points are statements RejectNotDoLeanDSLVisitor::default().visit(items) } } ================================================ FILE: rust-engine/src/phase.rs ================================================ //! A phase rewrites the AST. use crate::ast::Item; // Special kind of unreachability that should be prevented by a phase macro_rules! unreachable_by_invariant { ($phase:ident) => { unreachable!( "The phase {} should make this unreachable", stringify!($phase) ) }; } pub(crate) use unreachable_by_invariant; /// A Rust phase that operates on the AST. pub trait Phase { /// Apply the phase on items. /// A phase may transform an item into zero, one or more items. fn apply(&self, items: &mut Vec); } pub mod legacy; mod explicit_monadic; mod filter_unprintable_items; mod reject_not_do_lean_dsl; macro_rules! declare_phase_kind { {$($name:ident = $phase:expr),*$(,)?} => { /// Enumeration of the available phases. #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)] pub enum PhaseKind { $( #[doc = concat!("The phase [`", stringify!($phase), "].")] $name, )* /// A legacy (OCaml) phase. 
Legacy(crate::phase::legacy::LegacyOCamlPhase), } impl crate::phase::Phase for PhaseKind { fn apply(&self, items: &mut Vec) { match *self { $(Self::$name => $phase.apply(items),)* Self::Legacy(phase) => phase.apply(items), } } } }; } declare_phase_kind! { ExplicitMonadic = explicit_monadic::ExplicitMonadic, RejectNotDoLeanDSL = reject_not_do_lean_dsl::RejectNotDoLeanDSL, FilterUnprintableItems = filter_unprintable_items::FilterUnprintableItems, } ================================================ FILE: rust-engine/src/printer/pretty_ast/debug_json.rs ================================================ use std::fmt::{Debug, Display}; use crate::printer::pretty_ast::ToDocument; /// This type is primarily useful inside printer implementations when you want a /// low-friction way to inspect an AST fragment. /// /// # What it does /// - Appends a JSON representation of the wrapped value to /// `"/tmp/hax-ast-debug.json"` (one JSON document per line). /// - Implements [`std::fmt::Display`] to print a `just` invocation you can paste in a shell /// to re-open that same JSON by line number: /// `just debug-json ` /// /// # Example /// ```rust /// # use hax_rust_engine::printer::pretty_ast::DebugJSON; /// # #[derive(serde::Serialize)] /// # struct Small { x: u32 } /// let s = Small { x: 42 }; /// // Prints something like: `just debug-json 17`. /// println!("{}", DebugJSON(&s)); /// // Running `just debug-json 17` will print `{"x":42}` /// ``` /// /// # Notes /// - This is a **debugging convenience** and intentionally has a side-effect (file write). /// Avoid keeping it in user-facing output paths. /// - The file grows over time; occasionally delete it if you no longer need historical entries. 
pub struct DebugJSON(pub T); impl Display for DebugJSON { #[cfg(not(unix))] fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "") } #[cfg(unix)] fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { const PATH: &str = "/tmp/hax-ast-debug.json"; /// Write a new JSON as a line at the end of `PATH` fn append_line_json(value: &serde_json::Value) -> std::io::Result { use std::io::{BufRead, BufReader, Write}; cleanup(); let file = std::fs::OpenOptions::new() .read(true) .append(true) .create(true) .open(PATH)?; let count = BufReader::new(&file).lines().count(); writeln!(&file, "{value}")?; Ok(count) } /// Drop the file at `PATH` when we first write fn cleanup() { static DID_RUN: AtomicBool = AtomicBool::new(false); use std::sync::atomic::{AtomicBool, Ordering}; if DID_RUN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_ok() { let _ignored = std::fs::remove_file(PATH); } } if let Ok(id) = append_line_json(&serde_json::to_value(&self.0).unwrap()) { write!(f, "`just debug-json {id}`") } else { write!(f, "") } } } impl ToDocument for DebugJSON { fn to_document(&self, _: &P) -> super::DocBuilder { pretty::DocAllocator::as_string( &pretty::BoxAllocator, serde_json::to_string_pretty(&self.0).unwrap_or_else(|_| format!("{:#?}", &self.0)), ) } } ================================================ FILE: rust-engine/src/printer/pretty_ast/to_document.rs ================================================ use pretty::{BoxAllocator, DocAllocator}; use std::ops::Deref as _; /// A convenience alias tying the document builder to the global /// [`pretty::BoxAllocator`]. pub type DocBuilder = pretty::DocBuilder<'static, BoxAllocator, A>; /// Convert a value into a document by-value. /// /// Implementations typically delegate to [`ToDocument`] after adjusting the /// input ownership (e.g., cloning or borrowing). It allows helpers to accept /// either borrowed or owned values transparently. 
pub trait ToDocumentOwned { /// Produce a document using the provided printer. fn to_document_owned(self, printer: &P) -> DocBuilder; } impl> ToDocumentOwned for &T { fn to_document_owned(self, printer: &P) -> DocBuilder { self.to_document(printer) } } impl ToDocumentOwned for DocBuilder { fn to_document_owned(self, _printer: &P) -> DocBuilder { self } } impl ToDocumentOwned for &str { fn to_document_owned(self, _printer: &P) -> DocBuilder { DocAllocator::as_string(&BoxAllocator, self) } } impl ToDocumentOwned for String { fn to_document_owned(self, _printer: &P) -> DocBuilder { DocAllocator::as_string(&BoxAllocator, self) } } impl ToDocumentOwned for Option<&str> { fn to_document_owned(self, printer: &P) -> DocBuilder { self.map(|s| s.to_document_owned(printer)) .unwrap_or_else(|| DocAllocator::nil(&BoxAllocator)) } } /// Convert a value into a document using the supplied printer. /// /// This is the primary trait invoked throughout the pretty-printing pipeline; /// it mirrors [`pretty::Pretty::pretty`] while giving access to printer-specific /// context. pub trait ToDocument { /// Produce a document using the provided printer reference. 
fn to_document(&self, printer: &P) -> DocBuilder; } impl> ToDocument for Box { fn to_document(&self, printer: &P) -> DocBuilder { self.deref().to_document(printer) } } impl> ToDocument for Option { fn to_document(&self, printer: &P) -> DocBuilder { self.as_ref() .map(|value| value.to_document(printer)) .unwrap_or_else(|| DocAllocator::nil(&BoxAllocator)) } } impl ToDocument for String { fn to_document(&self, _printer: &P) -> DocBuilder { DocAllocator::as_string(&BoxAllocator, self) } } impl ToDocument for DocBuilder { #[inline(always)] fn to_document(&self, _printer: &P) -> DocBuilder { self.clone() } } impl ToDocument for &T where T: ToDocument, { #[inline(always)] fn to_document(&self, printer: &P) -> DocBuilder { (*self).to_document(printer) } } ================================================ FILE: rust-engine/src/printer/pretty_ast.rs ================================================ //! Pretty-printing support for the hax AST. //! //! This module defines the trait [`PrettyAst`], which is the **primary trait a printer should //! implement**. //! //! # Quickstart //! In most printers you: //! 1. Implement [`Printer`] for your printer type, //! 2. Implement [`PrettyAst`] for that printer type, //! 3. Call `ast_value.to_document(&print)` on AST values. //! //! See [`crate::backends`] for backend and printer examples. use std::{borrow::Cow, fmt::Display}; use super::*; use crate::ast::*; use pretty::BoxAllocator; use crate::symbol::Symbol; use literals::*; use resugared::*; mod debug_json; mod to_document; pub use debug_json::*; pub use to_document::*; #[macro_export] /// Similar to [`std::todo`], but returns a document instead of panicking with a message. /// In addition, `todo_document!` accepts a prefix to point to a specific issue number. /// /// ## Examples: /// - `todo_document!(allocator)` /// - `todo_document!(allocator, "This is a todo")` /// - `todo_document!(allocator, issue 42)` /// - `todo_document!(allocator, issue 42, "This is a todo")` macro_rules! 
todo_document {
    ($allocator:ident, issue $issue:literal) => {
        // Placeholder carrying the source line, tagged with an issue number.
        { return $allocator.todo_document(&format!("TODO_LINE_{}", std::line!()), Some($issue)); }
    };
    ($allocator:ident, issue $issue:literal, $($tt:tt)*) => {
        {
            let message = format!($($tt)*);
            return $allocator.todo_document(&message, Some($issue));
        }
    };
    ($allocator:ident,) => {
        { return $allocator.todo_document(&format!("TODO_LINE_{}", std::line!()), None); }
    };
    ($allocator:ident, $($tt:tt)*) => {
        {
            let message = format!($($tt)*);
            return $allocator.todo_document(&message, None);
        }
    };
}
pub use todo_document;

/// Expand a list of values into documents and concatenate them in order.
///
/// This helper mirrors [`pretty::docs!`] but automatically calls
/// [`ToDocumentOwned::to_document_owned`] on each argument before appending it
/// to the accumulator that starts as [`PrettyAstExt::nil`].
#[macro_export]
macro_rules! pretty_ast_docs {
    ($printer: expr, $docs:expr) => {{
        use $crate::printer::pretty_ast::{ToDocumentOwned};
        $docs.to_document_owned($printer)
    }};
    ($printer: expr, $($docs:expr),*$(,)?) => {{
        use $crate::printer::pretty_ast::{ToDocumentOwned};
        // `nil!()` is one of the proxy macros installed by
        // `install_pretty_helpers!`, expected in scope at the expansion site.
        nil!() $(.append($docs.to_document_owned($printer)))*
    }};
}
pub use pretty_ast_docs;

/// Convert a collection of values into documents separated by another
/// document.
///
/// It forwards to [`PrettyAstExt::intersperse`] after materialising the
/// separator. The macro exists so call sites can stay concise while still
/// benefiting from the allocator captured by [`install_pretty_helpers!`].
#[macro_export]
macro_rules! pretty_ast_intersperse {
    ($printer: expr, $docs:expr, $sep: expr$(,)?) => {{
        let docs = $docs;
        let sep = $sep;
        $crate::printer::pretty_ast::PrettyAstExt::intersperse($printer, docs, sep)
    }};
}
pub use pretty_ast_intersperse;

#[macro_export]
/// Install pretty-printing helpers partially applied with a given local
/// allocator.
///
/// This macro declares a set of small, local macros that proxy to the
/// underlying [`pretty::DocAllocator`] methods and macro while capturing your
/// allocator value. It keeps printing code concise and avoids passing the
/// allocator around explicitly.
///
/// # Syntax
/// ```rust,ignore
/// install_pretty_helpers!(alloc_ident: AllocatorType)
/// ```
///
/// - `alloc_ident`: the in-scope variable that implements both
///   [`pretty::DocAllocator`] and [`Printer`].
/// - `AllocatorType`: the concrete type of that variable.
///
/// # What gets installed
/// - macro shorthands for common allocator methods:
///   [`PrettyAstExt::nil`], [`PrettyAstExt::fail`],
///   [`PrettyAstExt::hardline`], [`PrettyAstExt::space`],
///   [`PrettyAstExt::line`], [`PrettyAstExt::line_`],
///   [`PrettyAstExt::softline`], [`PrettyAstExt::softline_`],
///   [`PrettyAstExt::as_string`], [`PrettyAstExt::text`],
///   [`PrettyAstExt::concat`], [`PrettyAstExt::intersperse`],
///   [`PrettyAstExt::column`], [`PrettyAstExt::nesting`],
///   [`PrettyAstExt::reflow`].
/// - a partially applied version of [`pretty::docs!`].
/// - [`todo_document!`]: produce a placeholder document (that does not panic).
macro_rules!
install_pretty_helpers { ($allocator:ident : $allocator_type:ty) => { $crate::printer::pretty_ast::install_pretty_helpers!( @$allocator, #[doc = ::std::concat!("Proxy macro for [`", stringify!($crate), "::printer::pretty_ast::todo_document`] that automatically uses `", stringify!($allocator),"` as allocator.")] #[doc = ::std::concat!(r#"Example: `disambiguated_todo!("Error message")` or `disambiguated_todo!(issue #123, "Error message with issue attached")`."#)] disambiguated_todo{$crate::printer::pretty_ast::todo_document!}, #[doc = ::std::concat!("Proxy macro for [`pretty::docs`] that automatically uses `", stringify!($allocator),"` as allocator.")] docs{$crate::printer::pretty_ast::pretty_ast_docs!}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::nil`] that automatically uses `", stringify!($allocator),"` as allocator.")] nil{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::nil}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::fail`] that automatically uses `", stringify!($allocator),"` as allocator.")] fail{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::fail}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::hardline`] that automatically uses `", stringify!($allocator),"` as allocator.")] hardline{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::hardline}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::space`] that automatically uses `", stringify!($allocator),"` as allocator.")] space{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::space}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::line`] that automatically uses `", stringify!($allocator),"` as allocator.")] disambiguated_line{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::line}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::line_`] that automatically uses `", stringify!($allocator),"` as allocator.")] line_{<$allocator_type as 
$crate::printer::pretty_ast::PrettyAstExt<_>>::line_}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::softline`] that automatically uses `", stringify!($allocator),"` as allocator.")] softline{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::softline}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::softline_`] that automatically uses `", stringify!($allocator),"` as allocator.")] softline_{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::softline_}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::as_string`] that automatically uses `", stringify!($allocator),"` as allocator.")] as_string{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::as_string}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::text`] that automatically uses `", stringify!($allocator),"` as allocator.")] text{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::text}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::concat`] that automatically uses `", stringify!($allocator),"` as allocator.")] disambiguated_concat{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::concat}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::intersperse`] that automatically uses `", stringify!($allocator),"` as allocator.")] intersperse{$crate::printer::pretty_ast::pretty_ast_intersperse!}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::column`] that automatically uses `", stringify!($allocator),"` as allocator.")] column{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::column}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::nesting`] that automatically uses `", stringify!($allocator),"` as allocator.")] nesting{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::nesting}, #[doc = ::std::concat!("Proxy macro for [`PrettyAstExt::reflow`] that automatically uses `", stringify!($allocator),"` as allocator.")] reflow{<$allocator_type as 
$crate::printer::pretty_ast::PrettyAstExt<_>>::reflow} ); }; (@$allocator:ident, $($(#[$($attrs:tt)*])*$name:ident{$($callable:tt)*}),*) => { $( #[hax_rust_engine_macros::partial_apply($($callable)*, $allocator,)] #[allow(unused)] $(#[$($attrs)*])* macro_rules! $name {} )* }; } pub use install_pretty_helpers; /// `PrettyAstExt` exposes `DocAllocator`-style constructors for printers. /// /// Every method simply forwards to the global [`pretty::BoxAllocator`] so that printers /// implementing [`PrettyAst`] can build documents without juggling allocator plumbing. pub trait PrettyAstExt: Sized { /// Returns an empty document. /// Mirrors [`pretty::DocAllocator::nil`]. fn nil(&self) -> DocBuilder { pretty::DocAllocator::nil(&BoxAllocator) } /// Produces a document that fails rendering immediately. /// Mirrors [`pretty::DocAllocator::fail`]. /// /// This is typically used to abort rendering inside the left side of a [`pretty::Doc::Union`]. fn fail(&self) -> DocBuilder { pretty::DocAllocator::fail(&BoxAllocator) } /// Inserts a mandatory line break. /// Mirrors [`pretty::DocAllocator::hardline`]. fn hardline(&self) -> DocBuilder { pretty::DocAllocator::hardline(&BoxAllocator) } /// Inserts a single space that disappears when groups flatten. /// Mirrors [`pretty::DocAllocator::space`]. fn space(&self) -> DocBuilder { pretty::DocAllocator::space(&BoxAllocator) } /// Acts like a `\n` but behaves like `space` once grouped onto a single line. /// Mirrors [`pretty::DocAllocator::line`]. fn line(&self) -> DocBuilder { pretty::DocAllocator::line(&BoxAllocator) } /// Acts like `line` but collapses to `nil` if grouped on a single line. /// Mirrors [`pretty::DocAllocator::line_`]. fn line_(&self) -> DocBuilder { pretty::DocAllocator::line_(&BoxAllocator) } /// Acts like `space` when the document fits the page, otherwise behaves like `line`. /// Mirrors [`pretty::DocAllocator::softline`]. 
// NOTE(review): generic parameters on `DocBuilder`, `U`, `I`, `S` and `A` appear to have
// been stripped by the extraction of this file — confirm these signatures against the
// upstream `pretty_ast.rs` before relying on them.
fn softline(&self) -> DocBuilder {
    pretty::DocAllocator::softline(&BoxAllocator)
}
/// Acts like `nil` when the document fits the page, otherwise behaves like `line_`.
/// Mirrors [`pretty::DocAllocator::softline_`].
fn softline_(&self) -> DocBuilder {
    pretty::DocAllocator::softline_(&BoxAllocator)
}
/// Renders `data` via its [`Display`] implementation.
/// Mirrors [`pretty::DocAllocator::as_string`].
///
/// The resulting document must not contain explicit line breaks.
fn as_string(&self, data: U) -> DocBuilder {
    pretty::DocAllocator::as_string(&BoxAllocator, data)
}
/// Renders the provided text verbatim.
/// Mirrors [`pretty::DocAllocator::text`].
///
/// The supplied string must not contain line breaks.
fn text<'a>(&self, data: impl Into>) -> DocBuilder {
    // Delegates to `as_string`: the converted value is rendered via `Display`.
    self.as_string(data.into())
}
/// Concatenates the given values after turning each into a document.
/// Mirrors [`pretty::DocAllocator::concat`].
fn concat(&self, docs: I) -> DocBuilder
where
    I::Item: ToDocumentOwned,
    I: IntoIterator,
{
    pretty::DocAllocator::concat(
        &BoxAllocator,
        // Each item is first converted into an owned document using `self` as printer.
        docs.into_iter().map(|doc| doc.to_document_owned(self)),
    )
}
/// Concatenates documents while interspersing `separator` between every pair.
/// Mirrors [`pretty::DocAllocator::intersperse`].
///
/// `separator` may need to be cloned; consider cheap pointer documents like `RefDoc` or `RcDoc`.
fn intersperse(&self, docs: I, separator: S) -> DocBuilder
where
    I::Item: ToDocumentOwned,
    I: IntoIterator,
    S: ToDocumentOwned + Clone,
    A: Clone,
{
    // Convert the separator once; `pretty` clones it between every pair of documents.
    let separator = separator.to_document_owned(self);
    pretty::DocAllocator::intersperse(
        &BoxAllocator,
        docs.into_iter().map(|doc| doc.to_document_owned(self)),
        separator,
    )
}
/// Reflows `text`, inserting `softline` wherever whitespace appears.
/// Mirrors [`pretty::DocAllocator::reflow`].
fn reflow(&self, text: &'static str) -> DocBuilder
where
    A: Clone,
{
    pretty::DocAllocator::reflow(&BoxAllocator, text)
}
}

// Blanket implementation: every printer implementing `PrettyAst` gets the
// allocator-style helpers of `PrettyAstExt` for free.
impl> PrettyAstExt for P {}

/// Generate a dispatcher macro that forwards a token to specialised macros.
macro_rules! make_cases_macro {
    (
        $macro_name:ident,
        $( $($idents:ident)|* => $target:ident, )*
        _ => $fallback:ident $(,)?
    ) => {
        macro_rules! $macro_name {
            // One arm per listed identifier, forwarding the payload to its target macro.
            $( $( ($idents $tt:tt) => { $target!($tt); }; )* )*
            // Catch-all arm: anything not listed goes to the fallback macro.
            ($anything:ident $tt:tt) => { $fallback!($tt); };
        }
    };
}

/// Helper macro used to ignore a matched arm in `make_cases_macro!`.
macro_rules! skip {
    ($tt:tt) => {};
}

/// Helper macro used to keep the body for specific matches in
/// `make_cases_macro!`.
macro_rules! keep {
    ({$($tt:tt)*}) => { $($tt)* };
}

// AST node kinds for which no dedicated per-type print method is generated
// (they are rendered through their parent nodes instead).
make_cases_macro!(method_deny_list,
    ExprKind | PatKind | TyKind | GuardKind | ImplExprKind | ImplItemKind | TraitItemKind | AttributeKind | DocCommentKind => skip,
    Signedness | IntSize => skip,
    ItemQuoteOrigin | ItemQuoteOriginKind | ItemQuoteOriginPosition => skip,
    ControlFlowKind | LoopState | LoopKind => skip,
    _ => keep
);

// Node kinds that carry a span: printing one of these updates the printer's
// contextual span (see `HasContextualSpan`).
// FIX(review): `Attribute` was listed twice; the second, identical macro arm was
// unreachable, so the duplicate has been removed. Behavior is unchanged.
make_cases_macro!(span_handling,
    Item | Expr | Pat | Guard | Arm | ImplItem | TraitItem | GenericParam | Attribute => keep,
    _ => skip
);

/// A trait that provides an optional contextual span for printers: during a
/// pretty printing job, spans will be inserted so that errors are always tagged
/// with precise location information.
///
/// This should not be implemented by hand, instead, use
/// [`hax_rust_engine_macros::setup_printer_struct`].
pub trait HasContextualSpan: Clone {
    /// Clone the printer, adding a span hint. Useful for errors.
    fn with_span(&self, _span: Span) -> Self;
    /// Returns the span currently associated with the printer, if any.
    // NOTE(review): return type reads `Option` with its parameter stripped by
    // extraction — presumably `Option<Span>`; confirm upstream.
    fn span(&self) -> Option;
}

/// Declare the `PrettyAst` trait and wiring for deriving `ToDocument` for AST
/// nodes.
macro_rules! mk { ($($ty:ident),*) => { pastey::paste! { /// A trait that defines a print method per type in the AST.
///
/// This is the main trait a printer should implement.
///
/// You then implement the actual formatting logic in the generated
/// per-type methods. These methods are intentionally marked
/// `#[deprecated]` to discourage calling them directly; instead,
/// call `node.to_document(self)` from the [`ToDocument`] trait to
/// ensure annotations and spans are applied correctly.
///
/// Note that using `install_pretty_helpers!` will produce macros
/// that implicitly use `self` as allocator. Take a look at a
/// printer in the [`backends`] module for an example.
pub trait PrettyAst: Sized + HasContextualSpan {
    /// A name for this instance of `PrettyAst`.
    /// Useful for diagnostics and debugging.
    const NAME: &'static str;
    /// Emit a diagnostic with proper context and span.
    fn emit_diagnostic(&self, kind: hax_types::diagnostics::Kind) {
        // Fall back to a dummy span when the printer carries no contextual span.
        let span = self.span().unwrap_or_else(|| Span::dummy());
        use crate::ast::diagnostics::{DiagnosticInfo, Context};
        // Tag the diagnostic with this printer's name so errors are attributable.
        (DiagnosticInfo { context: Context::Printer(Self::NAME.to_string()), span, kind }).emit()
    }
    /// Produce a non-panicking placeholder document. In general, prefer the use of the helper macro [`todo_document!`].
    fn todo_document(&self, message: &str, issue_id: Option) -> DocBuilder {
        // Report the missing feature as a diagnostic, then render the message
        // verbatim so document generation can keep going.
        self.emit_diagnostic(hax_types::diagnostics::Kind::Unimplemented {
            issue_id,
            details: Some(message.into()),
        });
        self.as_string(message)
    }
    /// Produce a structured error document for an unimplemented
    /// method.
    ///
    /// Printers may override this for nicer diagnostics (e.g.,
    /// colored "unimplemented" banners or links back to source
    /// locations). The default produces a small, debuggable piece
    /// of text that includes the method name and a JSON handle for
    /// the AST fragment (via [`DebugJSON`]).
fn unimplemented_method(&self, method: &str, ast: ast::fragment::FragmentRef<'_>) -> DocBuilder {
    // The JSON handle lets users re-run hax to inspect the exact AST fragment
    // that could not be printed.
    let debug_json = DebugJSON(ast).to_string();
    self.emit_diagnostic(hax_types::diagnostics::Kind::Unimplemented {
        issue_id: None,
        details: Some(format!("The method `{method}` is not implemented in the backend {}. To show the AST fragment that could not be printed, run {debug_json}.", Self::NAME)),
    });
    // Placeholder document embedded in the output, wrapped in parentheses.
    self.text(format!("`{method}` unimpl, {debug_json}", )).parens()
}
// Generate one (deprecated) print method per AST type, except for the kinds
// filtered out by `method_deny_list`.
$(
    method_deny_list!($ty{
        #[doc = "Define how the printer formats a value of this AST type."]
        #[doc = "Do not call this method directly. Use [`ToDocument::to_document`] instead, so annotations/spans are preserved correctly."]
        #[deprecated = "Do not call this method directly. Use [`ToDocument::to_document`] instead, so annotations/spans are preserved correctly."]
        fn [<$ty:snake>](&self, [<$ty:snake>]: &$ty) -> DocBuilder {
            mk!(@method_body $ty [<$ty:snake>] self [<$ty:snake>])
        }
    });
)*
}
// Generate `ToDocument` implementations that route every node through its
// (generated) `PrettyAst` method, threading spans along the way.
$(
    method_deny_list!($ty{
        impl> ToDocument for $ty {
            fn to_document(&self, printer: &P) -> DocBuilder {
                // For span-carrying node kinds, record this node's span on the
                // printer so later diagnostics point at the right location.
                span_handling!($ty{ let printer = &(printer.with_span(self.span())); });
                // Note about deprecation:
                // Here is the only place where calling the deprecated methods from the trait `PrettyAst` is fine.
                // Here is the place we (will) take care of spans, etc.
                #[allow(deprecated)]
                // NOTE(review): the path before `>::` appears truncated by the
                // extraction of this file — confirm against upstream.
                let print =
                    >::[<$ty:snake>];
                print(printer, self)
            }
        }
    });
)*
} };
// Special default implementation for specific types
(@method_body Symbol $meth:ident $self:ident $value:ident) => {
    // Symbols render as their string contents.
    $self.as_string($value.to_string())
};
(@method_body LocalId $meth:ident $self:ident $value:ident) => {
    // A local id prints as its underlying symbol.
    $value.0.to_document($self)
};
(@method_body SpannedTy $meth:ident $self:ident $value:ident) => {
    // A spanned type prints as its type, dropping the span wrapper.
    $value.ty.to_document($self)
};
(@method_body $ty:ident $meth:ident $self:ident $value:ident) => {
    // Default body: report the method as unimplemented for this backend.
    $self.unimplemented_method(stringify!($meth), ast::fragment::FragmentRef::from($meth))
};
}
#[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))]
mk!(GlobalId, AstNodes);


================================================
FILE: rust-engine/src/printer/render_view.rs
================================================
//! Tools for rendering Rust paths into strings.
//!
//! This module takes a typed [`View`] (a list of [`PathSegment`]s) and turns it
//! into either:
//! - a structured [`Rendered`] (with `module` vs. `path` parts), or
//! - a single flat `String`.
//!
//! The [`RenderView`] trait allows for customization.
use crate::{
    ast::identifiers::global_id::{
        ReservedSuffix,
        view::{PathSegment, PathSegmentPayload, UnnamedPathSegmentPayload, View},
    },
    symbol::Symbol,
};
use std::collections::HashSet;
use std::sync::OnceLock;
/// A helper trait to render a [`View`] (a typed list of path segments) into
/// strings.
///
/// Rendering is split into two parts:
/// - module path: the crate + module prefix,
/// - relative path: the remaining (non-module) segments, and both may contain
///   hierarchical sub-segments (e.g. `Foo::MyVariant::field`).
///
/// Implementors can:
/// - override how unnamed segments (e.g. `impl`, `anon const`) are displayed,
/// - override how each segment is rendered,
/// - customize the separator (defaults to `"::"`),
/// - render to either a structured [`Rendered`] or a single flat `String`.
///
/// # Terminology
///
/// A path segment can be:
/// - named: carries a `Symbol` that can be printed as-is,
/// - unnamed: carries an [`UnnamedPathSegmentPayload`] (like `Impl`, `Closure`,
///   …), which must be turned into a `Symbol` first (see
///   [`RenderView::render_unnamed_path_segment_payload`]).
///
/// # Hierarchical segments
///
/// Some segments are actually small trees (e.g., field → constructor → type).
/// [`RenderView::render_path_segment`] returns all display atoms for such a
/// segment, so callers can flatten or join as needed.
pub trait RenderView: Sized {
    /// List of reserved keywords that will be escaped when rendering
    // NOTE(review): `HashSet` and `OnceLock>` appear with their type parameters
    // stripped by extraction — presumably `HashSet<String>`; confirm upstream.
    fn reserved_keywords() -> &'static HashSet {
        // Lazily initialized once; the default set is empty (backends override this).
        static SET: OnceLock> = OnceLock::new();
        SET.get_or_init(|| [].into_iter().collect())
    }
    /// Check if a string is a reserved keyword that needs escaping
    fn is_reserved_keyword(id: &str) -> bool {
        let reserved = Self::reserved_keywords();
        reserved.contains(id)
    }
    /// Check if a string needs escaping
    fn should_escape(id: &str) -> bool {
        Self::is_reserved_keyword(id)
    }
    /// Escape a string if it needs escaping according to `Self::should_escape`
    fn escape(id: &str) -> String {
        // See https://github.com/cryspen/hax/issues/1866
        // Characters that cannot appear in target-language identifiers are
        // replaced by underscores.
        let id = id.replace([' ', '<', '>'], "_");
        if id.is_empty() {
            "_ERROR_EMPTY_ID_".to_string()
        } else if Self::should_escape(id.trim_start_matches("_")) {
            // Keyword clash (modulo leading underscores): prefix one more underscore.
            format!("_{id}")
        } else {
            id
        }
    }
    /// Converts an unnamed path segment payload into a printable [`Symbol`].
    ///
    /// Unnamed segments include `impl`, `anon const`, `inline const`, `foreign mod`,
    /// `global_asm`, `use`, `opaque`, and `closure`. By default, these map to
    /// their capitalized identifier (e.g., `Impl`, `AnonConst`, …).
    ///
    /// Override this method to customize how unnamed items appear in output.
fn render_unnamed_path_segment_payload(&self, unnamed: UnnamedPathSegmentPayload) -> Symbol {
    // Delegates to the shared default; backends override the trait method instead.
    default::render_unnamed_path_segment_payload(self, unnamed)
}
/// Converts a full [`PathSegmentPayload`] (named or unnamed) into a printable [`Symbol`].
///
/// Named payloads return their `Symbol` unchanged. Unnamed payloads are delegated to
/// [`render_unnamed_path_segment_payload`].
fn render_path_segment_payload(&self, payload: PathSegmentPayload) -> Symbol {
    match payload {
        PathSegmentPayload::Named(symbol) => symbol,
        PathSegmentPayload::Unnamed(unnamed) => {
            self.render_unnamed_path_segment_payload(unnamed)
        }
    }
}
/// Renders a single [`PathSegment`] into a vector of display atoms.
///
/// Most segments render to a single atom (e.g., `"Foo"`). Hierarchical segments
/// (like a field) render to multiple atoms representing their parent chain
/// (e.g., `["Foo", "MyVariant", "my_field"]`). Disambiguators (see
/// [`PathSegment::disambiguator`]) are suffixed as `_N` when `N > 0`.
///
/// The resulting atoms are suitable for joining with [`separator`](Self::separator),
/// or for further grouping into module vs. relative path.
fn render_path_segment(&self, seg: &PathSegment) -> Vec {
    default::render_path_segment(self, seg)
}
/// Renders the optional suffix
fn render_suffix(&self, suffix: &ReservedSuffix) -> String {
    default::render_suffix(suffix)
}
/// Renders just the module path (crate + modules) of a [`View`], as a list of atoms.
///
/// This is a convenience wrapper around [`render`](Self::render) that returns only
/// the `module` component.
fn module(&self, view: &View) -> Vec {
    self.render(view).module
}
/// Allows backends to adjust a module path before rendering, e.g., to shorten it according
/// to currently open namespaces.
fn relativize_module_path<'a>(&self, module_path: &'a [PathSegment]) -> &'a [PathSegment] {
    // Default: identity — keep the full module path.
    module_path
}
/// Renders a [`View`] into a structured [`Rendered`] value,
/// splitting output into `module` and `path` parts.
///
/// Internally, this uses [`View::split_at_module`] to separate module segments
/// from the remaining non-module segments, rendering each with
/// [`render_path_segment`].
fn render(&self, view: &View) -> Rendered {
    let (module_path, relative_path) = view.split_at_module();
    // Let the backend shorten the module prefix if it wishes.
    let module_path = self.relativize_module_path(module_path);
    let path_segment = |seg| self.render_path_segment(seg);
    let mut path: Vec = relative_path.iter().flat_map(path_segment).collect();
    // A reserved suffix (e.g. `_pre`) is appended to the last rendered atom.
    if let Some(last) = path.last_mut()
        && let Some(suffix) = view.suffix()
    {
        last.push_str(&self.render_suffix(suffix));
    }
    Rendered {
        module: module_path.iter().flat_map(path_segment).collect(),
        path,
    }
}
/// Returns the string used to join rendered atoms (defaults to `"::"`).
///
/// Override to customize separators (e.g., `"."`).
fn separator(&self) -> &str {
    "::"
}
/// Lazily render a view as an iterator of strings.
///
/// This chains `rendered.module` and `rendered.path` in order.
fn rendered_to_strings(&self, rendered: Rendered) -> impl Iterator {
    rendered.module.into_iter().chain(rendered.path)
}
/// Joins the atoms contained in a [`Rendered`] into a single string using
/// [`separator`](Self::separator).
///
/// This concatenates `rendered.module` and `rendered.path` in order, inserting
/// the separator between atoms.
fn rendered_to_string(&self, rendered: Rendered) -> String {
    self.rendered_to_strings(rendered)
        .collect::>()
        .join(self.separator())
}
/// Convenience: renders a [`View`] straight to a single `String`.
fn render_string(&self, view: &View) -> String {
    self.rendered_to_string(self.render(view))
}
/// Convenience: renders a [`View`] straight to an iterator of `String`s.
fn render_strings(&self, view: &View) -> impl Iterator {
    self.rendered_to_strings(self.render(view))
}
}
/// Default rendering helpers used by [`RenderView`]'s blanket implementations.
///
/// You can call these directly when composing your own renderer, or override the
/// trait methods to change behavior selectively.
pub mod default {
    use super::*;
    /// Default mapping of unnamed payloads to printable symbols.
    pub fn render_unnamed_path_segment_payload(
        _render_view: &V,
        unnamed: UnnamedPathSegmentPayload,
    ) -> Symbol {
        // One capitalized identifier per unnamed item kind.
        Symbol::new(match unnamed {
            UnnamedPathSegmentPayload::Impl => "Impl",
            UnnamedPathSegmentPayload::AnonConst => "AnonConst",
            UnnamedPathSegmentPayload::InlineConst => "InlineConst",
            UnnamedPathSegmentPayload::Foreign => "Foreign",
            UnnamedPathSegmentPayload::GlobalAsm => "GlobalAsm",
            UnnamedPathSegmentPayload::Use => "Use",
            UnnamedPathSegmentPayload::Opaque => "Opaque",
            UnnamedPathSegmentPayload::Closure => "Closure",
        })
    }
    /// Default rendering of a single [`PathSegment`] into display atoms.
    ///
    /// This walks the segment's parent chain (see [`PathSegment::parents`]) and
    /// produces an atom for each level using
    /// [`RenderView::render_path_segment_payload`]. If a level has a disambiguator
    /// `> 0`, it is appended as `_N` (e.g., `Foo_2`).
    pub fn render_path_segment(
        render_view: &V,
        seg: &PathSegment,
    ) -> Vec {
        let mut strings: Vec = seg
            .parents()
            .map(|seg| {
                let id = render_view.render_path_segment_payload(seg.payload());
                let d = seg.disambiguator();
                // Disambiguator `0` is the common case and is left implicit.
                if d > 0 { format!("{id}_{d}") } else { format!("{id}") }
            })
            // Escape every atom (reserved keywords, forbidden characters).
            .map(|str| V::escape(&str))
            .collect();
        // `parents()` yields innermost-first; reverse for outermost-first display order.
        strings.reverse();
        strings
    }
    /// Default suffix rendering
    pub fn render_suffix(suffix: &ReservedSuffix) -> String {
        match suffix {
            ReservedSuffix::Pre => "_pre",
            ReservedSuffix::Post => "_post",
            ReservedSuffix::Cast => "_cast_to_repr",
        }
        .to_owned()
    }
}
/// The structured result of rendering a [`View`].
///
/// - `module`: atoms for the crate + modules prefix (may be empty for local/anonymous contexts),
/// - `path`: atoms for the remaining segments (item, constructors, fields, etc.).
///
/// Join with [`RenderView::rendered_to_string`] to obtain a single string.
pub struct Rendered {
    /// Crate + module atoms (e.g., `["my_crate", "a", "b"]`).
pub module: Vec,
/// Non-module atoms (e.g., `["Foo::f", "MyEnum::MyVariant::my_field"]`).
pub path: Vec,
}


================================================
FILE: rust-engine/src/printer.rs
================================================
//! Printer infrastructure: allocators, traits, and the printing pipeline.
//!
//! This module contains the common plumbing that backends and printers rely on
//! to turn AST values into formatted text:
//! - [`Allocator`]: a thin wrapper around the `pretty` crate's allocator,
//!   parameterized by the backend, used to produce [`pretty::Doc`] nodes.
//! - [`PrettyAst`]: the trait that printers implement to provide per-type
//!   formatting of Hax AST nodes (re-exported from [`pretty_ast`]).
//! - The resugaring pipeline: a sequence of local AST rewrites that make
//!   emitted code idiomatic for the target language before pretty-printing.
use std::ops::Deref;
use crate::{
    ast::{self, span::Span},
    attributes::LinkedItemGraph,
    printer::pretty_ast::ToDocument,
};
use ast::visitors::dyn_compatible;
pub mod pretty_ast;
pub use pretty_ast::PrettyAst;
pub mod render_view;
/// A resugaring is an erased mapper visitor with a name.
/// A resugaring is a *local* transformation on the AST that produces exclusively `ast::resugared` nodes.
/// Any involved or non-local transformation should be a phase, not a resugaring.
///
/// Backends may provide **multiple resugaring phases** to incrementally refine
/// the tree into something idiomatic for the target language (e.g., desugaring
/// pattern sugar into a more uniform core, then resugaring back into target
/// idioms). Each phase mutates the AST in place and should be small, focused,
/// and easy to test.
///
/// If you add a new phase, make sure it appears in the backend’s
/// `resugaring_phases()` list in the correct order.
pub trait Resugaring: for<'a> dyn_compatible::AstVisitorMut<'a> {
    /// Get the name of the resugar.
fn name(&self) -> String;
}
/// A printer defines a list of resugaring phases.
pub trait Printer: Sized + PrettyAst + Default + HasLinkedItemGraph {
    /// The name of the printer
    // NOTE(review): the path before `>::NAME` appears truncated by extraction —
    // presumably `<Self as PrettyAst>::NAME`; confirm upstream.
    const NAME: &'static str = >::NAME;
}
/// Getter and setter for `LinkedItemGraph`, useful for printers.
pub trait HasLinkedItemGraph {
    /// Get a reference of the `LinkedItemGraph`.
    fn linked_item_graph(&self) -> &LinkedItemGraph;
    /// Set a `LinkedItemGraph`.
    fn with_linked_item_graph(self, graph: std::rc::Rc) -> Self;
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
/// Placeholder type for sourcemaps.
pub struct SourceMap;
/// Helper trait to print AST fragments.
pub trait Print
where
    for<'a> dyn Resugaring: dyn_compatible::AstVisitableMut<'a, T>,
{
    /// Print a single AST fragment using this backend, returning the fragment
    /// alongside the rendered text.
    fn print_returning_fragment(&mut self, fragment: T) -> (String, SourceMap, T)
    where
        T: ToDocument;
    /// Print a single AST fragment using this backend, discarding the fragment.
    fn print(&mut self, fragment: T) -> (String, SourceMap)
    where
        T: ToDocument;
}
impl Print for P
where
    for<'a> dyn Resugaring: dyn_compatible::AstVisitableMut<'a, T>,
{
    fn print_returning_fragment(&mut self, fragment: T) -> (String, SourceMap, T)
    where
        T: ToDocument,
    {
        let doc_builder = fragment.to_document(self).into_doc();
        (
            // Render at a fixed page width of 80 columns.
            doc_builder.deref().pretty(80).to_string(),
            SourceMap,
            fragment,
        )
    }
    fn print(&mut self, fragment: T) -> (String, SourceMap)
    where
        T: ToDocument,
    {
        let (rendered, sourcemap, _) =
            >::print_returning_fragment(self, fragment);
        (rendered, sourcemap)
    }
}


================================================
FILE: rust-engine/src/resugarings.rs
================================================
//! The "resugaring" phases used by printers.
//! This module defines resugaring instances (see
//! [`hax_rust_engine::ast::Resugaring`] for the definition of a
//! resugaring). Each backend defines its own set of resugaring phases.
use crate::ast::identifiers::GlobalId; use crate::ast::resugared::*; use crate::ast::visitors::*; use crate::ast::*; use crate::printer::*; /// Transforms [`ItemKind::Fn`] of arity zero into [`ResugaredItemKind::Constant`]. /// Rust `const` items are encoded by the `ImportThir` phase of the hax engine as function of arity zero. /// Functions of arity zero themselves are encoded as functions operating on one argument of type `()`. #[derive(Copy, Clone, Default)] pub struct FunctionsToConstants; impl AstVisitorMut for FunctionsToConstants { fn enter_item_kind(&mut self, item_kind: &mut ItemKind) { let ItemKind::Fn { name, generics, body, params, safety: SafetyKind::Safe, } = item_kind else { return; }; if !params.is_empty() { return; } *item_kind = ItemKind::Resugared(ResugaredItemKind::Constant { name: *name, body: body.clone(), generics: generics.clone(), }); } fn enter_impl_item_kind(&mut self, item_kind: &mut ImplItemKind) { if let ImplItemKind::Fn { body, params } = item_kind && params.is_empty() { *item_kind = ImplItemKind::Resugared(ResugaredImplItemKind::Constant { body: body.clone() }) } } } impl Resugaring for FunctionsToConstants { fn name(&self) -> String { "functions-to-constants".to_string() } } /// Tuples resugaring. Resugars tuple constructors to the dedicated expression variant [`ResugaredExprKind::Tuple`], /// and tuple types to the dedicated type variant [`ResugaredTyKind::Tuple`]. 
pub struct Tuples; impl AstVisitorMut for Tuples { fn enter_expr_kind(&mut self, x: &mut ExprKind) { let (constructor, fields) = match x { ExprKind::Construct { constructor, is_record: false, is_struct: true, base: None, fields, } => (constructor, &fields[..]), ExprKind::GlobalId(constructor) => (constructor, &[][..]), _ => return, }; if constructor.expect_tuple().is_some() { let args = fields.iter().map(|(_, e)| e).cloned().collect(); *x = ExprKind::Resugared(ResugaredExprKind::Tuple(args)) } } fn enter_ty_kind(&mut self, x: &mut TyKind) { let TyKind::App { head, args } = x else { return; }; if head.expect_tuple().is_some() { let Some(args) = args .iter() .map(GenericValue::expect_ty) .collect::>>() else { return; }; *x = TyKind::Resugared(ResugaredTyKind::Tuple(args.into_iter().cloned().collect())) } } } impl Resugaring for Tuples { fn name(&self) -> String { "tuples".to_string() } } /// Let-pure resugaring. Use to identify expressions of the form `let x ← pure ..`, where the arrow /// can be turned into a normal assignment `:=` pub struct LetPure; impl AstVisitorMut for LetPure { fn enter_expr_kind(&mut self, expr: &mut ExprKind) { const PURE: GlobalId = crate::names::rust_primitives::hax::explicit_monadic::pure; if let ExprKind::Let { lhs, rhs, body } = expr && let ExprKind::App { head, args, generic_args, bounds_impls, trait_: None, } = rhs.kind() && *head.kind() == ExprKind::GlobalId(PURE) && let ([pure_rhs], [], []) = (&args[..], &generic_args[..], &bounds_impls[..]) { *expr = ExprKind::Resugared(ResugaredExprKind::LetPure { lhs: lhs.clone(), rhs: pure_rhs.clone(), body: body.clone(), }) } } } impl Resugaring for LetPure { fn name(&self) -> String { "let_pure".to_string() } } /// Recursive function detection. Identifies functions whose body contains a /// reference to their own name and resugars them to [`ResugaredItemKind::RecursiveFn`]. 
#[derive(Copy, Clone, Default)] pub struct RecursiveFunctions; /// Helper visitor that checks whether an expression tree contains a reference /// to a specific [`GlobalId`]. struct SelfReferenceChecker { target: GlobalId, found: bool, } impl AstVisitor for SelfReferenceChecker { fn enter_expr_kind(&mut self, kind: &ExprKind) { if let ExprKind::GlobalId(id) = kind && *id == self.target { self.found = true; } } } impl AstVisitorMut for RecursiveFunctions { fn visit_item_kind(&mut self, item_kind: &mut ItemKind) { if let ItemKind::Fn { name, generics, body, params, safety, } = &*item_kind { let mut checker = SelfReferenceChecker { target: *name, found: false, }; checker.visit_expr(body); if checker.found { *item_kind = ItemKind::Resugared(ResugaredItemKind::RecursiveFn { name: *name, generics: generics.clone(), body: body.clone(), params: params.clone(), safety: safety.clone(), }); } } } } impl Resugaring for RecursiveFunctions { fn name(&self) -> String { "recursive-functions".to_string() } } /// Record ellipsis resugaring. Identifies record-like `Construct` patterns where /// some fields are wildcards and resugars them into `ConstructWithEllipsis`, /// dropping the wildcard fields so the printer can emit `..`. pub struct RecordEllipsis; impl AstVisitorMut for RecordEllipsis { fn enter_pat_kind(&mut self, x: &mut PatKind) { let PatKind::Construct { constructor, is_record: true, is_struct, fields, } = x else { return; }; let non_wild: Vec<_> = fields .iter() .filter(|(_, pat)| !matches!(&*pat.kind, PatKind::Wild)) .cloned() .collect(); if non_wild.len() < fields.len() { *x = ResugaredPatKind::ConstructWithEllipsis { constructor: *constructor, is_struct: *is_struct, fields: non_wild, } .into(); } } } impl Resugaring for RecordEllipsis { fn name(&self) -> String { "record-ellipsis".to_string() } } ================================================ FILE: rust-engine/src/symbol.rs ================================================ //! 
Interned string identifiers used throughout the AST. //! //! Symbols are lightweight wrappers around `String` for use in identifiers. //! Eventually, this could be backed by a real interner or arena. use std::ops::Deref; use hax_rust_engine_macros::*; /// Interned string identifier for the AST #[derive_group_for_ast] pub struct Symbol(String); impl Symbol { /// Create a new symbol pub fn new(s: impl AsRef) -> Self { Self(s.as_ref().to_string()) } } impl Deref for Symbol { type Target = str; fn deref(&self) -> &Self::Target { &self.0 } } impl AsRef for Symbol { fn as_ref(&self) -> &str { &self.0 } } impl std::fmt::Display for Symbol { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { write!(f, "{}", self.0) } } ================================================ FILE: rust-toolchain.toml ================================================ [toolchain] channel = "nightly-2025-11-08" components = [ "rustc-dev", "llvm-tools-preview" , "rust-analysis" , "rust-src" , "rustfmt" ] ================================================ FILE: rustc-coverage-tests/Cargo.toml ================================================ [package] name = "coverage" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../hax-lib" } [features] fstar = [] fstar-lax = [] coq = [] json = [] lean = [] ================================================ FILE: rustc-coverage-tests/README.md ================================================ This crate contains the [rust files](https://github.com/rust-lang/rust/tree/master/tests/coverage) from rustc [coverage tests](https://rustc-dev-guide.rust-lang.org/tests/compiletest.html#coverage-tests). 
The following test targets are available: - `json` to test AST extraction as json using hax frontend - `fstar` to test that extraction to F* succeeds - `fstar-lax` to test that extraction to F* and lax-checking succeed - `coq` to test that extraction to coq succeeds - `lean` to test that extraction to Lean succeeds - `lean-tc` to test that extraction to Lean succeeds and type checks ## Running A script is available to run the tests using `python run-coverage-tests.py <target>` where `<target>` is either one of the targets or `all`.
For example: ```rust #[cfg(any(feature = "json", feature = "fstar"))] mod abort; ``` This means that the test in `abort.rs` runs only for features `json` and `fstar`. If you want to also run it under a new feature you can modify this to `#[cfg(any(feature = "json", feature = "fstar", feature = "<new-feature>"))]`.
# (with-demoted-errors "Error when constructing arg string: %S" # (let* ((fname (file-name-nondirectory buffer-file-name)) # (target (concat fname "-in")) # (argstr (car (process-lines "make" "--quiet" target)))) # (split-string argstr)))) # (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make) # PATH_TO_CHILD_MAKEFILE := "$(abspath $(firstword $(MAKEFILE_LIST)))" PATH_TO_TEMPLATE_MAKEFILE := "$(abspath $(lastword $(MAKEFILE_LIST)))" HACL_HOME ?= $(HOME)/.hax/hacl_home # Expand variable FSTAR_BIN_DETECT now, so that we don't run this over and over FSTAR_BIN_DETECT := $(if $(shell command -v fstar.exe), fstar.exe, $(FSTAR_HOME)/bin/fstar.exe) FSTAR_BIN ?= $(FSTAR_BIN_DETECT) GIT_ROOT_DIR := $(shell git rev-parse --show-toplevel)/ CACHE_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/checked HINT_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/hints # Makes command quiet by default Q ?= @ # Verify the required executable are in PATH EXECUTABLES = cargo cargo-hax jq K := $(foreach exec,$(EXECUTABLES),\ $(if $(shell which $(exec)),some string,$(error "No $(exec) in PATH"))) export ANSI_COLOR_BLUE=\033[34m export ANSI_COLOR_RED=\033[31m export ANSI_COLOR_BBLUE=\033[1;34m export ANSI_COLOR_GRAY=\033[90m export ANSI_COLOR_TONE=\033[35m export ANSI_COLOR_RESET=\033[0m ifdef NO_COLOR export ANSI_COLOR_BLUE= export ANSI_COLOR_RED= export ANSI_COLOR_BBLUE= export ANSI_COLOR_GRAY= export ANSI_COLOR_TONE= export ANSI_COLOR_RESET= endif # The following is a bash script that discovers F* libraries. # Due to incompatibilities with make 4.3, I had to make a "oneliner" bash script... define FINDLIBS : "Prints a path if and only if it exists. Takes one argument: the path."; \ function print_if_exists() { \ if [ -d "$$1" ]; then \ echo "$$1"; \ fi; \ } ; \ : "Asks Cargo all the dependencies for the current crate or workspace,"; \ : "and extract all "root" directories for each. 
Takes zero argument."; \ function dependencies() { \ cargo metadata --format-version 1 | \ jq -r ".packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")"; \ } ; \ : "Find hax libraries *around* a given path. Takes one argument: the"; \ : "path."; \ function find_hax_libraries_at_path() { \ path="$$1" ; \ : "if there is a [proofs/fstar/extraction] subfolder, then that s a F* library" ; \ print_if_exists "$$path/proofs/fstar/extraction" ; \ : "Maybe the [proof-libs] folder of hax is around?" ; \ MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") ; \ if [ $$? -eq 0 ]; then \ print_if_exists "$$MAYBE_PROOF_LIBS/core" ; \ print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" ; \ fi ; \ } ; \ { while IFS= read path; do \ find_hax_libraries_at_path "$$path"; \ done < <(dependencies) ; } | sort -u endef export FINDLIBS FSTAR_INCLUDE_DIRS_EXTRA ?= FINDLIBS_OUTPUT := $(shell bash -c '${FINDLIBS}') FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(FSTAR_INCLUDE_DIRS_EXTRA) $(FINDLIBS_OUTPUT) ../models # Make sure FSTAR_INCLUDE_DIRS has the `proof-libs`, print hints and # an error message otherwise ifneq (,$(findstring proof-libs/fstar,$(FSTAR_INCLUDE_DIRS))) else K += $(info ) ERROR := $(shell printf '${ANSI_COLOR_RED}Error: could not detect `proof-libs`!${ANSI_COLOR_RESET}') K += $(info ${ERROR}) ERROR := $(shell printf ' > Do you have `${ANSI_COLOR_BLUE}hax-lib${ANSI_COLOR_RESET}` in your `${ANSI_COLOR_BLUE}Cargo.toml${ANSI_COLOR_RESET}` as a ${ANSI_COLOR_BLUE}git${ANSI_COLOR_RESET} or ${ANSI_COLOR_BLUE}path${ANSI_COLOR_RESET} dependency?') K += $(info ${ERROR}) ERROR := $(shell printf ' ${ANSI_COLOR_BLUE}> Tip: you may want to run `cargo add --git https://github.com/hacspec/hax hax-lib`${ANSI_COLOR_RESET}') K += $(info ${ERROR}) K += $(info ) K += $(error Fatal error: `proof-libs` is required.) 
endif .PHONY: all verify clean all: $(Q)rm -f .depend $(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) .depend hax.fst.config.json verify all-keep-going: $(Q)rm -f .depend $(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) --keep-going .depend hax.fst.config.json verify # If $HACL_HOME doesn't exist, clone it ${HACL_HOME}: $(Q)mkdir -p "${HACL_HOME}" $(info Cloning Hacl* in ${HACL_HOME}...) git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}" $(info Cloning Hacl* in ${HACL_HOME}... done!) # If no any F* file is detected, we run hax ifeq "$(wildcard *.fst *fsti)" "" $(shell cargo hax into fstar) endif # By default, we process all the files in the current directory ROOTS ?= $(wildcard *.fst *fsti) ADMIT_MODULES ?= ADMIT_MODULE_FLAGS ?= --admit_smt_queries true # Can be useful for debugging purposes FINDLIBS.sh: $(Q)echo '${FINDLIBS}' > FINDLIBS.sh include-dirs: $(Q)bash -c '${FINDLIBS}' FSTAR_FLAGS = \ --warn_error -321-331-241-274-239-271 \ --cache_checked_modules --cache_dir $(CACHE_DIR) \ --already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \ $(addprefix --include ,$(FSTAR_INCLUDE_DIRS)) FSTAR := $(FSTAR_BIN) $(FSTAR_FLAGS) .depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(HACL_HOME) @$(FSTAR) --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@ include .depend $(HINT_DIR) $(CACHE_DIR): $(Q)mkdir -p $@ define HELPMESSAGE echo "hax' default Makefile for F*" echo "" echo "The available targets are:" echo "" function target() { printf ' ${ANSI_COLOR_BLUE}%-20b${ANSI_COLOR_RESET} %s\n' "$$1" "$$2" } target "all" "Verify every F* files (stops whenever an F* fails first)" target "all-keep-going" "Verify every F* files (tries as many F* module as possible)" target "" "" target "run/${ANSI_COLOR_TONE} " 'Runs F* on `MyModule.fst` only' target "" "" target "vscode" 'Generates a `hax.fst.config.json` file' target "${ANSI_COLOR_TONE}${ANSI_COLOR_BLUE}-in " 'Useful for Emacs, outputs the F* prefix command to be used' target "" "" target "clean" 'Cleanup the 
target' target "include-dirs" 'List the F* include directories' target "" "" target "describe" 'List the F* root modules, and describe the environment.' echo "" echo "Variables:" target "NO_COLOR" "Set to anything to disable colors" target "ADMIT_MODULES" "List of modules where F* will assume every SMT query" target "FSTAR_INCLUDE_DIRS_EXTRA" "List of extra include F* dirs" endef export HELPMESSAGE describe: @printf '${ANSI_COLOR_BBLUE}F* roots:${ANSI_COLOR_RESET}\n' @for root in ${ROOTS}; do \ filename=$$(basename -- "$$root") ;\ ext="$${filename##*.}" ;\ noext="$${filename%.*}" ;\ printf "${ANSI_COLOR_GRAY}$$(dirname -- "$$root")/${ANSI_COLOR_RESET}%s${ANSI_COLOR_GRAY}.${ANSI_COLOR_TONE}%s${ANSI_COLOR_RESET}%b\n" "$$noext" "$$ext" $$([[ "${ADMIT_MODULES}" =~ (^| )$$root($$| ) ]] && echo '${ANSI_COLOR_RED}\t[ADMITTED]${ANSI_COLOR_RESET}'); \ done @printf '\n${ANSI_COLOR_BBLUE}Environment:${ANSI_COLOR_RESET}\n' @printf ' - ${ANSI_COLOR_BLUE}HACL_HOME${ANSI_COLOR_RESET} = %s\n' '${HACL_HOME}' @printf ' - ${ANSI_COLOR_BLUE}FSTAR_BIN${ANSI_COLOR_RESET} = %s\n' '${FSTAR_BIN}' @printf ' - ${ANSI_COLOR_BLUE}GIT_ROOT_DIR${ANSI_COLOR_RESET} = %s\n' '${GIT_ROOT_DIR}' @printf ' - ${ANSI_COLOR_BLUE}CACHE_DIR${ANSI_COLOR_RESET} = %s\n' '${CACHE_DIR}' @printf ' - ${ANSI_COLOR_BLUE}HINT_DIR${ANSI_COLOR_RESET} = %s\n' '${HINT_DIR}' @printf ' - ${ANSI_COLOR_BLUE}ADMIT_MODULE_FLAGS${ANSI_COLOR_RESET} = %s\n' '${ADMIT_MODULE_FLAGS}' @printf ' - ${ANSI_COLOR_BLUE}FSTAR_INCLUDE_DIRS_EXTRA${ANSI_COLOR_RESET} = %s\n' '${FSTAR_INCLUDE_DIRS_EXTRA}' help: ;@bash -c "$$HELPMESSAGE" h: ;@bash -c "$$HELPMESSAGE" HEADER = $(Q)printf '${ANSI_COLOR_BBLUE}[CHECK] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))" run/%: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME) ${HEADER} $(Q)$(FSTAR) $(OTHERFLAGS) $(@:run/%=%) VERIFIED_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS))) ADMIT_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ADMIT_MODULES))) $(ADMIT_CHECKED): 
$(Q)printf '${ANSI_COLOR_BBLUE}[${ANSI_COLOR_TONE}ADMIT${ANSI_COLOR_BBLUE}] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))" $(Q)$(FSTAR) $(OTHERFLAGS) $(ADMIT_MODULE_FLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \ echo "" ; \ exit 1 ; \ } $(Q)printf "\n\n" $(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME) ${HEADER} $(Q)$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \ echo "" ; \ exit 1 ; \ } touch $@ $(Q)printf "\n\n" verify: $(VERIFIED_CHECKED) $(ADMIT_CHECKED) # Targets for Emacs %.fst-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints) %.fsti-in: $(info $(FSTAR_FLAGS) \ $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints) # Targets for VSCode hax.fst.config.json: .depend $(Q)echo "$(FSTAR_INCLUDE_DIRS)" | jq --arg fstar "$(FSTAR_BIN)" -R 'split(" ") | {fstar_exe: $$fstar | gsub("^\\s+|\\s+$$";""), include_dirs: .}' > $@ vscode: $(Q)rm -f .depend $(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) hax.fst.config.json SHELL=bash # Clean target clean: rm -rf $(CACHE_DIR)/* rm *.fst ================================================ FILE: rustc-coverage-tests/proofs/lean/extraction/lakefile.toml ================================================ name = "Coverage" version = "0.1.0" defaultTargets = ["Coverage"] [[lean_lib]] name = "Coverage" [[require]] name = "Hax" path = "../../../../hax-lib/proof-libs/lean" ================================================ FILE: rustc-coverage-tests/requirements.txt ================================================ pyyaml tabulate ================================================ FILE: rustc-coverage-tests/run-coverage-tests.py ================================================ #!/usr/bin/env python3 import argparse import subprocess import os import sys from pathlib import Path import shutil import filecmp import difflib CONFIG_FILE = "test_config.yaml" def compare_and_store_outputs(target, 
def cleanup_extraction(base_dir="proofs"):
    """Delete generated proof files (.v / .fst) under `base_dir`.

    Returns True early when the directory does not exist (nothing to clean);
    otherwise returns None after removing the matching files.
    """
    actual_dir = Path(base_dir)
    if not actual_dir.exists():
        print(f"[WARN] Output dir not found: {actual_dir}")
        return True  # No outputs to check

    # Only generated Coq (.v) and F* (.fst) files are removed; everything
    # else (Makefiles, configs, lakefiles, ...) is left untouched.
    # Note: the original built an intermediate list and re-checked
    # `is_file()` inside the loop; both were redundant.
    valid_extensions = {".v", ".fst"}
    for file in actual_dir.rglob("*"):
        if file.is_file() and file.suffix in valid_extensions:
            os.remove(file)


def show_file_diff(file1, file2):
    """Print a unified diff between two text files; print nothing when identical."""
    with open(file1, "r") as f1, open(file2, "r") as f2:
        lines1 = f1.readlines()
        lines2 = f2.readlines()
    diff = list(difflib.unified_diff(lines1, lines2, fromfile=str(file1), tofile=str(file2)))
    if diff:
        print("".join(diff))
def cargo_cmd(test_name, target, feature):
    """Build (and echo) the `cargo hax` extraction command for one test.

    Args:
        test_name: test module to include; an empty string selects everything
            (no `-i` filter is emitted).
        target: backend name passed to `cargo hax into` (e.g. "fstar", "coq").
        feature: cargo feature selecting which tests are compiled in.

    Returns:
        The shell command as a string.
    """
    feature_flag = f"--features {feature}"
    # Restrict extraction to a single test module unless no name was given.
    target_filter = f"-i '-** +coverage::{test_name}::**'" if test_name else ""
    # Build the command once; the original duplicated this f-string for the
    # print and the return, which invited the two drifting apart.
    cmd = f"cargo hax -C {feature_flag} \\; into {target_filter} {target}"
    print(cmd)
    return cmd
Pass", "actual": "✅ Pass" if rc == 0 else "❌ Fail", "result": "✅" if rc == 0 else "❌" }) return results if target == "json": return results for test_name, targets in config["tests"].items(): for t in applicable_targets: is_expected_to_run = t in targets should_run = is_expected_to_run or include_negative if not should_run: continue cleanup_extraction() if t == "fstar-lax": command_result = run_fstar_lax(test_name, include_negative) elif t == "lean-tc": command_result = run_lean_tc(test_name, include_negative) elif t == "json": command_result = json_result else: cmd = cargo_cmd(test_name, t, "json" if include_negative else t) command_result = run_command(cmd) rc = min(command_result.returncode, 1) expected_code = 0 if is_expected_to_run else 1 passed = (rc == expected_code) result = { "test": test_name, "target": t, "expected": "✅ Pass" if is_expected_to_run else "❌ Fail", "actual": "✅ Pass" if rc == 0 else "❌ Fail", "result": "✅" if passed else "❌" } if check_stability and t in ["fstar"]: is_stable = compare_and_store_outputs(t, update_snapshots = update_snapshots) if not is_stable: # optionally mark test as failed result["stability"] = "❌" result["result"] = "❌" else: result["stability"] = "✅" print(result) if not passed: print(command_result.stdout) results.append(result) return results def main(): parser = argparse.ArgumentParser() parser.add_argument("target", choices=["coq", "fstar", "fstar-lax", "lean", "lean-tc", "json", "all"], help="Test target") parser.add_argument("--config", help="Path to YAML config file") parser.add_argument("--with-negative", action="store_true", help="Also run non-enabled tests and expect them to fail") parser.add_argument("--check-stability", action="store_true", help="Compare output files to reference versions, applicable only in conjunction with with-negative") parser.add_argument("--update-snapshots", action="store_true", help="Store new reference versions of generated files, applicable only in conjunction with with-negative and 
check-stability") args = parser.parse_args() os.environ["RUSTFLAGS"] = "-C instrument-coverage" stability = args.check_stability and args.with_negative config = load_config(args.config) if args.config else load_config(CONFIG_FILE) if args.with_negative else {"tests" : {"": ["coq", "fstar", "fstar-lax", "lean", "lean-tc"]}} results = run_tests(config, args.target, args.with_negative, stability, args.update_snapshots) if args.with_negative: write_summary(results, stability) else: print(results) # Exit with non-zero if any result failed (actual != expected) if any(r["result"] == "❌" for r in results): sys.exit(1) if __name__ == "__main__": main() ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Abort.fst ================================================ module Coverage.Abort #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let might_abort (should_abort: bool) : Prims.unit = if should_abort then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["aborting...\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["panics and aborts"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) <: Rust_primitives.Hax.t_Never) else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["Don't Panic\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let countdown:i32 = 
mk_i32 10 in let countdown:i32 = Rust_primitives.Hax.while_loop (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in let _:Prims.unit = if countdown <. mk_i32 5 then let _:Prims.unit = might_abort false in () in let _:Prims.unit = if countdown <. mk_i32 5 then let _:Prims.unit = might_abort false in () in let _:Prims.unit = if countdown <. mk_i32 5 then let _:Prims.unit = might_abort false in () in let countdown:i32 = countdown -! mk_i32 1 in countdown) in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Assert.fst ================================================ module Coverage.Assert #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let might_fail_assert (one_plus_one: u32) : Prims.unit = let args:u32 = one_plus_one <: u32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["does 1 + 1 = "; "?\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let _:Prims.unit = match mk_u32 1 +! mk_u32 1, one_plus_one <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. 
right_val <: bool) in () let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let countdown:i32 = mk_i32 10 in let countdown:i32 = Rust_primitives.Hax.while_loop (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in let _:Prims.unit = if countdown =. mk_i32 1 then let _:Prims.unit = might_fail_assert (mk_u32 3) in () else if countdown <. mk_i32 5 then let _:Prims.unit = might_fail_assert (mk_u32 2) in () in let countdown:i32 = countdown -! mk_i32 1 in countdown) in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Assert_ne.fst ================================================ module Coverage.Assert_ne #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo : u32 -> t_Foo [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Fmt.t_Debug t_Foo unfold let impl = impl' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': Core_models.Marker.t_StructuralPartialEq t_Foo unfold let impl_1 = impl_1' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': Core_models.Cmp.t_PartialEq t_Foo t_Foo unfold let impl_2 = impl_2' let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = match Core_models.Hint.black_box #t_Foo (Foo (mk_u32 5) <: t_Foo), (if Core_models.Hint.black_box #bool false then Foo (mk_u32 0) <: t_Foo else Foo (mk_u32 1) <: t_Foo) <: (t_Foo & t_Foo) with | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. 
right_val <: bool) <: bool) in () <: Prims.unit ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Assert_not.fst ================================================ module Coverage.Assert_not #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = Hax_lib.v_assert true in let _:Prims.unit = Hax_lib.v_assert (~.false <: bool) in let _:Prims.unit = Hax_lib.v_assert (~.(~.true <: bool) <: bool) in let _:Prims.unit = Hax_lib.v_assert (~.(~.(~.false <: bool) <: bool) <: bool) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Impl_.fst ================================================ module Coverage.Attr.Impl_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_MyStruct = | MyStruct : t_MyStruct let impl_MyStruct__off_inherit (_: Prims.unit) : Prims.unit = () let impl_MyStruct__off_on (_: Prims.unit) : Prims.unit = () let impl_MyStruct__off_off (_: Prims.unit) : Prims.unit = () let impl_MyStruct__on_inherit (_: Prims.unit) : Prims.unit = () let impl_MyStruct__on_on (_: Prims.unit) : Prims.unit = () let impl_MyStruct__on_off (_: Prims.unit) : Prims.unit = () class t_MyTrait (v_Self: Type0) = { f_method_pre:Prims.unit -> Type0; f_method_post:Prims.unit -> Prims.unit -> Type0; f_method:x0: Prims.unit -> Prims.Pure Prims.unit (f_method_pre x0) (fun result -> f_method_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_MyTrait_for_MyStruct: t_MyTrait t_MyStruct = { f_method_pre = (fun (_: Prims.unit) -> true); f_method_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); f_method = fun (_: Prims.unit) -> () } let main (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.Nested_a.Nested_b.fst ================================================ 
module Coverage.Attr.Module.Nested_a.Nested_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let inner (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.Off.fst ================================================ module Coverage.Attr.Module.Off #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let inherit (_: Prims.unit) : Prims.unit = () let on (_: Prims.unit) : Prims.unit = () let off (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.On.fst ================================================ module Coverage.Attr.Module.On #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let inherit (_: Prims.unit) : Prims.unit = () let on (_: Prims.unit) : Prims.unit = () let off (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.fst ================================================ module Coverage.Attr.Module #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Off_on_sandwich.fst ================================================ module Coverage.Attr.Off_on_sandwich #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let do_stuff (_: Prims.unit) : Prims.unit = () let dense_a__dense_b__dense_c (_: Prims.unit) : Prims.unit = let _:Prims.unit = do_stuff () in () let dense_a__dense_b (_: Prims.unit) : Prims.unit = let _:Prims.unit = dense_a__dense_b__dense_c () in let _:Prims.unit = dense_a__dense_b__dense_c () in () let dense_a (_: Prims.unit) : Prims.unit = let _:Prims.unit = dense_a__dense_b () in let 
_:Prims.unit = dense_a__dense_b () in () let sparse_a__sparse_b__sparse_c__sparse_d__sparse_e (_: Prims.unit) : Prims.unit = let _:Prims.unit = do_stuff () in () let sparse_a__sparse_b__sparse_c__sparse_d (_: Prims.unit) : Prims.unit = let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d__sparse_e () in let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d__sparse_e () in () let sparse_a__sparse_b__sparse_c (_: Prims.unit) : Prims.unit = let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d () in let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d () in () let sparse_a__sparse_b (_: Prims.unit) : Prims.unit = let _:Prims.unit = sparse_a__sparse_b__sparse_c () in let _:Prims.unit = sparse_a__sparse_b__sparse_c () in () let sparse_a (_: Prims.unit) : Prims.unit = let _:Prims.unit = sparse_a__sparse_b () in let _:Prims.unit = sparse_a__sparse_b () in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = dense_a () in let _:Prims.unit = sparse_a () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Trait_impl_inherit.fst ================================================ module Coverage.Attr.Trait_impl_inherit #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models (* item error backend: Explicit rejection by a phase in the Hax engine: a node of kind [Trait_item_default] have been found in the AST Note: the error was labeled with context `reject_TraitItemDefault`. 
Last available AST for this item: #[(any(feature = "json"))]#[feature(coverage_attribute)]#[(any(feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq"))]#[feature(coverage_attribute)]#[allow(unused_attributes)]#[allow(dead_code)]#[allow(unreachable_code)]#[feature(register_tool)]#[register_tool(_hax)]trait t_T{fn f_f((self: Self)) -> tuple0{{let _: tuple0 = {std::io::stdio::e_print(core_models::fmt::rt::impl_1__new_const::(["default\n"]))};{let _: tuple0 = {Tuple0};Tuple0}}}} Last AST: /** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id = { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "coverage"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "attr"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "attr"); disambiguator = 0 }; { Types.data = (Types.TypeNs "trait_impl_inherit"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "attr"); disambiguator = 0 }; { Types.data = (Types.TypeNs "trait_impl_inherit"); disambiguator = 0 }; { Types.data = (Types.TypeNs "T"); disambiguator = 0 }] } }; moved = None; suffix = None }) */ const _: () = (); *) type t_S = | S : t_S [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_T t_S = { f_f_pre = (fun (self: t_S) -> true); f_f_post = (fun (self: t_S) (out: Prims.unit) -> true); f_f = fun (self: t_S) -> let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = 
["impl S\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () } let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = f_f #t_S #FStar.Tactics.Typeclasses.solve (S <: t_S) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Discard_all_helper.fst ================================================ module Coverage.Auxiliary.Discard_all_helper #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let external_function (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Used_crate.fst ================================================ module Coverage.Auxiliary.Used_crate #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Std.Env in () let used_only_from_bin_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_only_from_bin_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_only_from_this_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_only_from_this_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_from_bin_crate_and_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); 
Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_from_bin_crate_and_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_with_same_type_from_bin_crate_and_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_with_same_type_from_bin_crate_and_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let unused_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["unused_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let unused_function (_: 
Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 2 in if ~.is_true then let countdown:i32 = mk_i32 20 in () let unused_private_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 2 in if ~.is_true then let countdown:i32 = mk_i32 20 in () let uuse_this_lib_crate (_: Prims.unit) : Prims.unit = let _:Prims.unit = used_from_bin_crate_and_lib_crate_generic_function #string "used from library used_crate.rs" in let _:Prims.unit = used_with_same_type_from_bin_crate_and_lib_crate_generic_function #string "used from library used_crate.rs" in let some_vec:Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #i32 #Alloc.Alloc.t_Global (Rust_primitives.unsize (Rust_primitives.Hax.box_new (let list = [mk_i32 5; mk_i32 6; mk_i32 7; mk_i32 8] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4); Rust_primitives.Hax.array_of_list 4 list) <: Alloc.Boxed.t_Box (t_Array i32 (mk_usize 4)) Alloc.Alloc.t_Global) <: Alloc.Boxed.t_Box (t_Slice i32) Alloc.Alloc.t_Global) in let _:Prims.unit = used_only_from_this_lib_crate_generic_function #(Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global) some_vec in let _:Prims.unit = used_only_from_this_lib_crate_generic_function #string "used ONLY from library used_crate.rs" in () let used_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 10 in countdown else countdown in let _:Prims.unit = uuse_this_lib_crate () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Used_inline_crate.fst ================================================ module Coverage.Auxiliary.Used_inline_crate #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let used_only_from_bin_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_only_from_bin_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_only_from_this_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = 
["used_only_from_this_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_from_bin_crate_and_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_from_bin_crate_and_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let used_with_same_type_from_bin_crate_and_lib_crate_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["used_with_same_type_from_bin_crate_and_lib_crate_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let unused_generic_function (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T) (arg: v_T) : 
Prims.unit = let args:v_T = arg <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["unused_generic_function with "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let unused_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 2 in if ~.is_true then let countdown:i32 = mk_i32 20 in () let unused_private_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let countdown:i32 = mk_i32 2 in if ~.is_true then let countdown:i32 = mk_i32 20 in () let uuse_this_lib_crate (_: Prims.unit) : Prims.unit = let _:Prims.unit = used_from_bin_crate_and_lib_crate_generic_function #string "used from library used_crate.rs" in let _:Prims.unit = used_with_same_type_from_bin_crate_and_lib_crate_generic_function #string "used from library used_crate.rs" in let some_vec:Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #i32 #Alloc.Alloc.t_Global (Rust_primitives.unsize (Rust_primitives.Hax.box_new (let list = [mk_i32 5; mk_i32 6; mk_i32 7; mk_i32 8] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4); Rust_primitives.Hax.array_of_list 4 list) <: Alloc.Boxed.t_Box (t_Array i32 (mk_usize 4)) Alloc.Alloc.t_Global) <: Alloc.Boxed.t_Box (t_Slice i32) Alloc.Alloc.t_Global) in let _:Prims.unit = used_only_from_this_lib_crate_generic_function #(Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global) some_vec in let _:Prims.unit = used_only_from_this_lib_crate_generic_function #string "used ONLY from library used_crate.rs" in () let used_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 10 in countdown else countdown in let _:Prims.unit = uuse_this_lib_crate () in () let used_inline_function (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 10 in countdown else countdown in let _:Prims.unit = uuse_this_lib_crate () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Closure_macro.fst ================================================ module Coverage.Closure_macro #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let load_configuration_files (_: Prims.unit) : Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String = Core_models.Result.Result_Ok (Core_models.Convert.f_from #Alloc.String.t_String #string #FStar.Tactics.Typeclasses.solve "config") <: Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Alloc.String.t_String = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["Starting service\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in match Core_models.Result.impl__or_else #Alloc.String.t_String #Alloc.String.t_String #Alloc.String.t_String (load_configuration_files () <: Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String) (fun e -> let e:Alloc.String.t_String = e in let args:Alloc.String.t_String = e <: Alloc.String.t_String in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let message:Alloc.String.t_String = Core_models.Hint.must_use #Alloc.String.t_String (Alloc.Fmt.format (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1) (mk_usize 1) (let list = ["Error loading configs: "] in FStar.Pervasives.assert_norm 
(Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) args <: Core_models.Fmt.t_Arguments) <: Alloc.String.t_String) in if (Alloc.String.impl_String__len message <: usize) >. mk_usize 0 then let args:Alloc.String.t_String = message <: Alloc.String.t_String in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in Core_models.Result.Result_Ok (Core_models.Convert.f_from #Alloc.String.t_String #string #FStar.Tactics.Typeclasses.solve "ok") <: Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String else let _:Prims.unit = if (Core_models.Str.impl_str__len "error" <: usize) >. 
mk_usize 0 then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["no msg\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["error\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in Core_models.Result.Result_Err (Core_models.Convert.f_from #Alloc.String.t_String #string #FStar.Tactics.Typeclasses.solve "error") <: Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String) <: Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String with | Core_models.Result.Result_Ok config -> let startup_delay_duration:Alloc.String.t_String = Core_models.Convert.f_from #Alloc.String.t_String #string #FStar.Tactics.Typeclasses.solve "arg" in let _:(Alloc.String.t_String & Alloc.String.t_String) = config, startup_delay_duration <: (Alloc.String.t_String & Alloc.String.t_String) in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Alloc.String.t_String | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Alloc.String.t_String ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Closure_unit_return.fst ================================================ module Coverage.Closure_unit_return #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let explicit_unit (_: Prims.unit) : Prims.unit = let closure: Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in let _:Prims.unit = () <: Prims.unit in () in let _:Prims.unit = Core_models.Mem.drop closure in () <: 
Prims.unit let implicit_unit (_: Prims.unit) : Prims.unit = let closure: Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in let _:Prims.unit = () <: Prims.unit in () in let _:Prims.unit = Core_models.Mem.drop closure in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = explicit_unit () in let _:Prims.unit = implicit_unit () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Color.fst ================================================ module Coverage.Color #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : (Prims.unit & Prims.unit) = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 0) (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in true) () (fun temp_0_ e_i -> let _:Prims.unit = temp_0_ in let e_i:i32 = e_i in ()), () <: (Prims.unit & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Condition.Conditions.fst ================================================ module Coverage.Condition.Conditions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let simple_assign (a: bool) : Prims.unit = let x:bool = a in let _:bool = Core_models.Hint.black_box #bool x in () let assign_and (a b: bool) : Prims.unit = let x:bool = a && b in let _:bool = Core_models.Hint.black_box #bool x in () let assign_or (a b: bool) : Prims.unit = let x:bool = a || b in let _:bool = Core_models.Hint.black_box #bool x in () let assign_3_or_and (a b c: bool) : Prims.unit = let x:bool = a || b && c in let _:bool = Core_models.Hint.black_box #bool x in () let assign_3_and_or (a b c: bool) : Prims.unit = let x:bool = a && b || c in let _:bool = Core_models.Hint.black_box #bool x in () let foo (a: bool) : bool = Core_models.Hint.black_box #bool a let func_call (a b: bool) : Prims.unit = let _:bool = foo (a && b) in () let main (_: 
Prims.unit) : Prims.unit = let _:Prims.unit = simple_assign true in let _:Prims.unit = simple_assign false in let _:Prims.unit = assign_and true false in let _:Prims.unit = assign_and true true in let _:Prims.unit = assign_and false false in let _:Prims.unit = assign_or true false in let _:Prims.unit = assign_or true true in let _:Prims.unit = assign_or false false in let _:Prims.unit = assign_3_or_and true false false in let _:Prims.unit = assign_3_or_and true true false in let _:Prims.unit = assign_3_or_and false false true in let _:Prims.unit = assign_3_or_and false true true in let _:Prims.unit = assign_3_and_or true false false in let _:Prims.unit = assign_3_and_or true true false in let _:Prims.unit = assign_3_and_or false false true in let _:Prims.unit = assign_3_and_or false true true in let _:Prims.unit = func_call true false in let _:Prims.unit = func_call true true in let _:Prims.unit = func_call false false in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Conditions.fst ================================================ module Coverage.Conditions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main__v_B: u32 = mk_u32 100 let main (_: Prims.unit) : Prims.unit = let countdown:u32 = mk_u32 0 in let countdown:u32 = if true then let countdown:u32 = mk_u32 10 in countdown else countdown in if countdown >. mk_u32 7 then let countdown:u32 = countdown -! mk_u32 4 in let (countdown: u32), (x: u32) = countdown, main__v_B <: (u32 & u32) in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in if true then let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! 
mk_i32 4 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. 
mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! 
mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! 
mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in if true then let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. 
mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_u32 2 then let countdown:u32 = if countdown <. mk_u32 1 || countdown >. mk_u32 5 || countdown <>. mk_u32 9 then let countdown:u32 = mk_u32 0 in countdown else countdown in let countdown:u32 = countdown -! mk_u32 5 in let (countdown: u32), (x: u32) = countdown, countdown <: (u32 & u32) in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in if true then let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! 
mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! 
mk_i32 5 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! 
mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! 
mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in if true then let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 10 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. 
mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let countdown:i32 = mk_i32 0 in let countdown:i32 = if true then let countdown:i32 = mk_i32 1 in countdown else countdown in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in if countdown >. mk_i32 7 then let countdown:i32 = countdown -! mk_i32 4 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else if countdown >. mk_i32 2 then let countdown:i32 = if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9 then let countdown:i32 = mk_i32 0 in countdown else countdown in let countdown:i32 = countdown -! mk_i32 5 in let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in () else let should_be_reachable:i32 = countdown in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["reached\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Continue_.fst ================================================ module Coverage.Continue_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let x:i32 = mk_i32 0 in let x:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in match is_true <: bool with | true -> x | _ -> let x:i32 = mk_i32 1 in mk_i32 3) in let x:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in match is_true <: bool with | false -> let x:i32 = mk_i32 1 in mk_i32 3 | _ -> x) in let x:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in match is_true <: bool with | true -> let x:i32 = mk_i32 1 in mk_i32 3 | _ -> x) in let x:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in if is_true then x else mk_i32 3) in let x:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in let x:i32 = match is_true <: bool with | false -> let x:i32 = mk_i32 1 in x | _ -> let _:i32 = x in x in let x:i32 = mk_i32 3 in x) in let x:i32 = Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 0) (mk_i32 10) (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in true) x (fun x temp_1_ -> let x:i32 = x in let _:i32 = temp_1_ in match is_true <: bool with | false -> let x:i32 = mk_i32 1 in Core_models.Ops.Control_flow.ControlFlow_Continue (mk_i32 3) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 | _ -> Core_models.Ops.Control_flow.ControlFlow_Break ((), x <: (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) in let _:i32 = 
x in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Dead_code.fst ================================================ (* Machine-generated snapshot: F* extracted by hax from the rustc coverage test `dead_code`. Do not hand-edit code tokens; regenerate instead. *) module Coverage.Dead_code #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () (* Extraction of an unused `pub fn`: [is_true] holds iff the process sees exactly one CLI argument (the program name); the conditional merely shadows [countdown], so the body reduces to unit. *) let unused_pub_fn_not_in_library (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in if is_true then let countdown:i32 = mk_i32 10 in () (* Identical body to [unused_pub_fn_not_in_library]; extracted from an unused private fn in the same Rust source. *) let unused_fn (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in if is_true then let countdown:i32 = mk_i32 10 in () (* Entry point: same arg-count test as above. NOTE(review): this definition continues on the next physical line of the dump. *) let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =.
mk_usize 1 in let countdown:i32 = mk_i32 0 in if is_true then let countdown:i32 = mk_i32 10 in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Drop_trait.fst ================================================ (* Machine-generated snapshot: F* extracted by hax from the rustc coverage test `drop_trait`. Do not hand-edit code tokens; regenerate instead. *) module Coverage.Drop_trait #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models (* Record modeling the Rust `Firework { strength: i32 }` struct. *) type t_Firework = { f_strength:i32 } (* Drop instance for [t_Firework]: pre/post conditions are trivially true; [f_drop] formats and prints "BOOM times <strength>!!!" via Std.Io.Stdio.e_print and returns [self] (the extraction is purely functional, so drop yields the value back). *) [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Ops.Drop.t_Drop t_Firework = { f_drop_pre = (fun (self: t_Firework) -> true); f_drop_post = (fun (self: t_Firework) (out: t_Firework) -> true); f_drop = fun (self: t_Firework) -> let args:i32 = self.f_strength <: i32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #i32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["BOOM times "; "!!!\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in self } (* Entry point: builds two fireworks, then always takes the `if true` branch, printing "Exiting with error..." and returning Err 1. The else branch (Ok) is statically unreachable in this extraction. NOTE(review): continues on the next physical line of the dump. *) let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let e_firecracker:t_Firework = { f_strength = mk_i32 1 } <: t_Firework in let e_tnt:t_Firework = { f_strength = mk_i32 100 } <: t_Firework in if true then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["Exiting with error...\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8 else let _:t_Firework = { f_strength = mk_i32 1000 } <: t_Firework in Core_models.Result.Result_Ok (() <: Prims.unit) <:
Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Fn_sig_into_try.fst ================================================ (* Machine-generated snapshot: F* extracted by hax from the rustc coverage test `fn_sig_into_try`. Do not hand-edit code tokens; regenerate instead. *) module Coverage.Fn_sig_into_try #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models (* [a]: evaluates Some 7, discards it, returns Some 0 — the `?` in the Rust source is elided because the scrutinee is statically Some. *) let a (_: Prims.unit) : Core_models.Option.t_Option i32 = let _:Core_models.Option.t_Option i32 = Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 in Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32 (* [b]: the Rust `?` operator desugared to an explicit match: Some _ falls through to Some 0, None propagates None. *) let b (_: Prims.unit) : Core_models.Option.t_Option i32 = match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some _ -> Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32 (* [c]: identical desugaring to [b]. *) let c (_: Prims.unit) : Core_models.Option.t_Option i32 = match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some _ -> Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32 (* [d]: like [b]/[c] but with a leading discarded unit statement before the desugared `?` match. *) let d (_: Prims.unit) : Core_models.Option.t_Option i32 = let _:Prims.unit = () <: Prims.unit in match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some _ -> Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32 (* Entry point: calls [a]-[d] for coverage, discarding each result. *) let main (_: Prims.unit) : Prims.unit = let _:Core_models.Option.t_Option i32 = a () in let _:Core_models.Option.t_Option i32 = b () in let _:Core_models.Option.t_Option i32 = c () in let _:Core_models.Option.t_Option i32 = d () in () ================================================ FILE:
rustc-coverage-tests/snapshots/fstar/Coverage.Generics.fst ================================================ module Coverage.Generics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Firework (v_T: Type0) {| i0: Core_models.Marker.t_Copy v_T |} {| i1: Core_models.Fmt.t_Display v_T |} = { f_strength:v_T } let impl__set_strength (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T) (self: t_Firework v_T) (new_strength: v_T) : t_Firework v_T = let self:t_Firework v_T = { self with f_strength = new_strength } <: t_Firework v_T in self [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T) : Core_models.Ops.Drop.t_Drop (t_Firework v_T) = { f_drop_pre = (fun (self: t_Firework v_T) -> true); f_drop_post = (fun (self: t_Firework v_T) (out: t_Firework v_T) -> true); f_drop = fun (self: t_Firework v_T) -> let args:v_T = self.f_strength <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["BOOM times "; "!!!\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in self } let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let firecracker:t_Firework i32 = { f_strength = mk_i32 1 } <: t_Firework i32 in let firecracker:t_Firework i32 = impl__set_strength #i32 firecracker (mk_i32 2) in let tnt:t_Firework float = { 
f_strength = mk_float "100.1" } <: t_Firework float in let tnt:t_Firework float = impl__set_strength #float tnt (mk_float "200.1") in let tnt:t_Firework float = impl__set_strength #float tnt (mk_float "300.3") in if true then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["Exiting with error...\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8 else let _:t_Firework i32 = { f_strength = mk_i32 1000 } <: t_Firework i32 in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.If_.fst ================================================ module Coverage.If_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in if is_true then let countdown:i32 = mk_i32 10 in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.If_else.fst ================================================ module Coverage.If_else #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Std.Env in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 10 in countdown else mk_i32 100 in if is_true then let countdown:i32 = mk_i32 10 in () else let countdown:i32 = mk_i32 100 in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.If_not.fst ================================================ module Coverage.If_not #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let if_not (cond: bool) : Prims.unit = let _:Prims.unit = if ~.cond then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cond was false\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in let _:Prims.unit = if ~.cond then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cond was false\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in if ~.cond then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cond was false\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cond was true\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: 
Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let main (_: Prims.unit) : (Prims.unit & Prims.unit) = let _:Prims.unit = Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 8) (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in true) () (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in if_not (Core_models.Hint.black_box #bool true <: bool) <: Prims.unit) in Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 4) (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in true) () (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in if_not (Core_models.Hint.black_box #bool false <: bool) <: Prims.unit), () <: (Prims.unit & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Ignore_map.fst ================================================ module Coverage.Ignore_map #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Ignore_run.fst ================================================ module Coverage.Ignore_run #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Inline.fst ================================================ module Coverage.Inline #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let length (#v_T: Type0) (xs: t_Slice v_T) : usize = Core_models.Slice.impl__len #v_T xs let swap (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (xs: t_Slice v_T) (i j: usize) : t_Slice v_T = let t:v_T = xs.[ i ] in let xs:t_Slice v_T = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize xs i 
(xs.[ j ] <: v_T) in let xs:t_Slice v_T = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize xs j t in xs let display (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Display v_T) (xs: t_Slice v_T) : Prims.unit = let _:Prims.unit = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice v_T) #FStar.Tactics.Typeclasses.solve xs <: Core_models.Slice.Iter.t_Iter v_T) () (fun temp_0_ x -> let _:Prims.unit = temp_0_ in let x:v_T = x in let args:v_T = x <: v_T in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #v_T args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1) (mk_usize 1) (let list = [""] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) args <: Core_models.Fmt.t_Arguments) in ()) in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let error (_: Prims.unit) : Prims.unit = Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["error"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) <: Rust_primitives.Hax.t_Never) let rec permutate (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T) (xs: t_Slice v_T) (k: usize) : t_Slice v_T = let n:usize = length #v_T xs in let xs:t_Slice v_T = if k =. 
n then let _:Prims.unit = display #v_T xs in xs else if k <. n then Rust_primitives.Hax.Folds.fold_range k n (fun xs temp_1_ -> let xs:t_Slice v_T = xs in let _:usize = temp_1_ in true) xs (fun xs i -> let xs:t_Slice v_T = xs in let i:usize = i in let xs:t_Slice v_T = swap #v_T xs i k in let xs:t_Slice v_T = permutate #v_T xs (k +! mk_usize 1 <: usize) in let xs:t_Slice v_T = swap #v_T xs i k in xs) else let _:Prims.unit = error () in xs in xs let permutations (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T) (xs: t_Slice v_T) : Prims.unit = let ys:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global = Alloc.Borrow.f_to_owned #(t_Slice v_T) #FStar.Tactics.Typeclasses.solve xs in let ys:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global = permutate #v_T ys (mk_usize 0) in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = permutations #FStar.Char.char ((let list = ['a'; 'b'; 'c'] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) <: t_Slice FStar.Char.char) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Inline_dead.fst ================================================ module Coverage.Inline_dead #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let dead (_: Prims.unit) : u32 = mk_u32 42 let live (v_B: bool) (_: Prims.unit) : u32 = if v_B then dead () else mk_u32 0 let main (_: Prims.unit) : Prims.unit = let args:u32 = live false () <: u32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in 
FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let f: bool -> Prims.unit = fun x -> let x:bool = x in let _:Prims.unit = if true then let _:Prims.unit = Hax_lib.v_assert x in () in () in let _:Prims.unit = Core_models.Ops.Function.f_call #bool #FStar.Tactics.Typeclasses.solve f (false <: bool) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Inner_items.fst ================================================ module Coverage.Inner_items #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main__t_in_mod__v_IN_MOD_CONST: u32 = mk_u32 1000 let main__in_func (a: u32) : Prims.unit = let b:u32 = mk_u32 1 in let c:u32 = a +! b in let args:u32 = c <: u32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["c = "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in () type main__t_InStruct = { main__f_in_struct_field:u32 } let main__v_IN_CONST: u32 = mk_u32 1234 (* item error backend: Explicit rejection by a phase in the Hax engine: a node of kind [Trait_item_default] have been found in the AST Note: the error was labeled with context `reject_TraitItemDefault`. 
Last available AST for this item: #[(any(feature = "json", feature = "lean"))]#[allow(unused_assignments, unused_variables, dead_code)]#[feature(coverage_attribute)]#[allow(unused_attributes)]#[allow(dead_code)]#[allow(unreachable_code)]#[feature(register_tool)]#[register_tool(_hax)]trait main__t_InTrait{#[_hax::json("\"TraitMethodNoPrePost\"")]fn main__f_trait_func_pre(_: Self,_: int) -> bool; #[_hax::json("\"TraitMethodNoPrePost\"")]fn main__f_trait_func_post(_: Self,_: int,_: Self) -> bool; fn main__f_trait_func(_: Self,_: int) -> Self; fn main__f_default_trait_func((self: Self)) -> Self{{let _: tuple0 = {coverage::inner_items::main__in_func(coverage::inner_items::main__v_IN_CONST)};{let self: Self = {coverage::inner_items::main__f_trait_func(self,coverage::inner_items::main__v_IN_CONST)};self}}}} Last AST: /** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id = { Explicit_def_id.T.is_constructor = false; def_id = { Types.index = (0, 0, None); is_local = true; kind = Types.Trait; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Fn; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "coverage"; parent = (Some { Types.contents = { Types.id = 0; value = { Types.index = (0, 0, None); is_local = true; kind = Types.Mod; krate = "coverage"; parent = None; path = [] } } }); path = [{ Types.data = (Types.TypeNs "inner_items"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "inner_items"); disambiguator = 0 }; { Types.data = (Types.ValueNs "main"); disambiguator = 0 } ] } } }); path = [{ Types.data = (Types.TypeNs "inner_items"); disambiguator = 0 }; { Types.data = (Types.ValueNs "main"); disambiguator = 0 }; { Types.data = (Types.TypeNs "InTrait"); disambiguator = 0 }] } }; moved = None; suffix = None }) */ const _: () = (); *) [@@ 
FStar.Tactics.Typeclasses.tcinstance] let main__impl: main__t_InTrait main__t_InStruct = { main__f_trait_func_pre = (fun (self: main__t_InStruct) (incr: u32) -> true); main__f_trait_func_post = (fun (self: main__t_InStruct) (incr: u32) (out: main__t_InStruct) -> true); main__f_trait_func = fun (self: main__t_InStruct) (incr: u32) -> let self:main__t_InStruct = { self with main__f_in_struct_field = self.main__f_in_struct_field +! incr } <: main__t_InStruct in let _:Prims.unit = main__in_func self.main__f_in_struct_field in self } let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:u32 = mk_u32 0 in let countdown:u32 = if is_true then let countdown:u32 = mk_u32 10 in countdown else countdown in let _:Prims.unit = if is_true then let _:Prims.unit = main__in_func countdown in () in let v_val:main__t_InStruct = { main__f_in_struct_field = mk_u32 101 } <: main__t_InStruct in let v_val:main__t_InStruct = main__f_default_trait_func #main__t_InStruct #FStar.Tactics.Typeclasses.solve v_val in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Issue_83601_.fst ================================================ module Coverage.Issue_83601_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo : u32 -> t_Foo [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Fmt.t_Debug t_Foo unfold let impl = impl' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': Core_models.Marker.t_StructuralPartialEq t_Foo unfold let impl_1 = impl_1' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': Core_models.Cmp.t_PartialEq t_Foo t_Foo unfold let impl_2 = impl_2' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_3': Core_models.Cmp.t_Eq t_Foo unfold let impl_3 = impl_3' let main (_: 
Prims.unit) : Prims.unit = let bar:t_Foo = Foo (mk_u32 1) <: t_Foo in let _:Prims.unit = match bar, (Foo (mk_u32 1) <: t_Foo) <: (t_Foo & t_Foo) with | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool) in let baz:t_Foo = Foo (mk_u32 0) <: t_Foo in let _:Prims.unit = match baz, (Foo (mk_u32 1) <: t_Foo) <: (t_Foo & t_Foo) with | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. right_val <: bool) <: bool) in let args:t_Foo = (Foo (mk_u32 1) <: t_Foo) <: t_Foo in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let args:t_Foo = bar <: t_Foo in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let args:t_Foo = baz <: t_Foo in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in 
FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Lazy_boolean.fst ================================================ module Coverage.Lazy_boolean #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let (a: i32), (b: i32), (c: i32) = mk_i32 0, mk_i32 0, mk_i32 0 <: (i32 & i32 & i32) in let (a: i32), (b: i32), (c: i32) = if is_true then let a:i32 = mk_i32 1 in let b:i32 = mk_i32 10 in let c:i32 = mk_i32 100 in a, b, c <: (i32 & i32 & i32) else a, b, c <: (i32 & i32 & i32) in let somebool:bool = a <. b || b <. c in let somebool:bool = b <. a || b <. c in let somebool:bool = a <. b && b <. c in let somebool:bool = b <. a && b <. 
c in let a:i32 = if ~.is_true then let a:i32 = mk_i32 2 in a else a in let (b: i32), (c: i32) = if is_true then let b:i32 = mk_i32 30 in b, c <: (i32 & i32) else let c:i32 = mk_i32 400 in b, c <: (i32 & i32) in let a:i32 = if ~.is_true then let a:i32 = mk_i32 2 in a else a in if is_true then let b:i32 = mk_i32 30 in () else let c:i32 = mk_i32 400 in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Let_else_loop.fst ================================================ module Coverage.Let_else_loop #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let loopy (cond: bool) : Prims.unit = match cond <: bool with | true -> () | _ -> Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }", () <: (Prims.unit & Prims.unit) let e_loop_either_way (cond: bool) : Prims.unit = match cond <: bool with | true -> Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }" <: Prims.unit), () <: (Prims.unit & Prims.unit)) | _ -> Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }", () <: (Prims.unit & Prims.unit) let e_if (cond: bool) : Prims.unit = if cond then 
Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }" <: Prims.unit), () <: (Prims.unit & Prims.unit)) else Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }" <: Prims.unit), () <: (Prims.unit & Prims.unit)) let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = loopy true in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Long_and_wide.fst ================================================ module Coverage.Long_and_wide #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let wide_function (_: Prims.unit) : Prims.unit = let _:Prims.unit = () <: Prims.unit in () let long_function (_: Prims.unit) : Prims.unit = () let far_function (_: Prims.unit) : Prims.unit = () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = wide_function () in let _:Prims.unit = long_function () in let _:Prims.unit = far_function () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Loop_break.fst ================================================ module Coverage.Loop_break #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : (Prims.unit & Prims.unit) = Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue 
https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n (if core_models::hint::black_box::(true) {\n core_models::ops::control_flow::ControlFlow_Break(\n Tuple2(Tuple0, Tuple0()),\n )\n } else {\n core_models::ops::control_flow::ControlFlow_Con..." , () <: (Prims.unit & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Loop_break_value.fst ================================================ module Coverage.Loop_break_value #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = let result:i32 = Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n core_models::ops::control_flow::ControlFlow_Break(Tuple2(10, Tuple0()))\n }\n }", () <: (Prims.unit & Prims.unit) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Loops_branches.fst ================================================ module Coverage.Loops_branches #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_DebugTest = | DebugTest : t_DebugTest [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Fmt.t_Debug t_DebugTest = { f_fmt_pre = (fun (self: t_DebugTest) (f: Core_models.Fmt.t_Formatter) -> true); f_fmt_post = (fun (self: t_DebugTest) (f: Core_models.Fmt.t_Formatter) (out1: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) -> true); f_fmt = fun (self: t_DebugTest) (f: Core_models.Fmt.t_Formatter) -> if true then let _:Prims.unit = if false then 
Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> (match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) (fun f temp_1_ -> let f:Core_models.Fmt.t_Formatter = f in let _:i32 = temp_1_ in true) f (fun f i -> let f:Core_models.Fmt.t_Formatter = f in let i:i32 = i in if true then let _:Prims.unit = if false then Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int ) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> 
Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter else Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) Core_models.Fmt.t_Formatter with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue f -> let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error in f, hax_temp_output <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit 
Core_models.Fmt.t_Error)) | Core_models.Result.Result_Err err -> f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) else match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) (fun f temp_1_ -> let f:Core_models.Fmt.t_Formatter = f in let _:i32 = temp_1_ in true) f (fun f i -> let f:Core_models.Fmt.t_Formatter = f in let i:i32 = i in if true then let _:Prims.unit = if false then Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result 
Prims.unit Core_models.Fmt.t_Error)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter else Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) Core_models.Fmt.t_Formatter with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue f -> let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error in f, hax_temp_output <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) } type t_DisplayTest = | DisplayTest : t_DisplayTest [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Core_models.Fmt.t_Display t_DisplayTest = { f_fmt_pre = (fun (self: t_DisplayTest) (f: Core_models.Fmt.t_Formatter) -> true); f_fmt_post = (fun (self: t_DisplayTest) (f: Core_models.Fmt.t_Formatter) (out1: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) -> true); f_fmt = fun (self: t_DisplayTest) (f: Core_models.Fmt.t_Formatter) -> if false then match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) (fun f temp_1_ -> 
let f:Core_models.Fmt.t_Formatter = f in let _:i32 = temp_1_ in true) f (fun f i -> let f:Core_models.Fmt.t_Formatter = f in let i:i32 = i in if false then Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter else let _:Prims.unit = if false then Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) <: 
Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) Core_models.Fmt.t_Formatter with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue f -> let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error in f, hax_temp_output <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) else let _:Prims.unit = if false then Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> (match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) 
(fun f temp_1_ -> let f:Core_models.Fmt.t_Formatter = f in let _:i32 = temp_1_ in true) f (fun f i -> let f:Core_models.Fmt.t_Formatter = f in let i:i32 = i in if false then Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter else let _:Prims.unit = if false then Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int ) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()) in let (tmp0: Core_models.Fmt.t_Formatter), (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) = Core_models.Fmt.impl_11__write_fmt f (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["cool"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let f:Core_models.Fmt.t_Formatter = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with | Core_models.Result.Result_Ok _ -> Core_models.Ops.Control_flow.ControlFlow_Continue f <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit 
Core_models.Fmt.t_Error)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) Core_models.Fmt.t_Formatter with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue f -> let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error in f, hax_temp_output <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)) | Core_models.Result.Result_Err err -> f, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) <: (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) } let main (_: Prims.unit) : Prims.unit = let debug_test:t_DebugTest = DebugTest <: t_DebugTest in let args:t_DebugTest = debug_test <: t_DebugTest in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_debug #t_DebugTest args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let 
_:Prims.unit = () in let display_test:t_DisplayTest = DisplayTest <: t_DisplayTest in let args:t_DisplayTest = display_test <: t_DisplayTest in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #t_DisplayTest args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Macro_in_closure.fst ================================================ module Coverage.Macro_in_closure #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models
(* Closure (extracted as a unit-to-unit function value) whose body is a single
   println-style call: prints the literal "hello\n" with no surrounding block
   in the Rust source. *)
let v_NO_BLOCK: Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["hello\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in ()
(* Same output as v_NO_BLOCK ("hello\n"); the extra trailing `let _:Prims.unit
   = () in` reflects the explicit block around the macro call in the Rust
   source, which is what this coverage test distinguishes. *)
let v_WITH_BLOCK: Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["hello\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in ()
(* Entry point: invokes both closures once each, so each prints "hello\n". *)
let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = v_NO_BLOCK () in let _:Prims.unit = v_WITH_BLOCK () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Match_or_pattern.fst ================================================
module Coverage.Match_or_pattern #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let (a: u8):u8 = mk_u8 0 in let (b: u8):u8 = mk_u8 0 in let (a: u8), (b: u8) = if is_true then let a:u8 = mk_u8 2 in let b:u8 = mk_u8 0 in a, b <: (u8 & u8) else a, b <: (u8 & u8) in let _:Prims.unit = match a, b <: (u8 & u8) with | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> () | _ -> () in let (a: u8), (b: u8) = if is_true then let a:u8 = mk_u8 0 in let b:u8 = mk_u8 0 in a, b <: (u8 & u8) else a, b <: (u8 & u8) in let _:Prims.unit = match a, b <: (u8 & u8) with | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> () | _ -> () in let (a: u8), (b: u8) = if is_true then let a:u8 = mk_u8 2 in let b:u8 = mk_u8 2 in a, b <: (u8 & u8) else a, b <: (u8 & u8) in let _:Prims.unit = match a, b <: (u8 & u8) with | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> () | _ -> () in let (a: u8), (b: u8) = if is_true 
then let a:u8 = mk_u8 0 in let b:u8 = mk_u8 2 in a, b <: (u8 & u8) else a, b <: (u8 & u8) in match a, b <: (u8 & u8) with | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2 | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> () | _ -> () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Condition_limit.fst ================================================ module Coverage.Mcdc.Condition_limit #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let accept_7_conditions (bool_arr: t_Array bool (mk_usize 7)) : Prims.unit = Rust_primitives.Hax.failure "something is not implemented yet.\nPat:Array\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/804.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `AST import`.\n" "" let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = accept_7_conditions (Rust_primitives.Hax.repeat false (mk_usize 7) <: t_Array bool (mk_usize 7)) in let _:Prims.unit = accept_7_conditions (Rust_primitives.Hax.repeat true (mk_usize 7) <: t_Array bool (mk_usize 7)) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.If_.fst ================================================ module Coverage.Mcdc.If_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let say (message: string) : Prims.unit = let _:string = Core_models.Hint.black_box #string message in () let mcdc_check_neither (a b: bool) : Prims.unit = if a && b then let _:Prims.unit = say "a and b" in () else let _:Prims.unit = say "not both" in () let mcdc_check_a (a b: bool) : Prims.unit = if a && b then let _:Prims.unit = say "a and b" in () else let _:Prims.unit = say "not both" 
in () let mcdc_check_b (a b: bool) : Prims.unit = if a && b then let _:Prims.unit = say "a and b" in () else let _:Prims.unit = say "not both" in () let mcdc_check_both (a b: bool) : Prims.unit = if a && b then let _:Prims.unit = say "a and b" in () else let _:Prims.unit = say "not both" in () let mcdc_check_tree_decision (a b c: bool) : Prims.unit = if a && (b || c) then let _:Prims.unit = say "pass" in () else let _:Prims.unit = say "reject" in () let mcdc_check_not_tree_decision (a b c: bool) : Prims.unit = if (a || b) && c then let _:Prims.unit = say "pass" in () else let _:Prims.unit = say "reject" in () let mcdc_nested_if (a b c: bool) : Prims.unit = if a || b then let _:Prims.unit = say "a or b" in if b && c then let _:Prims.unit = say "b and c" in () else let _:Prims.unit = say "neither a nor b" in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = mcdc_check_neither false false in let _:Prims.unit = mcdc_check_neither false true in let _:Prims.unit = mcdc_check_a true true in let _:Prims.unit = mcdc_check_a false true in let _:Prims.unit = mcdc_check_b true true in let _:Prims.unit = mcdc_check_b true false in let _:Prims.unit = mcdc_check_both false true in let _:Prims.unit = mcdc_check_both true true in let _:Prims.unit = mcdc_check_both true false in let _:Prims.unit = mcdc_check_tree_decision false true true in let _:Prims.unit = mcdc_check_tree_decision true true false in let _:Prims.unit = mcdc_check_tree_decision true false false in let _:Prims.unit = mcdc_check_tree_decision true false true in let _:Prims.unit = mcdc_check_not_tree_decision false true true in let _:Prims.unit = mcdc_check_not_tree_decision true true false in let _:Prims.unit = mcdc_check_not_tree_decision true false false in let _:Prims.unit = mcdc_check_not_tree_decision true false true in let _:Prims.unit = mcdc_nested_if true false true in let _:Prims.unit = mcdc_nested_if true true true in let _:Prims.unit = mcdc_nested_if true true false in () 
================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Inlined_expressions.fst ================================================ module Coverage.Mcdc.Inlined_expressions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let inlined_instance (a b: bool) : bool = a && b let main (_: Prims.unit) : Prims.unit = let _:bool = inlined_instance true false in let _:bool = inlined_instance false true in let _:bool = inlined_instance true true in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Nested_if.fst ================================================ module Coverage.Mcdc.Nested_if #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let say (message: string) : Prims.unit = let _:string = Core_models.Hint.black_box #string message in () let nested_if_in_condition (a b c: bool) : Prims.unit = if a && (if b || c then true else false) then let _:Prims.unit = say "yes" in () else let _:Prims.unit = say "no" in () let doubly_nested_if_in_condition (a b c d: bool) : Prims.unit = if a && (if b || (if c && d then true else false) then false else true) then let _:Prims.unit = say "yes" in () else let _:Prims.unit = say "no" in () let nested_single_condition_decision (a b: bool) : Prims.unit = if a && (if b then false else true) then let _:Prims.unit = say "yes" in () else let _:Prims.unit = say "no" in () let nested_in_then_block_in_condition (a b c d e: bool) : Prims.unit = if a && (if b || c then if d && e then true else false else false) then let _:Prims.unit = say "yes" in () else let _:Prims.unit = say "no" in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = nested_if_in_condition true false false in let _:Prims.unit = nested_if_in_condition true true true in let _:Prims.unit = nested_if_in_condition true false true in let _:Prims.unit = nested_if_in_condition false true true in let _:Prims.unit = 
doubly_nested_if_in_condition true false false true in let _:Prims.unit = doubly_nested_if_in_condition true true true true in let _:Prims.unit = doubly_nested_if_in_condition true false true true in let _:Prims.unit = doubly_nested_if_in_condition false true true true in let _:Prims.unit = nested_single_condition_decision true true in let _:Prims.unit = nested_single_condition_decision true false in let _:Prims.unit = nested_single_condition_decision false false in let _:Prims.unit = nested_in_then_block_in_condition false false false false false in let _:Prims.unit = nested_in_then_block_in_condition true false false false false in let _:Prims.unit = nested_in_then_block_in_condition true true false false false in let _:Prims.unit = nested_in_then_block_in_condition true false true false false in let _:Prims.unit = nested_in_then_block_in_condition true false true true false in let _:Prims.unit = nested_in_then_block_in_condition true false true false true in let _:Prims.unit = nested_in_then_block_in_condition true false true true true in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Non_control_flow.fst ================================================ module Coverage.Mcdc.Non_control_flow #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let assign_and (a b: bool) : Prims.unit = let x:bool = a && b in let _:bool = Core_models.Hint.black_box #bool x in () let assign_or (a b: bool) : Prims.unit = let x:bool = a || b in let _:bool = Core_models.Hint.black_box #bool x in () let assign_3_ (a b c: bool) : Prims.unit = let x:bool = a || b && c in let _:bool = Core_models.Hint.black_box #bool x in () let assign_3_bis (a b c: bool) : Prims.unit = let x:bool = a && b || c in let _:bool = Core_models.Hint.black_box #bool x in () let right_comb_tree (a b c d e: bool) : Prims.unit = let x:bool = a && (b && (c && (d && e))) in let _:bool = Core_models.Hint.black_box #bool x in () let 
foo (a: bool) : bool = Core_models.Hint.black_box #bool a let func_call (a b: bool) : Prims.unit = let _:bool = foo (a && b) in () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = assign_and true false in let _:Prims.unit = assign_and true true in let _:Prims.unit = assign_and false false in let _:Prims.unit = assign_or true false in let _:Prims.unit = assign_or true true in let _:Prims.unit = assign_or false false in let _:Prims.unit = assign_3_ true false false in let _:Prims.unit = assign_3_ true true false in let _:Prims.unit = assign_3_ false false true in let _:Prims.unit = assign_3_ false true true in let _:Prims.unit = assign_3_bis true false false in let _:Prims.unit = assign_3_bis true true false in let _:Prims.unit = assign_3_bis false false true in let _:Prims.unit = assign_3_bis false true true in let _:Prims.unit = right_comb_tree false false false true true in let _:Prims.unit = right_comb_tree true false false true true in let _:Prims.unit = right_comb_tree true true true true true in let _:Prims.unit = func_call true false in let _:Prims.unit = func_call true true in let _:Prims.unit = func_call false false in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Nested_loops.fst ================================================ module Coverage.Nested_loops #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : (i32 & Prims.unit) = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let countdown:i32 = mk_i32 10 in Rust_primitives.Hax.while_loop (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in let a:i32 = mk_i32 100 in let b:i32 = mk_i32 100 in let (a: i32), (b: i32) = Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 0) (mk_i32 50) (fun temp_0_ temp_1_ -> let (a: i32), (b: i32) = temp_0_ in let _:i32 = temp_1_ in true) (a, b <: (i32 & i32)) (fun temp_0_ temp_1_ -> let (a: i32), (b: i32) = temp_0_ in let _:i32 = temp_1_ in if a <. mk_i32 30 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break ((), (a, b <: (i32 & i32)) <: (Prims.unit & (i32 & i32))) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32) else let a:i32 = a -! mk_i32 5 in let b:i32 = b -! mk_i32 5 in if b <. mk_i32 90 then let a:i32 = a -! mk_i32 10 in if is_true then Core_models.Ops.Control_flow.ControlFlow_Break ((), (a, b <: (i32 & i32)) <: (Prims.unit & (i32 & i32))) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32) else let a:i32 = a -! mk_i32 2 in Core_models.Ops.Control_flow.ControlFlow_Continue (a, b <: (i32 & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32) else Core_models.Ops.Control_flow.ControlFlow_Continue (a, b <: (i32 & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32)) in countdown -! 
mk_i32 1), () <: (i32 & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.No_cov_crate.Nested_fns.fst ================================================ module Coverage.No_cov_crate.Nested_fns #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let outer_not_covered__inner (is_true: bool) : Prims.unit = if is_true then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["absolutely not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let outer_not_covered (is_true: bool) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called but not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let _:Prims.unit = outer_not_covered__inner is_true in () let outer__inner_not_covered (is_true: bool) : Prims.unit = if is_true then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called but not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["absolutely not covered\n"] in FStar.Pervasives.assert_norm 
(Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let outer (is_true: bool) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let _:Prims.unit = outer__inner_not_covered is_true in () let outer_both_covered__inner (is_true: bool) : Prims.unit = if is_true then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["absolutely not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let outer_both_covered (is_true: bool) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let _:Prims.unit = outer_both_covered__inner is_true in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.No_cov_crate.fst ================================================ module Coverage.No_cov_crate #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them 
explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let do_not_add_coverage_1_ (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called but not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let do_not_add_coverage_2_ (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called but not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let do_not_add_coverage_not_called (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["not called and not covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let add_coverage_1_ (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let add_coverage_2_ (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["called and covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let add_coverage_not_called (_: Prims.unit) : Prims.unit = let _:Prims.unit = 
Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["not called but covered\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let main (_: Prims.unit) : Prims.unit = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let _:Prims.unit = do_not_add_coverage_1_ () in let _:Prims.unit = do_not_add_coverage_2_ () in let _:Prims.unit = add_coverage_1_ () in let _:Prims.unit = add_coverage_2_ () in let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer_not_covered is_true in let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer is_true in let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer_both_covered is_true in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.No_spans.fst ================================================ module Coverage.No_spans #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let affected_function (_: Prims.unit) : Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in () <: Prims.unit let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = Core_models.Ops.Function.f_call #Prims.unit #FStar.Tactics.Typeclasses.solve (affected_function () <: Prims.unit -> Prims.unit) (() <: Prims.unit) in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.No_spans_if_not.fst ================================================ module Coverage.No_spans_if_not #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let affected_function (_: Prims.unit) : Prims.unit = if ~.false then () <: Prims.unit else () <: Prims.unit let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = affected_function () in () 
================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Overflow.fst ================================================ module Coverage.Overflow #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let might_overflow (to_add: u32) : u32 = let _:Prims.unit = if to_add >. mk_u32 5 then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["this will probably overflow\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in let add_to:u32 = Core_models.Num.impl_u32__MAX -! mk_u32 5 in let args:(u32 & u32) = add_to, to_add <: (u32 & u32) in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 2) = let list = [ Core_models.Fmt.Rt.impl__new_display #u32 args._1; Core_models.Fmt.Rt.impl__new_display #u32 args._2 ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 3) (mk_usize 2) (let list = ["does "; " + "; " overflow?\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in let result:u32 = to_add +! 
add_to in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["continuing after overflow check\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in result let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let countdown:i32 = mk_i32 10 in let countdown:i32 = Rust_primitives.Hax.while_loop (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in let _:Prims.unit = if countdown =. mk_i32 1 then let result:u32 = might_overflow (mk_u32 10) in let args:u32 = result <: u32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["Result: "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () else if countdown <. 
mk_i32 5 then let result:u32 = might_overflow (mk_u32 1) in let args:u32 = result <: u32 in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = ["Result: "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in let countdown:i32 = countdown -! mk_i32 1 in countdown) in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Panic_unwind.fst ================================================ module Coverage.Panic_unwind #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let might_panic (should_panic: bool) : Prims.unit = if should_panic then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["panicking...\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["panics"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) <: Rust_primitives.Hax.t_Never) else let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["Don't Panic\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: 
Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let countdown:i32 = mk_i32 10 in let countdown:i32 = Rust_primitives.Hax.while_loop (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in let _:Prims.unit = if countdown =. mk_i32 1 then let _:Prims.unit = might_panic true in () else if countdown <. mk_i32 5 then let _:Prims.unit = might_panic false in () in let countdown:i32 = countdown -! mk_i32 1 in countdown) in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Partial_eq.fst ================================================ module Coverage.Partial_eq #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Version = { f_major:usize; f_minor:usize; f_patch:usize } let impl_1: Core_models.Clone.t_Clone t_Version = { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': Core_models.Fmt.t_Debug t_Version unfold let impl_2 = impl_2' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_3': Core_models.Marker.t_StructuralPartialEq t_Version unfold let impl_3 = impl_3' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_4': Core_models.Cmp.t_PartialEq t_Version t_Version unfold let impl_4 = impl_4' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_5': Core_models.Cmp.t_Eq t_Version unfold let impl_5 = impl_5' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_6': Core_models.Cmp.t_PartialOrd t_Version t_Version unfold let impl_6 = 
impl_6' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_7': Core_models.Cmp.t_Ord t_Version unfold let impl_7 = impl_7' let impl_Version__new (major minor patch: usize) : t_Version = { f_major = major; f_minor = minor; f_patch = patch } <: t_Version let main (_: Prims.unit) : Prims.unit = let version_3_2_1_:t_Version = impl_Version__new (mk_usize 3) (mk_usize 2) (mk_usize 1) in let version_3_3_0_:t_Version = impl_Version__new (mk_usize 3) (mk_usize 3) (mk_usize 0) in let args:(t_Version & t_Version & bool) = version_3_2_1_, version_3_3_0_, Core_models.Cmp.f_lt #t_Version #t_Version #FStar.Tactics.Typeclasses.solve version_3_2_1_ version_3_3_0_ <: (t_Version & t_Version & bool) in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 3) = let list = [ Core_models.Fmt.Rt.impl__new_debug #t_Version args._1; Core_models.Fmt.Rt.impl__new_debug #t_Version args._2; Core_models.Fmt.Rt.impl__new_display #bool args._3 ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 4) (mk_usize 3) (let list = [""; " < "; " = "; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4); Rust_primitives.Hax.array_of_list 4 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Simple_loop.fst ================================================ module Coverage.Simple_loop #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Std.Env in () let main (_: Prims.unit) : (i32 & Prims.unit) = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. mk_usize 1 in let countdown:i32 = mk_i32 0 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 10 in countdown else countdown in Rust_primitives.Hax.failure "something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n (loop {\n |countdown| {\n (if rust_primitives::hax::machine_int::eq(countdown, 0) {\n core_models::ops::control_flow::ControlFlow_Break(\n Tuple2(Tuple0, countdown),\n )\n } else {\n core_models::ops::con..." , () <: (i32 & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Simple_match.fst ================================================ module Coverage.Simple_match #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Std.Env in () let main (_: Prims.unit) : (Prims.unit & Prims.unit) = let is_true:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) =. 
mk_usize 1 in let countdown:i32 = mk_i32 1 in let countdown:i32 = if is_true then let countdown:i32 = mk_i32 0 in countdown else countdown in Rust_primitives.Hax.Folds.fold_range (mk_i32 0) (mk_i32 2) (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in true) () (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:i32 = temp_1_ in Rust_primitives.Hax.failure "something is not implemented yet.\nSorry, Hax does not support declare-first let bindings (see https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) for now.\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/156.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `AST import`.\n" "" <: Prims.unit), () <: (Prims.unit & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Sort_groups.fst ================================================ module Coverage.Sort_groups #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Std.Env in () let generic_fn (#v_T: Type0) (cond: bool) : Prims.unit = if cond then let args:string = Core_models.Any.type_name #v_T () <: string in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #string args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () let other_fn (_: Prims.unit) : Prims.unit = () let main (_: Prims.unit) : Prims.unit = let cond:bool = (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args #FStar.Tactics.Typeclasses.solve (Std.Env.args () <: Std.Env.t_Args) <: usize) >. mk_usize 1 in let _:Prims.unit = generic_fn #Prims.unit cond in let _:Prims.unit = generic_fn #string (~.cond <: bool) in let _:Prims.unit = if Core_models.Hint.black_box #bool false then let _:Prims.unit = generic_fn #FStar.Char.char cond in () in let _:Prims.unit = generic_fn #i32 cond in let _:Prims.unit = other_fn () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Test_harness.fst ================================================ module Coverage.Test_harness #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let unused (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Tight_inf_loop.fst ================================================ module Coverage.Tight_inf_loop #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = if false then Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure 
"something is not implemented yet.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n" "{\n loop {\n Tuple0\n }\n }" <: Prims.unit), () <: (Prims.unit & Prims.unit)) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Trivial.fst ================================================ module Coverage.Trivial #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Try_error_result.fst ================================================ module Coverage.Try_error_result #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let call (return_error: bool) : Core_models.Result.t_Result Prims.unit Prims.unit = if return_error then Core_models.Result.Result_Err (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit else Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit let test1 (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit = let countdown:i32 = mk_i32 10 in match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) (fun countdown temp_1_ -> let countdown:i32 = countdown in let _:i32 = temp_1_ in true) countdown (fun countdown temp_1_ -> let countdown:i32 = countdown in let _:i32 = temp_1_ in let countdown:i32 = countdown -! mk_i32 1 in if countdown <. 
mk_i32 5 then match call true <: Core_models.Result.t_Result Prims.unit Prims.unit with | Core_models.Result.Result_Ok _ -> (match call false <: Core_models.Result.t_Result Prims.unit Prims.unit with | Core_models.Result.Result_Ok _ -> Core_models.Ops.Control_flow.ControlFlow_Continue countdown <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 else match call false <: Core_models.Result.t_Result Prims.unit Prims.unit with | Core_models.Result.Result_Ok _ -> Core_models.Ops.Control_flow.ControlFlow_Continue countdown <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: 
Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue countdown -> Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit type t_Thing1 = | Thing1 : t_Thing1 type t_Thing2 = | Thing2 : t_Thing2 let impl_Thing1__get_thing_2_ (self: t_Thing1) (return_error: bool) : Core_models.Result.t_Result t_Thing2 Prims.unit = if return_error then Core_models.Result.Result_Err (() <: Prims.unit) <: Core_models.Result.t_Result t_Thing2 Prims.unit else Core_models.Result.Result_Ok (Thing2 <: t_Thing2) <: Core_models.Result.t_Result t_Thing2 Prims.unit let impl_Thing2__call (self: t_Thing2) (return_error: bool) : Core_models.Result.t_Result u32 Prims.unit = if return_error then Core_models.Result.Result_Err (() <: Prims.unit) <: Core_models.Result.t_Result u32 Prims.unit else Core_models.Result.Result_Ok (mk_u32 57) <: Core_models.Result.t_Result u32 Prims.unit let test2 (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit = let thing1:t_Thing1 = Thing1 <: t_Thing1 in let countdown:i32 = mk_i32 10 in match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0) (mk_i32 10) (fun countdown temp_1_ -> let countdown:i32 = countdown in let _:i32 = temp_1_ in true) countdown (fun countdown temp_1_ -> let countdown:i32 = countdown in let _:i32 = temp_1_ in let countdown:i32 = countdown -! mk_i32 1 in if countdown <. 
mk_i32 5 then match impl_Thing1__get_thing_2_ thing1 false <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist1 -> let _:Prims.unit = Core_models.Result.impl__expect_err #u32 #Prims.unit (impl_Thing2__call hoist1 true <: Core_models.Result.t_Result u32 Prims.unit) "call should fail" in (match impl_Thing1__get_thing_2_ thing1 false <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist3 -> let _:Prims.unit = Core_models.Result.impl__expect_err #u32 #Prims.unit (impl_Thing2__call hoist3 true <: Core_models.Result.t_Result u32 Prims.unit) "call should fail" in (match impl_Thing1__get_thing_2_ thing1 true <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist5 -> (match impl_Thing2__call hoist5 true <: Core_models.Result.t_Result u32 Prims.unit with | Core_models.Result.Result_Ok v_val -> let _:Prims.unit = match v_val, mk_u32 57 <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool) in (match impl_Thing1__get_thing_2_ thing1 true <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist7 -> (match impl_Thing2__call hoist7 false <: Core_models.Result.t_Result u32 Prims.unit with | Core_models.Result.Result_Ok v_val -> let _:Prims.unit = match v_val, mk_u32 57 <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. 
right_val <: bool) in Core_models.Ops.Control_flow.ControlFlow_Continue countdown <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32) ) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: 
Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 else match impl_Thing1__get_thing_2_ thing1 false <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist9 -> (match impl_Thing2__call hoist9 false <: Core_models.Result.t_Result u32 Prims.unit with | Core_models.Result.Result_Ok v_val -> let _:Prims.unit = match v_val, mk_u32 57 <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. 
right_val <: bool) in (match impl_Thing1__get_thing_2_ thing1 false <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist11 -> (match impl_Thing2__call hoist11 false <: Core_models.Result.t_Result u32 Prims.unit with | Core_models.Result.Result_Ok v_val -> let _:Prims.unit = match v_val, mk_u32 57 <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool) in (match impl_Thing1__get_thing_2_ thing1 false <: Core_models.Result.t_Result t_Thing2 Prims.unit with | Core_models.Result.Result_Ok hoist13 -> (match impl_Thing2__call hoist13 false <: Core_models.Result.t_Result u32 Prims.unit with | Core_models.Result.Result_Ok v_val -> let _:Prims.unit = match v_val, mk_u32 57 <: (u32 & u32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool) in Core_models.Ops.Control_flow.ControlFlow_Continue countdown <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32 | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow 
(Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32) ) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) | Core_models.Result.Result_Err err -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) <: 
Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit) i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue countdown -> Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit = let _:Prims.unit = Core_models.Result.impl__expect_err #Prims.unit #Prims.unit (test1 () <: Core_models.Result.t_Result Prims.unit Prims.unit) "test1 should fail" in match test2 () <: Core_models.Result.t_Result Prims.unit Prims.unit with | Core_models.Result.Result_Ok _ -> Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Unicode.fst ================================================ module Coverage.Unicode #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_申し訳ございません (_: Prims.unit) : bool = Core_models.Hint.black_box #bool false let v_サビ (_: Prims.unit) : Prims.unit = () let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Ops.Range.t_RangeInclusive FStar.Char.char) #FStar.Tactics.Typeclasses.solve (Core_models.Ops.Range.impl_7__new #FStar.Char.char 'Ð' 'Ð' <: Core_models.Ops.Range.t_RangeInclusive FStar.Char.char) <: Core_models.Ops.Range.t_RangeInclusive FStar.Char.char) () (fun temp_0_ e_İ -> let _:Prims.unit = temp_0_ in let e_İ:FStar.Char.char = e_İ in ()) in let _:Prims.unit = if 
v_申し訳ございません () && v_申し訳ございません () then let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["true\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () in let _:Prims.unit = v_サビ () in () let v_他 (_: Prims.unit) : Prims.unit = () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Unused.fst ================================================ module Coverage.Unused #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let foo (#v_T: Type0) (x: v_T) : (i32 & Prims.unit) = let i:i32 = mk_i32 0 in Rust_primitives.Hax.while_loop (fun i -> let i:i32 = i in true) (fun i -> let i:i32 = i in i <. mk_i32 10 <: bool) (fun i -> let i:i32 = i in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) i (fun i -> let i:i32 = i in let _:bool = i <>. mk_i32 0 || i <>. mk_i32 0 in let i:i32 = i +! mk_i32 1 in i), () <: (i32 & Prims.unit) let unused_template_func (#v_T: Type0) (x: v_T) : (i32 & Prims.unit) = let i:i32 = mk_i32 0 in Rust_primitives.Hax.while_loop (fun i -> let i:i32 = i in true) (fun i -> let i:i32 = i in i <. mk_i32 10 <: bool) (fun i -> let i:i32 = i in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) i (fun i -> let i:i32 = i in let _:bool = i <>. mk_i32 0 || i <>. mk_i32 0 in let i:i32 = i +! mk_i32 1 in i), () <: (i32 & Prims.unit) let unused_func (a: u32) : Prims.unit = if a <>. mk_u32 0 then let a:u32 = a +! mk_u32 1 in () let unused_func2 (a: u32) : Prims.unit = if a <>. mk_u32 0 then let a:u32 = a +! mk_u32 1 in () let unused_func3 (a: u32) : Prims.unit = if a <>. mk_u32 0 then let a:u32 = a +! 
mk_u32 1 in () let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let _:Prims.unit = foo #u32 (mk_u32 0) in let _:Prims.unit = foo #float (mk_float "0.0") in Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Unused_mod.Unused_module.fst ================================================ module Coverage.Unused_mod.Unused_module #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let never_called_function (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["I am never called\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.Unused_mod.fst ================================================ module Coverage.Unused_mod #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = ["hello world!\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.While_.fst ================================================ module Coverage.While_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : (Prims.unit & Prims.unit) = let num:i32 = mk_i32 9 in Rust_primitives.Hax.while_loop (fun temp_0_ -> let _:Prims.unit = temp_0_ in true) (fun temp_0_ -> let _:Prims.unit 
= temp_0_ in num >=. mk_i32 10 <: bool) (fun temp_0_ -> let _:Prims.unit = temp_0_ in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) () (fun temp_0_ -> let _:Prims.unit = temp_0_ in ()), () <: (Prims.unit & Prims.unit) ================================================ FILE: rustc-coverage-tests/snapshots/fstar/Coverage.While_early_ret.fst ================================================ module Coverage.While_early_ret #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = let countdown:i32 = mk_i32 10 in match Rust_primitives.Hax.while_loop_return (fun countdown -> let countdown:i32 = countdown in true) (fun countdown -> let countdown:i32 = countdown in countdown >. mk_i32 0 <: bool) (fun countdown -> let countdown:i32 = countdown in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) countdown (fun countdown -> let countdown:i32 = countdown in if countdown <. mk_i32 5 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (if countdown >. mk_i32 8 <: bool then Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 else Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8) (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8) (Prims.unit & i32)) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (countdown -! 
mk_i32 1 <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8) (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8) i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue countdown -> Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 ================================================ FILE: rustc-coverage-tests/src/abort.rs ================================================ #![allow(unused_assignments)] extern "C" fn might_abort(should_abort: bool) { if should_abort { println!("aborting..."); panic!("panics and aborts"); } else { println!("Don't Panic"); } } #[rustfmt::skip] fn main() -> Result<(), u8> { let mut countdown = 10; while countdown > 0 { if countdown < 5 { might_abort(false); } // See discussion (below the `Notes` section) on coverage results for the closing brace. if countdown < 5 { might_abort(false); } // Counts for different regions on one line. // For the following example, the closing brace is the last character on the line. // This shows the character after the closing brace is highlighted, even if that next // character is a newline. if countdown < 5 { might_abort(false); } countdown -= 1; } Ok(()) } // Notes: // 1. Compare this program and its coverage results to those of the similar tests // `panic_unwind.rs` and `try_error_result.rs`. // 2. This test confirms the coverage generated when a program includes `UnwindAction::Terminate`. // 3. The test does not invoke the abort. By executing to a successful completion, the coverage // results show where the program did and did not execute. // 4. If the program actually aborted, the coverage counters would not be saved (which "works as // intended"). Coverage results would show no executed coverage regions. // 6. 
If `should_abort` is `true` and the program aborts, the program exits with a `132` status // (on Linux at least). /* Expect the following coverage results: ```text 16| 11| while countdown > 0 { 17| 10| if countdown < 5 { 18| 4| might_abort(false); 19| 6| } ``` This is actually correct. The condition `countdown < 5` executed 10 times (10 loop iterations). It evaluated to `true` 4 times, and executed the `might_abort()` call. It skipped the body of the `might_abort()` call 6 times. If an `if` does not include an explicit `else`, the coverage implementation injects a counter, at the character immediately after the `if`s closing brace, to count the "implicit" `else`. This is the only way to capture the coverage of the non-true condition. As another example of why this is important, say the condition was `countdown < 50`, which is always `true`. In that case, we wouldn't have a test for what happens if `might_abort()` is not called. The closing brace would have a count of `0`, highlighting the missed coverage. */ ================================================ FILE: rustc-coverage-tests/src/assert-ne.rs ================================================ //@ edition: 2021 use core::hint::black_box; #[derive(Debug, PartialEq)] struct Foo(u32); fn main() { assert_ne!( black_box(Foo(5)), // Make sure this expression's span isn't lost. if black_box(false) { Foo(0) // } else { Foo(1) // } ); () } // This test is a short fragment extracted from `issue-84561.rs`, highlighting // a particular span of code that can easily be lost if overlapping spans are // processed incorrectly. 
================================================ FILE: rustc-coverage-tests/src/assert.rs ================================================ #![allow(unused_assignments)] //@ failure-status: 101 fn might_fail_assert(one_plus_one: u32) { println!("does 1 + 1 = {}?", one_plus_one); assert_eq!(1 + 1, one_plus_one, "the argument was wrong"); } fn main() -> Result<(), u8> { let mut countdown = 10; while countdown > 0 { if countdown == 1 { might_fail_assert(3); } else if countdown < 5 { might_fail_assert(2); } countdown -= 1; } Ok(()) } // Notes: // 1. Compare this program and its coverage results to those of the very similar test // `panic_unwind.rs`, and similar tests `abort.rs` and `try_error_result.rs`. // 2. This test confirms the coverage generated when a program passes or fails an `assert!()` or // related `assert_*!()` macro. // 3. Notably, the `assert` macros *do not* generate `TerminatorKind::Assert`. The macros produce // conditional expressions, `TerminatorKind::SwitchInt` branches, and a possible call to // `begin_panic_fmt()` (that begins a panic unwind, if the assertion test fails). // 4. `TerminatoKind::Assert` is, however, also present in the MIR generated for this test // (and in many other coverage tests). The `Assert` terminator is typically generated by the // Rust compiler to check for runtime failures, such as numeric overflows. ================================================ FILE: rustc-coverage-tests/src/assert_not.rs ================================================ //@ edition: 2021 // Regression test for . // `assert!(true)` and `assert!(!false)` should have similar coverage spans. 
fn main() { assert!(true); assert!(!false); assert!(!!true); assert!(!!!false); } ================================================ FILE: rustc-coverage-tests/src/async.rs ================================================ #![feature(coverage_attribute)] #![feature(custom_inner_attributes)] // for #![rustfmt::skip] #![allow(unused_assignments, dead_code)] #![rustfmt::skip] //@ edition: 2018 //@ compile-flags: -Copt-level=1 //@ aux-build: executor.rs extern crate executor; async fn c(x: u8) -> u8 { if x == 8 { 1 } else { 0 } } async fn d() -> u8 { 1 } async fn e() -> u8 { 1 } // unused function; executor does not block on `g()` async fn f() -> u8 { 1 } async fn foo() -> [bool; 10] { [false; 10] } // unused function; executor does not block on `h()` pub async fn g(x: u8) { match x { y if e().await == y => (), y if f().await == y => (), _ => (), } } async fn h(x: usize) { // The function signature is counted when called, but the body is not // executed (not awaited) so the open brace has a `0` count (at least when // displayed with `llvm-cov show` in color-mode). match x { y if foo().await[y] => (), _ => (), } } async fn i(x: u8) { // line coverage is 1, but there are 2 regions: // (a) the function signature, counted when the function is called; and // (b) the open brace for the function body, counted once when the body is // executed asynchronously. match x { y if c(x).await == y + 1 => { d().await; } y if f().await == y + 1 => (), _ => (), } } fn j(x: u8) { // non-async versions of `c()`, `d()`, and `f()` to make it similar to async `i()`. 
fn c(x: u8) -> u8 { if x == 8 { 1 } else { 0 } } fn d() -> u8 { 1 } // inner function is defined in-line, but the function is not executed fn f() -> u8 { 1 } match x { y if c(x) == y + 1 => { d(); } y if f() == y + 1 => (), _ => (), } } fn k(x: u8) { // unused function match x { 1 => (), 2 => (), _ => (), } } fn l(x: u8) { match x { 1 => (), 2 => (), _ => (), } } async fn m(x: u8) -> u8 { x - 1 } fn main() { let _ = g(10); let _ = h(9); let mut future = Box::pin(i(8)); j(7); l(6); let _ = m(5); executor::block_on(future.as_mut()); } ================================================ FILE: rustc-coverage-tests/src/async2.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2018 //@ aux-build: executor.rs extern crate executor; fn non_async_func() { println!("non_async_func was covered"); let b = true; if b { println!("non_async_func println in block"); } } async fn async_func() { println!("async_func was covered"); let b = true; if b { println!("async_func println in block"); } } async fn async_func_just_println() { println!("async_func_just_println was covered"); } fn main() { println!("codecovsample::main"); non_async_func(); executor::block_on(async_func()); executor::block_on(async_func_just_println()); } ================================================ FILE: rustc-coverage-tests/src/async_block.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ aux-build: executor.rs extern crate executor; fn main() { for i in 0..16 { let future = async { if i >= 12 { println!("big"); } else { println!("small"); } }; executor::block_on(future); } } ================================================ FILE: rustc-coverage-tests/src/async_closure.rs ================================================ //@ edition: 2021 //@ aux-build: executor.rs extern crate executor; async fn call_once(f: impl AsyncFnOnce()) { f().await; } pub fn main() { let async_closure = async || {}; 
executor::block_on(async_closure()); executor::block_on(call_once(async_closure)); } ================================================ FILE: rustc-coverage-tests/src/attr/impl.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting // Checks that `#[coverage(..)]` can be applied to impl and impl-trait blocks, // and is inherited by any enclosed functions. struct MyStruct; #[coverage(off)] impl MyStruct { fn off_inherit() {} #[coverage(on)] fn off_on() {} #[coverage(off)] fn off_off() {} } #[coverage(on)] impl MyStruct { fn on_inherit() {} #[coverage(on)] fn on_on() {} #[coverage(off)] fn on_off() {} } trait MyTrait { fn method(); } #[coverage(off)] impl MyTrait for MyStruct { fn method() {} } #[coverage(off)] fn main() {} ================================================ FILE: rustc-coverage-tests/src/attr/mod.rs ================================================ #[path = "impl.rs"] mod impl_; mod module; // mod nested; #[path = "off-on-sandwich.rs"] mod off_on_sandwich; #[path = "trait-impl-inherit.rs"] #[cfg(any(feature = "json"))] mod trait_impl_inherit; ================================================ FILE: rustc-coverage-tests/src/attr/module.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting // Checks that `#[coverage(..)]` can be applied to modules, and is inherited // by any enclosed functions. 
#[coverage(off)] mod off { fn inherit() {} #[coverage(on)] fn on() {} #[coverage(off)] fn off() {} } #[coverage(on)] mod on { fn inherit() {} #[coverage(on)] fn on() {} #[coverage(off)] fn off() {} } #[coverage(off)] mod nested_a { mod nested_b { fn inner() {} } } #[coverage(off)] fn main() {} ================================================ FILE: rustc-coverage-tests/src/attr/nested.rs ================================================ #![feature(coverage_attribute, stmt_expr_attributes)] //@ edition: 2021 //@ reference: attributes.coverage.nesting // Demonstrates the interaction between #[coverage(off)] and various kinds of // nested function. #[coverage(off)] fn do_stuff() {} #[coverage(off)] fn outer_fn() { fn middle_fn() { fn inner_fn() { do_stuff(); } do_stuff(); } do_stuff(); } struct MyOuter; impl MyOuter { #[coverage(off)] fn outer_method(&self) { struct MyMiddle; impl MyMiddle { fn middle_method(&self) { struct MyInner; impl MyInner { fn inner_method(&self) { do_stuff(); } } do_stuff(); } } do_stuff(); } } trait MyTrait { fn trait_method(&self); } impl MyTrait for MyOuter { #[coverage(off)] fn trait_method(&self) { struct MyMiddle; impl MyTrait for MyMiddle { fn trait_method(&self) { struct MyInner; impl MyTrait for MyInner { fn trait_method(&self) { do_stuff(); } } do_stuff(); } } do_stuff(); } } fn closure_expr() { let _outer = #[coverage(off)] || { let _middle = || { let _inner = || { do_stuff(); }; do_stuff(); }; do_stuff(); }; do_stuff(); } // This syntax is allowed, even without #![feature(stmt_expr_attributes)]. 
fn closure_tail() { let _outer = { #[coverage(off)] || { let _middle = { || { let _inner = { || { do_stuff(); } }; do_stuff(); } }; do_stuff(); } }; do_stuff(); } #[coverage(off)] fn main() { outer_fn(); MyOuter.outer_method(); MyOuter.trait_method(); closure_expr(); closure_tail(); } ================================================ FILE: rustc-coverage-tests/src/attr/off-on-sandwich.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ reference: attributes.coverage.nesting // Demonstrates the interaction of `#[coverage(off)]` and `#[coverage(on)]` // in nested functions. #[coverage(off)] fn do_stuff() {} #[coverage(off)] fn dense_a() { dense_b(); dense_b(); #[coverage(on)] fn dense_b() { dense_c(); dense_c(); #[coverage(off)] fn dense_c() { do_stuff(); } } } #[coverage(off)] fn sparse_a() { sparse_b(); sparse_b(); fn sparse_b() { sparse_c(); sparse_c(); #[coverage(on)] fn sparse_c() { sparse_d(); sparse_d(); fn sparse_d() { sparse_e(); sparse_e(); #[coverage(off)] fn sparse_e() { do_stuff(); } } } } } #[coverage(off)] fn main() { dense_a(); sparse_a(); } ================================================ FILE: rustc-coverage-tests/src/attr/trait-impl-inherit.rs ================================================ #![feature(coverage_attribute)] // Checks that `#[coverage(..)]` in a trait method is not inherited in an // implementation. //@ edition: 2021 //@ reference: attributes.coverage.trait-impl-inherit trait T { #[coverage(off)] fn f(&self) { println!("default"); } } struct S; impl T for S { fn f(&self) { println!("impl S"); } } #[coverage(off)] fn main() { S.f(); } ================================================ FILE: rustc-coverage-tests/src/auxiliary/discard_all_helper.rs ================================================ //@ edition: 2021 // Force this function to be generated in its home crate, so that it ends up // with normal coverage metadata. 
#[inline(never)] pub fn external_function() {} ================================================ FILE: rustc-coverage-tests/src/auxiliary/executor.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 use core::future::Future; use core::pin::pin; use core::task::{Context, Poll, Waker}; /// Dummy "executor" that just repeatedly polls a future until it's ready. #[coverage(off)] pub fn block_on(mut future: F) -> F::Output { let mut future = pin!(future); let mut context = Context::from_waker(Waker::noop()); loop { if let Poll::Ready(val) = future.as_mut().poll(&mut context) { break val; } } } ================================================ FILE: rustc-coverage-tests/src/auxiliary/inline_always_with_dead_code.rs ================================================ //@ compile-flags: -Cinstrument-coverage -Ccodegen-units=4 -Copt-level=0 #![allow(dead_code)] mod foo { #[inline(always)] pub fn called() {} fn uncalled() {} } pub mod bar { pub fn call_me() { super::foo::called(); } } pub mod baz { pub fn call_me() { super::foo::called(); } } ================================================ FILE: rustc-coverage-tests/src/auxiliary/inline_mixed_helper.rs ================================================ //@ edition: 2021 //@ compile-flags: -Cinstrument-coverage=on #[inline] pub fn inline_me() {} #[inline(never)] pub fn no_inlining_please() {} pub fn generic() {} // FIXME(#132436): Even though this doesn't ICE, it still produces coverage // reports that undercount the affected code. ================================================ FILE: rustc-coverage-tests/src/auxiliary/macro_name_span_helper.rs ================================================ //@ edition: 2021 #[macro_export] macro_rules! macro_that_defines_a_function { (fn $name:ident () $body:tt) => { fn $name () -> () $body } } // Non-executable comment. 
================================================ FILE: rustc-coverage-tests/src/auxiliary/mod.rs ================================================ mod discard_all_helper; // mod executor; // mod inline_always_with_dead_code; // mod inline_mixed_helper; // mod macro_name_span_helper; // mod unused_mod_helper; #[cfg(any(feature = "json", feature = "fstar", feature = "coq"))] mod used_crate; #[cfg(any(feature = "json", feature = "fstar", feature = "coq"))] mod used_inline_crate; ================================================ FILE: rustc-coverage-tests/src/auxiliary/unused_mod_helper.rs ================================================ #[allow(dead_code)] pub fn never_called_function() { println!("I am never called"); } ================================================ FILE: rustc-coverage-tests/src/auxiliary/used_crate.rs ================================================ #![allow(unused_assignments, unused_variables)] // Verify that coverage works with optimizations: //@ compile-flags: -C opt-level=3 use std::fmt::Debug; pub fn used_function() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. 
let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } use_this_lib_crate(); } pub fn used_only_from_bin_crate_generic_function(arg: T) { println!("used_only_from_bin_crate_generic_function with {arg:?}"); } // Expect for above function: `Unexecuted instantiation` (see below) pub fn used_only_from_this_lib_crate_generic_function(arg: T) { println!("used_only_from_this_lib_crate_generic_function with {arg:?}"); } pub fn used_from_bin_crate_and_lib_crate_generic_function(arg: T) { println!("used_from_bin_crate_and_lib_crate_generic_function with {arg:?}"); } pub fn used_with_same_type_from_bin_crate_and_lib_crate_generic_function(arg: T) { println!("used_with_same_type_from_bin_crate_and_lib_crate_generic_function with {arg:?}"); } pub fn unused_generic_function(arg: T) { println!("unused_generic_function with {arg:?}"); } pub fn unused_function() { let is_true = std::env::args().len() == 1; let mut countdown = 2; if !is_true { countdown = 20; } } #[allow(dead_code)] fn unused_private_function() { let is_true = std::env::args().len() == 1; let mut countdown = 2; if !is_true { countdown = 20; } } fn use_this_lib_crate() { used_from_bin_crate_and_lib_crate_generic_function("used from library used_crate.rs"); used_with_same_type_from_bin_crate_and_lib_crate_generic_function( "used from library used_crate.rs", ); let some_vec = vec![5, 6, 7, 8]; used_only_from_this_lib_crate_generic_function(some_vec); used_only_from_this_lib_crate_generic_function("used ONLY from library used_crate.rs"); } // FIXME(#79651): "Unexecuted instantiation" errors appear in coverage results, // for example: // // | Unexecuted instantiation: used_crate::used_only_from_bin_crate_generic_function::<_> // // These notices appear when `llvm-cov` shows instantiations. 
This may be a // default option, but it can be suppressed with: // // ```shell // $ `llvm-cov show --show-instantiations=0 ...` // ``` // // The notice is triggered because the function is unused by the library itself, // so when the library is compiled, an "unused" set of mappings for that function // is included in the library's coverage metadata. // // Even though this function is used by `uses_crate.rs` (and // counted), with substitutions for `T`, those instantiations are only generated // when the generic function is actually used (from the binary, not from this // library crate). So the test result shows coverage for all instantiated // versions and their generic type substitutions, plus the `Unexecuted // instantiation` message for the non-substituted version. This is valid, but // unfortunately a little confusing. // // The library crate has its own coverage map, and the only way to show unused // coverage of a generic function is to include the generic function in the // coverage map, marked as an "unused function". If the library were used by // another binary that never used this generic function, then it would be valid // to show the unused generic, with unknown substitution (`_`). // // The alternative would be to exclude all generics from being included in the // "unused functions" list, which would then omit coverage results for // `unused_generic_function()`. ================================================ FILE: rustc-coverage-tests/src/auxiliary/used_inline_crate.rs ================================================ #![allow(unused_assignments, unused_variables)] // Verify that coverage works with optimizations: //@ compile-flags: -C opt-level=3 use std::fmt::Debug; pub fn used_function() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. 
let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } use_this_lib_crate(); } #[inline(always)] pub fn used_inline_function() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } use_this_lib_crate(); } #[inline(always)] pub fn used_only_from_bin_crate_generic_function(arg: T) { println!("used_only_from_bin_crate_generic_function with {arg:?}"); } // Expect for above function: `Unexecuted instantiation` (see notes in `used_crate.rs`) #[inline(always)] pub fn used_only_from_this_lib_crate_generic_function(arg: T) { println!("used_only_from_this_lib_crate_generic_function with {arg:?}"); } #[inline(always)] pub fn used_from_bin_crate_and_lib_crate_generic_function(arg: T) { println!("used_from_bin_crate_and_lib_crate_generic_function with {arg:?}"); } #[inline(always)] pub fn used_with_same_type_from_bin_crate_and_lib_crate_generic_function(arg: T) { println!("used_with_same_type_from_bin_crate_and_lib_crate_generic_function with {arg:?}"); } #[inline(always)] pub fn unused_generic_function(arg: T) { println!("unused_generic_function with {arg:?}"); } #[inline(always)] pub fn unused_function() { let is_true = std::env::args().len() == 1; let mut countdown = 2; if !is_true { countdown = 20; } } #[inline(always)] #[allow(dead_code)] fn unused_private_function() { let is_true = std::env::args().len() == 1; let mut countdown = 2; if !is_true { countdown = 20; } } fn use_this_lib_crate() { used_from_bin_crate_and_lib_crate_generic_function("used from library used_crate.rs"); used_with_same_type_from_bin_crate_and_lib_crate_generic_function( "used from library used_crate.rs", ); let some_vec = vec![5, 6, 7, 8]; used_only_from_this_lib_crate_generic_function(some_vec); 
used_only_from_this_lib_crate_generic_function("used ONLY from library used_crate.rs"); } ================================================ FILE: rustc-coverage-tests/src/await_ready.rs ================================================ #![feature(coverage_attribute)] #![coverage(off)] //@ edition: 2021 //@ aux-build: executor.rs extern crate executor; async fn ready() -> u8 { 1 } #[coverage(on)] #[rustfmt::skip] async fn await_ready() -> u8 { // await should be covered even if the function never yields ready() .await } fn main() { let mut future = Box::pin(await_ready()); executor::block_on(future.as_mut()); } ================================================ FILE: rustc-coverage-tests/src/bad_counter_ids.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Copt-level=0 -Zmir-opt-level=3 // Regression test for . // // If some coverage counters were removed by MIR optimizations, we need to take // care not to refer to those counter IDs in coverage mappings, and instead // replace them with a constant zero value. If we don't, `llvm-cov` might see // a too-large counter ID and silently discard the entire function from its // coverage reports. 
#[derive(Debug, PartialEq, Eq)] struct Foo(u32); fn eq_good() { println!("a"); assert_eq!(Foo(1), Foo(1)); } fn eq_good_message() { println!("b"); assert_eq!(Foo(1), Foo(1), "message b"); } fn ne_good() { println!("c"); assert_ne!(Foo(1), Foo(3)); } fn ne_good_message() { println!("d"); assert_ne!(Foo(1), Foo(3), "message d"); } fn eq_bad() { println!("e"); assert_eq!(Foo(1), Foo(3)); } fn eq_bad_message() { println!("f"); assert_eq!(Foo(1), Foo(3), "message f"); } fn ne_bad() { println!("g"); assert_ne!(Foo(1), Foo(1)); } fn ne_bad_message() { println!("h"); assert_ne!(Foo(1), Foo(1), "message h"); } #[coverage(off)] fn main() { eq_good(); eq_good_message(); ne_good(); ne_good_message(); assert!(std::panic::catch_unwind(eq_bad).is_err()); assert!(std::panic::catch_unwind(eq_bad_message).is_err()); assert!(std::panic::catch_unwind(ne_bad).is_err()); assert!(std::panic::catch_unwind(ne_bad_message).is_err()); } ================================================ FILE: rustc-coverage-tests/src/bench.rs ================================================ #![feature(test)] //@ edition: 2021 //@ compile-flags: --test extern crate test; #[bench] fn my_bench(_b: &mut test::Bencher) {} ================================================ FILE: rustc-coverage-tests/src/branch/generics.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count fn print_size() { if std::mem::size_of::() > 4 { println!("size > 4"); } else { println!("size <= 4"); } } #[coverage(off)] fn main() { print_size::<()>(); print_size::(); print_size::(); } ================================================ FILE: rustc-coverage-tests/src/branch/guard.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count macro_rules! 
no_merge { () => { for _ in 0..1 {} }; } fn branch_match_guard(x: Option) { no_merge!(); match x { Some(0) => { println!("zero"); } Some(x) if x % 2 == 0 => { println!("is nonzero and even"); } Some(x) if x % 3 == 0 => { println!("is nonzero and odd, but divisible by 3"); } _ => { println!("something else"); } } } #[coverage(off)] fn main() { branch_match_guard(Some(0)); branch_match_guard(Some(2)); branch_match_guard(Some(6)); branch_match_guard(Some(3)); } ================================================ FILE: rustc-coverage-tests/src/branch/if-let.rs ================================================ #![feature(coverage_attribute, let_chains)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count macro_rules! no_merge { () => { for _ in 0..1 {} }; } fn if_let(input: Option<&str>) { no_merge!(); if let Some(x) = input { say(x); } else { say("none"); } say("done"); } fn if_let_chain(a: Option<&str>, b: Option<&str>) { if let Some(x) = a && let Some(y) = b { say(x); say(y); } else { say("not both"); } say("done"); } #[coverage(off)] fn say(message: &str) { core::hint::black_box(message); } #[coverage(off)] fn main() { if_let(Some("x")); if_let(Some("x")); if_let(None); for _ in 0..8 { if_let_chain(Some("a"), Some("b")); } for _ in 0..4 { if_let_chain(Some("a"), None); } for _ in 0..2 { if_let_chain(None, Some("b")); } if_let_chain(None, None); } // FIXME(#124118) Actually instrument if-let and let-chains for branch coverage. ================================================ FILE: rustc-coverage-tests/src/branch/if.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count macro_rules! 
no_merge { () => { for _ in 0..1 {} }; } fn branch_not(a: bool) { no_merge!(); if a { say("a") } if !a { say("not a"); } if !!a { say("not not a"); } if !!!a { say("not not not a"); } } fn branch_not_as(a: bool) { no_merge!(); if !(a as bool) { say("not (a as bool)"); } if !!(a as bool) { say("not not (a as bool)"); } if !!!(a as bool) { say("not not (a as bool)"); } } fn branch_and(a: bool, b: bool) { no_merge!(); if a && b { say("both"); } else { say("not both"); } } fn branch_or(a: bool, b: bool) { no_merge!(); if a || b { say("either"); } else { say("neither"); } } #[coverage(off)] fn say(message: &str) { core::hint::black_box(message); } #[coverage(off)] fn main() { for a in [false, true, true] { branch_not(a); branch_not_as(a); } for a in [false, true, true, true, true] { for b in [false, true, true] { branch_and(a, b); branch_or(a, b); } } } ================================================ FILE: rustc-coverage-tests/src/branch/lazy-boolean.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count // Tests for branch coverage of the lazy boolean operators `&&` and `||`, // as ordinary expressions that aren't part of an `if` condition or similar. use core::hint::black_box; // Helper macro to prevent start-of-function spans from being merged into // spans on the lines we care about. macro_rules! no_merge { () => { for _ in 0..1 {} }; } fn branch_and(a: bool, b: bool) { no_merge!(); // |13 |18 (no branch) let c = a && b; black_box(c); } fn branch_or(a: bool, b: bool) { no_merge!(); // |13 |18 (no branch) let c = a || b; black_box(c); } // Test for chaining one operator several times. 
fn chain(x: u32) { no_merge!(); // |13 |22 |31 |40 (no branch) let c = x > 1 && x > 2 && x > 4 && x > 8; black_box(c); // |13 |22 |31 |40 (no branch) let d = x < 1 || x < 2 || x < 4 || x < 8; black_box(d); } // Test for nested combinations of different operators. fn nested_mixed(x: u32) { no_merge!(); // |14 |23 |35 |44 (no branch) let c = (x < 4 || x >= 9) && (x < 2 || x >= 10); black_box(c); // |14 |23 |34 |44 (no branch) let d = (x < 4 && x < 1) || (x >= 8 && x >= 10); black_box(d); } #[coverage(off)] fn main() { // Use each set of arguments (2^n) times, so that each combination has a // unique sum, and we can use those sums to verify expected control flow. // 1x (false, false) // 2x (false, true) // 4x (true, false) // 8x (true, true) for a in [false, true, true, true, true] { for b in [false, true, true] { branch_and(a, b); branch_or(a, b); } } for x in 0..16 { chain(x); nested_mixed(x); } } ================================================ FILE: rustc-coverage-tests/src/branch/let-else.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count macro_rules! no_merge { () => { for _ in 0..1 {} }; } fn let_else(value: Option<&str>) { no_merge!(); let Some(x) = value else { say("none"); return; }; say(x); } #[coverage(off)] fn say(message: &str) { core::hint::black_box(message); } #[coverage(off)] fn main() { let_else(Some("x")); let_else(Some("x")); let_else(None); } // FIXME(#124118) Actually instrument let-else for branch coverage. ================================================ FILE: rustc-coverage-tests/src/branch/match-arms.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count // Tests for branch coverage of various kinds of match arms. 
// Helper macro to prevent start-of-function spans from being merged into // spans on the lines we care about. macro_rules! no_merge { () => { for _ in 0..1 {} }; } #[derive(Clone, Copy, Debug)] enum Enum { A(u32), B(u32), C(u32), D(u32), } fn match_arms(value: Enum) { no_merge!(); match value { Enum::D(d) => consume(d), Enum::C(c) => consume(c), Enum::B(b) => consume(b), Enum::A(a) => consume(a), } consume(0); } fn or_patterns(value: Enum) { no_merge!(); match value { Enum::D(x) | Enum::C(x) => consume(x), Enum::B(y) | Enum::A(y) => consume(y), } consume(0); } fn guards(value: Enum, cond: bool) { no_merge!(); match value { Enum::D(d) if cond => consume(d), Enum::C(c) if cond => consume(c), Enum::B(b) if cond => consume(b), Enum::A(a) if cond => consume(a), _ => consume(0), } consume(0); } #[coverage(off)] fn consume(x: T) { core::hint::black_box(x); } #[coverage(off)] fn main() { #[coverage(off)] fn call_everything(e: Enum) { match_arms(e); or_patterns(e); for cond in [false, false, true] { guards(e, cond); } } call_everything(Enum::A(0)); for b in 0..2 { call_everything(Enum::B(b)); } for c in 0..4 { call_everything(Enum::C(c)); } for d in 0..8 { call_everything(Enum::D(d)); } } // FIXME(#124118) Actually instrument match arms for branch coverage. ================================================ FILE: rustc-coverage-tests/src/branch/match-trivial.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count // When instrumenting match expressions for branch coverage, make sure we don't // cause an ICE or produce weird coverage output for matches with <2 arms. // Helper macro to prevent start-of-function spans from being merged into // spans on the lines we care about. macro_rules! 
no_merge { () => { for _ in 0..1 {} }; } enum Uninhabited {} enum Trivial { Value, } fn _uninhabited(x: Uninhabited) { no_merge!(); match x {} consume("done"); } fn trivial(x: Trivial) { no_merge!(); match x { Trivial::Value => consume("trivial"), } consume("done"); } #[coverage(off)] fn consume(x: T) { core::hint::black_box(x); } #[coverage(off)] fn main() { trivial(Trivial::Value); } ================================================ FILE: rustc-coverage-tests/src/branch/mod.rs ================================================ mod generics; mod guard; // #[path = "if-let.rs"] // mod if_let; #[path = "if.rs"] mod if_; #[path = "lazy-boolean.rs"] mod lazy_boolean; #[path = "let-else.rs"] mod let_else; #[path = "match-arms.rs"] mod match_arms; #[path = "match-trivial.rs"] mod match_trivial; #[path = "no-mir-spans.rs"] mod no_mir_spans; #[path = "while.rs"] mod while_; ================================================ FILE: rustc-coverage-tests/src/branch/no-mir-spans.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch,no-mir-spans //@ llvm-cov-flags: --show-branches=count // Tests the behaviour of the `-Zcoverage-options=no-mir-spans` debugging flag. // The actual code below is just some non-trivial code copied from another test // (`while.rs`), and has no particular significance. macro_rules! 
no_merge { () => { for _ in 0..1 {} }; } fn while_cond() { no_merge!(); let mut a = 8; while a > 0 { a -= 1; } } fn while_cond_not() { no_merge!(); let mut a = 8; while !(a == 0) { a -= 1; } } fn while_op_and() { no_merge!(); let mut a = 8; let mut b = 4; while a > 0 && b > 0 { a -= 1; b -= 1; } } fn while_op_or() { no_merge!(); let mut a = 4; let mut b = 8; while a > 0 || b > 0 { a -= 1; b -= 1; } } #[coverage(off)] fn main() { while_cond(); while_cond_not(); while_op_and(); while_op_or(); } ================================================ FILE: rustc-coverage-tests/src/branch/while.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=branch //@ llvm-cov-flags: --show-branches=count macro_rules! no_merge { () => { for _ in 0..1 {} }; } fn while_cond() { no_merge!(); let mut a = 8; while a > 0 { a -= 1; } } fn while_cond_not() { no_merge!(); let mut a = 8; while !(a == 0) { a -= 1; } } fn while_op_and() { no_merge!(); let mut a = 8; let mut b = 4; while a > 0 && b > 0 { a -= 1; b -= 1; } } fn while_op_or() { no_merge!(); let mut a = 4; let mut b = 8; while a > 0 || b > 0 { a -= 1; b -= 1; } } #[coverage(off)] fn main() { while_cond(); while_cond_not(); while_op_and(); while_op_or(); } ================================================ FILE: rustc-coverage-tests/src/closure.rs ================================================ #![allow(unused_assignments, unused_variables)] //@ compile-flags: -C opt-level=2 // This test used to be sensitive to certain coverage-specific hacks in // `rustc_middle/mir/mono.rs`, but those hacks were later cleaned up by // . #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. 
let is_true = std::env::args().len() == 1; let is_false = !is_true; let mut some_string = Some(String::from("the string content")); println!( "The string or alt: {}" , some_string . unwrap_or_else ( || { let mut countdown = 0; if is_false { countdown = 10; } "alt string 1".to_owned() } ) ); some_string = Some(String::from("the string content")); let a = || { let mut countdown = 0; if is_false { countdown = 10; } "alt string 2".to_owned() }; println!( "The string or alt: {}" , some_string . unwrap_or_else ( a ) ); some_string = None; println!( "The string or alt: {}" , some_string . unwrap_or_else ( || { let mut countdown = 0; if is_false { countdown = 10; } "alt string 3".to_owned() } ) ); some_string = None; let a = || { let mut countdown = 0; if is_false { countdown = 10; } "alt string 4".to_owned() }; println!( "The string or alt: {}" , some_string . unwrap_or_else ( a ) ); let quote_closure = |val| { let mut countdown = 0; if is_false { countdown = 10; } format!("'{}'", val) }; println!( "Repeated, quoted string: {:?}" , std::iter::repeat("repeat me") .take(5) .map ( quote_closure ) .collect::>() ); let _unused_closure = | mut countdown | { if is_false { countdown = 10; } "closure should be unused".to_owned() }; let mut countdown = 10; let _short_unused_closure = | _unused_arg: u8 | countdown += 1; let short_used_covered_closure_macro = | used_arg: u8 | println!("called"); let short_used_not_covered_closure_macro = | used_arg: u8 | println!("not called"); let _short_unused_closure_macro = | _unused_arg: u8 | println!("not called"); let _short_unused_closure_block = | _unused_arg: u8 | { println!("not called") }; let _shortish_unused_closure = | _unused_arg: u8 | { println!("not called") }; let _as_short_unused_closure = | _unused_arg: u8 | { println!("not called") }; let _almost_as_short_unused_closure = | _unused_arg: u8 | { println!("not called") } ; let _short_unused_closure_line_break_no_block = | _unused_arg: u8 | println!("not called") ; let 
_short_unused_closure_line_break_no_block2 = | _unused_arg: u8 | println!( "not called" ) ; let short_used_not_covered_closure_line_break_no_block_embedded_branch = | _unused_arg: u8 | println!( "not called: {}", if is_true { "check" } else { "me" } ) ; let short_used_not_covered_closure_line_break_block_embedded_branch = | _unused_arg: u8 | { println!( "not called: {}", if is_true { "check" } else { "me" } ) } ; let short_used_covered_closure_line_break_no_block_embedded_branch = | _unused_arg: u8 | println!( "not called: {}", if is_true { "check" } else { "me" } ) ; let short_used_covered_closure_line_break_block_embedded_branch = | _unused_arg: u8 | { println!( "not called: {}", if is_true { "check" } else { "me" } ) } ; if is_false { short_used_not_covered_closure_macro(0); short_used_not_covered_closure_line_break_no_block_embedded_branch(0); short_used_not_covered_closure_line_break_block_embedded_branch(0); } short_used_covered_closure_macro(0); short_used_covered_closure_line_break_no_block_embedded_branch(0); short_used_covered_closure_line_break_block_embedded_branch(0); } ================================================ FILE: rustc-coverage-tests/src/closure_bug.rs ================================================ // Regression test for #115930. // All of these closures are identical, and should produce identical output in // the coverage report. However, an unstable sort was causing them to be treated // inconsistently when preparing coverage spans. 
#[rustfmt::skip] fn main() { let truthy = std::env::args().len() == 1; let a = | | if truthy { true } else { false }; a(); if truthy { a(); } let b = | | if truthy { true } else { false }; b(); if truthy { b(); } let c = | | if truthy { true } else { false }; c(); if truthy { c(); } let d = | | if truthy { true } else { false }; d(); if truthy { d(); } } ================================================ FILE: rustc-coverage-tests/src/closure_macro.rs ================================================ //@ edition: 2018 macro_rules! bail { ($msg:literal $(,)?) => { if $msg.len() > 0 { println!("no msg"); } else { println!($msg); } return Err(String::from($msg)); }; } macro_rules! on_error { ($value:expr, $error_message:expr) => { $value.or_else(|e| { // This closure, which is declared in a macro, should be instrumented. let message = format!($error_message, e); if message.len() > 0 { println!("{}", message); Ok(String::from("ok")) } else { bail!("error"); } }) }; } fn load_configuration_files() -> Result { Ok(String::from("config")) } pub fn main() -> Result<(), String> { println!("Starting service"); let config = on_error!(load_configuration_files(), "Error loading configs: {}")?; let startup_delay_duration = String::from("arg"); let _ = (config, startup_delay_duration); Ok(()) } ================================================ FILE: rustc-coverage-tests/src/closure_macro_async.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2018 //@ aux-build: executor.rs extern crate executor; macro_rules! bail { ($msg:literal $(,)?) => { if $msg.len() > 0 { println!("no msg"); } else { println!($msg); } return Err(String::from($msg)); }; } macro_rules! on_error { ($value:expr, $error_message:expr) => { $value.or_else(|e| { // This closure, which is declared in a macro, should be instrumented. 
let message = format!($error_message, e); if message.len() > 0 { println!("{}", message); Ok(String::from("ok")) } else { bail!("error"); } }) }; } fn load_configuration_files() -> Result { Ok(String::from("config")) } pub async fn test() -> Result<(), String> { println!("Starting service"); let config = on_error!(load_configuration_files(), "Error loading configs: {}")?; let startup_delay_duration = String::from("arg"); let _ = (config, startup_delay_duration); Ok(()) } #[coverage(off)] fn main() { executor::block_on(test()).unwrap(); } ================================================ FILE: rustc-coverage-tests/src/closure_unit_return.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 // Regression test for an inconsistency between functions that return the value // of their trailing expression, and functions that implicitly return `()`. fn explicit_unit() { let closure = || { (); }; drop(closure); () // explicit return of trailing value } fn implicit_unit() { let closure = || { (); }; drop(closure); // implicit return of `()` } #[coverage(off)] fn main() { explicit_unit(); implicit_unit(); } ================================================ FILE: rustc-coverage-tests/src/color.rs ================================================ //@ edition: 2021 //@ ignore-coverage-map //@ ignore-windows //@ llvm-cov-flags: --use-color // Verify that telling `llvm-cov` to use colored output actually works. // Ignored on Windows because we can't tell the tool to use ANSI escapes. 
fn main() { for _i in 0..0 {} } ================================================ FILE: rustc-coverage-tests/src/condition/conditions.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=condition //@ llvm-cov-flags: --show-branches=count use core::hint::black_box; fn simple_assign(a: bool) { let x = a; black_box(x); } fn assign_and(a: bool, b: bool) { let x = a && b; black_box(x); } fn assign_or(a: bool, b: bool) { let x = a || b; black_box(x); } fn assign_3_or_and(a: bool, b: bool, c: bool) { let x = a || b && c; black_box(x); } fn assign_3_and_or(a: bool, b: bool, c: bool) { let x = a && b || c; black_box(x); } fn foo(a: bool) -> bool { black_box(a) } fn func_call(a: bool, b: bool) { foo(a && b); } #[coverage(off)] fn main() { simple_assign(true); simple_assign(false); assign_and(true, false); assign_and(true, true); assign_and(false, false); assign_or(true, false); assign_or(true, true); assign_or(false, false); assign_3_or_and(true, false, false); assign_3_or_and(true, true, false); assign_3_or_and(false, false, true); assign_3_or_and(false, true, true); assign_3_and_or(true, false, false); assign_3_and_or(true, true, false); assign_3_and_or(false, false, true); assign_3_and_or(false, true, true); func_call(true, false); func_call(true, true); func_call(false, false); } ================================================ FILE: rustc-coverage-tests/src/condition/mod.rs ================================================ mod conditions; ================================================ FILE: rustc-coverage-tests/src/conditions.rs ================================================ #![allow(unused_assignments, unused_variables)] fn main() { let mut countdown = 0; if true { countdown = 10; } const B: u32 = 100; let x = if countdown > 7 { countdown -= 4; B } else if countdown > 2 { if countdown < 1 || countdown > 5 || countdown != 9 { countdown = 0; } countdown -= 5; countdown } else { 
return; }; let mut countdown = 0; if true { countdown = 10; } if countdown > 7 { countdown -= 4; } else if countdown > 2 { if countdown < 1 || countdown > 5 || countdown != 9 { countdown = 0; } countdown -= 5; } else { return; } if true { let mut countdown = 0; if true { countdown = 10; } if countdown > 7 { countdown -= 4; } // else if countdown > 2 { if countdown < 1 || countdown > 5 || countdown != 9 { countdown = 0; } countdown -= 5; } else { return; } } let mut countdown = 0; if true { countdown = 1; } let z = if countdown > 7 { countdown -= 4; } else if countdown > 2 { if countdown < 1 || countdown > 5 || countdown != 9 { countdown = 0; } countdown -= 5; } else { let should_be_reachable = countdown; println!("reached"); return; }; let w = if countdown > 7 { countdown -= 4; } else if countdown > 2 { if countdown < 1 || countdown > 5 || countdown != 9 { countdown = 0; } countdown -= 5; } else { return; }; } ================================================ FILE: rustc-coverage-tests/src/continue.rs ================================================ #![allow(unused_assignments, unused_variables)] fn main() { let is_true = std::env::args().len() == 1; let mut x = 0; for _ in 0..10 { match is_true { true => { continue; } _ => { x = 1; } } x = 3; } for _ in 0..10 { match is_true { false => { x = 1; } _ => { continue; } } x = 3; } for _ in 0..10 { match is_true { true => { x = 1; } _ => { continue; } } x = 3; } for _ in 0..10 { if is_true { continue; } x = 3; } for _ in 0..10 { match is_true { false => { x = 1; } _ => { let _ = x; } } x = 3; } for _ in 0..10 { match is_true { false => { x = 1; } _ => { break; } } x = 3; } let _ = x; } ================================================ FILE: rustc-coverage-tests/src/coroutine.rs ================================================ #![feature(coroutines, coroutine_trait, stmt_expr_attributes)] use std::ops::{Coroutine, CoroutineState}; use std::pin::Pin; // The following implementation of a function called from a `yield` 
statement // (apparently requiring the Result and the `String` type or constructor) // creates conditions where the `coroutine::StateTransform` MIR transform will // drop all `Counter` `Coverage` statements from a MIR. `simplify.rs` has logic // to handle this condition, and still report dead block coverage. fn get_u32(val: bool) -> Result { if val { Ok(1) // } else { Err(String::from("some error")) // } } fn main() { let is_true = std::env::args().len() == 1; let mut coroutine = #[coroutine] || { yield get_u32(is_true); return "foo"; }; match Pin::new(&mut coroutine).resume(()) { CoroutineState::Yielded(Ok(1)) => {} _ => panic!("unexpected return from resume"), } match Pin::new(&mut coroutine).resume(()) { CoroutineState::Complete("foo") => {} _ => panic!("unexpected return from resume"), } } ================================================ FILE: rustc-coverage-tests/src/coverage_attr_closure.rs ================================================ #![feature(coverage_attribute, stmt_expr_attributes)] #![allow(dead_code)] //@ edition: 2021 static GLOBAL_CLOSURE_ON: fn(&str) = #[coverage(on)] |input: &str| { println!("{input}"); }; static GLOBAL_CLOSURE_OFF: fn(&str) = #[coverage(off)] |input: &str| { println!("{input}"); }; #[coverage(on)] fn contains_closures_on() { let _local_closure_on = #[coverage(on)] |input: &str| { println!("{input}"); }; let _local_closure_off = #[coverage(off)] |input: &str| { println!("{input}"); }; } #[coverage(off)] fn contains_closures_off() { let _local_closure_on = #[coverage(on)] |input: &str| { println!("{input}"); }; let _local_closure_off = #[coverage(off)] |input: &str| { println!("{input}"); }; } #[coverage(off)] fn main() { contains_closures_on(); contains_closures_off(); } ================================================ FILE: rustc-coverage-tests/src/dead_code.rs ================================================ #![allow(dead_code, unused_assignments, unused_variables)] pub fn unused_pub_fn_not_in_library() { // Initialize test 
constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } } fn unused_fn() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } } fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } } ================================================ FILE: rustc-coverage-tests/src/discard-all-issue-133606.rs ================================================ //! Regression test for . //! //! In rare cases, all of a function's coverage spans are discarded at a late //! stage during codegen. When that happens, the subsequent code needs to take //! special care to avoid emitting coverage metadata that would cause `llvm-cov` //! to fail with a fatal error. //! //! We currently don't know of a concise way to reproduce that scenario with //! ordinary Rust source code, so instead we set a special testing-only flag to //! force it to occur. //@ edition: 2021 //@ compile-flags: -Zcoverage-options=discard-all-spans-in-codegen // The `llvm-cov` tool will complain if the test binary ends up having no // coverage metadata at all. To prevent that, we also link to instrumented // code in an auxiliary crate that doesn't have the special flag set. 
//@ aux-build: discard_all_helper.rs extern crate discard_all_helper; fn main() { discard_all_helper::external_function(); } ================================================ FILE: rustc-coverage-tests/src/drop_trait.rs ================================================ #![allow(unused_assignments)] //@ failure-status: 1 struct Firework { strength: i32, } impl Drop for Firework { fn drop(&mut self) { println!("BOOM times {}!!!", self.strength); } } fn main() -> Result<(), u8> { let _firecracker = Firework { strength: 1 }; let _tnt = Firework { strength: 100 }; if true { println!("Exiting with error..."); return Err(1); } let _ = Firework { strength: 1000 }; Ok(()) } // Expected program output: // Exiting with error... // BOOM times 100!!! // BOOM times 1!!! // Error: 1 ================================================ FILE: rustc-coverage-tests/src/fn_sig_into_try.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 // Regression test for inconsistent handling of function signature spans that // are followed by code using the `?` operator. // // For each of these similar functions, the line containing the function // signature should be handled in the same way. fn a() -> Option // { Some(7i32); Some(0) } fn b() -> Option // { Some(7i32)?; Some(0) } fn c() -> Option // { let _ = Some(7i32)?; Some(0) } fn d() -> Option // { let _: () = (); Some(7i32)?; Some(0) } #[coverage(off)] fn main() { a(); b(); c(); d(); } ================================================ FILE: rustc-coverage-tests/src/generic-unused-impl.rs ================================================ // Regression test for #135235. 
trait Foo { type Assoc; fn from(s: Self::Assoc) -> Self; } struct W(T); impl From<[T::Assoc; 1]> for W { fn from(from: [T::Assoc; 1]) -> Self { let [item] = from; W(Foo::from(item)) } } fn main() {} ================================================ FILE: rustc-coverage-tests/src/generics.rs ================================================ #![allow(unused_assignments)] //@ failure-status: 1 struct Firework { strength: T, } impl Firework { #[inline(always)] fn set_strength(&mut self, new_strength: T) { self.strength = new_strength; } } impl Drop for Firework { #[inline(always)] fn drop(&mut self) { println!("BOOM times {}!!!", self.strength); } } fn main() -> Result<(), u8> { let mut firecracker = Firework { strength: 1 }; firecracker.set_strength(2); let mut tnt = Firework { strength: 100.1 }; tnt.set_strength(200.1); tnt.set_strength(300.3); if true { println!("Exiting with error..."); return Err(1); } let _ = Firework { strength: 1000 }; Ok(()) } // Expected program output: // Exiting with error... // BOOM times 100!!! // BOOM times 1!!! // Error: 1 ================================================ FILE: rustc-coverage-tests/src/holes.rs ================================================ //@ edition: 2021 // Nested items/closures should be treated as "holes", so that their spans are // not displayed as executable code in the enclosing function. use core::hint::black_box; fn main() { black_box(()); static MY_STATIC: () = (); black_box(()); const MY_CONST: () = (); // Splitting this across multiple lines makes it easier to see where the // coverage mapping regions begin and end. #[rustfmt::skip] let _closure = | _arg: (), | { black_box(()); } ; black_box(()); fn _unused_fn() {} black_box(()); struct MyStruct { _x: u32, _y: u32, } black_box(()); impl MyStruct { fn _method(&self) {} } black_box(()); trait MyTrait {} black_box(()); impl MyTrait for MyStruct {} black_box(()); macro_rules! 
_my_macro { () => {}; } black_box(()); #[rustfmt::skip] let _const = const { 7 + 4 } ; black_box(()); #[rustfmt::skip] let _async = async { 7 + 4 } ; black_box(()); // This tests the edge case of a const block nested inside an "anon const", // such as the length of an array literal. Handling this case requires // `nested_filter::OnlyBodies` or equivalent. #[rustfmt::skip] let _const_block_inside_anon_const = [ 0 ; 7 + const { 3 } ] ; black_box(()); } ================================================ FILE: rustc-coverage-tests/src/if.rs ================================================ #![allow(unused_assignments, unused_variables)] #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1 ; let mut countdown = 0 ; if is_true { countdown = 10 ; } } ================================================ FILE: rustc-coverage-tests/src/if_else.rs ================================================ #![allow(unused_assignments, unused_variables)] #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10 ; } else // Note coverage region difference without semicolon { countdown = 100 } if is_true { countdown = 10 ; } else { countdown = 100 ; } } ================================================ FILE: rustc-coverage-tests/src/if_not.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 #[rustfmt::skip] fn if_not(cond: bool) { if ! cond { println!("cond was false"); } if ! cond { println!("cond was false"); } if ! 
cond { println!("cond was false"); } else { println!("cond was true"); } } #[coverage(off)] fn main() { for _ in 0..8 { if_not(std::hint::black_box(true)); } for _ in 0..4 { if_not(std::hint::black_box(false)); } } ================================================ FILE: rustc-coverage-tests/src/ignore_map.rs ================================================ //@ ignore-coverage-map fn main() {} ================================================ FILE: rustc-coverage-tests/src/ignore_run.rs ================================================ //@ ignore-coverage-run fn main() {} ================================================ FILE: rustc-coverage-tests/src/inline-dead.rs ================================================ // Regression test for issue #98833. //@ compile-flags: -Zinline-mir -Cdebug-assertions=off fn main() { println!("{}", live::()); let f = |x: bool| { debug_assert!(x); }; f(false); } #[inline] fn live() -> u32 { if B { dead() // } else { 0 } } #[inline] fn dead() -> u32 { 42 } ================================================ FILE: rustc-coverage-tests/src/inline.rs ================================================ //@ compile-flags: -Zinline-mir use std::fmt::Display; fn main() { permutations(&['a', 'b', 'c']); } #[inline(always)] fn permutations(xs: &[T]) { let mut ys = xs.to_owned(); permutate(&mut ys, 0); } fn permutate(xs: &mut [T], k: usize) { let n = length(xs); if k == n { display(xs); } else if k < n { for i in k..n { swap(xs, i, k); permutate(xs, k + 1); swap(xs, i, k); } } else { error(); } } fn length(xs: &[T]) -> usize { xs.len() } #[inline] fn swap(xs: &mut [T], i: usize, j: usize) { let t = xs[i]; xs[i] = xs[j]; xs[j] = t; } fn display(xs: &[T]) { for x in xs { print!("{}", x); } println!(); } #[inline(always)] fn error() { panic!("error"); } ================================================ FILE: rustc-coverage-tests/src/inline_mixed.rs ================================================ //@ edition: 2021 //@ compile-flags: -Cinstrument-coverage=off 
//@ ignore-coverage-run //@ aux-crate: inline_mixed_helper=inline_mixed_helper.rs // Regression test for . // Various forms of cross-crate inlining can cause coverage statements to be // inlined into crates that are being built without coverage instrumentation. // At the very least, we need to not ICE when that happens. fn main() { inline_mixed_helper::inline_me(); inline_mixed_helper::no_inlining_please(); inline_mixed_helper::generic::(); } // FIXME(#132437): We currently don't test this in coverage-run mode, because // whether or not it produces a `.profraw` file appears to differ between // platforms. ================================================ FILE: rustc-coverage-tests/src/inner_items.rs ================================================ #![allow(unused_assignments, unused_variables, dead_code)] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. 
let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10; } mod in_mod { const IN_MOD_CONST: u32 = 1000; } fn in_func(a: u32) { let b = 1; let c = a + b; println!("c = {}", c) } struct InStruct { in_struct_field: u32, } const IN_CONST: u32 = 1234; trait InTrait { fn trait_func(&mut self, incr: u32); fn default_trait_func(&mut self) { in_func(IN_CONST); self.trait_func(IN_CONST); } } impl InTrait for InStruct { fn trait_func(&mut self, incr: u32) { self.in_struct_field += incr; in_func(self.in_struct_field); } } type InType = String; if is_true { in_func(countdown); } let mut val = InStruct { in_struct_field: 101, // }; val.default_trait_func(); } ================================================ FILE: rustc-coverage-tests/src/issue-83601.rs ================================================ // Shows that rust-lang/rust/83601 is resolved #[derive(Debug, PartialEq, Eq)] struct Foo(u32); fn main() { let bar = Foo(1); assert_eq!(bar, Foo(1)); let baz = Foo(0); assert_ne!(baz, Foo(1)); println!("{:?}", Foo(1)); println!("{:?}", bar); println!("{:?}", baz); } ================================================ FILE: rustc-coverage-tests/src/issue-84561.rs ================================================ // This demonstrated Issue #84561: function-like macros produce unintuitive coverage results. 
//@ failure-status: 101 #[derive(PartialEq, Eq)] struct Foo(u32); #[rustfmt::skip] fn test3() { let is_true = std::env::args().len() == 1; let bar = Foo(1); assert_eq!(bar, Foo(1)); let baz = Foo(0); assert_ne!(baz, Foo(1)); println!("{:?}", Foo(1)); println!("{:?}", bar); println!("{:?}", baz); assert_eq!(Foo(1), Foo(1)); assert_ne!(Foo(0), Foo(1)); assert_eq!(Foo(2), Foo(2)); let bar = Foo(0); assert_ne!(bar, Foo(3)); assert_ne!(Foo(0), Foo(4)); assert_eq!(Foo(3), Foo(3), "with a message"); println!("{:?}", bar); println!("{:?}", Foo(1)); assert_ne!(Foo(0), Foo(5), "{}", if is_true { "true message" } else { "false message" }); assert_ne!( Foo(0) , Foo(5) , "{}" , if is_true { "true message" } else { "false message" } ); let is_true = std::env::args().len() == 1; assert_eq!( Foo(1), Foo(1) ); assert_ne!( Foo(0), Foo(1) ); assert_eq!( Foo(2), Foo(2) ); let bar = Foo(1); assert_ne!( bar, Foo(3) ); if is_true { assert_ne!( Foo(0), Foo(4) ); } else { assert_eq!( Foo(3), Foo(3) ); } if is_true { assert_ne!( Foo(0), Foo(4), "with a message" ); } else { assert_eq!( Foo(3), Foo(3), "with a message" ); } assert_ne!( if is_true { Foo(0) } else { Foo(1) }, Foo(5) ); assert_ne!( Foo(5), if is_true { Foo(0) } else { Foo(1) } ); assert_ne!( if is_true { assert_eq!( Foo(3), Foo(3) ); Foo(0) } else { assert_ne!( if is_true { Foo(0) } else { Foo(1) }, Foo(5) ); Foo(1) }, Foo(5), "with a message" ); assert_eq!( Foo(1), Foo(3), "this assert should fail" ); assert_eq!( Foo(3), Foo(3), "this assert should not be reached" ); } impl std::fmt::Debug for Foo { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "try and succeed")?; Ok(()) } } static mut DEBUG_LEVEL_ENABLED: bool = false; macro_rules! 
debug { ($($arg:tt)+) => ( if unsafe { DEBUG_LEVEL_ENABLED } { println!($($arg)+); } ); } fn test1() { debug!("debug is enabled"); debug!("debug is enabled"); let _ = 0; debug!("debug is enabled"); unsafe { DEBUG_LEVEL_ENABLED = true; } debug!("debug is enabled"); } macro_rules! call_debug { ($($arg:tt)+) => ( fn call_print(s: &str) { print!("{}", s); } call_print("called from call_debug: "); debug!($($arg)+); ); } fn test2() { call_debug!("debug is enabled"); } fn main() { test1(); test2(); test3(); } ================================================ FILE: rustc-coverage-tests/src/issue-85461.rs ================================================ // Regression test for #85461: MSVC sometimes fail to link with dead code and #[inline(always)] //@ aux-build:inline_always_with_dead_code.rs extern crate inline_always_with_dead_code; use inline_always_with_dead_code::{bar, baz}; fn main() { bar::call_me(); baz::call_me(); } ================================================ FILE: rustc-coverage-tests/src/issue-93054.rs ================================================ #![allow(dead_code, unreachable_code)] //@ edition: 2021 // Regression test for #93054: Functions using uninhabited types often only have a single, // unreachable basic block which doesn't get instrumented. This should not cause llvm-cov to fail. // Since these kinds functions can't be invoked anyway, it's ok to not have coverage data for them. 
enum Never {} impl Never { fn foo(self) { match self {} make().map(|never| match never {}); } fn bar(&self) { match *self {} } } async fn foo2(never: Never) { match never {} } fn make() -> Option { None } fn main() {} ================================================ FILE: rustc-coverage-tests/src/lazy_boolean.rs ================================================ #![allow(unused_assignments, unused_variables)] #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let (mut a, mut b, mut c) = (0, 0, 0); if is_true { a = 1; b = 10; c = 100; } let somebool = a < b || b < c ; let somebool = b < a || b < c ; let somebool = a < b && b < c; let somebool = b < a && b < c; if ! is_true { a = 2 ; } if is_true { b = 30 ; } else { c = 400 ; } if !is_true { a = 2; } if is_true { b = 30; } else { c = 400; } } ================================================ FILE: rustc-coverage-tests/src/let_else_loop.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 // Regression test for . // These code patterns should not trigger an ICE when allocating a physical // counter to a node and also one of its in-edges, because that is allowed // when the node contains a tight loop to itself. fn loopy(cond: bool) { let true = cond else { loop {} }; } // Variant that also has `loop {}` on the success path. // This isn't needed to catch the original ICE, but might help detect regressions. fn _loop_either_way(cond: bool) { let true = cond else { loop {} }; loop {} } // Variant using regular `if` instead of let-else. // This doesn't trigger the original ICE, but might help detect regressions. 
fn _if(cond: bool) { if cond { loop {} } else { loop {} } } #[coverage(off)] fn main() { loopy(true); } ================================================ FILE: rustc-coverage-tests/src/lib.rs ================================================ #![feature(coverage_attribute)] #![allow(unused_attributes)] #![allow(dead_code)] #![allow(unreachable_code)] #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod attr; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod auxiliary; /* Modules that are commented out are not used by any test target. They are kept in case they need to be added to a target in the future. */ // mod branch; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax" ))] mod abort; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax" ))] mod assert; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] #[path = "assert-ne.rs"] mod assert_ne; #[cfg(any( feature = "json", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod assert_not; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod condition; mod mcdc; // mod async_block; // mod async_closure; // mod r#async; // mod async2; // mod await_ready; // mod bad_counter_ids; // mod bench; // mod closure_bug; // mod closure_macro_async; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod closure_macro; // mod closure; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod closure_unit_return; #[cfg(any(feature = "json", feature = "lean"))] mod color; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod conditions; #[cfg(any(feature = "json", feature = "lean", feature = "fstar"))] #[path = 
"continue.rs"] mod continue_; // mod coroutine; // mod coverage_attr_closure; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod dead_code; // #[path = "discard-all-issue-133606.rs"] // mod discard_all_issue_133606; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod drop_trait; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod fn_sig_into_try; #[cfg(any(feature = "json", feature = "fstar", feature = "coq"))] mod generics; // #[path = "generic-unused-impl.rs"] // mod generic_unused_impl; // mod holes; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] #[path = "if.rs"] mod if_; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod if_else; #[cfg(any(feature = "json", feature = "lean"))] mod if_not; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod ignore_map; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod ignore_run; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] #[path = "inline-dead.rs"] mod inline_dead; // mod inline_mixed; #[cfg(any(feature = "json", feature = "lean"))] mod inline; #[cfg(any(feature = "json", feature = "lean"))] mod inner_items; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] #[path = "issue-83601.rs"] mod issue_83601; // #[path = "issue-84561.rs"] // mod issue_84561; // #[path = "issue-85461.rs"] // mod issue_85461; // #[path = "issue-93054.rs"] // mod issue_93054; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod lazy_boolean; #[cfg(any(feature = "json", feature = "lean"))] mod let_else_loop; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", 
feature = "coq" ))] mod long_and_wide; #[cfg(any(feature = "json", feature = "lean"))] #[path = "loop-break.rs"] mod loop_break; #[cfg(any(feature = "json", feature = "lean"))] mod loop_break_value; #[cfg(any(feature = "json", feature = "lean"))] mod loops_branches; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod macro_in_closure; // mod macro_name_span; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod match_or_pattern; #[cfg(any(feature = "json", feature = "lean", feature = "fstar"))] mod nested_loops; // #[path = "no-core.rs"] // mod no_core; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod no_cov_crate; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod no_spans; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod no_spans_if_not; #[cfg(any(feature = "json", feature = "fstar"))] mod overflow; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax" ))] mod panic_unwind; #[cfg(any(feature = "json", feature = "fstar", feature = "coq"))] mod partial_eq; #[cfg(any(feature = "json", feature = "lean"))] mod simple_loop; #[cfg(any(feature = "json", feature = "lean"))] mod simple_match; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod sort_groups; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod test_harness; #[cfg(any(feature = "json", feature = "lean"))] mod tight_inf_loop; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod trivial; #[cfg(any(feature = "json", feature = "lean", feature = "fstar", feature = "coq"))] mod try_error_result; #[cfg(any(feature = "json"))] mod unicode; // mod unreachable; #[cfg(any(feature = "json", feature = 
"lean", feature = "fstar"))] mod unused; #[cfg(any( feature = "json", feature = "lean", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod unused_mod; // mod uses_crate; // mod uses_inline_crate; #[cfg(any(feature = "json", feature = "lean"))] #[path = "while.rs"] mod while_; #[cfg(any(feature = "json", feature = "fstar"))] mod while_early_ret; // mod r#yield; ================================================ FILE: rustc-coverage-tests/src/long_and_wide.rs ================================================ //@ edition: 2021 // ignore-tidy-linelength // This file deliberately contains line and column numbers larger than 127, // to verify that `coverage-dump`'s ULEB128 parser can handle them. fn main() { wide_function(); long_function(); far_function(); } #[rustfmt::skip] fn wide_function() { /* */ (); } fn long_function() { // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // } fn far_function() {} ================================================ FILE: rustc-coverage-tests/src/loop-break.rs ================================================ //@ edition: 2021 fn main() { loop { if core::hint::black_box(true) { break; } } } // This test is a lightly-modified version of `tests/mir-opt/coverage/instrument_coverage.rs`. // If this test needs to be blessed, then the mir-opt version probably needs to // be blessed too! 
================================================ FILE: rustc-coverage-tests/src/loop_break_value.rs ================================================ #![allow(unused_assignments, unused_variables)] #[rustfmt::skip] fn main() { let result = loop { break 10 ; } ; } ================================================ FILE: rustc-coverage-tests/src/loops_branches.rs ================================================ #![allow(unused_assignments, unused_variables, while_true)] // This test confirms that (1) unexecuted infinite loops are handled correctly by the // InstrumentCoverage MIR pass; and (2) Counter Expressions that subtract from zero can be dropped. struct DebugTest; impl std::fmt::Debug for DebugTest { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { if true { if false { while true {} } write!(f, "cool")?; } else { } for i in 0..10 { if true { if false { while true {} } write!(f, "cool")?; } else { } } Ok(()) } } struct DisplayTest; impl std::fmt::Display for DisplayTest { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { if false { } else { if false { while true {} } write!(f, "cool")?; } for i in 0..10 { if false { } else { if false { while true {} } write!(f, "cool")?; } } Ok(()) } } fn main() { let debug_test = DebugTest; println!("{:?}", debug_test); let display_test = DisplayTest; println!("{}", display_test); } ================================================ FILE: rustc-coverage-tests/src/macro_in_closure.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 // If a closure body consists entirely of a single bang-macro invocation, the // body span ends up inside the macro-expansion, so we need to un-expand it // back to the declaration site. 
static NO_BLOCK: fn() = || println!("hello"); static WITH_BLOCK: fn() = || { println!("hello"); }; #[coverage(off)] fn main() { NO_BLOCK(); WITH_BLOCK(); } ================================================ FILE: rustc-coverage-tests/src/macro_name_span.rs ================================================ //@ edition: 2021 // Regression test for a rustc coverage issue (the angle-bracketed issue URL was lost during text extraction; see the upstream rustc tests/coverage/macro_name_span.rs for the link). // Under some circumstances, the heuristics that detect macro name spans can // get confused and produce incorrect spans beyond the bounds of the span // being processed. //@ aux-build: macro_name_span_helper.rs extern crate macro_name_span_helper; fn main() { affected_function(); } macro_rules! macro_with_an_unreasonably_and_egregiously_long_name { () => { println!("hello"); }; } macro_name_span_helper::macro_that_defines_a_function! { fn affected_function() { macro_with_an_unreasonably_and_egregiously_long_name!(); } } ================================================ FILE: rustc-coverage-tests/src/match_or_pattern.rs ================================================ fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut a: u8 = 0; let mut b: u8 = 0; if is_true { a = 2; b = 0; } match (a, b) { // Or patterns generate MIR `SwitchInt` with multiple targets to the same `BasicBlock`. // This test confirms a fix for Issue #79569.
(0 | 1, 2 | 3) => {} _ => {} } if is_true { a = 0; b = 0; } match (a, b) { (0 | 1, 2 | 3) => {} _ => {} } if is_true { a = 2; b = 2; } match (a, b) { (0 | 1, 2 | 3) => {} _ => {} } if is_true { a = 0; b = 2; } match (a, b) { (0 | 1, 2 | 3) => {} _ => {} } } ================================================ FILE: rustc-coverage-tests/src/mcdc/condition-limit.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=mcdc //@ llvm-cov-flags: --show-branches=count --show-mcdc fn accept_7_conditions(bool_arr: [bool; 7]) { let [a, b, c, d, e, f, g] = bool_arr; if a && b && c && d && e && f && g { core::hint::black_box("hello"); } } #[coverage(off)] fn main() { accept_7_conditions([false; 7]); accept_7_conditions([true; 7]); } ================================================ FILE: rustc-coverage-tests/src/mcdc/if.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=mcdc //@ llvm-cov-flags: --show-branches=count --show-mcdc fn mcdc_check_neither(a: bool, b: bool) { if a && b { say("a and b"); } else { say("not both"); } } fn mcdc_check_a(a: bool, b: bool) { if a && b { say("a and b"); } else { say("not both"); } } fn mcdc_check_b(a: bool, b: bool) { if a && b { say("a and b"); } else { say("not both"); } } fn mcdc_check_both(a: bool, b: bool) { if a && b { say("a and b"); } else { say("not both"); } } fn mcdc_check_tree_decision(a: bool, b: bool, c: bool) { // This expression is intentionally written in a way // where 100% branch coverage indicates 100% mcdc coverage. if a && (b || c) { say("pass"); } else { say("reject"); } } fn mcdc_check_not_tree_decision(a: bool, b: bool, c: bool) { // Contradict to `mcdc_check_tree_decision`, // 100% branch coverage of this expression does not indicate 100% mcdc coverage. 
if (a || b) && c { say("pass"); } else { say("reject"); } } fn mcdc_nested_if(a: bool, b: bool, c: bool) { if a || b { say("a or b"); if b && c { say("b and c"); } } else { say("neither a nor b"); } } #[coverage(off)] fn main() { mcdc_check_neither(false, false); mcdc_check_neither(false, true); mcdc_check_a(true, true); mcdc_check_a(false, true); mcdc_check_b(true, true); mcdc_check_b(true, false); mcdc_check_both(false, true); mcdc_check_both(true, true); mcdc_check_both(true, false); mcdc_check_tree_decision(false, true, true); mcdc_check_tree_decision(true, true, false); mcdc_check_tree_decision(true, false, false); mcdc_check_tree_decision(true, false, true); mcdc_check_not_tree_decision(false, true, true); mcdc_check_not_tree_decision(true, true, false); mcdc_check_not_tree_decision(true, false, false); mcdc_check_not_tree_decision(true, false, true); mcdc_nested_if(true, false, true); mcdc_nested_if(true, true, true); mcdc_nested_if(true, true, false); } #[coverage(off)] fn say(message: &str) { core::hint::black_box(message); } ================================================ FILE: rustc-coverage-tests/src/mcdc/inlined_expressions.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0 //@ llvm-cov-flags: --show-branches=count --show-mcdc #[inline(always)] fn inlined_instance(a: bool, b: bool) -> bool { a && b } #[coverage(off)] fn main() { let _ = inlined_instance(true, false); let _ = inlined_instance(false, true); let _ = inlined_instance(true, true); } ================================================ FILE: rustc-coverage-tests/src/mcdc/mod.rs ================================================ #[path = "condition-limit.rs"] #[cfg(any(feature = "json"))] mod condition_limit; #[cfg(any( feature = "json", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] #[path = "if.rs"] mod if_; #[cfg(any( feature = "json", 
feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod inlined_expressions; #[cfg(any( feature = "json", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod nested_if; #[cfg(any( feature = "json", feature = "fstar", feature = "fstar-lax", feature = "coq" ))] mod non_control_flow; ================================================ FILE: rustc-coverage-tests/src/mcdc/nested_if.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=mcdc //@ llvm-cov-flags: --show-branches=count --show-mcdc fn nested_if_in_condition(a: bool, b: bool, c: bool) { if a && if b || c { true } else { false } { say("yes"); } else { say("no"); } } fn doubly_nested_if_in_condition(a: bool, b: bool, c: bool, d: bool) { if a && if b || if c && d { true } else { false } { false } else { true } { say("yes"); } else { say("no"); } } fn nested_single_condition_decision(a: bool, b: bool) { // Decision with only 1 decision should not be instrumented by MCDC because // branch-coverage is equivalent to MCDC coverage in this case, and we don't // want to waste bitmap space for this. 
if a && if b { false } else { true } { say("yes"); } else { say("no"); } } fn nested_in_then_block_in_condition(a: bool, b: bool, c: bool, d: bool, e: bool) { if a && if b || c { if d && e { true } else { false } } else { false } { say("yes"); } else { say("no"); } } #[coverage(off)] fn main() { nested_if_in_condition(true, false, false); nested_if_in_condition(true, true, true); nested_if_in_condition(true, false, true); nested_if_in_condition(false, true, true); doubly_nested_if_in_condition(true, false, false, true); doubly_nested_if_in_condition(true, true, true, true); doubly_nested_if_in_condition(true, false, true, true); doubly_nested_if_in_condition(false, true, true, true); nested_single_condition_decision(true, true); nested_single_condition_decision(true, false); nested_single_condition_decision(false, false); nested_in_then_block_in_condition(false, false, false, false, false); nested_in_then_block_in_condition(true, false, false, false, false); nested_in_then_block_in_condition(true, true, false, false, false); nested_in_then_block_in_condition(true, false, true, false, false); nested_in_then_block_in_condition(true, false, true, true, false); nested_in_then_block_in_condition(true, false, true, false, true); nested_in_then_block_in_condition(true, false, true, true, true); } #[coverage(off)] fn say(message: &str) { core::hint::black_box(message); } ================================================ FILE: rustc-coverage-tests/src/mcdc/non_control_flow.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 //@ compile-flags: -Zcoverage-options=mcdc //@ llvm-cov-flags: --show-branches=count --show-mcdc // This test ensures that boolean expressions that are not inside control flow // decisions are correctly instrumented. 
use core::hint::black_box; fn assign_and(a: bool, b: bool) { let x = a && b; black_box(x); } fn assign_or(a: bool, b: bool) { let x = a || b; black_box(x); } fn assign_3(a: bool, b: bool, c: bool) { let x = a || b && c; black_box(x); } fn assign_3_bis(a: bool, b: bool, c: bool) { let x = a && b || c; black_box(x); } fn right_comb_tree(a: bool, b: bool, c: bool, d: bool, e: bool) { let x = a && (b && (c && (d && (e)))); black_box(x); } fn foo(a: bool) -> bool { black_box(a) } fn func_call(a: bool, b: bool) { foo(a && b); } #[coverage(off)] fn main() { assign_and(true, false); assign_and(true, true); assign_and(false, false); assign_or(true, false); assign_or(true, true); assign_or(false, false); assign_3(true, false, false); assign_3(true, true, false); assign_3(false, false, true); assign_3(false, true, true); assign_3_bis(true, false, false); assign_3_bis(true, true, false); assign_3_bis(false, false, true); assign_3_bis(false, true, true); right_comb_tree(false, false, false, true, true); right_comb_tree(true, false, false, true, true); right_comb_tree(true, true, true, true, true); func_call(true, false); func_call(true, true); func_call(false, false); } ================================================ FILE: rustc-coverage-tests/src/nested_loops.rs ================================================ fn main() { let is_true = std::env::args().len() == 1; let mut countdown = 10; 'outer: while countdown > 0 { let mut a = 100; let mut b = 100; for _ in 0..50 { if a < 30 { break; } a -= 5; b -= 5; if b < 90 { a -= 10; if is_true { break 'outer; } else { a -= 2; } } } countdown -= 1; } } ================================================ FILE: rustc-coverage-tests/src/no-core.rs ================================================ #![feature(no_core)] #![no_core] //@ edition: 2021 // Test that coverage instrumentation works for `#![no_core]` crates. // For this test, we pull in std anyway, to avoid having to set up our own // no-core or no-std environment. 
What's important is that the compiler allows // coverage for a crate with the `#![no_core]` annotation. extern crate std; fn main() {} ================================================ FILE: rustc-coverage-tests/src/no_cov_crate.rs ================================================ #![feature(coverage_attribute)] // Enables `coverage(off)` on the entire crate //@ reference: attributes.coverage.intro //@ reference: attributes.coverage.nesting #[coverage(off)] fn do_not_add_coverage_1() { println!("called but not covered"); } fn do_not_add_coverage_2() { #![coverage(off)] println!("called but not covered"); } #[coverage(off)] #[allow(dead_code)] fn do_not_add_coverage_not_called() { println!("not called and not covered"); } fn add_coverage_1() { println!("called and covered"); } fn add_coverage_2() { println!("called and covered"); } #[allow(dead_code)] fn add_coverage_not_called() { println!("not called but covered"); } // FIXME: These test-cases illustrate confusing results of nested functions. 
// See https://github.com/rust-lang/rust/issues/93319 mod nested_fns { #[coverage(off)] pub fn outer_not_covered(is_true: bool) { fn inner(is_true: bool) { if is_true { println!("called and covered"); } else { println!("absolutely not covered"); } } println!("called but not covered"); inner(is_true); } pub fn outer(is_true: bool) { println!("called and covered"); inner_not_covered(is_true); #[coverage(off)] fn inner_not_covered(is_true: bool) { if is_true { println!("called but not covered"); } else { println!("absolutely not covered"); } } } pub fn outer_both_covered(is_true: bool) { println!("called and covered"); inner(is_true); fn inner(is_true: bool) { if is_true { println!("called and covered"); } else { println!("absolutely not covered"); } } } } fn main() { let is_true = std::env::args().len() == 1; do_not_add_coverage_1(); do_not_add_coverage_2(); add_coverage_1(); add_coverage_2(); nested_fns::outer_not_covered(is_true); nested_fns::outer(is_true); nested_fns::outer_both_covered(is_true); } ================================================ FILE: rustc-coverage-tests/src/no_spans.rs ================================================ #![feature(coverage_attribute)] //@ edition: 2021 // If the span extractor can't find any relevant spans for a function, the // refinement loop will terminate with nothing in its `prev` slot. If the // subsequent code tries to unwrap `prev`, it will panic. // // This scenario became more likely after #118525 started discarding spans that // can't be un-expanded back to within the function body. // // Regression test for "invalid attempt to unwrap a None some_prev", as seen // in issues such as #118643 and #118662. #[coverage(off)] fn main() { affected_function()(); } macro_rules! macro_that_defines_a_function { (fn $name:ident () $body:tt) => { fn $name () -> impl Fn() $body } } macro_that_defines_a_function! 
{ fn affected_function() { || () } } ================================================ FILE: rustc-coverage-tests/src/no_spans_if_not.rs ================================================ //@ edition: 2021 // If the span extractor can't find any relevant spans for a function, // but the function contains coverage span-marker statements (e.g. inserted // for `if !`), coverage codegen may think that it is instrumented and // consequently complain that it has no spans. // // Regression test for a rustc issue whose angle-bracketed URL was lost during text extraction (see upstream rustc tests/coverage/no_spans_if_not.rs), // "A used function should have had coverage mapping data but did not". fn main() { affected_function(); } macro_rules! macro_that_defines_a_function { (fn $name:ident () $body:tt) => { fn $name () $body } } macro_that_defines_a_function! { fn affected_function() { if !false { () } else { () } } } ================================================ FILE: rustc-coverage-tests/src/overflow.rs ================================================ #![allow(unused_assignments)] //@ compile-flags: -Coverflow-checks=yes //@ failure-status: 101 fn might_overflow(to_add: u32) -> u32 { if to_add > 5 { println!("this will probably overflow"); } let add_to = u32::MAX - 5; println!("does {} + {} overflow?", add_to, to_add); let result = to_add + add_to; println!("continuing after overflow check"); result } fn main() -> Result<(), u8> { let mut countdown = 10; while countdown > 0 { if countdown == 1 { let result = might_overflow(10); println!("Result: {}", result); } else if countdown < 5 { let result = might_overflow(1); println!("Result: {}", result); } countdown -= 1; } Ok(()) } // Notes: // 1. Compare this program and its coverage results to those of the very similar test `assert.rs`, // and similar tests `panic_unwind.rs`, abort.rs` and `try_error_result.rs`. // 2. This test confirms the coverage generated when a program passes or fails a // compiler-generated `TerminatorKind::Assert` (based on an overflow check, in this case). // 3.
Similar to how the coverage instrumentation handles `TerminatorKind::Call`, // compiler-generated assertion failures are assumed to be a symptom of a program bug, not // expected behavior. To simplify the coverage graphs and keep instrumented programs as // small and fast as possible, `Assert` terminators are assumed to always succeed, and // therefore are considered "non-branching" terminators. So, an `Assert` terminator does not // get its own coverage counter. // 4. After an unhandled panic or failed Assert, coverage results may not always be intuitive. // In this test, the final count for the statements after the `if` block in `might_overflow()` // is 4, even though the lines after `to_add + add_to` were executed only 3 times. Depending // on the MIR graph and the structure of the code, this count could have been 3 (which might // have been valid for the overflowed add `+`, but should have been 4 for the lines before // the overflow. The reason for this potential uncertainty is, a `CounterKind` is incremented // via StatementKind::Counter at the end of the block, but (as in the case in this test), // a CounterKind::Expression is always evaluated. In this case, the expression was based on // a `Counter` incremented as part of the evaluation of the `if` expression, which was // executed, and counted, 4 times, before reaching the overflow add. 
// If the program did not overflow, the coverage for `might_overflow()` would look like this: // // 4| |fn might_overflow(to_add: u32) -> u32 { // 5| 4| if to_add > 5 { // 6| 0| println!("this will probably overflow"); // 7| 4| } // 8| 4| let add_to = u32::MAX - 5; // 9| 4| println!("does {} + {} overflow?", add_to, to_add); // 10| 4| let result = to_add + add_to; // 11| 4| println!("continuing after overflow check"); // 12| 4| result // 13| 4|} ================================================ FILE: rustc-coverage-tests/src/panic_unwind.rs ================================================ #![allow(unused_assignments)] //@ failure-status: 101 fn might_panic(should_panic: bool) { if should_panic { println!("panicking..."); panic!("panics"); } else { println!("Don't Panic"); } } fn main() -> Result<(), u8> { let mut countdown = 10; while countdown > 0 { if countdown == 1 { might_panic(true); } else if countdown < 5 { might_panic(false); } countdown -= 1; } Ok(()) } // Notes: // 1. Compare this program and its coverage results to those of the similar tests `abort.rs` and // `try_error_result.rs`. // 2. Since the `panic_unwind.rs` test is allowed to unwind, it is also allowed to execute the // normal program exit cleanup, including writing out the current values of the coverage // counters. ================================================ FILE: rustc-coverage-tests/src/partial_eq.rs ================================================ // This test confirms an earlier problem was resolved, supporting the MIR graph generated by the // structure of this test. 
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Version { major: usize, minor: usize, patch: usize, } impl Version { pub fn new(major: usize, minor: usize, patch: usize) -> Self { Self { major, minor, patch, } } } fn main() { let version_3_2_1 = Version::new(3, 2, 1); let version_3_3_0 = Version::new(3, 3, 0); println!( "{:?} < {:?} = {}", version_3_2_1, version_3_3_0, version_3_2_1 < version_3_3_0, // ); } /* This test verifies a bug was fixed that otherwise generated this error: thread 'rustc' panicked at 'No counters provided the source_hash for function: Instance { def: Item(WithOptConstParam { did: DefId(0:101 ~ autocfg[c44a]::version::{impl#2}::partial_cmp), const_param_did: None }), args: [] }' The `PartialOrd` derived by `Version` happened to generate a MIR that generated coverage without a code region associated with any `Counter`. Code regions were associated with at least one expression, which is allowed, but the `function_source_hash` was only passed to the codegen (coverage mapgen) phase from a `Counter`s code region. A new method was added to pass the `function_source_hash` without a code region, if necessary. */ ================================================ FILE: rustc-coverage-tests/src/simple_loop.rs ================================================ #![allow(unused_assignments)] #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. 
let is_true = std::env::args().len() == 1; let mut countdown = 0; if is_true { countdown = 10 ; } loop { if countdown == 0 { break ; } countdown -= 1 ; } } ================================================ FILE: rustc-coverage-tests/src/simple_match.rs ================================================ #![allow(unused_assignments, unused_variables)] #[rustfmt::skip] fn main() { // Initialize test constants in a way that cannot be determined at compile time, to ensure // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from // dependent conditions. let is_true = std::env::args().len() == 1; let mut countdown = 1; if is_true { countdown = 0; } for _ in 0..2 { let z ; match countdown { x if x < 1 => { z = countdown ; let y = countdown ; countdown = 10 ; } _ => {} } } } ================================================ FILE: rustc-coverage-tests/src/sort_groups.rs ================================================ //@ edition: 2021 // Demonstrate that `sort_subviews.py` can sort instantiation groups into a // predictable order, while preserving their heterogeneous contents. fn main() { let cond = std::env::args().len() > 1; generic_fn::<()>(cond); generic_fn::<&'static str>(!cond); if std::hint::black_box(false) { generic_fn::<char>(cond); } generic_fn::<i32>(cond); other_fn(); } fn generic_fn<T>(cond: bool) { if cond { println!("{}", std::any::type_name::<T>()); } } fn other_fn() {} ================================================ FILE: rustc-coverage-tests/src/test_harness.rs ================================================ // Verify that the entry point injected by the test harness doesn't cause // weird artifacts in the coverage report (e.g. issue #10749).
//@ compile-flags: --test #[allow(dead_code)] fn unused() {} #[test] fn my_test() {} ================================================ FILE: rustc-coverage-tests/src/tight_inf_loop.rs ================================================ fn main() { if false { loop {} } } ================================================ FILE: rustc-coverage-tests/src/trivial.rs ================================================ //@ edition: 2021 fn main() {} ================================================ FILE: rustc-coverage-tests/src/try_error_result.rs ================================================ #![allow(unused_assignments)] #![cfg_attr(rustfmt, rustfmt::skip)] //@ failure-status: 1 fn call(return_error: bool) -> Result<(), ()> { if return_error { Err(()) } else { Ok(()) } } fn test1() -> Result<(), ()> { let mut countdown = 10 ; for _ in 0..10 { countdown -= 1 ; if countdown < 5 { call(/*return_error=*/ true)?; call(/*return_error=*/ false)?; } else { call(/*return_error=*/ false)?; } } Ok(()) } struct Thing1; impl Thing1 { fn get_thing_2(&self, return_error: bool) -> Result<Thing2, ()> { if return_error { Err(()) } else { Ok(Thing2 {}) } } } struct Thing2; impl Thing2 { fn call(&self, return_error: bool) -> Result<u32, ()> { if return_error { Err(()) } else { Ok(57) } } } fn test2() -> Result<(), ()> { let thing1 = Thing1{}; let mut countdown = 10 ; for _ in 0..10 { countdown -= 1 ; if countdown < 5 { thing1.get_thing_2(/*err=*/ false)?.call(/*err=*/ true).expect_err("call should fail"); thing1 . get_thing_2(/*return_error=*/ false) ? . call(/*return_error=*/ true) . expect_err( "call should fail" ); let val = thing1.get_thing_2(/*return_error=*/ true)?.call(/*return_error=*/ true)?; assert_eq!(val, 57); let val = thing1.get_thing_2(/*return_error=*/ true)?.call(/*return_error=*/ false)?; assert_eq!(val, 57); } else { let val = thing1.get_thing_2(/*return_error=*/ false)?.call(/*return_error=*/ false)?; assert_eq!(val, 57); let val = thing1 .get_thing_2(/*return_error=*/ false)?
.call(/*return_error=*/ false)?; assert_eq!(val, 57); let val = thing1 .get_thing_2(/*return_error=*/ false) ? .call(/*return_error=*/ false) ? ; assert_eq!(val, 57); } } Ok(()) } fn main() -> Result<(), ()> { test1().expect_err("test1 should fail"); test2() ? ; Ok(()) } ================================================ FILE: rustc-coverage-tests/src/unicode.rs ================================================ //@ edition: 2021 //@ ignore-windows - we can't force `llvm-cov` to use ANSI escapes on Windows //@ llvm-cov-flags: --use-color // Check that column numbers are denoted in bytes, so that they don't cause // `llvm-cov` to fail or emit malformed output. // // Note that when `llvm-cov` prints ^ arrows on a subsequent line, it simply // inserts one space character for each "column", with no understanding of // Unicode or character widths. So those arrows will tend to be misaligned // for non-ASCII source code, regardless of whether column numbers are code // points or bytes. fn main() { for _İ in 'А'..='Я' { /* Я */ } if 申し訳ございません() && 申し訳ございません() { println!("true"); } サビ(); } fn 申し訳ございません() -> bool { std::hint::black_box(false) } macro_rules! macro_that_defines_a_function { (fn $名:ident () $体:tt) => { fn $名 () $体 fn 他 () {} } } macro_that_defines_a_function! { fn サビ() {} } ================================================ FILE: rustc-coverage-tests/src/unreachable.rs ================================================ #![feature(core_intrinsics, coverage_attribute)] //@ edition: 2021 // // If we instrument a function for coverage, but all of its counter-increment // statements are removed by MIR optimizations, LLVM will think it isn't // instrumented and it will disappear from coverage maps and coverage reports. // Most MIR opts won't cause this because they tend not to remove statements // from bb0, but `UnreachablePropagation` can do so if it sees that bb0 ends // with `TerminatorKind::Unreachable`. 
use std::hint::{black_box, unreachable_unchecked}; static UNREACHABLE_CLOSURE: fn() = || unsafe { unreachable_unchecked() }; fn unreachable_function() { unsafe { unreachable_unchecked() } } // Use an intrinsic to more reliably trigger unreachable-propagation. fn unreachable_intrinsic() { unsafe { std::intrinsics::unreachable() } } #[coverage(off)] fn main() { if black_box(false) { UNREACHABLE_CLOSURE(); } if black_box(false) { unreachable_function(); } if black_box(false) { unreachable_intrinsic(); } } ================================================ FILE: rustc-coverage-tests/src/unused.rs ================================================ #![allow(dead_code, unused_assignments, unused_must_use, unused_variables)] fn foo<T>(x: T) { let mut i = 0; while i < 10 { i != 0 || i != 0; i += 1; } } fn unused_template_func<T>(x: T) { let mut i = 0; while i < 10 { i != 0 || i != 0; i += 1; } } fn unused_func(mut a: u32) { if a != 0 { a += 1; } } fn unused_func2(mut a: u32) { if a != 0 { a += 1; } } fn unused_func3(mut a: u32) { if a != 0 { a += 1; } } fn main() -> Result<(), u8> { foo::<u32>(0); foo::<f32>(0.0); Ok(()) } ================================================ FILE: rustc-coverage-tests/src/unused_mod.rs ================================================ #[path = "auxiliary/unused_mod_helper.rs"] mod unused_module; fn main() { println!("hello world!"); } ================================================ FILE: rustc-coverage-tests/src/uses_crate.rs ================================================ // This test was failing on Linux for a while due to #110393 somehow making // the unused functions not instrumented, but it seems to be fine now.
// Validates coverage now works with optimizations //@ compile-flags: -C opt-level=3 #![allow(unused_assignments, unused_variables)] //@ aux-build:used_crate.rs extern crate used_crate; fn main() { used_crate::used_function(); let some_vec = vec![1, 2, 3, 4]; used_crate::used_only_from_bin_crate_generic_function(&some_vec); used_crate::used_only_from_bin_crate_generic_function("used from bin uses_crate.rs"); used_crate::used_from_bin_crate_and_lib_crate_generic_function(some_vec); used_crate::used_with_same_type_from_bin_crate_and_lib_crate_generic_function("interesting?"); } ================================================ FILE: rustc-coverage-tests/src/uses_inline_crate.rs ================================================ // This test was failing on Linux for a while due to #110393 somehow making // the unused functions not instrumented, but it seems to be fine now. // Validates coverage now works with optimizations //@ compile-flags: -C opt-level=3 #![allow(unused_assignments, unused_variables)] //@ aux-build:used_inline_crate.rs extern crate used_inline_crate; fn main() { used_inline_crate::used_function(); used_inline_crate::used_inline_function(); let some_vec = vec![1, 2, 3, 4]; used_inline_crate::used_only_from_bin_crate_generic_function(&some_vec); used_inline_crate::used_only_from_bin_crate_generic_function("used from bin uses_crate.rs"); used_inline_crate::used_from_bin_crate_and_lib_crate_generic_function(some_vec); used_inline_crate::used_with_same_type_from_bin_crate_and_lib_crate_generic_function( "interesting?", ); } ================================================ FILE: rustc-coverage-tests/src/while.rs ================================================ fn main() { let num = 9; while num >= 10 { // loop body } } ================================================ FILE: rustc-coverage-tests/src/while_early_ret.rs ================================================ #![allow(unused_assignments)] //@ failure-status: 1 #[rustfmt::skip] fn main() -> Result<(), 
u8> { let mut countdown = 10; while countdown > 0 { if countdown < 5 { return if countdown > 8 { Ok(()) } else { Err(1) } ; } countdown -= 1 ; } Ok(()) } // ISSUE(77553): Originally, this test had `Err(1)` on line 22 (instead of `Ok(())`) and // `std::process::exit(2)` on line 26 (instead of `Err(1)`); and this worked as expected on Linux // and MacOS. But on Windows (MSVC, at least), the call to `std::process::exit()` exits the program // without saving the InstrProf coverage counters. The use of `std::process:exit()` is not critical // to the coverage test for early returns, but this is a limitation that should be fixed. ================================================ FILE: rustc-coverage-tests/src/yield.rs ================================================ #![feature(coroutines, coroutine_trait, stmt_expr_attributes)] #![allow(unused_assignments)] use std::ops::{Coroutine, CoroutineState}; use std::pin::Pin; fn main() { let mut coroutine = #[coroutine] || { yield 1; return "foo"; }; match Pin::new(&mut coroutine).resume(()) { CoroutineState::Yielded(1) => {} _ => panic!("unexpected value from resume"), } match Pin::new(&mut coroutine).resume(()) { CoroutineState::Complete("foo") => {} _ => panic!("unexpected value from resume"), } let mut coroutine = #[coroutine] || { yield 1; yield 2; yield 3; return "foo"; }; match Pin::new(&mut coroutine).resume(()) { CoroutineState::Yielded(1) => {} _ => panic!("unexpected value from resume"), } match Pin::new(&mut coroutine).resume(()) { CoroutineState::Yielded(2) => {} _ => panic!("unexpected value from resume"), } } ================================================ FILE: rustc-coverage-tests/test_config.yaml ================================================ tests: attr::impl_: - json - coq - fstar - fstar-lax - lean - lean-tc attr::module: - json - coq - lean - lean-tc - fstar - fstar-lax attr::off_on_sandwich: - json - coq - lean - lean-tc - fstar - fstar-lax attr::trait_impl_inherit: - json - lean 
auxiliary::discard_all_helper: - json - coq - lean - lean-tc - fstar - fstar-lax auxiliary::used_crate: - json - coq - fstar - lean auxiliary::used_inline_crate: - json - coq - fstar - lean condition::conditions: - json - coq - fstar - fstar-lax - lean mcdc::condition_limit: - json - lean mcdc::if_: - json - coq - fstar - fstar-lax - lean mcdc::inlined_expressions: - json - coq - fstar - fstar-lax - lean - lean-tc mcdc::nested_if: - json - coq - fstar - fstar-lax mcdc::non_control_flow: - json - coq - fstar - fstar-lax - lean abort: - json - lean - fstar - fstar-lax assert: - json - lean - fstar - fstar-lax assert_ne: - json - coq - fstar - fstar-lax assert_not: - json - coq - lean - fstar - fstar-lax closure_macro: - json - coq - fstar - lean closure_unit_return: - json - coq - lean - fstar - fstar-lax color: - json - lean - coq - fstar - fstar-lax conditions: - json - coq - fstar - lean continue_: - json - fstar dead_code: - json - coq - fstar - lean drop_trait: - json - coq - fstar - fstar-lax - lean fn_sig_into_try: - json - coq - lean - lean-tc - fstar - fstar-lax generics: - json - coq - fstar if_: - json - coq - fstar - lean if_else: - json - coq - fstar - lean if_not: - json - lean - coq - fstar - fstar-lax ignore_map: - json - coq - lean - lean-tc - fstar - fstar-lax ignore_run: - json - coq - lean - lean-tc - fstar - fstar-lax inline_dead: - json - coq - fstar inline: - json - lean - coq - fstar inner_items: - json - lean issue_83601: - json - coq - fstar - fstar-lax - lean lazy_boolean: - json - coq - fstar - lean let_else_loop: - json long_and_wide: - json - lean - lean-tc - fstar - fstar-lax - coq loop_break: - json - lean loop_break_value: - json - lean loops_branches: - json - fstar - lean macro_in_closure: - json - lean - fstar - fstar-lax - coq match_or_pattern: - json - fstar - coq nested_loops: - json - fstar - lean no_cov_crate: - json - lean - fstar - coq no_spans: - json - lean - fstar - coq no_spans_if_not: - json - lean - fstar - fstar-lax - 
coq overflow: - json - fstar - fstar-lax - lean panic_unwind: - json - lean - fstar - fstar-lax partial_eq: - json - fstar - coq - fstar-lax - lean simple_loop: - json - lean simple_match: - json - lean sort_groups: - json - lean - fstar - coq test_harness: - json - lean - lean-tc - fstar - fstar-lax - coq tight_inf_loop: - json - lean trivial: - json - lean - lean-tc - fstar - fstar-lax - coq try_error_result: - json - lean - fstar - coq unicode: - json - coq - fstar unused: - json - fstar unused_mod: - json - lean - fstar - fstar-lax - coq while_: - json - lean - fstar - fstar-lax while_early_ret: - json - fstar - fstar-lax ================================================ FILE: rustc-coverage-tests/update-test-sources.sh ================================================ #!/bin/bash # Get the necessary part of the rust repo git clone --depth 1 --filter=blob:none --no-checkout https://github.com/rust-lang/rust.git cd rust git sparse-checkout init --cone git checkout master git sparse-checkout set tests/coverage # Copy the rust files cd .. find rust/tests/coverage -type f -name "*.rs" -exec bash -c ' for file; do dest="src/$(dirname "$file" | sed "s|rust/tests/coverage||")" mkdir -p "$dest" cp -f "$file" "$dest" done ' bash {} + # Cleanup cargo fmt rm -rf rust ================================================ FILE: rustfmt.toml ================================================ style_edition = "2024" edition = "2024" ================================================ FILE: setup.sh ================================================ #!/usr/bin/env bash set -eu SCRIPTPATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" opam_jobs=4 CLEANUP_WORKSPACE=on # Parse command line arguments. 
all_args=("$@") while [ $# -gt 0 ]; do case "$1" in -j) opam_jobs=$2 shift ;; --no-cleanup) CLEANUP_WORKSPACE=off ;; --help) echo "hax setup script" echo "" echo "Usage: $0 [OPTIONS]" echo "" echo "Options:" echo ' -j The number of opam jobs to run in parallel' echo ' --no-cleanup Disables the default behavior that runs `cargo clean` and `opam clean`' exit ;; esac shift done # Cleanup the cargo and dune workspace, to make sure we are in a clean # state cleanup_workspace() { cargo clean ( cd engine opam clean ) } # Warns if we're building in a dirty checkout of hax: while hacking on # hax, we should really be using `just build`. warn_if_dirty() { ( cd "$SCRIPTPATH" if ! git diff-index --quiet HEAD -- >& /dev/null; then printf '\e[33mWarning: This is a dirty checkout of hax!\n If you are hacking on hax, please use the \e[1m`./.utils/rebuild.sh`\e[0m\e[33m script.\e[0m\n\n' fi ) } # Ensures a given binary is available in PATH ensure_binary_available() { command -v "$1" >/dev/null 2>&1 || { printf '\e[31mError: binary \e[1m%s\e[0m\e[31m was not found.\e[0m\n' "$1" printf '\e[37m(Did you look at \e[1mManual installation\e[0m\e[37m in \e[1mREADME.md\e[0m\e[37m?)\e[0m.\n' exit 1 } } NODE_VERSION_MIN_MAJOR=17 ensure_node_is_recent_enough() { function strip_first_char () { cut -c2- } function get_major () { cut -d'.' 
-f1 } VERSION=$(node --version) MAJOR=$(echo "$VERSION" | strip_first_char | get_major) if [[ "$MAJOR" -lt "$NODE_VERSION_MIN_MAJOR" ]]; then printf '\e[31mError: \e[1m%s\e[0m\e[31m appears to be too old.\e[0m\n' "NodeJS" printf '\e[37m(the minimal version required is \e[1m%s\e[0m\e[37m, yours is \e[1m%s\e[0m\e[37m)\e[0m.\n' "v${NODE_VERSION_MIN_MAJOR}.*.*" "$VERSION" exit 1 fi } # Installs the Rust CLI & frontend, providing `cargo-hax` and `driver-hax` install_rust_binaries() { for i in driver subcommands ../engine/names/extract ../rust-engine; do ( set -x cargo install --locked --force --path "cli/$i" ) done } # Provides the `hax-engine` binary install_ocaml_engine() { # Fixes out of memory issues (https://github.com/hacspec/hax/issues/197) { # Limit the number of thread spawned by opam export OPAMJOBS=$opam_jobs # Make the garbadge collector of OCaml more agressive (see # https://discuss.ocaml.org/t/how-to-limit-the-amount-of-memory-the-ocaml-compiler-is-allowed-to-use/797) export OCAMLRUNPARAM="o=20" } # Make opam show logs when an error occurs export OPAMERRLOGLEN=0 # Make opam ignore system dependencies (it doesn't handle properly certain situations) export OPAMASSUMEDEPEXTS=1 ( set -x opam uninstall hax-engine || true # Lift the soft stack limit for ocamlopt: large preprocessed # files (e.g. `lib/types.pp.ml`) overflow the default stack on # recent GitHub Actions runner images. macOS rejects # `unlimited`, so try `hard` first. 
ulimit -s hard 2>/dev/null || ulimit -s unlimited 2>/dev/null || true opam install --yes ./engine ) } warn_if_dirty for binary in opam node rustup jq; do ensure_binary_available $binary done ensure_node_is_recent_enough # Make sure the correct rust toolchain is installed rustup show active-toolchain || rustup toolchain install if [ "$CLEANUP_WORKSPACE" = "on" ]; then cleanup_workspace fi install_rust_binaries install_ocaml_engine ================================================ FILE: test-harness/.gitignore ================================================ *.snap.new ================================================ FILE: test-harness/Cargo.toml ================================================ [package] name = "hax-test-harness" version.workspace = true authors.workspace = true license.workspace = true homepage.workspace = true edition.workspace = true repository.workspace = true readme.workspace = true [[test]] name = "toolchain" path = "src/harness.rs" harness = false test = false [dev-dependencies] libtest-mimic = "0.6" cargo_metadata.workspace = true enum-iterator = "1.4" serde_json = "1.0" lazy_static = "1.4" assert_cmd = "2.0" insta = {version = "1.29.0", features = ["filters", "toml"]} serde = { version = "1.0", features = ["derive"] } regex = "1" hax-types.workspace = true [package.metadata.release] release = false ================================================ FILE: test-harness/README.md ================================================ # Tests the whole toolchain This crate defines a custom test harness[^1][^2] that scans for packages in the Cargo workspace `../tests/Cargo.toml`. Each package in that workspace should define a sequence of tests to be run in the `package.metadata.hax-tests` dictionary of its `Cargo.toml` manifest. Note this cargo test is disabled by default, since it requires both the Cargo and Dune package to be built. To run this test, please use the command `cargo test --test toolchain`. 
## Format for `package.metadata.hax-tests` `package.metadata.hax-tests` is a map from a target (e.g. `into fstar` or `lint hacspec`) to a **test specification** (see below). `package.metadata.hax-tests` is expected to be a **dictionary** with the following optional fields: - `lint`, a map from a **linter name** to a **test specification**. - `into`, a map from a **backend name** to a **test specification**. Note that instead of linter or backend names, conjunction are allowed, for instance `fstar+coq`. ### Test specifications A **test specification** is a dictionary with the following fields: - positive: bool ⟨true⟩: is the test positive (the exit code of the `cargo hax` command is `0`) or negative (the exit code is non-null)? - snapshots: should we enforce the stability of the output of the `cargo hax` command? - snapshots.stdout: bool ⟨true⟩ - snapshots.stderr: bool ⟨true⟩ **Note:** this field can also be set to the following strings: `stdout`, `stderr`, `both` or `none`. - optional: bool ⟨false⟩: is the test optional? (useful for slow tests for instance) - broken: bool ⟨false⟩: is this test broken because of some feature not being implemented? - issue_id: u64 ⟨null⟩: when the test has a companion issue on GitHub (closed or not) ### Linter names The available linters can be listed by running `cargo hax lint --help`. ### Backend names The available backends can be listed by running `cargo hax into --help`. ## The `insta` tool and library Those tests are written using the [`insta` library](https://insta.rs/). This allows us to enforce the stability of `stdout` and `stderr` for negative tests. In the future, we will also ensure the files produced by the different backends remains the same in positive tests of extraction. When some `stderr` changes, one can review (by interactively accepting or rejecting changes) changes using the [`cargo-insta` subcommand](https://insta.rs/docs/cli/). 
/// Builds the `-j <n>` arguments controlling dune's parallelism from the
/// `DUNEJOBS` environment variable; returns no arguments when it is unset.
fn dune_jobs_args() -> Vec<String> {
    match std::env::var("DUNEJOBS") {
        Ok(jobs) => vec!["-j".into(), jobs],
        Err(_) => Vec::new(),
    }
}
assert!(Command::new("cargo") .args(&["build", "--bins"]) .current_dir(&root) .status() .unwrap() .success()); let cargo_hax = cargo_bin(CARGO_HAX); // Now the driver & CLI are installed, call `cargo // build` injecting their paths assert!(Command::new("cargo") .args(&["build", "--workspace", "--bin", "hax-engine-names-extract"]) .env("HAX_CARGO_COMMAND_PATH", &cargo_hax) .current_dir(&root) .status() .unwrap() .success()); assert!(Command::new("cargo") .args(&["build"]) .current_dir(&root.join("rust-engine")) .status() .unwrap() .success()); assert!(Command::new("dune") .args(&["build"]) .args(dune_jobs_args()) .env("HAX_JSON_SCHEMA_EXPORTER_BINARY", cargo_bin("hax-export-json-schemas")) .env("HAX_ENGINE_NAMES_EXTRACT_BINARY", cargo_bin("hax-engine-names-extract")) .current_dir(engine_dir.clone()) .status() .unwrap() .success()); let rust_engine = cargo_bin(HAX_RUST_ENGINE); Some(Paths { cargo_hax, rust_engine, engine: engine_dir.join("_build/install/default/bin/hax-engine"), }) }; } let mut cmd = match PATHS.deref() { Some(paths) => { let mut cmd = Command::new(paths.cargo_hax.clone()); cmd.env("HAX_ENGINE_BINARY", paths.engine.clone()); cmd.env("HAX_RUST_ENGINE_BINARY", paths.rust_engine.clone()); cmd } None => Command::new(CARGO_HAX), }; cmd.args(args); // As documented in // https://doc.rust-lang.org/cargo/reference/environment-variables.html#dynamic-library-paths, // [cargo run] (and thus also [cargo test]) sets dynamic // library paths, which causes some issues with dependencies // when compiling without rustup for env in ["DYLD_FALLBACK_LIBRARY_PATH", "LD_LIBRARY_PATH"] { cmd.env_remove(env); } cmd } } ================================================ FILE: test-harness/src/harness.rs ================================================ #![feature(rustc_private)] mod command_hax_ext; use command_hax_ext::*; use serde_json::{Map, Value}; use std::process::{Command, Stdio}; #[derive(Clone, Debug, serde::Serialize)] pub enum TestKind { Translate { backend: String 
}, } impl TestKind { fn as_name(&self) -> String { (match self { TestKind::Translate { backend } => ["into".to_string(), backend.clone()], }) .join("-") } } #[allow(dead_code)] fn bool_true() -> bool { true } #[derive(Clone, Debug, serde::Serialize)] pub struct TestSnapshot { #[serde(default = "bool_true")] pub stderr: bool, #[serde(default = "bool_true")] pub stdout: bool, } #[derive(Clone, Debug, serde::Serialize)] pub struct TestSpec { /// is the test optional? (useful for slow tests for instance) pub optional: bool, /// a broken test a test that should succeed (or fail) but does /// not dues to a bug to be fixed (see field [issue_id] below) pub broken: bool, /// Github issue ID pub issue_id: Option, /// Is that a positive or a negative test? pub positive: bool, pub snapshot: TestSnapshot, pub include_flag: Option, pub backend_options: Option>, } impl From for TestSpec { /// Parse a JSON value into a TestSpec fn from(o: Value) -> Self { fn as_opt_bool(v: &Value, def: bool) -> Option { if v.is_null() { return Some(def); } v.as_bool() } fn as_bool(o: &Value, k: &str, def: bool) -> bool { let v = &o[k]; as_opt_bool(v, def) .expect(format!("[{}] was expected to be a boolean, got {}", k, v).as_str()) } let snapshot = &o["snapshot"]; TestSpec { optional: as_bool(&o, "optional", false), broken: as_bool(&o, "broken", false), positive: as_bool(&o, "positive", true), issue_id: o["positive"].as_u64(), include_flag: o["include-flag"].as_str().map(|s| s.into()), backend_options: serde_json::from_value(o["backend-options"].clone()).unwrap(), snapshot: as_opt_bool(snapshot, true) .map(|b| TestSnapshot { stderr: b, stdout: b, }) .or_else(|| match snapshot.as_str() { Some(v @ ("stdout" | "stderr" | "both" | "none")) => Some(TestSnapshot { stdout: matches!(v, "stdout" | "both"), stderr: matches!(v, "stderr" | "both"), }), Some(v) => panic!( "[snapshot] is \"{}\" but was expected to be \"stderr\", \"stdout\" or \"both\"", v ), None => None, }) .unwrap_or_else(|| TestSnapshot { 
stderr: as_bool(&snapshot, "stderr", true), stdout: as_bool(&snapshot, "stdout", true), }), } } } /// The information for a test is given by `cargo metadata` #[derive(Clone, Debug, serde::Serialize)] pub struct TestInfo { pub name: String, pub manifest: std::path::PathBuf, pub description: Option, } #[derive(Clone, Debug, serde::Serialize)] pub struct Test { pub kind: TestKind, pub info: TestInfo, pub spec: TestSpec, } impl Test { fn as_args(&self) -> Vec { match &self.kind { TestKind::Translate { backend } => { let mut args = vec![]; args.push("into".to_string()); if let Some(i) = self.spec.include_flag.as_ref() { args.push("-i".to_string()); args.push(i.to_string()); } args.push("--dry-run".to_string()); args.push(backend.clone()); if let Some(backend_options) = &self.spec.backend_options { args.extend_from_slice(backend_options.clone().as_slice()); } args } } } } impl std::fmt::Display for Test { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{} - {:?}", self.info.name, self.kind)?; if let Some(issue_id) = self.spec.issue_id { write!(f, " #{}", issue_id)?; }; Ok(()) } } impl Test { fn into_runner(self, workspace: String) -> Result<(), libtest_mimic::Failed> { // 1. cook a command let mut cmd = Command::hax(&["-C"]); cmd.arg("--manifest-path").arg(self.info.manifest.clone()); cmd.arg(";"); cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); cmd.args(self.as_args()); // 2. execute it (twice, idea of @franziskuskiefer, so that // the messages related to building dependencies are not // included in the second one) let _ = cmd.output().unwrap(); let out = cmd.output().unwrap(); let command_successful = out.status.success(); let cleanup = |s: String| { use lazy_static::lazy_static; use regex::Regex; lazy_static! 
{ // Regex [TIME] matches compile times static ref TIME: Regex = Regex::new(r"\bin \d+(\.\d+)?s\b").unwrap(); static ref LOCK: Regex = Regex::new(r"Blocking waiting for \w+ lock on (the registry index|build directory|package cache)").unwrap(); } TIME.replace_all( LOCK.replace_all( &s.replace(r"\", "/").replace(&workspace, "WORKSPACE_ROOT"), "", ) .as_ref(), "in XXs", ) .trim() .to_string() }; let serr = cleanup(String::from_utf8_lossy(&out.stderr).to_string()); let sout = String::from_utf8_lossy(&out.stdout).to_string(); // 3. make sure the test is successful let mut snapshot: Map = Map::new(); if self.spec.snapshot.stderr { snapshot.insert("stderr".to_string(), Value::String(serr.clone())); } if self.spec.snapshot.stdout { snapshot.insert( "stdout".to_string(), serde_json::from_str(&sout) .map(|out: hax_types::engine_api::Output| { use serde_json::json; json!({ "diagnostics": Value::Array(out.diagnostics.into_iter().map(|diag| json!({ "spans": Value::Array(diag.span.clone().into_iter().map(|span| Value::String(format!("{:?}", span))).collect()), "message": Value::String(format!("{}", diag)), })).collect()), "files": Value::Object(out.files.into_iter().map(|file| (file.path, Value::String(file.contents))).collect()) }) }) .unwrap_or_else(|_| Value::String(cleanup(sout.clone()))), ); } if !snapshot.is_empty() { let exit = out.status.code().unwrap_or(std::i32::MAX); snapshot.insert("exit".to_string(), exit.into()); let snapshot = Value::Object(snapshot); let name = format!("{} {}", self.info.name, self.kind.as_name()); let mut info = self.clone(); info.info.manifest = info.info.manifest.strip_prefix(workspace).unwrap().into(); insta::with_settings!({ info => &info, }, { insta::assert_toml_snapshot!(name, snapshot) }) } let err = |s: &str| { Err(format!( "Command {s}.\nThe command was: {:?}{}", cmd, if command_successful { "".to_string() } else { format!("\nSTDOUT:\n{}\nSTDERR:\n{}", sout, serr) } )) }; match (command_successful, (self.spec.positive, 
self.spec.broken)) { (true, (true, false) | (false, true)) => Ok(()), (false, (false, false) | (true, true)) => Ok(()), (false, (false, true)) => err("failed, but this is a negative test marked broken")?, (false, (true, false)) => err("failed")?, (true, (true, true)) => err("succeeded, but this is a positive test marked broken")?, (true, (false, false)) => err("succeeded, but this is a negative test")?, } } fn into_trial(&self, workspace: &String) -> libtest_mimic::Trial { libtest_mimic::Trial::test(format!("{}", &self), { let test = self.clone(); let workspace = workspace.clone(); move || test.clone().into_runner(workspace) }) .with_kind(if self.spec.positive { "positive" } else { "negative" }) .with_ignored_flag(self.spec.optional) } } /// Given [metadata] the table declared in a test's [Cargo.toml] /// [workspace.hax-tests], this function returns a list of tests fn parse_hax_tests_metadata(info: TestInfo, metadata: &Value) -> Vec { if metadata.is_null() { return vec![]; } metadata .as_object() .expect( format!( "Expected value at key [hax-tests] to be a dictionary for package {:#?}", info ) .as_str(), ) .into_iter() .flat_map(|(a, o)| { o.as_object() .expect( format!( "Expected value at key [{}] be a dictionary for package {:#?}", a, info ) .as_str(), ) .into_iter() .flat_map(|(key, o)| key.split("+").map(|k| (k.trim().to_string(), o.clone()))) .map(|(b, o)| (a.clone(), b, o)) }) .map(|(a, b, o)| Test { spec: o.into(), info: info.clone(), kind: match a.as_str() { "into" => TestKind::Translate { backend: b }, _ => panic!( "unexpected metadata [hax-tests.{}.{}] for package {:#?}", a, b, info ), }, }) .collect() } fn main() { let metadata = cargo_metadata::MetadataCommand::new() .manifest_path("../tests/Cargo.toml") .exec() .unwrap(); let workspace_root: String = metadata.workspace_root.into(); let mut args = libtest_mimic::Arguments::from_args(); args.test_threads = Some(1); libtest_mimic::run( &args, metadata .packages .into_iter() .flat_map(|o| { 
parse_hax_tests_metadata( TestInfo { name: o.name, description: o.description, manifest: o.manifest_path.into(), }, &o.metadata["hax-tests"], ) }) .map(|test| test.into_trial(&workspace_root)) .collect(), ) .exit(); } ================================================ FILE: test-harness/src/lib.rs ================================================ ================================================ FILE: test-harness/src/snapshots/toolchain__assert into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: assert manifest: assert/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Assert.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Definition asserts '(_ : unit) : unit := let _ := assert ((true : bool)) in let _ := assert (f_eq ((1 : t_i32)) ((1 : t_i32))) in let _ := match ((2 : t_i32),(2 : t_i32)) with | (left_val,right_val) => assert (f_eq (left_val) (right_val)) end in let _ := match ((1 : t_i32),(2 : t_i32)) with | (left_val,right_val) => assert (f_not (f_eq (left_val) (right_val))) end in tt. 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Assert.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__assert into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: assert manifest: assert/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Assert.fst" = ''' module Assert #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let asserts (_: Prims.unit) : Prims.unit = let _:Prims.unit = Hax_lib.v_assert true in let _:Prims.unit = Hax_lib.v_assert (mk_i32 1 =. mk_i32 1 <: bool) in let _:Prims.unit = match mk_i32 2, mk_i32 2 <: (i32 & i32) with | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool) in let _:Prims.unit = match mk_i32 1, mk_i32 2 <: (i32 & i32) with | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. right_val <: bool) <: bool) in () ''' ================================================ FILE: test-harness/src/snapshots/toolchain__assert into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: ssprove info: name: assert manifest: assert/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Assert.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. 
*) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations asserts (_ : both 'unit) : both 'unit := asserts _ := letb _ := assert (ret_both (true : 'bool)) in letb _ := assert ((ret_both (1 : int32)) =.? (ret_both (1 : int32))) in letb _ := matchb prod_b (ret_both (2 : int32),ret_both (2 : int32)) with | '(left_val,right_val) => assert (left_val =.? right_val) end in letb _ := matchb prod_b (ret_both (1 : int32),ret_both (2 : int32)) with | '(left_val,right_val) => assert (f_not (left_val =.? right_val)) end in ret_both (tt : 'unit) : both 'unit. Fail Next Obligation. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: attribute-opaque manifest: attribute-opaque/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: - "--interfaces" - +** --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Attribute_opaque.fst" = ''' module Attribute_opaque #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models assume val t_OpaqueStruct': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype let t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) = t_OpaqueStruct' v_X v_T v_U assume val t_OpaqueEnum': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype let t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) = t_OpaqueEnum' v_X v_T v_U assume val ff_generic': v_X: usize -> #v_T: Type0 -> #v_U: Type0 -> x: v_U -> Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True) let ff_generic (v_X: usize) (#v_T #v_U: Type0) = ff_generic' v_X #v_T #v_U assume val f': x: bool -> y: bool -> Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) let f = f' assume val ff_pre_post': x: bool -> y: bool -> Prims.Pure bool (requires x) (ensures fun result -> let result:bool = result in result =. 
y) let ff_pre_post = ff_pre_post' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_T_for_u8': t_T u8 let impl_T_for_u8 = impl_T_for_u8' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': #v_U: Type0 -> {| i0: Core_models.Clone.t_Clone v_U |} -> t_TrGeneric i32 v_U let impl_2 (#v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_U) = impl_2' #v_U #i0 assume val v_C': u8 let v_C = v_C' assume val impl_S1__ff_s1': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) let impl_S1__ff_s1 = impl_S1__ff_s1' assume val impl_S2__ff_s2': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) let impl_S2__ff_s2 = impl_S2__ff_s2' ''' "Attribute_opaque.fsti" = ''' module Attribute_opaque #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models val t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) : eqtype val t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) : eqtype val ff_generic (v_X: usize) (#v_T #v_U: Type0) (x: v_U) : Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True) val f (x y: bool) : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) val ff_pre_post (x y: bool) : Prims.Pure bool (requires x) (ensures fun result -> let result:bool = result in result =. y) class t_T (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0; f_c:u8; f_d_pre:Prims.unit -> Type0; f_d_post:Prims.unit -> Prims.unit -> Type0; f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result); f_m_pre:self_: v_Self -> x: u8 -> pred: Type0{x =. 
mk_u8 0 ==> pred}; f_m_post:v_Self -> u8 -> bool -> Type0; f_m:x0: v_Self -> x1: u8 -> Prims.Pure bool (f_m_pre x0 x1) (fun result -> f_m_post x0 x1 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_T_for_u8:t_T u8 class t_TrGeneric (v_Self: Type0) (v_U: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_U; f_f_pre:v_U -> Type0; f_f_post:v_U -> v_Self -> Type0; f_f:x0: v_U -> Prims.Pure v_Self (f_f_pre x0) (fun result -> f_f_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) (v_U:Type0) {|i: t_TrGeneric v_Self v_U|} -> i._super_i0 [@@ FStar.Tactics.Typeclasses.tcinstance] val impl_2 (#v_U: Type0) {| i0: Core_models.Clone.t_Clone v_U |} : t_TrGeneric i32 v_U val v_C:u8 type t_S1 = | S1 : t_S1 val impl_S1__ff_s1: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) type t_S2 = | S2 : t_S2 val impl_S2__ff_s2: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) ''' ================================================ FILE: test-harness/src/snapshots/toolchain__attributes into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: attributes manifest: attributes/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Attributes.Ensures_on_arity_zero_fns.fst" = ''' module Attributes.Ensures_on_arity_zero_fns #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let doing_nothing (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (ensures fun e_x -> let e_x:Prims.unit = e_x in true) = () let basically_a_constant (_: Prims.unit) : Prims.Pure u8 (requires true) (ensures fun x -> let x:u8 = x in x >. 
mk_u8 100) = mk_u8 127 ''' "Attributes.Future_self.fst" = ''' module Attributes.Future_self #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Dummy = | Dummy : t_Dummy [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': Core_models.Marker.t_StructuralPartialEq t_Dummy unfold let impl_1 = impl_1' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': Core_models.Cmp.t_PartialEq t_Dummy t_Dummy unfold let impl_2 = impl_2' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Cmp.t_Eq t_Dummy unfold let impl = impl' let impl_Dummy__f (self: t_Dummy) : Prims.Pure t_Dummy Prims.l_True (ensures fun self_e_future -> let self_e_future:t_Dummy = self_e_future in self_e_future =. self) = self ''' "Attributes.Inlined_code_ensures_requires.fst" = ''' module Attributes.Inlined_code_ensures_requires #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let increment_array (v: t_Array u8 (mk_usize 4)) : Prims.Pure (t_Array u8 (mk_usize 4)) (requires forall i. FStar.Seq.index v i <. mk_u8 254) (ensures fun vv_future -> let vv_future:t_Array u8 (mk_usize 4) = vv_future in let future_v:t_Array u8 (mk_usize 4) = vv_future in forall i. FStar.Seq.index future_v i >. mk_u8 0) = let v:t_Array u8 (mk_usize 4) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v (mk_usize 0) ((v.[ mk_usize 0 ] <: u8) +! mk_u8 1 <: u8) in let v:t_Array u8 (mk_usize 4) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v (mk_usize 1) ((v.[ mk_usize 1 ] <: u8) +! mk_u8 1 <: u8) in let v:t_Array u8 (mk_usize 4) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v (mk_usize 2) ((v.[ mk_usize 2 ] <: u8) +! mk_u8 1 <: u8) in let v:t_Array u8 (mk_usize 4) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v (mk_usize 3) ((v.[ mk_usize 3 ] <: u8) +! 
mk_u8 1 <: u8) in v ''' "Attributes.Int_model.fst" = ''' module Attributes.Int_model #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models unfold type t_Int = int let impl_1: Core_models.Clone.t_Clone t_Int = { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Marker.t_Copy t_Int unfold let impl = impl' unfold let add x y = x + y unfold instance impl: Core.Ops.Arith.t_Sub t_Int t_Int = { f_Output = t_Int; f_sub_pre = (fun (self: t_Int) (other: t_Int) -> true); f_sub_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true); f_sub = fun (self: t_Int) (other: t_Int) -> self + other } ''' "Attributes.Issue_1266_.fst" = ''' module Attributes.Issue_1266_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_T (v_Self: Type0) = { f_v_pre:v_Self -> Type0; f_v_post:x: v_Self -> x_future: v_Self -> pred: Type0{pred ==> true}; f_v:x0: v_Self -> Prims.Pure v_Self (f_v_pre x0) (fun result -> f_v_post x0 result) } ''' "Attributes.Issue_1276_.fst" = ''' module Attributes.Issue_1276_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_S = | S : u8 -> t_S let impl_S__f (self: t_S) (self_ self_0_ self_1_ self_2_: u8) : Prims.Pure Prims.unit (requires self._0 =. mk_u8 0 && self_ =. self_1_ && self_2_ =. 
mk_u8 9) (fun _ -> Prims.l_True) = () ''' "Attributes.Issue_evit_57_.fst" = ''' module Attributes.Issue_evit_57_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo : t_Foo let impl_Foo__f (self: t_Foo) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = () ''' "Attributes.Nested_refinement_elim.fst" = ''' module Attributes.Nested_refinement_elim #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let t_DummyRefinement = x: u16{true} let elim_twice (x: t_DummyRefinement) : u16 = ((x <: u16) <: t_DummyRefinement) <: u16 ''' "Attributes.Newtype_pattern.fst" = ''' module Attributes.Newtype_pattern #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_MAX: usize = mk_usize 10 type t_SafeIndex = { f_i:f_i: usize{b2t (f_i <. v_MAX <: bool)} } let impl_SafeIndex__new (i: usize) : Core_models.Option.t_Option t_SafeIndex = if i <. v_MAX then Core_models.Option.Option_Some ({ f_i = i } <: t_SafeIndex) <: Core_models.Option.t_Option t_SafeIndex else Core_models.Option.Option_None <: Core_models.Option.t_Option t_SafeIndex let impl_SafeIndex__as_usize (self: t_SafeIndex) : usize = self.f_i [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Array v_T (mk_usize 10)) t_SafeIndex = { f_Output = v_T; f_index_pre = (fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) -> true); f_index_post = (fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) (out: v_T) -> true); f_index = fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) -> self.[ index.f_i ] } ''' "Attributes.Postprocess_with.Somewhere.fst" = ''' module Attributes.Postprocess_with.Somewhere #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let some_hypothetical_tactic (some_param: u8) : Prims.unit = () ''' "Attributes.Postprocess_with.fst" = ''' module Attributes.Postprocess_with #set-options "--fuel 0 
--ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models [@@FStar.Tactics.postprocess_with (fun _ -> FStar.Tactics.trefl ())] let f (_: Prims.unit) : Prims.unit = () [@@FStar.Tactics.postprocess_with ( fun temp_0_ -> let ():Prims.unit = temp_0_ in Attributes.Postprocess_with.Somewhere.some_hypothetical_tactic (mk_u8 12) )] let g (_: Prims.unit) : Prims.unit = () ''' "Attributes.Pre_post_on_traits_and_impls.fst" = ''' module Attributes.Pre_post_on_traits_and_impls #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Operation (v_Self: Type0) = { f_double_pre:x: u8 -> pred: Type0 { (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (127 <: Hax_lib.Int.t_Int) ==> pred }; f_double_post:x: u8 -> result: u8 -> pred: Type0 { pred ==> ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) = (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int) }; f_double:x0: u8 -> Prims.Pure u8 (f_double_pre x0) (fun result -> f_double_post x0 result) } type t_ViaAdd = | ViaAdd : t_ViaAdd type t_ViaMul = | ViaMul : t_ViaMul [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Operation t_ViaAdd = { f_double_pre = (fun (x: u8) -> (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (127 <: Hax_lib.Int.t_Int)); f_double_post = (fun (x: u8) (result: u8) -> ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) = (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int)); f_double = fun (x: u8) -> x +! 
x } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Operation_for_ViaMul: t_Operation t_ViaMul = { f_double_pre = (fun (x: u8) -> (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (127 <: Hax_lib.Int.t_Int)); f_double_post = (fun (x: u8) (result: u8) -> ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) = (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int)); f_double = fun (x: u8) -> x *! mk_u8 2 } class t_TraitWithRequiresAndEnsures (v_Self: Type0) = { f_method_pre:self_: v_Self -> x: u8 -> pred: Type0{x <. mk_u8 100 ==> pred}; f_method_post:self_: v_Self -> x: u8 -> r: u8 -> pred: Type0{pred ==> r >. mk_u8 88}; f_method:x0: v_Self -> x1: u8 -> Prims.Pure u8 (f_method_pre x0 x1) (fun result -> f_method_post x0 x1 result) } let test (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_TraitWithRequiresAndEnsures v_T) (x: v_T) : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x (mk_u8 99) <: u8) -! mk_u8 88 ''' "Attributes.Props.fst" = ''' module Attributes.Props #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let f (x: Hax_lib.Prop.t_Prop) (y: bool) : Hax_lib.Prop.t_Prop = let (xprop: Hax_lib.Prop.t_Prop):Hax_lib.Prop.t_Prop = b2t y in let p:Hax_lib.Prop.t_Prop = b2t y /\ xprop /\ b2t y /\ b2t y in ~(p \/ b2t y ==> (forall (x: u8). b2t (x <=. Core_models.Num.impl_u8__MAX <: bool)) /\ (exists (x: u16). b2t (x >. mk_u16 300 <: bool))) ''' "Attributes.Refined_arithmetic.fst" = ''' module Attributes.Refined_arithmetic #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo : u8 -> t_Foo [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Ops.Arith.t_Add t_Foo t_Foo = { f_Output = t_Foo; f_add_pre = (fun (self_: t_Foo) (rhs: t_Foo) -> self_._0 <. (mk_u8 255 -! 
rhs._0 <: u8)); f_add_post = (fun (self: t_Foo) (rhs: t_Foo) (out: t_Foo) -> true); f_add = fun (self: t_Foo) (rhs: t_Foo) -> Foo (self._0 +! rhs._0) <: t_Foo } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Core_models.Ops.Arith.t_Mul t_Foo t_Foo = { f_Output = t_Foo; f_mul_pre = (fun (self_: t_Foo) (rhs: t_Foo) -> rhs._0 =. mk_u8 0 || self_._0 <. (mk_u8 255 /! rhs._0 <: u8) ); f_mul_post = (fun (self: t_Foo) (rhs: t_Foo) (out: t_Foo) -> true); f_mul = fun (self: t_Foo) (rhs: t_Foo) -> Foo (self._0 *! rhs._0) <: t_Foo } ''' "Attributes.Refined_indexes.fst" = ''' module Attributes.Refined_indexes #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_MAX: usize = mk_usize 10 type t_MyArray = | MyArray : t_Array u8 (mk_usize 10) -> t_MyArray /// Triple dash comment (** Multiline double star comment Maecenas blandit accumsan feugiat. Done vitae ullamcorper est. Curabitur id dui eget sem viverra interdum. *) let mutation_example (uuse_generic_update_at: t_MyArray) (uuse_specialized_update_at: t_Slice u8) (specialized_as_well: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) : (t_MyArray & t_Slice u8 & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = let uuse_generic_update_at:t_MyArray = Rust_primitives.Hax.update_at uuse_generic_update_at (mk_usize 2) (mk_u8 0) in let uuse_specialized_update_at:t_Slice u8 = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize uuse_specialized_update_at (mk_usize 2) (mk_u8 0) in let specialized_as_well:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice specialized_as_well <: t_Slice u8) (mk_usize 2) (mk_u8 0) <: t_Slice u8) in uuse_generic_update_at, uuse_specialized_update_at, specialized_as_well <: (t_MyArray & t_Slice u8 & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Ops.Index.t_Index t_MyArray usize = { f_Output = u8; f_index_pre = (fun 
(self_: t_MyArray) (index: usize) -> index <. v_MAX); f_index_post = (fun (self: t_MyArray) (index: usize) (out: u8) -> true); f_index = fun (self: t_MyArray) (index: usize) -> self.[ index ] } ''' "Attributes.Refinement_types.fst" = ''' module Attributes.Refinement_types #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. v_MIN && x <=. v_MAX} let bounded_u8 (x: t_BoundedU8 (mk_u8 12) (mk_u8 15)) (y: t_BoundedU8 (mk_u8 10) (mk_u8 11)) : t_BoundedU8 (mk_u8 1) (mk_u8 23) = (x <: u8) +! (y <: u8) <: t_BoundedU8 (mk_u8 1) (mk_u8 23) /// Even `u8` numbers. Constructing pub Even values triggers static /// proofs in the extraction. let t_Even = x: u8{(x %! mk_u8 2 <: u8) =. mk_u8 0} let double (x: u8) : Prims.Pure t_Even (requires x <. mk_u8 127) (fun _ -> Prims.l_True) = x +! x <: t_Even let double_refine (x: u8) : Prims.Pure t_Even (requires x <. mk_u8 127) (fun _ -> Prims.l_True) = x +! x <: t_Even /// A string that contains no space. let t_NoE = x: Alloc.String.t_String { let (_: Core_models.Str.Iter.t_Chars), (out: bool) = Core_models.Iter.Traits.Iterator.f_any #Core_models.Str.Iter.t_Chars #FStar.Tactics.Typeclasses.solve #(FStar.Char.char -> bool) (Core_models.Str.impl_str__chars (Core_models.Ops.Deref.f_deref #Alloc.String.t_String #FStar.Tactics.Typeclasses.solve x <: string) <: Core_models.Str.Iter.t_Chars) (fun ch -> let ch:FStar.Char.char = ch in ch =. ' ' <: bool) in ~.out } /// A modular mutliplicative inverse let t_ModInverse (v_MOD: u32) = n: u32 { (((cast (n <: u32) <: u128) *! (cast (v_MOD <: u32) <: u128) <: u128) %! (cast (v_MOD <: u32) <: u128) <: u128) =. mk_u128 1 } /// A field element let t_FieldElement = x: u16{x <=. mk_u16 2347} /// Example of a specific constraint on a value let t_CompressionFactor = x: u8{x =. mk_u8 4 || x =. mk_u8 5 || x =. mk_u8 10 || x =. 
mk_u8 11} let t_BoundedAbsI16 (v_B: usize) = x: i16 { (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) < (32768 <: Hax_lib.Int.t_Int) && (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) >= (- (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) && (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) } let impl (v_B: usize) : Core_models.Clone.t_Clone (t_BoundedAbsI16 v_B) = { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': v_B: usize -> Core_models.Marker.t_Copy (t_BoundedAbsI16 v_B) unfold let impl_1 (v_B: usize) = impl_1' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_3': v_B: usize -> Core_models.Marker.t_StructuralPartialEq (t_BoundedAbsI16 v_B) unfold let impl_3 (v_B: usize) = impl_3' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_4': v_B: usize -> Core_models.Cmp.t_PartialEq (t_BoundedAbsI16 v_B) (t_BoundedAbsI16 v_B) unfold let impl_4 (v_B: usize) = impl_4' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': v_B: usize -> Core_models.Cmp.t_Eq (t_BoundedAbsI16 v_B) unfold let impl_2 (v_B: usize) = impl_2' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_6': v_B: usize -> Core_models.Cmp.t_PartialOrd (t_BoundedAbsI16 v_B) (t_BoundedAbsI16 v_B) unfold let impl_6 (v_B: usize) = impl_6' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_5': v_B: usize -> Core_models.Cmp.t_Ord (t_BoundedAbsI16 v_B) unfold let impl_5 (v_B: usize) = impl_5' v_B [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_7': v_B: usize -> Core_models.Hash.t_Hash (t_BoundedAbsI16 v_B) unfold let impl_7 (v_B: usize) = impl_7' v_B let double_abs_i16 (v_N v_M: usize) (x: t_BoundedAbsI16 v_N) : Prims.Pure (t_BoundedAbsI16 v_M) (requires (Rust_primitives.Hax.Int.from_machine v_M <: 
Hax_lib.Int.t_Int) < (32768 <: Hax_lib.Int.t_Int) && (Rust_primitives.Hax.Int.from_machine v_M <: Hax_lib.Int.t_Int) = ((Rust_primitives.Hax.Int.from_machine v_N <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int)) (fun _ -> Prims.l_True) = Core_models.Ops.Arith.f_mul #(t_BoundedAbsI16 v_N) #i16 #FStar.Tactics.Typeclasses.solve x (mk_i16 2) <: t_BoundedAbsI16 v_M ''' "Attributes.Reorder.fst" = ''' module Attributes.Reorder #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = { f_field_3_:u8; f_field_4_:u8; f_field_2_:u8; f_field_1_:u8 } type t_Bar = | Bar_A { f_a_field_3_:u8; f_a_field_1_:u8; f_a_field_2_:u8 }: t_Bar | Bar_B { f_b_field_1_:u8; f_b_field_3_:u8; f_b_field_2_:u8 }: t_Bar ''' "Attributes.Replace_body.fst" = ''' module Attributes.Replace_body #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let f (x y: u8) : u8 = magic x type t_Foo = | Foo : t_Foo let impl_Foo__assoc_fn (self: t_Foo) (x: u8) : Prims.unit = (magic (self <: t_Foo)) x [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Alloc.String.t_ToString t_Foo = { f_to_string_pre = (fun (self: t_Foo) -> true); f_to_string_post = (fun (self: t_Foo) (out: Alloc.String.t_String) -> true); f_to_string = fun (self: t_Foo) -> "The type was t_Foo" } ''' "Attributes.Requires_mut.fst" = ''' module Attributes.Requires_mut #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Foo (v_Self: Type0) = { f_f_pre:x: u8 -> y: u8 -> pred: Type0 { ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) < (254 <: Hax_lib.Int.t_Int) ==> pred }; f_f_post:x: u8 -> y: u8 -> x1: (u8 & u8) -> pred: Type0 { pred ==> (let (y_future: u8), (output_variable: u8) = x1 in output_variable =. 
y_future) }; f_f:x0: u8 -> x1: u8 -> Prims.Pure (u8 & u8) (f_f_pre x0 x1) (fun result -> f_f_post x0 x1 result); f_g_pre:u8 -> u8 -> Type0; f_g_post:u8 -> u8 -> u8 -> Type0; f_g:x0: u8 -> x1: u8 -> Prims.Pure u8 (f_g_pre x0 x1) (fun result -> f_g_post x0 x1 result); f_h_pre:u8 -> u8 -> Type0; f_h_post:u8 -> u8 -> Prims.unit -> Type0; f_h:x0: u8 -> x1: u8 -> Prims.Pure Prims.unit (f_h_pre x0 x1) (fun result -> f_h_post x0 x1 result); f_i_pre:u8 -> u8 -> Type0; f_i_post:u8 -> u8 -> u8 -> Type0; f_i:x0: u8 -> x1: u8 -> Prims.Pure u8 (f_i_pre x0 x1) (fun result -> f_i_post x0 x1 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Foo Prims.unit = { f_f_pre = (fun (x: u8) (y: u8) -> ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) + (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) < (254 <: Hax_lib.Int.t_Int)); f_f_post = (fun (x: u8) (y: u8) (y_future, output_variable: (u8 & u8)) -> output_variable =. y_future); f_f = (fun (x: u8) (y: u8) -> let y:u8 = y +! x in let hax_temp_output:u8 = y in y, hax_temp_output <: (u8 & u8)); f_g_pre = (fun (x: u8) (y: u8) -> true); f_g_post = (fun (x: u8) (y: u8) (output_variable: u8) -> output_variable =. y); f_g = (fun (x: u8) (y: u8) -> y); f_h_pre = (fun (x: u8) (y: u8) -> true); f_h_post = (fun (x: u8) (y: u8) (output_variable: Prims.unit) -> output_variable =. (() <: Prims.unit)); f_h = (fun (x: u8) (y: u8) -> () <: Prims.unit); f_i_pre = (fun (x: u8) (y: u8) -> true); f_i_post = (fun (x: u8) (y: u8) (y_future: u8) -> y_future =. 
y); f_i = fun (x: u8) (y: u8) -> let _:Prims.unit = () <: Prims.unit in y } ''' "Attributes.Verifcation_status.fst" = ''' module Attributes.Verifcation_status #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models #push-options "--admit_smt_queries true" let a_function_which_only_laxes (_: Prims.unit) : Prims.unit = Hax_lib.v_assert false #pop-options let a_panicfree_function (_: Prims.unit) : Prims.Pure u8 Prims.l_True (ensures fun x -> let x:u8 = x in false) = let a:u8 = mk_u8 3 in let b:u8 = mk_u8 6 in let result:u8 = a +! b in let _:Prims.unit = admit () (* Panic freedom *) in result let another_panicfree_function (_: Prims.unit) : Prims.Pure Prims.unit Prims.l_True (ensures fun x -> let x:Prims.unit = x in false) = let not_much:i32 = mk_i32 0 in let nothing:i32 = mk_i32 0 in let still_not_much:i32 = not_much +! nothing in admit () (* Panic freedom *) ''' "Attributes.fst" = ''' module Attributes #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let u32_max: u32 = mk_u32 90000 /// A doc comment on `add3` ///another doc comment on add3 let add3 (x y z: u32) : Prims.Pure u32 (requires x >. mk_u32 10 && y >. mk_u32 10 && z >. mk_u32 10 && ((x +! y <: u32) +! z <: u32) <. u32_max) (ensures fun result -> let result:u32 = result in b2t true ==> b2t (result >. mk_u32 32 <: bool)) = (x +! y <: u32) +! z let swap_and_mut_req_ens (x y: u32) : Prims.Pure (u32 & u32 & u32) (requires x <. mk_u32 40 && y <. mk_u32 300) (ensures fun temp_0_ -> let (x_future: u32), (y_future: u32), (result: u32) = temp_0_ in x_future =. y && y_future =. x && result =. (x +! y <: u32)) = let x0:u32 = x in let x:u32 = y in let y:u32 = x0 in let hax_temp_output:u32 = x +! y in x, y, hax_temp_output <: (u32 & u32 & u32) let issue_844_ (e_x: u8) : Prims.Pure u8 Prims.l_True (ensures fun e_x_future -> let e_x_future:u8 = e_x_future in true) = e_x let add3_lemma (x: u32) : Lemma (ensures x <=. mk_u32 10 || x >=. (u32_max /! 
mk_u32 3 <: u32) || (add3 x x x <: u32) =. (x *! mk_u32 3 <: u32)) = () let dummy_function (x: u32) : u32 = x let apply_dummy_function_lemma (x: u32) : Lemma (ensures x =. (dummy_function x <: u32)) [SMTPat x] = () type t_Foo = { f_x:u32; f_y:f_y: u32{b2t (f_y >. mk_u32 3 <: bool)}; f_z:f_z: u32{b2t (((f_y +! f_x <: u32) +! f_z <: u32) >. mk_u32 3 <: bool)} } let props (_: Prims.unit) : Prims.unit = let _:Prims.unit = Hax_lib.v_assume True in let _:Prims.unit = Hax_lib.assert_prop True in let _:Prims.unit = () in () let inlined_code__v_V: u8 = mk_u8 12 let before_inlined_code = "example before" let inlined_code (foo: t_Foo) : Prims.unit = let vv_a:i32 = mk_i32 13 in let _:Prims.unit = let x = foo.f_x in let { f_x = _ ; f_y = y ; f_z = _ } = foo in add3 ((fun _ -> 3ul) foo) vv_a inlined_code__v_V y in () let inlined_code_after = "example after" let before_1 = "example before 1" let before_2 = "example before 2" let before_3 = "example before 3" let mutliple_before_after (_: Prims.unit) : Prims.unit = () let after 1 = "example after 1" let after 2 = "example after 2" let after 3 = "example after 3" unfold let some_function _ = "hello from F*" let rec fib (x: usize) : Prims.Tot usize (decreases x) = if x <=. mk_usize 2 then x else Core_models.Num.impl_usize__wrapping_add (fib (x -! mk_usize 1 <: usize) <: usize) (fib (x -! 
mk_usize 2 <: usize) <: usize) ''' ================================================ FILE: test-harness/src/snapshots/toolchain__constructor-as-closure into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: constructor-as-closure manifest: constructor-as-closure/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Constructor_as_closure.fst" = ''' module Constructor_as_closure #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Test = | Test : i32 -> t_Test let impl_Test__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Test = Core_models.Option.impl__map #i32 #t_Test #(i32 -> t_Test) x Test type t_Context = | Context_A : i32 -> t_Context | Context_B : i32 -> t_Context let impl_Context__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Context = Core_models.Option.impl__map #i32 #t_Context #(i32 -> t_Context) x Context_B ''' ================================================ FILE: test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: cyclic-modules manifest: cyclic-modules/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Cyclic_modules.B.fst" = ''' module Cyclic_modules.B #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle {g as g} ''' "Cyclic_modules.Bundle.fst" = ''' module Cyclic_modules.Bundle #set-options 
"--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let f (_: Prims.unit) : Prims.unit = () let h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i () let g (_: Prims.unit) : Prims.unit = f () let h (_: Prims.unit) : Prims.unit = let _:Prims.unit = g () in Cyclic_modules.C.i () ''' "Cyclic_modules.Bundle_d.fst" = ''' module Cyclic_modules.Bundle_d #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let d1 (_: Prims.unit) : Prims.unit = () let e1 (_: Prims.unit) : Prims.unit = d1 () let de1 (_: Prims.unit) : Prims.unit = e1 () let d2 (_: Prims.unit) : Prims.unit = de1 () ''' "Cyclic_modules.Bundle_disjoint_cycle_a.fst" = ''' module Cyclic_modules.Bundle_disjoint_cycle_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let g (_: Prims.unit) : Prims.unit = () let h (_: Prims.unit) : Prims.unit = () let f (_: Prims.unit) : Prims.unit = h () let i (_: Prims.unit) : Prims.unit = g () ''' "Cyclic_modules.Bundle_enums_a.fst" = ''' module Cyclic_modules.Bundle_enums_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_U = | U_A : t_U | U_B : t_U | U_C : Alloc.Vec.t_Vec t_T Alloc.Alloc.t_Global -> t_U and t_T__from__enums_b = | T_A : t_T__from__enums_b | T_B : t_T__from__enums_b | T_C : Alloc.Vec.t_Vec t_T Alloc.Alloc.t_Global -> t_T__from__enums_b and t_T = | T_A__from__enums_a : t_T | T_B__from__enums_a : t_T | T_C__from__enums_a : Alloc.Vec.t_Vec t_U Alloc.Alloc.t_Global -> t_T | T_D : Alloc.Vec.t_Vec t_T__from__enums_b Alloc.Alloc.t_Global -> t_T let f (_: Prims.unit) : t_T__from__enums_b = T_A <: t_T__from__enums_b ''' "Cyclic_modules.Bundle_late_skip_a.fst" = ''' module Cyclic_modules.Bundle_late_skip_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let rec f (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = f__from__late_skip_a () and f__from__late_skip_a (_: Prims.unit) : Prims.unit = f () ''' 
"Cyclic_modules.Bundle_m1.fst" = ''' module Cyclic_modules.Bundle_m1 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let d (_: Prims.unit) : Prims.unit = () let c (_: Prims.unit) : Prims.unit = () let a (_: Prims.unit) : Prims.unit = c () let b (_: Prims.unit) : Prims.unit = let _:Prims.unit = a () in d () ''' "Cyclic_modules.Bundle_rec1_same_name.fst" = ''' module Cyclic_modules.Bundle_rec1_same_name #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let rec f (x: i32) : i32 = if x >. mk_i32 0 then f__from__rec1_same_name (x -! mk_i32 1 <: i32) else mk_i32 0 and f__from__rec1_same_name (x: i32) : i32 = f x ''' "Cyclic_modules.Bundle_typ_a.fst" = ''' module Cyclic_modules.Bundle_typ_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_T1 = | T1_T1 : t_T1 type t_T = | T_T : t_T1 -> t_T let t_T1_cast_to_repr (x: t_T1) : isize = match x <: t_T1 with | T1_T1 -> mk_isize 0 type t_T2 = | T2_T2 : t_T -> t_T2 type t_T2Rec = | T2Rec_T2 : t_TRec -> t_T2Rec and t_T1Rec = | T1Rec_T1 : t_T2Rec -> t_T1Rec and t_TRec = | TRec_T : t_T1Rec -> t_TRec | TRec_Empty : t_TRec ''' "Cyclic_modules.Bundle_variant_constructor_a.fst" = ''' module Cyclic_modules.Bundle_variant_constructor_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Context = | Context_A : i32 -> t_Context | Context_B : i32 -> t_Context let impl__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Context = Core_models.Option.impl__map #i32 #t_Context #(i32 -> t_Context) x Context_A let h (_: Prims.unit) : t_Context = Context_A (mk_i32 1) <: t_Context let f (_: Prims.unit) : t_Context = h () ''' "Cyclic_modules.C.fst" = ''' module Cyclic_modules.C #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let i (_: Prims.unit) : Prims.unit = () ''' "Cyclic_modules.D.fst" = ''' module Cyclic_modules.D #set-options "--fuel 0 --ifuel 1 --z3rlimit 
15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_d {d1 as d1} include Cyclic_modules.Bundle_d {d2 as d2} ''' "Cyclic_modules.De.fst" = ''' module Cyclic_modules.De #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_d {de1 as de1} ''' "Cyclic_modules.Disjoint_cycle_a.fst" = ''' module Cyclic_modules.Disjoint_cycle_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_disjoint_cycle_a {f as f} include Cyclic_modules.Bundle_disjoint_cycle_a {g as g} ''' "Cyclic_modules.Disjoint_cycle_b.fst" = ''' module Cyclic_modules.Disjoint_cycle_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_disjoint_cycle_a {h as h} include Cyclic_modules.Bundle_disjoint_cycle_a {i as i} ''' "Cyclic_modules.E.fst" = ''' module Cyclic_modules.E #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_d {e1 as e1} ''' "Cyclic_modules.Enums_a.fst" = ''' module Cyclic_modules.Enums_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_enums_a {t_T as t_T} include Cyclic_modules.Bundle_enums_a {T_A__from__enums_a as T_A} include Cyclic_modules.Bundle_enums_a {T_B__from__enums_a as T_B} include Cyclic_modules.Bundle_enums_a {T_C__from__enums_a as T_C} include Cyclic_modules.Bundle_enums_a {T_D as T_D} ''' "Cyclic_modules.Enums_b.fst" = ''' module Cyclic_modules.Enums_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_enums_a {t_U as t_U} include Cyclic_modules.Bundle_enums_a {U_A as U_A} include Cyclic_modules.Bundle_enums_a {U_B as U_B} include Cyclic_modules.Bundle_enums_a {U_C as U_C} include Cyclic_modules.Bundle_enums_a {t_T__from__enums_b as t_T} include Cyclic_modules.Bundle_enums_a {T_A as T_A} include 
Cyclic_modules.Bundle_enums_a {T_B as T_B} include Cyclic_modules.Bundle_enums_a {T_C as T_C} include Cyclic_modules.Bundle_enums_a {f as f} ''' "Cyclic_modules.Issue_1823_.First_example.A.fst" = ''' module Cyclic_modules.Issue_1823_.First_example.A #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Issue_1823_.First_example.Bundle {t_A as t_A} include Cyclic_modules.Issue_1823_.First_example.Bundle {A as A} include Cyclic_modules.Issue_1823_.First_example.Bundle {impl__mkb as impl_A__mkb} ''' "Cyclic_modules.Issue_1823_.First_example.B.fst" = ''' module Cyclic_modules.Issue_1823_.First_example.B #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Issue_1823_.First_example.Bundle {t_B as t_B} include Cyclic_modules.Issue_1823_.First_example.Bundle {B as B} include Cyclic_modules.Issue_1823_.First_example.Bundle {impl__mka as impl_B__mka} ''' "Cyclic_modules.Issue_1823_.First_example.Bundle.fst" = ''' module Cyclic_modules.Issue_1823_.First_example.Bundle #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_A = | A : t_A type t_B = | B : t_B let impl__mkb (self: t_A) : t_B = B <: t_B let impl__mka (self: t_B) : t_A = A <: t_A ''' "Cyclic_modules.Issue_1823_.Second_example.A.fst" = ''' module Cyclic_modules.Issue_1823_.Second_example.A #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Issue_1823_.Second_example.Bundle {call_b as call_b} include Cyclic_modules.Issue_1823_.Second_example.Bundle {a as a} ''' "Cyclic_modules.Issue_1823_.Second_example.B.fst" = ''' module Cyclic_modules.Issue_1823_.Second_example.B #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Issue_1823_.Second_example.Bundle {call_a as call_a} include Cyclic_modules.Issue_1823_.Second_example.Bundle {b as b} ''' 
"Cyclic_modules.Issue_1823_.Second_example.Bundle.fst" = ''' module Cyclic_modules.Issue_1823_.Second_example.Bundle #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let a (_: Prims.unit) : Prims.unit = () let call_a (_: Prims.unit) : Prims.unit = a () let b (_: Prims.unit) : Prims.unit = () let call_b (_: Prims.unit) : Prims.unit = b () ''' "Cyclic_modules.Late_skip_a.fst" = ''' module Cyclic_modules.Late_skip_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_late_skip_a {f__from__late_skip_a as f} ''' "Cyclic_modules.Late_skip_b.fst" = ''' module Cyclic_modules.Late_skip_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_late_skip_a {f as f} ''' "Cyclic_modules.M1.fst" = ''' module Cyclic_modules.M1 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_m1 {a as a} ''' "Cyclic_modules.M2.fst" = ''' module Cyclic_modules.M2 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_m1 {d as d} include Cyclic_modules.Bundle_m1 {b as b} include Cyclic_modules.Bundle_m1 {c as c} ''' "Cyclic_modules.Rec.fst" = ''' module Cyclic_modules.Rec #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_T = | T_t1 : t_T | T_t2 : t_T let t_T_cast_to_repr (x: t_T) : isize = match x <: t_T with | T_t1 -> mk_isize 0 | T_t2 -> mk_isize 1 let rec hf (x: t_T) : t_T = match x <: t_T with | T_t1 -> hf (T_t2 <: t_T) | T_t2 -> x let rec g2 (x: t_T) : t_T = match x <: t_T with | T_t1 -> g1 x | T_t2 -> hf x and g1 (x: t_T) : t_T = match x <: t_T with | T_t1 -> g2 x | T_t2 -> T_t1 <: t_T ''' "Cyclic_modules.Rec1_same_name.fst" = ''' module Cyclic_modules.Rec1_same_name #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_rec1_same_name 
{f__from__rec1_same_name as f} ''' "Cyclic_modules.Rec2_same_name.fst" = ''' module Cyclic_modules.Rec2_same_name #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_rec1_same_name {f as f} ''' "Cyclic_modules.Typ_a.fst" = ''' module Cyclic_modules.Typ_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_typ_a {t_TRec as t_TRec} include Cyclic_modules.Bundle_typ_a {TRec_T as TRec_T} include Cyclic_modules.Bundle_typ_a {TRec_Empty as TRec_Empty} include Cyclic_modules.Bundle_typ_a {t_T as t_T} include Cyclic_modules.Bundle_typ_a {T_T as T_T} ''' "Cyclic_modules.Typ_b.fst" = ''' module Cyclic_modules.Typ_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_typ_a {t_T1Rec as t_T1Rec} include Cyclic_modules.Bundle_typ_a {T1Rec_T1 as T1Rec_T1} include Cyclic_modules.Bundle_typ_a {t_T2Rec as t_T2Rec} include Cyclic_modules.Bundle_typ_a {T2Rec_T2 as T2Rec_T2} include Cyclic_modules.Bundle_typ_a {t_T1_cast_to_repr as t_T1_cast_to_repr} include Cyclic_modules.Bundle_typ_a {t_T1 as t_T1} include Cyclic_modules.Bundle_typ_a {T1_T1 as T1_T1} include Cyclic_modules.Bundle_typ_a {t_T2 as t_T2} include Cyclic_modules.Bundle_typ_a {T2_T2 as T2_T2} ''' "Cyclic_modules.Variant_constructor_a.fst" = ''' module Cyclic_modules.Variant_constructor_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_variant_constructor_a {t_Context as t_Context} include Cyclic_modules.Bundle_variant_constructor_a {Context_A as Context_A} include Cyclic_modules.Bundle_variant_constructor_a {Context_B as Context_B} include Cyclic_modules.Bundle_variant_constructor_a {f as f} include Cyclic_modules.Bundle_variant_constructor_a {impl__test as impl_Context__test} ''' "Cyclic_modules.Variant_constructor_b.fst" = ''' module Cyclic_modules.Variant_constructor_b #set-options "--fuel 
0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle_variant_constructor_a {h as h} ''' "Cyclic_modules.fst" = ''' module Cyclic_modules #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models include Cyclic_modules.Bundle {f as f} include Cyclic_modules.Bundle {h as h} include Cyclic_modules.Bundle {h2 as h2} ''' ================================================ FILE: test-harness/src/snapshots/toolchain__cyclic-modules into-lean.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: lean info: name: cyclic-modules manifest: cyclic-modules/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "cyclic_modules.lean" = ''' -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false namespace cyclic_modules.typ_b inductive T1 : Type | T1 : T1 end cyclic_modules.typ_b namespace cyclic_modules.typ_a inductive T : Type | T : cyclic_modules.typ_b.T1 -> T end cyclic_modules.typ_a namespace cyclic_modules.typ_b @[spec] def T1_cast_to_repr (x : T1) : RustM isize := do match x with | (T1.T1 ) => do (pure (0 : isize)) inductive T2 : Type | T2 : cyclic_modules.typ_a.T -> T2 end cyclic_modules.typ_b namespace cyclic_modules @[spec] def f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules namespace cyclic_modules.b @[spec] def g (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.f rust_primitives.hax.Tuple0.mk) end cyclic_modules.b namespace 
cyclic_modules.c @[spec] def i (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.c namespace cyclic_modules @[spec] def h (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ ← (cyclic_modules.b.g rust_primitives.hax.Tuple0.mk); (cyclic_modules.c.i rust_primitives.hax.Tuple0.mk) @[spec] def h2 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.c.i rust_primitives.hax.Tuple0.mk) end cyclic_modules namespace cyclic_modules.d @[spec] def d1 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.d namespace cyclic_modules.e @[spec] def e1 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.d.d1 rust_primitives.hax.Tuple0.mk) end cyclic_modules.e namespace cyclic_modules.de @[spec] def de1 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.e.e1 rust_primitives.hax.Tuple0.mk) end cyclic_modules.de namespace cyclic_modules.d @[spec] def d2 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.de.de1 rust_primitives.hax.Tuple0.mk) end cyclic_modules.d namespace cyclic_modules.rec inductive T : Type | t1 : T | t2 : T @[spec] def T_cast_to_repr (x : T) : RustM isize := do match x with | (T.t1 ) => do (pure (0 : isize)) | (T.t2 ) => do (pure (1 : isize)) end cyclic_modules.rec namespace cyclic_modules.m2 @[spec] def d (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) @[spec] def c (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.m2 namespace cyclic_modules.m1 @[spec] def a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.m2.c rust_primitives.hax.Tuple0.mk) end 
cyclic_modules.m1 namespace cyclic_modules.m2 @[spec] def b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ ← (cyclic_modules.m1.a rust_primitives.hax.Tuple0.mk); (d rust_primitives.hax.Tuple0.mk) end cyclic_modules.m2 namespace cyclic_modules.disjoint_cycle_a @[spec] def g (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.disjoint_cycle_a namespace cyclic_modules.disjoint_cycle_b @[spec] def h (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.disjoint_cycle_b namespace cyclic_modules.disjoint_cycle_a @[spec] def f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.disjoint_cycle_b.h rust_primitives.hax.Tuple0.mk) end cyclic_modules.disjoint_cycle_a namespace cyclic_modules.disjoint_cycle_b @[spec] def i (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.disjoint_cycle_a.g rust_primitives.hax.Tuple0.mk) end cyclic_modules.disjoint_cycle_b namespace cyclic_modules.variant_constructor_a inductive Context : Type | A : i32 -> Context | B : i32 -> Context @[spec] def Impl.test (x : (core_models.option.Option i32)) : RustM (core_models.option.Option Context) := do (core_models.option.Impl.map i32 Context (i32 -> RustM Context) x Context.A) end cyclic_modules.variant_constructor_a namespace cyclic_modules.variant_constructor_b @[spec] def h (_ : rust_primitives.hax.Tuple0) : RustM cyclic_modules.variant_constructor_a.Context := do (pure (cyclic_modules.variant_constructor_a.Context.A (1 : i32))) end cyclic_modules.variant_constructor_b namespace cyclic_modules.variant_constructor_a @[spec] def f (_ : rust_primitives.hax.Tuple0) : RustM Context := do (cyclic_modules.variant_constructor_b.h rust_primitives.hax.Tuple0.mk) end cyclic_modules.variant_constructor_a namespace 
cyclic_modules.issue_1823.first_example.a structure A where -- no fields end cyclic_modules.issue_1823.first_example.a namespace cyclic_modules.issue_1823.first_example.b structure B where -- no fields end cyclic_modules.issue_1823.first_example.b namespace cyclic_modules.issue_1823.first_example.a @[spec] def Impl.mkb (self : A) : RustM cyclic_modules.issue_1823.first_example.b.B := do (pure cyclic_modules.issue_1823.first_example.b.B.mk) end cyclic_modules.issue_1823.first_example.a namespace cyclic_modules.issue_1823.first_example.b @[spec] def Impl.mka (self : B) : RustM cyclic_modules.issue_1823.first_example.a.A := do (pure cyclic_modules.issue_1823.first_example.a.A.mk) end cyclic_modules.issue_1823.first_example.b namespace cyclic_modules.issue_1823.second_example.a @[spec] def a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.issue_1823.second_example.a namespace cyclic_modules.issue_1823.second_example.b @[spec] def call_a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.issue_1823.second_example.a.a rust_primitives.hax.Tuple0.mk) @[spec] def b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end cyclic_modules.issue_1823.second_example.b namespace cyclic_modules.issue_1823.second_example.a @[spec] def call_b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.issue_1823.second_example.b.b rust_primitives.hax.Tuple0.mk) end cyclic_modules.issue_1823.second_example.a namespace cyclic_modules.typ_b inductive T2Rec : Type | T2 : cyclic_modules.typ_a.TRec -> T2Rec inductive T1Rec : Type | T1 : T2Rec -> T1Rec end cyclic_modules.typ_b namespace cyclic_modules.typ_a inductive TRec : Type | T : cyclic_modules.typ_b.T1Rec -> TRec | Empty : TRec end cyclic_modules.typ_a namespace cyclic_modules.rec @[spec] def hf (x : T) : RustM T := do match x 
with | (T.t1 ) => do (hf T.t2) | (T.t2 ) => do (pure x) partial_fixpoint end cyclic_modules.rec namespace cyclic_modules.rec2_same_name @[spec] def f (x : i32) : RustM i32 := do if (← (x >? (0 : i32))) then do (cyclic_modules.rec1_same_name.f (← (x -? (1 : i32)))) else do (pure (0 : i32)) end cyclic_modules.rec2_same_name namespace cyclic_modules.rec1_same_name @[spec] def f (x : i32) : RustM i32 := do (cyclic_modules.rec2_same_name.f x) end cyclic_modules.rec1_same_name namespace cyclic_modules.late_skip_b def f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.late_skip_a.f rust_primitives.hax.Tuple0.mk) set_option hax_mvcgen.specset "bv" in @[hax_spec] def f.spec (_ : rust_primitives.hax.Tuple0) : Spec (requires := do (pure true)) (ensures := fun _ => pure True) (f ⟨⟩) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [f] <;> bv_decide } end cyclic_modules.late_skip_b namespace cyclic_modules.late_skip_a @[spec] def f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (cyclic_modules.late_skip_b.f rust_primitives.hax.Tuple0.mk) end cyclic_modules.late_skip_a namespace cyclic_modules.enums_b inductive U : Type | A : U | B : U | C : (alloc.vec.Vec cyclic_modules.enums_a.T alloc.alloc.Global) -> U inductive T : Type | A : T | B : T | C : (alloc.vec.Vec cyclic_modules.enums_a.T alloc.alloc.Global) -> T end cyclic_modules.enums_b namespace cyclic_modules.enums_a inductive T : Type | A : T | B : T | C : (alloc.vec.Vec cyclic_modules.enums_b.U alloc.alloc.Global) -> T | D : (alloc.vec.Vec cyclic_modules.enums_b.T alloc.alloc.Global) -> T end cyclic_modules.enums_a namespace cyclic_modules.enums_b @[spec] def f (_ : rust_primitives.hax.Tuple0) : RustM T := do (pure T.A) end cyclic_modules.enums_b namespace cyclic_modules.rec @[spec] def g2 (x : T) : RustM T := do match x with | (T.t1 ) => do (g1 x) | (T.t2 ) => do (hf x) 
@[spec] def g1 (x : T) : RustM T := do match x with | (T.t1 ) => do (g2 x) | (T.t2 ) => do (pure T.t1) end cyclic_modules.rec ''' ================================================ FILE: test-harness/src/snapshots/toolchain__dyn into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: dyn manifest: dyn/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Dyn.fst" = ''' module Dyn #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Printable (v_Self: Type0) (v_S: Type0) = { f_stringify_pre:v_Self -> Type0; f_stringify_post:v_Self -> v_S -> Type0; f_stringify:x0: v_Self -> Prims.Pure v_S (f_stringify_pre x0) (fun result -> f_stringify_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Printable i32 Alloc.String.t_String = { f_stringify_pre = (fun (self: i32) -> true); f_stringify_post = (fun (self: i32) (out: Alloc.String.t_String) -> true); f_stringify = fun (self: i32) -> Alloc.String.f_to_string #i32 #FStar.Tactics.Typeclasses.solve self } let print (a: dyn 1 (fun z -> t_Printable z Alloc.String.t_String)) : Prims.unit = let args:Alloc.String.t_String = f_stringify #(dyn 1 (fun z -> t_Printable z Alloc.String.t_String)) #Alloc.String.t_String #FStar.Tactics.Typeclasses.solve a <: Alloc.String.t_String in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) = let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2) (mk_usize 1) (let list = [""; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length 
list) 2); Rust_primitives.Hax.array_of_list 2 list) args <: Core_models.Fmt.t_Arguments) in let _:Prims.unit = () in () ''' ================================================ FILE: test-harness/src/snapshots/toolchain__enum-repr into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: enum-repr manifest: enum-repr/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Enum_repr.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Inductive t_EnumWithRepr : Type := | EnumWithRepr_ExplicitDiscr1 | EnumWithRepr_ExplicitDiscr2 | EnumWithRepr_ImplicitDiscrEmptyTuple | EnumWithRepr_ImplicitDiscrEmptyStruct. Definition anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 : t_u16 := (1 : t_u16). Definition anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 : t_u16 := (5 : t_u16). Definition t_EnumWithRepr_cast_to_repr (x : t_EnumWithRepr) : t_u16 := match x with | EnumWithRepr_ExplicitDiscr1 => anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 | EnumWithRepr_ExplicitDiscr2 => anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 | EnumWithRepr_ImplicitDiscrEmptyTuple => f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((1 : t_u16)) | EnumWithRepr_ImplicitDiscrEmptyStruct => f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((2 : t_u16)) end. 
Inductive t_ImplicitReprs : Type := | ImplicitReprs_A | ImplicitReprs_B | ImplicitReprs_C | ImplicitReprs_D | ImplicitReprs_E | ImplicitReprs_F | ImplicitReprs_G | ImplicitReprs_H | ImplicitReprs_I. Definition anon_const_ImplicitReprs_E__anon_const_0 : t_u64 := (30 : t_u64). Definition t_ImplicitReprs_cast_to_repr (x : t_ImplicitReprs) : t_u64 := match x with | ImplicitReprs_A => (0 : t_u64) | ImplicitReprs_B => (1 : t_u64) | ImplicitReprs_C => (2 : t_u64) | ImplicitReprs_D => (3 : t_u64) | ImplicitReprs_E => anon_const_ImplicitReprs_E__anon_const_0 | ImplicitReprs_F => f_add (anon_const_ImplicitReprs_E__anon_const_0) ((1 : t_u64)) | ImplicitReprs_G => f_add (anon_const_ImplicitReprs_E__anon_const_0) ((2 : t_u64)) | ImplicitReprs_H => f_add (anon_const_ImplicitReprs_E__anon_const_0) ((3 : t_u64)) | ImplicitReprs_I => f_add (anon_const_ImplicitReprs_E__anon_const_0) ((4 : t_u64)) end. Definition f '(_ : unit) : t_u32 := let e_x := cast (f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((0 : t_u16))) in f_add (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyTuple))) (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyStruct))). Definition f__v_CONST : t_u16 := cast (f_add (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0) ((0 : t_u16))). Definition get_repr (x : t_EnumWithRepr) : t_u16 := t_EnumWithRepr_cast_to_repr (x). Definition get_casted_repr (x : t_EnumWithRepr) : t_u64 := cast (t_EnumWithRepr_cast_to_repr (x)). 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Enum_repr.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: enum-repr manifest: enum-repr/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Enum_repr.fst" = ''' module Enum_repr #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_EnumWithRepr = | EnumWithRepr_ExplicitDiscr1 : t_EnumWithRepr | EnumWithRepr_ExplicitDiscr2 : t_EnumWithRepr | EnumWithRepr_ImplicitDiscrEmptyTuple : t_EnumWithRepr | EnumWithRepr_ImplicitDiscrEmptyStruct : t_EnumWithRepr let anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0: u16 = mk_u16 1 let anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0: u16 = mk_u16 5 let t_EnumWithRepr_cast_to_repr (x: t_EnumWithRepr) : u16 = match x <: t_EnumWithRepr with | EnumWithRepr_ExplicitDiscr1 -> anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 | EnumWithRepr_ExplicitDiscr2 -> anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 | EnumWithRepr_ImplicitDiscrEmptyTuple -> anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! mk_u16 1 | EnumWithRepr_ImplicitDiscrEmptyStruct -> anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! 
mk_u16 2 type t_ImplicitReprs = | ImplicitReprs_A : t_ImplicitReprs | ImplicitReprs_B : t_ImplicitReprs | ImplicitReprs_C : t_ImplicitReprs | ImplicitReprs_D : t_ImplicitReprs | ImplicitReprs_E : t_ImplicitReprs | ImplicitReprs_F : t_ImplicitReprs | ImplicitReprs_G : t_ImplicitReprs | ImplicitReprs_H : t_ImplicitReprs | ImplicitReprs_I : t_ImplicitReprs let anon_const_ImplicitReprs_E__anon_const_0: u64 = mk_u64 30 let t_ImplicitReprs_cast_to_repr (x: t_ImplicitReprs) : u64 = match x <: t_ImplicitReprs with | ImplicitReprs_A -> mk_u64 0 | ImplicitReprs_B -> mk_u64 1 | ImplicitReprs_C -> mk_u64 2 | ImplicitReprs_D -> mk_u64 3 | ImplicitReprs_E -> anon_const_ImplicitReprs_E__anon_const_0 | ImplicitReprs_F -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 1 | ImplicitReprs_G -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 2 | ImplicitReprs_H -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 3 | ImplicitReprs_I -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 4 let f (_: Prims.unit) : u32 = let e_x:u16 = cast (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! mk_u16 0 <: u16) <: u16 in (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyTuple <: t_EnumWithRepr) <: u16 ) <: u32) +! (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyStruct <: t_EnumWithRepr) <: u16) <: u32) let f__v_CONST: u16 = cast (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 +! 
mk_u16 0 <: u16) <: u16 let get_repr (x: t_EnumWithRepr) : u16 = t_EnumWithRepr_cast_to_repr x let get_casted_repr (x: t_EnumWithRepr) : u64 = cast (t_EnumWithRepr_cast_to_repr x <: u16) <: u64 ''' ================================================ FILE: test-harness/src/snapshots/toolchain__enum-repr into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: ssprove info: name: enum-repr manifest: enum-repr/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Enum_repr.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Definition t_EnumWithRepr : choice_type := ('unit ∐ 'unit ∐ 'unit ∐ 'unit). Notation "'EnumWithRepr_ExplicitDiscr1_case'" := (inl (inl (inl tt))) (at level 100). 
Equations EnumWithRepr_ExplicitDiscr1 : both t_EnumWithRepr := EnumWithRepr_ExplicitDiscr1 := ret_both (inl (inl (inl (tt : 'unit))) : t_EnumWithRepr) : both t_EnumWithRepr. Fail Next Obligation. Notation "'EnumWithRepr_ExplicitDiscr2_case'" := (inl (inl (inr tt))) (at level 100). Equations EnumWithRepr_ExplicitDiscr2 : both t_EnumWithRepr := EnumWithRepr_ExplicitDiscr2 := ret_both (inl (inl (inr (tt : 'unit))) : t_EnumWithRepr) : both t_EnumWithRepr. Fail Next Obligation. Notation "'EnumWithRepr_ImplicitDiscrEmptyTuple_case'" := (inl (inr tt)) (at level 100). Equations EnumWithRepr_ImplicitDiscrEmptyTuple : both t_EnumWithRepr := EnumWithRepr_ImplicitDiscrEmptyTuple := ret_both (inl (inr (tt : 'unit)) : t_EnumWithRepr) : both t_EnumWithRepr. Fail Next Obligation. Notation "'EnumWithRepr_ImplicitDiscrEmptyStruct_case'" := (inr tt) (at level 100). Equations EnumWithRepr_ImplicitDiscrEmptyStruct : both t_EnumWithRepr := EnumWithRepr_ImplicitDiscrEmptyStruct := ret_both (inr (tt : 'unit) : t_EnumWithRepr) : both t_EnumWithRepr. Fail Next Obligation. Equations anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 : both int16 := anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 := ret_both (1 : int16) : both int16. Fail Next Obligation. Equations anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 : both int16 := anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 := ret_both (5 : int16) : both int16. Fail Next Obligation. 
Equations t_EnumWithRepr_cast_to_repr (x : both t_EnumWithRepr) : both int16 := t_EnumWithRepr_cast_to_repr x := matchb x with | EnumWithRepr_ExplicitDiscr1_case => anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 | EnumWithRepr_ExplicitDiscr2_case => anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 | EnumWithRepr_ImplicitDiscrEmptyTuple_case => anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (1 : int16)) | EnumWithRepr_ImplicitDiscrEmptyStruct_case => anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (2 : int16)) end : both int16. Fail Next Obligation. Definition t_ImplicitReprs : choice_type := ('unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit). Notation "'ImplicitReprs_A_case'" := (inl (inl (inl (inl (inl (inl (inl (inl tt)))))))) (at level 100). Equations ImplicitReprs_A : both t_ImplicitReprs := ImplicitReprs_A := ret_both (inl (inl (inl (inl (inl (inl (inl (inl (tt : 'unit)))))))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_B_case'" := (inl (inl (inl (inl (inl (inl (inl (inr tt)))))))) (at level 100). Equations ImplicitReprs_B : both t_ImplicitReprs := ImplicitReprs_B := ret_both (inl (inl (inl (inl (inl (inl (inl (inr (tt : 'unit)))))))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_C_case'" := (inl (inl (inl (inl (inl (inl (inr tt))))))) (at level 100). Equations ImplicitReprs_C : both t_ImplicitReprs := ImplicitReprs_C := ret_both (inl (inl (inl (inl (inl (inl (inr (tt : 'unit))))))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_D_case'" := (inl (inl (inl (inl (inl (inr tt)))))) (at level 100). Equations ImplicitReprs_D : both t_ImplicitReprs := ImplicitReprs_D := ret_both (inl (inl (inl (inl (inl (inr (tt : 'unit)))))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. 
Notation "'ImplicitReprs_E_case'" := (inl (inl (inl (inl (inr tt))))) (at level 100). Equations ImplicitReprs_E : both t_ImplicitReprs := ImplicitReprs_E := ret_both (inl (inl (inl (inl (inr (tt : 'unit))))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_F_case'" := (inl (inl (inl (inr tt)))) (at level 100). Equations ImplicitReprs_F : both t_ImplicitReprs := ImplicitReprs_F := ret_both (inl (inl (inl (inr (tt : 'unit)))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_G_case'" := (inl (inl (inr tt))) (at level 100). Equations ImplicitReprs_G : both t_ImplicitReprs := ImplicitReprs_G := ret_both (inl (inl (inr (tt : 'unit))) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_H_case'" := (inl (inr tt)) (at level 100). Equations ImplicitReprs_H : both t_ImplicitReprs := ImplicitReprs_H := ret_both (inl (inr (tt : 'unit)) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Notation "'ImplicitReprs_I_case'" := (inr tt) (at level 100). Equations ImplicitReprs_I : both t_ImplicitReprs := ImplicitReprs_I := ret_both (inr (tt : 'unit) : t_ImplicitReprs) : both t_ImplicitReprs. Fail Next Obligation. Equations anon_const_ImplicitReprs_E__anon_const_0 : both int64 := anon_const_ImplicitReprs_E__anon_const_0 := ret_both (30 : int64) : both int64. Fail Next Obligation. 
Equations t_ImplicitReprs_cast_to_repr (x : both t_ImplicitReprs) : both int64 := t_ImplicitReprs_cast_to_repr x := matchb x with | ImplicitReprs_A_case => ret_both (0 : int64) | ImplicitReprs_B_case => ret_both (1 : int64) | ImplicitReprs_C_case => ret_both (2 : int64) | ImplicitReprs_D_case => ret_both (3 : int64) | ImplicitReprs_E_case => anon_const_ImplicitReprs_E__anon_const_0 | ImplicitReprs_F_case => anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (1 : int64)) | ImplicitReprs_G_case => anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (2 : int64)) | ImplicitReprs_H_case => anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (3 : int64)) | ImplicitReprs_I_case => anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (4 : int64)) end : both int64. Fail Next Obligation. Equations f (_ : both 'unit) : both int32 := f _ := letb e_x := cast_int (WS2 := _) (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (0 : int16))) in (cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyTuple)) .+ (cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyStruct)) : both int32. Fail Next Obligation. Equations f__v_CONST : both int16 := f__v_CONST := cast_int (WS2 := _) (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 .+ (ret_both (0 : int16))) : both int16. Fail Next Obligation. Equations get_repr (x : both t_EnumWithRepr) : both int16 := get_repr x := t_EnumWithRepr_cast_to_repr x : both int16. Fail Next Obligation. Equations get_casted_repr (x : both t_EnumWithRepr) : both int64 := get_casted_repr x := cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr x) : both int64. Fail Next Obligation. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__functions into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: functions manifest: functions/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 1 [[stdout.diagnostics]] message = ''' (Coq backend) something is not implemented yet. [ty] node typ''' spans = ['Span { lo: Loc { line: 11, col: 4 }, hi: Loc { line: 17, col: 5 }, filename: Real(LocalPath("functions/src/lib.rs")), rust_span_data: None }'] [stdout.files] "Functions.v" = ''' (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. (*Not implemented yet? todo(item)*) (*Not implemented yet? todo(item)*) Definition calling_function_pointer__f (_ : unit) : unit := tt. Definition calling_function_pointer (_ : unit) : unit := let f_ptr := calling_function_pointer__f : unit -> unit in let _ := calling_function_pointer__f tt : unit in tt. ''' "Functions_Issue_1048_.v" = ''' (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Record t_CallableViaDeref : Type := { }. (*item error backend*) Definition call_via_deref (_ : unit) : bool := f_deref CallableViaDereft_CallableViaDeref_t tt. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__functions into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: functions manifest: functions/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Functions.Issue_1048_.fst" = ''' module Functions.Issue_1048_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_CallableViaDeref = | CallableViaDeref : t_CallableViaDeref [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Ops.Deref.t_Deref t_CallableViaDeref = { f_Target = Prims.unit -> bool; f_deref_pre = (fun (self: t_CallableViaDeref) -> true); f_deref_post = (fun (self: t_CallableViaDeref) (out: (Prims.unit -> bool)) -> true); f_deref = fun (self: t_CallableViaDeref) -> fun temp_0_ -> let _:Prims.unit = temp_0_ in true } let call_via_deref (_: Prims.unit) : bool = Core_models.Ops.Deref.f_deref #t_CallableViaDeref #FStar.Tactics.Typeclasses.solve (CallableViaDeref <: t_CallableViaDeref) () ''' "Functions.fst" = ''' module Functions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let calling_function_pointer__f (#v_T: Type0) (_: Prims.unit) : Prims.unit = () /// Issue #757 let calling_function_pointer (_: Prims.unit) : Prims.unit = let ff_ptr: Prims.unit -> Prims.unit = calling_function_pointer__f in let _:Prims.unit = calling_function_pointer__f #i32 () in () ''' ================================================ FILE: test-harness/src/snapshots/toolchain__generics into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: generics manifest: 
generics/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Generics.Assoc_const_param.fst" = ''' module Generics.Assoc_const_param #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Test (v_N: usize) = | Test : t_Test v_N let impl__A (v_N: usize) : t_Test v_N = Test <: t_Test v_N let test (_: Prims.unit) : t_Test (mk_usize 1) = impl__A (mk_usize 1) ''' "Generics.Defaults_generics.fst" = ''' module Generics.Defaults_generics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Defaults (v_T: Type0) (v_N: usize) = | Defaults : t_Array v_T v_N -> t_Defaults v_T v_N let f (_: t_Defaults Prims.unit (mk_usize 2)) : Prims.unit = () ''' "Generics.Impl_generics.fst" = ''' module Generics.Impl_generics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Test = | Test : t_Test let impl_Test__set_ciphersuites (#v_S #iimpl_995885649_: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_AsRef v_S string) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Iter.Traits.Collect.t_IntoIterator iimpl_995885649_) (#_: unit{i1.Core_models.Iter.Traits.Collect.f_Item == v_S}) (self: t_Test) (ciphers: iimpl_995885649_) : Core_models.Result.t_Result Prims.unit Prims.unit = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit let impl_Test__set_alpn_protocols (#v_S #iimpl_995885649_: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_AsRef v_S string) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Iter.Traits.Collect.t_IntoIterator iimpl_995885649_) (#_: unit{i1.Core_models.Iter.Traits.Collect.f_Item == v_S}) (self: t_Test) 
(e_protocols: iimpl_995885649_) : Core_models.Result.t_Result Prims.unit Prims.unit = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit ''' "Generics.fst" = ''' module Generics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let dup (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T) (x: v_T) : (v_T & v_T) = Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve x, Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve x <: (v_T & v_T) let foo (v_LEN: usize) (arr: t_Array usize v_LEN) : usize = let acc:usize = v_LEN +! mk_usize 9 in let acc:usize = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) v_LEN (fun acc temp_1_ -> let acc:usize = acc in let _:usize = temp_1_ in true) acc (fun acc i -> let acc:usize = acc in let i:usize = i in acc +! (arr.[ i ] <: usize) <: usize) in acc let repeat (v_LEN: usize) (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T) (x: v_T) : t_Array v_T v_LEN = Rust_primitives.Hax.repeat x v_LEN let f (v_N x: usize) : usize = (v_N +! v_N <: usize) +! x let call_f (_: Prims.unit) : usize = (f (mk_usize 10) (mk_usize 3) <: usize) +! mk_usize 3 let g (v_N: usize) (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T (t_Array usize v_N)) (arr: v_T) : usize = (Core_models.Option.impl__unwrap_or #usize (Core_models.Iter.Traits.Iterator.f_max #(Core_models.Array.Iter.t_IntoIter usize v_N) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Collect.f_into_iter #(t_Array usize v_N) #FStar.Tactics.Typeclasses.solve (Core_models.Convert.f_into #v_T #(t_Array usize v_N) #FStar.Tactics.Typeclasses.solve arr <: t_Array usize v_N) <: Core_models.Array.Iter.t_IntoIter usize v_N) <: Core_models.Option.t_Option usize) v_N <: usize) +! 
v_N let call_g (_: Prims.unit) : usize = (g (mk_usize 3) #(t_Array usize (mk_usize 3)) (let list = [mk_usize 42; mk_usize 3; mk_usize 49] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) <: usize) +! mk_usize 3 class t_Foo (v_Self: Type0) = { f_const_add_pre:v_N: usize -> v_Self -> Type0; f_const_add_post:v_N: usize -> v_Self -> usize -> Type0; f_const_add:v_N: usize -> x0: v_Self -> Prims.Pure usize (f_const_add_pre v_N x0) (fun result -> f_const_add_post v_N x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Foo usize = { f_const_add_pre = (fun (v_N: usize) (self: usize) -> true); f_const_add_post = (fun (v_N: usize) (self: usize) (out: usize) -> true); f_const_add = fun (v_N: usize) (self: usize) -> self +! v_N } type t_Bar = | Bar : t_Bar let impl_Bar__inherent_impl_generics (#v_T: Type0) (v_N: usize) (x: t_Array v_T v_N) : Prims.unit = () ''' ================================================ FILE: test-harness/src/snapshots/toolchain__guards into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: guards manifest: guards/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Guards.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. 
(* NotImplementedYet *) Definition if_let_guard (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with | Option_None => (0 : t_i32) | _ => match match x with | Option_Some (v) => match v with | Result_Ok (y) => Option_Some (y) | _ => Option_None end | _ => Option_None end with | Option_Some (x) => x | Option_None => match x with | Option_Some (Result_Err (y)) => y | _ => (1 : t_i32) end end end. Definition equivalent (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with | Option_None => (0 : t_i32) | _ => match match x with | Option_Some (v) => match v with | Result_Ok (y) => Option_Some (y) | _ => Option_None end | _ => Option_None end with | Option_Some (y) => y | Option_None => match x with | Option_Some (Result_Err (y)) => y | _ => (1 : t_i32) end end end. Definition multiple_guards (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with | Option_None => (0 : t_i32) | _ => match match x with | Option_Some (Result_Ok (v)) => match Option_Some (f_add (v) ((1 : t_i32))) with | Option_Some (1) => Option_Some ((0 : t_i32)) | _ => Option_None end | _ => Option_None end with | Option_Some (x) => x | Option_None => match match x with | Option_Some (v) => match v with | Result_Ok (y) => Option_Some (y) | _ => Option_None end | _ => Option_None end with | Option_Some (x) => x | Option_None => match x with | Option_Some (Result_Err (y)) => y | _ => (1 : t_i32) end end end end. Definition if_guard (x : t_Option ((t_i32))) : t_i32 := match match x with | Option_Some (v) => match f_gt (v) ((0 : t_i32)) with | true => Option_Some (v) | _ => Option_None end | _ => Option_None end with | Option_Some (x) => x | Option_None => (0 : t_i32) end. 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Guards.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__guards into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: guards manifest: guards/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Guards.fst" = ''' module Guards #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let if_let_guard (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 = match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_None -> mk_i32 0 | _ -> match (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some v -> (match v <: Core_models.Result.t_Result i32 i32 with | Core_models.Result.Result_Ok y -> Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32 | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some x -> x | Core_models.Option.Option_None -> match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y | _ -> mk_i32 1 let equivalent (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 = match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_None -> mk_i32 0 | _ -> match (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some v -> (match 
v <: Core_models.Result.t_Result i32 i32 with | Core_models.Result.Result_Ok y -> Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32 | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some y -> y | Core_models.Option.Option_None -> match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y | _ -> mk_i32 1 let multiple_guards (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 = match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_None -> mk_i32 0 | _ -> match (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some (Core_models.Result.Result_Ok v) -> (match Core_models.Option.Option_Some (v +! 
mk_i32 1) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 1) -> Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32 | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some x -> x | Core_models.Option.Option_None -> match (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some v -> (match v <: Core_models.Result.t_Result i32 i32 with | Core_models.Result.Result_Ok y -> Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32 | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some x -> x | Core_models.Option.Option_None -> match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y | _ -> mk_i32 1 let if_guard (x: Core_models.Option.t_Option i32) : i32 = match (match x <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some v -> (match v >. 
mk_i32 0 <: bool with | true -> Core_models.Option.Option_Some v <: Core_models.Option.t_Option i32 | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some x -> x | Core_models.Option.Option_None -> mk_i32 0 ''' ================================================ FILE: test-harness/src/snapshots/toolchain__guards into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: ssprove info: name: guards manifest: guards/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Guards.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? 
todo(item)*) Equations if_let_guard (x : both (t_Option (t_Result int32 int32))) : both int32 := if_let_guard x := matchb x with | Option_None_case => ret_both (0 : int32) | _ => matchb matchb x with | Option_Some_case v => letb v := ret_both ((v) : (t_Result int32 int32)) in matchb v with | Result_Ok_case y => letb y := ret_both ((y) : (int32)) in Option_Some y | _ => Option_None end | _ => Option_None end with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in x | Option_None_case => matchb x with | Option_Some_case Result_Err y => letb y := ret_both ((((y))) : (t_Result int32 int32)) in y | _ => ret_both (1 : int32) end end end : both int32. Fail Next Obligation. Equations equivalent (x : both (t_Option (t_Result int32 int32))) : both int32 := equivalent x := matchb x with | Option_None_case => ret_both (0 : int32) | _ => matchb matchb x with | Option_Some_case v => letb v := ret_both ((v) : (t_Result int32 int32)) in matchb v with | Result_Ok_case y => letb y := ret_both ((y) : (int32)) in Option_Some y | _ => Option_None end | _ => Option_None end with | Option_Some_case y => letb y := ret_both ((y) : (int32)) in y | Option_None_case => matchb x with | Option_Some_case Result_Err y => letb y := ret_both ((((y))) : (t_Result int32 int32)) in y | _ => ret_both (1 : int32) end end end : both int32. Fail Next Obligation. 
Equations multiple_guards (x : both (t_Option (t_Result int32 int32))) : both int32 := multiple_guards x := matchb x with | Option_None_case => ret_both (0 : int32) | _ => matchb matchb x with | Option_Some_case Result_Ok v => letb v := ret_both ((((v))) : (t_Result int32 int32)) in matchb Option_Some (v .+ (ret_both (1 : int32))) with | Option_Some_case 1 => letb 1 := ret_both ((1) : (int32)) in Option_Some (ret_both (0 : int32)) | _ => Option_None end | _ => Option_None end with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in x | Option_None_case => matchb matchb x with | Option_Some_case v => letb v := ret_both ((v) : (t_Result int32 int32)) in matchb v with | Result_Ok_case y => letb y := ret_both ((y) : (int32)) in Option_Some y | _ => Option_None end | _ => Option_None end with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in x | Option_None_case => matchb x with | Option_Some_case Result_Err y => letb y := ret_both ((((y))) : (t_Result int32 int32)) in y | _ => ret_both (1 : int32) end end end end : both int32. Fail Next Obligation. Equations if_guard (x : both (t_Option int32)) : both int32 := if_guard x := matchb matchb x with | Option_Some_case v => letb v := ret_both ((v) : (int32)) in matchb v >.? (ret_both (0 : int32)) with | true => Option_Some v | _ => Option_None end | _ => Option_None end with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in x | Option_None_case => ret_both (0 : int32) end : both int32. Fail Next Obligation. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__include-flag into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: include-flag manifest: cli/include-flag/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Include_flag.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Record Foo_record : Type := { }. #[export] Notation "'Foo_Foo_record'" := Build_Foo_record. Class t_Trait (v_Self : Type) : Type := { }. Arguments t_Trait (_). Instance t_Trait_810848144 : t_Trait ((t_Foo)) := { }. Definition main_a_a '(_ : unit) : unit := tt. Definition main_b_a '(_ : unit) : unit := tt. Definition main_c_a '(_ : unit) : unit := tt. Definition main_a_b '(_ : unit) : unit := tt. Definition main_b_b '(_ : unit) : unit := tt. Definition main_c_b '(_ : unit) : unit := tt. Definition main_a_c '(_ : unit) : unit := tt. Definition main_a `{v_T : Type} `{t_Trait (v_T)} (x : v_T) : unit := let _ := main_a_a (tt) in let _ := main_a_b (tt) in let _ := main_a_c (tt) in tt. Definition main_b_c '(_ : unit) : unit := tt. Definition main_b '(_ : unit) : unit := let _ := main_b_a (tt) in let _ := main_b_b (tt) in let _ := main_b_c (tt) in tt. Definition main_c_c '(_ : unit) : unit := tt. Definition main_c '(_ : unit) : unit := let _ := main_c_a (tt) in let _ := main_c_b (tt) in let _ := main_c_c (tt) in tt. 
Definition main '(_ : unit) : unit := let _ := main_a (Foo) in let _ := main_b (tt) in let _ := main_c (tt) in tt. ''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Include_flag.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__include-flag into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: include-flag manifest: cli/include-flag/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Include_flag.fst" = ''' module Include_flag #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo : t_Foo class t_Trait (v_Self: Type0) = { __marker_trait_t_Trait:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Trait t_Foo = { __marker_trait_t_Trait = () } /// Indirect dependencies let main_a_a (_: Prims.unit) : Prims.unit = () let main_b_a (_: Prims.unit) : Prims.unit = () let main_c_a (_: Prims.unit) : Prims.unit = () let main_a_b (_: Prims.unit) : Prims.unit = () let main_b_b (_: Prims.unit) : Prims.unit = () let main_c_b (_: Prims.unit) : Prims.unit = () let main_a_c (_: Prims.unit) : Prims.unit = () /// Direct dependencies let main_a (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_T) (x: v_T) : Prims.unit = let _:Prims.unit = main_a_a () in let _:Prims.unit = main_a_b () in let _:Prims.unit = main_a_c () in () let main_b_c (_: Prims.unit) : Prims.unit = () let main_b (_: Prims.unit) : Prims.unit = let _:Prims.unit = main_b_a () in let _:Prims.unit = main_b_b () in let _:Prims.unit = main_b_c () in () let main_c_c (_: Prims.unit) : Prims.unit = () let main_c (_: Prims.unit) : Prims.unit = let _:Prims.unit = main_c_a () in let _:Prims.unit = main_c_b () in let 
_:Prims.unit = main_c_c () in () /// Entrypoint let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = main_a #t_Foo (Foo <: t_Foo) in let _:Prims.unit = main_b () in let _:Prims.unit = main_c () in () ''' ================================================ FILE: test-harness/src/snapshots/toolchain__interface-only into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: interface-only manifest: cli/interface-only/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: "+:** -interface_only::Foo" backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Interface_only.fst" = ''' module Interface_only #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models /// This item contains unsafe blocks and raw references, two features /// not supported by hax. Thanks to the `-i` flag and the `+:` /// modifier, `f` is still extractable as an interface. /// Expressions within type are still extracted, as well as pre- and /// post-conditions. assume val f': x: u8 -> Prims.Pure (t_Array u8 (mk_usize 4)) (requires x <. mk_u8 254) (ensures fun r -> let r:t_Array u8 (mk_usize 4) = r in (r.[ mk_usize 0 ] <: u8) >. x) unfold let f = f' type t_Bar = | Bar : t_Bar /// Non-inherent implementations are extracted, their bodies are not /// dropped. This might be a bit surprising: see /// https://github.com/hacspec/hax/issues/616. 
[@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Convert.t_From t_Bar Prims.unit unfold let impl = impl' /// If you need to drop the body of a method, please hoist it: [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': Core_models.Convert.t_From t_Bar u8 unfold let impl_1 = impl_1' assume val f_from__impl_1__from': u8 -> t_Bar unfold let f_from__impl_1__from = f_from__impl_1__from' type t_Holder (v_T: Type0) = { f_value:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': #v_T: Type0 -> Core_models.Convert.t_From (t_Holder v_T) Prims.unit unfold let impl_2 (#v_T: Type0) = impl_2' #v_T type t_Param (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_3': v_SIZE: usize -> Core_models.Convert.t_From (t_Param v_SIZE) Prims.unit unfold let impl_3 (v_SIZE: usize) = impl_3' v_SIZE assume val ff_generic': v_X: usize -> #v_U: Type0 -> e_x: v_U -> t_Param v_X unfold let ff_generic (v_X: usize) (#v_U: Type0) = ff_generic' v_X #v_U class t_T (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Assoc:Type0; f_d_pre:Prims.unit -> Type0; f_d_post:Prims.unit -> Prims.unit -> Type0; f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result) } /// Impls with associated types are not erased [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T_for_u8: t_T u8 = { f_Assoc = u8; f_d_pre = (fun (_: Prims.unit) -> true); f_d_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); f_d = fun (_: Prims.unit) -> () } class t_T2 (v_Self: Type0) = { f_d_pre:Prims.unit -> Type0; f_d_post:Prims.unit -> Prims.unit -> Type0; f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result) } /// Items can be forced to be transparent [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T2_for_u8: t_T2 u8 = { f_d_pre = (fun (_: Prims.unit) -> false); f_d_post = (fun (_: Prims.unit) (out: Prims.unit) 
-> true); f_d = fun (_: Prims.unit) -> () } assume val padlen': b: t_Slice u8 -> n: usize -> Prims.Pure usize (requires (Core_models.Slice.impl__len #u8 b <: usize) >=. n) (ensures fun out -> let out:usize = out in out <=. n) unfold let padlen = padlen' ''' ================================================ FILE: test-harness/src/snapshots/toolchain__lean-core-models into-lean.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: lean info: name: lean-core-models manifest: lean-core-models/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "lean_core_models.lean" = ''' -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false namespace lean_core_models.default.structs structure S where f1 : usize @[reducible] instance Impl.AssociatedTypes : core_models.default.Default.AssociatedTypes S where instance Impl : core_models.default.Default S where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (S.mk (f1 := (0 : usize)))) @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM S := do (core_models.default.Default.default S rust_primitives.hax.Tuple0.mk) end lean_core_models.default.structs namespace lean_core_models.default.enums inductive E (T : Type) : Type | C1 : u32 -> E (T : Type) | C2 : T -> E (T : Type) @[reducible] instance Impl.AssociatedTypes (T : Type) [trait_constr_Impl_associated_type_i0 : core_models.default.Default.AssociatedTypes T] [trait_constr_Impl_i0 : core_models.default.Default T ] : 
core_models.default.Default.AssociatedTypes (E T) where instance Impl (T : Type) [trait_constr_Impl_associated_type_i0 : core_models.default.Default.AssociatedTypes T] [trait_constr_Impl_i0 : core_models.default.Default T ] : core_models.default.Default (E T) where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (E.C2 (← (core_models.default.Default.default T rust_primitives.hax.Tuple0.mk)))) end lean_core_models.default.enums namespace lean_core_models.function @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM u32 := do let f_1 : (u32 -> RustM u32) := (fun _ => (do (pure (9 : u32)) : RustM u32)); let f_2 : (u32 -> u32 -> RustM u32) := (fun x y => (do (x +? y) : RustM u32)); let f_2_tuple : ((rust_primitives.hax.Tuple2 u32 u32) -> RustM u32) := (fun ⟨x, y⟩ => (do (x +? y) : RustM u32)); ((← ((← (core_models.ops.function.Fn.call (u32 -> RustM u32) (rust_primitives.hax.Tuple1 u32) f_1 (rust_primitives.hax.Tuple1.mk (0 : u32)))) +? (← (core_models.ops.function.Fn.call (u32 -> u32 -> RustM u32) (rust_primitives.hax.Tuple2 u32 u32) f_2 (rust_primitives.hax.Tuple2.mk (1 : u32) (2 : u32)))))) +? 
(← (core_models.ops.function.Fn.call ((rust_primitives.hax.Tuple2 u32 u32) -> RustM u32) (rust_primitives.hax.Tuple1 (rust_primitives.hax.Tuple2 u32 u32)) f_2_tuple (rust_primitives.hax.Tuple1.mk (rust_primitives.hax.Tuple2.mk (1 : u32) (2 : u32)))))) end lean_core_models.function namespace lean_core_models.option structure S where f1 : u32 inductive E : Type | C : u32 -> E @[reducible] instance Impl.AssociatedTypes : core_models.default.Default.AssociatedTypes S where instance Impl : core_models.default.Default S where default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (S.mk (f1 := (42 : u32)))) @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let o1 : (core_models.option.Option i32) := (core_models.option.Option.Some (4 : i32)); let o2 : (core_models.option.Option i32) := core_models.option.Option.None; let o3 : Bool ← (core_models.option.Impl.is_some_and i32 (i32 -> RustM Bool) (← (core_models.clone.Clone.clone (core_models.option.Option i32) o1)) (fun x => (do (x ==? (0 : i32)) : RustM Bool))); let o3 : Bool ← (core_models.option.Impl.is_none_or i32 (i32 -> RustM Bool) (← (core_models.clone.Clone.clone (core_models.option.Option i32) o1)) (fun x => (do (x ==? (0 : i32)) : RustM Bool))); let o4 : i32 ← (core_models.option.Impl.unwrap i32 (core_models.option.Option.Some (0 : i32))); let o5 : i32 ← (core_models.option.Impl.unwrap_or i32 (core_models.option.Option.Some (0 : i32)) (9 : i32)); let o6 : i32 ← (core_models.option.Impl.unwrap_or_else i32 (rust_primitives.hax.Tuple0 -> RustM i32) (core_models.option.Option.Some (0 : i32)) (fun _ => (do (pure (9 : i32)) : RustM i32))); let o7 : S ← (core_models.option.Impl.unwrap_or_default S core_models.option.Option.None); let o8 : (core_models.option.Option i32) ← (core_models.option.Impl.map i32 i32 (i32 -> RustM i32) (core_models.option.Option.Some (0 : i32)) (fun x => (do (x +? 
(1 : i32)) : RustM i32))); let o9 : i32 ← (core_models.option.Impl.map_or i32 i32 (i32 -> RustM i32) (core_models.option.Option.Some (1 : i32)) (9 : i32) (fun x => (do (x +? (1 : i32)) : RustM i32))); let o10 : i32 ← (core_models.option.Impl.map_or_else i32 i32 (rust_primitives.hax.Tuple0 -> RustM i32) (i32 -> RustM i32) (core_models.option.Option.Some (2 : i32)) (fun _ => (do (pure (9 : i32)) : RustM i32)) (fun x => (do (x +? (1 : i32)) : RustM i32))); let o11 : (core_models.result.Result i32 E) ← (core_models.option.Impl.ok_or i32 E (core_models.option.Option.Some (3 : i32)) (E.C (0 : u32))); let o12 : (core_models.result.Result i32 E) ← (core_models.option.Impl.ok_or_else i32 E (rust_primitives.hax.Tuple0 -> RustM E) (core_models.option.Option.Some (1 : i32)) (fun _ => (do (pure (E.C (1 : u32))) : RustM E))); let o13 : (core_models.option.Option u32) ← (core_models.option.Impl.and_then u32 u32 (u32 -> RustM (core_models.option.Option u32)) core_models.option.Option.None (fun x => (do (pure (core_models.option.Option.Some x)) : RustM (core_models.option.Option u32)))); let ⟨_, out⟩ ← (core_models.option.Impl.take S (core_models.option.Option.Some (S.mk (f1 := (9 : u32))))); let o14 : (core_models.option.Option S) := out; let o15 : Bool ← (core_models.option.Impl.is_some i32 (core_models.option.Option.Some (1 : i32))); let o16 : Bool ← (core_models.option.Impl.is_none i32 (core_models.option.Option.Some (2 : i32))); let o17 : i32 ← (core_models.option.Impl.expect i32 (core_models.option.Option.Some (3 : i32)) "Should be Some"); let o18 : i32 ← (core_models.option.Impl.unwrap i32 (core_models.option.Option.Some (4 : i32))); (pure rust_primitives.hax.Tuple0.mk) end lean_core_models.option namespace lean_core_models.phantom class Foo.AssociatedTypes (Self : Type) where class Foo (Self : Type) [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))] where structure Bar (F : Type) [trait_constr_Bar_associated_type_i0 : Foo.AssociatedTypes F] 
[trait_constr_Bar_i0 : Foo F ] where _phantom : (core_models.marker.PhantomData F) @[spec] def Impl.new (F : Type) [trait_constr_new_associated_type_i0 : Foo.AssociatedTypes F] [trait_constr_new_i0 : Foo F ] (_ : rust_primitives.hax.Tuple0) : RustM (Bar F) := do (pure (Bar.mk (_phantom := core_models.marker.PhantomData.mk))) end lean_core_models.phantom namespace lean_core_models.result inductive E1 : Type | C1 : E1 | C2 : u32 -> E1 @[instance] opaque Impl.AssociatedTypes : core_models.clone.Clone.AssociatedTypes E1 := by constructor <;> exact Inhabited.default @[instance] opaque Impl : core_models.clone.Clone E1 := by constructor <;> exact Inhabited.default inductive E2 : Type | C1 : E2 | C2 : u32 -> E2 @[spec] def tests (_ : rust_primitives.hax.Tuple0) : RustM (core_models.result.Result u32 E1) := do let v1 : (core_models.result.Result u32 E1) := (core_models.result.Result.Ok (1 : u32)); let v2 : (core_models.result.Result u32 E1) := (core_models.result.Result.Err E1.C1); let f : (u32 -> RustM u32) := (fun x => (do (x +? (1 : u32)) : RustM u32)); let v5 : (core_models.result.Result i32 E1) ← (core_models.result.Impl.map i32 E1 i32 (i32 -> RustM i32) (core_models.result.Result.Ok (1 : i32)) (fun v => (do (v +? (1 : i32)) : RustM i32))); let v6 : u32 ← (core_models.result.Impl.map_or u32 E1 u32 (u32 -> RustM u32) (core_models.result.Result.Ok (1 : u32)) (9 : u32) f); let v7 : u32 ← (core_models.result.Impl.map_or_else u32 E1 u32 (E1 -> RustM u32) (u32 -> RustM u32) (core_models.result.Result.Ok (1 : u32)) (fun _ => (do (pure (10 : u32)) : RustM u32)) f); let v8 : (core_models.result.Result i32 E2) ← (core_models.result.Impl.map_err i32 E1 E2 (E1 -> RustM E2) (core_models.result.Result.Ok (0 : i32)) (fun e => (do match e with | (E1.C1 ) => do (pure E2.C1) | (E1.C2 x) => do (pure (E2.C2 (← (x +? 
(1 : u32))))) : RustM E2))); let v9 : Bool ← (core_models.result.Impl.is_ok u32 E1 v1); let v10 : Bool ← (core_models.result.Impl.is_err u32 E1 v1); let v11 : (core_models.result.Result u32 E1) ← (core_models.result.Impl.and_then u32 E1 u32 (u32 -> RustM (core_models.result.Result u32 E1)) (← (core_models.clone.Clone.clone (core_models.result.Result u32 E1) v1)) (fun x => (do (pure (core_models.result.Result.Ok (← (x +? (1 : u32))))) : RustM (core_models.result.Result u32 E1)))); let v12 : u32 ← (core_models.result.Impl.unwrap u32 u32 (← (core_models.clone.Clone.clone (core_models.result.Result u32 u32) (core_models.result.Result.Ok (0 : u32))))); let v13 : u32 ← (core_models.result.Impl.expect u32 u32 (← (core_models.clone.Clone.clone (core_models.result.Result u32 u32) (core_models.result.Result.Ok (0 : u32)))) "Should be Ok"); match (← (core_models.result.Impl.map u32 E1 u32 (u32 -> RustM u32) v1 f)) with | (core_models.result.Result.Ok hoist2) => do match v2 with | (core_models.result.Result.Ok hoist1) => do let v3 : u32 ← (hoist2 +? 
hoist1); (pure (core_models.result.Result.Ok v3)) | (core_models.result.Result.Err err) => do (pure (core_models.result.Result.Err err)) | (core_models.result.Result.Err err) => do (pure (core_models.result.Result.Err err)) end lean_core_models.result ''' ================================================ FILE: test-harness/src/snapshots/toolchain__lean-tests into-lean.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: lean info: name: lean-tests manifest: lean-tests/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "lean_tests.lean" = """ -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false namespace lean_tests.array @[spec] def f (N : usize) (x : (RustArray u8 N)) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) @[spec] def g (N : usize) (x : (RustArray u8 N)) : RustM rust_primitives.hax.Tuple0 := do let _ ← (f (N) x); let _ ← (f ((10 : usize)) (← (rust_primitives.hax.repeat (0 : u8) (10 : usize)))); (pure rust_primitives.hax.Tuple0.mk) end lean_tests.array namespace lean_tests.associated_types.projection class T1.AssociatedTypes (Self : Type) where A1 : Type attribute [reducible] T1.AssociatedTypes.A1 abbrev T1.A1 := T1.AssociatedTypes.A1 class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where end lean_tests.associated_types.projection namespace lean_tests.associated_types.multiple_projections class FnOnce.AssociatedTypes (Self : Type) (T : 
Type) where Output : Type attribute [reducible] FnOnce.AssociatedTypes.Output abbrev FnOnce.Output := FnOnce.AssociatedTypes.Output class FnOnce (Self : Type) (T : Type) [associatedTypes : outParam (FnOnce.AssociatedTypes (Self : Type) (T : Type))] where @[spec] def func (T : Type) (U : Type) (D : Type) (F : Type) [trait_constr_func_associated_type_i0 : FnOnce.AssociatedTypes F T] [trait_constr_func_i0 : FnOnce F T (associatedTypes := { show FnOnce.AssociatedTypes F T by infer_instance with Output := U})] [trait_constr_func_associated_type_i1 : FnOnce.AssociatedTypes D T] [trait_constr_func_i1 : FnOnce D T (associatedTypes := { show FnOnce.AssociatedTypes D T by infer_instance with Output := U})] (d : D) (f : F) (u : U) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) end lean_tests.associated_types.multiple_projections namespace lean_tests.binops @[spec] def noop (x : i32) : RustM i32 := do (pure x) @[spec] def neg_int (x : i32) : RustM i32 := do (-? x) @[spec] def not_int (x : i32) : RustM i32 := do (~? x) @[spec] def not_bool (x : Bool) : RustM Bool := do (!? x) @[spec] def index (x : (RustArray i32 1)) : RustM i32 := do x[(0 : usize)]_? @[spec] def add_int (x : i32) (y : i32) : RustM i32 := do (x +? y) @[spec] def sub_int (x : i32) (y : i32) : RustM i32 := do (x -? y) @[spec] def mul_int (x : i32) (y : i32) : RustM i32 := do (x *? y) @[spec] def div_int (x : i32) (y : i32) : RustM i32 := do (x /? y) @[spec] def rem_int (x : i32) (y : i32) : RustM i32 := do (x %? y) @[spec] def shr_int (x : i32) (y : i32) : RustM i32 := do (x >>>? y) @[spec] def shl_int (x : i32) (y : i32) : RustM i32 := do (x <<? y) @[spec] def ge_int (x : i32) (y : i32) : RustM Bool := do (x >=? 
y) structure S where -- no fields @[reducible] instance Impl.AssociatedTypes : core_models.ops.bit.Not.AssociatedTypes S where Output := S instance Impl : core_models.ops.bit.Not S where not := fun (self : S) => do (pure self) @[reducible] instance Impl_1.AssociatedTypes : core_models.ops.arith.Add.AssociatedTypes S S where Output := S instance Impl_1 : core_models.ops.arith.Add S S where add := fun (self : S) (rhs : S) => do (pure self) @[spec] def not_s (x : S) : RustM S := do (core_models.ops.bit.Not.not S x) @[spec] def add_s (x : S) (y : S) : RustM S := do (core_models.ops.arith.Add.add S S x y) end lean_tests.binops namespace lean_tests.casts -- Returns true if all casting edge cases behave as expected. def casting_edge_cases (_dummy : Bool) : RustM Bool := do let case1 : Bool ← ((← (rust_primitives.hax.cast_op (256 : u16) : RustM u8)) ==? (0 : u8)); let case2 : Bool ← ((← (rust_primitives.hax.cast_op (-1 : i16) : RustM u8)) ==? (255 : u8)); let case3 : Bool ← ((← (rust_primitives.hax.cast_op (-1 : i8) : RustM i16)) ==? (-1 : i16)); let case4 : Bool ← ((← (rust_primitives.hax.cast_op (128 : u8) : RustM i8)) ==? (-128 : i8)); let case5 : Bool ← ((← (rust_primitives.hax.cast_op (4294967295 : u32) : RustM i32)) ==? (-1 : i32)); ((← ((← ((← (case1 &&? case2)) &&? case3)) &&? case4)) &&? 
case5) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def casting_edge_cases.spec (_dummy : Bool) : Spec (requires := do pure True) (ensures := fun result => do (pure result)) (casting_edge_cases (_dummy : Bool)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [casting_edge_cases] <;> bv_decide } -- https://github.com/cryspen/hax/issues/1912 @[spec] def shift_after_cast (x : u16) (n : u8) : RustM u32 := do ((← (rust_primitives.hax.cast_op x : RustM u32)) << RustM usize) -- Struct definition structure S (N : usize) where _0 : u32 @[reducible] instance Impl.AssociatedTypes (N_TRAIT : usize) : T.AssociatedTypes (S (N_TRAIT)) (N_TRAIT) where instance Impl (N_TRAIT : usize) : T (S (N_TRAIT)) (N_TRAIT) where f := fun (N_FIELD : usize) (self : (S (N_TRAIT))) => do (N_TRAIT -? N_FIELD) @[spec] def test2 (N2 : usize) (A : Type) [trait_constr_test2_associated_type_i0 : T.AssociatedTypes A (N2)] [trait_constr_test2_i0 : T A (N2) ] (x : A) : RustM usize := do let s : (S ((10 : usize))) := (S.mk (9 : u32)); let _ ← ((← (T.f (S ((10 : usize))) ((10 : usize)) ((1 : usize)) s)) +? 
(← (T.f A (N2) ((11 : usize)) x))); let s : (S ((3 : usize))) := (S.mk (9 : u32)); (T.f A (N2) ((4 : usize)) x) end lean_tests.constants.const_parameters namespace lean_tests.enums inductive E : Type | V1 : E | V2 : E | V3 : usize -> E | V4 : usize -> usize -> usize -> E | V5 (f1 : usize) (f2 : usize) : E | V6 (f1 : usize) (f2 : usize) : E end lean_tests.enums namespace lean_tests.floats def N : f32 := (1.0 : f32) @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let l0 : f64 := (1.0 : f64); let l1 : f64 := (0.9 : f64); let l2 : f32 := (5.0 : f32); let l5 : f32 := N; (pure rust_primitives.hax.Tuple0.mk) @[spec] def f (x : f64) (y : f32) : RustM f32 := do (pure y) end lean_tests.floats namespace lean_tests.ite @[spec] def test1 (_ : rust_primitives.hax.Tuple0) : RustM i32 := do let x : i32 ← if true then do (pure (0 : i32)) else do (pure (1 : i32)); if false then do (pure (2 : i32)) else do (pure (3 : i32)) @[spec] def test2 (b : Bool) : RustM i32 := do let x : i32 ← if b then do (pure (0 : i32)) else do (pure (9 : i32)); let y : i32 := (0 : i32); let y : i32 ← if true then do ((← (y +? x)) +? (1 : i32)) else do ((← (y -? x)) -? (1 : i32)); if b then do let z : i32 ← (y +? y); ((← (z +? y)) +? x) else do let z : i32 ← (y -? x); ((← (z +? y)) +? x) end lean_tests.ite namespace lean_tests.loops -- Simple for-loop @[spec] def loop1 (_ : rust_primitives.hax.Tuple0) : RustM u32 := do let x : u32 := (0 : u32); let x : u32 ← (rust_primitives.hax.folds.fold_range (1 : u32) (10 : u32) (fun x _ => (do (pure true) : RustM Bool)) x (fun x i => (do (x +? i) : RustM u32))); (pure x) -- For-loop with a return @[spec] def loop2 (_ : rust_primitives.hax.Tuple0) : RustM u32 := do let x : u32 := (0 : u32); match (← (rust_primitives.hax.folds.fold_range_return (1 : u32) (10 : u32) (fun x _ => (do (pure true) : RustM Bool)) x (fun x i => (do if (← (i ==? 
(5 : u32))) then do (pure (core_models.ops.control_flow.ControlFlow.Break (core_models.ops.control_flow.ControlFlow.Break x))) else do (pure (core_models.ops.control_flow.ControlFlow.Continue (← (x +? i)))) : RustM (core_models.ops.control_flow.ControlFlow (core_models.ops.control_flow.ControlFlow u32 (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32)) u32))))) with | (core_models.ops.control_flow.ControlFlow.Break ret) => do (pure ret) | (core_models.ops.control_flow.ControlFlow.Continue x) => do (pure x) -- For-loop with a spec def for_loop_with_spec (y : u64) : RustM u64 := do let x : u64 := y; let x : u64 ← (rust_primitives.hax.folds.fold_range (0 : u64) y (fun x i => (do (x >? (0 : u64)) : RustM Bool)) x (fun x i => (do if (← ((← (x %? (5 : u64))) ==? (0 : u64))) then do let x : u64 := (200 : u64); (pure x) else do let x : u64 ← (x %? (5 : u64)); (pure x) : RustM u64))); (pure x) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def for_loop_with_spec.spec (y : u64) : Spec (requires := do (y >? (0 : u64))) (ensures := fun res => do (res >? (0 : u64))) (for_loop_with_spec (y : u64)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [for_loop_with_spec] <;> bv_decide } -- while-loop def while_loop1 (s : u32) : RustM u32 := do let x : u32 := s; let x : u32 ← (rust_primitives.hax.while_loop (fun x => (do (pure true) : RustM Bool)) (fun x => (do (x >? (0 : u32)) : RustM Bool)) (fun x => (do (rust_primitives.hax.int.from_machine x) : RustM hax_lib.int.Int)) x (fun x => (do let x : u32 ← (x -? (1 : u32)); (pure x) : RustM u32))); (pure x) set_option hax_mvcgen.specset \"int\" in @[hax_spec] def while_loop1.spec (s : u32) : Spec (requires := do pure True) (ensures := fun r => do (r ==? 
(0 : u32))) (while_loop1 (s : u32)) := { pureRequires := by hax_construct_pure <;> grind pureEnsures := by hax_construct_pure <;> grind contract := by hax_mvcgen [while_loop1] <;> grind } end lean_tests.loops namespace lean_tests.loops.errors inductive Error : Type | Foo : Error | Bar : u32 -> Error @[spec] def loop3 (_ : rust_primitives.hax.Tuple0) : RustM (core_models.result.Result u32 Error) := do let x : u32 := (0 : u32); let _end : u32 := (10 : u32); match (← (rust_primitives.hax.folds.fold_range_return (1 : u32) _end (fun x _ => (do (pure true) : RustM Bool)) x (fun x i => (do if (← (i ==? (5 : u32))) then do (pure (core_models.ops.control_flow.ControlFlow.Break (core_models.ops.control_flow.ControlFlow.Break (core_models.result.Result.Err Error.Foo)))) else do (pure (core_models.ops.control_flow.ControlFlow.Continue (← (x +? (5 : u32))))) : RustM (core_models.ops.control_flow.ControlFlow (core_models.ops.control_flow.ControlFlow (core_models.result.Result u32 Error) (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32)) u32))))) with | (core_models.ops.control_flow.ControlFlow.Break ret) => do (pure ret) | (core_models.ops.control_flow.ControlFlow.Continue x) => do (pure (core_models.result.Result.Ok x)) @[spec] def loop4 (_ : rust_primitives.hax.Tuple0) : RustM (core_models.result.Result (rust_primitives.hax.Tuple2 u32 u32) Error) := do let e : u32 := (0 : u32); let f : (rust_primitives.hax.Tuple0 -> RustM u32) := (fun ⟨⟩ => (do (pure (42 : u32)) : RustM u32)); match (← (rust_primitives.hax.folds.fold_range_return (0 : u32) (← (core_models.ops.function.Fn.call (rust_primitives.hax.Tuple0 -> RustM u32) (rust_primitives.hax.Tuple1 rust_primitives.hax.Tuple0) f (rust_primitives.hax.Tuple1.mk rust_primitives.hax.Tuple0.mk))) (fun e _ => (do (pure true) : RustM Bool)) e (fun e i => (do if (← (i >? 
(10 : u32))) then do (pure (core_models.ops.control_flow.ControlFlow.Break (core_models.ops.control_flow.ControlFlow.Break (core_models.result.Result.Err (Error.Bar e))))) else do (pure (core_models.ops.control_flow.ControlFlow.Continue (← (e +? i)))) : RustM (core_models.ops.control_flow.ControlFlow (core_models.ops.control_flow.ControlFlow (core_models.result.Result (rust_primitives.hax.Tuple2 u32 u32) Error) (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32)) u32))))) with | (core_models.ops.control_flow.ControlFlow.Break ret) => do (pure ret) | (core_models.ops.control_flow.ControlFlow.Continue e) => do (pure (core_models.result.Result.Ok (rust_primitives.hax.Tuple2.mk e e))) end lean_tests.loops.errors namespace lean_tests.matching @[spec] def test_const_matching (x : u32) (c : Char) (s : String) (b : Bool) : RustM u32 := do let x : u32 ← match x with | 0 => do (pure (42 : u32)) | _ => do (pure (0 : u32)); let c : u32 ← match c with | 'a' => do (pure (42 : u32)) | _ => do (pure (0 : u32)); let s : u32 ← match s with | \"Hello\" => do (pure (42 : u32)) | _ => do (pure (0 : u32)); let b : u32 ← match b with | true => do (pure (42 : u32)) | false => do (pure (0 : u32)); ((← ((← (x +? c)) +? s)) +? b) @[spec] def test_binding_subpattern_matching (x : (rust_primitives.hax.Tuple2 u8 (rust_primitives.hax.Tuple2 u8 u8))) : RustM u8 := do match x with | ⟨0, pair@⟨a, b⟩⟩ => do ((← ((← (a +? b)) +? (rust_primitives.hax.Tuple2._0 pair))) +? (rust_primitives.hax.Tuple2._1 pair)) | _ => do (pure (0 : u8)) inductive test_ellipsis_records.E : Type | C (f1 : u8) (f2 : u8) (f3 : u8) (f4 : u8) : test_ellipsis_records.E @[spec] def test_ellipsis_records (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let c : test_ellipsis_records.E := (test_ellipsis_records.E.C (f1 := (1 : u8)) (f2 := (2 : u8)) (f3 := (3 : u8)) (f4 := (4 : u8))); let _ ← match c with | (test_ellipsis_records.E.C _ ..) 
=> do (hax_lib.assert true); let _ ← match c with | (test_ellipsis_records.E.C (f1 := f1) ..) => do (hax_lib.assert (← (f1 ==? (1 : u8)))); let _ ← match c with | (test_ellipsis_records.E.C (f1 := f1) (f2 := f2) ..) => do (hax_lib.assert (← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8)))))); let _ ← match c with | (test_ellipsis_records.E.C (f2 := f2) (f4 := f4) ..) => do (hax_lib.assert (← ((← (f2 ==? (2 : u8))) &&? (← (f4 ==? (4 : u8)))))); let _ ← match c with | (test_ellipsis_records.E.C (f1 := f1) (f2 := f2) (f3 := f3) (f4 := f4)) => do (hax_lib.assert (← ((← ((← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8))))) &&? (← (f3 ==? (3 : u8))))) &&? (← (f4 ==? (4 : u8)))))); (pure rust_primitives.hax.Tuple0.mk) structure test_ellipsis_structs.S where f1 : u8 f2 : u8 f3 : u8 f4 : u8 @[spec] def test_ellipsis_structs (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let c : test_ellipsis_structs.S := (test_ellipsis_structs.S.mk (f1 := (1 : u8)) (f2 := (2 : u8)) (f3 := (3 : u8)) (f4 := (4 : u8))); let _ ← match c with | _ => do (hax_lib.assert true); let _ ← match c with | {f1 := f1, ..} => do (hax_lib.assert (← (f1 ==? (1 : u8)))); let _ ← match c with | {f1 := f1, f2 := f2, ..} => do (hax_lib.assert (← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8)))))); let _ ← match c with | {f2 := f2, f4 := f4, ..} => do (hax_lib.assert (← ((← (f2 ==? (2 : u8))) &&? (← (f4 ==? (4 : u8)))))); let _ ← match c with | {f1 := f1, f2 := f2, f3 := f3, f4 := f4} => do (hax_lib.assert (← ((← ((← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8))))) &&? (← (f3 ==? (3 : u8))))) &&? (← (f4 ==? 
(4 : u8)))))); (pure rust_primitives.hax.Tuple0.mk) @[spec] def test_ellipsis_bare_tuples (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let t : (rust_primitives.hax.Tuple4 u8 u8 u8 u8) := (rust_primitives.hax.Tuple4.mk (1 : u8) (2 : u8) (3 : u8) (4 : u8)); let _ ← match t with | ⟨_, _, _, _⟩ => do (hax_lib.assert true); let _ ← match t with | ⟨a, _, _, _⟩ => do (hax_lib.assert (← (a ==? (1 : u8)))); let _ ← match t with | ⟨a, b, _, _⟩ => do (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8)))))); let _ ← match t with | ⟨_, _, _, d⟩ => do (hax_lib.assert (← (d ==? (4 : u8)))); let _ ← match t with | ⟨_, _, c, d⟩ => do (hax_lib.assert (← ((← (c ==? (3 : u8))) &&? (← (d ==? (4 : u8)))))); let _ ← match t with | ⟨a, _, _, d⟩ => do (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (d ==? (4 : u8)))))); let _ ← match t with | ⟨a, b, c, d⟩ => do (hax_lib.assert (← ((← ((← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8))))) &&? (← (c ==? (3 : u8))))) &&? (← (d ==? (4 : u8)))))); (pure rust_primitives.hax.Tuple0.mk) inductive test_ellipsis_tuples.F : Type | D : u8 -> u8 -> u8 -> u8 -> test_ellipsis_tuples.F @[spec] def test_ellipsis_tuples (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let d : test_ellipsis_tuples.F := (test_ellipsis_tuples.F.D (1 : u8) (2 : u8) (3 : u8) (4 : u8)); let _ ← match d with | (test_ellipsis_tuples.F.D _ _ _ _) => do (hax_lib.assert true); let _ ← match d with | (test_ellipsis_tuples.F.D a _ _ _) => do (hax_lib.assert (← (a ==? (1 : u8)))); let _ ← match d with | (test_ellipsis_tuples.F.D a b _ _) => do (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8)))))); let _ ← match d with | (test_ellipsis_tuples.F.D _ _ _ d) => do (hax_lib.assert (← (d ==? (4 : u8)))); let _ ← match d with | (test_ellipsis_tuples.F.D _ _ c d) => do (hax_lib.assert (← ((← (c ==? (3 : u8))) &&? (← (d ==? 
(4 : u8)))))); let _ ← match d with | (test_ellipsis_tuples.F.D a _ _ d) => do (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (d ==? (4 : u8)))))); let _ ← match d with | (test_ellipsis_tuples.F.D a b c d) => do (hax_lib.assert (← ((← ((← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8))))) &&? (← (c ==? (3 : u8))))) &&? (← (d ==? (4 : u8)))))); (pure rust_primitives.hax.Tuple0.mk) end lean_tests.matching namespace lean_tests.monadic structure S where f : u32 @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ := (9 : i32); let _ ← ((9 : i32) +? (9 : i32)); let _ := (S.mk (f := (9 : u32))); let _ := (S.mk (f := (← ((9 : u32) +? (9 : u32))))); let _ := (S.f (S.mk (f := (← ((9 : u32) +? (9 : u32)))))); let _ ← ((S.f (S.mk (f := (← ((9 : u32) +? (9 : u32)))))) +? (9 : u32)); let _ ← if true then do ((3 : i32) +? (4 : i32)) else do ((3 : i32) -? (4 : i32)); let _ ← if (← ((← ((9 : i32) +? (9 : i32))) ==? (0 : i32))) then do ((3 : i32) +? (4 : i32)) else do ((3 : i32) -? (4 : i32)); let _ ← if true then do let x : i32 := (9 : i32); let _ ← ((3 : i32) +? x); (pure rust_primitives.hax.Tuple0.mk) else do let y : i32 := (19 : i32); let _ ← ((← ((3 : i32) +? y)) -? (4 : i32)); (pure rust_primitives.hax.Tuple0.mk); (pure rust_primitives.hax.Tuple0.mk) end lean_tests.monadic namespace lean_tests.monadic.trait_constants class Foo.AssociatedTypes (Self : Type) where class Foo (Self : Type) [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))] where F (Self) : u32 class Bar.AssociatedTypes (Self : Type) where class Bar (Self : Type) [associatedTypes : outParam (Bar.AssociatedTypes (Self : Type))] where B (Self) : u32 structure Baz where -- no fields @[reducible] instance Impl.AssociatedTypes : Foo.AssociatedTypes Baz where instance Impl : Foo Baz where F := (1 : u32) @[reducible] instance Impl_1.AssociatedTypes : Bar.AssociatedTypes Baz where instance Impl_1 : Bar Baz where B := RustM.of_isOk (do ((Foo.F Baz) -? 
(1 : u32))) (by rfl) end lean_tests.monadic.trait_constants namespace lean_tests.nested_control_flow @[spec] def nested_control_flow (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let x1 : i32 ← ((1 : i32) +? (← if true then do (pure (0 : i32)) else do (pure (1 : i32)))); let x2 : i32 ← ((1 : i32) +? (← match (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)) with | _ => do (pure (0 : i32)))); let x : i32 := (9 : i32); let x3 : i32 ← ((1 : i32) +? (← (x +? (1 : i32)))); (pure rust_primitives.hax.Tuple0.mk) @[spec] def explicit_hoisting (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let x1_tmp : i32 ← if true then do (pure (0 : i32)) else do (pure (1 : i32)); let x1 : i32 ← ((1 : i32) +? x1_tmp); let x2_tmp : i32 ← match (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)) with | _ => do (pure (0 : i32)); let x2 : i32 ← ((1 : i32) +? x2_tmp); let x3_tmp_x : i32 := (9 : i32); let x3_tmp : i32 ← (x3_tmp_x +? (1 : i32)); let x3 : i32 ← ((1 : i32) +? x3_tmp); (pure rust_primitives.hax.Tuple0.mk) @[spec] def complex_nesting (_ : rust_primitives.hax.Tuple0) : RustM (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 rust_primitives.hax.Tuple0) := do let x1 : i32 ← if true then do let y : i32 ← if false then do let z : i32 ← match rust_primitives.hax.Tuple0.mk with | _ => do (pure (9 : i32)); let z : i32 ← ((1 : i32) +? z); (z +? (1 : i32)) else do let z : i32 := (9 : i32); let z : i32 ← (z +? (1 : i32)); (pure z); let y : i32 ← (y +? (1 : i32)); (y +? (1 : i32)) else do (pure (0 : i32)); let x1 : i32 ← (x1 +? (1 : i32)); let x2 : i32 ← match (core_models.option.Option.Some (89 : i32)) with | (core_models.option.Option.Some a) => do let y : i32 ← ((1 : i32) +? a); let y : i32 ← (y +? (1 : i32)); if (← (y ==? (0 : i32))) then do let z : i32 := (9 : i32); let z : i32 ← ((← (z +? y)) +? 
(1 : i32)); (pure z) else do (pure (10 : i32)) | (core_models.option.Option.None ) => do let y : i32 ← if false then do (pure (9 : i32)) else do let z : i32 := (9 : i32); let z : i32 ← (z +? (1 : i32)); (z +? (9 : i32)); let y : i32 ← (y +? (1 : i32)); (pure y); (pure (rust_primitives.hax.Tuple2.mk rust_primitives.hax.Tuple0.mk rust_primitives.hax.Tuple0.mk)) end lean_tests.nested_control_flow namespace lean_tests.opaque opaque an_opaque_fn (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 class T.AssociatedTypes (Self : Type) where A : Type attribute [reducible] T.AssociatedTypes.A abbrev T.A := T.AssociatedTypes.A class T (Self : Type) [associatedTypes : outParam (T.AssociatedTypes (Self : Type))] where f (Self) : (rust_primitives.hax.Tuple0 -> RustM rust_primitives.hax.Tuple0) structure S where -- no fields @[instance] opaque Impl.AssociatedTypes : T.AssociatedTypes S := by constructor <;> exact Inhabited.default @[instance] opaque Impl : T S := by constructor <;> exact Inhabited.default opaque OpaqueStruct : Type end lean_tests.opaque namespace lean_tests.specs def test (x : u8) : RustM u8 := do (pure x) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def test.spec (x : u8) : Spec (requires := do (x >? (0 : u8))) (ensures := fun r => do (r ==? x)) (test (x : u8)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [test] <;> bv_decide } def use_previous_result (x : u8) : RustM u8 := do (test x) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def use_previous_result.spec (x : u8) : Spec (requires := do (x >? (0 : u8))) (ensures := fun r => do (r ==? 
x)) (use_previous_result (x : u8)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [use_previous_result] <;> bv_decide } def test_proof (x : u8) : RustM u8 := do (pure x) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def test_proof.spec (x : u8) : Spec (requires := do (x >? (0 : u8))) (ensures := fun r => do (r ==? x)) (test_proof (x : u8)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by unfold lean_tests.specs.test_proof; hax_bv_decide } def square (x : u8) : RustM u8 := do (x *? x) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def square.spec (x : u8) : Spec (requires := do (x do (res >=? x)) (square (x : u8)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [square] <;> bv_decide } def forall_and_exists (x : u8) : RustM u8 := do (pure x) set_option hax_mvcgen.specset \"int\" in @[hax_spec] def forall_and_exists.spec (x : u8) : Spec (requires := do (hax_lib.prop.constructors.forall (fun i => (do (hax_lib.prop.constructors.implies (← (hax_lib.prop.constructors.from_bool (← (i ? i))))) : RustM hax_lib.prop.Prop)))) (ensures := fun r => do (hax_lib.prop.constructors.not (← (hax_lib.prop.constructors.exists (fun i => (do (hax_lib.prop.constructors.not (← (hax_lib.prop.constructors.implies (← (hax_lib.prop.constructors.from_bool (← (i ? 
i))))))) : RustM hax_lib.prop.Prop)))))) (forall_and_exists (x : u8)) := { pureRequires := by hax_construct_pure <;> grind pureEnsures := by hax_construct_pure <;> grind contract := by hax_mvcgen [forall_and_exists] <;> grind } -- Test function without arguments -- https://github.com/cryspen/hax/issues/1856 def fn_without_args (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def fn_without_args.spec (_ : rust_primitives.hax.Tuple0) : Spec (requires := do pure True) (ensures := fun _ => do (pure true)) (fn_without_args ⟨⟩) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [fn_without_args] <;> bv_decide } end lean_tests.specs namespace lean_tests.specs.issue_1852 structure T where -- no fields @[spec] def Impl.test (self : T) : RustM Bool := do (pure true) def Impl.func (self : T) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def Impl.func.spec (self : T) : Spec (requires := do (Impl.test self)) (ensures := fun _ => pure True) (Impl.func (self : T)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [Impl.func] <;> bv_decide } end lean_tests.specs.issue_1852 namespace lean_tests.specs def custom_pure_proofs (x : u8) : RustM rust_primitives.hax.Tuple0 := do (pure rust_primitives.hax.Tuple0.mk) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def custom_pure_proofs.spec (x : u8) : Spec (requires := do (pure true)) (ensures := fun r => do (pure true)) (custom_pure_proofs (x : u8)) := { pureRequires := ⟨True, by mvcgen⟩ pureEnsures := ⟨fun _ => True, by intros; mvcgen⟩ contract := by hax_mvcgen [custom_pure_proofs] <;> bv_decide } end lean_tests.specs namespace lean_tests.specs.issue_1945 def mktuple (a : 
i32) : RustM Bool := do let x : i32 := a; (a ==? (0 : i32)) set_option hax_mvcgen.specset \"bv\" in @[hax_spec] def mktuple.spec (a : i32) : Spec (requires := do let x : i32 := a; (a ==? (0 : i32))) (ensures := fun _ => pure True) (mktuple (a : i32)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [mktuple] <;> bv_decide } end lean_tests.specs.issue_1945 namespace lean_tests.structs structure T0 where -- no fields structure T1 (A : Type) where _0 : A structure T2 (A : Type) (B : Type) where _0 : A _1 : B structure T3 (A : Type) (B : Type) (C : Type) where _0 : A _1 : B _2 : C structure T3p (A : Type) (B : Type) (C : Type) where _0 : A _1 : (T2 B C) @[spec] def tuple_structs (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let t0 : T0 := T0.mk; let t1 : (T1 i32) := (T1.mk (1 : i32)); let t2 : (T2 i32 i32) := (T2.mk (1 : i32) (2 : i32)); let t3 : (T3 T0 (T1 i32) (T2 i32 i32)) := (T3.mk T0.mk (T1.mk (1 : i32)) (T2.mk (1 : i32) (2 : i32))); let t3p : (T3p T0 (T1 i32) (T2 i32 i32)) := (T3p.mk T0.mk (T2.mk (T1.mk (1 : i32)) (T2.mk (1 : i32) (2 : i32)))); let ⟨⟩ := t0; let ⟨u1⟩ := t1; let ⟨u2, u3⟩ := t2; let ⟨⟨⟩, ⟨_⟩, ⟨_, _⟩⟩ := t3; let ⟨⟨⟩, ⟨⟨_⟩, ⟨_, _⟩⟩⟩ := t3p; let _ := (T1._0 t1); let _ := (T2._0 t2); let _ := (T2._1 t2); let _ := (T3._0 t3); let _ := (T3._1 t3); let _ := (T3._2 t3); let _ := (T2._1 (T3._2 t3)); let _ := (T3p._0 t3p); let _ := (T3p._1 t3p); let _ := (T2._0 (T2._1 (T3p._1 t3p))); let _ := (T2._0 (T3p._1 t3p)); let _ := (T2._1 (T3p._1 t3p)); let _ ← match t0 with | ⟨⟩ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match t1 with | ⟨u1⟩ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match t2 with | ⟨u2, u3⟩ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match t3 with | ⟨⟨⟩, ⟨u1⟩, ⟨u2, u3⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match t3p with | ⟨⟨⟩, ⟨⟨u1⟩, ⟨u2, u3⟩⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk); (pure 
rust_primitives.hax.Tuple0.mk) structure S1 where f1 : usize f2 : usize structure S2 where f1 : S1 f2 : usize structure S3 where _end : usize _def : usize _theorem : usize _structure : usize _inductive : usize @[spec] def normal_structs (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let s1 : S1 := (S1.mk (f1 := (0 : usize)) (f2 := (1 : usize))); let s2 : S2 := (S2.mk (f1 := (S1.mk (f1 := (2 : usize)) (f2 := (3 : usize)))) (f2 := (4 : usize))); let s3 : S3 := (S3.mk (_end := (0 : usize)) (_def := (0 : usize)) (_theorem := (0 : usize)) (_structure := (0 : usize)) (_inductive := (0 : usize))); let {f1 := f1, f2 := f2} := s1; let {f1 := f1, f2 := other_name_for_f2} := s1; let {f1 := {f1 := f1, f2 := f2}, f2 := other_name_for_f2} := s2; let {_end := _end, _def := _def, _theorem := _theorem, _structure := _structure, _inductive := _inductive} := s3; let _ := (rust_primitives.hax.Tuple2.mk (S1.f1 s1) (S1.f2 s1)); let _ := (rust_primitives.hax.Tuple8.mk (S1.f1 s1) (S1.f2 s1) (S1.f1 (S2.f1 s2)) (S1.f2 (S2.f1 s2)) (S2.f2 s2) (S3._end s3) (S3._def s3) (S3._theorem s3)); let _ ← match s1 with | {f1 := f1, f2 := f2} => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match s2 with | {f1 := {f1 := f1, f2 := other_name_for_f2}, f2 := f2} => do (pure rust_primitives.hax.Tuple0.mk); match s3 with | {_end := _end, _def := _def, _theorem := _theorem, _structure := _structure, _inductive := _inductive} => do (pure rust_primitives.hax.Tuple0.mk) end lean_tests.structs namespace lean_tests.structs.miscellaneous structure S where f : i32 @[spec] def test_tuples (_ : rust_primitives.hax.Tuple0) : RustM (rust_primitives.hax.Tuple2 i32 i32) := do let lit : i32 := (1 : i32); let constr : S := (S.mk (f := (42 : i32))); let proj : i32 := (S.f constr); let ite : (rust_primitives.hax.Tuple2 i32 i32) ← if true then do (pure (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32))) else do let z : i32 ← ((1 : i32) +? 
(2 : i32)); (pure (rust_primitives.hax.Tuple2.mk z z)); (pure (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32))) end lean_tests.structs.miscellaneous namespace lean_tests.structs.base_expressions structure S where f1 : u32 f2 : u32 f3 : u32 @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let s1 : S := (S.mk (f1 := (1 : u32)) (f2 := (2 : u32)) (f3 := (3 : u32))); let _ := {s1 with f1 := (0 : u32)}; let _ := {s1 with f2 := (0 : u32)}; let _ := {s1 with f3 := (0 : u32)}; let _ := {s1 with f1 := (0 : u32), f2 := (1 : u32)}; let _ := {s1 with f2 := (0 : u32), f3 := (1 : u32)}; let _ := {s1 with f3 := (0 : u32), f1 := (2 : u32)}; let _ := {s1 with f1 := (0 : u32), f2 := (1 : u32), f3 := (0 : u32)}; (pure rust_primitives.hax.Tuple0.mk) end lean_tests.structs.base_expressions namespace lean_tests.traits.basic class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f1 (Self) : (Self -> RustM usize) f2 (Self) : (Self -> Self -> RustM usize) structure S where -- no fields @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where instance Impl : T1 S where f1 := fun (self : S) => do (pure (42 : usize)) f2 := fun (self : S) (other : S) => do (pure (43 : usize)) @[spec] def f (T : Type) [trait_constr_f_associated_type_i0 : T1.AssociatedTypes T] [trait_constr_f_i0 : T1 T ] (x : T) : RustM usize := do ((← (T1.f1 T x)) +? 
(← (T1.f2 T x x))) end lean_tests.traits.basic namespace lean_tests.traits.bounds class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f1 (Self) : (Self -> RustM usize) class T2.AssociatedTypes (Self : Type) where class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where f2 (Self) : (Self -> RustM usize) class Test.AssociatedTypes (Self : Type) (T : Type) where [trait_constr_Test_i0 : T2.AssociatedTypes Self] [trait_constr_Test_i1 : T1.AssociatedTypes T] attribute [instance_reducible, instance] Test.AssociatedTypes.trait_constr_Test_i0 attribute [instance_reducible, instance] Test.AssociatedTypes.trait_constr_Test_i1 class Test (Self : Type) (T : Type) [associatedTypes : outParam (Test.AssociatedTypes (Self : Type) (T : Type))] where [trait_constr_Test_i0 : T2 Self] [trait_constr_Test_i1 : T1 T] f_test (Self) (T) : (Self -> T -> RustM usize) attribute [instance_reducible, instance] Test.trait_constr_Test_i0 attribute [instance_reducible, instance] Test.trait_constr_Test_i1 structure S1 where -- no fields @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S1 where instance Impl : T1 S1 where f1 := fun (self : S1) => do (pure (0 : usize)) structure S2 where -- no fields @[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S2 where instance Impl_1 : T2 S2 where f2 := fun (self : S2) => do (pure (1 : usize)) @[reducible] instance Impl_2.AssociatedTypes : Test.AssociatedTypes S2 S1 where instance Impl_2 : Test S2 S1 where f_test := fun (self : S2) (x : S1) => do ((← ((← (T1.f1 S1 x)) +? (← (T2.f2 S2 self)))) +? (1 : usize)) @[spec] def test (x1 : S1) (x2 : S2) : RustM usize := do ((← (Test.f_test S2 S1 x2 x1)) +? 
(← (T1.f1 S1 x1))) end lean_tests.traits.bounds namespace lean_tests.traits.associated_types class Foo.AssociatedTypes (Self : Type) (T : Type) where class Foo (Self : Type) (T : Type) [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type) (T : Type))] where class Bar.AssociatedTypes (Self : Type) where class Bar (Self : Type) [associatedTypes : outParam (Bar.AssociatedTypes (Self : Type))] where structure S where -- no fields @[reducible] instance Impl_2.AssociatedTypes : Bar.AssociatedTypes i16 where instance Impl_2 : Bar i16 where @[reducible] instance Impl_3.AssociatedTypes (A : Type) : Foo.AssociatedTypes (rust_primitives.hax.Tuple2 u32 A) i16 where instance Impl_3 (A : Type) : Foo (rust_primitives.hax.Tuple2 u32 A) i16 where class Chain0.AssociatedTypes (Self : Type) where class Chain0 (Self : Type) [associatedTypes : outParam (Chain0.AssociatedTypes (Self : Type))] where @[reducible] instance Impl_4.AssociatedTypes : Chain0.AssociatedTypes u8 where instance Impl_4 : Chain0 u8 where end lean_tests.traits.associated_types namespace lean_tests.traits.overlapping_methods class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f (Self) : (Self -> RustM usize) class T2.AssociatedTypes (Self : Type) where class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where f (Self) : (Self -> RustM usize) class T3.AssociatedTypes (Self : Type) where class T3 (Self : Type) [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))] where f (Self) : (Self -> RustM usize) @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes u32 where instance Impl : T1 u32 where f := fun (self : u32) => do (pure (0 : usize)) @[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes u32 where instance Impl_1 : T2 u32 where f := fun (self : u32) => do (pure (1 : usize)) @[reducible] instance Impl_2.AssociatedTypes : T3.AssociatedTypes u32 where 
instance Impl_2 : T3 u32 where f := fun (self : u32) => do (pure (2 : usize)) @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM usize := do let x : u32 := (9 : u32); ((← ((← (T1.f u32 x)) +? (← (T2.f u32 x)))) +? (← (T3.f u32 x))) end lean_tests.traits.overlapping_methods namespace lean_tests.traits.inheritance class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f1 (Self) : (Self -> RustM usize) class T2.AssociatedTypes (Self : Type) where class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where f2 (Self) : (Self -> RustM usize) class T3.AssociatedTypes (Self : Type) where [trait_constr_T3_i0 : T2.AssociatedTypes Self] [trait_constr_T3_i1 : T1.AssociatedTypes Self] attribute [instance_reducible, instance] T3.AssociatedTypes.trait_constr_T3_i0 attribute [instance_reducible, instance] T3.AssociatedTypes.trait_constr_T3_i1 class T3 (Self : Type) [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))] where [trait_constr_T3_i0 : T2 Self] [trait_constr_T3_i1 : T1 Self] f3 (Self) : (Self -> RustM usize) attribute [instance_reducible, instance] T3.trait_constr_T3_i0 attribute [instance_reducible, instance] T3.trait_constr_T3_i1 class Tp1.AssociatedTypes (Self : Type) where class Tp1 (Self : Type) [associatedTypes : outParam (Tp1.AssociatedTypes (Self : Type))] where f1 (Self) : (Self -> RustM usize) class Tp2.AssociatedTypes (Self : Type) where [trait_constr_Tp2_i0 : Tp1.AssociatedTypes Self] [trait_constr_Tp2_i1 : T3.AssociatedTypes Self] attribute [instance_reducible, instance] Tp2.AssociatedTypes.trait_constr_Tp2_i0 attribute [instance_reducible, instance] Tp2.AssociatedTypes.trait_constr_Tp2_i1 class Tp2 (Self : Type) [associatedTypes : outParam (Tp2.AssociatedTypes (Self : Type))] where [trait_constr_Tp2_i0 : Tp1 Self] [trait_constr_Tp2_i1 : T3 Self] fp2 (Self) : (Self -> RustM usize) attribute [instance_reducible, instance] 
Tp2.trait_constr_Tp2_i0 attribute [instance_reducible, instance] Tp2.trait_constr_Tp2_i1 structure S where -- no fields @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where instance Impl : T1 S where f1 := fun (self : S) => do (pure (1 : usize)) @[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S where instance Impl_1 : T2 S where f2 := fun (self : S) => do (pure (2 : usize)) @[reducible] instance Impl_2.AssociatedTypes : T3.AssociatedTypes S where instance Impl_2 : T3 S where f3 := fun (self : S) => do (pure (3 : usize)) @[reducible] instance Impl_3.AssociatedTypes : Tp1.AssociatedTypes S where instance Impl_3 : Tp1 S where f1 := fun (self : S) => do (pure (10 : usize)) @[reducible] instance Impl_4.AssociatedTypes : Tp2.AssociatedTypes S where instance Impl_4 : Tp2 S where fp2 := fun (self : S) => do ((← ((← ((← (Tp1.f1 S self)) +? (← (T1.f1 S self)))) +? (← (T2.f2 S self)))) +? (← (T3.f3 S self))) @[spec] def test (_ : rust_primitives.hax.Tuple0) : RustM usize := do let s : S := S.mk; ((← (T3.f3 S s)) +? (1 : usize)) end lean_tests.traits.inheritance namespace lean_tests.traits.default class Easy.AssociatedTypes (Self : Type) where class Easy (Self : Type) [associatedTypes : outParam (Easy.AssociatedTypes (Self : Type))] where dft (Self) (self : Self) :RustM usize := do (pure (32 : usize)) @[reducible] instance Impl.AssociatedTypes : Easy.AssociatedTypes usize where instance Impl : Easy usize where dft := fun (self : usize) => do (self +? 
(1 : usize)) @[reducible] instance Impl_1.AssociatedTypes : Easy.AssociatedTypes u32 where instance Impl_1 : Easy u32 where class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f1 (Self) : (Self -> RustM usize) f2 (Self) (self : Self) :RustM usize := do (pure (1 : usize)) f3 (Self) (A : Type) (self : Self) (x : A) :RustM usize := do (pure (1 : usize)) f4 (Self) (A : Type) [trait_constr_f4_associated_type_i1 : Easy.AssociatedTypes A] [trait_constr_f4_i1 : Easy A ] (self : Self) (x : A) :RustM usize := do ((← (Easy.dft A x)) +? (1 : usize)) structure S (A : Type) where _0 : usize _1 : A @[reducible] instance Impl_2.AssociatedTypes : T1.AssociatedTypes (S usize) where instance Impl_2 : T1 (S usize) where f1 := fun (self : (S usize)) => do ((S._0 self) +? (S._1 self)) f2 := fun (self : (S usize)) => do (pure (S._1 self)) @[reducible] instance Impl_3.AssociatedTypes : T1.AssociatedTypes (S Bool) where instance Impl_3 : T1 (S Bool) where f1 := fun (self : (S Bool)) => do if (S._1 self) then do (pure (S._0 self)) else do (pure (9 : usize)) f2 := fun (self : (S Bool)) => do ((S._0 self) +? 
(1 : usize)) @[reducible] instance Impl_4.AssociatedTypes : T1.AssociatedTypes (S alloc.string.String) where instance Impl_4 : T1 (S alloc.string.String) where f1 := fun (self : (S alloc.string.String)) => do (pure (0 : usize)) end lean_tests.traits.default namespace lean_tests.traits.trait_level_args class T1.AssociatedTypes (Self : Type) (A : Type) (B : Type) where class T1 (Self : Type) (A : Type) (B : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type) (A : Type) (B : Type))] where f1 (Self) (A) (B) (C : Type) (D : Type) : (Self -> RustM rust_primitives.hax.Tuple0) f2 (Self) (A) (B) (C : Type) (D : Type) : (Self -> A -> RustM rust_primitives.hax.Tuple0) f3 (Self) (A) (B) (C : Type) (D : Type) : (Self -> A -> B -> RustM rust_primitives.hax.Tuple0) @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes usize u32 u64 where instance Impl : T1 usize u32 u64 where f1 := fun (C : Type) (D : Type) (self : usize) => do (pure rust_primitives.hax.Tuple0.mk) f2 := fun (C : Type) (D : Type) (self : usize) (x : u32) => do (pure rust_primitives.hax.Tuple0.mk) f3 := fun (C : Type) (D : Type) (self : usize) (x : u32) (y : u64) => do (pure rust_primitives.hax.Tuple0.mk) @[spec] def test (A : Type) (B : Type) (C : Type) (D : Type) (U : Type) [trait_constr_test_associated_type_i0 : T1.AssociatedTypes U A B] [trait_constr_test_i0 : T1 U A B ] (x : U) (a : A) (b : B) : RustM rust_primitives.hax.Tuple0 := do let _ ← (T1.f1 U A B C D x); let _ ← (T1.f2 U A B C D x a); let _ ← (T1.f3 U A B C D x a b); (pure rust_primitives.hax.Tuple0.mk) end lean_tests.traits.trait_level_args namespace lean_tests.traits.trait_with_constraints class T1.AssociatedTypes (Self : Type) where class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where class T2.AssociatedTypes (Self : Type) where class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where func (Self) [trait_constr_func_associated_type_i1 : 
T1.AssociatedTypes Self] [trait_constr_func_i1 : T1 Self ] : (Self -> RustM Bool) @[reducible] instance Impl.AssociatedTypes (A : Type) [trait_constr_Impl_associated_type_i0 : T1.AssociatedTypes A] [trait_constr_Impl_i0 : T1 A ] : T2.AssociatedTypes A where instance Impl (A : Type) [trait_constr_Impl_associated_type_i0 : T1.AssociatedTypes A] [trait_constr_Impl_i0 : T1 A ] : T2 A where func := fun [trait_constr_func_associated_type_i1 : T1.AssociatedTypes A] [trait_constr_func_i1 : T1 A ] (self : A) => do (pure true) end lean_tests.traits.trait_with_constraints namespace lean_tests.traits.associated_constant class Foo.AssociatedTypes (Self : Type) where class Foo (Self : Type) [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))] where f (Self) : Bool x (Self) :u8 := (0 : u8) structure Bar where -- no fields @[reducible] instance Impl.AssociatedTypes : Foo.AssociatedTypes Bar where instance Impl : Foo Bar where f := true x := RustM.of_isOk (do ((1 : u8) +? (1 : u8))) (by rfl) class Baz.AssociatedTypes (Self : Type) where class Baz (Self : Type) [associatedTypes : outParam (Baz.AssociatedTypes (Self : Type))] where One (Self) :u32 := (1 : u32) @[spec] def foo (F : Type) [trait_constr_foo_associated_type_i0 : Baz.AssociatedTypes F] [trait_constr_foo_i0 : Baz F ] (n : u32) : RustM u32 := do (n +? 
(Baz.One F)) end lean_tests.traits.associated_constant namespace lean_tests.types abbrev UsizeAlias : Type := usize abbrev MyOption (A : Type) : Type := (core_models.option.Option A) abbrev MyResult (A : Type) (B : Type) : Type := (core_models.result.Result (core_models.option.Option A) B) abbrev ErrorMonad (A : Type) (E : Type) : Type := (core_models.result.Result A E) abbrev StateMonad (A : Type) (S : Type) : Type := (rust_primitives.hax.Tuple2 A S) abbrev ESMonad (A : Type) (S : Type) (E : Type) : Type := (rust_primitives.hax.Tuple2 (core_models.result.Result A E) S) end lean_tests.types namespace lean_tests def FORTYTWO : usize := (42 : usize) def MINUS_FORTYTWO : isize := (-42 : isize) @[spec] def returns42 (_ : rust_primitives.hax.Tuple0) : RustM usize := do (pure FORTYTWO) @[spec] def add_two_numbers (x : usize) (y : usize) : RustM usize := do (x +? y) @[spec] def letBinding (x : usize) (y : usize) : RustM usize := do let useless : rust_primitives.hax.Tuple0 := rust_primitives.hax.Tuple0.mk; let result1 : usize ← (x +? y); let result2 : usize ← (result1 +? (2 : usize)); (result2 +? (1 : usize)) @[spec] def closure (_ : rust_primitives.hax.Tuple0) : RustM i32 := do let x : i32 := (41 : i32); let f1 : (i32 -> RustM i32) := (fun y => (do (y +? x) : RustM i32)); let f2 : (i32 -> i32 -> RustM i32) := (fun y z => (do ((← (y +? x)) +? z) : RustM i32)); let res1 : i32 ← (core_models.ops.function.Fn.call (i32 -> RustM i32) (rust_primitives.hax.Tuple1 i32) f1 (rust_primitives.hax.Tuple1.mk (1 : i32))); let res2 : i32 ← (core_models.ops.function.Fn.call (i32 -> i32 -> RustM i32) (rust_primitives.hax.Tuple2 i32 i32) f2 (rust_primitives.hax.Tuple2.mk (2 : i32) (3 : i32))); (res1 +? 
res2) example : Nat := 42 @[spec] def test_before_verbatime_single_line (x : u8) : RustM u8 := do (pure (42 : u8)) def multiline : Unit := () @[spec] def test_before_verbatim_multi_line (x : u8) : RustM u8 := do (pure (32 : u8)) def NULL_CHAR : Char := '\u0000' -- Test string literals with escape sequences @[spec] def string_escapes (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _empty : String := \"\"; let _plain : String := \"hello world\"; let _with_quotes : String := \"she said \\\"hello\\\"\"; let _with_single_quote : String := \"it\\'s fine\"; let _with_backslash : String := \"path\\\\to\\\\file\"; let _with_newline : String := \"line1\\nline2\"; let _with_tab : String := \"col1\\tcol2\"; let _with_carriage_return : String := \"before\\rafter\"; let _mixed : String := \"say \\\"hello\\\"\\nand\\t\\'goodbye\\'\\\\end\"; let _carriage_return : String := \"carriage\\rreturn\"; let _control_chars : String := \"null\\x00byte bell\\x07char font\\x1b[0mreset\"; (pure rust_primitives.hax.Tuple0.mk) end lean_tests namespace lean_tests.associated_types.basic class Iterable.AssociatedTypes (Self : Type) where Item : Type attribute [reducible] Iterable.AssociatedTypes.Item abbrev Iterable.Item := Iterable.AssociatedTypes.Item class Iterable (Self : Type) [associatedTypes : outParam (Iterable.AssociatedTypes (Self : Type))] where first (Self) : (Self -> RustM associatedTypes.Item) end lean_tests.associated_types.basic namespace lean_tests.associated_types.projection class T2.AssociatedTypes (Self : Type) where A2 : Type attribute [reducible] T2.AssociatedTypes.A2 abbrev T2.A2 := T2.AssociatedTypes.A2 class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where [trait_constr_A2_associated_type_i1 : T1.AssociatedTypes associatedTypes.A2] [trait_constr_A2_i1 : T1 associatedTypes.A2 ] f (Self) : (rust_primitives.hax.Tuple0 -> RustM (T1.A1 associatedTypes.A2)) end lean_tests.associated_types.projection namespace 
lean_tests.associated_types.multiple_associated_types class Pair.AssociatedTypes (Self : Type) where First : Type Second : Type attribute [reducible] Pair.AssociatedTypes.First attribute [reducible] Pair.AssociatedTypes.Second abbrev Pair.First := Pair.AssociatedTypes.First abbrev Pair.Second := Pair.AssociatedTypes.Second class Pair (Self : Type) [associatedTypes : outParam (Pair.AssociatedTypes (Self : Type))] where first (Self) : (Self -> RustM associatedTypes.First) second (Self) : (Self -> RustM associatedTypes.Second) end lean_tests.associated_types.multiple_associated_types namespace lean_tests.enums inductive MyList (T : Type) : Type | Nil : MyList (T : Type) | Cons (hd : T) (tl : (MyList T)) : MyList (T : Type) end lean_tests.enums namespace lean_tests.recursion @[spec] def factorial (n : u32) : RustM u32 := do if (← (n ==? (0 : u32))) then do (pure (1 : u32)) else do (n *? (← (factorial (← (n -? (1 : u32)))))) partial_fixpoint end lean_tests.recursion namespace lean_tests.traits.associated_types class T1.AssociatedTypes (Self : Type) where T : Type attribute [reducible] T1.AssociatedTypes.T abbrev T1.T := T1.AssociatedTypes.T class T1 (Self : Type) [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))] where f (Self) : (Self -> associatedTypes.T -> RustM associatedTypes.T) class T3.AssociatedTypes (Self : Type) where T : Type Tp : Type attribute [reducible] T3.AssociatedTypes.T attribute [reducible] T3.AssociatedTypes.Tp abbrev T3.T := T3.AssociatedTypes.T abbrev T3.Tp := T3.AssociatedTypes.Tp class T3 (Self : Type) [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))] where [trait_constr_T_associated_type_i1 : Bar.AssociatedTypes associatedTypes.T] [trait_constr_T_i1 : Bar associatedTypes.T ] (A : Type) [trait_constr_Tp_associated_type_i1 : Foo.AssociatedTypes associatedTypes.Tp associatedTypes.T] [trait_constr_Tp_i1 : Foo associatedTypes.Tp associatedTypes.T ] f (Self) (A : Type) [trait_constr_f_associated_type_i1 : 
Bar.AssociatedTypes A] [trait_constr_f_i1 : Bar A ] : (Self -> associatedTypes.T -> associatedTypes.Tp -> RustM usize) class Chain1.AssociatedTypes (Self : Type) where A : Type B : Type attribute [reducible] Chain1.AssociatedTypes.A attribute [reducible] Chain1.AssociatedTypes.B abbrev Chain1.A := Chain1.AssociatedTypes.A abbrev Chain1.B := Chain1.AssociatedTypes.B class Chain1 (Self : Type) [associatedTypes : outParam (Chain1.AssociatedTypes (Self : Type))] where [trait_constr_A_associated_type_i1 : Chain0.AssociatedTypes associatedTypes.A] [trait_constr_A_i1 : Chain0 associatedTypes.A ] [trait_constr_B_associated_type_i1 : Chain0.AssociatedTypes associatedTypes.B] [trait_constr_B_i1 : Chain0 associatedTypes.B ] end lean_tests.traits.associated_types namespace lean_tests.associated_types.basic @[spec] def just_the_first (I : Type) [trait_constr_just_the_first_associated_type_i0 : Iterable.AssociatedTypes I] [trait_constr_just_the_first_i0 : Iterable I ] (iter : I) : RustM (Iterable.Item I) := do (Iterable.first I iter) @[spec] def first_plus_1 (I : Type) [trait_constr_first_plus_1_associated_type_i0 : Iterable.AssociatedTypes I] [trait_constr_first_plus_1_i0 : Iterable I (associatedTypes := { show Iterable.AssociatedTypes I by infer_instance with Item := i32})] (iter : I) : RustM i32 := do ((← (Iterable.first I iter)) +? 
(1 : i32)) @[reducible] instance Impl.AssociatedTypes : Iterable.AssociatedTypes Bool where Item := i32 instance Impl : Iterable Bool where first := fun (self : Bool) => do (pure (3 : i32)) @[spec] def a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ ← (first_plus_1 Bool true); (pure rust_primitives.hax.Tuple0.mk) end lean_tests.associated_types.basic namespace lean_tests.associated_types.multiple_associated_types @[spec] def get_both (P : Type) [trait_constr_get_both_associated_type_i0 : Pair.AssociatedTypes P] [trait_constr_get_both_i0 : Pair P ] (pair : P) : RustM (rust_primitives.hax.Tuple2 (Pair.First P) (Pair.Second P)) := do (pure (rust_primitives.hax.Tuple2.mk (← (Pair.first P pair)) (← (Pair.second P pair)))) @[reducible] instance Impl.AssociatedTypes : Pair.AssociatedTypes (rust_primitives.hax.Tuple2 i32 Bool) where First := i32 Second := Bool instance Impl : Pair (rust_primitives.hax.Tuple2 i32 Bool) where first := fun (self : (rust_primitives.hax.Tuple2 i32 Bool)) => do (pure (rust_primitives.hax.Tuple2._0 self)) second := fun (self : (rust_primitives.hax.Tuple2 i32 Bool)) => do (pure (rust_primitives.hax.Tuple2._1 self)) @[spec] def b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let pair : (rust_primitives.hax.Tuple2 i32 Bool) := (rust_primitives.hax.Tuple2.mk (42 : i32) true); let both : (rust_primitives.hax.Tuple2 i32 Bool) ← (get_both (rust_primitives.hax.Tuple2 i32 Bool) pair); (pure rust_primitives.hax.Tuple0.mk) @[spec] def get_first_as_i32 (P : Type) [trait_constr_get_first_as_i32_associated_type_i0 : Pair.AssociatedTypes P] [trait_constr_get_first_as_i32_i0 : Pair P (associatedTypes := { show Pair.AssociatedTypes P by infer_instance with First := i32})] (pair : P) : RustM i32 := do (Pair.first P pair) end lean_tests.associated_types.multiple_associated_types namespace lean_tests.enums @[spec] def enums (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let e_v1 
: E := E.V1; let e_v2 : E := E.V2; let e_v3 : E := (E.V3 (23 : usize)); let e_v4 : E := (E.V4 (23 : usize) (12 : usize) (1 : usize)); let e_v5 : E := (E.V5 (f1 := (23 : usize)) (f2 := (43 : usize))); let e_v6 : E := (E.V6 (f1 := (12 : usize)) (f2 := (13 : usize))); let nil : (MyList usize) := MyList.Nil; let cons_1 : (MyList usize) := (MyList.Cons (hd := (1 : usize)) (tl := nil)); let cons_2_1 : (MyList usize) := (MyList.Cons (hd := (2 : usize)) (tl := cons_1)); match e_v1 with | (E.V1 ) => do (pure rust_primitives.hax.Tuple0.mk) | (E.V2 ) => do (pure rust_primitives.hax.Tuple0.mk) | (E.V3 _) => do (pure rust_primitives.hax.Tuple0.mk) | (E.V4 x1 x2 x3) => do let y1 : usize ← (x1 +? x2); let y2 : usize ← (y1 -? x2); let y3 : usize ← (y2 +? x3); (pure rust_primitives.hax.Tuple0.mk) | (E.V5 (f1 := f1) (f2 := f2)) => do (pure rust_primitives.hax.Tuple0.mk) | (E.V6 (f1 := f1) (f2 := other_name_for_f2)) => do (pure rust_primitives.hax.Tuple0.mk) end lean_tests.enums namespace lean_tests.traits.associated_types @[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where T := i32 instance Impl : T1 S where f := fun (self : S) (x : i32) => do (pure (2121 : i32)) class Chain2.AssociatedTypes (Self : Type) where [trait_constr_Chain2_i0 : Chain1.AssociatedTypes Self] attribute [instance_reducible, instance] Chain2.AssociatedTypes.trait_constr_Chain2_i0 class Chain2 (Self : Type) [associatedTypes : outParam (Chain2.AssociatedTypes (Self : Type))] where [trait_constr_Chain2_i0 : Chain1 Self] attribute [instance_reducible, instance] Chain2.trait_constr_Chain2_i0 class Chain3.AssociatedTypes (Self : Type) where [trait_constr_Chain3_i0 : Chain2.AssociatedTypes Self] attribute [instance_reducible, instance] Chain3.AssociatedTypes.trait_constr_Chain3_i0 class Chain3 (Self : Type) [associatedTypes : outParam (Chain3.AssociatedTypes (Self : Type))] where [trait_constr_Chain3_i0 : Chain2 Self] f (Self) : (rust_primitives.hax.Tuple0 -> RustM (Chain1.A Self)) attribute 
[instance_reducible, instance] Chain3.trait_constr_Chain3_i0 @[reducible] instance Impl_5.AssociatedTypes : Chain1.AssociatedTypes u8 where A := u8 B := u8 instance Impl_5 : Chain1 u8 where @[reducible] instance Impl_6.AssociatedTypes : Chain2.AssociatedTypes u8 where instance Impl_6 : Chain2 u8 where @[reducible] instance Impl_7.AssociatedTypes : Chain3.AssociatedTypes u8 where instance Impl_7 : Chain3 u8 where f := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u8)) class T2.AssociatedTypes (Self : Type) where T : Type attribute [reducible] T2.AssociatedTypes.T abbrev T2.T := T2.AssociatedTypes.T class T2 (Self : Type) [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))] where [trait_constr_T_associated_type_i1 : T1.AssociatedTypes associatedTypes.T] [trait_constr_T_i1 : T1 associatedTypes.T ] f (Self) : (Self -> associatedTypes.T -> RustM usize) @[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S where T := S instance Impl_1 : T2 S where f := fun (self : S) (x : S) => do (pure (21 : usize)) end lean_tests.traits.associated_types """ ================================================ FILE: test-harness/src/snapshots/toolchain__let-else into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: let-else manifest: let-else/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Let_else.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. 
(* NotImplementedYet *) Definition let_else (opt : t_Option ((t_u32))) : bool := run (match opt with | Option_Some (x) => ControlFlow_Continue ((true : bool)) | _ => ControlFlow_Break ((false : bool)) end). Definition let_else_different_type (opt : t_Option ((t_u32))) : bool := run (let hoist1 := match opt with | Option_Some (x) => ControlFlow_Continue (Option_Some (f_add (x) ((1 : t_u32)))) | _ => ControlFlow_Break ((false : bool)) end in ControlFlow_Continue (let_else (hoist1))). ''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Let_else.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__let-else into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: let-else manifest: let-else/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Let_else.fst" = ''' module Let_else #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let let_else (opt: Core_models.Option.t_Option u32) : bool = match opt <: Core_models.Option.t_Option u32 with | Core_models.Option.Option_Some x -> true | _ -> false let let_else_different_type (opt: Core_models.Option.t_Option u32) : bool = match opt <: Core_models.Option.t_Option u32 with | Core_models.Option.Option_Some x -> let_else (Core_models.Option.Option_Some (x +! 
mk_u32 1 <: u32) <: Core_models.Option.t_Option u32) | _ -> false ''' ================================================ FILE: test-harness/src/snapshots/toolchain__let-else into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: ssprove info: name: let-else manifest: let-else/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Let_else.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations let_else (opt : both (t_Option int32)) : both 'bool := let_else opt := run (matchb opt with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in ControlFlow_Continue (ret_both (true : 'bool)) | _ => ControlFlow_Break (ret_both (false : 'bool)) end) : both 'bool. Fail Next Obligation. 
Equations let_else_different_type (opt : both (t_Option int32)) : both 'bool := let_else_different_type opt := run (letm[choice_typeMonad.result_bind_code 'bool] hoist1 := matchb opt with | Option_Some_case x => letb x := ret_both ((x) : (int32)) in ControlFlow_Continue (Option_Some (x .+ (ret_both (1 : int32)))) | _ => ControlFlow_Break (ret_both (false : 'bool)) end in ControlFlow_Continue (let_else hoist1)) : both 'bool. Fail Next Obligation. ''' ================================================ FILE: test-harness/src/snapshots/toolchain__literals into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: literals manifest: literals/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Literals.v" = """ (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. 
(* NotImplementedYet *) Definition math_integers (x : t_Int) `{andb (f_gt (x) (impl_Int__e_unsafe_from_str ((\"0\"%string : string)))) (f_lt (x) (impl_Int__e_unsafe_from_str ((\"16\"%string : string)))) = true} : t_u8 := let _ : t_Int := f_lift ((3 : t_usize)) in let e_neg_dec := impl_Int__e_unsafe_from_str ((\"-340282366920938463463374607431768211455000\"%string : string)) in let e_pos_dec := impl_Int__e_unsafe_from_str ((\"340282366920938463463374607431768211455000\"%string : string)) in let e_neg_hex := impl_Int__e_unsafe_from_str ((\"-340282366920938463463374607431768211455000\"%string : string)) in let e_pos_hex := impl_Int__e_unsafe_from_str ((\"340282366920938463463374607431768211455000\"%string : string)) in let e_neg_octal := impl_Int__e_unsafe_from_str ((\"-340282366920938463463374607431768211455000\"%string : string)) in let e_pos_octal := impl_Int__e_unsafe_from_str ((\"340282366920938463463374607431768211455000\"%string : string)) in let e_neg_bin := impl_Int__e_unsafe_from_str ((\"-340282366920938463463374607431768211455000\"%string : string)) in let e_pos_bin := impl_Int__e_unsafe_from_str ((\"340282366920938463463374607431768211455000\"%string : string)) in let _ := f_gt (impl_Int__e_unsafe_from_str ((\"-340282366920938463463374607431768211455000\"%string : string))) (impl_Int__e_unsafe_from_str ((\"340282366920938463463374607431768211455000\"%string : string))) in let _ := f_lt (x) (x) in let _ := f_ge (x) (x) in let _ := f_le (x) (x) in let _ := f_ne (x) (x) in let _ := f_eq (x) (x) in let _ := f_add (x) (x) in let _ := f_sub (x) (x) in let _ := f_mul (x) (x) in let _ := f_div (x) (x) in let _ : t_i16 := impl_Int__to_i16 (x) in let _ : t_i32 := impl_Int__to_i32 (x) in let _ : t_i64 := impl_Int__to_i64 (x) in let _ : t_i128 := impl_Int__to_i128 (x) in let _ : t_isize := impl_Int__to_isize (x) in let _ : t_u16 := impl_Int__to_u16 (x) in let _ : t_u32 := impl_Int__to_u32 (x) in let _ : t_u64 := impl_Int__to_u64 (x) in let _ : t_u128 := 
impl_Int__to_u128 (x) in let _ : t_usize := impl_Int__to_usize (x) in impl_Int__to_u8 (f_add (x) (f_mul (x) (x))). Definition panic_with_msg '(_ : unit) : unit := never_to_any (panic_fmt (impl_1__new_const ([(\"with msg\"%string : string)]))). Record Foo_record : Type := { Foo_f_field : t_u8; }. #[export] Instance settable_Foo_record : Settable _ := settable! (Build_Foo_record) . Definition v_CONSTANT : t_Foo := Foo ((3 : t_u8)). Definition numeric '(_ : unit) : unit := let _ : t_usize := (123 : t_usize) in let _ : t_isize := (-42 : t_isize) in let _ : t_isize := (42 : t_isize) in let _ : t_i32 := (-42 : t_i32) in let _ : t_u128 := (22222222222222222222 : t_u128) in tt. Definition patterns '(_ : unit) : unit := let _ := match (1 : t_u8) with | 2 => tt | _ => tt end in let _ := match ((\"hello\"%string : string),((123 : t_i32),[(\"a\"%string : string); (\"b\"%string : string)])) with | (\"hello\"%string,(123,e_todo)) => tt | _ => tt end in let _ := match Foo ((4 : t_u8)) with | Foo (3) => tt | _ => tt end in tt. 
Definition casts (x8 : t_u8) (x16 : t_u16) (x32 : t_u32) (x64 : t_u64) (xs : t_usize) : unit := let _ : t_u64 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (x64)) (cast (xs)) in let _ : t_u32 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (x32)) (cast (x64))) (cast (xs)) in let _ : t_u16 := f_add (f_add (f_add (f_add (cast (x8)) (x16)) (cast (x32))) (cast (x64))) (cast (xs)) in let _ : t_u8 := f_add (f_add (f_add (f_add (x8) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in let _ : t_i64 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in let _ : t_i32 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in let _ : t_i16 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in let _ : t_i8 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in tt. Definition empty_array '(_ : unit) : unit := let _ : t_Slice t_u8 := unsize ([]) in tt. Definition fn_pointer_cast '(_ : unit) : unit := let f : t_u32 -> t_u32 := fun x => x in tt. Definition strings '(_ : unit) : unit := let _ : string := (\"hello\"%string : string) in let _ : string := (\"hello\"world\"%string : string) in let _ : string := (\"it's\"%string : string) in let _ : string := (\"back\\slash\"%string : string) in let _ : string := (\"line break\"%string : string) in let _ : string := (\"carriage\rreturn\"%string : string) in let _ : string := (\"tab\there\"%string : string) in let _ : string := (\"null\u0000byte\"%string : string) in let _ : string := (\"bell\u0007char\"%string : string) in let _ : string := (\"\u001B[0m\"%string : string) in let _ : string := (\"🦀\"%string : string) in tt. 
""" _CoqProject = ''' -R ./ TODO -arg -w -arg all Literals.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__literals into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: literals manifest: literals/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Literals.fst" = """ module Literals #set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\" open FStar.Mul open Core_models let math_integers (x: Hax_lib.Int.t_Int) : Prims.Pure u8 (requires x > (0 <: Hax_lib.Int.t_Int) && x < (16 <: Hax_lib.Int.t_Int)) (fun _ -> Prims.l_True) = let _:Hax_lib.Int.t_Int = Rust_primitives.Hax.Int.from_machine (mk_usize 3) in let e_neg_dec:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in let e_pos_dec:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in let e_neg_hex:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in let e_pos_hex:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in let e_neg_octal:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in let e_pos_octal:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in let e_neg_bin:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in let e_pos_bin:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in let _:bool = ((-340282366920938463463374607431768211455000) <: Hax_lib.Int.t_Int) > (340282366920938463463374607431768211455000 <: Hax_lib.Int.t_Int) in let _:bool = x < x in let _:bool = x >= x in let _:bool = x <= x in let _:bool = x <> x in let _:bool = x = x in let _:Hax_lib.Int.t_Int = x + x in let _:Hax_lib.Int.t_Int = x 
- x in let _:Hax_lib.Int.t_Int = x * x in let _:Hax_lib.Int.t_Int = x / x in let _:i16 = Hax_lib.Int.impl_Int__to_i16 x in let _:i32 = Hax_lib.Int.impl_Int__to_i32 x in let _:i64 = Hax_lib.Int.impl_Int__to_i64 x in let _:i128 = Hax_lib.Int.impl_Int__to_i128 x in let _:isize = Hax_lib.Int.impl_Int__to_isize x in let _:u16 = Hax_lib.Int.impl_Int__to_u16 x in let _:u32 = Hax_lib.Int.impl_Int__to_u32 x in let _:u64 = Hax_lib.Int.impl_Int__to_u64 x in let _:u128 = Hax_lib.Int.impl_Int__to_u128 x in let _:usize = Hax_lib.Int.impl_Int__to_usize x in Hax_lib.Int.impl_Int__to_u8 (x + (x * x <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) let panic_with_msg (_: Prims.unit) : Prims.unit = Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1) (let list = [\"with msg\"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: Core_models.Fmt.t_Arguments) <: Rust_primitives.Hax.t_Never) type t_Foo = { f_field:u8 } [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl': Core_models.Marker.t_StructuralPartialEq t_Foo unfold let impl = impl' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_1': Core_models.Cmp.t_PartialEq t_Foo t_Foo unfold let impl_1 = impl_1' [@@ FStar.Tactics.Typeclasses.tcinstance] assume val impl_2': Core_models.Cmp.t_Eq t_Foo unfold let impl_2 = impl_2' let v_CONSTANT: t_Foo = { f_field = mk_u8 3 } <: t_Foo let numeric (_: Prims.unit) : Prims.unit = let _:usize = mk_usize 123 in let _:isize = mk_isize (-42) in let _:isize = mk_isize 42 in let _:i32 = mk_i32 (-42) in let _:u128 = mk_u128 22222222222222222222 in () let patterns (_: Prims.unit) : Prims.unit = let _:Prims.unit = match mk_u8 1 <: u8 with | Rust_primitives.Integers.MkInt 2 -> () <: Prims.unit | _ -> () <: Prims.unit in let _:Prims.unit = match \"hello\", (mk_i32 123, (let list = [\"a\"; \"b\"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); 
Rust_primitives.Hax.array_of_list 2 list) <: (i32 & t_Array string (mk_usize 2))) <: (string & (i32 & t_Array string (mk_usize 2))) with | \"hello\", (Rust_primitives.Integers.MkInt 123, e_todo) -> () <: Prims.unit | _ -> () <: Prims.unit in let _:Prims.unit = match { f_field = mk_u8 4 } <: t_Foo with | { f_field = Rust_primitives.Integers.MkInt 3 } -> () <: Prims.unit | _ -> () <: Prims.unit in () let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = let _:u64 = ((((cast (x8 <: u8) <: u64) +! (cast (x16 <: u16) <: u64) <: u64) +! (cast (x32 <: u32) <: u64) <: u64) +! x64 <: u64) +! (cast (xs <: usize) <: u64) in let _:u32 = ((((cast (x8 <: u8) <: u32) +! (cast (x16 <: u16) <: u32) <: u32) +! x32 <: u32) +! (cast (x64 <: u64) <: u32) <: u32) +! (cast (xs <: usize) <: u32) in let _:u16 = ((((cast (x8 <: u8) <: u16) +! x16 <: u16) +! (cast (x32 <: u32) <: u16) <: u16) +! (cast (x64 <: u64) <: u16) <: u16) +! (cast (xs <: usize) <: u16) in let _:u8 = (((x8 +! (cast (x16 <: u16) <: u8) <: u8) +! (cast (x32 <: u32) <: u8) <: u8) +! (cast (x64 <: u64) <: u8) <: u8) +! (cast (xs <: usize) <: u8) in let _:i64 = ((((cast (x8 <: u8) <: i64) +! (cast (x16 <: u16) <: i64) <: i64) +! (cast (x32 <: u32) <: i64) <: i64) +! (cast (x64 <: u64) <: i64) <: i64) +! (cast (xs <: usize) <: i64) in let _:i32 = ((((cast (x8 <: u8) <: i32) +! (cast (x16 <: u16) <: i32) <: i32) +! (cast (x32 <: u32) <: i32) <: i32) +! (cast (x64 <: u64) <: i32) <: i32) +! (cast (xs <: usize) <: i32) in let _:i16 = ((((cast (x8 <: u8) <: i16) +! (cast (x16 <: u16) <: i16) <: i16) +! (cast (x32 <: u32) <: i16) <: i16) +! (cast (x64 <: u64) <: i16) <: i16) +! (cast (xs <: usize) <: i16) in let _:i8 = ((((cast (x8 <: u8) <: i8) +! (cast (x16 <: u16) <: i8) <: i8) +! (cast (x32 <: u32) <: i8) <: i8) +! (cast (x64 <: u64) <: i8) <: i8) +! 
(cast (xs <: usize) <: i8) in () let empty_array (_: Prims.unit) : Prims.unit = let _:t_Slice u8 = (let list:Prims.list u8 = [] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 0); Rust_primitives.Hax.array_of_list 0 list) <: t_Slice u8 in () /// https://github.com/hacspec/hax/issues/500 let fn_pointer_cast (_: Prims.unit) : Prims.unit = let (f: (u32 -> u32)): u32 -> u32 = fun x -> x in () let strings (_: Prims.unit) : Prims.unit = let _:string = \"hello\" in let _:string = \"hello\\\"world\" in let _:string = \"it's\" in let _:string = \"back\\slash\" in let _:string = \"line\\nbreak\" in let _:string = \"carriage\\rreturn\" in let _:string = \"tab\\there\" in let _:string = \"null\\0byte\" in let _:string = \"bell\u0007char\" in let _:string = \"\u001B[0m\" in let _:string = \"🦀\" in () """ ================================================ FILE: test-harness/src/snapshots/toolchain__literals into-lean.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: lean info: name: literals manifest: literals/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "literals.lean" = ''' -- Experimental lean backend for Hax -- The Hax prelude library can be found in hax/proof-libs/lean import Hax import Std.Tactic.Do import Std.Do.Triple import Std.Tactic.Do.Syntax open Std.Do open Std.Tactic set_option mvcgen.warning false set_option linter.unusedVariables false namespace literals def math_integers (x : hax_lib.int.Int) : RustM u8 := do let _ : hax_lib.int.Int ← (rust_primitives.hax.int.from_machine (3 : usize)); let _neg_dec : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "-340282366920938463463374607431768211455000"); let 
_pos_dec : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "340282366920938463463374607431768211455000"); let _neg_hex : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "-340282366920938463463374607431768211455000"); let _pos_hex : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "340282366920938463463374607431768211455000"); let _neg_octal : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "-340282366920938463463374607431768211455000"); let _pos_octal : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "340282366920938463463374607431768211455000"); let _neg_bin : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "-340282366920938463463374607431768211455000"); let _pos_bin : hax_lib.int.Int ← (hax_lib.int.Impl_7._unsafe_from_str "340282366920938463463374607431768211455000"); let _ ← (rust_primitives.hax.int.gt (← (hax_lib.int.Impl_7._unsafe_from_str "-340282366920938463463374607431768211455000")) (← (hax_lib.int.Impl_7._unsafe_from_str "340282366920938463463374607431768211455000"))); let _ ← (rust_primitives.hax.int.lt x x); let _ ← (rust_primitives.hax.int.ge x x); let _ ← (rust_primitives.hax.int.le x x); let _ ← (rust_primitives.hax.int.ne x x); let _ ← (rust_primitives.hax.int.eq x x); let _ ← (rust_primitives.hax.int.add x x); let _ ← (rust_primitives.hax.int.sub x x); let _ ← (rust_primitives.hax.int.mul x x); let _ ← (rust_primitives.hax.int.div x x); let _ : i16 ← (hax_lib.int.Impl_55.to_i16 x); let _ : i32 ← (hax_lib.int.Impl_57.to_i32 x); let _ : i64 ← (hax_lib.int.Impl_59.to_i64 x); let _ : i128 ← (hax_lib.int.Impl_61.to_i128 x); let _ : isize ← (hax_lib.int.Impl_63.to_isize x); let _ : u16 ← (hax_lib.int.Impl_43.to_u16 x); let _ : u32 ← (hax_lib.int.Impl_45.to_u32 x); let _ : u64 ← (hax_lib.int.Impl_47.to_u64 x); let _ : u128 ← (hax_lib.int.Impl_49.to_u128 x); let _ : usize ← (hax_lib.int.Impl_51.to_usize x); (hax_lib.int.Impl_41.to_u8 (← (rust_primitives.hax.int.add x (← (rust_primitives.hax.int.mul x x))))) 
set_option hax_mvcgen.specset "bv" in @[hax_spec] def math_integers.spec (x : hax_lib.int.Int) : Spec (requires := do ((← (rust_primitives.hax.int.gt x (← (hax_lib.int.Impl_7._unsafe_from_str "0")))) &&? (← (rust_primitives.hax.int.lt x (← (hax_lib.int.Impl_7._unsafe_from_str "16")))))) (ensures := fun _ => pure True) (math_integers (x : hax_lib.int.Int)) := { pureRequires := by hax_construct_pure <;> bv_decide pureEnsures := by hax_construct_pure <;> bv_decide contract := by hax_mvcgen [math_integers] <;> bv_decide } @[spec] def panic_with_msg (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do (rust_primitives.hax.never_to_any (← (core_models.panicking.panic_fmt (← (core_models.fmt.rt.Impl_1.new_const ((1 : usize)) (RustArray.ofVec #v["with msg"])))))) structure Foo where field : u8 @[instance] opaque Impl.AssociatedTypes : core_models.marker.StructuralPartialEq.AssociatedTypes Foo := by constructor <;> exact Inhabited.default @[instance] opaque Impl : core_models.marker.StructuralPartialEq Foo := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1.AssociatedTypes : core_models.cmp.PartialEq.AssociatedTypes Foo Foo := by constructor <;> exact Inhabited.default @[instance] opaque Impl_1 : core_models.cmp.PartialEq Foo Foo := by constructor <;> exact Inhabited.default @[instance] opaque Impl_2.AssociatedTypes : core_models.cmp.Eq.AssociatedTypes Foo := by constructor <;> exact Inhabited.default @[instance] opaque Impl_2 : core_models.cmp.Eq Foo := by constructor <;> exact Inhabited.default def CONSTANT : Foo := RustM.of_isOk (do (pure (Foo.mk (field := (3 : u8))))) (by rfl) @[spec] def numeric (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ : usize := (123 : usize); let _ : isize := (-42 : isize); let _ : isize := (42 : isize); let _ : i32 := (-42 : i32); let _ : u128 := (22222222222222222222 : u128); (pure rust_primitives.hax.Tuple0.mk) @[spec] def patterns (_ : rust_primitives.hax.Tuple0) : 
RustM rust_primitives.hax.Tuple0 := do let _ ← match (1 : u8) with | 2 => do (pure rust_primitives.hax.Tuple0.mk) | _ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match (rust_primitives.hax.Tuple2.mk "hello" (rust_primitives.hax.Tuple2.mk (123 : i32) (RustArray.ofVec #v["a", "b"]))) with | ⟨"hello", ⟨123, _todo⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk) | _ => do (pure rust_primitives.hax.Tuple0.mk); let _ ← match (Foo.mk (field := (4 : u8))) with | {field := 3} => do (pure rust_primitives.hax.Tuple0.mk) | _ => do (pure rust_primitives.hax.Tuple0.mk); (pure rust_primitives.hax.Tuple0.mk) @[spec] def casts (x8 : u8) (x16 : u16) (x32 : u32) (x64 : u64) (xs : usize) : RustM rust_primitives.hax.Tuple0 := do let _ : u64 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u64)) +? (← (rust_primitives.hax.cast_op x16 : RustM u64)))) +? (← (rust_primitives.hax.cast_op x32 : RustM u64)))) +? x64)) +? (← (rust_primitives.hax.cast_op xs : RustM u64))); let _ : u32 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u32)) +? (← (rust_primitives.hax.cast_op x16 : RustM u32)))) +? x32)) +? (← (rust_primitives.hax.cast_op x64 : RustM u32)))) +? (← (rust_primitives.hax.cast_op xs : RustM u32))); let _ : u16 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u16)) +? x16)) +? (← (rust_primitives.hax.cast_op x32 : RustM u16)))) +? (← (rust_primitives.hax.cast_op x64 : RustM u16)))) +? (← (rust_primitives.hax.cast_op xs : RustM u16))); let _ : u8 ← ((← ((← ((← (x8 +? (← (rust_primitives.hax.cast_op x16 : RustM u8)))) +? (← (rust_primitives.hax.cast_op x32 : RustM u8)))) +? (← (rust_primitives.hax.cast_op x64 : RustM u8)))) +? (← (rust_primitives.hax.cast_op xs : RustM u8))); let _ : i64 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i64)) +? (← (rust_primitives.hax.cast_op x16 : RustM i64)))) +? (← (rust_primitives.hax.cast_op x32 : RustM i64)))) +? (← (rust_primitives.hax.cast_op x64 : RustM i64)))) +? 
(← (rust_primitives.hax.cast_op xs : RustM i64))); let _ : i32 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i32)) +? (← (rust_primitives.hax.cast_op x16 : RustM i32)))) +? (← (rust_primitives.hax.cast_op x32 : RustM i32)))) +? (← (rust_primitives.hax.cast_op x64 : RustM i32)))) +? (← (rust_primitives.hax.cast_op xs : RustM i32))); let _ : i16 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i16)) +? (← (rust_primitives.hax.cast_op x16 : RustM i16)))) +? (← (rust_primitives.hax.cast_op x32 : RustM i16)))) +? (← (rust_primitives.hax.cast_op x64 : RustM i16)))) +? (← (rust_primitives.hax.cast_op xs : RustM i16))); let _ : i8 ← ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i8)) +? (← (rust_primitives.hax.cast_op x16 : RustM i8)))) +? (← (rust_primitives.hax.cast_op x32 : RustM i8)))) +? (← (rust_primitives.hax.cast_op x64 : RustM i8)))) +? (← (rust_primitives.hax.cast_op xs : RustM i8))); (pure rust_primitives.hax.Tuple0.mk) @[spec] def empty_array (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ : (RustSlice u8) ← (rust_primitives.unsize (RustArray.ofVec #v[])); (pure rust_primitives.hax.Tuple0.mk) -- https://github.com/hacspec/hax/issues/500 @[spec] def fn_pointer_cast (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let f : (u32 -> RustM u32) := (fun x => (do (pure x) : RustM u32)); (pure rust_primitives.hax.Tuple0.mk) @[spec] def strings (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do let _ : String := "hello"; let _ : String := "hello\"world"; let _ : String := "it\'s"; let _ : String := "back\\slash"; let _ : String := "line\nbreak"; let _ : String := "carriage\rreturn"; let _ : String := "tab\there"; let _ : String := "null\x00byte"; let _ : String := "bell\x07char"; let _ : String := "\x1b[0m"; let _ : String := "🦀"; (pure rust_primitives.hax.Tuple0.mk) end literals ''' ================================================ FILE: 
test-harness/src/snapshots/toolchain__loops into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: loops manifest: loops/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Loops.And_mut_side_effect_loop.fst" = ''' module Loops.And_mut_side_effect_loop #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let looping (array: t_Array u8 (mk_usize 5)) : t_Array u8 (mk_usize 5) = let array:t_Array u8 (mk_usize 5) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (Core_models.Slice.impl__len #u8 (array <: t_Slice u8) <: usize) (fun array temp_1_ -> let array:t_Array u8 (mk_usize 5) = array in let _:usize = temp_1_ in true) array (fun array i -> let array:t_Array u8 (mk_usize 5) = array in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array i (cast (i <: usize) <: u8) <: t_Array u8 (mk_usize 5)) in array let looping_2_ (array: t_Array u8 (mk_usize 5)) : t_Array u8 (mk_usize 5) = let (array: t_Array u8 (mk_usize 5)), (result: Prims.unit) = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (Core_models.Slice.impl__len #u8 (array <: t_Slice u8) <: usize) (fun array temp_1_ -> let array:t_Array u8 (mk_usize 5) = array in let _:usize = temp_1_ in true) array (fun array i -> let array:t_Array u8 (mk_usize 5) = array in let i:usize = i in Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array i (cast (i <: usize) <: u8) <: t_Array u8 (mk_usize 5)), () <: (t_Array u8 (mk_usize 5) & Prims.unit) in let _:Prims.unit = admit () (* Panic freedom *) in let _:Prims.unit = result in array ''' "Loops.Control_flow.fst" = ''' module Loops.Control_flow #set-options 
"--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let double_sum (_: Prims.unit) : i32 = let sum:i32 = mk_i32 0 in let sum:i32 = Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1) (mk_i32 10) (fun sum temp_1_ -> let sum:i32 = sum in let _:i32 = temp_1_ in true) sum (fun sum i -> let sum:i32 = sum in let i:i32 = i in if i <. mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) in sum *! mk_i32 2 let double_sum2 (_: Prims.unit) : i32 = let sum:i32 = mk_i32 0 in let sum2:i32 = mk_i32 0 in let (sum: i32), (sum2: i32) = Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1) (mk_i32 10) (fun temp_0_ temp_1_ -> let (sum: i32), (sum2: i32) = temp_0_ in let _:i32 = temp_1_ in true) (sum, sum2 <: (i32 & i32)) (fun temp_0_ i -> let (sum: i32), (sum2: i32) = temp_0_ in let i:i32 = i in if i <. mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break ((), (sum, sum2 <: (i32 & i32)) <: (Prims.unit & (i32 & i32))) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32) else let sum:i32 = sum +! i in Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32)) in sum +! sum2 let double_sum_return (v: t_Slice i32) : i32 = let sum:i32 = mk_i32 0 in match Rust_primitives.Hax.Folds.fold_return (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32 ) #FStar.Tactics.Typeclasses.solve v <: Core_models.Slice.Iter.t_Iter i32) sum (fun sum i -> let sum:i32 = sum in let i:i32 = i in if i <. 
mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0) <: Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow i32 i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue sum -> sum *! mk_i32 2 let double_sum2_return (v: t_Slice i32) : i32 = let sum:i32 = mk_i32 0 in let sum2:i32 = mk_i32 0 in match Rust_primitives.Hax.Folds.fold_return (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32 ) #FStar.Tactics.Typeclasses.solve v <: Core_models.Slice.Iter.t_Iter i32) (sum, sum2 <: (i32 & i32)) (fun temp_0_ i -> let (sum: i32), (sum2: i32) = temp_0_ in let i:i32 = i in if i <. mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0) <: Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) (i32 & i32) else let sum:i32 = sum +! i in Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) (i32 & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow i32 (i32 & i32) with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2) -> sum +! 
sum2 let bigger_power_2_ (x: i32) : i32 = let pow:i32 = mk_i32 1 in Rust_primitives.Hax.while_loop_cf (fun pow -> let pow:i32 = pow in true) (fun pow -> let pow:i32 = pow in pow <. mk_i32 1000000 <: bool) (fun pow -> let pow:i32 = pow in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) pow (fun pow -> let pow:i32 = pow in let pow:i32 = pow *! mk_i32 2 in if pow <. x then let pow:i32 = pow *! mk_i32 3 in if true then Core_models.Ops.Control_flow.ControlFlow_Break ((), pow <: (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (pow *! mk_i32 2) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (pow *! mk_i32 2) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) type t_M = { f_m:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } let impl_M__decoded_message (self: t_M) : Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = match Rust_primitives.Hax.Folds.fold_range_return (mk_usize 0) (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global self.f_m <: usize) (fun temp_0_ temp_1_ -> let _:Prims.unit = temp_0_ in let _:usize = temp_1_ in true) () (fun temp_0_ i -> let _:Prims.unit = temp_0_ in let i:usize = i in if i >. 
mk_usize 5 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Option.Option_None <: Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) (Prims.unit & Prims.unit)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) (Prims.unit & Prims.unit)) Prims.unit else Core_models.Ops.Control_flow.ControlFlow_Continue () <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) (Prims.unit & Prims.unit)) Prims.unit) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) Prims.unit with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue _ -> Core_models.Option.Option_Some (Core_models.Clone.f_clone #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) #FStar.Tactics.Typeclasses.solve self.f_m) <: Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) let nested (_: Prims.unit) : i32 = let sum:i32 = mk_i32 0 in let sum:i32 = Rust_primitives.Hax.Folds.fold_range (mk_i32 1) (mk_i32 10) (fun sum temp_1_ -> let sum:i32 = sum in let _:i32 = temp_1_ in true) sum (fun sum i -> let sum:i32 = sum in let i:i32 = i in let sum:i32 = Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1) (mk_i32 10) (fun sum temp_1_ -> let sum:i32 = sum in let _:i32 = temp_1_ in true) sum (fun sum j -> let sum:i32 = sum in let j:i32 = j in if j <. 
mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! j <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) in sum +! i) in sum *! mk_i32 2 let nested_return (_: Prims.unit) : i32 = let sum:i32 = mk_i32 0 in match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 1) (mk_i32 10) (fun sum temp_1_ -> let sum:i32 = sum in let _:i32 = temp_1_ in true) sum (fun sum i -> let sum:i32 = sum in let i:i32 = i in match Rust_primitives.Hax.Folds.fold_range_return (mk_i32 1) (mk_i32 10) (fun sum temp_1_ -> let sum:i32 = sum in let _:i32 = temp_1_ in true) sum (fun sum j -> let sum:i32 = sum in let j:i32 = j in if j <. mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0) <: Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! j <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow i32 i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break ret <: Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 | Core_models.Ops.Control_flow.ControlFlow_Continue sum -> Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! 
i <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) <: Core_models.Ops.Control_flow.t_ControlFlow i32 i32 with | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret | Core_models.Ops.Control_flow.ControlFlow_Continue sum -> sum *! mk_i32 2 let continue_only (x: t_Slice i32) : (i32 & Prims.unit) = let product:i32 = mk_i32 1 in Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32 ) #FStar.Tactics.Typeclasses.solve x <: Core_models.Slice.Iter.t_Iter i32) product (fun product i -> let product:i32 = product in let i:i32 = i in if i =. mk_i32 0 <: bool then product else Core_models.Ops.Arith.f_mul_assign #i32 #i32 #FStar.Tactics.Typeclasses.solve product i <: i32), () <: (i32 & Prims.unit) let continue_and_break (x: t_Slice i32) : (i32 & Prims.unit) = let product:i32 = mk_i32 1 in Rust_primitives.Hax.Folds.fold_cf (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32) #FStar.Tactics.Typeclasses.solve x <: Core_models.Slice.Iter.t_Iter i32) product (fun product i -> let product:i32 = product in let i:i32 = i in if i =. mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Continue product <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else if i <. 
mk_i32 0 <: bool then Core_models.Ops.Control_flow.ControlFlow_Break ((), product <: (Prims.unit & i32)) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 else Core_models.Ops.Control_flow.ControlFlow_Continue (Core_models.Ops.Arith.f_mul_assign #i32 #i32 #FStar.Tactics.Typeclasses.solve product i <: i32) <: Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32), () <: (i32 & Prims.unit) ''' "Loops.For_loops.fst" = ''' module Loops.For_loops #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let range1 (_: Prims.unit) : usize = let acc:usize = mk_usize 0 in let acc:usize = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 15) (fun acc temp_1_ -> let acc:usize = acc in let _:usize = temp_1_ in true) acc (fun acc i -> let acc:usize = acc in let i:usize = i in acc +! i <: usize) in acc let range2 (n: usize) : usize = let acc:usize = mk_usize 0 in let acc:usize = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (n +! mk_usize 10 <: usize) (fun acc temp_1_ -> let acc:usize = acc in let _:usize = temp_1_ in true) acc (fun acc i -> let acc:usize = acc in let i:usize = i in (acc +! i <: usize) +! mk_usize 1 <: usize) in acc let composed_range (n: usize) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Chain.t_Chain (Core_models.Ops.Range.t_Range usize) (Core_models.Ops.Range.t_Range usize)) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Iterator.f_chain #(Core_models.Ops.Range.t_Range usize) #FStar.Tactics.Typeclasses.solve #(Core_models.Ops.Range.t_Range usize) ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = n } <: Core_models.Ops.Range.t_Range usize) ({ Core_models.Ops.Range.f_start = n +! mk_usize 10 <: usize; Core_models.Ops.Range.f_end = n +! 
mk_usize 50 <: usize } <: Core_models.Ops.Range.t_Range usize) <: Core_models.Iter.Adapters.Chain.t_Chain (Core_models.Ops.Range.t_Range usize) (Core_models.Ops.Range.t_Range usize)) <: Core_models.Iter.Adapters.Chain.t_Chain (Core_models.Ops.Range.t_Range usize) (Core_models.Ops.Range.t_Range usize)) acc (fun acc i -> let acc:usize = acc in let i:usize = i in (acc +! i <: usize) +! mk_usize 1 <: usize) in acc let rev_range (n: usize) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Iterator.f_rev #(Core_models.Ops.Range.t_Range usize) #FStar.Tactics.Typeclasses.solve ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = n } <: Core_models.Ops.Range.t_Range usize) <: Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) <: Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) acc (fun acc i -> let acc:usize = acc in let i:usize = i in (acc +! i <: usize) +! 
mk_usize 1 <: usize) in acc let chunks (v_CHUNK_LEN: usize) (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize = let acc:usize = mk_usize 0 in let chunks:Core_models.Slice.Iter.t_ChunksExact usize = Core_models.Slice.impl__chunks_exact #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize) v_CHUNK_LEN in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_ChunksExact usize) #FStar.Tactics.Typeclasses.solve (Core_models.Clone.f_clone #(Core_models.Slice.Iter.t_ChunksExact usize) #FStar.Tactics.Typeclasses.solve chunks <: Core_models.Slice.Iter.t_ChunksExact usize) <: Core_models.Slice.Iter.t_ChunksExact usize) acc (fun acc chunk -> let acc:usize = acc in let chunk:t_Slice usize = chunk in let mean:usize = mk_usize 0 in let mean:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice usize) #FStar.Tactics.Typeclasses.solve chunk <: Core_models.Slice.Iter.t_Iter usize) mean (fun mean item -> let mean:usize = mean in let item:usize = item in mean +! item <: usize) in let acc:usize = acc +! (mean /! v_CHUNK_LEN <: usize) in acc) in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice usize) #FStar.Tactics.Typeclasses.solve (Core_models.Slice.Iter.impl_88__remainder #usize chunks <: t_Slice usize) <: Core_models.Slice.Iter.t_Iter usize) acc (fun acc item -> let acc:usize = acc in let item:usize = item in acc -! 
item <: usize) in acc let iterator (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_Iter usize) #FStar.Tactics.Typeclasses.solve (Core_models.Slice.impl__iter #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize) <: Core_models.Slice.Iter.t_Iter usize) <: Core_models.Slice.Iter.t_Iter usize) acc (fun acc item -> let acc:usize = acc in let item:usize = item in acc +! item <: usize) in acc let nested (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_Iter usize) #FStar.Tactics.Typeclasses.solve (Core_models.Slice.impl__iter #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize) <: Core_models.Slice.Iter.t_Iter usize) <: Core_models.Slice.Iter.t_Iter usize) acc (fun acc item -> let acc:usize = acc in let item:usize = item in Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Iterator.f_rev #(Core_models.Ops.Range.t_Range usize) #FStar.Tactics.Typeclasses.solve ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = item } <: Core_models.Ops.Range.t_Range usize) <: Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) <: Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize)) acc (fun acc i -> let acc:usize = acc in let i:usize = i in let acc:usize = acc +! 
mk_usize 1 in Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize) (Core_models.Ops.Range.t_Range usize)) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Iterator.f_zip #(Core_models.Slice.Iter.t_Iter usize) #FStar.Tactics.Typeclasses.solve #(Core_models.Ops.Range.t_Range usize) (Core_models.Slice.impl__iter #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize) <: Core_models.Slice.Iter.t_Iter usize) ({ Core_models.Ops.Range.f_start = mk_usize 4; Core_models.Ops.Range.f_end = i } <: Core_models.Ops.Range.t_Range usize) <: Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize) (Core_models.Ops.Range.t_Range usize)) <: Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize) (Core_models.Ops.Range.t_Range usize)) acc (fun acc j -> let acc:usize = acc in let j:(usize & usize) = j in (((acc +! item <: usize) +! i <: usize) +! j._1 <: usize) +! j._2 <: usize)) <: usize) in acc let pattern (arr: Alloc.Vec.t_Vec (usize & usize) Alloc.Alloc.t_Global) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Alloc.Vec.t_Vec (usize & usize) Alloc.Alloc.t_Global) #FStar.Tactics.Typeclasses.solve arr <: Alloc.Vec.Into_iter.t_IntoIter (usize & usize) Alloc.Alloc.t_Global) acc (fun acc temp_1_ -> let acc:usize = acc in let (x: usize), (y: usize) = temp_1_ in acc +! (x *! 
y <: usize) <: usize) in acc let enumerate_chunks (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize = let acc:usize = mk_usize 0 in let acc:usize = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Enumerate.t_Enumerate (Core_models.Slice.Iter.t_Chunks usize)) #FStar.Tactics.Typeclasses.solve (Core_models.Iter.Traits.Iterator.f_enumerate #(Core_models.Slice.Iter.t_Chunks usize) #FStar.Tactics.Typeclasses.solve (Core_models.Slice.impl__chunks #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize) (mk_usize 4) <: Core_models.Slice.Iter.t_Chunks usize) <: Core_models.Iter.Adapters.Enumerate.t_Enumerate (Core_models.Slice.Iter.t_Chunks usize)) <: Core_models.Iter.Adapters.Enumerate.t_Enumerate (Core_models.Slice.Iter.t_Chunks usize)) acc (fun acc temp_1_ -> let acc:usize = acc in let (i: usize), (chunk: t_Slice usize) = temp_1_ in Rust_primitives.Hax.Folds.fold_enumerated_slice chunk (fun acc temp_1_ -> let acc:usize = acc in let _:usize = temp_1_ in true) acc (fun acc temp_1_ -> let acc:usize = acc in let (j: usize), (x: usize) = temp_1_ in (i +! j <: usize) +! x <: usize) <: usize) in acc let bool_returning (x: u8) : bool = x <. mk_u8 10 let f (_: Prims.unit) : (u8 & Prims.unit) = let acc:u8 = mk_u8 0 in Rust_primitives.Hax.Folds.fold_range (mk_u8 1) (mk_u8 10) (fun acc temp_1_ -> let acc:u8 = acc in let _:u8 = temp_1_ in true) acc (fun acc i -> let acc:u8 = acc in let i:u8 = i in let acc:u8 = acc +! i in let _:bool = bool_returning i in acc), () <: (u8 & Prims.unit) ''' "Loops.Recognized_loops.fst" = ''' module Loops.Recognized_loops #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let range (_: Prims.unit) : (u64 & Prims.unit) = let count:u64 = mk_u64 0 in Rust_primitives.Hax.Folds.fold_range (mk_u8 0) (mk_u8 10) (fun count i -> let count:u64 = count in let i:u8 = i in i <=. 
mk_u8 10 <: bool) count (fun count i -> let count:u64 = count in let i:u8 = i in let count:u64 = count +! mk_u64 1 in count), () <: (u64 & Prims.unit) let range_step_by (_: Prims.unit) : (u64 & Prims.unit) = let count:u64 = mk_u64 0 in Rust_primitives.Hax.Folds.fold_range_step_by (mk_u8 0) (mk_u8 10) (mk_usize 2) (fun count i -> let count:u64 = count in let i:u8 = i in i <=. mk_u8 10 <: bool) count (fun count i -> let count:u64 = count in let i:u8 = i in let count:u64 = count +! mk_u64 1 in count), () <: (u64 & Prims.unit) let enumerated_slice (#v_T: Type0) (slice: t_Slice v_T) : (u64 & Prims.unit) = let count:u64 = mk_u64 0 in Rust_primitives.Hax.Folds.fold_enumerated_slice slice (fun count i -> let count:u64 = count in let i:usize = i in i <=. mk_usize 10 <: bool) count (fun count i -> let count:u64 = count in let i:(usize & v_T) = i in let count:u64 = count +! mk_u64 2 in count), () <: (u64 & Prims.unit) let enumerated_chunked_slice (#v_T: Type0) (slice: t_Slice v_T) : (u64 & Prims.unit) = let count:u64 = mk_u64 0 in Rust_primitives.Hax.Folds.fold_enumerated_chunked_slice (mk_usize 3) slice (fun count i -> let count:u64 = count in let i:usize = i in i <= Core_models.Slice.impl__len #v_T slice) count (fun count i -> let count:u64 = count in let i:(usize & t_Slice v_T) = i in let count:u64 = count +! mk_u64 3 in count), () <: (u64 & Prims.unit) ''' "Loops.While_loops.fst" = ''' module Loops.While_loops #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let f (_: Prims.unit) : u8 = let x:u8 = mk_u8 0 in let x:u8 = Rust_primitives.Hax.while_loop (fun x -> let x:u8 = x in true) (fun x -> let x:u8 = x in x <. mk_u8 10 <: bool) (fun x -> let x:u8 = x in Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int) x (fun x -> let x:u8 = x in let x:u8 = x +! mk_u8 3 in x) in x +! 
mk_u8 12 let while_invariant_decr (_: Prims.unit) : u8 = let x:u8 = mk_u8 0 in let x:u8 = Rust_primitives.Hax.while_loop (fun x -> let x:u8 = x in b2t (x <=. mk_u8 10 <: bool)) (fun x -> let x:u8 = x in x <. mk_u8 10 <: bool) (fun x -> let x:u8 = x in Rust_primitives.Hax.Int.from_machine (mk_u8 10 -! x <: u8) <: Hax_lib.Int.t_Int) x (fun x -> let x:u8 = x in let x:u8 = x +! mk_u8 3 in x) in x +! mk_u8 12 let while_invariant_decr_rev (_: Prims.unit) : u8 = let x:u8 = mk_u8 0 in let x:u8 = Rust_primitives.Hax.while_loop (fun x -> let x:u8 = x in b2t (x <=. mk_u8 10 <: bool)) (fun x -> let x:u8 = x in x <. mk_u8 10 <: bool) (fun x -> let x:u8 = x in Rust_primitives.Hax.Int.from_machine (mk_u8 10 -! x <: u8) <: Hax_lib.Int.t_Int) x (fun x -> let x:u8 = x in let x:u8 = x +! mk_u8 3 in x) in x +! mk_u8 12 ''' ================================================ FILE: test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: mut-ref-functionalization manifest: mut-ref-functionalization/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Mut_ref_functionalization.fst" = ''' module Mut_ref_functionalization #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_S = { f_b:t_Array u8 (mk_usize 5) } let foo (lhs rhs: t_S) : t_S = let lhs:t_S = Rust_primitives.Hax.Folds.fold_range (mk_usize 0) (mk_usize 1) (fun lhs temp_1_ -> let lhs:t_S = lhs in let _:usize = temp_1_ in true) lhs (fun lhs i -> let lhs:t_S = lhs in let i:usize = i in { lhs with f_b = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize lhs.f_b i ((lhs.f_b.[ i ] <: u8) +! 
(rhs.f_b.[ i ] <: u8) <: u8) <: t_Array u8 (mk_usize 5) } <: t_S) in lhs let impl_S__update (self: t_S) (x: u8) : t_S = let self:t_S = { self with f_b = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize self.f_b (mk_usize 0) x } <: t_S in self let index_mutation (x: Core_models.Ops.Range.t_Range usize) (a: t_Slice u8) : Prims.unit = let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #u8 #Alloc.Alloc.t_Global ((let list = [mk_u8 1] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) <: t_Slice u8) in let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_range (Alloc.Vec.impl_1__as_slice v <: t_Slice u8) x (Core_models.Slice.impl__copy_from_slice #u8 (v.[ x ] <: t_Slice u8) a <: t_Slice u8) <: t_Slice u8) in let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice v <: t_Slice u8) (mk_usize 1) (mk_u8 3) <: t_Slice u8) in () let index_mutation_unsize (x: t_Array u8 (mk_usize 12)) : u8 = let x:t_Array u8 (mk_usize 12) = Rust_primitives.Hax.Monomorphized_update_at.update_at_range x ({ Core_models.Ops.Range.f_start = mk_usize 4; Core_models.Ops.Range.f_end = mk_usize 5 } <: Core_models.Ops.Range.t_Range usize) (Core_models.Slice.impl__copy_from_slice #u8 (x.[ { Core_models.Ops.Range.f_start = mk_usize 4; Core_models.Ops.Range.f_end = mk_usize 5 } <: Core_models.Ops.Range.t_Range usize ] <: t_Slice u8) ((let list = [mk_u8 1; mk_u8 2] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) <: t_Slice u8) <: t_Slice u8) in mk_u8 42 let build_vec (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #u8 #Alloc.Alloc.t_Global ((let list = [mk_u8 1; mk_u8 2; mk_u8 3] in FStar.Pervasives.assert_norm (Prims.eq2 
(List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) <: t_Slice u8) let test_append (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in let vec2:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #u8 #Alloc.Alloc.t_Global ((let list = [mk_u8 1; mk_u8 2; mk_u8 3] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) <: t_Slice u8) in let (tmp0: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global), (tmp1: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = Alloc.Vec.impl_1__append #u8 #Alloc.Alloc.t_Global vec1 vec2 in let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp0 in let vec2:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp1 in let _:Prims.unit = () in let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_1__append #u8 #Alloc.Alloc.t_Global vec1 (build_vec () <: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) in vec1 let f (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global vec (mk_u8 1) in let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global vec (mk_u8 2) in let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8 (Alloc.Vec.impl_1__as_slice vec <: t_Slice u8) (mk_usize 0) (mk_usize 1) <: t_Slice u8) in let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8 (Alloc.Vec.impl_1__as_slice vec <: t_Slice u8) (mk_usize 0) (mk_usize 1) <: t_Slice u8) in vec type t_Foo = { f_field:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } type t_Pair (v_T: Type0) = { f_a:v_T; f_b:t_Foo } let g (x: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = 
let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = Rust_primitives.Hax.Folds.fold_range (mk_u8 1) (mk_u8 10) (fun x temp_1_ -> let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in let _:u8 = temp_1_ in true) x (fun x i -> let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in let i:u8 = i in { x with f_a = Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global x.f_a i <: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } <: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) in let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = { x with f_a = Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8 (Alloc.Vec.impl_1__as_slice x.f_a <: t_Slice u8) (mk_usize 0) (mk_usize 1) <: t_Slice u8) } <: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) in let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = { x with f_b = { x.f_b with f_field = Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8 (Alloc.Vec.impl_1__as_slice x.f_b.f_field <: t_Slice u8) (mk_usize 0) (mk_usize 1) <: t_Slice u8) } <: t_Foo } <: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) in x.f_a let h (x: u8) : u8 = let x:u8 = x +! mk_u8 10 in x type t_Bar = { f_a:u8; f_b:u8 } let i (bar: t_Bar) : (t_Bar & u8) = let bar:t_Bar = { bar with f_b = bar.f_b +! bar.f_a } <: t_Bar in let bar:t_Bar = { bar with f_a = h bar.f_a } <: t_Bar in let hax_temp_output:u8 = bar.f_a +! bar.f_b in bar, hax_temp_output <: (t_Bar & u8) let j (x: t_Bar) : (t_Bar & u8) = let out:u8 = mk_u8 123 in let (tmp0: t_Bar), (out1: u8) = i x in let x:t_Bar = tmp0 in let hax_temp_output:u8 = out1 +! 
out in x, hax_temp_output <: (t_Bar & u8) let k (vec: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) (arg_1_wild3: u16) (arg_1_wild: u8) (arg_3_wild2: Prims.unit) : (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & u16 & Prims.unit & u64) = let arg_1_wild2:u8 = vec.[ mk_usize 1 ] in let arg_3_wild:u8 = vec.[ mk_usize 2 ] in let arg_1_wild1:u8 = vec.[ mk_usize 3 ] in let arg_3_wild1:u8 = vec.[ mk_usize 4 ] in let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice vec <: t_Slice u8) (mk_usize 0) ((((arg_1_wild +! arg_3_wild <: u8) +! arg_1_wild1 <: u8) +! arg_3_wild1 <: u8) +! arg_1_wild <: u8) <: t_Slice u8) in let hax_temp_output:u64 = mk_u64 12345 in vec, arg_1_wild3, arg_3_wild2, hax_temp_output <: (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & u16 & Prims.unit & u64) class t_FooTrait (v_Self: Type0) = { f_z_pre:v_Self -> Type0; f_z_post:v_Self -> v_Self -> Type0; f_z:x0: v_Self -> Prims.Pure v_Self (f_z_pre x0) (fun result -> f_z_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_FooTrait_for_Foo: t_FooTrait t_Foo = { f_z_pre = (fun (self: t_Foo) -> true); f_z_post = (fun (self: t_Foo) (out: t_Foo) -> true); f_z = fun (self: t_Foo) -> self } let array (x: t_Array u8 (mk_usize 10)) : t_Array u8 (mk_usize 10) = let x:t_Array u8 (mk_usize 10) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize x (mk_usize 1) (x.[ mk_usize 2 ] <: u8) in x ''' ================================================ FILE: test-harness/src/snapshots/toolchain__naming into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: naming manifest: naming/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 1 [[stdout.diagnostics]] message = ''' (Coq 
backend) something is not implemented yet. [ty] node str''' spans = ['Span { lo: Loc { line: 160, col: 0 }, hi: Loc { line: 160, col: 43 }, filename: Real(LocalPath("naming/src/lib.rs")), rust_span_data: None }'] [stdout.files] "Naming.v" = ''' (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Inductive t_Foo : Type := | Foo_A : t_Foo | Foo_B : Foo_B -> t_Foo. Definition impl__Foo__f (self : t_Foo_t) : t_Foo_t := Foo_At_Foo_t. Inductive t_Foo2 : Type := | Foo2_A : t_Foo2 | Foo2_B : Foo2_B -> t_Foo2. Class t_FooTrait (Self : Type) := { f_ASSOCIATED_CONSTANT : uint_size ; }. Class t_T1 (Self : Type) := { }. #[global] Instance t_Foo_t_t_T1 : t_T1 t_Foo_t := { }. #[global] Instance t_Foo_t × int8_t_T1 : t_T1 (t_Foo_t × int8) := { }. Class t_T2_for_a (Self : Type) := { }. Class t_T3_e_for_a (Self : Type) := { }. #[global] Instance t_Foo_t_t_T3_e_for_a : t_T3_e_for_a t_Foo_t := { }. (*Not implemented yet? todo(item)*) (*Not implemented yet? todo(item)*) Definition v_INHERENT_CONSTANT : uint_size := (@repr WORDSIZE32 3). Definition constants (_ : unit) : uint_size := f_ASSOCIATED_CONSTANT.+v_INHERENT_CONSTANT. Definition ff__g (_ : unit) : unit := tt. Inductive t_f__g__impl__g__Foo : Type := | C_f__g__impl__g__Foo_A : t_f__g__impl__g__Foo | C_f__g__impl__g__Foo_B : C_f__g__impl__g__Foo_B -> t_f__g__impl__g__Foo. Definition ff__g__impl_1__g (self : t_Foo_t) : uint_size := (@repr WORDSIZE32 1). (*Not implemented yet? todo(item)*) Definition reserved_names (val : int8) (noeq : int8) (of : int8) : int8 := (val.+noeq).+of. (*item error backend*) Record t_Arity1 (T : _) : Type := { 0 : T; }. #[global] Instance t_Arity1_t (t_Foo_t × int8)_t_T2_for_a : t_T2_for_a (t_Arity1_t (t_Foo_t × int8)) := { }. Record t_B : Type := { }. Definition impl__B__f (self : t_B_t) : t_B_t := Bt_B_t. Record t_C : Type := { f_x : uint_size; }. 
Record t_Foobar : Type := { f_a : t_Foo_t; }. Record t_StructA : Type := { f_a : uint_size; }. Record t_StructB : Type := { f_a : uint_size; f_b : uint_size; }. Record t_StructC : Type := { f_a : uint_size; }. Record t_StructD : Type := { f_a : uint_size; f_b : uint_size; }. Record t_X : Type := { }. Definition construct_structs (a : uint_size) (b : uint_size) : unit := let _ := Build_StructA (f_a := a) : t_StructA_t in let _ := Build_StructB (f_a := a) (f_b := b) : t_StructB_t in let _ := Build_StructC (f_a := a) : t_StructC_t in let _ := Build_StructD (f_a := a) (f_b := b) : t_StructD_t in tt. Definition f (x : t_Foobar_t) : uint_size := ff__g__impl_1__g (f_a x). Definition ff__g__impl__g (self : t_B_t) : uint_size := (@repr WORDSIZE32 0). Definition mk_c (_ : unit) : t_C_t := let _ := Build_Foo_B (f_x := (@repr WORDSIZE32 3)) : t_Foo_t in let _ := Xt_X_t : t_X_t in Build_C (f_x := (@repr WORDSIZE32 3)). ''' "Naming_Ambiguous_names.v" = ''' (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. (*Not implemented yet? todo(item)*) Definition debug (label : int32) (value : int32) : unit := let _ := v__print (impl_2__new_v1 (array_from_list [[; ] a=; ]) (array_from_list [impl_1__new_display label; impl_1__new_display value])) : unit in tt. Definition f (_ : unit) : unit := let a_1 := (@repr WORDSIZE32 104) : int32 in let a_2 := (@repr WORDSIZE32 205) : int32 in let a_3 := (@repr WORDSIZE32 306) : int32 in let a := (@repr WORDSIZE32 123) : int32 in let _ := debug (@repr WORDSIZE32 3) a_3 : unit in let _ := debug (@repr WORDSIZE32 2) a_2 : unit in let _ := debug (@repr WORDSIZE32 1) a_1 : unit in debug (@repr WORDSIZE32 4) a. 
Definition ff_expand (_ : unit) : unit := let a := (@repr WORDSIZE32 104) : int32 in let a := (@repr WORDSIZE32 205) : int32 in let a := (@repr WORDSIZE32 306) : int32 in let a := (@repr WORDSIZE32 123) : int32 in let _ := debug (@repr WORDSIZE32 3) a : unit in let _ := debug (@repr WORDSIZE32 2) a : unit in let _ := debug (@repr WORDSIZE32 1) a : unit in debug (@repr WORDSIZE32 0) a. ''' "Naming_F_G_Impl_1_G_Hello.v" = ''' (* File automatically generated by Hacspec *) From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Definition h (_ : unit) : unit := tt. ''' ================================================ FILE: test-harness/src/snapshots/toolchain__naming into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: naming manifest: naming/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Naming.Ambiguous_names.fst" = ''' module Naming.Ambiguous_names #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let debug (label value: u32) : Prims.unit = let args:(u32 & u32) = label, value <: (u32 & u32) in let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 2) = let list = [ Core_models.Fmt.Rt.impl__new_display #u32 args._1; Core_models.Fmt.Rt.impl__new_display #u32 args._2 ] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list in let _:Prims.unit = Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 3) (mk_usize 2) (let list = ["["; "] a="; "\n"] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3); Rust_primitives.Hax.array_of_list 3 list) args <: 
Core_models.Fmt.t_Arguments) in () /// `f` stacks mutliple let bindings declaring different `a`s. let f (_: Prims.unit) : Prims.unit = let a_1_:u32 = mk_u32 104 in let a_2_:u32 = mk_u32 205 in let a_3_:u32 = mk_u32 306 in let a:u32 = mk_u32 123 in let _:Prims.unit = debug (mk_u32 3) a_3_ in let _:Prims.unit = debug (mk_u32 2) a_2_ in let _:Prims.unit = debug (mk_u32 1) a_1_ in debug (mk_u32 4) a /// `f` is expanded into `f_expand` below, while the execution of `f` gives: /// ```plaintext /// [3] a=306 /// [2] a=205 /// [1] a=104 /// [last] a=123 /// ``` let ff_expand (_: Prims.unit) : Prims.unit = let a:i32 = mk_i32 104 in let a:i32 = mk_i32 205 in let a:i32 = mk_i32 306 in let a:u32 = mk_u32 123 in let _:Prims.unit = debug (mk_u32 3) a in let _:Prims.unit = debug (mk_u32 2) a in let _:Prims.unit = debug (mk_u32 1) a in debug (mk_u32 0) a ''' "Naming.Functions_defined_in_trait_impls.fst" = ''' module Naming.Functions_defined_in_trait_impls #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_A = | A : t_A [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Cmp.t_PartialEq t_A t_A = { f_eq_pre = (fun (self: t_A) (other: t_A) -> true); f_eq_post = (fun (self: t_A) (other: t_A) (out: bool) -> true); f_eq = fun (self: t_A) (other: t_A) -> Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic "explicit panic" <: Rust_primitives.Hax.t_Never) } type t_B = | B : t_B [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: Core_models.Cmp.t_PartialEq t_B t_B = { f_eq_pre = (fun (self: t_B) (other: t_B) -> true); f_eq_post = (fun (self: t_B) (other: t_B) (out: bool) -> true); f_eq = fun (self: t_B) (other: t_B) -> Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic "explicit panic" <: Rust_primitives.Hax.t_Never) } ''' "Naming.fst" = ''' module Naming #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = | Foo_A : t_Foo | Foo_B { f_x:usize }: t_Foo type t_Foo2 = | Foo2_A : 
t_Foo2 | Foo2_B { f_x:usize }: t_Foo2 type t_B = | B : t_B type t_C = { f_x:usize } type t_X = | X : t_X let mk_c (_: Prims.unit) : t_C = let _:t_Foo = Foo_B ({ f_x = mk_usize 3 }) <: t_Foo in let _:t_X = X <: t_X in { f_x = mk_usize 3 } <: t_C let impl_Foo__f (self: t_Foo) : t_Foo = Foo_A <: t_Foo let impl_B__f (self: t_B) : t_B = B <: t_B type t_Foobar = { f_a:t_Foo } let f__g (_: Prims.unit) : Prims.unit = () let f__g__impl_B__g (self: t_B) : usize = mk_usize 0 type f__g__impl_B__g__t_Foo = | C_f__g__impl_B__g__Foo_A : f__g__impl_B__g__t_Foo | C_f__g__impl_B__g__Foo_B { f__g__impl_B__g__f_x:usize }: f__g__impl_B__g__t_Foo let f__g__impl_Foo__g (self: t_Foo) : usize = mk_usize 1 let f (x: t_Foobar) : usize = f__g__impl_Foo__g x.f_a let f__g__impl_Foo__g__t_hello__h (_: Prims.unit) : Prims.unit = () let reserved_names (v_val v_noeq v_of: u8) : u8 = (v_val +! v_noeq <: u8) +! v_of type t_Arity1 (v_T: Type0) = | Arity1 : v_T -> t_Arity1 v_T class t_T1 (v_Self: Type0) = { __marker_trait_t_T1:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T1_for_Foo: t_T1 t_Foo = { __marker_trait_t_T1 = () } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T1_for_tuple_Foo_u8: t_T1 (t_Foo & u8) = { __marker_trait_t_T1 = () } class t_T2_for_a (v_Self: Type0) = { __marker_trait_t_T2_for_a:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T2_ee_for_a_for_Arity1_of_tuple_Foo_u8: t_T2_for_a (t_Arity1 (t_Foo & u8)) = { __marker_trait_t_T2_for_a = () } class t_T3_ee_for_a (v_Self: Type0) = { __marker_trait_t_T3_ee_for_a:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T3_ee_e_for_a_for_Foo: t_T3_ee_for_a t_Foo = { __marker_trait_t_T3_ee_for_a = () } type t_StructA = { f_a:usize } type t_StructB = { f_a:usize; f_b:usize } type t_StructC = { f_a:usize } type t_StructD = { f_a:usize; f_b:usize } let construct_structs (a b: usize) : Prims.unit = let _:t_StructA = { f_a = a } <: t_StructA in let _:t_StructB = { f_a = a; f_b = b } <: t_StructB in let 
_:t_StructC = { f_a = a } <: t_StructC in let _:t_StructD = { f_a = a; f_b = b } <: t_StructD in () let v_INHERENT_CONSTANT: usize = mk_usize 3 class t_FooTrait (v_Self: Type0) = { f_ASSOCIATED_CONSTANT:usize } let constants (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_FooTrait v_T) (_: Prims.unit) : usize = (f_ASSOCIATED_CONSTANT #FStar.Tactics.Typeclasses.solve <: usize) +! v_INHERENT_CONSTANT /// From issue https://github.com/hacspec/hax/issues/839 let string_shadows (v_string n: string) : Prims.unit = () /// From issue https://github.com/cryspen/hax/issues/1450 let items_under_closures (_: Prims.unit) : Prims.unit = let _: Prims.unit -> Prims.unit = fun temp_0_ -> let _:Prims.unit = temp_0_ in () in () let items_under_closures__anon_const_0__nested_function (_: Prims.unit) : Prims.unit = () type items_under_closures__anon_const_0__t_NestedStruct = | C_items_under_closures__anon_const_0__NestedStruct : items_under_closures__anon_const_0__t_NestedStruct let items_under_closures__nested_function (_: Prims.unit) : Prims.unit = () type items_under_closures__t_NestedStruct = | C_items_under_closures__NestedStruct : items_under_closures__t_NestedStruct ''' ================================================ FILE: test-harness/src/snapshots/toolchain__pattern-or into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: pattern-or manifest: pattern-or/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Pattern_or.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. 
Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Inductive t_E : Type := | E_A | E_B. Definition t_E_cast_to_repr (x : t_E) : t_isize := match x with | E_A => (0 : t_isize) | E_B => (1 : t_isize) end. Definition bar (x : t_E) : unit := match x with | E_A | E_B => tt end. Definition nested (x : t_Option ((t_i32))) : t_i32 := match x with | Option_Some (1 | 2) => (1 : t_i32) | Option_Some (x) => x | Option_None => (0 : t_i32) end. Definition deep (x : (t_i32*t_Option ((t_i32)))) : t_i32 := match x with | (1 | 2,Option_Some (3 | 4)) => (0 : t_i32) | (x,_) => x end. Definition equivalent (x : (t_i32*t_Option ((t_i32)))) : t_i32 := match x with | (1,Option_Some (3)) | (1,Option_Some (4)) | (2,Option_Some (3)) | (2,Option_Some (4)) => (0 : t_i32) | (x,_) => x end. Definition deep_capture (x : t_Result (((t_i32*t_i32))) (((t_i32*t_i32)))) : t_i32 := match x with | Result_Ok ((1 | 2,x)) | Result_Err ((3 | 4,x)) => x | Result_Ok ((x,_)) | Result_Err ((x,_)) => x end. 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Pattern_or.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: pattern-or manifest: pattern-or/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Pattern_or.fst" = ''' module Pattern_or #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_E = | E_A : t_E | E_B : t_E let t_E_cast_to_repr (x: t_E) : isize = match x <: t_E with | E_A -> mk_isize 0 | E_B -> mk_isize 1 let bar (x: t_E) : Prims.unit = match x <: t_E with | E_A | E_B -> () <: Prims.unit let nested (x: Core_models.Option.t_Option i32) : i32 = match x <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 1) | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 2) -> mk_i32 1 | Core_models.Option.Option_Some x -> x | Core_models.Option.Option_None -> mk_i32 0 let deep (x: (i32 & Core_models.Option.t_Option i32)) : i32 = match x <: (i32 & Core_models.Option.t_Option i32) with | Rust_primitives.Integers.MkInt 1, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3) | Rust_primitives.Integers.MkInt 1, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) | Rust_primitives.Integers.MkInt 2, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3) | Rust_primitives.Integers.MkInt 2, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) -> mk_i32 0 | x, _ -> x let equivalent (x: (i32 & Core_models.Option.t_Option i32)) : i32 = match x <: (i32 & 
Core_models.Option.t_Option i32) with | Rust_primitives.Integers.MkInt 1, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3) | Rust_primitives.Integers.MkInt 1, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) | Rust_primitives.Integers.MkInt 2, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3) | Rust_primitives.Integers.MkInt 2, Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) -> mk_i32 0 | x, _ -> x let deep_capture (x: Core_models.Result.t_Result (i32 & i32) (i32 & i32)) : i32 = match x <: Core_models.Result.t_Result (i32 & i32) (i32 & i32) with | Core_models.Result.Result_Ok (Rust_primitives.Integers.MkInt 1, x) | Core_models.Result.Result_Ok (Rust_primitives.Integers.MkInt 2, x) | Core_models.Result.Result_Err (Rust_primitives.Integers.MkInt 3, x) | Core_models.Result.Result_Err (Rust_primitives.Integers.MkInt 4, x) -> x | Core_models.Result.Result_Ok (x, _) | Core_models.Result.Result_Err (x, _) -> x ''' ================================================ FILE: test-harness/src/snapshots/toolchain__patterns into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: patterns manifest: patterns/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Patterns.fst" = ''' module Patterns #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Other = | Other : i32 -> t_Other type t_Test = | Test_C1 : t_Other -> t_Test let impl__test (self: t_Test) : i32 = match self <: t_Test with | Test_C1 c -> c._0 ''' ================================================ FILE: test-harness/src/snapshots/toolchain__recursion into-fstar.snap 
================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: recursion manifest: recursion/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Recursion.fst" = ''' module Recursion #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let rec f (n: u8) : u8 = if n =. mk_u8 0 then mk_u8 0 else n +! (f (n -! mk_u8 1 <: u8) <: u8) ''' ================================================ FILE: test-harness/src/snapshots/toolchain__reordering into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: reordering manifest: reordering/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Reordering.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Definition no_dependency_1_ '(_ : unit) : unit := tt. Definition no_dependency_2_ '(_ : unit) : unit := tt. Inductive t_Foo : Type := | Foo_A | Foo_B. Definition f '(_ : t_u32) : t_Foo := Foo_A. Record Bar_record : Type := { Bar_0 : t_Foo; }. #[export] Instance settable_Bar_record : Settable _ := settable! (Build_Bar_record) . Notation "'Bar_Bar_record'" := Build_Bar_record. 
Definition g '(_ : unit) : t_Bar := Bar (f ((32 : t_u32))). Definition t_Foo_cast_to_repr (x : t_Foo) : t_isize := match x with | Foo_A => (0 : t_isize) | Foo_B => (1 : t_isize) end. ''' "Reordering_Independent_cycles.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Definition c '(_ : unit) : unit := a (tt). Definition a '(_ : unit) : unit := c (tt). Definition d '(_ : unit) : unit := b (tt). Definition b '(_ : unit) : unit := d (tt). ''' "Reordering_Mut_rec.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Definition g '(_ : unit) : unit := f (tt). Definition f '(_ : unit) : unit := g (tt). Definition ff_2_ '(_ : unit) : unit := f (tt). 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Reordering_Mut_rec.v Reordering_Independent_cycles.v Reordering.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__reordering into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: reordering manifest: reordering/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Reordering.Independent_cycles.fst" = ''' module Reordering.Independent_cycles #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let rec c (_: Prims.unit) : Prims.unit = a () and a (_: Prims.unit) : Prims.unit = c () let rec d (_: Prims.unit) : Prims.unit = b () and b (_: Prims.unit) : Prims.unit = d () ''' "Reordering.Mut_rec.fst" = ''' module Reordering.Mut_rec #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let rec g (_: Prims.unit) : Prims.unit = f () and f (_: Prims.unit) : Prims.unit = g () let ff_2_ (_: Prims.unit) : Prims.unit = f () ''' "Reordering.fst" = ''' module Reordering #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let no_dependency_1_ (_: Prims.unit) : Prims.unit = () let no_dependency_2_ (_: Prims.unit) : Prims.unit = () type t_Foo = | Foo_A : t_Foo | Foo_B : t_Foo let f (_: u32) : t_Foo = Foo_A <: t_Foo type t_Bar = | Bar : t_Foo -> t_Bar let g (_: Prims.unit) : t_Bar = Bar (f (mk_u32 32)) <: t_Bar let t_Foo_cast_to_repr (x: t_Foo) : isize = match x <: t_Foo with | Foo_A -> mk_isize 0 | Foo_B -> mk_isize 1 ''' ================================================ FILE: test-harness/src/snapshots/toolchain__reordering into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs 
expression: snapshot info: kind: Translate: backend: ssprove info: name: reordering manifest: reordering/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Reordering.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations no_dependency_1_ (_ : both 'unit) : both 'unit := no_dependency_1_ _ := ret_both (tt : 'unit) : both 'unit. Fail Next Obligation. Equations no_dependency_2_ (_ : both 'unit) : both 'unit := no_dependency_2_ _ := ret_both (tt : 'unit) : both 'unit. Fail Next Obligation. Definition t_Foo : choice_type := ('unit ∐ 'unit). Notation "'Foo_A_case'" := (inl tt) (at level 100). Equations Foo_A : both t_Foo := Foo_A := ret_both (inl (tt : 'unit) : t_Foo) : both t_Foo. Fail Next Obligation. Notation "'Foo_B_case'" := (inr tt) (at level 100). Equations Foo_B : both t_Foo := Foo_B := ret_both (inr (tt : 'unit) : t_Foo) : both t_Foo. Fail Next Obligation. 
Equations f (_ : both int32) : both t_Foo := f _ := Foo_A : both t_Foo. Fail Next Obligation. Definition t_Bar : choice_type := (t_Foo). Equations 0 (s : both t_Bar) : both t_Foo := 0 s := bind_both s (fun x => ret_both (x : t_Foo)) : both t_Foo. Fail Next Obligation. Equations Build_t_Bar {0 : both t_Foo} : both (t_Bar) := Build_t_Bar := bind_both 0 (fun 0 => ret_both ((0) : (t_Bar))) : both (t_Bar). Fail Next Obligation. Notation "'Build_t_Bar' '[' x ']' '(' '0' ':=' y ')'" := (Build_t_Bar (0 := y)). Equations g (_ : both 'unit) : both t_Bar := g _ := Bar (f (ret_both (32 : int32))) : both t_Bar. Fail Next Obligation. Equations t_Foo_cast_to_repr (x : both t_Foo) : both uint_size := t_Foo_cast_to_repr x := matchb x with | Foo_A_case => ret_both (0 : uint_size) | Foo_B_case => ret_both (1 : uint_size) end : both uint_size. Fail Next Obligation. ''' "Reordering_Independent_cycles.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations c (_ : both 'unit) : both 'unit := c _ := a : both 'unit. Fail Next Obligation. Equations a (_ : both 'unit) : both 'unit := a _ := c : both 'unit. 
Fail Next Obligation. Equations d (_ : both 'unit) : both 'unit := d _ := b : both 'unit. Fail Next Obligation. Equations b (_ : both 'unit) : both 'unit := b _ := d : both 'unit. Fail Next Obligation. ''' "Reordering_Mut_rec.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations g (_ : both 'unit) : both 'unit := g _ := f : both 'unit. Fail Next Obligation. Equations f (_ : both 'unit) : both 'unit := f _ := g : both 'unit. Fail Next Obligation. Equations ff_2_ (_ : both 'unit) : both 'unit := ff_2_ _ := f : both 'unit. Fail Next Obligation. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__side-effects into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: side-effects manifest: side-effects/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Side_effects.Issue_1083_.fst" = ''' module Side_effects.Issue_1083_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_MyFrom (v_Self: Type0) (v_T: Type0) = { f_my_from_pre:v_T -> Type0; f_my_from_post:v_T -> v_Self -> Type0; f_my_from:x0: v_T -> Prims.Pure v_Self (f_my_from_pre x0) (fun result -> f_my_from_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_MyFrom u16 u8 = { f_my_from_pre = (fun (x: u8) -> true); f_my_from_post = (fun (x: u8) (out: u16) -> true); f_my_from = fun (x: u8) -> cast (x <: u8) <: u16 } let f (x: u8) : Core_models.Result.t_Result u16 u16 = match Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8 with | Core_models.Result.Result_Ok _ -> Core_models.Result.Result_Ok (f_my_from #u16 #u8 #FStar.Tactics.Typeclasses.solve x) <: Core_models.Result.t_Result u16 u16 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err (Core_models.Convert.f_from #u16 #u8 #FStar.Tactics.Typeclasses.solve err) <: Core_models.Result.t_Result u16 u16 ''' "Side_effects.Issue_1089_.fst" = ''' module Side_effects.Issue_1089_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let test (x y: Core_models.Option.t_Option i32) : Core_models.Option.t_Option i32 = match Core_models.Option.impl__map #i32 #(Core_models.Option.t_Option i32) 
#(i32 -> Core_models.Option.t_Option i32) x (fun i -> let i:i32 = i in match y <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some hoist38 -> Core_models.Option.Option_Some (i +! hoist38 <: i32) <: Core_models.Option.t_Option i32 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32) <: Core_models.Option.t_Option (Core_models.Option.t_Option i32) with | Core_models.Option.Option_Some some -> some | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32 ''' "Side_effects.Issue_1299_.fst" = ''' module Side_effects.Issue_1299_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Foo = { f_y:u8 } type t_S = { f_g:t_Foo } type t_OtherS = { f_g:Core_models.Option.t_Option t_Foo } let impl_Foo__from (i: t_Foo) : t_Foo = { f_y = Core_models.Clone.f_clone #u8 #FStar.Tactics.Typeclasses.solve i.f_y } <: t_Foo type t_Error = | Error : t_Error let impl_S__from (i: t_OtherS) : Core_models.Result.t_Result t_S t_Error = match Core_models.Option.impl__ok_or #t_Foo #t_Error (Core_models.Option.impl__as_ref #t_Foo i.f_g <: Core_models.Option.t_Option t_Foo) (Error <: t_Error) <: Core_models.Result.t_Result t_Foo t_Error with | Core_models.Result.Result_Ok hoist47 -> Core_models.Result.Result_Ok ({ f_g = impl_Foo__from hoist47 } <: t_S) <: Core_models.Result.t_Result t_S t_Error | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result t_S t_Error ''' "Side_effects.Issue_1300_.fst" = ''' module Side_effects.Issue_1300_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_fun (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 = match Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map (Core_models.Slice.Iter.t_Iter u8) (u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8)) #FStar.Tactics.Typeclasses.solve 
#(Core_models.Result.t_Result (Alloc.Vec.t_Vec (u8 & t_Array u8 (mk_usize 32)) Alloc.Alloc.t_Global) u8) (Core_models.Iter.Traits.Iterator.f_map #(Core_models.Slice.Iter.t_Iter u8) #FStar.Tactics.Typeclasses.solve #(Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8) #(u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8) (Core_models.Slice.impl__iter #u8 (Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 5) <: t_Slice u8) <: Core_models.Slice.Iter.t_Iter u8) (fun prev -> let prev:u8 = prev in match Core_models.Result.Result_Ok (Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32) <: t_Array u8 (mk_usize 32)) <: Core_models.Result.t_Result (t_Array u8 (mk_usize 32)) u8 with | Core_models.Result.Result_Ok hoist45 -> Core_models.Result.Result_Ok (prev, hoist45 <: (u8 & t_Array u8 (mk_usize 32))) <: Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8) <: Core_models.Iter.Adapters.Map.t_Map (Core_models.Slice.Iter.t_Iter u8) (u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8)) <: Core_models.Result.t_Result (Alloc.Vec.t_Vec (u8 & t_Array u8 (mk_usize 32)) Alloc.Alloc.t_Global) u8 with | Core_models.Result.Result_Ok v_val -> Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit u8 ''' "Side_effects.Nested_return.fst" = ''' module Side_effects.Nested_return #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let other_fun (rng: i8) : (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) = let hax_temp_output:Core_models.Result.t_Result Prims.unit Prims.unit = Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit Prims.unit in rng, hax_temp_output <: (i8 & 
Core_models.Result.t_Result Prims.unit Prims.unit) let v_fun (rng: i8) : (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) = let (tmp0: i8), (out: Core_models.Result.t_Result Prims.unit Prims.unit) = other_fun rng in let rng:i8 = tmp0 in match out <: Core_models.Result.t_Result Prims.unit Prims.unit with | Core_models.Result.Result_Ok hoist41 -> rng, (Core_models.Result.Result_Ok hoist41 <: Core_models.Result.t_Result Prims.unit Prims.unit) <: (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) | Core_models.Result.Result_Err err -> rng, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit) <: (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) ''' "Side_effects.fst" = ''' module Side_effects #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models /// Helper function let add3 (x y z: u32) : u32 = Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add x y <: u32) z /// Exercise local mutation with control flow and loops let local_mutation (x: u32) : u32 = let y:u32 = mk_u32 0 in let x:u32 = Core_models.Num.impl_u32__wrapping_add x (mk_u32 1) in if x >. mk_u32 3 then let x:u32 = Core_models.Num.impl_u32__wrapping_sub x (mk_u32 3) in let y:u32 = x /! 
mk_u32 2 in let y:u32 = Core_models.Num.impl_u32__wrapping_add y (mk_u32 2) in let y:u32 = Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Ops.Range.t_Range u32) #FStar.Tactics.Typeclasses.solve ({ Core_models.Ops.Range.f_start = mk_u32 0; Core_models.Ops.Range.f_end = mk_u32 10 } <: Core_models.Ops.Range.t_Range u32) <: Core_models.Ops.Range.t_Range u32) y (fun y i -> let y:u32 = y in let i:u32 = i in Core_models.Num.impl_u32__wrapping_add x i <: u32) in Core_models.Num.impl_u32__wrapping_add x y else let ((x: u32), (y: u32)), (hoist7: u32) = match x <: u32 with | Rust_primitives.Integers.MkInt 12 -> let y:u32 = Core_models.Num.impl_u32__wrapping_add x y in (x, y <: (u32 & u32)), mk_u32 3 <: ((u32 & u32) & u32) | Rust_primitives.Integers.MkInt 13 -> let x:u32 = Core_models.Num.impl_u32__wrapping_add x (mk_u32 1) in (x, y <: (u32 & u32)), add3 x (Core_models.Num.impl_u32__wrapping_add (mk_u32 123) x <: u32) x <: ((u32 & u32) & u32) | _ -> (x, y <: (u32 & u32)), mk_u32 0 <: ((u32 & u32) & u32) in let x:u32 = hoist7 in Core_models.Num.impl_u32__wrapping_add x y /// Exercise early returns with control flow and loops let early_returns (x: u32) : u32 = if x >. mk_u32 3 then mk_u32 0 else if x >. mk_u32 30 then match true <: bool with | true -> mk_u32 34 | _ -> let (x: u32), (hoist11: u32) = x, mk_u32 3 <: (u32 & u32) in Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (mk_u32 123) hoist11 <: u32) x else let x:u32 = x +! mk_u32 9 in let (x: u32), (hoist11: u32) = x, x +! mk_u32 1 <: (u32 & u32) in Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (mk_u32 123) hoist11 <: u32) x let simplifiable_return (c1 c2 c3: bool) : i32 = let x:i32 = mk_i32 0 in if c1 then if c2 then let x:i32 = x +! mk_i32 10 in if c3 then mk_i32 1 else x +! mk_i32 1 else x +! 
mk_i32 1 else x let simplifiable_question_mark (c: bool) (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option i32 = if c then match x <: Core_models.Option.t_Option i32 with | Core_models.Option.Option_Some hoist16 -> let a:i32 = hoist16 +! mk_i32 10 in let b:i32 = mk_i32 20 in Core_models.Option.Option_Some (a +! b) <: Core_models.Option.t_Option i32 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32 else let a:i32 = mk_i32 0 in let b:i32 = mk_i32 20 in Core_models.Option.Option_Some (a +! b) <: Core_models.Option.t_Option i32 /// Question mark without error coercion let direct_result_question_mark (y: Core_models.Result.t_Result Prims.unit u32) : Core_models.Result.t_Result i8 u32 = match y <: Core_models.Result.t_Result Prims.unit u32 with | Core_models.Result.Result_Ok _ -> Core_models.Result.Result_Ok (mk_i8 0) <: Core_models.Result.t_Result i8 u32 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result i8 u32 /// Question mark with an error coercion let direct_result_question_mark_coercion (y: Core_models.Result.t_Result i8 u16) : Core_models.Result.t_Result i8 u32 = match y <: Core_models.Result.t_Result i8 u16 with | Core_models.Result.Result_Ok hoist17 -> Core_models.Result.Result_Ok hoist17 <: Core_models.Result.t_Result i8 u32 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err (Core_models.Convert.f_from #u32 #u16 #FStar.Tactics.Typeclasses.solve err) <: Core_models.Result.t_Result i8 u32 /// Test question mark on `Option`s with some control flow let options (x y: Core_models.Option.t_Option u8) (z: Core_models.Option.t_Option u64) : Core_models.Option.t_Option u8 = match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist21 -> if hoist21 >. 
mk_u8 10 then match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist23 -> (match Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add hoist23 (mk_u8 3)) <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist29 -> (match hoist29 <: u8 with | Rust_primitives.Integers.MkInt 3 -> (match Core_models.Option.Option_None <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some some -> let v:u8 = some in (match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Rust_primitives.Integers.MkInt 4 -> (match z <: Core_models.Option.t_Option u64 with | Core_models.Option.Option_Some hoist18 -> let v:u8 = mk_u8 4 +! (if hoist18 >. 
mk_u64 4 <: bool then mk_u8 0 else mk_u8 3) in (match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | _ -> let v:u8 = mk_u8 12 in match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8 else (match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist26 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist25 -> (match Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add hoist26 hoist25) <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist29 -> (match hoist29 <: u8 with | Rust_primitives.Integers.MkInt 3 -> (match Core_models.Option.Option_None <: Core_models.Option.t_Option u8 
with | Core_models.Option.Option_Some some -> let v:u8 = some in (match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Rust_primitives.Integers.MkInt 4 -> (match z <: Core_models.Option.t_Option u64 with | Core_models.Option.Option_Some hoist18 -> let v:u8 = mk_u8 4 +! (if hoist18 >. mk_u64 4 <: bool then mk_u8 0 else mk_u8 3) in (match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | _ -> let v:u8 = mk_u8 12 in match x <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist30 -> (match y <: Core_models.Option.t_Option u8 with | Core_models.Option.Option_Some hoist31 -> Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v hoist30 <: u8) hoist31) <: Core_models.Option.t_Option u8 | 
Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8) | Core_models.Option.Option_None -> Core_models.Option.Option_None <: Core_models.Option.t_Option u8 /// Test question mark on `Result`s with local mutation let question_mark (x: u32) : Core_models.Result.t_Result u32 u32 = if x >. mk_u32 40 then let y:u32 = mk_u32 0 in let x:u32 = Core_models.Num.impl_u32__wrapping_add x (mk_u32 3) in let y:u32 = Core_models.Num.impl_u32__wrapping_add x y in let x:u32 = Core_models.Num.impl_u32__wrapping_add x y in if x >. mk_u32 90 then match Core_models.Result.Result_Err (mk_u8 12) <: Core_models.Result.t_Result Prims.unit u8 with | Core_models.Result.Result_Ok ok -> Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x) <: Core_models.Result.t_Result u32 u32 | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err (Core_models.Convert.f_from #u32 #u8 #FStar.Tactics.Typeclasses.solve err) <: Core_models.Result.t_Result u32 u32 else Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x) <: Core_models.Result.t_Result u32 u32 else Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x) <: Core_models.Result.t_Result u32 u32 type t_A = | A : t_A type t_B = | B : t_B /// Combine `?` and early return let monad_lifting (x: u8) : Core_models.Result.t_Result t_A t_B = if x >. 
mk_u8 123 then match Core_models.Result.Result_Err (B <: t_B) <: Core_models.Result.t_Result t_A t_B with | Core_models.Result.Result_Ok hoist35 -> Core_models.Result.Result_Ok hoist35 <: Core_models.Result.t_Result t_A t_B | Core_models.Result.Result_Err err -> Core_models.Result.Result_Err err <: Core_models.Result.t_Result t_A t_B else Core_models.Result.Result_Ok (A <: t_A) <: Core_models.Result.t_Result t_A t_B type t_Bar = { f_a:bool; f_b:(t_Array (bool & bool) (mk_usize 6) & bool) } type t_Foo = { f_x:bool; f_y:(bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global); f_z:t_Array t_Bar (mk_usize 6); f_bar:t_Bar } /// Test assignation on non-trivial places let assign_non_trivial_lhs (foo: t_Foo) : t_Foo = let foo:t_Foo = { foo with f_x = true } <: t_Foo in let foo:t_Foo = { foo with f_bar = { foo.f_bar with f_a = true } <: t_Bar } <: t_Foo in let foo:t_Foo = { foo with f_bar = { foo.f_bar with f_b = { foo.f_bar.f_b with _1 = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_bar.f_b._1 (mk_usize 3) ({ (foo.f_bar.f_b._1.[ mk_usize 3 ] <: (bool & bool)) with _2 = true } <: (bool & bool)) } <: (t_Array (bool & bool) (mk_usize 6) & bool) } <: t_Bar } <: t_Foo in let foo:t_Foo = { foo with f_z = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_z (mk_usize 3) ({ (foo.f_z.[ mk_usize 3 ] <: t_Bar) with f_a = true } <: t_Bar) } <: t_Foo in let foo:t_Foo = { foo with f_y = { foo.f_y with _2 = Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice foo.f_y._2 <: t_Slice t_Bar) (mk_usize 3) ({ (foo.f_y._2.[ mk_usize 3 ] <: t_Bar) with f_b = { (foo.f_y._2.[ mk_usize 3 ] <: t_Bar).f_b with _1 = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (foo.f_y._2.[ mk_usize 3 ] <: t_Bar) .f_b ._1 (mk_usize 5) ({ ((foo.f_y._2.[ mk_usize 3 ] <: t_Bar).f_b._1.[ mk_usize 5 ] <: (bool & bool)) with _1 = true } <: (bool & bool)) <: t_Array (bool & bool) (mk_usize 6) } <: (t_Array (bool & bool) 
(mk_usize 6) & bool) } <: t_Bar) <: t_Slice t_Bar) } <: (bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global) } <: t_Foo in foo ''' ================================================ FILE: test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: ssprove info: name: side-effects manifest: side-effects/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 1 stderr = """ Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs \u001B[1m\u001B[91merror\u001B[0m: \u001B[1m[HAX0001] (SSProve backend) something is not implemented yet. [expr] node app global vcar projector tuple\u001B[0m \u001B[1m\u001B[94m-->\u001B[0m side-effects/src/lib.rs:156:5 \u001B[1m\u001B[94m |\u001B[0m \u001B[1m\u001B[94m156 |\u001B[0m foo.y.1[3].b.0[5].0 = true; \u001B[1m\u001B[94m |\u001B[0m\u001B[1m\u001B[91m ^^^^^^^^^^^^^^^^^^^^^^^^^^\u001B[0m \u001B[1m\u001B[94m |\u001B[0m""" [[stdout.diagnostics]] message = ''' (SSProve backend) something is not implemented yet. [expr] node app global vcar projector tuple''' spans = ['Span { lo: Loc { line: 156, col: 4 }, hi: Loc { line: 156, col: 30 }, filename: Real(LocalPath("side-effects/src/lib.rs")), rust_span_data: None }'] [stdout.files] "Side_effects.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. 
From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations add3 (x : both int32) (y : both int32) (z : both int32) : both int32 := add3 x y z := impl_u32__wrapping_add (impl_u32__wrapping_add x y) z : both int32. Fail Next Obligation. Equations local_mutation (x : both int32) : both int32 := local_mutation x := letb y := ret_both (0 : int32) in letb x := impl_u32__wrapping_add x (ret_both (1 : int32)) in letb hoist1 := x >.? (ret_both (3 : int32)) in ifb hoist1 then letb x := impl_u32__wrapping_sub x (ret_both (3 : int32)) in letb y := x ./ (ret_both (2 : int32)) in letb y := impl_u32__wrapping_add y (ret_both (2 : int32)) in letb hoist2 := ret_both (0 : int32) in letb hoist3 := Build_t_Range (f_start := hoist2) (f_end := ret_both (10 : int32)) in letb hoist4 := f_into_iter hoist3 in letb y := foldi_both_list hoist4 (fun i => ssp (fun y => impl_u32__wrapping_add x i : (both int32))) y in impl_u32__wrapping_add x y else letb '((x,y),hoist7) := matchb x with | 12 => letb y := impl_u32__wrapping_add x y in prod_b (prod_b (x,y),ret_both (3 : int32)) | 13 => letb hoist6 := x in letb x := impl_u32__wrapping_add x (ret_both (1 : int32)) in letb hoist5 := impl_u32__wrapping_add (ret_both (123 : int32)) x in prod_b (prod_b (x,y),add3 hoist6 hoist5 x) | _ => prod_b (prod_b (x,y),ret_both (0 : int32)) end in letb x := hoist7 in impl_u32__wrapping_add x y : both int32. Fail Next Obligation. Equations early_returns (x : both int32) : both int32 := early_returns x := run (letm[choice_typeMonad.result_bind_code int32] _ := ifb x >.? 
(ret_both (3 : int32)) then letm[choice_typeMonad.result_bind_code int32] hoist8 := ControlFlow_Break (ret_both (0 : int32)) in ControlFlow_Continue (never_to_any hoist8) else ControlFlow_Continue (ret_both (tt : 'unit)) in letb hoist9 := x >.? (ret_both (30 : int32)) in letm[choice_typeMonad.result_bind_code int32] '(x,hoist11) := ifb hoist9 then matchb ret_both (true : 'bool) with | true => letm[choice_typeMonad.result_bind_code int32] hoist10 := ControlFlow_Break (ret_both (34 : int32)) in ControlFlow_Continue (prod_b (x,never_to_any hoist10)) | _ => ControlFlow_Continue (prod_b (x,ret_both (3 : int32))) end else ControlFlow_Continue (letb x := x .+ (ret_both (9 : int32)) in prod_b (x,x .+ (ret_both (1 : int32)))) in letb hoist12 := impl_u32__wrapping_add (ret_both (123 : int32)) hoist11 in letb hoist13 := impl_u32__wrapping_add hoist12 x in letm[choice_typeMonad.result_bind_code int32] hoist14 := ControlFlow_Break hoist13 in ControlFlow_Continue (never_to_any hoist14)) : both int32. Fail Next Obligation. Equations simplifiable_return (c1 : both 'bool) (c2 : both 'bool) (c3 : both 'bool) : both int32 := simplifiable_return c1 c2 c3 := run (letb x := ret_both (0 : int32) in letm[choice_typeMonad.result_bind_code int32] x := ifb c1 then letm[choice_typeMonad.result_bind_code int32] x := ifb c2 then letb x := x .+ (ret_both (10 : int32)) in ifb c3 then letm[choice_typeMonad.result_bind_code int32] hoist15 := ControlFlow_Break (ret_both (1 : int32)) in ControlFlow_Continue x else ControlFlow_Continue x else ControlFlow_Continue x in ControlFlow_Continue (letb x := x .+ (ret_both (1 : int32)) in x) else ControlFlow_Continue x in ControlFlow_Continue x) : both int32. Fail Next Obligation. 
Equations simplifiable_question_mark (c : both 'bool) (x : both (t_Option int32)) : both (t_Option int32) := simplifiable_question_mark c x := run (letm[choice_typeMonad.option_bind_code] a := ifb c then letm[choice_typeMonad.option_bind_code] hoist16 := x in Option_Some (hoist16 .+ (ret_both (10 : int32))) else Option_Some (ret_both (0 : int32)) in Option_Some (letb b := ret_both (20 : int32) in Option_Some (a .+ b))) : both (t_Option int32). Fail Next Obligation. Equations direct_result_question_mark (y : both (t_Result 'unit int32)) : both (t_Result int8 int32) := direct_result_question_mark y := run (letm[choice_typeMonad.result_bind_code int32] _ := y in Result_Ok (Result_Ok (ret_both (0 : int8)))) : both (t_Result int8 int32). Fail Next Obligation. Equations direct_result_question_mark_coercion (y : both (t_Result int8 int16)) : both (t_Result int8 int32) := direct_result_question_mark_coercion y := run (letm[choice_typeMonad.result_bind_code int32] hoist17 := impl__map_err y f_from in Result_Ok (Result_Ok hoist17)) : both (t_Result int8 int32). Fail Next Obligation. Equations options (x : both (t_Option int8)) (y : both (t_Option int8)) (z : both (t_Option int64)) : both (t_Option int8) := options x y z := run (letm[choice_typeMonad.option_bind_code] hoist21 := x in letb hoist22 := hoist21 >.? 
(ret_both (10 : int8)) in letm[choice_typeMonad.option_bind_code] hoist28 := ifb hoist22 then letm[choice_typeMonad.option_bind_code] hoist23 := x in Option_Some (letb hoist24 := impl_u8__wrapping_add hoist23 (ret_both (3 : int8)) in Option_Some hoist24) else letm[choice_typeMonad.option_bind_code] hoist26 := x in letm[choice_typeMonad.option_bind_code] hoist25 := y in Option_Some (letb hoist27 := impl_u8__wrapping_add hoist26 hoist25 in Option_Some hoist27) in letm[choice_typeMonad.option_bind_code] hoist29 := hoist28 in letm[choice_typeMonad.option_bind_code] v := matchb hoist29 with | 3 => Option_None | 4 => letm[choice_typeMonad.option_bind_code] hoist18 := z in Option_Some (letb hoist19 := hoist18 >.? (ret_both (4 : int64)) in letb hoist20 := ifb hoist19 then ret_both (0 : int8) else ret_both (3 : int8) in (ret_both (4 : int8)) .+ hoist20) | _ => Option_Some (ret_both (12 : int8)) end in letm[choice_typeMonad.option_bind_code] hoist30 := x in letb hoist32 := impl_u8__wrapping_add v hoist30 in letm[choice_typeMonad.option_bind_code] hoist31 := y in Option_Some (letb hoist33 := impl_u8__wrapping_add hoist32 hoist31 in Option_Some hoist33)) : both (t_Option int8). Fail Next Obligation. Equations question_mark (x : both int32) : both (t_Result int32 int32) := question_mark x := run (letm[choice_typeMonad.result_bind_code int32] x := ifb x >.? (ret_both (40 : int32)) then letb y := ret_both (0 : int32) in letb x := impl_u32__wrapping_add x (ret_both (3 : int32)) in letb y := impl_u32__wrapping_add x y in letb x := impl_u32__wrapping_add x y in letb hoist34 := x >.? (ret_both (90 : int32)) in ifb hoist34 then letm[choice_typeMonad.result_bind_code int32] _ := impl__map_err (Result_Err (ret_both (12 : int8))) f_from in Result_Ok x else Result_Ok x else Result_Ok x in Result_Ok (Result_Ok (impl_u32__wrapping_add (ret_both (3 : int32)) x))) : both (t_Result int32 int32). Fail Next Obligation. Definition t_A : choice_type := 'unit. 
Equations Build_t_A : both (t_A) := Build_t_A := ret_both (tt (* Empty tuple *) : (t_A)) : both (t_A). Fail Next Obligation. Definition t_B : choice_type := 'unit. Equations Build_t_B : both (t_B) := Build_t_B := ret_both (tt (* Empty tuple *) : (t_B)) : both (t_B). Fail Next Obligation. Equations monad_lifting (x : both int8) : both (t_Result t_A t_B) := monad_lifting x := run (ifb x >.? (ret_both (123 : int8)) then letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist35 := ControlFlow_Continue (Result_Err B) in letb hoist36 := Result_Ok hoist35 in letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist37 := ControlFlow_Break hoist36 in ControlFlow_Continue (never_to_any hoist37) else ControlFlow_Continue (Result_Ok A)) : both (t_Result t_A t_B). Fail Next Obligation. Definition t_Bar : choice_type := ('bool × nseq ('bool × 'bool) 6 × 'bool). Equations f_a (s : both t_Bar) : both 'bool := f_a s := bind_both s (fun x => ret_both (fst x : 'bool)) : both 'bool. Fail Next Obligation. Equations f_b (s : both t_Bar) : both (nseq ('bool × 'bool) 6 × 'bool) := f_b s := bind_both s (fun x => ret_both (snd x : (nseq ('bool × 'bool) 6 × 'bool))) : both (nseq ('bool × 'bool) 6 × 'bool). Fail Next Obligation. Equations Build_t_Bar {f_a : both 'bool} {f_b : both (nseq ('bool × 'bool) 6 × 'bool)} : both (t_Bar) := Build_t_Bar := bind_both f_b (fun f_b => bind_both f_a (fun f_a => ret_both ((f_a,f_b) : (t_Bar)))) : both (t_Bar). Fail Next Obligation. Notation "'Build_t_Bar' '[' x ']' '(' 'f_a' ':=' y ')'" := (Build_t_Bar (f_a := y) (f_b := f_b x)). Notation "'Build_t_Bar' '[' x ']' '(' 'f_b' ':=' y ')'" := (Build_t_Bar (f_a := f_a x) (f_b := y)). Definition t_Foo : choice_type := ('bool × 'bool × t_Vec t_Bar t_Global × nseq t_Bar 6 × t_Bar). Equations f_x (s : both t_Foo) : both 'bool := f_x s := bind_both s (fun x => ret_both (fst (fst (fst x)) : 'bool)) : both 'bool. Fail Next Obligation. 
Equations f_y (s : both t_Foo) : both ('bool × t_Vec t_Bar t_Global) := f_y s := bind_both s (fun x => ret_both (snd (fst (fst x)) : ('bool × t_Vec t_Bar t_Global))) : both ('bool × t_Vec t_Bar t_Global). Fail Next Obligation. Equations f_z (s : both t_Foo) : both (nseq t_Bar 6) := f_z s := bind_both s (fun x => ret_both (snd (fst x) : (nseq t_Bar 6))) : both (nseq t_Bar 6). Fail Next Obligation. Equations f_bar (s : both t_Foo) : both t_Bar := f_bar s := bind_both s (fun x => ret_both (snd x : t_Bar)) : both t_Bar. Fail Next Obligation. Equations Build_t_Foo {f_x : both 'bool} {f_y : both ('bool × t_Vec t_Bar t_Global)} {f_z : both (nseq t_Bar 6)} {f_bar : both t_Bar} : both (t_Foo) := Build_t_Foo := bind_both f_bar (fun f_bar => bind_both f_z (fun f_z => bind_both f_y (fun f_y => bind_both f_x (fun f_x => ret_both ((f_x,f_y,f_z,f_bar) : (t_Foo)))))) : both (t_Foo). Fail Next Obligation. Notation "'Build_t_Foo' '[' x ']' '(' 'f_x' ':=' y ')'" := (Build_t_Foo (f_x := y) (f_y := f_y x) (f_z := f_z x) (f_bar := f_bar x)). Notation "'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := y) (f_z := f_z x) (f_bar := f_bar x)). Notation "'Build_t_Foo' '[' x ']' '(' 'f_z' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := y) (f_bar := f_bar x)). Notation "'Build_t_Foo' '[' x ']' '(' 'f_bar' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := f_z x) (f_bar := y)). (*item error backend*) ''' "Side_effects_Issue_1083_.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. 
From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Class t_MyFrom (Self : choice_type) (v_Self : v_Self) {v_T : v_T} `{ t_Sized v_T} := { f_my_from : (both v_T -> both v_Self) ; }. #[global] Program Instance int16_t_MyFrom : t_MyFrom int16 int8 := let f_my_from := fun (x : both int8) => cast_int (WS2 := _) x : both int16 in {| f_my_from := (@f_my_from)|}. Fail Next Obligation. Hint Unfold int16_t_MyFrom. Equations f (x : both int8) : both (t_Result int16 int16) := f x := run (letm[choice_typeMonad.result_bind_code int16] _ := impl__map_err (Result_Err (ret_both (1 : int8))) f_from in Result_Ok (Result_Ok (f_my_from x))) : both (t_Result int16 int16). Fail Next Obligation. ''' "Side_effects_Issue_1089_.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. 
Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations test (x : both (t_Option int32)) (y : both (t_Option int32)) : both (t_Option int32) := test x y := run (impl__map x (fun i => letm[choice_typeMonad.option_bind_code] hoist38 := y in Option_Some (letb hoist39 := i .+ hoist38 in Option_Some hoist39))) : both (t_Option int32). Fail Next Obligation. ''' "Side_effects_Issue_1299_.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Definition t_Foo : choice_type := (int8). Equations f_y (s : both t_Foo) : both int8 := f_y s := bind_both s (fun x => ret_both (x : int8)) : both int8. Fail Next Obligation. Equations Build_t_Foo {f_y : both int8} : both (t_Foo) := Build_t_Foo := bind_both f_y (fun f_y => ret_both ((f_y) : (t_Foo))) : both (t_Foo). Fail Next Obligation. Notation "'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'" := (Build_t_Foo (f_y := y)). Definition t_S : choice_type := (t_Foo). Equations f_g (s : both t_S) : both t_Foo := f_g s := bind_both s (fun x => ret_both (x : t_Foo)) : both t_Foo. Fail Next Obligation. 
Equations Build_t_S {f_g : both t_Foo} : both (t_S) := Build_t_S := bind_both f_g (fun f_g => ret_both ((f_g) : (t_S))) : both (t_S). Fail Next Obligation. Notation "'Build_t_S' '[' x ']' '(' 'f_g' ':=' y ')'" := (Build_t_S (f_g := y)). Definition t_OtherS : choice_type := (t_Option t_Foo). Equations f_g (s : both t_OtherS) : both (t_Option t_Foo) := f_g s := bind_both s (fun x => ret_both (x : (t_Option t_Foo))) : both (t_Option t_Foo). Fail Next Obligation. Equations Build_t_OtherS {f_g : both (t_Option t_Foo)} : both (t_OtherS) := Build_t_OtherS := bind_both f_g (fun f_g => ret_both ((f_g) : (t_OtherS))) : both (t_OtherS). Fail Next Obligation. Notation "'Build_t_OtherS' '[' x ']' '(' 'f_g' ':=' y ')'" := (Build_t_OtherS (f_g := y)). Equations impl_Foo__from (i : both t_Foo) : both t_Foo := impl_Foo__from i := Build_t_Foo (f_y := f_clone (f_y i)) : both t_Foo. Fail Next Obligation. Definition t_Error : choice_type := 'unit. Equations Build_t_Error : both (t_Error) := Build_t_Error := ret_both (tt (* Empty tuple *) : (t_Error)) : both (t_Error). Fail Next Obligation. Equations impl_S__from (i : both t_OtherS) : both (t_Result t_S t_Error) := impl_S__from i := run (letm[choice_typeMonad.result_bind_code t_Error] hoist49 := impl__ok_or (impl__as_ref (f_g i)) Error in Result_Ok (letb hoist50 := impl_Foo__from hoist49 in letb hoist51 := Build_t_S (f_g := hoist50) in Result_Ok hoist51)) : both (t_Result t_S t_Error). Fail Next Obligation. ''' "Side_effects_Issue_1300_.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. 
From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? todo(item)*) Equations fun (_ : both 'unit) : both (t_Result 'unit int8) := fun _ := letb val := f_collect (f_map (impl__iter (unsize (repeat (ret_both (0 : int8)) (ret_both (5 : uint_size))))) (fun prev => letb hoist47 := Result_Ok (repeat (ret_both (0 : int8)) (ret_both (32 : uint_size))) in letb hoist48 := prod_b (prev,hoist47) in Result_Ok hoist48)) in Result_Ok (ret_both (tt : 'unit)) : both (t_Result 'unit int8). Fail Next Obligation. ''' "Side_effects_Nested_return.v" = ''' (* File automatically generated by Hacspec *) Set Warnings "-notation-overridden,-ambiguous-paths". From Crypt Require Import choice_type Package Prelude. Import PackageNotation. From extructures Require Import ord fset. From mathcomp Require Import word_ssrZ word. (* From Jasmin Require Import word. *) From Coq Require Import ZArith. From Coq Require Import Strings.String. Import List.ListNotations. Open Scope list_scope. Open Scope Z_scope. Open Scope bool_scope. From Hacspec Require Import ChoiceEquality. From Hacspec Require Import LocationUtility. From Hacspec Require Import Hacspec_Lib_Comparable. From Hacspec Require Import Hacspec_Lib_Pre. From Hacspec Require Import Hacspec_Lib. Open Scope hacspec_scope. Import choice.Choice.Exports. From RecordUpdate Require Import RecordUpdate. Import RecordSetNotations. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. (*Not implemented yet? 
todo(item)*) Equations other_fun (rng : both int8) : both (int8 × t_Result 'unit 'unit) := other_fun rng := letb hax_temp_output := Result_Ok (ret_both (tt : 'unit)) in prod_b (rng,hax_temp_output) : both (int8 × t_Result 'unit 'unit). Fail Next Obligation. Equations fun (rng : both int8) : both (int8 × t_Result 'unit 'unit) := fun rng := run (letb '(tmp0,out) := other_fun rng in letb rng := tmp0 in letb hoist41 := out in letb hoist42 := f_branch hoist41 in letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist43 := matchb hoist42 with | ControlFlow_Break_case residual => letb residual := ret_both ((residual) : (t_Result t_Infallible 'unit)) in letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist40 := ControlFlow_Break (prod_b (rng,f_from_residual residual)) in ControlFlow_Continue (never_to_any hoist40) | ControlFlow_Continue_case val => letb val := ret_both ((val) : ('unit)) in ControlFlow_Continue val end in letb hoist44 := Result_Ok hoist43 in letb hoist45 := prod_b (rng,hoist44) in letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist46 := ControlFlow_Break hoist45 in ControlFlow_Continue (letb hax_temp_output := never_to_any hoist46 in prod_b (rng,hax_temp_output))) : both (int8 × t_Result 'unit 'unit). Fail Next Obligation. 
''' ================================================ FILE: test-harness/src/snapshots/toolchain__slices into-coq.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: coq info: name: slices manifest: slices/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Slices.v" = ''' (* File automatically generated by Hacspec *) From Coq Require Import ZArith. Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. Require Import Ascii. Require Import String. Require Import Coq.Floats.Floats. From RecordUpdate Require Import RecordSet. Import RecordSetNotations. From Core Require Import Core. (* NotImplementedYet *) Definition v_VERSION : t_Slice t_u8 := unsize ([(118 : t_u8); (49 : t_u8)]). Definition do_something '(_ : t_Slice t_u8) : unit := tt. Definition r#unsized '(_ : t_Array (t_Slice t_u8) ((1 : t_usize))) : unit := tt. Definition sized (x : t_Array (t_Array (t_u8) ((4 : t_usize))) ((1 : t_usize))) : unit := r#unsized ([unsize (f_index (x) ((0 : t_usize)))]). 
''' _CoqProject = ''' -R ./ TODO -arg -w -arg all Slices.v''' ================================================ FILE: test-harness/src/snapshots/toolchain__slices into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: slices manifest: slices/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Slices.fst" = ''' module Slices #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_VERSION: t_Slice u8 = (let list = [mk_u8 118; mk_u8 49] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2); Rust_primitives.Hax.array_of_list 2 list) <: t_Slice u8 let do_something (_: t_Slice u8) : Prims.unit = () let r#unsized (_: t_Array (t_Slice u8) (mk_usize 1)) : Prims.unit = () let sized (x: t_Array (t_Array u8 (mk_usize 4)) (mk_usize 1)) : Prims.unit = r#unsized (let list = [x.[ mk_usize 0 ] <: t_Slice u8] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); Rust_primitives.Hax.array_of_list 1 list) ''' ================================================ FILE: test-harness/src/snapshots/toolchain__statics into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: statics manifest: statics/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Statics.fst" = ''' module Statics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let v_FOO: usize = mk_usize 0 let get_foo 
(_: Prims.unit) : usize = v_FOO ''' ================================================ FILE: test-harness/src/snapshots/toolchain__traits into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: traits manifest: traits/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: false stdout: true include_flag: ~ backend_options: ~ --- exit = 0 [stdout] diagnostics = [] [stdout.files] "Traits.Block_size.fst" = ''' module Traits.Block_size #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_BlockSizeUser (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_BlockSize:Type0 } class t_ParBlocksSizeUser (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_BlockSizeUser v_Self } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_ParBlocksSizeUser v_Self|} -> i._super_i0 class t_BlockBackend (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_ParBlocksSizeUser v_Self; f_proc_block_pre:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Type0; f_proc_block_post:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Prims.unit -> Type0; f_proc_block:x0: Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Prims.Pure Prims.unit (f_proc_block_pre x0) (fun result -> f_proc_block_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_BlockBackend v_Self|} -> i._super_i0 ''' "Traits.Default_traits_parameters.fst" = ''' module Traits.Default_traits_parameters #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Bar (v_Self: Type0) (v_T: Type0) = { __marker_trait_t_Bar:Prims.unit } class t_Foo (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Bar v_Self f_U; [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0 } [@@ 
FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Foo v_Self|} -> i._super_i0 ''' "Traits.For_clauses.Issue_495_.Minimized_3_.fst" = ''' module Traits.For_clauses.Issue_495_.Minimized_3_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Trait (v_Self: Type0) = { __marker_trait_t_Trait:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_P: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnMut v_P u8) : t_Trait v_P = { __marker_trait_t_Trait = () } ''' "Traits.For_clauses.Issue_495_.fst" = ''' module Traits.For_clauses.Issue_495_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let original_function_from_495_ (list: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) : Prims.unit = let (e_indices: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global):Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) #FStar.Tactics.Typeclasses.solve #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) (Core_models.Iter.Traits.Iterator.f_filter #(Core_models.Ops.Range.t_Range u8) #FStar.Tactics.Typeclasses.solve #(u8 -> bool) ({ Core_models.Ops.Range.f_start = mk_u8 0; Core_models.Ops.Range.f_end = mk_u8 5 } <: Core_models.Ops.Range.t_Range u8) (fun i -> let i:u8 = i in let (_: Core_models.Slice.Iter.t_Iter u8), (out: bool) = Core_models.Iter.Traits.Iterator.f_any #(Core_models.Slice.Iter.t_Iter u8) #FStar.Tactics.Typeclasses.solve #(u8 -> bool) (Core_models.Slice.impl__iter #u8 (Alloc.Vec.impl_1__as_slice list <: t_Slice u8) <: Core_models.Slice.Iter.t_Iter u8) (fun n -> let n:u8 = n in n =. 
i <: bool) in out) <: Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) in () let minimized_1_ (list: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) #FStar.Tactics.Typeclasses.solve #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) (Core_models.Iter.Traits.Iterator.f_filter #(Core_models.Ops.Range.t_Range u8) #FStar.Tactics.Typeclasses.solve #(u8 -> bool) ({ Core_models.Ops.Range.f_start = mk_u8 0; Core_models.Ops.Range.f_end = mk_u8 5 } <: Core_models.Ops.Range.t_Range u8) (fun temp_0_ -> let _:u8 = temp_0_ in true) <: Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) let minimized_2_ (it: Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) : Prims.unit = let (e_indices: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global):Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool)) #FStar.Tactics.Typeclasses.solve #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) it in () ''' "Traits.For_clauses.fst" = ''' module Traits.For_clauses #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Foo (v_Self: Type0) (v_T: Type0) = { f_to_t_pre:v_Self -> Type0; f_to_t_post:v_Self -> v_T -> Type0; f_to_t:x0: v_Self -> Prims.Pure v_T (f_to_t_pre x0) (fun result -> f_to_t_post x0 result) } let e_f (#v_X: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_X u8) (x: v_X) : Prims.unit = let _:u8 = f_to_t #v_X #u8 #FStar.Tactics.Typeclasses.solve x in () ''' "Traits.Impl_expr_in_goal.fst" = ''' module Traits.Impl_expr_in_goal #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_T1 (v_Self: Type0) = { [@@@ 
FStar.Tactics.Typeclasses.no_method]f_Assoc:Type0 } class t_T2 (v_Self: Type0) = { __marker_trait_t_T2:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_T1 v_U) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_T2 i0.f_Assoc) : t_T2 v_U = { __marker_trait_t_T2 = () } ''' "Traits.Implement_arithmetic_trait.fst" = ''' module Traits.Implement_arithmetic_trait #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Wrapped = | Wrapped : i32 -> t_Wrapped [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Core_models.Ops.Arith.t_Add t_Wrapped t_Wrapped = { f_Output = t_Wrapped; f_add_pre = (fun (self: t_Wrapped) (rhs: t_Wrapped) -> true); f_add_post = (fun (self: t_Wrapped) (rhs: t_Wrapped) (out: t_Wrapped) -> true); f_add = fun (self: t_Wrapped) (rhs: t_Wrapped) -> Wrapped (self._0 +! rhs._0) <: t_Wrapped } let test (x y: t_Wrapped) : t_Wrapped = Core_models.Ops.Arith.f_add #t_Wrapped #t_Wrapped #FStar.Tactics.Typeclasses.solve x y ''' "Traits.Implicit_dependencies_issue_667_.Define_type.fst" = ''' module Traits.Implicit_dependencies_issue_667_.Define_type #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_MyType = | MyType : t_MyType ''' "Traits.Implicit_dependencies_issue_667_.Impl_type.fst" = ''' module Traits.Implicit_dependencies_issue_667_.Impl_type #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: Traits.Implicit_dependencies_issue_667_.Trait_definition.t_MyTrait Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType = { f_my_method_pre = (fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) -> true); f_my_method_post = (fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) (out: Prims.unit) -> true); f_my_method = fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) -> () } 
''' "Traits.Implicit_dependencies_issue_667_.Trait_definition.fst" = ''' module Traits.Implicit_dependencies_issue_667_.Trait_definition #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_MyTrait (v_Self: Type0) = { f_my_method_pre:v_Self -> Type0; f_my_method_post:v_Self -> Prims.unit -> Type0; f_my_method:x0: v_Self -> Prims.Pure Prims.unit (f_my_method_pre x0) (fun result -> f_my_method_post x0 result) } ''' "Traits.Implicit_dependencies_issue_667_.Use_type.fst" = ''' module Traits.Implicit_dependencies_issue_667_.Use_type #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) let open Traits.Implicit_dependencies_issue_667_.Impl_type in let open Traits.Implicit_dependencies_issue_667_.Trait_definition in () let some_function (x: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) : Prims.unit = Traits.Implicit_dependencies_issue_667_.Trait_definition.f_my_method #Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType #FStar.Tactics.Typeclasses.solve x ''' "Traits.Implicit_explicit_calling_conventions.fst" = ''' module Traits.Implicit_explicit_calling_conventions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Type (v_TypeArg: Type0) (v_ConstArg: usize) = { f_field:t_Array v_TypeArg v_ConstArg } class t_Trait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = { f_method_pre: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> v_Self -> v_TypeArg -> t_Type v_TypeArg v_ConstArg -> Type0; f_method_post: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> v_Self -> v_TypeArg -> t_Type v_TypeArg v_ConstArg -> Prims.unit -> Type0; f_method: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> x0: v_Self -> x1: v_TypeArg -> x2: t_Type v_TypeArg v_ConstArg -> Prims.Pure Prims.unit (f_method_pre 
#v_MethodTypeArg v_MethodConstArg x0 x1 x2) (fun result -> f_method_post #v_MethodTypeArg v_MethodConstArg x0 x1 x2 result); f_associated_function_pre: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> v_Self -> v_TypeArg -> t_Type v_TypeArg v_ConstArg -> Type0; f_associated_function_post: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> v_Self -> v_TypeArg -> t_Type v_TypeArg v_ConstArg -> Prims.unit -> Type0; f_associated_function: #v_MethodTypeArg: Type0 -> v_MethodConstArg: usize -> x0: v_Self -> x1: v_TypeArg -> x2: t_Type v_TypeArg v_ConstArg -> Prims.Pure Prims.unit (f_associated_function_pre #v_MethodTypeArg v_MethodConstArg x0 x1 x2) (fun result -> f_associated_function_post #v_MethodTypeArg v_MethodConstArg x0 x1 x2 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (#v_TypeArg: Type0) (v_ConstArg: usize) : t_Trait Prims.unit v_TypeArg v_ConstArg = { f_method_pre = (fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) -> true); f_method_post = (fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) (out: Prims.unit) -> true); f_method = (fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) -> ()); f_associated_function_pre = (fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (e_self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) -> true); f_associated_function_post = (fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (e_self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) (out: Prims.unit) -> true); f_associated_function = fun (#v_MethodTypeArg: Type0) (v_MethodConstArg: usize) (e_self: Prims.unit) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) -> () } let method_caller 
(#v_MethodTypeArg #v_TypeArg: Type0) (v_ConstArg v_MethodConstArg: usize) (#v_ImplTrait: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_ImplTrait v_TypeArg v_ConstArg) (x: v_ImplTrait) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) : Prims.unit = let _:Prims.unit = f_method #v_ImplTrait #v_TypeArg #v_ConstArg #FStar.Tactics.Typeclasses.solve #v_MethodTypeArg v_MethodConstArg x value_TypeArg value_Type in () let associated_function_caller (#v_MethodTypeArg #v_TypeArg: Type0) (v_ConstArg v_MethodConstArg: usize) (#v_ImplTrait: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_ImplTrait v_TypeArg v_ConstArg) (x: v_ImplTrait) (value_TypeArg: v_TypeArg) (value_Type: t_Type v_TypeArg v_ConstArg) : Prims.unit = let _:Prims.unit = f_associated_function #v_ImplTrait #v_TypeArg #v_ConstArg #FStar.Tactics.Typeclasses.solve #v_MethodTypeArg v_MethodConstArg x value_TypeArg value_Type in () class t_SubTrait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Trait v_Self v_TypeArg v_ConstArg; [@@@ FStar.Tactics.Typeclasses.no_method]f_AssocType:Type0; f_AssocType_i0:t_Trait f_AssocType v_TypeArg v_ConstArg } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) (v_TypeArg:Type0) (v_ConstArg:usize) {|i: t_SubTrait v_Self v_TypeArg v_ConstArg|} -> i._super_i0 ''' "Traits.Interlaced_consts_types.fst" = ''' module Traits.Interlaced_consts_types #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Bar (v_FooConst: usize) (v_FooType: Type0) = | Bar : t_Array v_FooType v_FooConst -> t_Bar v_FooConst v_FooType class t_Foo (v_Self: Type0) (v_FooConst: usize) (v_FooType: Type0) = { f_fun_pre: v_FunConst: usize -> #v_FunType: Type0 -> t_Array v_FooType v_FooConst -> t_Array v_FunType v_FunConst -> Type0; f_fun_post: v_FunConst: usize -> #v_FunType: Type0 -> t_Array v_FooType v_FooConst -> t_Array v_FunType v_FunConst -> 
Prims.unit -> Type0; f_fun: v_FunConst: usize -> #v_FunType: Type0 -> x0: t_Array v_FooType v_FooConst -> x1: t_Array v_FunType v_FunConst -> Prims.Pure Prims.unit (f_fun_pre v_FunConst #v_FunType x0 x1) (fun result -> f_fun_post v_FunConst #v_FunType x0 x1 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl (v_FooConst: usize) (#v_FooType #v_SelfType: Type0) : t_Foo v_SelfType v_FooConst v_FooType = { f_fun_pre = (fun (v_FunConst: usize) (#v_FunType: Type0) (x: t_Array v_FooType v_FooConst) (y: t_Array v_FunType v_FunConst) -> true); f_fun_post = (fun (v_FunConst: usize) (#v_FunType: Type0) (x: t_Array v_FooType v_FooConst) (y: t_Array v_FunType v_FunConst) (out: Prims.unit) -> true); f_fun = fun (v_FunConst: usize) (#v_FunType: Type0) (x: t_Array v_FooType v_FooConst) (y: t_Array v_FunType v_FunConst) -> () } ''' "Traits.Recursive_trait_with_assoc_type.fst" = ''' module Traits.Recursive_trait_with_assoc_type #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_Trait1 (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_T:Type0; f_T_i0:t_Trait1 f_T } class t_Trait2 (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Trait1 v_Self; [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0 } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_Trait2 v_Self|} -> i._super_i0 ''' "Traits.Type_alias_bounds_issue_707_.fst" = ''' module Traits.Type_alias_bounds_issue_707_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_StructWithGenericBounds (v_T: Type0) {| i0: Core_models.Clone.t_Clone v_T |} = | StructWithGenericBounds : v_T -> t_StructWithGenericBounds v_T ''' "Traits.Typenum_perf.fst" = ''' module Traits.Typenum_perf #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) let open Typenum.Type_operators in () let e_f (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Typenum.Type_operators.t_IsLess v_T (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt (Typenum.Uint.t_UInt Typenum.Uint.t_UTerm Typenum.Bit.t_B1 ) Typenum.Bit.t_B1 ) Typenum.Bit.t_B1 ) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1 ) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1)) (_: Prims.unit) : Prims.unit = () ''' "Traits.Unconstrainted_types_issue_677_.fst" = ''' module Traits.Unconstrainted_types_issue_677_ #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_PolyOp (v_Self: Type0) = { f_op_pre:u32 -> u32 -> Type0; f_op_post:u32 -> u32 -> u32 -> Type0; f_op:x0: u32 -> x1: u32 -> Prims.Pure u32 (f_op_pre x0 x1) (fun result -> f_op_post x0 x1 result) } type t_Plus = | Plus : t_Plus [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_PolyOp t_Plus = { f_op_pre = (fun (x: u32) (y: u32) -> true); f_op_post = (fun (x: u32) (y: u32) (out: u32) -> true); f_op = fun (x: u32) (y: u32) -> x +! y } type t_Times = | Times : t_Times [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_PolyOp_for_Times: t_PolyOp t_Times = { f_op_pre = (fun (x: u32) (y: u32) -> true); f_op_post = (fun (x: u32) (y: u32) (out: u32) -> true); f_op = fun (x: u32) (y: u32) -> x *! 
y } let twice (#v_OP: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PolyOp v_OP) (x: u32) : u32 = f_op #v_OP #FStar.Tactics.Typeclasses.solve x x let both (x: u32) : (u32 & u32) = twice #t_Plus x, twice #t_Times x <: (u32 & u32) ''' "Traits.fst" = ''' module Traits #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models class t_SuperTrait (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_Self; f_function_of_super_trait_pre:v_Self -> Type0; f_function_of_super_trait_post:v_Self -> u32 -> Type0; f_function_of_super_trait:x0: v_Self -> Prims.Pure u32 (f_function_of_super_trait_pre x0) (fun result -> f_function_of_super_trait_post x0 result) } [@@ FStar.Tactics.Typeclasses.tcinstance] let _ = fun (v_Self:Type0) {|i: t_SuperTrait v_Self|} -> i._super_i0 [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_SuperTrait i32 = { _super_i0 = FStar.Tactics.Typeclasses.solve; f_function_of_super_trait_pre = (fun (self: i32) -> true); f_function_of_super_trait_post = (fun (self: i32) (out: u32) -> true); f_function_of_super_trait = fun (self: i32) -> cast (Core_models.Num.impl_i32__abs self <: i32) <: u32 } type t_Struct = | Struct : t_Struct class t_Bar (v_Self: Type0) = { f_bar_pre:v_Self -> Type0; f_bar_post:v_Self -> Prims.unit -> Type0; f_bar:x0: v_Self -> Prims.Pure Prims.unit (f_bar_pre x0) (fun result -> f_bar_post x0 result) } let impl_2__method (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Bar v_T) (x: v_T) : Prims.unit = f_bar #v_T #FStar.Tactics.Typeclasses.solve x let cclosure_iimpl_expr (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Iter.Traits.Iterator.t_Iterator v_I) (#_: unit{i0.Core_models.Iter.Traits.Iterator.f_Item == Prims.unit}) (it: v_I) : Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global = Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map v_I (Prims.unit -> Prims.unit)) #FStar.Tactics.Typeclasses.solve 
#(Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global) (Core_models.Iter.Traits.Iterator.f_map #v_I #FStar.Tactics.Typeclasses.solve #Prims.unit #(Prims.unit -> Prims.unit) it (fun x -> x) <: Core_models.Iter.Adapters.Map.t_Map v_I (Prims.unit -> Prims.unit)) let cclosure_iimpl_expr_fngen (#v_I #v_F: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Iter.Traits.Iterator.t_Iterator v_I) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnMut v_F Prims.unit) (#_: unit{i0.Core_models.Iter.Traits.Iterator.f_Item == Prims.unit}) (it: v_I) (f: v_F) : Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global = Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map v_I v_F) #FStar.Tactics.Typeclasses.solve #(Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global) (Core_models.Iter.Traits.Iterator.f_map #v_I #FStar.Tactics.Typeclasses.solve #Prims.unit #v_F it f <: Core_models.Iter.Adapters.Map.t_Map v_I v_F) type t_Error = | Error_Fail : t_Error let t_Error_cast_to_repr (x: t_Error) : isize = match x <: t_Error with | Error_Fail -> mk_isize 0 let impl_Error__for_application_callback (_: Prims.unit) : Prims.unit -> t_Error = fun temp_0_ -> let _:Prims.unit = temp_0_ in Error_Fail <: t_Error let iter_option (#v_T: Type0) (x: Core_models.Option.t_Option v_T) : Core_models.Option.t_IntoIter v_T = Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Option.t_Option v_T) #FStar.Tactics.Typeclasses.solve (Core_models.Option.impl__as_ref #v_T x <: Core_models.Option.t_Option v_T) let uuse_iimpl_trait (_: Prims.unit) : Prims.unit = let iter:Core_models.Option.t_IntoIter bool = iter_option #bool (Core_models.Option.Option_Some false <: Core_models.Option.t_Option bool) in let (tmp0: Core_models.Option.t_IntoIter bool), (out: Core_models.Option.t_Option bool) = Core_models.Iter.Traits.Iterator.f_next #(Core_models.Option.t_IntoIter bool) #FStar.Tactics.Typeclasses.solve iter in let iter:Core_models.Option.t_IntoIter bool = tmp0 in let 
_:Core_models.Option.t_Option bool = out in () class t_Foo (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_AssocType:Type0; f_AssocType_i0:t_SuperTrait f_AssocType; f_N:usize; f_assoc_f_pre:Prims.unit -> Type0; f_assoc_f_post:Prims.unit -> Prims.unit -> Type0; f_assoc_f:x0: Prims.unit -> Prims.Pure Prims.unit (f_assoc_f_pre x0) (fun result -> f_assoc_f_post x0 result); f_method_f_pre:v_Self -> Type0; f_method_f_post:v_Self -> Prims.unit -> Type0; f_method_f:x0: v_Self -> Prims.Pure Prims.unit (f_method_f_pre x0) (fun result -> f_method_f_post x0 result); f_assoc_type_pre:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> f_AssocType -> Type0; f_assoc_type_post:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> f_AssocType -> Prims.unit -> Type0; f_assoc_type:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> x0: f_AssocType -> Prims.Pure Prims.unit (f_assoc_type_pre #i1 x0) (fun result -> f_assoc_type_post #i1 x0 result) } class t_Lang (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Var:Type0; f_s_pre:v_Self -> i32 -> Type0; f_s_post:v_Self -> i32 -> (v_Self & f_Var) -> Type0; f_s:x0: v_Self -> x1: i32 -> Prims.Pure (v_Self & f_Var) (f_s_pre x0 x1) (fun result -> f_s_post x0 x1 result) } let f (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_T) (x: v_T) : Prims.unit = let _:Prims.unit = f_assoc_f #v_T #FStar.Tactics.Typeclasses.solve () in f_method_f #v_T #FStar.Tactics.Typeclasses.solve x let g (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_T) (x: i0.f_AssocType) : u32 = f_function_of_super_trait #i0.f_AssocType #FStar.Tactics.Typeclasses.solve x [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_Foo_for_tuple_: t_Foo Prims.unit = { f_AssocType = i32; f_AssocType_i0 = FStar.Tactics.Typeclasses.solve; f_N = mk_usize 32; f_assoc_f_pre = (fun (_: Prims.unit) -> true); f_assoc_f_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); f_assoc_f = (fun (_: Prims.unit) -> () <: Prims.unit); 
f_method_f_pre = (fun (self: Prims.unit) -> true); f_method_f_post = (fun (self: Prims.unit) (out: Prims.unit) -> true); f_method_f = (fun (self: Prims.unit) -> f_assoc_f #Prims.unit #FStar.Tactics.Typeclasses.solve ()); f_assoc_type_pre = (fun (_: i32) -> true); f_assoc_type_post = (fun (_: i32) (out: Prims.unit) -> true); f_assoc_type = fun (_: i32) -> () } ''' ================================================ FILE: test-harness/src/snapshots/toolchain__unsafe into-fstar.snap ================================================ --- source: test-harness/src/harness.rs expression: snapshot info: kind: Translate: backend: fstar info: name: unsafe manifest: unsafe/Cargo.toml description: ~ spec: optional: false broken: false issue_id: ~ positive: true snapshot: stderr: true stdout: true include_flag: ~ backend_options: ~ --- exit = 0 stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' [stdout] diagnostics = [] [stdout.files] "Unsafe.fst" = ''' module Unsafe #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open FStar.Mul open Core_models type t_Impossible = let t_Impossible_cast_to_repr (x: t_Impossible) : Rust_primitives.Hax.t_Never = match x <: t_Impossible with let impossible (_: Prims.unit) : Prims.Pure t_Impossible (requires false) (fun _ -> Prims.l_True) = Rust_primitives.Hax.never_to_any (Core_models.Hint.unreachable_unchecked () <: Rust_primitives.Hax.t_Never) let get_unchecked_example (slice: t_Slice u8) : Prims.Pure u8 (requires (Core_models.Slice.impl__len #u8 slice <: usize) >. 
mk_usize 10) (fun _ -> Prims.l_True) = Core_models.Slice.impl__get_unchecked #u8 #usize slice (mk_usize 6) ''' ================================================ FILE: tests/.gitignore ================================================ # ignore all output folder generated by the tool proofs/ ================================================ FILE: tests/Cargo.toml ================================================ [workspace] members = [ "assert", "enum-struct-variant", "literals", "slices", "naming", "if-let", "let-else", "enum-repr", "pattern-or", "side-effects", "mut-ref-functionalization", "generics", "lean-tests", "lean-core-models", "loops", "even", "odd", "never-type", "attributes", "attribute-opaque", "raw-attributes", "traits", "dyn", "reordering", "nested-derefs", "patterns", "proverif-minimal", "proverif-basic-structs", "proverif-ping-pong", "proverif-noise", "proverif-fn-to-letfun", "cli/include-flag", "cli/interface-only", "recursion", "functions", "guards", "cyclic-modules", "unsafe", "constructor-as-closure", "statics", ] resolver = "2" ================================================ FILE: tests/README.md ================================================ # Tests This directory contains tests for the engine and the frontend. For examples of verification using hax, see `../examples`. 
================================================ FILE: tests/assert/Cargo.toml ================================================ [package] name = "assert" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq+ssprove" = { broken = false, snapshot = "stdout", issue_id = "285" } ================================================ FILE: tests/assert/src/lib.rs ================================================ #![allow(dead_code)] pub fn asserts() { assert!({ assert!(true); 1 == 1 }); assert_eq!(2, 2); assert_ne!(1, 2); } ================================================ FILE: tests/attribute-opaque/Cargo.toml ================================================ [package] name = "attribute-opaque" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } serde = { version = "1.0", features = ["derive"] } [package.metadata.hax-tests] into."fstar" = { backend-options = ["--interfaces", "+**"] } ================================================ FILE: tests/attribute-opaque/src/lib.rs ================================================ #[hax_lib::opaque] struct OpaqueStruct { field: [T; X], other_field: U, } #[hax_lib::opaque] enum OpaqueEnum { A([T; X]), B(U), } #[hax_lib::opaque] fn f_generic(x: U) -> OpaqueEnum { OpaqueEnum::B(x) } #[hax_lib::opaque] fn f(x: bool, y: bool) -> bool { x && y } #[hax_lib::opaque] #[hax_lib::requires(x)] #[hax_lib::ensures(|result| result == y)] fn f_pre_post(x: bool, y: bool) -> bool { x && y } #[hax_lib::attributes] trait T { type U; const c: u8; fn d(); #[hax_lib::requires(x == 0)] fn m(&self, x: u8) -> bool; } #[hax_lib::attributes] #[hax_lib::opaque] impl T for u8 { type U = u8; const c: u8 = 0; fn d() { unsafe { let my_num: i32 = 10; let _my_num_ptr: *const i32 = &my_num; let mut my_speed: i32 = 88; let _my_speed_ptr: *mut i32 = &mut my_speed; } } #[hax_lib::requires(x == 0)] #[hax_lib::ensures(|result| result)] fn m(&self, x: u8) -> bool { *self >= x } } trait TrGeneric { fn 
f(x: U) -> Self; } #[hax_lib::opaque] impl TrGeneric for i32 { fn f(_x: U) -> Self { 0 } } #[hax_lib::opaque] const C: u8 = 0 + 0; struct S1(); impl S1 { #[hax_lib::opaque] fn f_s1() {} } struct S2(); #[hax_lib::opaque] impl S2 { fn f_s2() {} } ================================================ FILE: tests/attributes/Cargo.toml ================================================ [package] name = "attributes" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } hax-bounded-integers = { path = "../../hax-bounded-integers" } serde = { version = "1.0", features = ["derive"] } [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/attributes/src/lib.rs ================================================ use hax_lib as hax; // dummy max value const u32_max: u32 = 90000; /// A doc comment on `add3` #[doc = "another doc comment on add3"] #[hax::requires(x > 10 && y > 10 && z > 10 && x + y + z < u32_max)] #[hax::ensures(|result| hax_lib::implies(true, result > 32))] fn add3(x: u32, y: u32, z: u32) -> u32 { x + y + z } #[hax::requires(*x < 40 && *y < 300)] #[hax::ensures(|result| *future(x) == *y && *future(y) == *x && result == *x + *y)] fn swap_and_mut_req_ens(x: &mut u32, y: &mut u32) -> u32 { let x0 = *x; *x = *y; *y = x0; *x + *y } #[hax_lib::ensures(|_| true)] fn issue_844(_x: &mut u8) {} // From issue #845 mod ensures_on_arity_zero_fns { #[hax_lib::requires(true)] #[hax_lib::ensures(|_x| true)] fn doing_nothing() {} #[hax_lib::requires(true)] #[hax_lib::ensures(|x| x > 100)] fn basically_a_constant() -> u8 { 127 } } #[hax::lemma] fn add3_lemma(x: u32) -> Proof<{ x <= 10 || x >= u32_max / 3 || add3(x, x, x) == x * 3 }> {} fn dummy_function(x: u32) -> u32 { x } #[hax::lemma] #[hax::fstar::smt_pat(x)] fn apply_dummy_function_lemma(x: u32) -> Proof<{ x == dummy_function(x) }> {} mod postprocess_with { #[hax_lib::fstar::postprocess_with("fun _ -> FStar.Tactics.trefl ()")] fn 
f() {} pub mod somewhere { pub fn some_hypothetical_tactic(some_param: u8) {} } use somewhere::some_hypothetical_tactic; #[hax_lib::fstar::postprocess_with(|()| some_hypothetical_tactic(12))] fn g() {} } #[hax::exclude] pub fn f<'a, T>(c: bool, x: &'a mut T, y: &'a mut T) -> &'a mut T { if c { x } else { y } } #[hax::decreases(x)] fn fib(x: usize) -> usize { if x <= 2 { x } else { fib(x - 1).wrapping_add(fib(x - 2)) } } #[hax::attributes] pub struct Foo { pub x: u32, #[refine(y > 3)] pub y: u32, #[refine(y + x + z > 3)] pub z: u32, } #[hax::exclude] impl Foo { fn g(&self) {} } impl Foo { #[hax::exclude] fn h(&self) {} } fn props() { hax_lib::assume!(hax_lib::fstar::prop!("True")); hax_lib::assert_prop!(hax_lib::fstar::prop!("True")); } #[hax::attributes] mod refined_arithmetic { use core::ops::{Add, Mul}; struct Foo(u8); impl Add for Foo { type Output = Foo; #[requires(self.0 < 255 - rhs.0)] fn add(self, rhs: Foo) -> Foo { Foo(self.0 + rhs.0) } } impl Mul for Foo { type Output = Foo; #[requires(rhs.0 == 0 || self.0 < 255 / rhs.0)] fn mul(self, rhs: Foo) -> Foo { Foo(self.0 * rhs.0) } } } mod refined_indexes { use hax_lib as hax; const MAX: usize = 10; struct MyArray(pub [u8; MAX]); #[hax::attributes] impl std::ops::Index for MyArray { type Output = u8; #[requires(index < MAX)] fn index(&self, index: usize) -> &Self::Output { &self[index] } } #[hax::exclude] impl std::ops::IndexMut for MyArray { fn index_mut(&mut self, index: usize) -> &mut Self::Output { &mut self[index] } } /// Triple dash comment /** Multiline double star comment Maecenas blandit accumsan feugiat. Done vitae ullamcorper est. Curabitur id dui eget sem viverra interdum. 
*/ fn mutation_example( use_generic_update_at: &mut MyArray, use_specialized_update_at: &mut [u8], specialized_as_well: &mut Vec, ) { use_generic_update_at[2] = 0; use_specialized_update_at[2] = 0; specialized_as_well[2] = 0; } } mod newtype_pattern { use hax_lib as hax; const MAX: usize = 10; #[hax::attributes] struct SafeIndex { #[refine(i < MAX)] i: usize, } impl SafeIndex { fn new(i: usize) -> Option { if i < MAX { Some(Self { i }) } else { None } } fn as_usize(&self) -> usize { self.i } } impl std::ops::Index for [T; MAX] { type Output = T; fn index(&self, index: SafeIndex) -> &Self::Output { &self[index.i] } } } #[hax::fstar::before(r#"let before_inlined_code = "example before""#)] #[hax::fstar::after(r#"let inlined_code_after = "example after""#)] fn inlined_code(foo: Foo) { const V: u8 = 12; let v_a = 13; hax::fstar!( r"let x = ${foo.x} in let $?{Foo {y, ..}} = $foo in $add3 ((fun _ -> 3ul) $foo) $v_a $V y " ); } #[hax::fstar::before(r#"let before_1 = "example before 1""#)] #[hax::fstar::before(r#"let before_2 = "example before 2""#)] #[hax::fstar::before(r#"let before_3 = "example before 3""#)] #[hax::fstar::after(r#"let after 1 = "example after 1""#)] #[hax::fstar::after(r#"let after 2 = "example after 2""#)] #[hax::fstar::after(r#"let after 3 = "example after 3""#)] fn mutliple_before_after() {} #[hax::fstar::replace(r#"unfold let $some_function _ = "hello from F*""#)] fn some_function() -> String { String::from("hello from Rust") } mod future_self { #[derive(Eq, PartialEq)] struct Dummy; #[hax_lib::attributes] impl Dummy { #[hax_lib::ensures(|_| future(self) == self)] fn f(&mut self) {} } } mod replace_body { #[hax_lib::fstar::replace_body("magic ${x}")] fn f(x: u8, y: u8) -> u8 { 1 + 2 } struct Foo; impl Foo { #[hax_lib::fstar::replace_body("(magic (${self} <: $:{Self})) ${x}")] fn assoc_fn(&self, x: u8) {} } impl ToString for Foo { #[hax_lib::fstar::replace_body(r#""The type was $:{Self}""#)] fn to_string(&self) -> String { "Hello".into() } } } mod 
pre_post_on_traits_and_impls { use hax_lib::*; #[hax_lib::attributes] trait Operation { // we allow `hax_lib`, `::hax_lib` or no path at all #[hax_lib::requires(x.lift() <= int!(127))] #[ensures(|result| x.lift() * int!(2) == result.lift())] fn double(x: u8) -> u8; } struct ViaAdd; struct ViaMul; #[hax_lib::attributes] impl Operation for ViaAdd { #[::hax_lib::requires(x.lift() <= int!(127))] #[ensures(|result| x.lift() * int!(2) == result.lift())] fn double(x: u8) -> u8 { x + x } } #[hax_lib::attributes] impl Operation for ViaMul { #[requires(x.lift() <= int!(127))] #[::hax_lib::ensures(|result| x.lift() * int!(2) == result.lift())] fn double(x: u8) -> u8 { x * 2 } } #[hax_lib::attributes] trait TraitWithRequiresAndEnsures { #[requires(x < 100)] #[ensures(|r| r > 88)] fn method(&self, x: u8) -> u8; } fn test(x: T) -> u8 { x.method(99) - 88 } } /// An minimal example of a model of math integers for F* mod int_model { use super::hax; #[hax::fstar::replace(r#"unfold type $:{Int} = int"#)] #[derive(Copy, Clone)] struct Int(u128); #[hax::fstar::replace(r#"unfold let ${add} x y = x + y"#)] fn add(x: Int, y: Int) -> Int { Int(x.0 + y.0) } use std::ops::Sub; #[hax::fstar::replace( r#" unfold instance impl: Core.Ops.Arith.t_Sub $:Int $:Int = { f_Output = $:Int; f_sub_pre = (fun (self: $:Int) (other: $:Int) -> true); f_sub_post = (fun (self: $:Int) (other: $:Int) (out: $:Int) -> true); f_sub = fun (self: $:Int) (other: $:Int) -> self + other } "# )] impl Sub for Int { type Output = Self; fn sub(self, other: Self) -> Self::Output { Self(self.0 + other.0) } } } /// Illustration of the `refinement_type` macro that helps creating refinement types via thin newtype wrappers. mod refinement_types { use hax_lib::*; #[hax_lib::refinement_type(|x| x >= MIN && x <= MAX)] pub struct BoundedU8(u8); pub fn bounded_u8(x: BoundedU8<12, 15>, y: BoundedU8<10, 11>) -> BoundedU8<1, 23> { BoundedU8::new(x.get() + y.get()) } /// Even `u8` numbers. 
Constructing pub Even values triggers static /// proofs in the extraction. #[hax_lib::refinement_type(|x| x % 2 == 0)] pub struct Even(u8); #[hax_lib::requires(x < 127)] pub fn double(x: u8) -> Even { Even::new(x + x) } #[hax_lib::requires(x < 127)] pub fn double_refine(x: u8) -> Even { (x + x).into_checked() } /// A string that contains no space. #[hax_lib::refinement_type(|x| !x.chars().any(|ch| ch == ' '))] pub struct NoE(String); /// A modular mutliplicative inverse #[hax_lib::refinement_type(|n| (n as u128 * MOD as u128) % MOD as u128 == 1)] pub struct ModInverse(u32); /// A field element #[hax_lib::refinement_type(|x| x <= 2347)] pub struct FieldElement(u16); /// Example of a specific constraint on a value #[hax_lib::refinement_type(|x| x == 4 || x == 5 || x == 10 || x == 11)] pub struct CompressionFactor(u8); use hax_lib::int::*; /// Example of a refined int, that derives all common arithmetic operations hax_bounded_integers::refinement_int!( BoundedAbsI16(i16, 2, |x| B.lift() < int!(32768) && x.lift() >= -B.lift() && x.lift() <= B.lift()) ); #[hax_lib::requires(M.lift() < int!(32768) && M.lift() == N.lift() * int!(2))] fn double_abs_i16(x: BoundedAbsI16) -> BoundedAbsI16 { (x * 2).into_checked() } } mod nested_refinement_elim { use hax_lib::*; #[refinement_type(|x| true)] pub struct DummyRefinement(u16); fn elim_twice(x: DummyRefinement) -> u16 { (DummyRefinement::new(x.get())).get() } } /// `ensures` and `requires` with inlined code (issue #825) mod inlined_code_ensures_requires { #[hax_lib::requires(fstar!("forall i. FStar.Seq.index $v i <. ${254u8}"))] #[hax_lib::ensures(|()| { let future_v = future(v); fstar!("forall i. FStar.Seq.index ${future_v} i >. 
${0u8}") })] fn increment_array(v: &mut [u8; 4]) { v[0] += 1; v[1] += 1; v[2] += 1; v[3] += 1; } } mod verifcation_status { #[hax_lib::fstar::verification_status(lax)] fn a_function_which_only_laxes() { assert!(/*very complicated stuff*/ false) } #[hax_lib::fstar::verification_status(panic_free)] #[hax_lib::ensures(|x|/*very complicated stuff*/false)] fn a_panicfree_function() -> u8 { let a = 3; let b = 6; a + b } #[hax_lib::fstar::verification_status(panic_free)] #[hax_lib::ensures(|x|/*very complicated stuff*/false)] fn another_panicfree_function() { let not_much = 0; let nothing = 0; let still_not_much = not_much + nothing; } } mod requires_mut { use hax_lib::*; #[hax_lib::attributes] trait Foo { #[hax_lib::requires(x.lift() + y.lift() < int!(254))] #[hax_lib::ensures(|output_variable| output_variable == *future(y))] fn f(x: u8, y: &mut u8) -> u8; fn g(x: u8, y: u8) -> u8; fn h(x: u8, y: u8); fn i(x: u8, y: &mut u8); } #[hax_lib::attributes] impl Foo for () { #[hax_lib::requires(x.lift() + y.lift() < int!(254))] #[hax_lib::ensures(|output_variable| output_variable == *future(y))] fn f(x: u8, y: &mut u8) -> u8 { *y += x; *y } #[hax_lib::requires(true)] #[hax_lib::ensures(|output_variable| output_variable == y)] fn g(x: u8, y: u8) -> u8 { y } #[hax_lib::requires(true)] #[hax_lib::ensures(|output_variable| output_variable == ())] fn h(x: u8, y: u8) { () } #[hax_lib::requires(true)] #[hax_lib::ensures(|out| *future(y) == *y)] fn i(x: u8, y: &mut u8) { () } } } mod issue_1266 { #[hax_lib::attributes] trait T { #[hax_lib::ensures(|_|true)] fn v(x: &mut Self); } } mod props { use hax_lib::*; fn f(x: Prop, y: bool) -> Prop { let xprop: Prop = y.into(); let p = y.lift() & xprop & y & y.to_prop(); !(p | y).implies(forall(|x: u8| x <= u8::MAX) & exists(|x: u16| x > 300)) } } mod reorder { #[hax_lib::attributes] struct Foo { #[order(40)] pub field_1: u8, #[hax_lib::order(31)] pub field_2: u8, pub field_3: u8, pub field_4: u8, } #[hax_lib::attributes] enum Bar { A { 
a_field_1: u8, a_field_2: u8, #[hax_lib::order(-42)] a_field_3: u8, }, B { b_field_1: u8, #[hax_lib::order(42)] b_field_2: u8, b_field_3: u8, }, } } mod issue_1276 { struct S(pub u8); #[hax_lib::attributes] impl S { #[hax_lib::requires(self.0 == 0 && self_ == self_1 && self_2 == 9)] fn f(&self, self_: u8, self_0: u8, self_1: u8, self_2: u8) {} } } mod issue_evit_57 { struct Foo; #[hax_lib::attributes] impl Foo { #[hax_lib::requires(true)] fn f(mut self) {} } } ================================================ FILE: tests/cli/include-flag/Cargo.toml ================================================ [package] name = "include-flag" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq" = { snapshot = "stdout" } ================================================ FILE: tests/cli/include-flag/src/lib.rs ================================================ #![allow(dead_code)] #![allow(non_camel_case_types)] /// Entrypoint fn main() { main_a(Foo); main_b(); main_c(); } /// Direct dependencies fn main_a(x: T) { main_a_a(); main_a_b(); main_a_c(); } fn main_b() { main_b_a(); main_b_b(); main_b_c(); } fn main_c() { main_c_a(); main_c_b(); main_c_c(); } struct Foo; trait Trait {} impl Trait for Foo {} /// Indirect dependencies fn main_a_a() {} fn main_b_a() {} fn main_c_a() {} fn main_a_b() {} fn main_b_b() {} fn main_c_b() {} fn main_a_c() {} fn main_b_c() {} fn main_c_c() {} ================================================ FILE: tests/cli/interface-only/Cargo.toml ================================================ [package] name = "interface-only" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { include-flag = "+:** -interface_only::Foo" } ================================================ FILE: tests/cli/interface-only/src/lib.rs ================================================ #![allow(dead_code)] /// This item contains unsafe blocks and raw references, two 
features /// not supported by hax. Thanks to the `-i` flag and the `+:` /// modifier, `f` is still extractable as an interface. /// /// Expressions within type are still extracted, as well as pre- and /// post-conditions. #[hax_lib::requires(x < 254)] #[hax_lib::ensures(|r| r[0] > x)] fn f(x: u8) -> [u8; 4] { let y = x as *const i8; unsafe { println!("{}", *y); } [x + 1, x, x, x] } /// This struct contains a field which uses raw pointers, which are /// not supported by hax. This item cannot be extracted at all: we /// need to exclude it with `-i '-*::Foo'`. struct Foo { unsupported_field: *const u8, } struct Bar; /// Non-inherent implementations are extracted, their bodies are not /// dropped. This might be a bit surprising: see /// https://github.com/hacspec/hax/issues/616. impl From<()> for Bar { fn from((): ()) -> Self { Bar } } /// If you need to drop the body of a method, please hoist it: impl From for Bar { fn from(x: u8) -> Self { fn from(_: u8) -> Bar { Bar } from(x) } } pub struct Holder { pub(crate) value: Vec, } impl From<()> for Holder { fn from((): ()) -> Self { Holder { value: Vec::new() } } } pub struct Param { pub(crate) value: [u8; SIZE], } impl From<()> for Param { fn from((): ()) -> Self { Param { value: [0; SIZE] } } } fn f_generic(_x: U) -> Param { Param { value: [0; X] } } trait T { type Assoc; fn d(); } /// Impls with associated types are not erased impl T for u8 { type Assoc = u8; fn d() {} } trait T2 { fn d(); } /// Items can be forced to be transparent #[hax_lib::transparent] #[hax_lib::attributes] impl T2 for u8 { #[hax_lib::requires(false)] fn d() {} } #[hax_lib::requires(b.len() >= n)] #[hax_lib::ensures(|out| out <= n)] fn padlen(b: &[u8], n: usize) -> usize { if n > 0 && b[n - 1] == 0 { 1 + padlen(b, n - 1) } else { 0 } } ================================================ FILE: tests/constructor-as-closure/Cargo.toml ================================================ [package] name = "constructor-as-closure" version = "0.1.0" edition = 
"2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { broken = false, snapshot = "stdout", issue_id = "914" } ================================================ FILE: tests/constructor-as-closure/src/lib.rs ================================================ struct Test(i32); impl Test { pub fn test(x: Option) -> Option { x.map(Self) } } pub enum Context { A(i32), B(i32), } impl Context { pub fn test(x: Option) -> Option { x.map(Self::B) } } ================================================ FILE: tests/cyclic-modules/Cargo.toml ================================================ [package] name = "cyclic-modules" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { broken = false, snapshot = "stdout", issue_id = "396" } into."lean" = { broken = false, snapshot = "stdout" } ================================================ FILE: tests/cyclic-modules/src/lib.rs ================================================ mod typ_a { pub enum TRec { T(super::typ_b::T1Rec), Empty, } pub enum T { T(super::typ_b::T1), } } mod typ_b { pub enum T1Rec { T1(Box), } pub enum T2Rec { T2(super::typ_a::TRec), } pub enum T1 { T1, } pub enum T2 { T2(super::typ_a::T), } } fn f() {} mod b { pub fn g() { super::f() } } fn h() { b::g(); c::i() } fn h2() { c::i() } mod c { pub fn i() {} } mod d { pub fn d1() {} pub fn d2() { super::de::de1() } } mod e { pub fn e1() { super::d::d1() } } mod de { pub fn de1() { super::e::e1() } } mod rec { enum T { t1, t2, } pub fn g1(x: T) -> T { match x { T::t1 => g2(x), T::t2 => T::t1, } } pub fn g2(x: T) -> T { match x { T::t1 => g1(x), T::t2 => hf(x), } } pub fn hf(x: T) -> T { match x { T::t1 => hf(T::t2), T::t2 => x, } } } mod rec1_same_name { pub fn f(x: i32) -> i32 { super::rec2_same_name::f(x) } } mod rec2_same_name { pub fn f(x: i32) -> i32 { if x > 0 { super::rec1_same_name::f(x - 1) } else { 0 } } } mod enums_a { pub enum T { A, B, C(Vec), D(Vec), } } mod enums_b { pub 
enum U { A, B, C(Vec), } pub enum T { A, B, C(Vec), } pub fn f() -> T { T::A } } mod m1 { pub fn a() { super::m2::c() } } mod m2 { pub fn d() {} pub fn b() { super::m1::a(); d() } pub fn c() {} } pub mod disjoint_cycle_a { pub fn f() { super::disjoint_cycle_b::h() } pub fn g() {} } pub mod disjoint_cycle_b { pub fn h() {} pub fn i() { super::disjoint_cycle_a::g() } } pub mod variant_constructor_a { pub enum Context { A(i32), B(i32), } pub fn f() -> Context { super::variant_constructor_b::h() } impl Context { pub fn test(x: Option) -> Option { x.map(Self::A) } } } pub mod variant_constructor_b { pub fn h() -> super::variant_constructor_a::Context { super::variant_constructor_a::Context::A(1) } } pub mod late_skip_a { pub fn f() { super::late_skip_b::f() } } pub mod late_skip_b { #[hax_lib::requires(true)] pub fn f() { super::late_skip_a::f() } } mod issue_1823 { mod first_example { pub mod a { pub struct A {} impl A { pub fn mkb(self) -> super::b::B { super::b::B {} } } } pub mod b { pub struct B {} impl B { pub fn mka(self) -> super::a::A { super::a::A {} } } } } mod second_example { pub mod a { pub fn call_b() { super::b::b() } pub fn a() {} } pub mod b { pub fn call_a() { super::a::a() } pub fn b() {} } } } ================================================ FILE: tests/dyn/Cargo.toml ================================================ [package] name = "dyn" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { broken = false, snapshot = "stdout", issue_id = "296" } ================================================ FILE: tests/dyn/src/lib.rs ================================================ #![allow(dead_code)] pub trait Printable { fn stringify(&self) -> S; } impl Printable for i32 { fn stringify(&self) -> String { self.to_string() } } pub fn print(a: Box>) { println!("{}", a.stringify()); } ================================================ FILE: tests/enum-repr/Cargo.toml ================================================ 
[package] name = "enum-repr" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq+ssprove" = { broken = false, issue_id = "162" } ================================================ FILE: tests/enum-repr/src/lib.rs ================================================ #![allow(dead_code)] #[repr(u16)] enum EnumWithRepr { ExplicitDiscr1 = 1, ExplicitDiscr2 = 5, ImplicitDiscrEmptyTuple(), ImplicitDiscrEmptyStruct {}, } #[repr(u64)] enum ImplicitReprs { A, B(), C {}, D, E = 30, F, G, H {}, I(), } fn f() -> u32 { const CONST: u16 = EnumWithRepr::ExplicitDiscr1 as u16; let _x = EnumWithRepr::ExplicitDiscr2 as u16; EnumWithRepr::ImplicitDiscrEmptyTuple() as u32 + EnumWithRepr::ImplicitDiscrEmptyStruct {} as u32 } fn get_repr(x: EnumWithRepr) -> u16 { x as u16 } fn get_casted_repr(x: EnumWithRepr) -> u64 { x as u64 } ================================================ FILE: tests/enum-struct-variant/Cargo.toml ================================================ [package] name = "enum-struct-variant" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [package.metadata.hax-tests] into."fstar+coq" = {broken = false, snapshot = "none"} into."ssprove" = {broken = true, snapshot = "none"} ================================================ FILE: tests/enum-struct-variant/src/lib.rs ================================================ #![allow(dead_code)] #[derive(Debug)] pub struct Money { value: u64, } #[derive(Debug)] pub enum EnumWithStructVariant { Funds { balance: Money }, } ================================================ FILE: tests/even/Cargo.toml ================================================ [package] name = "even" version = "0.0.1" edition = "2021" [dependencies] ================================================ FILE: tests/even/src/lib.rs ================================================ #![allow(dead_code)] pub fn even(n: usize) -> bool { n % 2 == 0 
} ================================================ FILE: tests/functions/Cargo.toml ================================================ [package] name = "functions" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/functions/src/lib.rs ================================================ /// Issue #757 fn calling_function_pointer() { fn f() {} let f_ptr = f::; f_ptr(); } mod issue_1048 { pub struct CallableViaDeref; impl core::ops::Deref for CallableViaDeref { type Target = fn() -> bool; fn deref(&self) -> &Self::Target { &((|| true) as fn() -> bool) } } pub fn call_via_deref() -> bool { CallableViaDeref() } } ================================================ FILE: tests/generics/Cargo.toml ================================================ [package] name = "generics" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { broken = false, issue_id = "21" } ================================================ FILE: tests/generics/src/lib.rs ================================================ #![allow(dead_code)] fn dup(x: T) -> (T, T) { (x.clone(), x.clone()) } fn foo(arr: [usize; LEN]) -> usize { let mut acc = LEN + 9; for i in 0..LEN { acc += arr[i]; } acc } fn repeat(x: T) -> [T; LEN] { [x; LEN] } fn call_f() -> usize { f::<10>(3) + 3 } fn f(x: usize) -> usize { N + N + x } fn call_g() -> usize { g::<3, [usize; 3]>([42, 3, 49]) + 3 } fn g>(arr: T) -> usize { arr.into().into_iter().max().unwrap_or(N) + N } trait Foo { fn const_add(self) -> usize; } impl Foo for usize { fn const_add(self) -> usize { self + N } } struct Bar; impl Bar { fn inherent_impl_generics(x: [T; N]) {} } /// Test defaults types and constants mod defaults_generics { struct Defaults([T; N]); fn f(_: Defaults) {} } /// See https://github.com/hacspec/hax/issues/1176 mod impl_generics { struct Test(); impl Test { 
fn set_ciphersuites(&self, ciphers: impl IntoIterator) -> Result<(), ()> where S: AsRef, { Ok(()) } fn set_alpn_protocols(&self, _protocols: impl IntoIterator) -> Result<(), ()> where S: AsRef, { Ok(()) } } } /// See https://github.com/cryspen/hax/issues/1289 mod assoc_const_param { struct Test(); impl Test { const A: Self = Self(); } fn test() -> Test<1> { Test::<1>::A } } ================================================ FILE: tests/guards/Cargo.toml ================================================ [package] name = "guards" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar+coq+ssprove" = { broken = false, snapshot = "stdout", issue_id = "814" } ================================================ FILE: tests/guards/src/lib.rs ================================================ #![feature(if_let_guard)] #![allow(dead_code)] pub fn if_let_guard(x: Option>) -> i32 { match x { None => 0, Some(v) if let Ok(y) = v => y, Some(Err(y)) => y, _ => 1, } } pub fn equivalent(x: Option>) -> i32 { match x { None => 0, _ => match match x { Some(v) => match v { Ok(y) => Some(y), _ => None, }, _ => None, } { Some(y) => y, None => match x { Some(Err(y)) => y, _ => 1, }, }, } } pub fn multiple_guards(x: Option>) -> i32 { match x { None => 0, Some(Ok(v)) if let Some(1) = Some(v + 1) => 0, Some(v) if let Ok(y) = v => y, Some(Err(y)) => y, _ => 1, } } pub fn if_guard(x: Option) -> i32 { match x { Some(v) if v > 0 => v, _ => 0, } } ================================================ FILE: tests/if-let/Cargo.toml ================================================ [package] name = "if-let" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq+ssprove" = { broken = false, snapshot = "none", issue_id = "85" } ================================================ FILE: tests/if-let/src/lib.rs ================================================ #![allow(dead_code)] pub fn fun_with_if_let() -> u8 
{ let x = Some(5); if let Some(x) = x { x } else { 7 } } ================================================ FILE: tests/lean-core-models/Cargo.toml ================================================ [package] name = "lean-core-models" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."lean" = {} ================================================ FILE: tests/lean-core-models/src/default.rs ================================================ // Tests for core models in lean #![allow(dead_code)] #![allow(unused_variables)] // Default on struct mod structs { struct S { f1: usize, } impl Default for S { fn default() -> Self { S { f1: 0 } } } fn test() -> S { S::default() } } // Default on enum mod enums { enum E { C1(u32), C2(T), } impl Default for E { fn default() -> Self { E::C2(T::default()) } } } ================================================ FILE: tests/lean-core-models/src/function.rs ================================================ #![allow(dead_code)] #![allow(unused_variables)] fn test() -> u32 { let f_1 = |_: u32| 9; let f_2 = |x: u32, y: u32| x + y; let f_2_tuple = |(x, y): (u32, u32)| x + y; f_1(0) + f_2(1, 2) + f_2_tuple((1, 2)) } ================================================ FILE: tests/lean-core-models/src/lib.rs ================================================ // Tests for core models in lean #![allow(dead_code)] #![allow(unused_variables)] pub mod default; pub mod function; pub mod option; pub mod phantom; pub mod result; ================================================ FILE: tests/lean-core-models/src/option.rs ================================================ // Tests for core models in lean #![allow(dead_code)] #![allow(unused_variables)] struct S { f1: u32, } enum E { C(u32), } impl Default for S { fn default() -> Self { S { f1: 42 } } } fn test() { let o1 = Option::Some(4); let o2: Option = None; let o3 = o1.clone().is_some_and(|x| x == 0); let o3 = o1.clone().is_none_or(|x| x == 0); 
let o4 = Some(0).unwrap(); let o5 = Some(0).unwrap_or(9); let o6 = Some(0).unwrap_or_else(|| 9); let o7 = Option::None::.unwrap_or_default(); // maps let o8 = Some(0).map(|x| x + 1); let o9 = Some(1).map_or(9, |x| x + 1); let o10 = Some(2).map_or_else(|| 9, |x| x + 1); // options and results let o11 = Some(3).ok_or(E::C(0)); let o12 = Some(1).ok_or_else(|| E::C(1)); let o13 = None.and_then(|x: u32| Some(x)); let o14 = Some(S { f1: 9 }).take(); // tests let o15 = Some(1).is_some(); let o16 = Some(2).is_none(); let o17 = Some(3).expect("Should be Some"); let o18 = Some(4).unwrap(); } ================================================ FILE: tests/lean-core-models/src/phantom.rs ================================================ // Tests for core models in lean #![allow(dead_code)] #![allow(unused_variables)] use core::marker::PhantomData; trait Foo {} struct Bar { _phantom: PhantomData, } impl Bar { fn new() -> Self { Self {_phantom : PhantomData} } } ================================================ FILE: tests/lean-core-models/src/result.rs ================================================ // Tests for core models in lean #![allow(dead_code)] #![allow(unused_variables)] #[derive(Clone)] enum E1 { C1, C2(u32), } enum E2 { C1, C2(u32), } fn tests() -> Result { // Constructors let v1 = Result::::Ok(1); let v2 = Result::::Err(E1::C1); let f = |x: u32| x + 1; // map let v5 = Ok::<_, E1>(1).map(|v| v + 1); let v6 = Ok::<_, E1>(1).map_or(9, f); let v7 = Ok::<_, E1>(1).map_or_else(|_| 10, f); let v8 = Ok(0).map_err(|e: E1| match e { E1::C1 => E2::C1, E1::C2(x) => E2::C2(x + 1), }); let v9 = v1.is_ok(); let v10 = v1.is_err(); let v11 = v1.clone().and_then(|x| Ok::<_, E1>(x + 1)); let v12 = Ok::(0).clone().unwrap(); let v13 = Ok::(0).clone().expect("Should be Ok"); // ? notation let v3 = v1.map(f)? 
+ v2?; Ok(v3) } ================================================ FILE: tests/lean-tests/Cargo.toml ================================================ [package] name = "lean-tests" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."lean" = { broken = false } ================================================ FILE: tests/lean-tests/src/array.rs ================================================ // Arrays with const generic sizes fn f(x: [u8; N]) {} fn g(x: [u8; N]) { f(x); f([0u8; 10]); } ================================================ FILE: tests/lean-tests/src/associated_types.rs ================================================ mod basic { trait Iterable { type Item; fn first(&self) -> Self::Item; } fn just_the_first(iter: I) -> I::Item { iter.first() } fn first_plus_1>(iter: I) -> i32 { iter.first() + 1 } impl Iterable for bool { type Item = i32; fn first(&self) -> i32 { 3 } } fn a() { first_plus_1(true); } } mod projection { trait T1 { type A1; } trait T2 { type A2: T1; fn f() -> ::A1; } } mod multiple_associated_types { trait Pair { type First; type Second; fn first(&self) -> Self::First; fn second(&self) -> Self::Second; } fn get_both(pair: P) -> (P::First, P::Second) { (pair.first(), pair.second()) } impl Pair for (i32, bool) { type First = i32; type Second = bool; fn first(&self) -> i32 { self.0 } fn second(&self) -> bool { self.1 } } fn b() { let pair = (42, true); let both = get_both(pair); } fn get_first_as_i32>(pair: P) -> i32 { pair.first() } } mod multiple_projections { trait FnOnce { type Output; } pub fn func(d: D, f: F, u: U) where F: FnOnce, D: FnOnce, { } } ================================================ FILE: tests/lean-tests/src/binops.rs ================================================ //! 
Tests known binops #![allow(dead_code)] #![allow(unused_variables)] fn noop (x: i32) -> i32 { x } ///////////////////// // UNARY FUNCTIONS // ///////////////////// fn neg_int (x: i32) -> i32 { -x } fn not_int (x: i32) -> i32 { !x } fn not_bool (x: bool) -> bool { !x } fn index (x: [i32; 1]) -> i32 { x[0] } ////////////////////// // BINARY FUNCTIONS // ////////////////////// fn add_int (x: i32, y: i32) -> i32 { x + y } fn sub_int (x: i32, y: i32) -> i32 { x - y } fn mul_int (x: i32, y: i32) -> i32 { x * y } fn div_int (x: i32, y: i32) -> i32 { x / y } fn rem_int (x: i32, y: i32) -> i32 { x % y } fn shr_int (x: i32, y: i32) -> i32 { x >> y } fn shl_int (x: i32, y: i32) -> i32 { x << y } fn bitand_int (x: i32, y: i32) -> i32 { x & y } fn bitand_bool (x : bool, y: bool) -> bool { x & y } fn bitor_int (x: i32, y: i32) -> i32 { x | y } fn bitor_bool (x : bool, y: bool) -> bool { x | y } fn bitxor_int (x: i32, y: i32) -> i32 { x ^ y } fn bitxor_bool (x: bool, y: bool) -> bool { x ^ y } fn logical_op_and (x: bool, y: bool) -> bool { x && y } fn logical_op_or (x: bool, y: bool) -> bool { x || y } fn eq_int(x : i32, y: i32) -> bool { x == y } fn eq_bool(x : bool, y: bool) -> bool { x == y } fn neq_int (x : i32, y: i32) -> bool { x != y } fn neq_bool(x : bool, y: bool) -> bool { x != y } fn lt_int(x : i32, y: i32) -> bool { x < y } fn le_int(x : i32, y: i32) -> bool { x <= y } fn gt_int(x : i32, y: i32) -> bool { x > y } fn ge_int(x : i32, y: i32) -> bool { x >= y } ////////////////////// // NON BOOL AND INT // ////////////////////// // Known binops with non boolean or integer arguments should not be pretty printed struct S; impl std::ops::Not for S { type Output = S; fn not(self) -> S { self } } impl std::ops::Add for S { type Output = S; fn add(self, rhs: Self) -> Self::Output { self } } fn not_s(x: S) -> S { !x } fn add_s(x: S, y: S) -> S { x + y } ================================================ FILE: tests/lean-tests/src/casts.rs 
================================================ use hax_lib::*; /// Returns true if all casting edge cases behave as expected. #[ensures(|result| result)] pub fn casting_edge_cases(_dummy: bool) -> bool { // 1. Truncation: u16 to u8 (256 -> 0) // 256 is 0x0100. Truncating to lower 8 bits gives 0x00. let case1 = (256u16 as u8) == 0; // 2. Truncation of negative: i16 to u8 (-1 -> 255) // -1 in i16 is 0xFFFF. Truncating to u8 gives 0xFF (255). let case2 = (-1i16 as u8) == 255; // 3. Sign extension: i8 to i16 (-1 -> -1) // -1 in i8 is 0xFF. Sign extending to i16 gives 0xFFFF (-1). let case3 = (-1i8 as i16) == -1; // 4. Reinterpretation of bits: u8 to i8 (128 -> -128) // 128 in u8 is 0x80. In i8 (two's complement), 0x80 is -128. let case4 = (128u8 as i8) == -128; // 5. Large u32 to i32 (0xFFFFFFFF -> -1) // 0xFFFFFFFF in u32. In i32 (two's complement), this is -1. let case5 = (0xFFFFFFFFu32 as i32) == -1; case1 && case2 && case3 && case4 && case5 } /// https://github.com/cryspen/hax/issues/1912 pub fn shift_after_cast(x: u16, n: u8) -> u32 { (x as u32) << (n as u32) } /// https://github.com/cryspen/hax/issues/1911 pub fn add_after_cast(a: u8, b: u8, c: u8) -> u16 { (a as u16) + (b as u16) + (c as u16) } ================================================ FILE: tests/lean-tests/src/comments.rs ================================================ #![allow(dead_code)] #![allow(unused_variables)] /// Single line doc comment fn f() {} /** Block doc-comment : Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum rutrum orci ac tellus ullamcorper sollicitudin. Sed fringilla mi id arcu suscipit rhoncus. Pellentesque et metus a ante feugiat lobortis. Nam a mauris eget nisl congue egestas. Duis et gravida nulla. Curabitur mattis leo vel molestie posuere. Etiam malesuada et augue eget varius. Pellentesque quis tincidunt erat. Vestibulum id consectetur turpis. Cras elementum magna id urna volutpat fermentum. In vel erat quis nunc rhoncus porta. 
Aliquam sed pellentesque tellus. Quisque odio diam, mollis ut venenatis non, scelerisque at nulla. Nunc urna ante, tristique quis nisi quis, congue maximus nisl. Curabitur non efficitur odio. */ fn heavily_documented() -> u32 { 4 } ================================================ FILE: tests/lean-tests/src/constants.rs ================================================ // Tests on constants #![allow(dead_code)] #![allow(unused_variables)] const C1: u32 = 5678; const C2: u32 = C1 + 1; const C3: u32 = if true { 890 } else { 9 / 0 }; const fn computation(x: u32) -> u32 { x + x + 1 } const C4: u32 = computation(C1) + C2; const C5: (u32, u32) = (0 + 0, 0); const C6: [u32; 1] = [0]; fn test() { let x = C1 + 1; let y = C2 + C3; let z = C4 - C3; } mod const_parameters { /// Function with const parameter fn f() -> usize { N } const N0: usize = 1; const N1: usize = 10; fn test() { let _ = f::<9>() + f::(); } /// Trait definition trait T { fn f(&self) -> usize; } /// Struct definition struct S(u32); impl T for S { fn f(&self) -> usize { N_TRAIT - N_FIELD } } fn test2>(x: A) -> usize { let s = S::(9); let _ = s.f::<1>() + x.f::<{ 1 + N1 }>(); let s = S::<{ 1 + 2 }>(9); x.f::<{ 2 + 2 }>() } } ================================================ FILE: tests/lean-tests/src/enums.rs ================================================ //! Tests on enums #![allow(dead_code)] #![allow(unused_variables)] // 1. Type definition enum E { // unit-like V1, V2, // with positional arguments V3(usize), V4(usize, usize, usize), // with named arguments V5 { f1: usize, f2: usize }, V6 { f1: usize, f2: usize }, } enum MyList { Nil, Cons { hd: T, tl: Box> }, } fn enums() -> () { // 2. 
Expressions let e_v1 = E::V1; let e_v2 = E::V2; let e_v3 = E::V3(23); let e_v4 = E::V4(23, 12, 1); let e_v5 = E::V5 { f1: 23, f2: 43 }; let e_v6 = E::V6 { f1: 12, f2: 13 }; let nil: MyList = MyList::Nil; let cons_1 = MyList::Cons { hd: 1, tl: Box::new(nil), }; let cons_2_1 = MyList::Cons { hd: 2, tl: Box::new(cons_1), }; // 3. Pattern matching match e_v1 { E::V1 => (), E::V2 => (), E::V3(_) => (), E::V4(x1, x2, x3) => { let y1 = x1 + x2; let y2 = y1 - x2; let y3 = y2 + x3; () } E::V5 { f1, f2 } => (), E::V6 { f1, f2: other_name_for_f2, } => (), } } ================================================ FILE: tests/lean-tests/src/floats.rs ================================================ // Tests on floats #![allow(dead_code)] #![allow(unused_variables)] const N: f32 = 1.0; fn test() { let l0 = 1.0; let l1 = 0.9; let l2 = 5.0f32; let l5 = N; } fn f(x: f64, y: f32) -> f32 { y } ================================================ FILE: tests/lean-tests/src/ite.rs ================================================ //! 
Tests on if-then-else #![allow(dead_code)] #![allow(unused_variables)] fn test1() -> i32 { let x = if true { 0 } else { 1 }; if false { 2 } else { 3 } } fn test2(b: bool) -> i32 { let x = if b { 0 } else { 9 }; let mut y = 0; if true { y = y + x + 1 } else { y = y - x - 1 }; if b { let z = y + y; z + y + x } else { let z = y - x; z + y + x } } ================================================ FILE: tests/lean-tests/src/lib.rs ================================================ #![allow(dead_code)] #![allow(unused_variables)] pub mod array; pub mod associated_types; pub mod binops; pub mod casts; pub mod comments; pub mod constants; pub mod enums; pub mod floats; pub mod ite; pub mod loops; pub mod matching; pub mod monadic; pub mod nested_control_flow; pub mod opaque; pub mod recursion; pub mod specs; pub mod structs; pub mod traits; pub mod types; const FORTYTWO: usize = 42; const MINUS_FORTYTWO: isize = -42; fn returns42() -> usize { FORTYTWO } fn add_two_numbers(x: usize, y: usize) -> usize { x + y } fn letBinding(x: usize, y: usize) -> usize { let useless = (); let result1 = x + y; let result2 = result1 + 2; result2 + 1 } fn closure() -> i32 { let x = 41; let f1 = |y| y + x; let f2 = |y, z| y + x + z; let res1 = f1(1); let res2 = f2(2, 3); res1 + res2 } #[hax_lib::lean::before("example : Nat := 42")] fn test_before_verbatime_single_line(x: u8) -> u8 { 42 } #[hax_lib::lean::before( " def multiline : Unit := () " )] fn test_before_verbatim_multi_line(x: u8) -> u8 { 32 } const NULL_CHAR: char = '\0'; /// Test string literals with escape sequences fn string_escapes() { let _empty = ""; let _plain = "hello world"; let _with_quotes = "she said \"hello\""; let _with_single_quote = "it's fine"; let _with_backslash = "path\\to\\file"; let _with_newline = "line1\nline2"; let _with_tab = "col1\tcol2"; let _with_carriage_return = "before\rafter"; let _mixed = "say \"hello\"\nand\t'goodbye'\\end"; let _carriage_return = "carriage\rreturn"; let _control_chars = "null\x00byte 
bell\x07char font\x1b[0mreset"; } ================================================ FILE: tests/lean-tests/src/loops.rs ================================================ //! Tests on loops #![allow(dead_code)] #![allow(unused_variables)] /// Simple for-loop fn loop1() -> u32 { let mut x: u32 = 0; for i in 1..10 { x = x + i } x } /// For-loop with a return fn loop2() -> u32 { let mut x: u32 = 0; for i in 1..10 { if i == 5 { return x; } x = x + i; } x } /// For-loop with a spec #[hax_lib::requires(y > 0)] #[hax_lib::ensures(|res| res > 0)] fn for_loop_with_spec(y: u64) -> u64 { let mut x: u64 = y; for i in 0..y { hax_lib::loop_invariant!(|i: u64| x > 0); if x % 5 == 0 { x = 200; } else { x = x % 5; } } x } /// while-loop #[hax_lib::ensures(|r| r == 0)] #[hax_lib::lean::proof_method::grind] fn while_loop1(s: u32) -> u32 { let mut x: u32 = s; while x > 0 { hax_lib::loop_decreases!(x); x = x - 1; } x } mod errors { enum Error { Foo, Bar(u32), } fn loop3() -> Result { let mut x = 0; let end: u32 = 10; for i in 1..end { if i == 5 { return Err(Error::Foo); } x = x + 5 } Ok(x) } fn loop4() -> Result<(u32, u32), Error> { let mut e = 0; let f = |()| 42; for i in 0..(f(())) { // verify degree bound if i > 10 { return Err(Error::Bar(e)); } e = e + i } Ok((e, e)) } } ================================================ FILE: tests/lean-tests/src/matching.rs ================================================ fn test_const_matching(x: u32, c: char, s: &str, b: bool) -> u32 { let x = match x { 0 => 42, _ => 0, }; let c = match c { 'a' => 42, _ => 0, }; let s = match s { "Hello" => 42, _ => 0, }; let b = match b { true => 42, false => 0, }; return x + c + s + b; } fn test_binding_subpattern_matching(x: (u8, (u8, u8))) -> u8 { match x { (0, pair @ (a, b)) => a + b + pair.0 + pair.1, _ => 0, } } fn test_ellipsis_records() { enum E { C { f1: u8, f2: u8, f3: u8, f4: u8 }, } let c = E::C { f1: 1, f2: 2, f3: 3, f4: 4, }; match c { E::C { .. } => assert!(true), }; match c { E::C { f1, .. 
} => assert!(f1 == 1), }; match c { E::C { f1, f2, .. } => assert!(f1 == 1 && f2 == 2), }; match c { E::C { f2, f4, .. } => assert!(f2 == 2 && f4 == 4), }; match c { E::C { f1, f2, f3, f4 } => assert!(f1 == 1 && f2 == 2 && f3 == 3 && f4 == 4), }; } fn test_ellipsis_structs() { struct S { f1: u8, f2: u8, f3: u8, f4: u8, } let c = S { f1: 1, f2: 2, f3: 3, f4: 4, }; match c { S { .. } => assert!(true), }; match c { S { f1, .. } => assert!(f1 == 1), }; match c { S { f1, f2, .. } => assert!(f1 == 1 && f2 == 2), }; match c { S { f2, f4, .. } => assert!(f2 == 2 && f4 == 4), }; match c { S { f1, f2, f3, f4 } => assert!(f1 == 1 && f2 == 2 && f3 == 3 && f4 == 4), }; } fn test_ellipsis_bare_tuples() { let t = (1u8, 2u8, 3u8, 4u8); match t { (..) => assert!(true), }; match t { (a, ..) => assert!(a == 1), }; match t { (a, b, ..) => assert!(a == 1 && b == 2), }; match t { (.., d) => assert!(d == 4), }; match t { (.., c, d) => assert!(c == 3 && d == 4), }; match t { (a, .., d) => assert!(a == 1 && d == 4), }; match t { (a, b, c, d) => assert!(a == 1 && b == 2 && c == 3 && d == 4), }; } fn test_ellipsis_tuples() { enum F { D(u8, u8, u8, u8), } let d = F::D(1, 2, 3, 4); match d { F::D(..) => assert!(true), }; match d { F::D(a, ..) => assert!(a == 1), }; match d { F::D(a, b, ..) => assert!(a == 1 && b == 2), }; match d { F::D(.., d) => assert!(d == 4), }; match d { F::D(.., c, d) => assert!(c == 3 && d == 4), }; match d { F::D(a, .., d) => assert!(a == 1 && d == 4), }; match d { F::D(a, b, c, d) => assert!(a == 1 && b == 2 && c == 3 && d == 4), }; } ================================================ FILE: tests/lean-tests/src/monadic.rs ================================================ //! 
Tests on monadic encoding #![allow(dead_code)] #![allow(unused_variables)] struct S { f: u32, } fn test() { let _ = 9; // value let _ = 9 + 9; // computation let _ = S { f: 9 }; // constructors are values let _ = S { f: 9 + 9 }; // computation within a value let _ = (S { f: 9 + 9 }).f; // projections are values let _ = (S { f: 9 + 9 }).f + 9; // projections are values let _ = if true { 3 + 4 } else { 3 - 4 }; // ite expects value for condition let _ = if 9 + 9 == 0 { 3 + 4 } else { 3 - 4 }; // ite expects value for condition let _ = if true { let x = 9; 3 + x; } else { let y = 19; 3 + y - 4; }; } mod trait_constants { // https://github.com/cryspen/hax/issues/1928 trait Foo { const F : u32; } trait Bar { const B : u32; } struct Baz; impl Foo for Baz { const F : u32 = 1; } impl Bar for Baz { const B : u32 = Self::F - 1; } } ================================================ FILE: tests/lean-tests/src/nested_control_flow.rs ================================================ //! Tests for nested control flow in expressions #![allow(dead_code)] #![allow(unused_variables)] fn nested_control_flow() { let x1 = 1 + (if true { 0 } else { 1 }); let x2 = 1 + (match (1, 2) { _ => 0, }); let x3 = 1 + { let x = 9; x + 1 }; } fn explicit_hoisting() { let x1_tmp = if true { 0 } else { 1 }; let x1 = 1 + x1_tmp; let x2_tmp = match (1, 2) { _ => 0, }; let x2 = 1 + x2_tmp; let x3_tmp_x = 9; let x3_tmp = x3_tmp_x + 1; let x3 = 1 + x3_tmp; } fn complex_nesting() { let mut x1 = if true { let mut y = if false { let mut z = match () { _ => 9, }; z = 1 + z; z + 1 } else { let mut z = 9; z = z + 1; z }; y = y + 1; y + 1 } else { 0 }; x1 = x1 + 1; let mut x2 = match Some(89) { Some(a) => { let mut y = 1 + a; y = y + 1; if y == 0 { let mut z = 9; z = z + y + 1; z } else { 10 } } None => { let mut y = if false { 9 } else { let mut z = 9; z = z + 1; z + 9 }; y = y + 1; y } }; x2 = x1 + 1 + x2 } ================================================ FILE: tests/lean-tests/src/opaque.rs 
================================================ #[hax_lib::opaque] pub fn an_opaque_fn() {} trait T { type A; fn f(); } struct S; #[hax_lib::opaque] impl T for S { type A = usize; fn f() {} } #[hax_lib::opaque] struct OpaqueStruct; ================================================ FILE: tests/lean-tests/src/recursion.rs ================================================ fn factorial(n: u32) -> u32 { if n == 0 { 1 } else { n * factorial(n - 1) } } ================================================ FILE: tests/lean-tests/src/specs.rs ================================================ #[hax_lib::requires(x > 0)] #[hax_lib::ensures(|r| r == x)] fn test(x: u8) -> u8 { x } #[hax_lib::requires(x > 0)] #[hax_lib::ensures(|r| r == x)] fn use_previous_result(x: u8) -> u8 { test(x) } #[hax_lib::requires(x > 0)] #[hax_lib::ensures(|r| r == x)] #[hax_lib::lean::proof("by unfold lean_tests.specs.test_proof; hax_bv_decide")] fn test_proof(x: u8) -> u8 { x } #[hax_lib::requires(x < 16)] #[hax_lib::ensures(|res| res >= x)] fn square(x: u8) -> u8 { x * x } #[hax_lib::requires(hax_lib::forall(|i:u8| hax_lib::implies(i < 20, x > i)))] #[hax_lib::ensures(|r| !hax_lib::exists(|i:u8| !hax_lib::implies(i < 20, r > i)))] #[hax_lib::lean::proof_method::grind] fn forall_and_exists(x: u8) -> u8 { x } /// Test function without arguments /// https://github.com/cryspen/hax/issues/1856 #[hax_lib::ensures(|_| true)] fn fn_without_args() {} /// The Lean backend used to produce `self_` instead of `self` in annotations in /// impl blocks. See https://github.com/cryspen/hax/issues/1852. 
mod issue_1852 { struct T {} #[hax_lib::attributes] impl T { pub fn test(self) -> bool { true } #[hax_lib::requires(T::test(self))] pub fn func(self) {} } } #[hax_lib::requires(true)] #[hax_lib::ensures(|r| true)] #[hax_lib::lean::pure_requires_proof("⟨True, by mvcgen⟩")] #[hax_lib::lean::pure_ensures_proof("⟨fun _ => True, by intros; mvcgen⟩")] fn custom_pure_proofs(x: u8) {} /// Resugarings need to be apply also to linked items /// https://github.com/cryspen/hax/issues/1945 mod issue_1945 { #[hax_lib::requires({let x = a; a == 0})] fn mktuple(a: i32) -> bool { {let x = a; a == 0} } } ================================================ FILE: tests/lean-tests/src/structs.rs ================================================ //! Tests on structs #![allow(dead_code)] #![allow(unused_variables)] // # Tuple Structs // 1. Type definitions struct T0(); struct T1(A); struct T2(A, B); struct T3(A, B, C); struct T3p(A, T2); fn tuple_structs() -> () { // 2. Expressions let t0 = T0(); let t1 = T1(1); let t2 = T2(1, 2); let t3 = T3(T0(), T1(1), T2(1, 2)); let t3p = T3p(T0(), T2(T1(1), T2(1, 2))); // 3. Patterns let T0() = t0; let T1(u1) = t1; let T2(u2, u3) = t2; let T3(T0(), T1(_), T2(_, _)) = t3; let T3p(T0(), T2(T1(_), T2(_, _))) = t3p; // 4. Accessors let _ = t1.0; let _ = t2.0; let _ = t2.1; let _ = t3.0; let _ = t3.1; let _ = t3.2; let _ = t3.2.1; let _ = t3p.0; let _ = t3p.1; let _ = t3p.1.1.0; let _ = t3p.1.0; let _ = t3p.1.1; // 5. Pattern matching let _ = match t0 { T0() => {} }; let _ = match t1 { T1(u1) => {} }; let _ = match t2 { T2(u2, u3) => {} }; let _ = match t3 { T3(T0(), T1(u1), T2(u2, u3)) => {} }; let _ = match t3p { T3p(T0(), T2(T1(u1), T2(u2, u3))) => {} }; } // # Normal Structs // 1. 
Type definitions struct S1 { f1: usize, f2: usize, } struct S2 { // Nested structs f1: S1, // possible shadowing between fields f2: usize, } struct S3 { // Reserved keywords in Lean end: usize, def: usize, theorem: usize, structure: usize, inductive: usize, } fn normal_structs() -> () { // 2. Expressions let s1 = S1 { f1: 0, f2: 1 }; let s2 = S2 { f1: S1 { f1: 2, f2: 3 }, f2: 4, }; let s3 = S3 { end: 0, def: 0, theorem: 0, structure: 0, inductive: 0, }; // 3. Patterns let S1 { f1, f2 } = s1; let S1 { f1, f2: other_name_for_f2, } = s1; let S2 { f1: S1 { f1, f2 }, f2: other_name_for_f2, } = s2; let S3 { end, def, theorem, structure, inductive, } = s3; // 4. Accessors let _ = (s1.f1, s1.f2); let _ = ( s1.f1, s1.f2, s2.f1.f1, s2.f1.f2, s2.f2, s3.end, s3.def, s3.theorem, ); // 5. Pattern-matching match s1 { S1 { f1, f2 } => {} }; match s2 { S2 { f1: S1 { f1, f2: other_name_for_f2, }, f2, } => {} } match s3 { S3 { end, def, theorem, structure, inductive, } => {} } } mod miscellaneous { struct S { f: i32, } fn test_tuples() -> (i32, i32) { let lit = 1; let constr = S { f: 42 }; let proj = constr.f; let ite = if true { (1, 2) } else { let z = 1 + 2; (z, z) }; (1, 2) } } mod base_expressions { struct S { f1: u32, f2: u32, f3: u32, } fn test() { let s1 = S { f1: 1, f2: 2, f3: 3, }; let _ = S { f1: 0, ..s1 }; let _ = S { f2: 0, ..s1 }; let _ = S { f3: 0, ..s1 }; let _ = S { f1: 0, f2: 1, ..s1 }; let _ = S { f2: 0, f3: 1, ..s1 }; let _ = S { f3: 0, f1: 2, ..s1 }; let _ = S { f1: 0, f2: 1, f3: 0, ..s1 }; } } ================================================ FILE: tests/lean-tests/src/traits.rs ================================================ // Tests on traits #![allow(dead_code)] #![allow(unused_variables)] // Simple trait mod basic { trait T1 { fn f1(&self) -> usize; fn f2(&self, other: &Self) -> usize; } // Simple Impl struct S; impl T1 for S { fn f1(&self) -> usize { 42 } fn f2(&self, other: &Self) -> usize { 43 } } // Simple ImplExpr fn f(x: T) -> usize { x.f1() + x.f2(&x) 
} } // Bounds on parameters and on self mod bounds { trait T1 { fn f1(&self) -> usize; } trait T2 { fn f2(&self) -> usize; } trait Test: T2 { fn f_test(&self, x: &T) -> usize; } struct S1; impl T1 for S1 { fn f1(&self) -> usize { 0 } } struct S2; impl T2 for S2 { fn f2(&self) -> usize { 1 } } impl Test for S2 { fn f_test(&self, x: &S1) -> usize { x.f1() + self.f2() + 1 } } fn test(x1: S1, x2: S2) -> usize { x2.f_test(&x1) + x1.f1() } } mod associated_types { trait T1 { type T; fn f(&self, x: Self::T) -> Self::T; } trait T2 { type T: T1; fn f(&self, x: Self::T) -> usize; } trait Foo {} trait Bar {} trait T3 { type T: Bar; type Tp: Foo; fn f(&self, x: Self::T, y: Self::Tp) -> usize; } struct S {} impl T1 for S { type T = i32; fn f(&self, x: Self::T) -> Self::T { 2121 } } impl T2 for S { type T = S; fn f(&self, x: Self::T) -> usize { 21 } } impl Bar for i16 {} impl Foo for (u32, A) {} // impl T3 for S { // type T = i16; // type Tp = (u32, A); // fn f(&self, x: Self::T, y: Self::Tp) -> usize { // 12 // } // } trait Chain0 {} trait Chain1 { type A: Chain0; type B: Chain0; } trait Chain2: Chain1 {} trait Chain3: Chain2 { fn f() -> Self::A; } impl Chain0 for u8 {} impl Chain1 for u8 { type A = u8; type B = u8; } impl Chain2 for u8 {} impl Chain3 for u8 { fn f() -> u8 { 0 } } } mod overlapping_methods { trait T1 { fn f(&self) -> usize; } trait T2 { fn f(&self) -> usize; } trait T3 { fn f(&self) -> usize; } impl T1 for u32 { fn f(&self) -> usize { 0 } } impl T2 for u32 { fn f(&self) -> usize { 1 } } impl T3 for u32 { fn f(&self) -> usize { 2 } } fn test() -> usize { let x: u32 = 9; T1::f(&x) + T2::f(&x) + T3::f(&x) } } mod inheritance { trait T1 { fn f1(&self) -> usize; } trait T2 { fn f2(&self) -> usize; } trait T3: T2 + T1 { fn f3(&self) -> usize; } trait Tp1 { fn f1(&self) -> usize; } trait Tp2: Tp1 + T3 { fn fp2(&self) -> usize; } struct S {} impl T1 for S { fn f1(&self) -> usize { 1 } } impl T2 for S { fn f2(&self) -> usize { 2 } } impl T3 for S { fn f3(&self) -> usize 
{ 3 } } impl Tp1 for S { fn f1(&self) -> usize { 10 } } impl Tp2 for S { fn fp2(&self) -> usize { Tp1::f1(self) + T1::f1(self) + T2::f2(self) + T3::f3(self) } } fn test() -> usize { let s = S {}; s.f3() + 1 } } mod default { trait Easy { fn dft(&self) -> usize { 32 } } impl Easy for usize { fn dft(&self) -> usize { self + 1 } } impl Easy for u32 {} trait T1 { fn f1(&self) -> usize; fn f2(&self) -> usize { 1 } fn f3(&self, x: &A) -> usize { 1 } fn f4(&self, x: &A) -> usize { x.dft() + 1 } } struct S(usize, A); // Override impl T1 for S { fn f1(&self) -> usize { self.0 + self.1 } fn f2(&self) -> usize { self.1 } } impl T1 for S { fn f1(&self) -> usize { if self.1 { self.0 } else { 9 } } fn f2(&self) -> usize { self.0 + 1 } } // No override impl T1 for S { fn f1(&self) -> usize { 0 } } } mod trait_level_args { trait T1 { fn f1(&self) -> (); // A and B do not appear fn f2(&self, x: &A) -> (); // A appears fn f3(&self, x: &A, y: &B) -> (); // Both appear } impl T1 for usize { fn f1(&self) {} fn f2(&self, x: &u32) {} fn f3(&self, x: &u32, y: &u64) {} } fn test>(x: U, a: &A, b: &B) -> () { x.f1::(); x.f2::(a); x.f3::(a, b); } } mod trait_with_constraints { trait T1 {} trait T2 { fn func(&self) -> bool where Self: T1; } impl T2 for A { fn func(&self) -> bool where A: T1, { true } } } mod associated_constant { pub trait Foo { const f: bool; const x: u8 = 0; } pub struct Bar; impl Foo for Bar { const f: bool = true; const x: u8 = 1 + 1; } // https://github.com/cryspen/hax/issues/1940 trait Baz { const One: u32 = 1; } fn foo(n: u32) -> u32 { n + F::One } } ================================================ FILE: tests/lean-tests/src/types.rs ================================================ #![allow(dead_code)] #![allow(unused_variables)] // Tests on type aliases type UsizeAlias = usize; type MyOption = Option; type MyResult = Result, B>; type ErrorMonad = Result; type StateMonad = (A, S); type ESMonad = StateMonad, S>; ================================================ FILE: 
tests/let-else/Cargo.toml ================================================ [package] name = "let-else" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq+ssprove" = { broken = false, snapshot = "stdout", issue_id = "155" } ================================================ FILE: tests/let-else/src/lib.rs ================================================ #![allow(dead_code)] pub fn let_else(opt: Option) -> bool { let Some(x) = opt else { return false }; true } pub fn let_else_different_type(opt: Option) -> bool { let_else({ let Some(x) = opt else { return false }; Some(x + 1) }) } ================================================ FILE: tests/literals/Cargo.toml ================================================ [package] name = "literals" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."lean" = { broken = false, issue_id = "85" } into."fstar" = { broken = false, issue_id = "85" } into."coq" = { broken = false, issue_id = "85" } into."ssprove" = { broken = true, snapshot = "none", issue_id = "85" } ================================================ FILE: tests/literals/src/lib.rs ================================================ #![allow(dead_code)] use hax_lib::*; #[hax_lib::requires(x > int!(0) && x < int!(16))] fn math_integers(x: Int) -> u8 { let _: Int = 3usize.lift(); let _neg_dec = int!(-340282366920938463463374607431768211455000); let _pos_dec = int!(340282366920938463463374607431768211455000); let _neg_hex = int!(-0x3E7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFC18); let _pos_hex = int!(0x3E7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFC18); let _neg_octal = int!(-0o7637777777777777777777777777777777777777776030); let _pos_octal = int!(0o7637777777777777777777777777777777777777776030); let _neg_bin = int!(-0b111110011111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110000011000); let _pos_bin = 
int!(0b111110011111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110000011000); let _ = int!(-340282366920938463463374607431768211455000) > int!(340282366920938463463374607431768211455000); let _ = x < x; let _ = x >= x; let _ = x <= x; let _ = x != x; let _ = x == x; let _ = x + x; let _ = x - x; let _ = x * x; let _ = x / x; let _: i16 = x.to_i16(); let _: i32 = x.to_i32(); let _: i64 = x.to_i64(); let _: i128 = x.to_i128(); let _: isize = x.to_isize(); let _: u16 = x.to_u16(); let _: u32 = x.to_u32(); let _: u64 = x.to_u64(); let _: u128 = x.to_u128(); let _: usize = x.to_usize(); (x + x * x).to_u8() } pub fn panic_with_msg() { panic!("with msg") } #[derive(PartialEq, Eq)] struct Foo { field: u8, } const CONSTANT: Foo = Foo { field: 3 }; fn numeric() { let _: usize = 123; let _: isize = -42; let _: isize = 42; let _: i32 = -42; let _: u128 = 22222222222222222222; } pub fn patterns() { match 1u8 { 2 => (), _ => (), }; match ("hello", (123, ["a", "b"])) { ("hello", (123, _todo)) => (), _ => (), }; match (Foo { field: 4 }) { CONSTANT => (), // Note [CONSTANT] is not a free variable here, we're really matching against the *value* of CONSTANT _ => (), }; } fn casts(x8: u8, x16: u16, x32: u32, x64: u64, xs: usize) { let _: u64 = x8 as u64 + x16 as u64 + x32 as u64 + x64 as u64 + xs as u64; let _: u32 = x8 as u32 + x16 as u32 + x32 as u32 + x64 as u32 + xs as u32; let _: u16 = x8 as u16 + x16 as u16 + x32 as u16 + x64 as u16 + xs as u16; let _: u8 = x8 as u8 + x16 as u8 + x32 as u8 + x64 as u8 + xs as u8; let _: i64 = x8 as i64 + x16 as i64 + x32 as i64 + x64 as i64 + xs as i64; let _: i32 = x8 as i32 + x16 as i32 + x32 as i32 + x64 as i32 + xs as i32; let _: i16 = x8 as i16 + x16 as i16 + x32 as i16 + x64 as i16 + xs as i16; let _: i8 = x8 as i8 + x16 as i8 + x32 as i8 + x64 as i8 + xs as i8; } pub fn empty_array() { let _: &[u8] = &[]; } /// https://github.com/hacspec/hax/issues/500 fn 
fn_pointer_cast() { let f: fn(&u32) -> &u32 = |x| x; } fn strings() { let _: &str = "hello"; let _: &str = "hello\"world"; let _: &str = "it's"; let _: &str = "back\\slash"; let _: &str = "line\nbreak"; let _: &str = "carriage\rreturn"; let _: &str = "tab\there"; let _: &str = "null\x00byte"; let _: &str = "bell\x07char"; let _: &str = "\x1b[0m"; let _: &str = "🦀"; } ================================================ FILE: tests/loops/Cargo.toml ================================================ [package] name = "loops" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { } into."coq" = { broken = true, snapshot = "none", issue_id = "137" } into."ssprove" = { broken = true, snapshot = "none", issue_id = "137" } ================================================ FILE: tests/loops/src/lib.rs ================================================ mod recognized_loops { fn range() { let mut count = 0u64; for i in 0u8..10u8 { hax_lib::loop_invariant!(|i: u8| i <= 10); count += 1; } } fn range_step_by() { let mut count = 0u64; for i in (0u8..10u8).step_by(2) { hax_lib::loop_invariant!(|i: u8| i <= 10); count += 1; } } fn enumerated_slice(slice: &[T]) { let mut count = 0u64; for i in slice.into_iter().enumerate() { hax_lib::loop_invariant!(|i: usize| i <= 10); count += 2; } } fn enumerated_chunked_slice(slice: &[T]) { let mut count = 0u64; for i in slice.chunks_exact(3).enumerate() { hax_lib::loop_invariant!(|i: usize| { fstar!("$i <= ${slice.len()}") }); count += 3; } } } mod for_loops { fn range1() -> usize { let mut acc = 0; for i in 0..15 { acc = acc + i; } acc } fn range2(n: usize) -> usize { let mut acc = 0; for i in 0..(n + 10) { acc = acc + i + 1; } acc } fn composed_range(n: usize) -> usize { let mut acc = 0; for i in (0..n).chain((n + 10)..(n + 50)) { acc = acc + i + 1; } acc } fn rev_range(n: usize) -> usize { let mut acc = 0; for i in (0..n).rev() { acc = acc + i + 1; } acc } fn chunks(arr: 
Vec) -> usize { let mut acc = 0; let chunks = arr.chunks_exact(CHUNK_LEN); for chunk in chunks.clone() { let mut mean = 0; for item in chunk { mean = mean + item; } acc = acc + mean / CHUNK_LEN; } for item in chunks.remainder() { acc = acc - item; } acc } fn iterator(arr: Vec) -> usize { let mut acc = 0; for item in arr.iter() { acc = acc + item; } acc } fn nested(arr: Vec) -> usize { let mut acc = 0; for item in arr.iter() { for i in (0..*item).rev() { acc = acc + 1; for j in arr.iter().zip(4..i) { acc = acc + item + i + j.0 + j.1; } } } acc } fn pattern(arr: Vec<(usize, usize)>) -> usize { let mut acc = 0; for (x, y) in arr { acc = acc + x * y; } acc } fn enumerate_chunks(arr: Vec) -> usize { let mut acc = 0; for (i, chunk) in arr.chunks(4).enumerate() { for (j, x) in chunk.iter().enumerate() { acc = i + j + x; } } acc } fn bool_returning(x: u8) -> bool { x < 10 } fn f() { let mut acc = 0; for i in 1..10 { acc += i; bool_returning(i); } } } mod while_loops { fn f() -> u8 { let mut x = 0; while x < 10 { x = x + 3; } x + 12 } fn while_invariant_decr() -> u8 { let mut x = 0; while x < 10 { hax_lib::loop_invariant!(x <= 10); hax_lib::loop_decreases!(10 - x); x = x + 3; } x + 12 } fn while_invariant_decr_rev() -> u8 { let mut x = 0; while x < 10 { hax_lib::loop_decreases!(10 - x); hax_lib::loop_invariant!(x <= 10); x = x + 3; } x + 12 } } mod control_flow { fn double_sum() -> i32 { let mut sum = 0; for i in 1..10 { if i < 0 { break; } sum += i; } sum *= 2; sum } fn double_sum2() -> i32 { let mut sum = 0; let mut sum2 = 0; for i in 1..10 { if i < 0 { break; } sum += i; sum2 += i } sum + sum2 } fn double_sum_return(v: &[i32]) -> i32 { let mut sum = 0; for i in v { if *i < 0 { return 0; } sum += *i; } sum *= 2; sum } fn double_sum2_return(v: &[i32]) -> i32 { let mut sum = 0; let mut sum2 = 0; for i in v { if *i < 0 { return 0; } sum += *i; sum2 += *i } sum + sum2 } fn bigger_power_2(x: i32) -> i32 { let mut pow = 1; while pow < 1000000 { pow *= 2; if pow < x { pow *= 3; 
if true { break; } } pow *= 2 } pow } struct M { m: Vec, } impl M { fn decoded_message(&self) -> Option> { for i in 0..self.m.len() { if i > 5 { return None; } } return Some(self.m.clone()); } } fn nested() -> i32 { let mut sum = 0; for i in 1..10 { for j in 1..10 { if j < 0 { break; } sum += j; } sum += i; } sum *= 2; sum } fn nested_return() -> i32 { let mut sum = 0; for i in 1..10 { for j in 1..10 { if j < 0 { return 0; } sum += j; } sum += i; } sum *= 2; sum } fn continue_only(x: &[i32]) { let mut product = 1; for i in x { if *i == 0 { continue; } product *= i } } fn continue_and_break(x: &[i32]) { let mut product = 1; for i in x { if *i == 0 { continue; } if *i < 0 { break; } product *= i } } } mod and_mut_side_effect_loop { // https://github.com/hacspec/hax/issues/720 fn looping(array: &mut [u8; 5]) { for i in 0..array.len() { array[i] = i as u8; } } #[hax_lib::fstar::verification_status(panic_free)] fn looping_2(array: &mut [u8; 5]) { for i in 0..array.len() { array[i] = i as u8; } } } ================================================ FILE: tests/mut-ref-functionalization/Cargo.toml ================================================ [package] name = "mut-ref-functionalization" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into.fstar = {broken = false, issue_id = 90, snapshot = "stdout"} ================================================ FILE: tests/mut-ref-functionalization/src/lib.rs ================================================ #![allow(dead_code)] struct S { b: [u8; 5], } fn foo(mut lhs: S, rhs: &S) -> S { for i in 0..1 { lhs.b[i] += rhs.b[i]; } lhs } impl S { fn update(&mut self, x: u8) { self.b[0] = x; } } fn index_mutation(x: core::ops::Range, a: &'static [u8]) { let mut v = vec![1]; v[x].copy_from_slice(a); v[1] = 3; } fn index_mutation_unsize(mut x: [u8; 12]) -> u8 { x[4..5].copy_from_slice(&[1, 2]); 42 } fn build_vec() -> Vec { vec![1, 2, 3] } fn test_append() -> Vec { let mut vec1 = Vec::new(); let mut vec2 = 
vec![1u8, 2, 3]; vec1.append(&mut vec2); vec1.append(&mut build_vec()); vec1 } fn f() -> Vec { let mut vec = Vec::new(); vec.push(1); vec.push(2); vec.swap(0, 1); // `vec.swap(0, 1)` is desugared into: use std::ops::DerefMut; (&mut *(vec.deref_mut())).swap(0, 1); vec } struct Foo { field: Vec, } struct Pair { a: T, b: Foo, } fn g(x: Pair>) -> Vec { let mut x = x; for i in 1..10 { x.a.push(i); } x.a.swap(0, 1); x.b.field.swap(0, 1); x.a } fn h(x: &mut u8) { *x += 10; } struct Bar { a: u8, b: u8, } fn i(bar: &mut Bar) -> u8 { (*bar).b += bar.a; h(&mut bar.a); bar.a + bar.b } fn j(x: &mut Bar) -> u8 { let out = 123; i(x) + out } fn k( vec: &mut Vec, _: &mut u16, /*test var shadowing*/ arg_1_wild: u8, _: &mut (), ) -> u64 { // test variable shadowing let arg_1_wild2 = vec[1]; let arg_3_wild = vec[2]; let arg_1_wild1 = vec[3]; let arg_3_wild1 = vec[4]; vec[0] = arg_1_wild + arg_3_wild + arg_1_wild1 + arg_3_wild1 + arg_1_wild; 12345 } trait FooTrait { fn z(&mut self); } impl FooTrait for Foo { fn z(&mut self) {} } fn array(x: &mut [u8; 10]) { x[1] = x[2]; } ================================================ FILE: tests/naming/Cargo.toml ================================================ [package] name = "naming" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/naming/src/lib.rs ================================================ #![allow(dead_code)] #![allow(non_camel_case_types)] enum Foo { A, B { x: usize }, } enum Foo2 { A, B { x: usize }, } struct B; struct C { x: usize, } struct X {} fn mk_c() -> C { let _ = Foo::B { x: 3 }; let _ = X {}; C { x: 3 } } impl Foo { fn f(self) -> Foo { Foo::A } } impl B { fn f(self) -> B { B } } struct Foobar { a: Foo, } fn f(x: Foobar) -> usize { fn g() { impl B { fn g(self) -> usize { enum Foo { A, B { x: usize }, } 0usize } } impl Foo { fn g(self) -> usize { mod hello { fn h() {} } 1usize } } } x.a.g() } fn 
reserved_names(val: u8, noeq: u8, of: u8) -> u8 { val + noeq + of } struct Arity1(T); trait T1 {} impl T1 for Foo {} impl T1 for (Foo, u8) {} trait T2_for_a {} impl T2_for_a for Arity1<(Foo, u8)> {} trait T3_e_for_a {} impl T3_e_for_a for Foo {} struct StructA { a: usize, } struct StructB { a: usize, b: usize, } struct StructC { a: usize, } struct StructD { a: usize, b: usize, } fn construct_structs(a: usize, b: usize) { let _ = StructA { a }; let _ = StructB { a, b }; let _ = StructC { a }; let _ = StructD { a, b }; } const INHERENT_CONSTANT: usize = 3; trait FooTrait { const ASSOCIATED_CONSTANT: usize; } fn constants() -> usize { ::ASSOCIATED_CONSTANT + INHERENT_CONSTANT } /// Test for ambiguous local names renaming: when two local vars are /// ambiguous by name but not by their internal IDs. /// Such situation can occur playing with *hygenic* macros. /// Also, this happens with some internal Rustc rewrite. (e.g. assignment of tuples) mod ambiguous_names { fn debug(label: u32, value: u32) { println!("[{}] a={}", label, value) } /// This macro surround a given expression with a let binding for /// an identifier `a` and a print of that `a`. macro_rules! introduce_binding_to_new_name_a { ($label:expr, $value:expr, $($e:tt)*) => { let a = $value; $($e)* debug($label, a) }; } /// `f` stacks mutliple let bindings declaring different `a`s. 
fn f() { introduce_binding_to_new_name_a!(1, 104, introduce_binding_to_new_name_a!(2, 205, introduce_binding_to_new_name_a!(3, 306, let a = 123;); ); ); debug(4, a) } /// `f` is expanded into `f_expand` below, while the execution of `f` gives: /// /// ```plaintext /// [3] a=306 /// [2] a=205 /// [1] a=104 /// [last] a=123 /// ``` #[allow(unused)] fn f_expand() { let a = 104; let a = 205; let a = 306; let a = 123; debug(3, a); debug(2, a); debug(1, a); debug(0, a) } } /// From issue https://github.com/hacspec/hax/issues/839 fn string_shadows(string: &str, n: &str) {} /// From issue https://github.com/cryspen/hax/issues/1411 mod functions_defined_in_trait_impls { struct A; impl PartialEq for A { fn eq(&self, other: &Self) -> bool { panic!() } } struct B; impl PartialEq for B { fn eq(&self, other: &Self) -> bool { panic!() } } } /// From issue https://github.com/cryspen/hax/issues/1450 fn items_under_closures() { let _: fn() -> () = || { fn nested_function() {} struct NestedStruct; }; fn nested_function() {} struct NestedStruct; } ================================================ FILE: tests/nested-derefs/Cargo.toml ================================================ [package] name = "nested-derefs" version = "0.1.0" edition = "2021" [package.metadata.hax-tests] into."fstar+coq+ssprove" = { snapshot = "none" } ================================================ FILE: tests/nested-derefs/src/lib.rs ================================================ fn f(x: &usize) -> usize { *x } fn g(x: &&usize) -> usize { f(*x) } ================================================ FILE: tests/never-type/Cargo.toml ================================================ [package] name = "never-type" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { snapshot = "none" } ================================================ FILE: tests/never-type/src/lib.rs ================================================ #![allow(dead_code)] #![feature(never_type)] enum False {} fn 
never(h: False) -> ! { match h {} } fn test(b: bool) -> u8 { if b { panic!(); }; 3 } fn any() -> T { panic!() } ================================================ FILE: tests/odd/Cargo.toml ================================================ [package] name = "odd" version = "0.0.1" edition = "2021" [dependencies] even = { path = "../even" } ================================================ FILE: tests/odd/src/lib.rs ================================================ #![allow(dead_code)] pub fn odd(n: usize) -> bool { !even::even(n) } ================================================ FILE: tests/pattern-or/Cargo.toml ================================================ [package] name = "pattern-or" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."coq" = { issue_id = "161" } into."fstar" = { } # into."ssprove" = { broken = true, snapshot = "none" } ================================================ FILE: tests/pattern-or/src/lib.rs ================================================ #![allow(dead_code)] pub enum E { A, B, } pub fn bar(x: E) { match x { E::A | E::B => (), } } pub fn nested(x: Option) -> i32 { match x { Some(1 | 2) => 1, Some(x) => x, None => 0, } } pub fn deep(x: (i32, Option)) -> i32 { match x { (1 | 2, Some(3 | 4)) => 0, (x, _) => x, } } pub fn equivalent(x: (i32, Option)) -> i32 { match x { (1, Some(3)) | (1, Some(4)) | (2, Some(3)) | (2, Some(4)) => 0, (x, _) => x, } } pub fn deep_capture(x: Result<(i32, i32), (i32, i32)>) -> i32 { match x { Ok((1 | 2, x)) | Err((3 | 4, x)) => x, Ok((x, _)) | Err((x, _)) => x, } } ================================================ FILE: tests/patterns/Cargo.toml ================================================ [package] name = "patterns" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { issue_id = "1170" } ================================================ FILE: tests/patterns/src/lib.rs ================================================ 
#![allow(dead_code)] struct Other<'a>(&'a i32); enum Test<'a> { C1(Other<'a>), } impl<'a> Test<'a> { fn test(&self) -> i32 { match self { Self::C1(c) => *c.0, } } } ================================================ FILE: tests/proverif-basic-structs/Cargo.toml ================================================ [package] name = "basic-structs" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [package.metadata.hax-tests] into."pro-verif" = { broken = false, snapshot = "none" } ================================================ FILE: tests/proverif-basic-structs/src/lib.rs ================================================ // Record struct with single field struct Ainitial { x: u8, } // Record struct with multiple fields struct A { one: usize, two: usize, } // Non-record struct struct B(usize); ================================================ FILE: tests/proverif-fn-to-letfun/Cargo.toml ================================================ [package] name = "fn-to-letfun" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] ================================================ FILE: tests/proverif-fn-to-letfun/src/lib.rs ================================================ struct A { x: usize, y: u8, } struct B { b: bool, } fn some_function() -> bool { true } fn some_other_function(b: bool) -> u8 { 5 } fn longer_function(x: &str) -> A { let b = some_function(); let d = some_other_function(b); A { x: 12usize, y: 9u8 } } fn another_longer_function() -> B { let b = some_function(); let d = some_other_function(b); B { b: false } } fn void_function() { let b = some_function(); let d = some_other_function(b); } ================================================ FILE: tests/proverif-minimal/Cargo.toml ================================================ [package] name = "minimal" version = "0.1.0" edition = "2021" 
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] [package.metadata.hax-tests] into."pro-verif" = { broken = false, snapshot = "none" } ================================================ FILE: tests/proverif-minimal/src/lib.rs ================================================ pub fn add(left: usize, right: usize) -> usize { left + right } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let result = add(2, 2); assert_eq!(result, 4); } } ================================================ FILE: tests/proverif-noise/Cargo.toml ================================================ [package] name = "noise-kkpsk0" version = "0.1.0" authors = ["Karthik Bhargavan "] edition = "2018" license = "MIT OR Apache-2.0" description = "hacspec chacha20 poly1305 authenticated encryption" readme = "README.md" [dependencies] hax-lib-protocol = { path = "../../hax-lib-protocol" } hax-lib-protocol-macros = { path = "../../hax-lib-protocol-macros" } hax-lib = { path = "../../hax-lib" } [dev-dependencies] serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } rayon = "1.3.0" criterion = "0.4" rand = "0.8" hacspec-dev = { git = "https://github.com/hacspec/hacspec.git" } [package.metadata.hax-tests] into."pro-verif" = { broken = false, snapshot = "none" } ================================================ FILE: tests/proverif-noise/src/lib.rs ================================================ pub mod noise_crypto; pub mod noise_kkpsk0; pub mod noise_lib; ================================================ FILE: tests/proverif-noise/src/noise_crypto.rs ================================================ // Import hacspec and all needed definitions. 
use hax_lib_protocol::crypto::{DHGroup, *}; /// This file formalizes the Crypto Functions from the Noise Specification /// Section 4: Crypto Functions /// https://noiseprotocol.org/noise.html#crypto-functions pub enum Error { CryptoError, } /// Section 4.1 and 12.1: Diffie-Hellman Functions for Curve25519 pub struct KeyPair { private_key: DHScalar, pub public_key: Vec, } pub const DHLEN: usize = 32; pub fn generate_keypair(sk: &[u8]) -> KeyPair { let sk = DHScalar::from_bytes(sk); let pk = dh_scalar_multiply_base(DHGroup::X25519, sk.clone()); KeyPair { private_key: sk, public_key: pk, } } pub fn dh(sk: &KeyPair, pk: &[u8]) -> Vec { let pk = DHElement::from_bytes(pk); dh_scalar_multiply(DHGroup::X25519, sk.private_key.clone(), pk) } /// Section 4.2 and 12.3: Cipher functions for ChaCha20-Poly1305 pub fn encrypt(key: &[u8], counter: u64, aad: &[u8], plain: &[u8]) -> Vec { let mut chacha_iv = vec![0u8; 4]; chacha_iv.extend_from_slice(&counter.to_le_bytes()); let (mut cipher, tag) = aead_encrypt( AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, key), AEADIV::from_bytes(&chacha_iv), aad, plain, ); cipher.extend_from_slice(&tag); cipher } pub fn decrypt(key: &[u8], counter: u64, aad: &[u8], cipher: &[u8]) -> Result, Error> { let mut chacha_iv = vec![0u8; 4]; chacha_iv.extend_from_slice(&counter.to_le_bytes()); let cipher_len = cipher.len() - 16; let cip = &cipher[0..cipher_len]; let tag = &cipher[cipher_len..cipher.len()]; aead_decrypt( AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, key), AEADIV::from_bytes(&chacha_iv), aad, cip, AEADTag::from_bytes(tag), ) .map_err(|_| Error::CryptoError) } pub fn rekey(key: &[u8]) -> Vec { encrypt(key, 0xffffffffffffffffu64, &Vec::new(), &[0u8; 32]) } /// Section 4.3 and 12.5: Hash functions for SHA-256 pub const HASHLEN: usize = 32; pub const BLOCKLEN: usize = 64; pub fn hash(input: &[u8]) -> Vec { hax_lib_protocol::crypto::hash(HashAlgorithm::Sha256, input) } pub fn hmac_hash(key: &[u8], input: &[u8]) -> Vec { 
hmac(HMACAlgorithm::Sha256, key, input) } /// HKDF spec as per Noise /// Alternative would be to directly use HKDF pub fn kdf_next(secret: &[u8], prev: &[u8], counter: u8) -> Vec { hmac_hash(secret, &[prev, &[counter]].concat()) } pub fn hkdf1(key: &[u8], ikm: &[u8]) -> Vec { let secret = hmac_hash(key, ikm); kdf_next(&secret, &Vec::new(), 1) } pub fn hkdf2(key: &[u8], ikm: &[u8]) -> (Vec, Vec) { let secret = hmac_hash(key, ikm); let k1 = kdf_next(&secret, &Vec::new(), 1); let k2 = kdf_next(&secret, &k1, 2); (k1, k2) } pub fn hkdf3(key: &[u8], ikm: &[u8]) -> (Vec, Vec, Vec) { let secret = hmac_hash(key, ikm); let k1 = kdf_next(&secret, &Vec::new(), 1); let k2 = kdf_next(&secret, &k1, 2); let k3 = kdf_next(&secret, &k1, 3); (k1, k2, k3) } ================================================ FILE: tests/proverif-noise/src/noise_kkpsk0.rs ================================================ // Import hacspec and all needed definitions. use crate::*; use noise_crypto::*; use noise_lib::*; pub struct HandshakeStateI0 { st: SymmetricState, psk: Vec, s: KeyPair, e: KeyPair, rs: Vec, } pub struct HandshakeStateI1 { st: SymmetricState, s: KeyPair, e: KeyPair, } pub struct HandshakeStateR0 { st: SymmetricState, psk: Vec, s: KeyPair, e: KeyPair, rs: Vec, } pub struct HandshakeStateR1 { st: SymmetricState, e: KeyPair, rs: Vec, re: Vec, } pub struct Transport { send: CipherState, recv: CipherState, handshake_hash: Vec, } struct ProtocolName([u8; 36]); #[allow(non_upper_case_globals)] const Noise_KKpsk0_25519_ChaChaPoly_SHA256: ProtocolName = ProtocolName([ 78u8, 111u8, 105u8, 115u8, 101u8, 95u8, 75u8, 75u8, 112u8, 115u8, 107u8, 48u8, 95u8, 50u8, 53u8, 53u8, 49u8, 57u8, 95u8, 67u8, 104u8, 97u8, 67u8, 104u8, 97u8, 80u8, 111u8, 108u8, 121u8, 95u8, 83u8, 72u8, 65u8, 50u8, 53u8, 54u8, ]); /// KKpsk0: /// -> s /// <- s /// ... 
pub fn initialize_initiator( prologue: &[u8], psk: Vec, s: KeyPair, e: KeyPair, rs: &[u8], ) -> HandshakeStateI0 { let st = initialize_symmetric(&Noise_KKpsk0_25519_ChaChaPoly_SHA256.0); let st = mix_hash(st, prologue); let st = mix_hash(st, &s.public_key); let st = mix_hash(st, rs); HandshakeStateI0 { psk, st, s, e, rs: rs.to_vec(), } } pub fn initialize_responder( prologue: &[u8], psk: Vec, s: KeyPair, e: KeyPair, rs: &[u8], ) -> HandshakeStateR0 { let st = initialize_symmetric(&Noise_KKpsk0_25519_ChaChaPoly_SHA256.0); let st = mix_hash(st, prologue); let st = mix_hash(st, rs); let st = mix_hash(st, &s.public_key); HandshakeStateR0 { st, psk, s, e, rs: rs.to_vec(), } } /// KKpsk0: /// ... /// -> psk, e, es, ss pub fn write_message1( hs: HandshakeStateI0, payload: &[u8], ) -> Result<(HandshakeStateI1, Vec), Error> { let HandshakeStateI0 { st, psk, s, e, rs } = hs; let st = mix_key_and_hash(st, &psk); let st = mix_hash(st, &e.public_key); let st = mix_key(st, &e.public_key); let es = dh(&e, &rs); let st = mix_key(st, &es); let ss = dh(&s, &rs); let st = mix_key(st, &ss); let (st, ciphertext) = encrypt_and_hash(st, payload)?; let hs = HandshakeStateI1 { st, s, e }; Ok((hs, ciphertext)) } pub fn read_message1( hs: HandshakeStateR0, ciphertext: &[u8], ) -> Result<(HandshakeStateR1, Vec), Error> { let HandshakeStateR0 { st, psk, s, e, rs } = hs; let re = &ciphertext[0..DHLEN]; let ciphertext = &ciphertext[DHLEN..ciphertext.len()]; let st = mix_key_and_hash(st, &psk); let st = mix_hash(st, re); let st = mix_key(st, re); let es = dh(&s, re); let st = mix_key(st, &es); let ss = dh(&s, &rs); let st = mix_key(st, &ss); let (st, plaintext) = decrypt_and_hash(st, ciphertext)?; let hs = HandshakeStateR1 { st, e, rs, re: re.to_vec(), }; Ok((hs, plaintext)) } /// KKpsk0: /// ... 
/// <- e, ee, se pub fn write_message2(hs: HandshakeStateR1, payload: &[u8]) -> Result<(Transport, Vec), Error> { let HandshakeStateR1 { st, e, rs, re } = hs; let st = mix_hash(st, &e.public_key); let st = mix_key(st, &e.public_key); let ee = dh(&e, &re); let st = mix_key(st, &ee); let se = dh(&e, &rs); let st = mix_key(st, &se); let (st, ciphertext) = encrypt_and_hash(st, payload)?; let (c1, c2, h) = split(st); let tx = Transport { send: c2, recv: c1, handshake_hash: h, }; Ok((tx, ciphertext)) } pub fn read_message2( hs: HandshakeStateI1, ciphertext: &[u8], ) -> Result<(Transport, Vec), Error> { let HandshakeStateI1 { st, s, e } = hs; let re = &ciphertext[0..DHLEN]; let ciphertext = &ciphertext[DHLEN..ciphertext.len()]; let st = mix_hash(st, re); let st = mix_key(st, re); let ee = dh(&e, re); let st = mix_key(st, &ee); let se = dh(&s, re); let st = mix_key(st, &se); let (st, plaintext) = decrypt_and_hash(st, ciphertext)?; let (c1, c2, h) = split(st); let tx = Transport { send: c1, recv: c2, handshake_hash: h, }; Ok((tx, plaintext)) } /// KKpsk0: /// -> /// <- pub fn write_transport( tx: Transport, ad: &[u8], payload: &[u8], ) -> Result<(Transport, Vec), Error> { let Transport { send, recv, handshake_hash, } = tx; let (send, ciphertext) = encrypt_with_ad(send, ad, payload)?; let tx = Transport { send, recv, handshake_hash, }; Ok((tx, ciphertext)) } pub fn read_transport( tx: Transport, ad: &[u8], ciphertext: &[u8], ) -> Result<(Transport, Vec), Error> { let Transport { send, recv, handshake_hash, } = tx; let (recv, payload) = decrypt_with_ad(recv, ad, ciphertext)?; let tx = Transport { send, recv, handshake_hash, }; Ok((tx, payload)) } ================================================ FILE: tests/proverif-noise/src/noise_lib.rs ================================================ // Import hacspec and all needed definitions. 
use crate::*; use noise_crypto::*; /// This module defines the generic Noise processing rules /// Section 5: https://noiseprotocol.org/noise.html#processing-rules pub struct CipherState { k: Option>, n: u64, } pub struct SymmetricState { cs: CipherState, ck: Vec, h: Vec, } /// 5.1: The CipherState Object pub fn initialize_key(key: Option>) -> CipherState { CipherState { k: key, n: 0u64 } } pub fn has_key(cs: &CipherState) -> bool { cs.k.is_some() } pub fn set_nonce(cs: CipherState, n: u64) -> CipherState { let CipherState { k, n: _ } = cs; CipherState { k, n } } pub fn encrypt_with_ad( cs: CipherState, ad: &[u8], plaintext: &[u8], ) -> Result<(CipherState, Vec), Error> { let CipherState { k, n } = cs; if n == 0xffffffffffffffffu64 { Err(Error::CryptoError) } else { match k { Some(k) => { let cip = encrypt(&k, n, ad, plaintext); Ok(( CipherState { k: Some(k), n: n + 1, }, cip, )) } None => Ok((CipherState { k, n }, plaintext.to_vec())), } } } pub fn decrypt_with_ad( cs: CipherState, ad: &[u8], ciphertext: &[u8], ) -> Result<(CipherState, Vec), Error> { let CipherState { k, n } = cs; if n == 0xffffffffffffffffu64 { Err(Error::CryptoError) } else { match k { Some(k) => { let plain = decrypt(&k, n, ad, ciphertext)?; Ok(( CipherState { k: Some(k), n: n + 1, }, plain, )) } None => Ok((CipherState { k, n }, ciphertext.to_vec())), } } } pub fn rekey(cs: CipherState) -> Result { let CipherState { k, n } = cs; match k { Some(k) => { let new_k = noise_crypto::rekey(&k); Ok(CipherState { k: Some(new_k), n }) } None => Err(Error::CryptoError), } } /// 5.2: The SymmetricState Object pub fn initialize_symmetric(protocol_name: &[u8]) -> SymmetricState { let pnlen = protocol_name.len(); let hv: Vec = if pnlen < HASHLEN { [protocol_name, &vec![0u8; 32 - pnlen]].concat() } else { hash(protocol_name) }; let ck = hv.clone(); SymmetricState { cs: initialize_key(None), ck, h: hv, } } pub fn mix_key(st: SymmetricState, input_key_material: &[u8]) -> SymmetricState { let SymmetricState { 
cs: _, ck, h } = st; let (ck, mut temp_k) = hkdf2(&ck, input_key_material); if HASHLEN == 64 { temp_k.truncate(32); } SymmetricState { cs: initialize_key(Some(temp_k)), ck, h, } } pub fn mix_hash(st: SymmetricState, data: &[u8]) -> SymmetricState { let SymmetricState { cs, ck, h } = st; SymmetricState { cs, ck, h: hash(&[&h, data].concat()), } } pub fn mix_key_and_hash(st: SymmetricState, input_key_material: &[u8]) -> SymmetricState { let SymmetricState { cs: _, ck, h } = st; let (ck, temp_h, mut temp_k) = hkdf3(&ck, input_key_material); let mut new_h = h; new_h.extend_from_slice(&temp_h); let new_h = hash(&new_h); if HASHLEN == 64 { temp_k.truncate(32); } SymmetricState { cs: initialize_key(Some(temp_k)), ck, h: new_h, } } /// Unclear if we need a special function for psk or we can reuse mix_key_and_hash above //pub fn mix_psk(st:SymmetricState,psk:&[u8]) -> (Vec,Vec,Vec) { // let (ck,temp_hash,cs_k) = kdf3(key,psk); // let next_hash = mix_hash(prev_hash,&temp_hash); // (ck,cs_k,next_hash) //} pub fn encrypt_and_hash( st: SymmetricState, plaintext: &[u8], ) -> Result<(SymmetricState, Vec), Error> { let (new_cs, ciphertext) = encrypt_with_ad(st.cs, &st.h, plaintext)?; let mut new_h = st.h.clone(); new_h.extend_from_slice(&ciphertext); let new_h = hash(&new_h); Ok(( SymmetricState { cs: new_cs, ck: st.ck, h: new_h, }, ciphertext, )) } pub fn decrypt_and_hash( st: SymmetricState, ciphertext: &[u8], ) -> Result<(SymmetricState, Vec), Error> { let (new_cs, plaintext) = decrypt_with_ad(st.cs, &st.h, ciphertext)?; let mut new_h = st.h.clone(); new_h.extend_from_slice(ciphertext); let new_h = hash(&new_h); Ok(( SymmetricState { cs: new_cs, ck: st.ck, h: new_h, }, plaintext, )) } pub fn split(st: SymmetricState) -> (CipherState, CipherState, Vec) { let (mut temp_k1, mut temp_k2) = hkdf2(&st.ck, &Vec::new()); if HASHLEN == 64 { temp_k1.truncate(32); temp_k2.truncate(32); } ( initialize_key(Some(temp_k1)), initialize_key(Some(temp_k2)), st.h, ) } 
================================================ FILE: tests/proverif-ping-pong/Cargo.toml ================================================ [package] name = "ping-pong" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hax-lib-protocol = { path = "../../hax-lib-protocol" } hax-lib-protocol-macros = { path = "../../hax-lib-protocol-macros" } hax-lib = { path = "../../hax-lib" } ================================================ FILE: tests/proverif-ping-pong/pingpong.pv ================================================ set attacker = passive. channel c. type ping_t. type pong_t. fun new_ping(): ping_t. fun ping2pong(ping_t): pong_t. event PingSent(ping_t). event PingReceived(ping_t). event PongSent(pong_t). event PongReceived(pong_t). query p: ping_t; event(PingReceived(p)) ==> event(PingSent(p)). let A = ( let ping = new_ping() in event PingSent(ping); out(c, ping) ) | ( in(c, pong: pong_t); event PongReceived(pong) ). let B = in(c, ping: ping_t); event PingReceived(ping); let pong = ping2pong(ping) in event PongSent(pong); out(c, pong); 0. 
process A | B ================================================ FILE: tests/proverif-ping-pong/src/a.rs ================================================ use hax_lib_protocol::{state_machine::*, ProtocolError, ProtocolResult}; use crate::Message; // ==== A states ==== pub struct A0 { data: u8, } pub struct A1 {} pub struct A2 { #[allow(dead_code)] received: u8, } // ==== A initialization ==== #[hax_lib_protocol_macros::init(A0)] fn init_a(prologue: Vec) -> ProtocolResult { if prologue.len() < 1 { Err(ProtocolError::InvalidPrologue) } else { Ok(A0 { data: prologue[0] }) } } // The following generated by macro: /* #[hax_lib_macros::exclude] impl TryFrom> for A0 { type Error = ProtocolError; fn try_from(value: Vec) -> Result { init_a(value) } } #[hax_lib_macros::exclude] impl InitialState for A0 { fn init(prologue: Option>) -> ProtocolResult { if let Some(prologue) = prologue { prologue.try_into() } else { Err(ProtocolError::InvalidPrologue) } } } */ // ==== A state transistion functions ==== #[hax_lib_protocol_macros::write(A0, A1, Message)] fn write_ping(state: A0) -> ProtocolResult<(A1, Message)> { Ok((A1 {}, Message::Ping(state.data))) } // The following generated by macro: /*#[hax_lib_macros::exclude] impl TryFrom for (A1, Message) { type Error = ProtocolError; fn try_from(value: A0) -> Result { write_ping(value) } } #[hax_lib_macros::exclude] impl WriteState for A0 { type NextState = A1; type Message = Message; fn write(self) -> ProtocolResult<(Self::NextState, Message)> { self.try_into() } }*/ #[hax_lib_protocol_macros::read(A1, A2, Message)] fn read_pong(_state: A1, msg: Message) -> ProtocolResult { match msg { Message::Ping(_) => Err(ProtocolError::InvalidMessage), Message::Pong(received) => Ok(A2 { received }), } } // The following generated by macro: /*#[hax_lib_macros::exclude] impl TryFrom<(A1, Message)> for A2 { type Error = ProtocolError; fn try_from((state, msg): (A1, Message)) -> Result { read_pong(state, msg) } } #[hax_lib_macros::exclude] impl 
ReadState for A1 { type Message = Message; fn read(self, msg: Message) -> ProtocolResult { A2::try_from((self, msg)) } }*/ ================================================ FILE: tests/proverif-ping-pong/src/b.rs ================================================ use hax_lib_protocol::{state_machine::*, ProtocolError, ProtocolResult}; use crate::Message; // ==== B states ==== pub struct B0 {} pub struct B1 { received: u8, } // An alternative successor of B0 to show read alternatives pub struct B1alt {} pub struct B2 {} // ==== B initialization ==== #[hax_lib_protocol_macros::init_empty(B0)] fn init_b() -> ProtocolResult { Ok(B0 {}) } // The following generated by macro: // #[hax_lib_macros::exclude] // impl InitialState for B0 { // fn init(prologue: Option>) -> ProtocolResult { // if let Some(_) = prologue { // Err(ProtocolError::InvalidPrologue) // } else { // init_b() // } // } // } // ==== B state transistion functions ==== #[hax_lib_protocol_macros::read(B0, B1, Message)] fn read_ping(_state: B0, msg: Message) -> ProtocolResult { match msg { Message::Ping(received) => Ok(B1 { received }), Message::Pong(_) => Err(ProtocolError::InvalidMessage), } } // The following generated by macro: /*#[hax_lib_macros::exclude] impl TryFrom<(B0, Message)> for B1 { type Error = ProtocolError; fn try_from((state, msg): (B0, Message)) -> Result { read_ping(state, msg) } } #[hax_lib_macros::exclude] impl ReadState for B0 { type Message = Message; fn read(self, msg: Message) -> Result { B1::try_from((self, msg)) } }*/ #[hax_lib_protocol_macros::read(B0, B1alt, Message)] fn read_ping_alt(_state: B0, msg: Message) -> ProtocolResult { match msg { Message::Ping(received) if received == 42 => Ok(B1alt {}), _ => Err(ProtocolError::InvalidMessage), } } // The following generated by macro: /*#[hax_lib_macros::exclude] impl TryFrom<(B0, Message)> for B1alt { type Error = ProtocolError; fn try_from((state, msg): (B0, Message)) -> Result { read_ping_alt(state, msg) } } #[hax_lib_macros::exclude] 
impl ReadState for B0 { type Message = Message; fn read(self, msg: Message) -> Result { B1alt::try_from((self, msg)) } }*/ #[hax_lib_protocol_macros::write(B1, B2, Message)] fn write_pong(state: B1) -> ProtocolResult<(B2, Message)> { Ok((B2 {}, Message::Pong(state.received))) } // The following generated by macro: /*#[hax_lib_macros::exclude] impl TryFrom for (B2, Message) { type Error = ProtocolError; fn try_from(value: B1) -> Result { write_pong(value) } } #[hax_lib_macros::exclude] impl WriteState for B1 { type Message = Message; type NextState = B2; fn write(self) -> Result<(Self::NextState, Message), ProtocolError> { self.try_into() } }*/ ================================================ FILE: tests/proverif-ping-pong/src/lib.rs ================================================ mod a; mod b; #[hax_lib::protocol_messages] pub enum Message { Ping(u8), Pong(u8), } #[test] fn run() { use a::A0; use b::{B0, B1}; use hax_lib_protocol::state_machine::{InitialState, ReadState, WriteState}; let a = A0::init(Some(vec![1])).unwrap(); let b = B0::init(None).unwrap(); let (a, msg) = a.write().unwrap(); let b: B1 = b.read(msg).unwrap(); let (_b, msg) = b.write().unwrap(); let _a = a.read(msg).unwrap(); } ================================================ FILE: tests/raw-attributes/Cargo.toml ================================================ [package] name = "raw-attributes" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { snapshot = "none" } ================================================ FILE: tests/raw-attributes/README.md ================================================ This example is more interesting with the debug mode enabled, to see all the attributes: - `cargo hax into --debug-engine SOME_EMPTY_EXISTING_DIR fstar` - `cd engine/utils/phase_debug_webapp && PORT=8989 node server.js SOME_EMPTY_EXISTING_DIR` - browse `http://localhost:8989/`, and observe the attributes 
================================================ FILE: tests/raw-attributes/src/lib.rs ================================================ #![allow(dead_code)] #![feature(register_tool)] #![register_tool(hax)] /** TypeAlias:BlockDocComment Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer bibendum, massa quis facilisis aliquam, dui libero auctor sem, aliquet dignissim urna magna ac turpis. */ #[hax::a::path(TypeAlias attr)] type TypeAlias<#[hax::a::path(TypeAlias:T attr)] T: Clone> = (T, u8); /// f:LineBlockComment #[hax::a::path(f attr)] fn f<#[hax::a::path(f:T attr)] T, #[hax::a::path(f:Y attr)] const Y: usize>(_x: T) -> usize { Y } #[hax::a::path(Foo attr)] enum Foo { #[hax::a::path(Foo:A attr)] A( #[hax::a::path(Foo:A:u8 attr)] u8, #[hax::a::path(Foo:A:u16 attr)] u16, ), #[hax::a::path(Foo:B attr)] B { /// some Foo::B::x comment #[hax::a::path(Foo:B:x attr)] x: u8, /// some Foo::B::y comment #[hax::a::path(Foo:B:y attr)] y: u16, }, } #[hax::a::path(Bar attr)] struct Bar { #[hax::a::path(Bar:field1 attr)] field1: u64, /** some Bar::field2 comment Quisque et purus lacinia, venenatis risus eu, hendrerit arcu. Nunc posuere iaculis mattis. Sed at enim justo. Praesent aliquet ipsum in enim mollis faucibus. Morbi eu diam molestie, posuere quam eget, pulvinar diam. 
*/ #[hax::a::path(Bar:field2 attr)] field2: u32, } ================================================ FILE: tests/recursion/Cargo.toml ================================================ [package] name = "recursion" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { } ================================================ FILE: tests/recursion/src/lib.rs ================================================ #![allow(dead_code)] pub fn f(n: u8) -> u8 { if n == 0 { 0 } else { n + f(n - 1) } } ================================================ FILE: tests/reordering/Cargo.toml ================================================ [package] name = "reordering" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq+ssprove" = { snapshot = "stdout" } ================================================ FILE: tests/reordering/src/lib.rs ================================================ #![allow(dead_code)] fn no_dependency_1() {} fn g() -> Bar { Bar(f(32)) } fn no_dependency_2() {} fn f(_: u32) -> Foo { Foo::A } struct Bar(Foo); enum Foo { A, B, } mod mut_rec { fn f() { g() } fn f_2() { f() } fn g() { f() } } mod independent_cycles { fn a() { c() } fn b() { d() } fn c() { a() } fn d() { b() } } ================================================ FILE: tests/side-effects/Cargo.toml ================================================ [package] name = "side-effects" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into.fstar = {} into.coq = {broken = true, snapshot = "none", issue_id = 134} into.ssprove = {broken = true, snapshot = "none"} ================================================ FILE: tests/side-effects/src/lib.rs ================================================ #![allow(dead_code)] /// Helper function fn add3(x: u32, y: u32, z: u32) -> u32 { x.wrapping_add(y).wrapping_add(z) } /// Exercise local mutation with control flow and loops fn local_mutation(mut x: u32) -> u32 { let mut y = 0; 
if { x = x.wrapping_add(1); x > 3 } { x = x.wrapping_sub(3); let mut y = x / 2; for i in { y = y.wrapping_add(2); 0 }..10 { y = x.wrapping_add(i); } x.wrapping_add(y) } else { x = match x { 12 => { y = x.wrapping_add(y); 3 } 13 => add3( x, { x = x.wrapping_add(1); 123u32.wrapping_add(x) }, x, ), _ => 0, }; x.wrapping_add(y) } } /// Exercise early returns with control flow and loops fn early_returns(mut x: u32) -> u32 { return (123u32.wrapping_add( if { if x > 3 { return 0; }; x > 30 } { match true { true => return 34, _ => 3, } } else { x = x + 9; x + 1 }, )) .wrapping_add(x); } fn simplifiable_return(c1: bool, c2: bool, c3: bool) -> i32 { let mut x = 0; if c1 { if c2 { x += 10; if c3 { return 1; } } x += 1; } x } fn simplifiable_question_mark(c: bool, x: Option) -> Option { let a = if c { x? + 10 } else { 0 }; let b = 20; Some(a + b) } /// Question mark without error coercion fn direct_result_question_mark(y: Result<(), u32>) -> Result { y?; Ok(0) } /// Question mark with an error coercion fn direct_result_question_mark_coercion(y: Result) -> Result { Ok(y?) } /// Test question mark on `Option`s with some control flow fn options(x: Option, y: Option, z: Option) -> Option { let v = match (if x? > 10 { Some(x?.wrapping_add(3)) } else { Some(x?.wrapping_add(y?)) })? { 3 => None?, 4 => 4 + (if z? > 4 { 0 } else { 3 }), _ => 12u8, }; Some(v.wrapping_add(x?).wrapping_add(y?)) } /// Test question mark on `Result`s with local mutation fn question_mark(mut x: u32) -> Result { if x > 40u32 { let mut y = 0; x = x.wrapping_add(3); y = x.wrapping_add(y); if { x = x.wrapping_add(y); x > 90u32 } { Err(12u8)? 
} }; Ok(3u32.wrapping_add(x)) } struct A; struct B; /// Combine `?` and early return fn monad_lifting(x: u8) -> Result { if x > 123 { return Ok(Err(B)?); } else { Ok(A) } } struct Bar { a: bool, b: ([(bool, bool); 6], bool), } struct Foo { x: bool, y: (bool, Vec), z: [Bar; 6], bar: Bar, } /// Test assignation on non-trivial places fn assign_non_trivial_lhs(mut foo: Foo) -> Foo { foo.x = true; foo.bar.a = true; foo.bar.b.0[3].1 = true; foo.z[3].a = true; foo.y.1[3].b.0[5].0 = true; foo } mod issue_1083 { trait MyFrom { fn my_from(x: T) -> Self; } impl MyFrom for u16 { fn my_from(x: u8) -> u16 { x as u16 } } fn f(x: u8) -> Result { Err(1u8)?; Ok(u16::my_from(x)) } } mod issue_1089 { fn test(x: Option, y: Option) -> Option { x.map(|i| Some(i + y?))? } } /// issue 1175 mod nested_return { fn other_fun(rng: &mut i8) -> Result<(), ()> { Ok(()) } fn fun(rng: &mut i8) -> Result<(), ()> { return Ok(other_fun(rng)?); } } mod issue_1300 { fn fun() -> Result<(), u8> { let val = [0u8; 5] .iter() // Removing the inner Result/? below makes this pass .map(|&prev| Ok::<(u8, [u8; 32]), u8>((prev, Ok::<[u8; 32], u8>([0u8; 32])?))) // Removing the ? below makes this pass .collect::, _>>()?; Ok(()) } } mod issue_1299 { pub struct S { pub g: Foo, } pub struct OtherS { pub g: Option, } pub struct Foo { y: u8, } impl Foo { pub fn from(i: &Foo) -> Self { Self { y: i.y.clone() } } } struct Error(); impl S { pub fn from(i: &OtherS) -> Result { Ok(Self { g: Foo::from(i.g.as_ref().ok_or(Error())?), }) } } } ================================================ FILE: tests/slices/Cargo.toml ================================================ [package] name = "slices" version = "0.1.0" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar+coq" = { broken = false, issue_id = "85" } ================================================ FILE: tests/slices/src/lib.rs ================================================ #![allow(dead_code)] // The issue here is probably both, pointer and slice. 
We first run into the slice. const VERSION: &[u8] = b"v1"; // This panics // thread 'rustc' panicked at 'hax-engine exited with non-zero code', cli/driver/src/exporter.rs:217:2 pub fn do_something(_: &[u8]) {} pub fn sized(x: &[&[u8; 4]; 1]) { r#unsized(&[(x[0] as &[u8])]) } pub fn r#unsized(_: &[&[u8]; 1]) {} ================================================ FILE: tests/statics/Cargo.toml ================================================ [package] name = "statics" version = "0.1.0" edition = "2021" [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/statics/src/lib.rs ================================================ static FOO: usize = 0; fn get_foo() -> usize { FOO } ================================================ FILE: tests/traits/Cargo.toml ================================================ [package] name = "traits" version = "0.1.0" edition = "2021" [dependencies] typenum = "1.18.0" [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/traits/src/lib.rs ================================================ #![allow(dead_code)] pub trait SuperTrait: Clone { fn function_of_super_trait(self) -> u32; } pub trait Foo { type AssocType: SuperTrait; const N: usize; fn assoc_f() -> (); fn method_f(&self) -> (); fn assoc_type(_: Self::AssocType) -> () where Self::AssocType: Copy; } impl SuperTrait for i32 { fn function_of_super_trait(self) -> u32 { self.abs() as u32 } } impl Foo for () { type AssocType = i32; const N: usize = 32; fn assoc_f() { () } fn method_f(&self) { Self::assoc_f() } fn assoc_type(_: Self::AssocType) -> () {} } fn f(x: T) { T::assoc_f(); x.method_f() } fn g(x: T::AssocType) -> u32 { x.function_of_super_trait() } struct Struct; trait Bar<'a> { fn bar(self); } impl<'a> Struct { fn method>(x: T) { x.bar() } } pub fn closure_impl_expr>(it: I) -> Vec<()> { it.map(|x| x).collect() } pub fn 
closure_impl_expr_fngen, F: FnMut(()) -> ()>(it: I, f: F) -> Vec<()> { it.map(f).collect() } // From issue #523 pub trait Lang: Sized { type Var; fn s(self, _: i32) -> (Self, Self::Var); } pub enum Error { Fail, } // From issue #474 impl Error { pub fn for_application_callback() -> impl FnOnce() -> Self { || Self::Fail } } // Trickier case. fn iter_option<'a, T>(x: &'a Option) -> impl Iterator { x.as_ref().into_iter() } // Issue #684 fn use_impl_trait() { let mut iter = iter_option(&Some(false)); let _ = iter.next(); } mod for_clauses { trait Foo { fn to_t(&self) -> T; } fn _f Foo<&'a u8>>(x: X) { x.to_t(); } // From issue #495 mod issue_495 { use core::iter::Filter; use core::ops::Range; fn original_function_from_495(list: Vec) { let _indices: Vec<_> = (0..5).filter(|i| list.iter().any(|n| n == i)).collect(); } fn minimized_1(list: Vec) -> Vec { (0..5).filter(|_| true).collect() } fn minimized_2(it: Filter, for<'a> fn(&'a u8) -> bool>) { let _indices: Vec<_> = it.collect(); } mod minimized_3 { pub trait Trait {} impl bool> Trait for P {} } } } mod unconstrainted_types_issue_677 { trait PolyOp { fn op(x: u32, y: u32) -> u32; } struct Plus; impl PolyOp for Plus { fn op(x: u32, y: u32) -> u32 { x + y } } struct Times; impl PolyOp for Times { fn op(x: u32, y: u32) -> u32 { x * y } } fn twice(x: u32) -> u32 { OP::op(x, x) } fn both(x: u32) -> (u32, u32) { (twice::(x), twice::(x)) } #[test] fn test() { assert!(both(10) == (20, 100)); } } // From issue_667 mod implicit_dependencies_issue_667 { mod trait_definition { pub trait MyTrait { fn my_method(self); } } mod define_type { pub struct MyType; } mod impl_type { impl super::trait_definition::MyTrait for super::define_type::MyType { fn my_method(self) {} } } mod use_type { fn some_function(x: super::define_type::MyType) { use super::trait_definition::MyTrait; x.my_method() } } } // Related to issue 719 mod interlaced_consts_types { struct Bar([FooType; FooConst]); trait Foo { fn fun(x: [FooType; FooConst], y: [FunType; 
FunConst]); } impl Foo for SelfType { fn fun(x: [FooType; FooConst], y: [FunType; FunConst]) {} } } // Related to issue #719 (after reopen) mod implicit_explicit_calling_conventions { struct Type { field: [TypeArg; ConstArg], } trait Trait { fn method( self, value_TypeArg: TypeArg, value_Type: Type, ); fn associated_function( _self: Self, value_TypeArg: TypeArg, value_Type: Type, ); } impl Trait for () { fn method( self, value_TypeArg: TypeArg, value_Type: Type, ) { } fn associated_function( _self: Self, value_TypeArg: TypeArg, value_Type: Type, ) { } } trait SubTrait: Trait { type AssocType: Trait; } fn method_caller< MethodTypeArg, TypeArg, const ConstArg: usize, const MethodConstArg: usize, ImplTrait: Trait, >( x: ImplTrait, value_TypeArg: TypeArg, value_Type: Type, ) { x.method::(value_TypeArg, value_Type); } fn associated_function_caller< MethodTypeArg, TypeArg, const ConstArg: usize, const MethodConstArg: usize, ImplTrait: Trait, >( x: ImplTrait, value_TypeArg: TypeArg, value_Type: Type, ) { ImplTrait::associated_function::( x, value_TypeArg, value_Type, ); } } mod type_alias_bounds_issue_707 { struct StructWithGenericBounds(T); type SynonymA = StructWithGenericBounds; type SynonymB = StructWithGenericBounds<(T, T)>; } // Related to PR 730 mod block_size { pub trait BlockSizeUser { type BlockSize; } pub trait ParBlocksSizeUser: BlockSizeUser {} pub trait BlockBackend: ParBlocksSizeUser { fn proc_block(block: Vec<::BlockSize>); } } // issue 692 mod recursive_trait_with_assoc_type { pub trait Trait1 { type T: Trait1; } pub trait Trait2: Trait1 { type U; } } // issue 310 mod default_traits_parameters { trait Foo: Bar { type U; } trait Bar::U> {} } // issue 1218 mod impl_expr_in_goal { trait T1 { type Assoc; } trait T2 {} impl T2 for U where U::Assoc: T2 {} } // issue 1290 mod implement_arithmetic_trait { struct Wrapped(i32); impl std::ops::Add for Wrapped { type Output = Wrapped; fn add(self, rhs: Self) -> Self::Output { Wrapped(self.0 + rhs.0) } } fn test(x: 
Wrapped, y: Wrapped) -> Wrapped { x + y } } // issue 1566 mod typenum_perf { use typenum::{IsLess, UInt, UTerm, B1}; type I20 = UInt; type I19 = UInt; type I18 = UInt; type I17 = UInt; type I16 = UInt; type I15 = UInt; type I14 = UInt; type I13 = UInt; type I12 = UInt; type I11 = UInt; type I10 = UInt; type I9 = UInt; type I8 = UInt; type I7 = UInt; type I6 = UInt; type I5 = UInt; type I4 = UInt; type I3 = UInt; type I2 = UInt; type I1 = UInt; type I0 = UTerm; fn _f>() {} } ================================================ FILE: tests/tuples/Cargo.toml ================================================ [package] name = "tuples" version = "0.0.1" edition = "2021" [dependencies] [package.metadata.hax-tests] into."fstar" = { snapshot = "stdout" } ================================================ FILE: tests/tuples/src/lib.rs ================================================ #![allow(dead_code)] pub fn project_tuple1() -> u8 { let tuple1: (u8,) = (3,); tuple1.0 } ================================================ FILE: tests/unsafe/Cargo.toml ================================================ [package] name = "unsafe" version = "0.1.0" edition = "2021" [dependencies] hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { broken = false } ================================================ FILE: tests/unsafe/src/lib.rs ================================================ #![allow(dead_code)] enum Impossible {} #[hax_lib::requires(false)] pub fn impossible() -> Impossible { unsafe { std::hint::unreachable_unchecked() } } #[hax_lib::requires(slice.len() > 10)] pub fn get_unchecked_example(slice: &[u8]) -> u8 { unsafe { *slice.get_unchecked(6) } }