Repository: DioxusLabs/dioxus Branch: main Commit: be7296f4f037 Files: 1335 Total size: 6.4 MB Directory structure: gitextract_gubgus_p/ ├── .devcontainer/ │ ├── Dockerfile │ ├── README.md │ └── devcontainer.json ├── .github/ │ ├── CODEOWNERS │ ├── FUNDING.yml │ ├── ISSUE_TEMPLATE/ │ │ ├── bug_report.md │ │ └── feature_requst.md │ ├── actions/ │ │ └── free-disk-space/ │ │ └── action.yml │ ├── dependabot.yml │ ├── install.ps1 │ ├── install.sh │ └── workflows/ │ ├── main.yml │ ├── merge.yml │ ├── promote.yml │ ├── publish.yml │ ├── setup-dev-drive.ps1 │ └── typos.yml ├── .gitignore ├── .vscode/ │ └── settings.json ├── AGENTS.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── _typos.toml ├── codecov.yml ├── examples/ │ ├── 01-app-demos/ │ │ ├── bluetooth-scanner/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── README.md │ │ │ ├── assets/ │ │ │ │ └── tailwind.css │ │ │ ├── src/ │ │ │ │ └── main.rs │ │ │ └── tailwind.css │ │ ├── calculator.rs │ │ ├── calculator_mutable.rs │ │ ├── counters.rs │ │ ├── crm.rs │ │ ├── dog_app.rs │ │ ├── ecommerce-site/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── README.md │ │ │ ├── public/ │ │ │ │ ├── loading.css │ │ │ │ └── tailwind.css │ │ │ ├── src/ │ │ │ │ ├── api.rs │ │ │ │ ├── components/ │ │ │ │ │ ├── error.rs │ │ │ │ │ ├── home.rs │ │ │ │ │ ├── loading.rs │ │ │ │ │ ├── nav.rs │ │ │ │ │ ├── product_item.rs │ │ │ │ │ └── product_page.rs │ │ │ │ └── main.rs │ │ │ └── tailwind.css │ │ ├── file-explorer/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── Dioxus.toml │ │ │ ├── README.md │ │ │ ├── assets/ │ │ │ │ └── fileexplorer.css │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── geolocation-native-plugin/ │ │ │ ├── Cargo.toml │ │ │ ├── Dioxus.toml │ │ │ ├── README.md │ │ │ ├── assets/ │ │ │ │ └── main.css │ │ │ └── src/ │ │ │ ├── android/ │ │ │ │ ├── build.gradle.kts │ │ │ │ ├── consumer-rules.pro │ │ │ │ └── src/ │ │ │ │ └── main/ │ │ │ │ ├── AndroidManifest.xml │ │ │ │ └── kotlin/ │ │ │ │ └── com/ │ │ │ │ └── 
dioxus/ │ │ │ │ └── geolocation/ │ │ │ │ ├── Geolocation.kt │ │ │ │ └── GeolocationPlugin.kt │ │ │ ├── ios/ │ │ │ │ ├── .gitignore │ │ │ │ ├── plugin/ │ │ │ │ │ ├── Package.swift │ │ │ │ │ ├── Sources/ │ │ │ │ │ │ ├── GeolocationPlugin.swift │ │ │ │ │ │ └── LocationActivityAttributes.swift │ │ │ │ │ └── Tests/ │ │ │ │ │ └── PluginTests/ │ │ │ │ │ └── PluginTests.swift │ │ │ │ └── widget/ │ │ │ │ ├── Package.swift │ │ │ │ └── Sources/ │ │ │ │ ├── LocationActivityAttributes.swift │ │ │ │ └── LocationWidget.swift │ │ │ ├── main.rs │ │ │ └── plugin/ │ │ │ ├── error.rs │ │ │ ├── mod.rs │ │ │ └── models.rs │ │ ├── hackernews/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── assets/ │ │ │ │ └── hackernews.css │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── hello_world.rs │ │ ├── hotdog/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── Dioxus.toml │ │ │ ├── Dockerfile │ │ │ ├── README.md │ │ │ ├── assets/ │ │ │ │ └── main.css │ │ │ ├── fly.toml │ │ │ └── src/ │ │ │ ├── backend.rs │ │ │ ├── frontend.rs │ │ │ └── main.rs │ │ ├── image_generator_openai.rs │ │ ├── repo_readme.rs │ │ ├── todomvc.rs │ │ ├── todomvc_store.rs │ │ ├── weather_app.rs │ │ └── websocket_chat.rs │ ├── 02-building-ui/ │ │ ├── disabled.rs │ │ ├── nested_listeners.rs │ │ └── svg.rs │ ├── 03-assets-styling/ │ │ ├── css_modules.rs │ │ ├── custom_assets.rs │ │ ├── dynamic_assets.rs │ │ ├── meta.rs │ │ └── meta_elements.rs │ ├── 04-managing-state/ │ │ ├── context_api.rs │ │ ├── error_handling.rs │ │ ├── global.rs │ │ ├── memo_chain.rs │ │ ├── reducer.rs │ │ └── signals.rs │ ├── 05-using-async/ │ │ ├── backgrounded_futures.rs │ │ ├── clock.rs │ │ ├── future.rs │ │ ├── streams.rs │ │ └── suspense.rs │ ├── 06-routing/ │ │ ├── flat_router.rs │ │ ├── hash_fragment_state.rs │ │ ├── link.rs │ │ ├── query_segment_search.rs │ │ ├── router.rs │ │ ├── router_resource.rs │ │ ├── router_restore_scroll.rs │ │ └── simple_router.rs │ ├── 07-fullstack/ │ │ ├── auth/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ ├── auth.rs │ │ │ 
└── main.rs │ │ ├── custom_axum_serve.rs │ │ ├── custom_error_page.rs │ │ ├── desktop/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── dog_app_self_hosted.rs │ │ ├── full_request_access.rs │ │ ├── fullstack_hello_world.rs │ │ ├── handling_errors.rs │ │ ├── header_map.rs │ │ ├── hello-world/ │ │ │ ├── Cargo.toml │ │ │ ├── assets/ │ │ │ │ └── hello.css │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── login_form.rs │ │ ├── middleware.rs │ │ ├── multipart_form.rs │ │ ├── query_params.rs │ │ ├── redirect.rs │ │ ├── router/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── server_functions.rs │ │ ├── server_sent_events.rs │ │ ├── server_state.rs │ │ ├── ssr-only/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── streaming.rs │ │ ├── streaming_file_upload.rs │ │ ├── through_reqwest.rs │ │ └── websocket.rs │ ├── 08-apis/ │ │ ├── control_focus.rs │ │ ├── custom_html.rs │ │ ├── custom_menu.rs │ │ ├── drag_and_drop.rs │ │ ├── eval.rs │ │ ├── file_upload.rs │ │ ├── form.rs │ │ ├── logging.rs │ │ ├── multiwindow.rs │ │ ├── multiwindow_with_tray_icon.rs │ │ ├── on_resize.rs │ │ ├── on_visible.rs │ │ ├── overlay.rs │ │ ├── read_size.rs │ │ ├── scroll_to_offset.rs │ │ ├── scroll_to_top.rs │ │ ├── shortcut.rs │ │ ├── ssr.rs │ │ ├── title.rs │ │ ├── video_stream.rs │ │ ├── wgpu_child_window.rs │ │ ├── window_event.rs │ │ ├── window_focus.rs │ │ ├── window_popup.rs │ │ └── window_zoom.rs │ ├── 09-reference/ │ │ ├── all_events.rs │ │ ├── generic_component.rs │ │ ├── optional_props.rs │ │ ├── rsx_usage.rs │ │ ├── shorthand.rs │ │ ├── simple_list.rs │ │ ├── spread.rs │ │ ├── web_component.rs │ │ └── xss_safety.rs │ ├── 10-integrations/ │ │ ├── bevy/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ ├── bevy_renderer.rs │ │ │ ├── bevy_scene_plugin.rs │ │ │ ├── demo_renderer.rs │ │ │ ├── main.rs │ │ │ └── styles.css │ │ ├── native-headless/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── native-headless-in-bevy/ │ │ │ ├── Cargo.toml │ │ │ ├── README.md 
│ │ │ └── src/ │ │ │ ├── bevy_scene_plugin.rs │ │ │ ├── dioxus_in_bevy_plugin.rs │ │ │ ├── main.rs │ │ │ ├── ui.css │ │ │ └── ui.rs │ │ ├── pwa/ │ │ │ ├── Cargo.toml │ │ │ ├── Dioxus.toml │ │ │ ├── LICENSE │ │ │ ├── README.md │ │ │ ├── index.html │ │ │ ├── public/ │ │ │ │ ├── manifest.json │ │ │ │ └── sw.js │ │ │ └── src/ │ │ │ └── main.rs │ │ ├── tailwind/ │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ ├── README.md │ │ │ ├── assets/ │ │ │ │ └── tailwind.css │ │ │ ├── src/ │ │ │ │ └── main.rs │ │ │ └── tailwind.css │ │ └── wgpu-texture/ │ │ ├── Cargo.toml │ │ └── src/ │ │ ├── demo_renderer.rs │ │ ├── main.rs │ │ ├── shader.wgsl │ │ └── styles.css │ ├── assets/ │ │ ├── calculator.css │ │ ├── clock.css │ │ ├── context_api.css │ │ ├── counter.css │ │ ├── crm.css │ │ ├── css_module1.css │ │ ├── css_module2.css │ │ ├── custom_assets.css │ │ ├── events.css │ │ ├── file_upload.css │ │ ├── flat_router.css │ │ ├── links.css │ │ ├── overlay.css │ │ ├── radio.css │ │ ├── read_size.css │ │ ├── roulette.css │ │ ├── router.css │ │ ├── todomvc.css │ │ ├── visible.css │ │ └── weatherapp.css │ └── scripts/ │ └── scrape_examples.rs ├── flake.nix ├── lychee.toml ├── notes/ │ ├── CONTRIBUTING.md │ ├── FAQ.md │ ├── RELEASING.md │ ├── SECURITY.md │ ├── android_and_ios2.avif │ ├── architecture/ │ │ ├── 00-OVERVIEW.md │ │ ├── 01-CORE.md │ │ ├── 02-CLI.md │ │ ├── 03-RSX.md │ │ ├── 04-SIGNALS.md │ │ ├── 05-FULLSTACK.md │ │ ├── 06-RENDERERS.md │ │ ├── 07-HOTRELOAD.md │ │ ├── 08-ASSETS.md │ │ ├── 09-ROUTER.md │ │ ├── 10-WASM-SPLIT.md │ │ ├── 11-NATIVE-PLUGIN-FFI.md │ │ └── 12-MANIFEST-SYSTEM.md │ ├── dioxus-community.avif │ ├── dioxus_splash_8.avif │ ├── docs.avif │ ├── ebou2.avif │ ├── flat-splash.avif │ ├── fullstack-websockets.avif │ ├── image-splash.avif │ ├── primitive-components.avif │ ├── releases/ │ │ └── 0.7.0-alpha.0.md │ └── translations/ │ ├── fa-ir/ │ │ └── README.md │ ├── ja-jp/ │ │ └── README.md │ ├── ko-kr/ │ │ └── README.md │ ├── pt-br/ │ │ └── README.md │ ├── tr-tr/ │ │ 
└── README.md │ └── zh-cn/ │ └── README.md └── packages/ ├── asset-resolver/ │ ├── Cargo.toml │ ├── assets/ │ │ └── data.json │ └── src/ │ ├── lib.rs │ ├── native.rs │ └── web.rs ├── autofmt/ │ ├── .vscode/ │ │ └── settings.json │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── buffer.rs │ │ ├── collect_macros.rs │ │ ├── indent.rs │ │ ├── lib.rs │ │ ├── prettier_please.rs │ │ └── writer.rs │ └── tests/ │ ├── error_handling.rs │ ├── partials/ │ │ ├── no_parse.rsx │ │ ├── okay.rsx │ │ └── wrong.rsx │ ├── samples/ │ │ ├── asset.rsx │ │ ├── attributes.rsx │ │ ├── basic_expr.rsx │ │ ├── blank_lines.rsx │ │ ├── blank_lines_preserved.rsx │ │ ├── collapse.rsx │ │ ├── collapse_expr.rsx │ │ ├── comments.rsx │ │ ├── commentshard.rsx │ │ ├── complex.rsx │ │ ├── docsite.rsx │ │ ├── emoji.rsx │ │ ├── expr_on_conditional.rsx │ │ ├── fat_exprs.rsx │ │ ├── ifchain_forloop.rsx │ │ ├── immediate_expr.rsx │ │ ├── key.rsx │ │ ├── letsome.rsx │ │ ├── long.rsx │ │ ├── long_exprs.rsx │ │ ├── manual_props.rsx │ │ ├── many_exprs.rsx │ │ ├── messy_indent.rsx │ │ ├── misplaced.rsx │ │ ├── multirsx.rsx │ │ ├── nested.rsx │ │ ├── oneline.rsx │ │ ├── prop_rsx.rsx │ │ ├── raw_strings.rsx │ │ ├── reallylong.rsx │ │ ├── shorthand.rsx │ │ ├── simple.rsx │ │ ├── skip.rsx │ │ ├── spaces.rsx │ │ ├── staged.rsx │ │ ├── t2.rsx │ │ ├── tiny.rsx │ │ ├── tinynoopt.rsx │ │ └── trailing_expr.rsx │ ├── samples.rs │ ├── srcless/ │ │ ├── asset.rsx │ │ └── basic_expr.rsx │ ├── srcless.rs │ ├── wrong/ │ │ ├── comments-4sp.rsx │ │ ├── comments-4sp.wrong.rsx │ │ ├── comments-attributes-4sp.rsx │ │ ├── comments-attributes-4sp.wrong.rsx │ │ ├── comments-big.rsx │ │ ├── comments-big.wrong.rsx │ │ ├── comments-inline-4sp.rsx │ │ ├── comments-inline-4sp.wrong.rsx │ │ ├── comments-tab.rsx │ │ ├── comments-tab.wrong.rsx │ │ ├── multi-4sp.rsx │ │ ├── multi-4sp.wrong.rsx │ │ ├── multi-tab.rsx │ │ ├── multi-tab.wrong.rsx │ │ ├── multiexpr-4sp.rsx │ │ ├── multiexpr-4sp.wrong.rsx │ │ ├── multiexpr-many.rsx │ │ ├── 
multiexpr-many.wrong.rsx │ │ ├── multiexpr-tab.rsx │ │ ├── multiexpr-tab.wrong.rsx │ │ ├── oneline-expand.rsx │ │ ├── oneline-expand.wrong.rsx │ │ ├── shortened.rsx │ │ ├── shortened.wrong.rsx │ │ ├── simple-combo-expr.rsx │ │ ├── simple-combo-expr.wrong.rsx │ │ ├── skipfail.rsx │ │ ├── skipfail.wrong.rsx │ │ ├── syntax_error.rsx │ │ └── syntax_error.wrong.rsx │ └── wrong.rs ├── check/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── check.rs │ ├── issues.rs │ ├── lib.rs │ └── metadata.rs ├── cli/ │ ├── .gitignore │ ├── Cargo.toml │ ├── Dioxus.toml │ ├── README.md │ ├── assets/ │ │ ├── android/ │ │ │ ├── MainActivity.kt.hbs │ │ │ ├── gen/ │ │ │ │ ├── .gitignore │ │ │ │ ├── app/ │ │ │ │ │ ├── build.gradle.kts.hbs │ │ │ │ │ ├── proguard-rules.pro │ │ │ │ │ └── src/ │ │ │ │ │ └── main/ │ │ │ │ │ ├── AndroidManifest.xml.hbs │ │ │ │ │ ├── assets/ │ │ │ │ │ │ └── .gitignore │ │ │ │ │ ├── kotlin/ │ │ │ │ │ │ └── .gitignore │ │ │ │ │ └── res/ │ │ │ │ │ ├── drawable/ │ │ │ │ │ │ └── ic_launcher_background.xml │ │ │ │ │ ├── drawable-v24/ │ │ │ │ │ │ └── ic_launcher_foreground.xml │ │ │ │ │ ├── mipmap-anydpi-v26/ │ │ │ │ │ │ └── ic_launcher.xml │ │ │ │ │ ├── values/ │ │ │ │ │ │ ├── colors.xml │ │ │ │ │ │ ├── strings.xml.hbs │ │ │ │ │ │ └── styles.xml │ │ │ │ │ └── xml/ │ │ │ │ │ └── network_security_config.xml │ │ │ │ ├── build.gradle.kts │ │ │ │ ├── gradle/ │ │ │ │ │ └── wrapper/ │ │ │ │ │ ├── gradle-wrapper.jar │ │ │ │ │ └── gradle-wrapper.properties │ │ │ │ ├── gradle.properties │ │ │ │ ├── gradlew │ │ │ │ ├── gradlew.bat │ │ │ │ └── settings.gradle │ │ │ └── prebuilt/ │ │ │ └── README.md │ │ ├── dioxus.toml │ │ ├── ios/ │ │ │ └── ios.plist.hbs │ │ ├── macos/ │ │ │ └── mac.plist.hbs │ │ └── web/ │ │ ├── dev.index.html │ │ ├── dev.loading.html │ │ └── prod.index.html │ ├── build.rs │ ├── schema.json │ └── src/ │ ├── build/ │ │ ├── assets.rs │ │ ├── builder.rs │ │ ├── cache.rs │ │ ├── context.rs │ │ ├── ios_swift.rs │ │ ├── manifest.rs │ │ ├── manifest_mapper.rs │ │ ├── 
mod.rs │ │ ├── patch.rs │ │ ├── pre_render.rs │ │ ├── request.rs │ │ └── tools.rs │ ├── bundle_utils.rs │ ├── cargo_toml.rs │ ├── cli/ │ │ ├── autoformat.rs │ │ ├── build.rs │ │ ├── build_assets.rs │ │ ├── bundle.rs │ │ ├── check.rs │ │ ├── component.rs │ │ ├── config.rs │ │ ├── create.rs │ │ ├── doctor.rs │ │ ├── hotpatch.rs │ │ ├── init.rs │ │ ├── link.rs │ │ ├── mod.rs │ │ ├── platform_override.rs │ │ ├── print.rs │ │ ├── run.rs │ │ ├── serve.rs │ │ ├── target.rs │ │ ├── translate.rs │ │ ├── update.rs │ │ └── verbosity.rs │ ├── config/ │ │ ├── app.rs │ │ ├── bundle.rs │ │ ├── component.rs │ │ ├── dioxus_config.rs │ │ ├── inline_config.rs │ │ ├── manifest.rs │ │ ├── mod.rs │ │ ├── serve.rs │ │ └── web.rs │ ├── devcfg.rs │ ├── dx_build_info.rs │ ├── error.rs │ ├── fastfs.rs │ ├── logging.rs │ ├── main.rs │ ├── platform.rs │ ├── rustcwrapper.rs │ ├── serve/ │ │ ├── ansi_buffer.rs │ │ ├── mod.rs │ │ ├── output.rs │ │ ├── proxy.rs │ │ ├── proxy_ws.rs │ │ ├── runner.rs │ │ ├── server.rs │ │ └── update.rs │ ├── settings.rs │ ├── tailwind.rs │ ├── test_harnesses.rs │ ├── wasm_bindgen.rs │ ├── wasm_opt.rs │ └── workspace.rs ├── cli-config/ │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ └── src/ │ └── lib.rs ├── cli-harnesses/ │ ├── .gitignore │ ├── README.md │ ├── harness-default-to-non-default/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-desktop/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-desktop-with-default/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-desktop-with-features/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-multi-target/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-multi-target-no-default/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-fullstack-with-optional-tokio/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-no-dioxus/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── 
harness-renderer-swap/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-dedicated-client/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-dedicated-server/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-desktop/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-fullstack/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-fullstack-native-with-default/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-fullstack-with-default/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-mobile/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-simple-web/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── harness-web-with-default-features/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ └── harness-web-with-no-default-features/ │ ├── Cargo.toml │ └── src/ │ └── main.rs ├── cli-opt/ │ ├── Cargo.toml │ ├── build.rs │ └── src/ │ ├── build_info.rs │ ├── css.rs │ ├── file.rs │ ├── folder.rs │ ├── hash.rs │ ├── image/ │ │ ├── jpg.rs │ │ ├── mod.rs │ │ └── png.rs │ ├── js.rs │ ├── json.rs │ └── lib.rs ├── cli-telemetry/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── component-manifest/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── config-macro/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ └── lib.rs ├── config-macros/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ └── lib.rs ├── const-serialize/ │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── array.rs │ │ ├── cbor.rs │ │ ├── const_buffers.rs │ │ ├── const_vec.rs │ │ ├── enum.rs │ │ ├── lib.rs │ │ ├── list.rs │ │ ├── primitive.rs │ │ ├── str.rs │ │ └── struct.rs │ └── tests/ │ ├── enum.rs │ ├── eq.rs │ ├── lists.rs │ ├── primitive.rs │ ├── str.rs │ ├── structs.rs │ └── tuples.rs ├── const-serialize-macro/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── core/ │ ├── .vscode/ │ │ ├── settings.json │ │ └── spellright.dict │ ├── Cargo.toml │ ├── README.md │ ├── docs/ │ │ 
├── common_spawn_errors.md │ │ └── reactivity.md │ ├── src/ │ │ ├── any_props.rs │ │ ├── arena.rs │ │ ├── diff/ │ │ │ ├── component.rs │ │ │ ├── iterator.rs │ │ │ ├── mod.rs │ │ │ └── node.rs │ │ ├── effect.rs │ │ ├── error_boundary.rs │ │ ├── events.rs │ │ ├── fragment.rs │ │ ├── generational_box.rs │ │ ├── global_context.rs │ │ ├── hotreload_utils.rs │ │ ├── launch.rs │ │ ├── lib.rs │ │ ├── mutations.rs │ │ ├── nodes.rs │ │ ├── properties.rs │ │ ├── reactive_context.rs │ │ ├── render_error.rs │ │ ├── root_wrapper.rs │ │ ├── runtime.rs │ │ ├── scheduler.rs │ │ ├── scope_arena.rs │ │ ├── scope_context.rs │ │ ├── scopes.rs │ │ ├── suspense/ │ │ │ ├── component.rs │ │ │ └── mod.rs │ │ ├── tasks.rs │ │ └── virtual_dom.rs │ └── tests/ │ ├── .rustfmt.toml │ ├── README.md │ ├── attr_cleanup.rs │ ├── attributes_pass.rs │ ├── boolattrs.rs │ ├── bubble_error.rs │ ├── children_drop_futures.rs │ ├── conditional_formatted_attributes.rs │ ├── context_api.rs │ ├── create_dom.rs │ ├── create_fragments.rs │ ├── create_lists.rs │ ├── create_passthru.rs │ ├── cycle.rs │ ├── diff_component.rs │ ├── diff_dynamic_node.rs │ ├── diff_element.rs │ ├── diff_keyed_list.rs │ ├── diff_unkeyed_list.rs │ ├── error_boundary.rs │ ├── event_propagation.rs │ ├── fuzzing.rs │ ├── hotreloading.rs │ ├── kitchen_sink.rs │ ├── lifecycle.rs │ ├── many_roots.rs │ ├── memory_leak.rs │ ├── miri_full_app.rs │ ├── miri_simple.rs │ ├── miri_stress.rs │ ├── safety.rs │ ├── suspense.rs │ ├── task.rs │ ├── tracing.rs │ └── use_drop.rs ├── core-macro/ │ ├── .vscode/ │ │ └── settings.json │ ├── Cargo.toml │ ├── README.md │ ├── docs/ │ │ ├── component.md │ │ ├── props.md │ │ └── rsx.md │ ├── src/ │ │ ├── component.rs │ │ ├── lib.rs │ │ ├── props/ │ │ │ └── mod.rs │ │ └── utils.rs │ └── tests/ │ ├── event_handler.rs │ ├── generics.rs │ ├── rsx/ │ │ ├── trailing-comma-0.rs │ │ └── trailing-comma-0.stderr │ ├── rsx.rs │ └── values_memoize_in_place.rs ├── core-types/ │ ├── Cargo.toml │ └── src/ │ ├── attributes.rs │ ├── 
bubbles.rs │ ├── bundled.rs │ ├── formatter.rs │ ├── hr_context.rs │ └── lib.rs ├── depinfo/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── dx.d │ └── lib.rs ├── desktop/ │ ├── .vscode/ │ │ └── settings.json │ ├── Cargo.toml │ ├── README.md │ ├── build.rs │ ├── headless_tests/ │ │ ├── eval.rs │ │ ├── events.rs │ │ ├── forms.rs │ │ ├── rendering.rs │ │ └── utils.rs │ ├── src/ │ │ ├── android_sync_lock.rs │ │ ├── app.rs │ │ ├── assets/ │ │ │ ├── dev.index.html │ │ │ └── prod.index.html │ │ ├── assets.rs │ │ ├── config.rs │ │ ├── desktop_context.rs │ │ ├── document.rs │ │ ├── edits.rs │ │ ├── element.rs │ │ ├── event_handlers.rs │ │ ├── events.rs │ │ ├── file_upload.rs │ │ ├── hooks.rs │ │ ├── ipc.rs │ │ ├── js/ │ │ │ ├── hash.txt │ │ │ └── native_eval.js │ │ ├── launch.rs │ │ ├── lib.rs │ │ ├── menubar.rs │ │ ├── mobile.rs │ │ ├── mobile_shortcut.rs │ │ ├── protocol.rs │ │ ├── query.rs │ │ ├── readme.md │ │ ├── shortcut.rs │ │ ├── trayicon.rs │ │ ├── ts/ │ │ │ └── native_eval.ts │ │ ├── waker.rs │ │ └── webview.rs │ └── tsconfig.json ├── devtools/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── devtools-types/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── dioxus/ │ ├── Cargo.toml │ ├── README.md │ ├── benches/ │ │ └── jsframework.rs │ └── src/ │ ├── launch.rs │ └── lib.rs ├── document/ │ ├── Cargo.toml │ ├── assets/ │ │ ├── script.js │ │ └── style.css │ ├── build.rs │ ├── docs/ │ │ ├── eval.md │ │ └── head.md │ ├── src/ │ │ ├── document.rs │ │ ├── elements/ │ │ │ ├── link.rs │ │ │ ├── meta.rs │ │ │ ├── mod.rs │ │ │ ├── script.rs │ │ │ ├── style.rs │ │ │ ├── stylesheet.rs │ │ │ └── title.rs │ │ ├── error.rs │ │ ├── eval.rs │ │ ├── js/ │ │ │ ├── hash.txt │ │ │ └── head.js │ │ ├── lib.rs │ │ └── ts/ │ │ ├── .gitignore │ │ ├── eval.ts │ │ └── head.ts │ └── tsconfig.json ├── dx-wire-format/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── extension/ │ ├── .eslintrc.js │ ├── .gitignore │ ├── .vscode/ │ │ ├── launch.json │ │ └── tasks.json │ ├── Cargo.toml │ ├── DEV.md │ ├── 
LICENSE.txt │ ├── README.md │ ├── package.json │ ├── src/ │ │ ├── lib.rs │ │ └── main.ts │ ├── tsconfig.json │ └── webpack.config.js ├── fullstack/ │ ├── .vscode/ │ │ └── settings.json │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── client.rs │ │ ├── encoding.rs │ │ ├── lazy.rs │ │ ├── lib.rs │ │ ├── magic.rs │ │ ├── payloads/ │ │ │ ├── axum_types.rs │ │ │ ├── cbor.rs │ │ │ ├── files.rs │ │ │ ├── form.rs │ │ │ ├── header.rs │ │ │ ├── msgpack.rs │ │ │ ├── multipart.rs │ │ │ ├── postcard.rs │ │ │ ├── query.rs │ │ │ ├── redirect.rs │ │ │ ├── sse.rs │ │ │ ├── stream.rs │ │ │ ├── text.rs │ │ │ └── websocket.rs │ │ ├── request.rs │ │ └── spawn.rs │ └── tests/ │ └── compile-test.rs ├── fullstack-core/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── document.rs │ ├── error.rs │ ├── errors.rs │ ├── history.rs │ ├── httperror.rs │ ├── lib.rs │ ├── loader.rs │ ├── server_cached.rs │ ├── server_future.rs │ ├── streaming.rs │ └── transport.rs ├── fullstack-macro/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── fullstack-server/ │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ └── src/ │ ├── config.rs │ ├── document.rs │ ├── index_html.rs │ ├── isrg/ │ │ ├── config.rs │ │ ├── freshness.rs │ │ ├── fs_cache.rs │ │ ├── memory_cache.rs │ │ └── mod.rs │ ├── launch.rs │ ├── lib.rs │ ├── redirect.rs │ ├── server.rs │ ├── serverfn.rs │ ├── ssr.rs │ └── streaming.rs ├── generational-box/ │ ├── Cargo.toml │ ├── README.md │ ├── benches/ │ │ └── lock.rs │ ├── src/ │ │ ├── entry.rs │ │ ├── error.rs │ │ ├── lib.rs │ │ ├── references.rs │ │ ├── sync.rs │ │ └── unsync.rs │ └── tests/ │ ├── basic.rs │ ├── errors.rs │ ├── reference_counting.rs │ ├── reused.rs │ └── sync.rs ├── history/ │ ├── Cargo.toml │ └── src/ │ ├── lib.rs │ └── memory.rs ├── hooks/ │ ├── Cargo.toml │ ├── README.md │ ├── docs/ │ │ ├── derived_state.md │ │ ├── moving_state_around.md │ │ ├── rules_of_hooks.md │ │ ├── side_effects.md │ │ └── use_resource.md │ ├── src/ │ │ ├── lib.rs │ │ ├── use_action.rs │ │ ├── 
use_after_suspense_resolved.rs │ │ ├── use_callback.rs │ │ ├── use_collection.rs │ │ ├── use_context.rs │ │ ├── use_coroutine.rs │ │ ├── use_effect.rs │ │ ├── use_future.rs │ │ ├── use_hook_did_run.rs │ │ ├── use_memo.rs │ │ ├── use_on_destroy.rs │ │ ├── use_reactive.rs │ │ ├── use_resource.rs │ │ ├── use_root_context.rs │ │ ├── use_set_compare.rs │ │ ├── use_signal.rs │ │ ├── use_sorted.rs │ │ └── use_waker.rs │ └── tests/ │ ├── effect.rs │ └── memo.rs ├── html/ │ ├── Cargo.toml │ ├── README.md │ ├── docs/ │ │ ├── common_event_handler_errors.md │ │ └── event_handlers.md │ ├── src/ │ │ ├── attribute_groups.rs │ │ ├── data_transfer.rs │ │ ├── elements.rs │ │ ├── events/ │ │ │ ├── animation.rs │ │ │ ├── cancel.rs │ │ │ ├── clipboard.rs │ │ │ ├── composition.rs │ │ │ ├── drag.rs │ │ │ ├── focus.rs │ │ │ ├── form.rs │ │ │ ├── image.rs │ │ │ ├── keyboard.rs │ │ │ ├── media.rs │ │ │ ├── mod.rs │ │ │ ├── mounted.rs │ │ │ ├── mouse.rs │ │ │ ├── pointer.rs │ │ │ ├── resize.rs │ │ │ ├── scroll.rs │ │ │ ├── selection.rs │ │ │ ├── toggle.rs │ │ │ ├── touch.rs │ │ │ ├── transition.rs │ │ │ ├── visible.rs │ │ │ └── wheel.rs │ │ ├── file_data.rs │ │ ├── geometry.rs │ │ ├── input_data.rs │ │ ├── lib.rs │ │ ├── point_interaction.rs │ │ ├── render_template.rs │ │ └── transit.rs │ └── tsconfig.json ├── html-internal-macro/ │ ├── Cargo.toml │ ├── src/ │ │ └── lib.rs │ └── tests/ │ ├── 01-simple.rs │ └── progress.rs ├── interpreter/ │ ├── .gitignore │ ├── Cargo.toml │ ├── NOTES.md │ ├── README.md │ ├── build.rs │ ├── src/ │ │ ├── js/ │ │ │ ├── common.js │ │ │ ├── core.js │ │ │ ├── hash.txt │ │ │ ├── hydrate.js │ │ │ ├── initialize_streaming.js │ │ │ ├── native.js │ │ │ ├── patch_console.js │ │ │ └── set_attribute.js │ │ ├── lib.rs │ │ ├── ts/ │ │ │ ├── .gitignore │ │ │ ├── core.ts │ │ │ ├── hydrate.ts │ │ │ ├── hydrate_types.ts │ │ │ ├── initialize_streaming.ts │ │ │ ├── native.ts │ │ │ ├── patch_console.ts │ │ │ ├── serialize.ts │ │ │ └── set_attribute.ts │ │ ├── unified_bindings.rs │ 
│ └── write_native_mutations.rs │ ├── tests/ │ │ ├── e2e.rs │ │ └── serialize.rs │ └── tsconfig.json ├── lazy-js-bundle/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── liveview/ │ ├── Cargo.toml │ ├── README.md │ ├── examples/ │ │ ├── axum.rs │ │ └── axum_stress.rs │ └── src/ │ ├── adapters/ │ │ ├── axum_adapter.rs │ │ └── mod.rs │ ├── config.rs │ ├── document.rs │ ├── element.rs │ ├── events.rs │ ├── history.rs │ ├── index.html │ ├── launch.rs │ ├── lib.rs │ ├── main.js │ ├── pool.rs │ └── query.rs ├── logger/ │ ├── Cargo.toml │ ├── README.md │ └── src/ │ └── lib.rs ├── manganis/ │ ├── manganis/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── assets/ │ │ │ ├── script.js │ │ │ └── style.css │ │ └── src/ │ │ ├── android/ │ │ │ ├── activity.rs │ │ │ ├── callback.rs │ │ │ ├── java.rs │ │ │ ├── metadata.rs │ │ │ └── mod.rs │ │ ├── darwin/ │ │ │ └── mod.rs │ │ ├── lib.rs │ │ └── macro_helpers.rs │ ├── manganis-07/ │ │ ├── Cargo.toml │ │ └── src/ │ │ ├── asset.rs │ │ ├── css.rs │ │ ├── css_module.rs │ │ ├── folder.rs │ │ ├── images.rs │ │ ├── js.rs │ │ ├── lib.rs │ │ └── options.rs │ ├── manganis-core/ │ │ ├── Cargo.toml │ │ ├── assets/ │ │ │ ├── script.js │ │ │ └── style.css │ │ └── src/ │ │ ├── asset.rs │ │ ├── css.rs │ │ ├── css_module.rs │ │ ├── css_module_parser.rs │ │ ├── ffi.rs │ │ ├── folder.rs │ │ ├── images.rs │ │ ├── js.rs │ │ ├── lib.rs │ │ └── options.rs │ └── manganis-macro/ │ ├── Cargo.toml │ ├── README.md │ ├── assets/ │ │ ├── asset.txt │ │ ├── script.js │ │ └── style.css │ ├── src/ │ │ ├── asset.rs │ │ ├── css_module.rs │ │ ├── ffi.rs │ │ ├── lib.rs │ │ └── linker.rs │ └── tests/ │ └── option_asset.rs ├── native/ │ ├── Cargo.toml │ └── src/ │ ├── assets.rs │ ├── config.rs │ ├── contexts.rs │ ├── dioxus_application.rs │ ├── dioxus_renderer.rs │ ├── lib.rs │ ├── link_handler.rs │ └── prelude.rs ├── native-dom/ │ ├── Cargo.toml │ └── src/ │ ├── dioxus_document.rs │ ├── events.rs │ ├── lib.rs │ └── mutation_writer.rs ├── playwright-tests/ │ ├── .gitignore │ ├── 
barebones-template/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── Dioxus.toml │ │ ├── README.md │ │ ├── assets/ │ │ │ └── main.css │ │ └── src/ │ │ └── main.rs │ ├── cli-optimization/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── assets/ │ │ │ └── data.json │ │ ├── build.rs │ │ └── src/ │ │ ├── lib.rs │ │ ├── main.rs │ │ └── old_cli.rs │ ├── cli-optimization.spec.js │ ├── default-features-disabled/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── default-features-disabled.spec.js │ ├── fullstack/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── Dioxus.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-error-codes/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-error-codes.spec.js │ ├── fullstack-errors/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-errors.spec.js │ ├── fullstack-hydration-order/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-hydration-order.spec.js │ ├── fullstack-mounted/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-mounted.spec.js │ ├── fullstack-routing/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-routing.spec.js │ ├── fullstack-spread/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── fullstack-spread.spec.js │ ├── fullstack.spec.js │ ├── liveview/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── liveview.spec.js │ ├── nested-suspense/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── assets/ │ │ │ └── style.css │ │ └── src/ │ │ ├── lib.rs │ │ ├── main.rs │ │ └── ssg.rs │ ├── nested-suspense-no-js.spec.js │ ├── nested-suspense-ssg.spec.js │ ├── nested-suspense.spec.js │ ├── package.json │ ├── playwright.config.js │ ├── suspense-carousel/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── suspense-carousel.spec.js │ ├── wasm-split-harness/ │ │ ├── .cargo/ │ │ │ └── config.toml │ │ ├── Cargo.toml │ │ ├── data/ │ │ │ ├── .gitignore │ │ │ └── index.html │ │ 
├── docsite.sh │ │ ├── run.sh │ │ └── src/ │ │ ├── main.rs │ │ └── stars.js │ ├── wasm-split.spec.js │ ├── web/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── web-hash-routing/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── web-hash-routing.spec.js │ ├── web-hot-patch/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── assets/ │ │ │ ├── alternative-style.css │ │ │ └── style.css │ │ └── src/ │ │ └── main.rs │ ├── web-hot-patch-fullstack/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ ├── assets/ │ │ │ ├── alternative-style.css │ │ │ └── style.css │ │ └── src/ │ │ └── main.rs │ ├── web-patch-fullstack.spec.js │ ├── web-patch.spec.js │ ├── web-routing/ │ │ ├── .gitignore │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ ├── web-routing.spec.js │ ├── web.spec.js │ ├── windows-headless/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── main.rs │ └── windows.spec.js ├── router/ │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── components/ │ │ │ ├── child_router.rs │ │ │ ├── default_errors.rs │ │ │ ├── history_buttons.rs │ │ │ ├── history_provider.rs │ │ │ ├── link.rs │ │ │ ├── outlet.rs │ │ │ └── router.rs │ │ ├── contexts/ │ │ │ ├── navigator.rs │ │ │ ├── outlet.rs │ │ │ └── router.rs │ │ ├── hooks/ │ │ │ ├── use_navigator.rs │ │ │ ├── use_route.rs │ │ │ └── use_router.rs │ │ ├── lib.rs │ │ ├── navigation.rs │ │ ├── routable.rs │ │ ├── router_cfg.rs │ │ └── utils/ │ │ └── use_router_internal.rs │ └── tests/ │ ├── parent.rs │ ├── parsing.rs │ ├── site_map.rs │ └── via_ssr/ │ ├── child_outlet.rs │ ├── link.rs │ ├── main.rs │ ├── navigation.rs │ ├── outlet.rs │ ├── redirect.rs │ └── without_index.rs ├── router-macro/ │ ├── Cargo.toml │ └── src/ │ ├── hash.rs │ ├── layout.rs │ ├── lib.rs │ ├── nest.rs │ ├── query.rs │ ├── redirect.rs │ ├── route.rs │ ├── route_tree.rs │ └── segment.rs ├── rsx/ │ ├── .vscode/ │ │ └── settings.json │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── assign_dyn_ids.rs │ │ ├── attribute.rs │ │ ├── component.rs │ 
│ ├── diagnostics.rs │ │ ├── element.rs │ │ ├── expr_node.rs │ │ ├── forloop.rs │ │ ├── ifchain.rs │ │ ├── ifmt.rs │ │ ├── lib.rs │ │ ├── literal.rs │ │ ├── location.rs │ │ ├── node.rs │ │ ├── partial_closure.rs │ │ ├── raw_expr.rs │ │ ├── rsx_block.rs │ │ ├── rsx_call.rs │ │ ├── template_body.rs │ │ ├── text_node.rs │ │ └── util.rs │ └── tests/ │ └── parsing.rs ├── rsx-hotreload/ │ ├── Cargo.toml │ ├── src/ │ │ ├── collect.rs │ │ ├── diff.rs │ │ ├── extensions.rs │ │ ├── last_build_state.rs │ │ └── lib.rs │ └── tests/ │ ├── hotreload_pattern.rs │ ├── hotreloads.rs │ └── valid/ │ ├── combo.new.rsx │ ├── combo.old.rsx │ ├── expr.new.rsx │ ├── expr.old.rsx │ ├── for_.new.rsx │ ├── for_.old.rsx │ ├── if_.new.rsx │ ├── if_.old.rsx │ ├── let_.new.rsx │ ├── let_.old.rsx │ ├── nested.new.rsx │ └── nested.old.rsx ├── rsx-rosetta/ │ ├── Cargo.toml │ ├── README.md │ ├── examples/ │ │ └── html.rs │ ├── src/ │ │ └── lib.rs │ └── tests/ │ ├── escape.rs │ ├── h-tags.rs │ ├── raw.rs │ ├── simple.rs │ ├── svgs.rs │ └── web-component.rs ├── signals/ │ ├── Cargo.toml │ ├── README.md │ ├── docs/ │ │ ├── hoist/ │ │ │ ├── error.rs │ │ │ └── fixed_list.rs │ │ ├── memo.md │ │ └── signals.md │ ├── examples/ │ │ ├── context.rs │ │ ├── dependencies.rs │ │ ├── map_signal.rs │ │ ├── read_only_degrade.rs │ │ ├── selector.rs │ │ ├── send.rs │ │ ├── send_store.rs │ │ └── split_subscriptions.rs │ ├── src/ │ │ ├── boxed.rs │ │ ├── copy_value.rs │ │ ├── global/ │ │ │ ├── memo.rs │ │ │ ├── mod.rs │ │ │ └── signal.rs │ │ ├── impls.rs │ │ ├── lib.rs │ │ ├── map.rs │ │ ├── map_mut.rs │ │ ├── memo.rs │ │ ├── props.rs │ │ ├── read.rs │ │ ├── set_compare.rs │ │ ├── signal.rs │ │ ├── warnings.rs │ │ └── write.rs │ └── tests/ │ ├── create.rs │ ├── memo.rs │ └── subscribe.rs ├── ssr/ │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── cache.rs │ │ ├── config.rs │ │ ├── lib.rs │ │ ├── renderer.rs │ │ └── template.rs │ └── tests/ │ ├── bool_attr.rs │ ├── escape.rs │ ├── forward_spreads.rs │ ├── hydration.rs │ 
├── inner_html.rs │ ├── simple.rs │ ├── spread.rs │ └── styles.rs ├── stores/ │ ├── Cargo.toml │ ├── README.md │ ├── src/ │ │ ├── impls/ │ │ │ ├── btreemap.rs │ │ │ ├── deref.rs │ │ │ ├── hashmap.rs │ │ │ ├── index.rs │ │ │ ├── mod.rs │ │ │ ├── option.rs │ │ │ ├── result.rs │ │ │ ├── slice.rs │ │ │ └── vec.rs │ │ ├── lib.rs │ │ ├── scope.rs │ │ ├── store.rs │ │ └── subscriptions.rs │ └── tests/ │ ├── coercions.rs │ └── marco.rs ├── stores-macro/ │ ├── Cargo.toml │ └── src/ │ ├── derive.rs │ ├── extend.rs │ └── lib.rs ├── subsecond/ │ ├── README.md │ ├── subsecond/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ ├── subsecond-tests/ │ │ ├── cross-tls-crate/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── lib.rs │ │ ├── cross-tls-crate-dylib/ │ │ │ ├── Cargo.toml │ │ │ └── src/ │ │ │ └── lib.rs │ │ └── cross-tls-test/ │ │ ├── Cargo.toml │ │ ├── README.md │ │ └── src/ │ │ └── main.rs │ └── subsecond-types/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs ├── wasm-split/ │ ├── README.md │ ├── wasm-split/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ ├── wasm-split-cli/ │ │ ├── Cargo.toml │ │ ├── data/ │ │ │ └── .gitignore │ │ └── src/ │ │ ├── __wasm_split.js │ │ ├── lib.rs │ │ └── main.rs │ ├── wasm-split-macro/ │ │ ├── Cargo.toml │ │ └── src/ │ │ └── lib.rs │ └── wasm-used/ │ ├── Cargo.toml │ └── src/ │ └── lib.rs └── web/ ├── .gitignore ├── .vscode/ │ └── settings.json ├── Cargo.toml ├── README.md ├── build.rs └── src/ ├── cfg.rs ├── data_transfer.rs ├── devtools.rs ├── document.rs ├── dom.rs ├── events/ │ ├── animation.rs │ ├── cancel.rs │ ├── clipboard.rs │ ├── composition.rs │ ├── drag.rs │ ├── file.rs │ ├── focus.rs │ ├── form.rs │ ├── keyboard.rs │ ├── load.rs │ ├── media.rs │ ├── mod.rs │ ├── mounted.rs │ ├── mouse.rs │ ├── pointer.rs │ ├── resize.rs │ ├── scroll.rs │ ├── selection.rs │ ├── toggle.rs │ ├── touch.rs │ ├── transition.rs │ ├── visible.rs │ └── wheel.rs ├── files.rs ├── history.rs ├── hydration/ │ ├── hydrate.rs │ └── mod.rs ├── js/ │ ├── eval.js │ └── 
hash.txt ├── launch.rs ├── lib.rs ├── mutations.rs └── ts/ └── eval.ts ================================================ FILE CONTENTS ================================================ ================================================ FILE: .devcontainer/Dockerfile ================================================ ARG VARIANT="nightly-bookworm-slim" FROM rustlang/rust:${VARIANT} ENV DEBIAN_FRONTEND=noninteractive # Install required system libraries and NPM # Reference: https://dioxuslabs.com/learn/0.7/beyond/contributing#before-you-contribute RUN apt-get update -qq \ && apt-get install -y -qq \ libgdk3.0-cil \ libatk1.0-dev \ libcairo2-dev \ libpango1.0-dev \ libgdk-pixbuf2.0-dev \ libsoup-3.0-dev \ libjavascriptcoregtk-4.1-dev \ libwebkit2gtk-4.1-dev \ npm \ && rm -rf /var/lib/apt/lists/* # Set a shared folder for pre-installed browsers ENV PLAYWRIGHT_BROWSERS_PATH=/ms-playwright # Temporarily install Playwright globally to install the browsers and their dependencies RUN npm install -g @playwright/test && \ npx playwright install --with-deps && \ npm uninstall -g @playwright/test ================================================ FILE: .devcontainer/README.md ================================================ # Dev Container A dev container in the most simple context allows one to create a consistent development environment within a docker container that can easily be opened locally or remotely via codespaces such that contributors don't need to install anything to contribute. ## Useful Links - - - - ## Using A Dev Container ### Locally To use this dev container locally, make sure Docker is installed and in VSCode install the `ms-vscode-remote.remote-containers` extension. Then from the root of Dioxus you can type `Ctrl + Shift + P`, then choose `Dev Containers: Rebuild and Reopen in Devcontainer`. 
### Codespaces [Codespaces Setup](https://docs.github.com/en/codespaces/developing-in-codespaces/creating-a-codespace-for-a-repository#creating-a-codespace-for-a-repository) ### Playwright Tests The dev container comes with Playwright dependencies pre-installed. You can run the tests located in `packages/playwright-tests` by using the VSCode extension or by executing commands such as: ```bash # Run all tests npx playwright test # Run tests using the UI mode npx playwright test --ui-host=0.0.0.0 ``` ## Troubleshooting If having difficulty committing with GitHub, and you use ssh or gpg keys, you may need to ensure that the keys are being shared properly between your host and VSCode. Though VSCode does a pretty good job sharing credentials between host and devcontainer, to save some time you can always just reopen the container locally to commit with `Ctrl + Shift + P`, then choose `Dev Containers: Reopen Folder Locally` ================================================ FILE: .devcontainer/devcontainer.json ================================================ { "name": "dioxus", "remoteUser": "vscode", "build": { "dockerfile": "./Dockerfile", "context": "." 
}, "features": { "ghcr.io/devcontainers/features/common-utils:2": { "installZsh": "true", "username": "vscode", "uid": "1000", "gid": "1000", "upgradePackages": "true" } }, "containerEnv": { "RUST_LOG": "INFO" }, "customizations": { "vscode": { "settings": { "files.watcherExclude": { "**/target/**": true }, "[rust]": { "editor.formatOnSave": true } }, "extensions": [ "rust-lang.rust-analyzer", "tamasfe.even-better-toml", "fill-labs.dependi", "ms-playwright.playwright" ] } } } ================================================ FILE: .github/CODEOWNERS ================================================ # borrowed from tauri - only allow core maintainers to approve PRs * @DioxusLabs/core .github @DioxusLabs/core ================================================ FILE: .github/FUNDING.yml ================================================ # These are supported funding model platforms github: DioxusLabs # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] open_collective: dioxus-labs # Replace with a single Open Collective username ================================================ FILE: .github/ISSUE_TEMPLATE/bug_report.md ================================================ --- name: Bug report about: Create a report to help us improve Dioxus labels: bug --- **Problem** **Steps To Reproduce** Steps to reproduce the behavior: - - - **Expected behavior** **Screenshots** **Environment:** - Dioxus version: - Rust version: - OS info: - App platform: **Questionnaire** ================================================ FILE: .github/ISSUE_TEMPLATE/feature_requst.md ================================================ --- name: Feature Request about: If you have any interesting advice, you can tell us. 
labels: enhancement --- ## Feature Request ## Implement Suggestion ================================================ FILE: .github/actions/free-disk-space/action.yml ================================================ name: Free Disk Space description: Free up disk space on the runner runs: using: composite steps: - name: Free Disk Space (Ubuntu) if: runner.os == 'Linux' shell: bash run: | echo "Freeing up disk space..." sudo rm -rf /opt/ghc sudo rm -rf /usr/share/dotnet sudo rm -rf /usr/local/lib/android sudo rm -rf /usr/share/swift sudo docker image prune --all --force || true ================================================ FILE: .github/dependabot.yml ================================================ version: 2 updates: # Maintain dependencies for GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" ================================================ FILE: .github/install.ps1 ================================================ #!/usr/bin/env pwsh $ErrorActionPreference = 'Stop' if ($v) { $Version = "v${v}" } if ($Args.Length -eq 1) { $Version = $Args.Get(0) } $DxInstall = $env:DX_INSTALL $BinDir = if ($DxInstall) { "${DxInstall}\bin" } else { "${Home}\.dx\bin" } $DxZip = "$BinDir\dx.zip" $DxExe = "$BinDir\dx.exe" $Target = 'x86_64-pc-windows-msvc' $DownloadUrl = if (!$Version) { "https://github.com/dioxuslabs/dioxus/releases/latest/download/dx-${target}.zip" } else { "https://github.com/dioxuslabs/dioxus/releases/download/${Version}/dx-${target}.zip" } if (!(Test-Path $BinDir)) { New-Item $BinDir -ItemType Directory | Out-Null } curl.exe --ssl-revoke-best-effort -Lo $DxZip $DownloadUrl tar.exe xf $DxZip -C $BinDir Remove-Item $DxZip $CargoBin = "${Home}\.cargo\bin" if (!(Test-Path $CargoBin)) { New-Item $CargoBin -ItemType Directory | Out-Null } Copy-Item $DxExe "$CargoBin\dx.exe" -Force # $User = [System.EnvironmentVariableTarget]::User # $Path = [System.Environment]::GetEnvironmentVariable('Path', $User) # if 
(!(";${Path};".ToLower() -like "*;${BinDir};*".ToLower())) { # [System.Environment]::SetEnvironmentVariable('Path', "${Path};${BinDir}", $User) # $Env:Path += ";${BinDir}" # } Write-Output "dx was installed successfully! 💫" Write-Output "Run 'dx --help' to get started" ================================================ FILE: .github/install.sh ================================================ #!/bin/sh set -eo pipefail # Reset Color_Off='' # Regular Colors Red='' Green='' Dim='' # White # Bold Bold_White='' Bold_Green='' if [ -t 1 ]; then # Reset Color_Off='\033[0m' # Text Reset # Regular Colors Red='\033[0;31m' # Red Green='\033[0;32m' # Green Dim='\033[0;2m' # White # Bold Bold_Green='\033[1;32m' # Bold Green Bold_White='\033[1m' # Bold White fi error() { printf "${Red}error${Color_Off}: %s\n" "$*" >&2 exit 1 } info() { printf "${Dim}%s ${Color_Off}\n" "$*" } info_bold() { printf "${Bold_White}%s ${Color_Off}\n" "$*" } success() { printf "${Green}%s ${Color_Off}\n" "$*" } command -v unzip >/dev/null || error 'unzip is required to install dx' if [ $# -gt 2 ]; then error 'Too many arguments, only 2 are allowed. The first can be a specific tag of dx to install. (e.g. "dx-v0.7.1") or `nightly` or `pr ` to install the latest nightly or PR build.' 
fi if [ "$OS" = "Windows_NT" ]; then target="x86_64-pc-windows-msvc" else case $(uname -sm) in "Darwin x86_64") target="x86_64-apple-darwin" ;; "Darwin arm64") target="aarch64-apple-darwin" ;; "Linux aarch64") if [ -f /etc/alpine-release ]; then target="aarch64-unknown-linux-musl" else target="aarch64-unknown-linux-gnu" fi ;; *) if [ -f /etc/alpine-release ]; then target="x86_64-unknown-linux-musl" else target="x86_64-unknown-linux-gnu" fi ;; esac fi GITHUB=${GITHUB-"https://github.com"} github_repo="$GITHUB/dioxuslabs/dioxus" exe_name=dx if [ $# = 0 ]; then dx_uri=$github_repo/releases/latest/download/dx-$target.zip else dx_uri=$github_repo/releases/download/$1/dx-$target.zip fi if [ -n "$DX_INSTALL" ]; then dx_install="$DX_INSTALL" elif [ -n "$XDG_DATA_HOME" ]; then dx_install="$XDG_DATA_HOME/dx" else dx_install="$HOME/.dx" fi bin_dir="$dx_install/bin" exe="$bin_dir/dx" cargo_bin_dir="${CARGO_HOME:-$HOME/.cargo}/bin" cargo_bin_exe="$cargo_bin_dir/dx" if [ ! -d "$bin_dir" ]; then mkdir -p "$bin_dir" fi curl --fail --location --progress-bar --output "$exe.zip" "$dx_uri" if command -v unzip >/dev/null; then unzip -d "$bin_dir" -o "$exe.zip" else 7z x -o"$bin_dir" -y "$exe.zip" fi chmod +x "$exe" cp "$exe" "$cargo_bin_exe" || error "Failed to copy dx to $cargo_bin_dir" rm "$exe.zip" echo " installed: $cargo_bin_exe" echo echo "dx was installed successfully! 
💫" echo if command -v dx >/dev/null; then echo "Run 'dx --help' to get started" else echo "Run '$exe --help' to get started" fi ================================================ FILE: .github/workflows/main.yml ================================================ # Whenever an open PR is updated, the workflow will be triggered # # This can get expensive, so we do a lot of caching and checks to prevent unnecessary runs name: Rust CI on: push: branches: - main paths: - packages/** - examples/** - docs/guide/** - src/** - .github/** - lib.rs - Cargo.toml - Makefile.toml pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: - main paths: - packages/** - examples/** - src/** - .github/** - lib.rs - Cargo.toml # workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: 0 # todo(jon): cargo-cache wipes incremental artifacts, but we eventually want to cache them RUST_BACKTRACE: 1 rust_nightly: nightly-2025-10-05 jobs: check-msrv: if: github.event.pull_request.draft == false name: Check MSRV runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@1.88.0 - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" # https://github.com/foresterre/cargo-msrv/blob/4345edfe3f4fc91cc8ae6c7d6804c0748fae92ae/.github/workflows/msrv.yml - name: install_cargo_msrv run: cargo install cargo-msrv --all-features --version 0.16.3 --locked - name: version_of_cargo_msrv run: cargo msrv --version - name: run_cargo_msrv run: cargo msrv --output-format json verify -- cargo check - name: run_cargo_msrv_on_verify_failure if: ${{ failure() }} run: cargo msrv --output-format json -- cargo check test: if: github.event.pull_request.draft == false name: Test Suite runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - name: Free Disk Space uses: ./.github/actions/free-disk-space - 
uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - uses: dtolnay/rust-toolchain@1.88.0 with: components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - uses: browser-actions/setup-firefox@latest - run: cargo test --lib --bins --tests --examples --workspace --exclude dioxus-desktop release-test: if: github.event.pull_request.draft == false name: Test Suite with Optimizations runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - name: Free Disk Space uses: ./.github/actions/free-disk-space - uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - uses: dtolnay/rust-toolchain@1.88.0 with: components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - uses: browser-actions/setup-firefox@latest - run: cargo test --lib --bins --tests --examples --workspace --exclude dioxus-desktop --profile release-unoptimized fmt: if: github.event.pull_request.draft == false name: Rustfmt runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@1.88.0 with: components: rustfmt - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - run: cargo fmt --all -- --check schema: if: github.event.pull_request.draft == false name: Check Schema runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@1.88.0 - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - name: Generate schema run: cargo run -p dioxus-cli -- config schema --out packages/cli/schema.json - name: Check for uncommitted changes run: | if ! git diff --exit-code packages/cli/schema.json; then echo "::error::Schema is out of date. 
Run 'dx config schema --out packages/cli/schema.json' and commit the changes." exit 1 fi docs: if: github.event.pull_request.draft == false name: Docs runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - name: Install Rust ${{ env.rust_nightly }} uses: dtolnay/rust-toolchain@nightly with: toolchain: ${{ env.rust_nightly }} - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - name: "doc --lib --all-features" run: | cargo doc --workspace --no-deps --all-features --document-private-items env: RUSTDOCFLAGS: -Dwarnings --document-private-items test-docs: if: github.event.pull_request.draft == false name: Test Docs runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - name: Install Rust ${{ env.rust_nightly }} uses: dtolnay/rust-toolchain@nightly with: toolchain: ${{ env.rust_nightly }} - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - name: "doc --lib --all-features" run: | cargo test --doc --workspace --all-features check: if: github.event.pull_request.draft == false name: Check runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - uses: dtolnay/rust-toolchain@1.88.0 - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - run: cargo check --workspace --all-features --all-targets clippy: if: github.event.pull_request.draft == false name: Clippy runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: 
actions/checkout@v5 - uses: awalsh128/cache-apt-pkgs-action@latest with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: 1.0 - uses: dtolnay/rust-toolchain@1.90.0 with: components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" - run: cargo clippy --workspace --examples --tests --all-features --all-targets -- -D warnings nix: if: github.event.pull_request.draft == false runs-on: ${{ matrix.os }} strategy: matrix: os: [blacksmith-4vcpu-ubuntu-2404, macos-latest] steps: - uses: actions/checkout@v5 - uses: nixbuild/nix-quick-install-action@master - uses: nix-community/cache-nix-action@main with: primary-key: nix-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }} restore-prefixes-first-match: nix-${{ runner.os }}-${{ runner.arch }} - name: Install omnix run: nix --accept-flake-config profile install "github:juspay/omnix" - name: Build all flake outputs run: om ci - name: Ensure devShell has all build deps run: nix develop -c cargo build -p dioxus-cli --features no-downloads playwright: if: github.event.pull_request.draft == false name: Playwright Tests runs-on: ${{ matrix.os }} timeout-minutes: 45 strategy: matrix: os: [blacksmith-4vcpu-windows-2025, blacksmith-8vcpu-ubuntu-2404] platform: - { toolchain: beta } # - { toolchain: 1.86.0 } steps: # Do our best to cache the toolchain and node install steps - uses: actions/checkout@v5 - name: Free Disk Space if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'ubuntu-24.04-arm' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404-arm' || matrix.os == 'blacksmith-8vcpu-ubuntu-2404' }} uses: ./.github/actions/free-disk-space - uses: awalsh128/cache-apt-pkgs-action@latest if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'ubuntu-24.04-arm' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404-arm' || 
matrix.os == 'blacksmith-8vcpu-ubuntu-2404' }} with: packages: glib-networking glib-networking-common glib-networking-services libaa1 libabsl20220623t64 libass9 libasyncns0 libavc1394-0 libavcodec60 libavfilter9 libavformat60 libavtp0 libavutil58 libblas3 libbluray2 libbs2b0 libcaca0 libcairo-script-interpreter2 libcdparanoia0 libchromaprint1 libcjson1 libcodec2-1.2 libdav1d7 libdc1394-25 libdca0 libdecor-0-0 libdirectfb-1.7-7t64 libdv4t64 libdvdnav4 libdvdread8t64 libfaad2 libflac12t64 libfluidsynth3 libfreeaptx0 libgav1-1 libgme0 libgraphene-1.0-0 libgsm1 libgssdp-1.6-0 libgstreamer-plugins-good1.0-0 libgtk-4-common libgupnp-1.6-0 libgupnp-igd-1.6-0 libhwy1t64 libiec61883-0 libimath-3-1-29t64 libinstpatch-1.0-2 libjack-jackd2-0 libjxl0.7 liblapack3 liblc3-1 libldacbt-enc2 liblilv-0-0 liblrdf0 libltc11 libmbedcrypto7t64 libmfx1 libmjpegutils-2.1-0t64 libmodplug1 libmp3lame0 libmpcdec6 libmpeg2encpp-2.1-0t64 libmpg123-0t64 libmplex2-2.1-0t64 libmysofa1 libneon27t64 libnice10 libogg0 libopenal-data libopenal1 libopenexr-3-1-30 libopenh264-7 libopenmpt0t64 libopenni2-0 liborc-0.4-0t64 libpipewire-0.3-0t64 libplacebo338 libpocketsphinx3 libpostproc57 libproxy1v5 libpulse0 libqrencode4 libraptor2-0 librav1e0 libraw1394-11 librist4 librubberband2 libsamplerate0 libsbc1 libsdl2-2.0-0 libsecret-common libserd-0-0 libshine3 libshout3 libsndfile1 libsndio7.0 libsord-0-0 libsoundtouch1 libsoup-3.0-0 libsoup-3.0-common libsoxr0 libspa-0.2-modules libspandsp2t64 libspeex1 libsphinxbase3t64 libsratom-0-0 libsrt1.5-gnutls libsrtp2-1 libssh-gcrypt-4 libsvtav1enc1d1 libswresample4 libswscale7 libtag1v5 libtag1v5-vanilla libtheora0 libtwolame0 libudfread0 libunibreak5 libv4l-0t64 libv4lconvert0t64 libva-drm2 libva-x11-2 libva2 libvdpau1 libvidstab1.1 libvisual-0.4-0 libvo-aacenc0 libvo-amrwbenc0 libvorbis0a libvorbisenc2 libvorbisfile3 libvpl2 libwavpack1 libwebrtc-audio-processing1 libwildmidi2 libx265-199 libxcb-xkb1 libxkbcommon-x11-0 libcups2t64 libxml2 libxml2-dev libxvidcore4 
libyuv0 libzbar0t64 libzimg2 libzix-0-0 libzvbi-common libzvbi0t64 libzxing3 ocl-icd-libopencl1 timgm6mb-soundfont xfonts-encodings xfonts-utils binutils lld binutils-devel binutils-gold fonts-freefont-ttf fonts-ipafont-gothic fonts-tlwg-loma-otf fonts-unifont fonts-wqy-zenhei gstreamer1.0-libav gstreamer1.0-plugins-bad gstreamer1.0-plugins-good libavif16 libevent-2.1-7t64 libgstreamer-plugins-bad1.0-0 libharfbuzz-icu0 libhyphen0 libmanette-0.2-0 libsecret-1-0 libwoff1 xfonts-cyrillic xfonts-scalable fonts-ipafont-mincho fonts-tlwg-loma gstreamer1.0-x version: 1.0 - uses: actions/setup-node@v4 with: node-version: 24 - name: Install Rust uses: dtolnay/rust-toolchain@master with: toolchain: ${{ matrix.platform.toolchain }} targets: x86_64-unknown-linux-gnu,wasm32-unknown-unknown - uses: Swatinem/rust-cache@v2 with: key: "playwright-${{ matrix.platform.toolchain }}-${{ runner.os }}" cache-all-crates: "true" cache-on-failure: "true" - name: Wipe dx cache if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'ubuntu-24.04-arm' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404' || matrix.os == 'blacksmith-4vcpu-ubuntu-2404-arm' || matrix.os == 'blacksmith-8vcpu-ubuntu-2404' }} run: | rm -rf ./target/dx - name: Playwright working-directory: ./packages/playwright-tests env: # The hot patch test requires incremental compilation CARGO_INCREMENTAL: 1 run: | npm ci npm install -D @playwright/test npx playwright install npx playwright test - uses: actions/upload-artifact@v6 if: always() with: name: playwright-report-${{ matrix.platform.toolchain }}-${{ runner.os }} path: ./packages/playwright-tests/playwright-report/ retention-days: 30 matrix_test: runs-on: ${{ matrix.platform.os }} if: github.event.pull_request.draft == false env: RUST_CARGO_COMMAND: ${{ matrix.platform.cross == true && 'cross' || 'cargo' }} strategy: matrix: platform: - { target: aarch64-apple-darwin, os: macos-latest, toolchain: "1.88.0", cross: false, command: "test", args: "--all --tests", platform: "desktop", } 
- { target: aarch64-apple-ios, os: macos-latest, toolchain: "1.88.0", cross: false, command: "build", args: "--package dioxus-desktop", platform: "ios", } - { target: aarch64-unknown-linux-gnu, os: ubuntu-24.04-arm, toolchain: "1.88.0", cross: false, command: "build", args: "--all --tests", platform: "desktop", } - { target: aarch64-linux-android, os: blacksmith-4vcpu-ubuntu-2404, toolchain: "1.88.0", cross: true, command: "build", args: "--package dioxus-desktop", platform: "android", } # commented out because it's having issues with space on the device, but we already test it above # - { # target: x86_64-unknown-linux-gnu, # os: ubuntu-24.04, # toolchain: "1.88.0", # cross: false, # command: "build", # args: "--all --tests", # platform: "desktop", # } steps: - uses: actions/checkout@v5 - name: Free Disk Space if: ${{ matrix.platform.os == 'ubuntu-24.04' || matrix.platform.os == 'ubuntu-24.04-arm' || matrix.platform.os == 'blacksmith-4vcpu-ubuntu-2404' || matrix.platform.os == 'blacksmith-4vcpu-ubuntu-2404-arm' }} uses: ./.github/actions/free-disk-space - uses: awalsh128/cache-apt-pkgs-action@latest if: ${{ matrix.platform.os == 'ubuntu-24.04' || matrix.platform.os == 'ubuntu-24.04-arm' || matrix.platform.os == 'blacksmith-4vcpu-ubuntu-2404' || matrix.platform.os == 'blacksmith-4vcpu-ubuntu-2404-arm' }} with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libasound2-dev libudev-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev version: ${{ matrix.platform.target }}-${{ matrix.platform.os }} # disambiguate since we're in a matrix and this caching action doesn't factor in these variables - name: install stable uses: dtolnay/rust-toolchain@master with: toolchain: ${{ matrix.platform.toolchain }} targets: ${{ matrix.platform.target }} components: rustfmt - name: Install nasm for windows (tls) if: ${{ matrix.platform.target == 'x86_64-pc-windows-msvc' }} uses: ilammy/setup-nasm@v1 - name: Install cross if: ${{ matrix.platform.cross == true }} uses: 
taiki-e/install-action@cross - uses: Swatinem/rust-cache@v2 with: key: "matrix-${{ matrix.platform.target }}" cache-all-crates: "true" - name: test run: | ${{ env.RUST_CARGO_COMMAND }} ${{ matrix.platform.command }} ${{ matrix.platform.args }} --target ${{ matrix.platform.target }} # borrowed from uv # https://raw.githubusercontent.com/astral-sh/uv/refs/heads/main/.github/workflows/ci.yml cargo-test-windows: if: github.event.pull_request.draft == false runs-on: labels: "blacksmith-4vcpu-windows-2025" name: "cargo test | windows" steps: - uses: actions/checkout@v5 - uses: dtolnay/rust-toolchain@1.88.0 with: components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 with: workspaces: ${{ env.UV_WORKSPACE }} cache-all-crates: "true" - name: "Install Rust toolchain" working-directory: ${{ env.UV_WORKSPACE }} run: rustup show - name: "Cargo test" working-directory: ${{ env.UV_WORKSPACE }} run: | cargo test --workspace --tests ================================================ FILE: .github/workflows/merge.yml ================================================ # Runs whenever a PR is merged: # - attempt to backport fixes # - upload nightly docs # # Future: # - upload nightly CLI builds # - upload nightly vscode extension # - upload benchmarks # - compute coverage # # Note that direct commits to master circumvent this workflow! 
name: Backport merged pull request on: pull_request_target: types: [closed] permissions: contents: write # so it can comment pull-requests: write # so it can create pull requests jobs: # Attempt to backport a merged pull request to the latest stable release backport: name: Backport pull request runs-on: blacksmith-4vcpu-ubuntu-2404 # Don't run on closed unmerged pull requests, or pull requests with the "breaking" label if: github.event.pull_request.merged && !contains(github.event.pull_request.labels.*.name, 'breaking') steps: - uses: actions/checkout@v5 - name: Create backport pull requests uses: korthout/backport-action@v3 # Upload nightly docs to the website docs: runs-on: blacksmith-4vcpu-ubuntu-2404 permissions: contents: write steps: - uses: actions/checkout@v5 - run: sudo apt-get update - run: sudo apt install libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev - uses: dtolnay/rust-toolchain@nightly with: toolchain: nightly-2024-02-01 - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" save-if: ${{ github.ref == 'refs/heads/main' }} - name: cargo doc run: cargo doc --no-deps --workspace --all-features - name: Deploy uses: JamesIves/github-pages-deploy-action@v4.7.3 with: branch: gh-pages folder: target/doc target-folder: api-docs/nightly repository-name: dioxuslabs/docsite clean: false token: ${{ secrets.DEPLOY_KEY }} # Attempt to backport a merged pull request to the latest stable release # # If the backported PR is successfully merged # Any PR without the "breaking" label will be attempted to be backported to the latest stable release # Coverage is disabled until we can fix it # coverage: # name: Coverage # runs-on: ubuntu-latest # container: # image: xd009642/tarpaulin:develop-nightly # options: --security-opt seccomp=unconfined # steps: # - name: Checkout repository # uses: actions/checkout@v5 # - name: Generate code coverage # run: | # apt-get update &&\ # apt-get install build-essential &&\ # apt install 
libwebkit2gtk-4.0-dev libgtk-3-dev libayatana-appindicator3-dev -y &&\ # cargo +nightly tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml # - name: Upload to codecov.io # uses: codecov/codecov-action@v2 # with: # fail_ci_if_error: false ================================================ FILE: .github/workflows/promote.yml ================================================ # Promote the current main branch to a stable release. # This will not actually release anything, so you need to run the release workflow after this. # # IE if the current master version is 0.4.0-rc.7, this will create a PR to promote it to 0.4.0 # # - update the version in the Cargo.toml to v0.4.0 # - generate a v0.4 branch # - push the branch to the repository # - then bump 0.4.0-rc.1 to 0.5.0-rc.0 # # This means main will never be a "stable" release, and we can always merge breaking changes to main # and backport them to the latest stable release # # This is configured to be ran manually, but could honestly just be a release workflow name: Promote main to stable branch on: workflow_dispatch: permissions: actions: write jobs: promote: runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - name: Publish the next pre-release run: | git config --global user.email "github-actions[bot]@users.noreply.github.com" git config --global user.name "github-actions[bot]" # go from eg 0.4.0-rc.7 to 0.4.0, committing the change cargo workspaces version -y minor # create a new branch for the release RELEASE_BRANCH=$(cargo metadata --no-deps --format-version 1 | jq -r '.packages[0].version') RELEASE_BRANCH=v$(echo $RELEASE_BRANCH | sed 's/\.[0-9]*$//') git branch $RELEASE_BRANCH # go from 0.4.0 to 0.5.0-rc.0 cargo workspaces version -y preminor --pre-id rc # push the new branch to the repository git push origin $RELEASE_BRANCH # push the new version to the repository git push origin main ================================================ FILE: .github/workflows/publish.yml 
================================================ # Release workflow # # We parallelize builds, dump all the artifacts into a release, and then publish the release # This guarantees everything is properly built and cached in case anything goes wrong # # The artifacts also need to get pushed to the various places # - the CLI goes to the releases page for binstall # - the extension goes to the marketplace # - the docs go to the website # # We need to be aware of the channel we're releasing # - prerelease is master # - stable is whatever the latest stable release is (ie 0.4 or 0.5 or 0.6 etc) # # It's intended that this workflow is run manually, and only when we're ready to release name: Publish CLI on: workflow_dispatch: inputs: post: name: "Release Post" required: true description: Choose the release post to publish with. Must be a tag (eg v0.4.0) type: string channel: name: "CLI Binary Version" required: true description: Choose the version number to publish with. Must be a tag (ie v0.4.0) type: string env: # make sure we have the right version # main is always a prepatch until we hit 1.0, and then this script needs to be updated # note that we need to promote the prepatch to a minor bump when we actually do a release # this means the version in git will always be one minor bump ahead of the actual release - basically meaning once # we release a version, it's fair game to merge breaking changes to main since all semver-compatible changes will be # backported automatically # SEMVER: ${{ github.event.inputs.channel == 'main' && 'prerelease' || 'patch' }} # PRERELEASE_TAG: ${{ github.event.inputs.channel == 'main' && '-pre' || '' }} RELEASE_TAG: ${{ github.event.inputs.channel }} RELEASE_POST: ${{ github.event.inputs.post }} jobs: release-cli: permissions: contents: write runs-on: ${{ matrix.platform.os }} strategy: matrix: platform: - target: x86_64-pc-windows-msvc os: windows-latest - target: aarch64-pc-windows-msvc os: windows-latest - target: x86_64-apple-darwin 
os: macos-15-intel - target: aarch64-apple-darwin os: macos-latest - target: x86_64-unknown-linux-gnu os: ubuntu-24.04 - target: aarch64-unknown-linux-gnu os: ubuntu-24.04-arm # os: blacksmith-4vcpu-ubuntu-2404 # os: blacksmith-4vcpu-ubuntu-2404 # - target: x86_64-unknown-linux-musl # os: ubuntu-24.04 # - target: aarch64-unknown-linux-musl # os: ubuntu-24.04-arm steps: - name: Checkout uses: actions/checkout@v5 - name: Install openssl on macos if: matrix.platform.os == 'macos-latest' run: brew install openssl - name: Install nasm for windows (tls) if: ${{ matrix.platform.target == 'x86_64-pc-windows-msvc' }} uses: ilammy/setup-nasm@v1 - name: Free Disk Space if: ${{ matrix.platform.os == 'ubuntu-24.04' || matrix.platform.os == 'ubuntu-24.04-arm' }} uses: ./.github/actions/free-disk-space - uses: awalsh128/cache-apt-pkgs-action@latest if: ${{ matrix.platform.os == 'ubuntu-24.04' || matrix.platform.os == 'ubuntu-24.04-arm' }} with: packages: libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev musl-tools version: 1.0 - name: Install stable uses: dtolnay/rust-toolchain@master with: toolchain: "1.88.0" targets: ${{ matrix.platform.target }} - uses: Swatinem/rust-cache@v2 with: cache-all-crates: "true" save-if: ${{ github.ref == 'refs/heads/main' }} - name: Free Disk Space uses: jlumbroso/free-disk-space@v1.3.1 with: # speed things up a bit large-packages: false docker-images: false swap-storage: false # Todo: we want `cargo install dx` to actually just use a prebuilt binary instead of building it - name: Build and upload CLI binaries uses: taiki-e/upload-rust-binary-action@v1 with: bin: dx token: ${{ secrets.GITHUB_TOKEN }} target: ${{ matrix.platform.target }} archive: $bin-$target checksum: sha256 manifest_path: packages/cli/Cargo.toml ref: refs/tags/${{ env.RELEASE_POST }} zip: "all" # todo: these things # Run benchmarks, which we'll use to display on the website # release-benchmarks: # Build the vscode extension, uploading the 
artifact to the marketplace # release-extension: # First, run checks (clippy, tests, etc) and then publish the crates to crates.io # release-crates: # steps: # # Checkout the right branch, and the nightly stuff # - uses: actions/checkout@v5 # ref: ${{ github.event.inputs.channel }} # - run: sudo apt-get update # - run: sudo apt install libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev libxdo-dev libglib2.0-dev # - uses: dtolnay/rust-toolchain@nightly # with: # toolchain: nightly-2024-02-01 # - uses: Swatinem/rust-cache@v2 # with: # cache-all-crates: "true" # - name: Free Disk Space (Ubuntu) # uses: jlumbroso/free-disk-space@v1.3.1 # with: # speed things up a bit # large-packages: false # docker-images: false # swap-storage: false # # Just make sure clippy is happy before doing anything else # # Don't publish versions with clippy errors! # - name: Clippy # run: cargo clippy --workspace --all --examples --tests --all-features --all-targets -- -D warnings # # Build the docs here too before publishing, to ensure they're up to date # - name: cargo doc # run: cargo doc --no-deps --workspace --all-features # - name: Publish to crates.io # run: | # git config --global user.email "github-actions[bot]@users.noreply.github.com" # git config --global user.name "github-actions[bot]" # cargo workspaces version -y ${{ env.SEMVER }} --pre-id rc --no-git-commit # # todo: actually just publish! 
# # cargo workspaces publish -y ${{ github.event.inputs.semver }} # this will be more useful when we publish the website with updated docs # Build the docs.rs docs and publish them to the website under the right folder # v0.4.x -> docs/0.4 # v0.5.x -> docs/0.5 etc # main -> docs/nightly # strip the v from the channel, and the .x from the end, and replace main with nightly # - name: determine docs folder by channel # id: determine_docs_folder # run: echo "::set-output name=folder::$(echo ${{ github.event.inputs.channel }} | sed 's/v//g' | sed 's/\.x//g' | sed 's/main/nightly/g')" ================================================ FILE: .github/workflows/setup-dev-drive.ps1 ================================================ # This creates a 20GB dev drive, and exports all required environment # variables so that rustup, uv and others all use the dev drive as much # as possible. $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 20GB | Mount-VHD -Passthru | Initialize-Disk -Passthru | New-Partition -AssignDriveLetter -UseMaximumSize | Format-Volume -FileSystem ReFS -Confirm:$false -Force Write-Output $Volume $Drive = "$($Volume.DriveLetter):" $Tmp = "$($Drive)/uv-tmp" # Create the directory ahead of time in an attempt to avoid race-conditions New-Item $Tmp -ItemType Directory Write-Output ` "DEV_DRIVE=$($Drive)" ` "TMP=$($Tmp)" ` "TEMP=$($Tmp)" ` "RUSTUP_HOME=$($Drive)/.rustup" ` "CARGO_HOME=$($Drive)/.cargo" ` "UV_WORKSPACE=$($Drive)/uv" ` "PATH=$($Drive)/.cargo/bin;$env:PATH" ` >> $env:GITHUB_ENV ================================================ FILE: .github/workflows/typos.yml ================================================ # Whenever an open PR is updated, the workflow will be triggered name: Language Linting on: push: branches: - main pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: - main jobs: typos: if: github.event.pull_request.draft == false name: Check for typos runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: 
actions/checkout@v5 - name: Check for typos uses: crate-ci/typos@master # Check for invalid links in the repository link-check: if: github.event.pull_request.draft == false name: Check For Invalid Links runs-on: blacksmith-4vcpu-ubuntu-2404 steps: - uses: actions/checkout@v5 - name: Restore lychee cache uses: actions/cache@v5 with: path: .lycheecache key: cache-lychee-${{ github.sha }} restore-keys: cache-lychee- - name: Run lychee uses: lycheeverse/lychee-action@v2 with: args: --config ./lychee.toml './**/*.md' fail: true ================================================ FILE: .gitignore ================================================ .dioxus /target /packages/playwright-tests/cli-optimization/monaco-editor-0.52.2 /packages/playwright-tests/web/dist /packages/playwright-tests/fullstack/dist /packages/playwright-tests/test-results /packages/playwright-tests/web-hot-patch-temp /packages/playwright-tests/web-hot-patch-fullstack-temp /packages/playwright-tests/web-hot-patch/Cargo.lock /packages/playwright-tests/web-hot-patch-fullstack/Cargo.lock /dist .DS_Store /examples/assets/test_video.mp4 /examples/_assets/test_video.mp4 static # new recommendation to keep the lockfile in for CI and reproducible builds # Cargo.lock .vscode/* !.vscode/settings.json !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json tarpaulin-report.html # Jetbrain .idea/ node_modules/ /test-results/ /packages/playwright-report/ /packages/playwright/.cache/ # Allow geolocation plugin sources to be tracked /packages/geolocation/android/build/ /packages/geolocation/android/.gradle/ # Zed .zed/ # ignore the output of tmps tmp/ bundle/ # in debugging we frequently dump wasm to wat with `wasm-tools print` *.wat ================================================ FILE: .vscode/settings.json ================================================ { "editor.formatOnSave": true, "[toml]": { "editor.formatOnSave": false }, "[handlebars]": { "editor.formatOnSave": false }, "[javascript]": { 
"editor.formatOnSave": false }, "[html]": { "editor.formatOnSave": false }, "dioxus.formatOnSave": "disabled", // "rust-analyzer.check.workspace": true, // "rust-analyzer.check.workspace": false, // "rust-analyzer.check.features": "all", // "rust-analyzer.cargo.buildScripts.rebuildOnSave": false, // "rust-analyzer.check.workspace": false, // "rust-analyzer.check.allTargets": true, "rust-analyzer.cargo.features": "all", "rust-analyzer.check.features": "all", "rust-analyzer.cargo.extraArgs": [ "--tests" ], } ================================================ FILE: AGENTS.md ================================================ # Dioxus Agent Guide Dioxus is a cross-platform UI framework for Rust, similar to React. It compiles to web (WASM), desktop (webview), mobile (iOS/Android), and native (GPU-rendered). ## Quick Overview - **Language**: Rust (stable toolchain) - **UI Model**: React-like with VirtualDOM, components, hooks, signals - **Syntax**: JSX-like `rsx!` macro for declaring UI - **Platforms**: Web, Desktop (Windows/macOS/Linux), Mobile, Native, LiveView (server-rendered) ## Workspace Structure ``` packages/ ├── dioxus/ # Main re-export crate users depend on ├── core/ # VirtualDOM, components, diffing, scheduling ├── rsx/ # RSX macro parsing and code generation ├── rsx-hotreload/ # Template diffing for hot-reload ├── signals/ # Reactive state (Signal, Memo, Store) ├── hooks/ # Built-in hooks (use_signal, use_effect, etc.) 
├── router/ # Type-safe routing with #[derive(Routable)] ├── fullstack/ # SSR, hydration, #[server] functions ├── cli/ # `dx` build tool, dev server, bundling ├── web/ # WASM renderer ├── desktop/ # Wry/Tao webview renderer ├── native/ # Blitz/Vello GPU renderer ├── liveview/ # WebSocket streaming renderer ├── manganis/ # asset!() macro for compile-time assets ├── subsecond/ # Hot-patching system (jump table indirection) ├── devtools/ # Dev server communication protocol ├── interpreter/ # Sledgehammer JS for DOM mutations └── wasm-split/ # WASM code splitting ``` ## Architecture Documentation For deeper understanding, see `notes/architecture/`: | When working on... | Read... | | ------------------------------------------ | ------------------ | | VirtualDOM, components, diffing, events | `01-CORE.md` | | CLI, build system, bundling, dev server | `02-CLI.md` | | RSX macro, parsing, formatting | `03-RSX.md` | | Signals, state management, reactivity | `04-SIGNALS.md` | | Server functions, SSR, hydration | `05-FULLSTACK.md` | | Web/desktop/native/liveview renderers | `06-RENDERERS.md` | | Hot-reload, hot-patching, devtools | `07-HOTRELOAD.md` | | Asset macro, manganis, const serialization | `08-ASSETS.md` | | Router, navigation, nested routes | `09-ROUTER.md` | | WASM code splitting | `10-WASM-SPLIT.md` | ## Key Concepts - **VirtualDOM**: Tree of `VNode` with templates, dynamic nodes, and attributes - **Signals**: Copy-able reactive primitives via generational-box (generation-based validity) - **WriteMutations**: Trait that renderers implement to apply DOM changes - **RSX**: Proc macro that compiles JSX-like syntax to `VNode` construction - **Server Functions**: `#[server]` macro generates client RPC stubs and server handlers - **Subsecond**: Hot-patches Rust code via jump table indirection (no memory modification) - **Manganis**: `asset!("/main.css")` macro for including assets by embedding data via linker symbols ## Common Patterns **Component definition**: ```rust 
#[component] fn MyComponent(name: String) -> Element { let mut count = use_signal(|| 0); rsx! { button { onclick: move |_| count += 1, "{name}: {count}" } } } ``` ## Notes for Agents 1. The `dioxus` crate re-exports from other crates - most implementation is in `packages/core`, `packages/signals`, etc. 2. RSX macro expansion happens in `packages/rsx` - look there for syntax questions 3. Each renderer implements `WriteMutations` differently - see `06-RENDERERS.md` 4. Hot-reload has two systems: RSX template diffing (fast) and Subsecond code patching (full Rust) 5. Assets use link sections and binary patching - the `asset!()` macro creates symbols the CLI processes ================================================ FILE: Cargo.toml ================================================ [workspace] resolver = "2" members = [ "packages/dioxus", "packages/core", "packages/core-types", "packages/cli", "packages/cli-opt", "packages/cli-config", "packages/cli-telemetry", "packages/core-macro", "packages/config-macro", "packages/router-macro", "packages/extension", "packages/router", "packages/html", "packages/html-internal-macro", "packages/hooks", "packages/web", "packages/ssr", "packages/desktop", "packages/interpreter", "packages/liveview", "packages/autofmt", "packages/check", "packages/devtools-types", "packages/devtools", "packages/document", "packages/fullstack", "packages/fullstack-core", "packages/fullstack-macro", "packages/fullstack-server", "packages/generational-box", "packages/history", "packages/lazy-js-bundle", "packages/rsx-hotreload", "packages/rsx-rosetta", "packages/rsx", "packages/signals", "packages/stores", "packages/stores-macro", "packages/const-serialize", "packages/const-serialize-macro", "packages/dx-wire-format", "packages/logger", "packages/config-macros", "packages/native", "packages/native-dom", "packages/asset-resolver", "packages/depinfo", "packages/component-manifest", # CLI harnesses, all included "packages/cli-harnesses/*", # Playwright tests 
"packages/playwright-tests/liveview", "packages/playwright-tests/web", "packages/playwright-tests/web-routing", "packages/playwright-tests/web-hash-routing", "packages/playwright-tests/barebones-template", "packages/playwright-tests/fullstack", "packages/playwright-tests/fullstack-errors", "packages/playwright-tests/fullstack-mounted", "packages/playwright-tests/fullstack-spread", "packages/playwright-tests/fullstack-routing", "packages/playwright-tests/fullstack-hydration-order", "packages/playwright-tests/suspense-carousel", "packages/playwright-tests/nested-suspense", "packages/playwright-tests/cli-optimization", "packages/playwright-tests/wasm-split-harness", "packages/playwright-tests/default-features-disabled", "packages/playwright-tests/fullstack-error-codes", "packages/playwright-tests/windows-headless", # manganis "packages/manganis/manganis", "packages/manganis/manganis-core", "packages/manganis/manganis-macro", "packages/manganis/manganis-07", # wasm-split "packages/wasm-split/wasm-split", "packages/wasm-split/wasm-split-macro", "packages/wasm-split/wasm-split-cli", "packages/wasm-split/wasm-used", # subsecond "packages/subsecond/subsecond", "packages/subsecond/subsecond-types", "packages/subsecond/subsecond-tests/cross-tls-crate", "packages/subsecond/subsecond-tests/cross-tls-crate-dylib", "packages/subsecond/subsecond-tests/cross-tls-test", # Full project examples "examples/01-app-demos/hackernews", "examples/01-app-demos/ecommerce-site", "examples/01-app-demos/bluetooth-scanner", "examples/01-app-demos/file-explorer", "examples/01-app-demos/hotdog", "examples/01-app-demos/geolocation-native-plugin", # Fullstack examples "examples/07-fullstack/hello-world", "examples/07-fullstack/router", "examples/07-fullstack/desktop", "examples/07-fullstack/auth", "examples/07-fullstack/ssr-only", # Integrations "examples/10-integrations/tailwind", "examples/10-integrations/pwa", "examples/10-integrations/wgpu-texture", "examples/10-integrations/native-headless", 
"examples/10-integrations/native-headless-in-bevy", "examples/10-integrations/bevy", ] [workspace.package] version = "0.7.3" # dependencies that are shared across packages [workspace.dependencies] dioxus = { path = "packages/dioxus", version = "0.7.3" } dioxus-core = { path = "packages/core", version = "0.7.3" } dioxus-core-types = { path = "packages/core-types", version = "0.7.3" } dioxus-core-macro = { path = "packages/core-macro", version = "0.7.3" } dioxus-config-macro = { path = "packages/config-macro", version = "0.7.3" } dioxus-router = { path = "packages/router", version = "0.7.3" } dioxus-router-macro = { path = "packages/router-macro", version = "0.7.3" } dioxus-document = { path = "packages/document", version = "0.7.3", default-features = false } dioxus-history = { path = "packages/history", version = "0.7.3", default-features = false } dioxus-html = { path = "packages/html", version = "0.7.3", default-features = false } dioxus-html-internal-macro = { path = "packages/html-internal-macro", version = "0.7.3" } dioxus-hooks = { path = "packages/hooks", version = "0.7.3" } dioxus-web = { path = "packages/web", version = "0.7.3", default-features = false } dioxus-ssr = { path = "packages/ssr", version = "0.7.3", default-features = false } dioxus-desktop = { path = "packages/desktop", version = "0.7.3", default-features = false } dioxus-interpreter-js = { path = "packages/interpreter", version = "0.7.3" } dioxus-liveview = { path = "packages/liveview", version = "0.7.3" } dioxus-autofmt = { path = "packages/autofmt", version = "0.7.3" } dioxus-check = { path = "packages/check", version = "0.7.3" } dioxus-rsx = { path = "packages/rsx", version = "0.7.3" } dioxus-rsx-hotreload = { path = "packages/rsx-hotreload", version = "0.7.3" } dioxus-rsx-rosetta = { path = "packages/rsx-rosetta", version = "0.7.3" } dioxus-signals = { path = "packages/signals", version = "0.7.3" } dioxus-stores = { path = "packages/stores", version = "0.7.3" } dioxus-stores-macro = { path 
= "packages/stores-macro", version = "0.7.3" } dioxus-devtools = { path = "packages/devtools", version = "0.7.3" } dioxus-devtools-types = { path = "packages/devtools-types", version = "0.7.3" } dioxus-fullstack = { path = "packages/fullstack", version = "0.7.3", default-features = false } dioxus-fullstack-core = { path = "packages/fullstack-core", version = "0.7.3", default-features = false } dioxus-fullstack-macro = { path = "packages/fullstack-macro", version = "0.7.3", default-features = false } dioxus-server = { path = "packages/fullstack-server", version = "0.7.3" } dioxus-dx-wire-format = { path = "packages/dx-wire-format", version = "0.7.3" } dioxus-logger = { path = "packages/logger", version = "0.7.3" } dioxus-native = { path = "packages/native", version = "0.7.3" } dioxus-native-dom = { path = "packages/native-dom", version = "0.7.3" } dioxus-asset-resolver = { path = "packages/asset-resolver", version = "0.7.3" } dioxus-config-macros = { path = "packages/config-macros", version = "0.7.3" } dioxus-component-manifest = { path = "packages/component-manifest", version = "0.7.3" } generational-box = { path = "packages/generational-box", version = "0.7.3" } lazy-js-bundle = { path = "packages/lazy-js-bundle", version = "0.7.3" } # cli dioxus-cli-opt = { path = "packages/cli-opt", version = "0.7.3" } dioxus-cli-telemetry = { path = "packages/cli-telemetry", version = "0.7.3" } dioxus-cli-config = { path = "packages/cli-config", version = "0.7.3" } # subsecond subsecond-types = { path = "packages/subsecond/subsecond-types", version = "0.7.3" } subsecond = { path = "packages/subsecond/subsecond", version = "0.7.3" } # wasm-split wasm-splitter = { path = "packages/wasm-split/wasm-split", version = "0.7.3" } wasm-split-macro = { path = "packages/wasm-split/wasm-split-macro", version = "0.7.3" } wasm-split-cli = { path = "packages/wasm-split/wasm-split-cli", version = "0.7.3" } wasm-used = { path = "packages/wasm-split/wasm-used", version = "0.7.3" } # our dep-info 
parsing crate depinfo = { path = "packages/depinfo", version = "0.7.3" } # manganis manganis = { path = "packages/manganis/manganis", version = "0.7.3" } manganis-core = { path = "packages/manganis/manganis-core", version = "0.7.3" } manganis-macro = { path = "packages/manganis/manganis-macro", version = "0.7.3" } manganis-core-07 = { path = "packages/manganis/manganis-07", version = "0.7.2" } # const-serialize. # these are on "alpha" versions, but really, we are prepping for 0.8 but not committing to it yet # once the workspace moves onto 0.8, we can clean this up, moving the const-serialize stuff back to normal versions const-serialize = { path = "packages/const-serialize", version = "0.8.0-alpha.0" } const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.8.0-alpha.0" } warnings = { version = "0.2.1" } # blitz blitz-dom = { version = "0.2.4", default-features = false } blitz-net = { version = "0.2" } blitz-html = { version = "0.2" } blitz-paint = { version = "0.2" } blitz-traits = { version = "0.2" } blitz-shell = { version = "0.2", default-features = false } anyrender = { version = "0.6.2", default-features = false } anyrender_vello = { version = "0.6", default-features = false } anyrender_vello_cpu = { version = "0.8", default-features = false } wgpu_context = { version = "0.1", default-features = false } wgpu = { version = "26.0" } vello = "0.6" bevy = "0.17" # a fork of pretty please for tests - let's get off of this if we can! 
prettier-please = { version = "0.3.0", features = ["verbatim"] } anyhow = "1.0.98" clap = { version = "4.5.40" } askama_escape = "0.13.0" tracing = "0.1.41" tracing-futures = "0.2.5" tracing-subscriber = { version = "0.3.19", default-features = false } toml = "0.8" tokio = "1.48" tokio-util = { version = "0.7.15" } tokio-stream = { version = "0.1.17" } slab = "0.4.10" slotmap = { version = "1.0.7", features = ["serde"] } futures = "0.3.31" futures-channel = "0.3.31" futures-util = { version = "0.3", default-features = false } rustc-hash = "2.1.1" wasm-bindgen = "0.2.105" wasm-bindgen-futures = "0.4.50" js-sys = "0.3" web-sys = { version = "0.3.77", default-features = false } html_parser = "0.7.0" thiserror = "2.0.12" prettyplease = { version = "0.2.35", features = ["verbatim"] } const_format = "0.2.34" cargo_toml = { version = "0.22.1" } tauri-utils = { version = "=2.5.0" } tauri-macos-sign = { version = "=2.2.0" } tauri-bundler = { version = "=2.5.0" } lru = "0.16.0" async-trait = "0.1.88" axum = { version = "0.8.4", default-features = false } axum-server = { version = "0.7.3", default-features = false } http-body = { version = "1.0" } tower = "0.5.2" http = "1.3.1" notify = { version = "8.1.0" } tower-http = "0.6.6" hyper = "1.6.0" hyper-rustls = { version = "0.27.7", default-features = false, features = [ "native-tokio", "http1", "http2", "tls12", "logging", "ring", ] } rustls = { version = "0.23.28", default-features = false, features = [ "logging", "std", "tls12", "ring", ] } serde_json = "1.0.140" serde = "1.0.219" schemars = "1.0" syn = "2.0" quote = "1.0" axum-core = "0.5" proc-macro2 = "1.0.101" axum_session = "0.16.0" axum_session_auth = "0.16.0" axum_session_sqlx = "0.5.0" axum-extra = "0.10.1" reqwest = { version = "0.12.23", default-features = false } owo-colors = "4.2.2" ciborium = "0.2.2" base64 = "0.22.1" uuid = "1.17.0" convert_case = "0.8.0" tungstenite = { version = "0.27.0" } tokio-tungstenite = { version = "0.27.0" } gloo-timers = "0.3.0" 
internment = { version = "0.8.6" } proc-macro2-diagnostics = { version = "0.10", default-features = false } env_logger = "0.11.8" chrono = { version = "0.4.39" } rustversion = "1.0.21" rand = "0.9" longest-increasing-subsequence = "0.1.0" trybuild = "1.0" dirs = "6.0.0" cargo-config2 = "0.1.34" criterion = { version = "0.6" } cargo_metadata = "0.19.2" parking_lot = "0.12.4" tracing-wasm = "0.2.1" base16 = "0.2.1" digest = "0.10.7" sha2 = "0.10.9" walrus = { version = "0.23.3", features = ["parallel"] } id-arena = "2.2.1" async-compression = { version = "0.4", features = [ "futures-io", "gzip", "brotli", ] } getrandom = { version = "0.3.3" } async-once-cell = { version = "0.5.4" } rayon = "1.10.0" wasmparser = "0.235.0" itertools = "0.14.0" object = { version = "0.37.1" } inventory = { version = "0.3" } macro-string = "0.1.4" walkdir = "2.5.0" url = "2" data-url = "0.3.2" separator = "0.4.1" pretty_assertions = "1" serde_repr = "0.1" hyper-util = "0.1" krates = { version = "0.17.5" } libloading = "0.8.8" libc = "0.2.174" memmap2 = "0.9.5" memfd = "0.6.4" xxhash-rust = { version = "0.8.15", default-features = false } serde_qs = "0.15.0" multer = "3.1.0" const-str = "0.7.0" bytes = "1.10" send_wrapper = "0.6.0" pin-project = { version = "1.1.10" } postcard = { version = "1.1.3", default-features = false } serde_urlencoded = "0.7" form_urlencoded = "1.2.1" winnow = "0.7.14" # desktop wry = { version = "0.53.5", default-features = false } tao = { version = "0.34.0", features = ["rwh_05"] } infer = "0.19.0" dunce = "1.0.5" percent-encoding = "2.3.1" muda = "0.17.0" tray-icon = "0.21.0" open = "5.3.2" webbrowser = "1.0" # web gloo-dialogs = "0.2.0" # tui stuff ansi-to-tui = "7.0" ansi-to-html = "0.2.2" path-absolutize = "3.1" crossterm = { version = "0.29.0" } ratatui = { version = "0.29.0" } shell-words = "1.1.0" # native keyboard-types = { version = "0.7", default-features = false } winit = { version = "0.30.11", features = ["rwh_06"] } # our release profile should be 
fast to compile and fast to run # when we ship our CI builds, we turn on LTO which improves perf leftover by turning on incremental [profile.release] incremental = true # crank up the opt level for wasm-split-cli in dev mode # important here that lto is on and the debug symbols are present (since they're used by wasm-opt) [profile.wasm-split-release] inherits = "release" opt-level = 'z' lto = true debug = true # a profile for running the CLI that's also incremental [profile.cli-release-dev] inherits = "release" opt-level = 3 incremental = true # crank up walrus since it's quite slow in dev mode [profile.dev.package.walrus] opt-level = 3 # ensure we have adversarial setup for tls [profile.dev.package.cross-tls-crate] opt-level = 2 [profile.dev.package.cross-tls-crate-dylib] opt-level = 2 [profile.release-max-opt] inherits = "release" lto = true codegen-units = 1 # Disable debug assertions to check the released path of core and other packages, but build without optimizations to keep build times quick [profile.release-unoptimized] inherits = "dev" debug-assertions = false incremental = true [profile.wasm-dev] inherits = "dev" opt-level = 1 [profile.server-dev] inherits = "dev" [profile.android-dev] inherits = "dev" # This is a "virtual package" # It is not meant to be published, but is used so "cargo run --example XYZ" works properly [package] name = "dioxus-examples" authors = ["Jonathan Kelley"] edition = "2024" description = "Top level crate for the Dioxus repository" license = "MIT OR Apache-2.0" repository = "https://github.com/DioxusLabs/dioxus/" homepage = "https://dioxuslabs.com" documentation = "https://dioxuslabs.com" keywords = ["dom", "ui", "gui", "react", "wasm"] rust-version = "1.85.0" publish = false version = "0.7.0" [dependencies] reqwest = { workspace = true, features = ["json"] } ciborium = { workspace = true, optional = true } base64 = { workspace = true, optional = true } http-range = { version = "0.1.5" } wgpu = { workspace = true, optional = 
true } winit = { workspace = true, optional = true } ouroboros = { version = "*", optional = true } wasm-splitter = { workspace = true } sqlx = { version = "0.8.6", features = [ "macros", "migrate", "postgres", "sqlite", "_unstable-all-types", "tls-native-tls", "runtime-tokio", ], optional = true } wasm-streams = "0.4.2" [dev-dependencies] dioxus = { workspace = true, features = ["router", "fullstack"] } dioxus-html = { workspace = true, features = ["serialize"] } dioxus-stores = { workspace = true } dioxus-ssr = { workspace = true } futures-util = { workspace = true } separator = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } rand = { workspace = true, features = ["small_rng"] } form_urlencoded = "1.2.1" async-std = "1.13.1" web-time = "1.1.0" anyhow = { workspace = true } thiserror = { workspace = true } bytes = { workspace = true } futures = { workspace = true } axum-core = { workspace = true } uuid = { workspace = true, features = ["v4", "serde"] } tower-http = { workspace = true, features = ["timeout"] } pollster = "0.4.0" [target.'cfg(target_arch = "wasm32")'.dev-dependencies] getrandom = { workspace = true, features = ["wasm_js"] } tokio = { version = "1.48", default-features = false, features = [ "sync", "macros", "io-util", "rt", "time", ] } uuid = { workspace = true, features = ["v4", "serde", "js"] } [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] tokio = { version = "1.48", features = ["full"] } # To make most examples faster to compile, we split out assets and http-related stuff # This trims off like 270 dependencies, leading to a significant speedup in compilation time [features] default = ["desktop"] desktop = ["dioxus/desktop"] native = ["dioxus/native", "winit"] liveview = ["dioxus/liveview"] server = ["dioxus/server"] mobile = ["dioxus/mobile"] web = ["dioxus/web"] gpu = ["dep:ouroboros", "dep:wgpu"] [[example]] name = "websocket_chat" path = 
"examples/01-app-demos/websocket_chat.rs" doc-scrape-examples = true [[example]] name = "weather_app" path = "examples/01-app-demos/weather_app.rs" doc-scrape-examples = true [[example]] name = "crm" path = "examples/01-app-demos/crm.rs" doc-scrape-examples = true [[example]] name = "image_generator_openai" path = "examples/01-app-demos/image_generator_openai.rs" doc-scrape-examples = true [[example]] name = "todomvc" path = "examples/01-app-demos/todomvc.rs" doc-scrape-examples = true [[example]] name = "calculator_mutable" path = "examples/01-app-demos/calculator_mutable.rs" doc-scrape-examples = true [[example]] name = "hello_world" path = "examples/01-app-demos/hello_world.rs" doc-scrape-examples = true [[example]] name = "counters" path = "examples/01-app-demos/counters.rs" doc-scrape-examples = true [[example]] name = "todomvc_store" path = "examples/01-app-demos/todomvc_store.rs" doc-scrape-examples = true [[example]] name = "dog_app" path = "examples/01-app-demos/dog_app.rs" doc-scrape-examples = true [[example]] name = "calculator" path = "examples/01-app-demos/calculator.rs" doc-scrape-examples = true [[example]] name = "repo_readme" path = "examples/01-app-demos/repo_readme.rs" doc-scrape-examples = true [[example]] name = "nested_listeners" path = "examples/02-building-ui/nested_listeners.rs" doc-scrape-examples = true [[example]] name = "disabled" path = "examples/02-building-ui/disabled.rs" doc-scrape-examples = true [[example]] name = "svg" path = "examples/02-building-ui/svg.rs" doc-scrape-examples = true [[example]] name = "css_modules" path = "examples/03-assets-styling/css_modules.rs" doc-scrape-examples = true [[example]] name = "custom_assets" path = "examples/03-assets-styling/custom_assets.rs" doc-scrape-examples = true [[example]] name = "dynamic_assets" path = "examples/03-assets-styling/dynamic_assets.rs" doc-scrape-examples = true [[example]] name = "meta" path = "examples/03-assets-styling/meta.rs" doc-scrape-examples = true [[example]] 
name = "meta_elements" path = "examples/03-assets-styling/meta_elements.rs" doc-scrape-examples = true [[example]] name = "reducer" path = "examples/04-managing-state/reducer.rs" doc-scrape-examples = true [[example]] name = "memo_chain" path = "examples/04-managing-state/memo_chain.rs" doc-scrape-examples = true [[example]] name = "global" path = "examples/04-managing-state/global.rs" doc-scrape-examples = true [[example]] name = "context_api" path = "examples/04-managing-state/context_api.rs" doc-scrape-examples = true [[example]] name = "signals" path = "examples/04-managing-state/signals.rs" doc-scrape-examples = true [[example]] name = "errors" path = "examples/04-managing-state/error_handling.rs" doc-scrape-examples = true [[example]] name = "backgrounded_futures" path = "examples/05-using-async/backgrounded_futures.rs" doc-scrape-examples = true [[example]] name = "clock" path = "examples/05-using-async/clock.rs" doc-scrape-examples = true [[example]] name = "streams" path = "examples/05-using-async/streams.rs" doc-scrape-examples = true [[example]] name = "suspense" path = "examples/05-using-async/suspense.rs" doc-scrape-examples = true [[example]] name = "future" path = "examples/05-using-async/future.rs" doc-scrape-examples = true [[example]] name = "simple_router" path = "examples/06-routing/simple_router.rs" doc-scrape-examples = true [[example]] name = "router_restore_scroll" path = "examples/06-routing/router_restore_scroll.rs" doc-scrape-examples = true [[example]] name = "link" path = "examples/06-routing/link.rs" doc-scrape-examples = true [[example]] name = "hash_fragment_state" required-features = ["ciborium", "base64"] path = "examples/06-routing/hash_fragment_state.rs" doc-scrape-examples = true [[example]] name = "router" path = "examples/06-routing/router.rs" doc-scrape-examples = true [[example]] name = "router_resource" path = "examples/06-routing/router_resource.rs" doc-scrape-examples = true [[example]] name = "query_segment_search" path 
= "examples/06-routing/query_segment_search.rs" doc-scrape-examples = true [[example]] name = "flat_router" path = "examples/06-routing/flat_router.rs" doc-scrape-examples = true [[example]] name = "query_params" path = "examples/07-fullstack/query_params.rs" doc-scrape-examples = true [[example]] name = "server_functions" path = "examples/07-fullstack/server_functions.rs" doc-scrape-examples = true [[example]] name = "middleware" path = "examples/07-fullstack/middleware.rs" doc-scrape-examples = true [[example]] name = "custom_error_page" path = "examples/07-fullstack/custom_error_page.rs" doc-scrape-examples = true [[example]] name = "redirect" path = "examples/07-fullstack/redirect.rs" doc-scrape-examples = true [[example]] name = "header_map" path = "examples/07-fullstack/header_map.rs" doc-scrape-examples = true [[example]] name = "login_form" path = "examples/07-fullstack/login_form.rs" doc-scrape-examples = true [[example]] name = "handling_errors" path = "examples/07-fullstack/handling_errors.rs" doc-scrape-examples = true [[example]] name = "dog_app_self_hosted" path = "examples/07-fullstack/dog_app_self_hosted.rs" doc-scrape-examples = true [[example]] name = "through_reqwest" path = "examples/07-fullstack/through_reqwest.rs" doc-scrape-examples = true [[example]] name = "streaming_file_upload" path = "examples/07-fullstack/streaming_file_upload.rs" doc-scrape-examples = true [[example]] name = "multipart_form" path = "examples/07-fullstack/multipart_form.rs" doc-scrape-examples = true [[example]] name = "full_request_access" path = "examples/07-fullstack/full_request_access.rs" doc-scrape-examples = true [[example]] name = "fullstack_hello_world" path = "examples/07-fullstack/fullstack_hello_world.rs" doc-scrape-examples = true [[example]] name = "server_sent_events" path = "examples/07-fullstack/server_sent_events.rs" doc-scrape-examples = true [[example]] name = "server_state" path = "examples/07-fullstack/server_state.rs" doc-scrape-examples = true 
required-features = ["sqlx"] [[example]] name = "streaming" path = "examples/07-fullstack/streaming.rs" doc-scrape-examples = true [[example]] name = "custom_axum_serve" path = "examples/07-fullstack/custom_axum_serve.rs" doc-scrape-examples = true [[example]] name = "websocket" path = "examples/07-fullstack/websocket.rs" doc-scrape-examples = true [[example]] name = "drag_and_drop" path = "examples/08-apis/drag_and_drop.rs" doc-scrape-examples = true [[example]] name = "control_focus" path = "examples/08-apis/control_focus.rs" doc-scrape-examples = true [[example]] name = "window_popup" required-features = ["desktop"] path = "examples/08-apis/window_popup.rs" doc-scrape-examples = true [[example]] name = "custom_html" required-features = ["desktop"] path = "examples/08-apis/custom_html.rs" doc-scrape-examples = true [[example]] name = "multiwindow_with_tray_icon" required-features = ["desktop"] path = "examples/08-apis/multiwindow_with_tray_icon.rs" doc-scrape-examples = true [[example]] name = "window_event" required-features = ["desktop"] path = "examples/08-apis/window_event.rs" doc-scrape-examples = true [[example]] name = "read_size" path = "examples/08-apis/read_size.rs" doc-scrape-examples = true [[example]] name = "logging" path = "examples/08-apis/logging.rs" doc-scrape-examples = true [[example]] name = "overlay" required-features = ["desktop"] path = "examples/08-apis/overlay.rs" doc-scrape-examples = true [[example]] name = "ssr" path = "examples/08-apis/ssr.rs" doc-scrape-examples = true [[example]] name = "video_stream" required-features = ["desktop"] path = "examples/08-apis/video_stream.rs" doc-scrape-examples = true [[example]] name = "title" path = "examples/08-apis/title.rs" doc-scrape-examples = true [[example]] name = "file_upload" path = "examples/08-apis/file_upload.rs" doc-scrape-examples = true [[example]] name = "window_focus" required-features = ["desktop"] path = "examples/08-apis/window_focus.rs" doc-scrape-examples = true [[example]] 
name = "eval" path = "examples/08-apis/eval.rs" doc-scrape-examples = true [[example]] name = "shortcut" required-features = ["desktop"] path = "examples/08-apis/shortcut.rs" doc-scrape-examples = true [[example]] name = "scroll_to_offset" path = "examples/08-apis/scroll_to_offset.rs" doc-scrape-examples = true [[example]] name = "scroll_to_top" path = "examples/08-apis/scroll_to_top.rs" doc-scrape-examples = true [[example]] name = "wgpu_child_window" path = "examples/08-apis/wgpu_child_window.rs" required-features = ["gpu", "desktop"] doc-scrape-examples = true [[example]] name = "multiwindow" required-features = ["desktop"] path = "examples/08-apis/multiwindow.rs" doc-scrape-examples = true [[example]] name = "window_zoom" required-features = ["desktop"] path = "examples/08-apis/window_zoom.rs" doc-scrape-examples = true [[example]] name = "custom_menu" required-features = ["desktop"] path = "examples/08-apis/custom_menu.rs" doc-scrape-examples = true [[example]] name = "on_resize" path = "examples/08-apis/on_resize.rs" doc-scrape-examples = true [[example]] name = "form" path = "examples/08-apis/form.rs" doc-scrape-examples = true [[example]] name = "on_visible" path = "examples/08-apis/on_visible.rs" doc-scrape-examples = true [[example]] name = "all_events" path = "examples/09-reference/all_events.rs" doc-scrape-examples = true [[example]] name = "xss_safety" path = "examples/09-reference/xss_safety.rs" doc-scrape-examples = true [[example]] name = "web_component" path = "examples/09-reference/web_component.rs" doc-scrape-examples = true [[example]] name = "generic_component" path = "examples/09-reference/generic_component.rs" doc-scrape-examples = true [[example]] name = "shorthand" path = "examples/09-reference/shorthand.rs" doc-scrape-examples = true [[example]] name = "simple_list" path = "examples/09-reference/simple_list.rs" doc-scrape-examples = true [[example]] name = "optional_props" path = "examples/09-reference/optional_props.rs" 
doc-scrape-examples = true [[example]] name = "rsx_usage" path = "examples/09-reference/rsx_usage.rs" doc-scrape-examples = true [[example]] name = "spread" path = "examples/09-reference/spread.rs" doc-scrape-examples = true [[example]] name = "__scrape_example_list" path = "examples/scripts/scrape_examples.rs" ================================================ FILE: LICENSE-APACHE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS ================================================ FILE: LICENSE-MIT ================================================ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================



✨ Dioxus 0.7 is out!!! ✨


Build for web, desktop, mobile, and more with a single codebase. Zero-config setup, integrated hot-reloading, and signals-based state management. Add backend functionality with Server Functions and bundle with our CLI. ```rust fn app() -> Element { let mut count = use_signal(|| 0); rsx! { h1 { "High-Five counter: {count}" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" } } } ``` ## ⭐️ Unique features: - Cross-platform apps in three lines of code (web, desktop, mobile, server, and more) - [Ergonomic state management](https://dioxuslabs.com/blog/release-050) combines the best of React, Solid, and Svelte - Built-in featureful, type-safe, fullstack web framework - Integrated bundler for deploying to the web, macOS, Linux, and Windows - Subsecond Rust hot-patching and asset hot-reloading - And more! [Take a tour of Dioxus](https://dioxuslabs.com/learn/0.7/). ## Instant hot-reloading With one command, `dx serve`, your app is running. Edit your markup and styles and see changes in milliseconds. Use our experimental `dx serve --hotpatch` to update Rust code in real time.
## Build Beautiful Apps Dioxus apps are styled with HTML and CSS. Use the built-in TailwindCSS support or load your favorite CSS library. Easily call into native code (objective-c, JNI, Web-Sys) for a perfect native touch.
## Truly fullstack applications Dioxus deeply integrates with [axum](https://github.com/tokio-rs/axum) to provide powerful fullstack capabilities for both clients and servers. Pick from a wide array of built-in batteries like WebSockets, SSE, Streaming, File Upload/Download, Server-Side-Rendering, Forms, Middleware, and Hot-Reload, or go fully custom and integrate your existing axum backend.
## Experimental Native Renderer Render using web-sys, webview, server-side-rendering, liveview, or even with our experimental WGPU-based renderer. Embed Dioxus in Bevy, WGPU, or even run on embedded Linux!
## First-party primitive components Get started quickly with a complete set of primitives modeled after shadcn/ui and Radix-Primitives.
## First-class Android and iOS support Dioxus is the fastest way to build native mobile apps with Rust. Simply run `dx serve --platform android` and your app is running in an emulator or on device in seconds. Call directly into JNI and Native APIs.
## Bundle for web, desktop, and mobile Simply run `dx bundle` and your app will be built and bundled with maximal optimizations. On the web, take advantage of [`.avif` generation, `.wasm` compression, minification](https://dioxuslabs.com/learn/0.7/tutorial/assets), and more. Build WebApps weighing [less than 50kb](https://github.com/ealmloff/tiny-dioxus/) and desktop/mobile apps less than 5mb.
## Fantastic documentation We've put a ton of effort into building clean, readable, and comprehensive documentation. All HTML elements and listeners are documented with MDN docs, and our docs run continuous integration with Dioxus itself to ensure that the docs are always up to date. Check out the [Dioxus website](https://dioxuslabs.com/learn/0.7/) for guides, references, recipes, and more. Fun fact: we use the Dioxus website as a testbed for new Dioxus features - [check it out!](https://github.com/dioxusLabs/docsite)
## Modular and Customizable Build your own renderer. Use our modular components like RSX, VirtualDom, Blitz, Taffy, and Subsecond. ## Community Dioxus is a community-driven project, with a very active [Discord](https://discord.gg/XgGxMSkvUM) and [GitHub](https://github.com/DioxusLabs/dioxus/issues) community. We're always looking for help, and we're happy to answer questions and help you get started. [Our SDK](https://github.com/DioxusLabs/dioxus-std) is community-run and we even have a [GitHub organization](https://github.com/dioxus-community/) for the best Dioxus crates that receive free upgrades and support.
## Full-time core team Dioxus has grown from a side project to a small team of full-time engineers. Thanks to the generous support of FutureWei, Satellite.im, and the GitHub Accelerator program, we're able to work on Dioxus full-time. Our long-term goal is for Dioxus to become self-sustaining by providing paid high-quality enterprise tools. If your company is interested in adopting Dioxus and would like to work with us, please reach out! ## Supported Platforms
Web
  • Render directly to the DOM using WebAssembly
  • Pre-render with SSR and rehydrate on the client
  • Simple "hello world" at about 50kb, comparable to React
  • Built-in dev server and hot reloading for quick iteration
Desktop
  • Render using Webview or - experimentally - with WGPU or Freya (Skia)
  • Zero-config setup. Simply `cargo run` or `dx serve` to build your app
  • Full support for native system access without IPC
  • Supports macOS, Linux, and Windows. Portable <3mb binaries
Mobile
  • Render using Webview or - experimentally - with WGPU or Skia
  • Build .ipa and .apk files for iOS and Android
  • Call directly into Java and Objective-C with minimal overhead
  • From "hello world" to running on device in seconds
Server-side Rendering
  • Suspense, hydration, and server-side rendering
  • Quickly drop in backend functionality with server functions
  • Extractors, middleware, and routing integrations
  • Static-site generation and incremental regeneration
## Running the examples > The examples in the main branch of this repository target the git version of dioxus and the CLI. If you are looking for examples that work with the latest stable release of dioxus, check out the [0.6 branch](https://github.com/DioxusLabs/dioxus/tree/v0.6/examples). The examples in the top level of this repository can be run with: ```sh cargo run --example <example_name> ``` However, we encourage you to download the dioxus-cli to test out features like hot-reloading. To install the most recent binary CLI, you can use cargo binstall. ```sh cargo binstall dioxus-cli@0.7.0 --force ``` If this CLI is out-of-date, you can install it directly from git: ```sh cargo install --git https://github.com/DioxusLabs/dioxus dioxus-cli --locked ``` With the CLI, you can also run examples with the web platform. You will need to disable the default desktop feature and enable the web feature with this command: ```sh dx serve --example <example_name> --platform web -- --no-default-features ``` ## Contributing - Check out the website [section on contributing](https://dioxuslabs.com/learn/0.7/beyond/contributing). - Report issues on our [issue tracker](https://github.com/dioxuslabs/dioxus/issues). - [Join](https://discord.gg/XgGxMSkvUM) the discord and ask questions! ## License This project is licensed under either the [MIT license] or the [Apache-2 License]. [apache-2 license]: https://github.com/DioxusLabs/dioxus/blob/master/LICENSE-APACHE [mit license]: https://github.com/DioxusLabs/dioxus/blob/master/LICENSE-MIT Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Dioxus by you shall be licensed as MIT or Apache-2, without any additional terms or conditions. 
================================================ FILE: _typos.toml ================================================ [default.extend-words] # https://ratatui.rs/ ratatui = "ratatui" # lits is short for literals lits = "lits" # https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/seeked_event seeked = "seeked" # https://developer.apple.com/forums/thread/108953 # udid = unique device identifier udid = "udid" # Part of Blitz's API unparented = "unparented" [files] extend-exclude = ["notes/translations/*", "CHANGELOG.md", "*.js"] ================================================ FILE: codecov.yml ================================================ comment: false fail_ci_if_error: false ================================================ FILE: examples/01-app-demos/bluetooth-scanner/.gitignore ================================================ /target ================================================ FILE: examples/01-app-demos/bluetooth-scanner/Cargo.toml ================================================ [package] name = "bluetooth-scanner" version = "0.1.1" edition = "2021" publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] tokio = { workspace = true, features = ["full"] } dioxus = { workspace = true } futures-channel = { workspace = true } futures = { workspace = true } btleplug = "0.11.8" [features] default = ["desktop"] desktop = ["dioxus/desktop"] native = ["dioxus/native"] ================================================ FILE: examples/01-app-demos/bluetooth-scanner/README.md ================================================ # Bluetooth scanner app This desktop app showcases the use of background threads. ![Demo of app](./demo_small.png) ================================================ FILE: examples/01-app-demos/bluetooth-scanner/assets/tailwind.css ================================================ /*! 
tailwindcss v4.1.5 | MIT License | https://tailwindcss.com */ @layer properties; @layer theme, base, components, utilities; @layer theme { :root, :host { --font-sans: ui-sans-serif, system-ui, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji'; --font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; --color-green-500: oklch(72.3% 0.219 149.579); --color-indigo-500: oklch(58.5% 0.233 277.117); --color-indigo-600: oklch(51.1% 0.262 276.966); --color-purple-50: oklch(97.7% 0.014 308.299); --color-purple-500: oklch(62.7% 0.265 303.9); --color-gray-50: oklch(98.5% 0.002 247.839); --color-gray-500: oklch(55.1% 0.027 264.364); --color-white: #fff; --spacing: 0.25rem; --text-xs: 0.75rem; --text-xs--line-height: calc(1 / 0.75); --text-2xl: 1.5rem; --text-2xl--line-height: calc(2 / 1.5); --font-weight-medium: 500; --font-weight-bold: 700; --default-transition-duration: 150ms; --default-transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); --default-font-family: var(--font-sans); --default-mono-font-family: var(--font-mono); } } @layer base { *, ::after, ::before, ::backdrop, ::file-selector-button { box-sizing: border-box; margin: 0; padding: 0; border: 0 solid; } html, :host { line-height: 1.5; -webkit-text-size-adjust: 100%; tab-size: 4; font-family: var(--default-font-family, ui-sans-serif, system-ui, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji'); font-feature-settings: var(--default-font-feature-settings, normal); font-variation-settings: var(--default-font-variation-settings, normal); -webkit-tap-highlight-color: transparent; } hr { height: 0; color: inherit; border-top-width: 1px; } abbr:where([title]) { -webkit-text-decoration: underline dotted; text-decoration: underline dotted; } h1, h2, h3, h4, h5, h6 { font-size: inherit; font-weight: inherit; } a { color: inherit; -webkit-text-decoration: inherit; text-decoration: inherit; 
} b, strong { font-weight: bolder; } code, kbd, samp, pre { font-family: var(--default-mono-font-family, ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace); font-feature-settings: var(--default-mono-font-feature-settings, normal); font-variation-settings: var(--default-mono-font-variation-settings, normal); font-size: 1em; } small { font-size: 80%; } sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } sub { bottom: -0.25em; } sup { top: -0.5em; } table { text-indent: 0; border-color: inherit; border-collapse: collapse; } :-moz-focusring { outline: auto; } progress { vertical-align: baseline; } summary { display: list-item; } ol, ul, menu { list-style: none; } img, svg, video, canvas, audio, iframe, embed, object { display: block; vertical-align: middle; } img, video { max-width: 100%; height: auto; } button, input, select, optgroup, textarea, ::file-selector-button { font: inherit; font-feature-settings: inherit; font-variation-settings: inherit; letter-spacing: inherit; color: inherit; border-radius: 0; background-color: transparent; opacity: 1; } :where(select:is([multiple], [size])) optgroup { font-weight: bolder; } :where(select:is([multiple], [size])) optgroup option { padding-inline-start: 20px; } ::file-selector-button { margin-inline-end: 4px; } ::placeholder { opacity: 1; } @supports (not (-webkit-appearance: -apple-pay-button)) or (contain-intrinsic-size: 1px) { ::placeholder { color: currentcolor; @supports (color: color-mix(in lab, red, red)) { color: color-mix(in oklab, currentcolor 50%, transparent); } } } textarea { resize: vertical; } ::-webkit-search-decoration { -webkit-appearance: none; } ::-webkit-date-and-time-value { min-height: 1lh; text-align: inherit; } ::-webkit-datetime-edit { display: inline-flex; } ::-webkit-datetime-edit-fields-wrapper { padding: 0; } ::-webkit-datetime-edit, ::-webkit-datetime-edit-year-field, ::-webkit-datetime-edit-month-field, 
::-webkit-datetime-edit-day-field, ::-webkit-datetime-edit-hour-field, ::-webkit-datetime-edit-minute-field, ::-webkit-datetime-edit-second-field, ::-webkit-datetime-edit-millisecond-field, ::-webkit-datetime-edit-meridiem-field { padding-block: 0; } :-moz-ui-invalid { box-shadow: none; } button, input:where([type='button'], [type='reset'], [type='submit']), ::file-selector-button { appearance: button; } ::-webkit-inner-spin-button, ::-webkit-outer-spin-button { height: auto; } [hidden]:where(:not([hidden='until-found'])) { display: none !important; } } @layer utilities { .container { width: 100%; @media (width >= 40rem) { max-width: 40rem; } @media (width >= 48rem) { max-width: 48rem; } @media (width >= 64rem) { max-width: 64rem; } @media (width >= 80rem) { max-width: 80rem; } @media (width >= 96rem) { max-width: 96rem; } } .mx-auto { margin-inline: auto; } .mb-6 { margin-bottom: calc(var(--spacing) * 6); } .flex { display: flex; } .inline-block { display: inline-block; } .table { display: table; } .w-full { width: 100%; } .table-auto { table-layout: auto; } .overflow-x-auto { overflow-x: auto; } .rounded { border-radius: 0.25rem; } .rounded-full { border-radius: calc(infinity * 1px); } .bg-gray-50 { background-color: var(--color-gray-50); } .bg-green-500 { background-color: var(--color-green-500); } .bg-indigo-500 { background-color: var(--color-indigo-500); } .bg-purple-50 { background-color: var(--color-purple-50); } .bg-white { background-color: var(--color-white); } .p-4 { padding: calc(var(--spacing) * 4); } .px-2 { padding-inline: calc(var(--spacing) * 2); } .px-4 { padding-inline: calc(var(--spacing) * 4); } .px-6 { padding-inline: calc(var(--spacing) * 6); } .py-1 { padding-block: calc(var(--spacing) * 1); } .py-3 { padding-block: calc(var(--spacing) * 3); } .py-5 { padding-block: calc(var(--spacing) * 5); } .py-8 { padding-block: calc(var(--spacing) * 8); } .pb-3 { padding-bottom: calc(var(--spacing) * 3); } .pl-6 { padding-left: calc(var(--spacing) * 
6); } .text-left { text-align: left; } .text-2xl { font-size: var(--text-2xl); line-height: var(--tw-leading, var(--text-2xl--line-height)); } .text-xs { font-size: var(--text-xs); line-height: var(--tw-leading, var(--text-xs--line-height)); } .font-bold { --tw-font-weight: var(--font-weight-bold); font-weight: var(--font-weight-bold); } .font-medium { --tw-font-weight: var(--font-weight-medium); font-weight: var(--font-weight-medium); } .text-gray-500 { color: var(--color-gray-500); } .text-purple-500 { color: var(--color-purple-500); } .text-white { color: var(--color-white); } .shadow { --tw-shadow: 0 1px 3px 0 var(--tw-shadow-color, rgb(0 0 0 / 0.1)), 0 1px 2px -1px var(--tw-shadow-color, rgb(0 0 0 / 0.1)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } .transition { transition-property: color, background-color, border-color, outline-color, text-decoration-color, fill, stroke, --tw-gradient-from, --tw-gradient-via, --tw-gradient-to, opacity, box-shadow, transform, translate, scale, rotate, filter, -webkit-backdrop-filter, backdrop-filter, display, visibility, content-visibility, overlay, pointer-events; transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); transition-duration: var(--tw-duration, var(--default-transition-duration)); } .duration-200 { --tw-duration: 200ms; transition-duration: 200ms; } .hover\:bg-indigo-600 { &:hover { @media (hover: hover) { background-color: var(--color-indigo-600); } } } .md\:w-auto { @media (width >= 48rem) { width: auto; } } } @property --tw-font-weight { syntax: "*"; inherits: false; } @property --tw-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } @property --tw-shadow-color { syntax: "*"; inherits: false; } @property --tw-shadow-alpha { syntax: ""; inherits: false; initial-value: 100%; } @property --tw-inset-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } 
@property --tw-inset-shadow-color { syntax: "*"; inherits: false; } @property --tw-inset-shadow-alpha { syntax: ""; inherits: false; initial-value: 100%; } @property --tw-ring-color { syntax: "*"; inherits: false; } @property --tw-ring-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } @property --tw-inset-ring-color { syntax: "*"; inherits: false; } @property --tw-inset-ring-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } @property --tw-ring-inset { syntax: "*"; inherits: false; } @property --tw-ring-offset-width { syntax: ""; inherits: false; initial-value: 0px; } @property --tw-ring-offset-color { syntax: "*"; inherits: false; initial-value: #fff; } @property --tw-ring-offset-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } @property --tw-duration { syntax: "*"; inherits: false; } @layer properties { @supports ((-webkit-hyphens: none) and (not (margin-trim: inline))) or ((-moz-orient: inline) and (not (color:rgb(from red r g b)))) { *, ::before, ::after, ::backdrop { --tw-font-weight: initial; --tw-shadow: 0 0 #0000; --tw-shadow-color: initial; --tw-shadow-alpha: 100%; --tw-inset-shadow: 0 0 #0000; --tw-inset-shadow-color: initial; --tw-inset-shadow-alpha: 100%; --tw-ring-color: initial; --tw-ring-shadow: 0 0 #0000; --tw-inset-ring-color: initial; --tw-inset-ring-shadow: 0 0 #0000; --tw-ring-inset: initial; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-offset-shadow: 0 0 #0000; --tw-duration: initial; } } } ================================================ FILE: examples/01-app-demos/bluetooth-scanner/src/main.rs ================================================ use dioxus::prelude::*; fn main() { dioxus::launch(app) } fn app() -> Element { let mut scan = use_action(|| async { use btleplug::api::{Central, Manager as _, Peripheral, ScanFilter}; let manager = btleplug::platform::Manager::new().await?; // get the first bluetooth adapter let adapters = manager.adapters().await?; let central = 
adapters .into_iter() .next() .context("No Bluetooth adapter found")?; // start scanning for devices central.start_scan(ScanFilter::default()).await?; tokio::time::sleep(std::time::Duration::from_secs(2)).await; // Return the list of peripherals after scanning let mut devices = vec![]; for p in central.peripherals().await? { if let Some(p) = p.properties().await? { devices.push(p); } } // Sort them by RSSI (signal strength) devices.sort_by_key(|p| p.rssi.unwrap_or(-100)); dioxus::Ok(devices) }); rsx! { Stylesheet { href: asset!("/assets/tailwind.css") } div { div { class: "py-8 px-6", div { class: "container px-4 mx-auto", h2 { class: "text-2xl font-bold", "Scan for Bluetooth Devices" } button { class: "inline-block w-full md:w-auto px-6 py-3 font-medium text-white bg-indigo-500 hover:bg-indigo-600 rounded transition duration-200", disabled: scan.pending(), onclick: move |_| { scan.call(); }, if scan.pending() { "Scanning" } else { "Scan" } } } } section { class: "py-8", div { class: "container px-4 mx-auto", div { class: "p-4 mb-6 bg-white shadow rounded overflow-x-auto", table { class: "table-auto w-full", thead { tr { class: "text-xs text-gray-500 text-left", th { class: "pl-6 pb-3 font-medium", "Strength" } th { class: "pb-3 font-medium", "Network" } th { class: "pb-3 font-medium", "Channel" } th { class: "pb-3 px-2 font-medium", "Security" } } } match scan.value() { None if scan.pending() => rsx! { "Scanning..." }, None => rsx! { "Press Scan to start scanning" }, Some(Err(_err)) => rsx! { "Failed to scan" }, Some(Ok(peripherals)) => rsx! 
{ tbody { for peripheral in peripherals.read().iter().rev() { tr { class: "text-xs bg-gray-50", td { class: "py-5 px-6 font-medium", "{peripheral.rssi.unwrap_or(-100)}" } td { class: "flex py-3 font-medium", "{peripheral.local_name.clone().unwrap_or_default()}" } td { span { class: "inline-block py-1 px-2 text-white bg-green-500 rounded-full", "{peripheral.address}" } } td { span { class: "inline-block py-1 px-2 text-purple-500 bg-purple-50 rounded-full", "{peripheral.tx_power_level.unwrap_or_default()}" } } } } } } } } } } } } } } ================================================ FILE: examples/01-app-demos/bluetooth-scanner/tailwind.css ================================================ @import "tailwindcss"; @source "./src/**/*.{rs,html,css}"; ================================================ FILE: examples/01-app-demos/calculator.rs ================================================ //! Calculator //! //! This example is a simple iOS-style calculator. Instead of wrapping the state in a single struct like the //! `calculator_mutable` example, this example uses several closures to manage actions with the state. Most //! components will start like this since it's the quickest way to start adding state to your app. The `Signal` type //! in Dioxus is `Copy` - meaning you don't need to clone it to use it in a closure. //! //! Notice how our logic is consolidated into just a few callbacks instead of a single struct. This is a rather organic //! way to start building state management in Dioxus, and it's a great way to start. 
use dioxus::events::*; use dioxus::html::input_data::keyboard_types::Key; use dioxus::prelude::*; const TITLE: &str = "Calculator"; const STYLE: Asset = asset!("/examples/assets/calculator.css"); fn main() { dioxus::LaunchBuilder::new() .with_cfg(desktop!({ use dioxus::desktop::{Config, LogicalSize, WindowBuilder}; Config::new().with_window( WindowBuilder::default() .with_title(TITLE) .with_inner_size(LogicalSize::new(300.0, 525.0)), ) })) .with_cfg(native!({ use dioxus::native::{Config, LogicalSize, WindowAttributes}; Config::new().with_window_attributes( WindowAttributes::default() .with_title(TITLE) .with_inner_size(LogicalSize::new(300.0, 525.0)), ) })) .launch(app); } fn app() -> Element { let mut val = use_signal(|| String::from("0")); let mut input_digit = move |num: String| { if val() == "0" { val.set(String::new()); } val.push_str(num.as_str()); }; let mut input_operator = move |key: &str| val.push_str(key); let handle_key_down_event = move |evt: KeyboardEvent| match evt.key() { Key::Backspace => { if !val().is_empty() { val.pop(); } } Key::Character(character) => match character.as_str() { "+" | "-" | "/" | "*" => input_operator(&character), "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" => input_digit(character), _ => {} }, _ => {} }; rsx! 
{ Stylesheet { href: STYLE } div { id: "wrapper", div { class: "app", div { class: "calculator", tabindex: "0", onkeydown: handle_key_down_event, div { class: "calculator-display", if val().is_empty() { "0" } else { "{val}" } } div { class: "calculator-keypad", div { class: "input-keys", div { class: "function-keys", button { class: "calculator-key key-clear", onclick: move |_| { val.set(String::new()); if !val.cloned().is_empty() { val.set("0".into()); } }, if val.cloned().is_empty() { "C" } else { "AC" } } button { class: "calculator-key key-sign", onclick: move |_| { let new_val = calc_val(val.cloned().as_str()); if new_val > 0.0 { val.set(format!("-{new_val}")); } else { val.set(format!("{}", new_val.abs())); } }, "±" } button { class: "calculator-key key-percent", onclick: move |_| val.set(format!("{}", calc_val(val.cloned().as_str()) / 100.0)), "%" } } div { class: "digit-keys", button { class: "calculator-key key-0", onclick: move |_| input_digit(0.to_string()), "0" } button { class: "calculator-key key-dot", onclick: move |_| val.push('.'), "●" } for k in 1..10 { button { class: "calculator-key {k}", name: "key-{k}", onclick: move |_| input_digit(k.to_string()), "{k}" } } } } div { class: "operator-keys", for (key, class) in [("/", "key-divide"), ("*", "key-multiply"), ("-", "key-subtract"), ("+", "key-add")] { button { class: "calculator-key {class}", onclick: move |_| input_operator(key), "{key}" } } button { class: "calculator-key key-equals", onclick: move |_| val.set(format!("{}", calc_val(val.cloned().as_str()))), "=" } } } } } } } } fn calc_val(val: &str) -> f64 { if val.is_empty() { return 0.0; } let mut temp = String::new(); let mut operation = "+".to_string(); let mut start_index = 0; let mut temp_value; let mut fin_index = 0; if val.len() > 1 && &val[0..1] == "-" { temp_value = String::from("-"); fin_index = 1; start_index += 1; } else { temp_value = String::from(""); } for c in val[fin_index..].chars() { if c == '+' || c == '-' || c == '*' || c 
== '/' { break; } temp_value.push(c); start_index += 1; } let mut result = temp_value.parse::().unwrap(); if start_index + 1 >= val.len() { return result; } for c in val[start_index..].chars() { if c == '+' || c == '-' || c == '*' || c == '/' { if !temp.is_empty() { match &operation as &str { "+" => result += temp.parse::().unwrap(), "-" => result -= temp.parse::().unwrap(), "*" => result *= temp.parse::().unwrap(), "/" => result /= temp.parse::().unwrap(), _ => unreachable!(), }; } operation = c.to_string(); temp = String::new(); } else { temp.push(c); } } if !temp.is_empty() { match &operation as &str { "+" => result += temp.parse::().unwrap(), "-" => result -= temp.parse::().unwrap(), "*" => result *= temp.parse::().unwrap(), "/" => result /= temp.parse::().unwrap(), _ => unreachable!(), }; } result } ================================================ FILE: examples/01-app-demos/calculator_mutable.rs ================================================ //! This example showcases a simple calculator using an approach to state management where the state is composed of only //! a single signal. Since Dioxus implements traditional React diffing, state can be consolidated into a typical Rust struct //! with methods that take `&mut self`. For many use cases, this is a simple way to manage complex state without wrapping //! everything in a signal. //! //! Generally, you'll want to split your state into several signals if you have a large application, but for small //! applications, or focused components, this is a great way to manage state. use dioxus::html::MouseEvent; use dioxus::html::input_data::keyboard_types::Key; use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::new() .with_cfg(desktop! {{ use dioxus::desktop::{Config, LogicalSize, WindowBuilder}; Config::new().with_window( WindowBuilder::new() .with_title("Calculator Demo") .with_resizable(false) .with_inner_size(LogicalSize::new(320.0, 530.0)), ) }}) .with_cfg(native! 
{{ use dioxus::native::{Config, LogicalSize, WindowAttributes}; Config::new().with_window_attributes( WindowAttributes::default() .with_title("Calculator Demo") .with_inner_size(LogicalSize::new(300.0, 525.0)), ) }}) .launch(app); } fn app() -> Element { let mut state = use_signal(Calculator::new); rsx! { Stylesheet { href: asset!("/examples/assets/calculator.css") } div { id: "wrapper", div { class: "app", div { class: "calculator", onkeypress: move |evt| state.write().handle_keydown(evt), div { class: "calculator-display", {state.read().formatted_display()} } div { class: "calculator-keypad", div { class: "input-keys", div { class: "function-keys", CalculatorKey { name: "key-clear", onclick: move |_| state.write().clear_display(), if state.read().display_value == "0" { "C" } else { "AC" } } CalculatorKey { name: "key-sign", onclick: move |_| state.write().toggle_sign(), "±" } CalculatorKey { name: "key-percent", onclick: move |_| state.write().toggle_percent(), "%" } } div { class: "digit-keys", CalculatorKey { name: "key-0", onclick: move |_| state.write().input_digit(0), "0" } CalculatorKey { name: "key-dot", onclick: move |_| state.write().input_dot(), "●" } for k in 1..10 { CalculatorKey { key: "{k}", name: "key-{k}", onclick: move |_| state.write().input_digit(k), "{k}" } } } } div { class: "operator-keys", CalculatorKey { name: "key-divide", onclick: move |_| state.write().set_operator(Operator::Div), "÷" } CalculatorKey { name: "key-multiply", onclick: move |_| state.write().set_operator(Operator::Mul), "×" } CalculatorKey { name: "key-subtract", onclick: move |_| state.write().set_operator(Operator::Sub), "−" } CalculatorKey { name: "key-add", onclick: move |_| state.write().set_operator(Operator::Add), "+" } CalculatorKey { name: "key-equals", onclick: move |_| state.write().perform_operation(), "=" } } } } } } } } #[component] fn CalculatorKey(name: String, onclick: EventHandler, children: Element) -> Element { rsx! 
{ button { class: "calculator-key {name}", onclick, {children} } } } struct Calculator { display_value: String, operator: Option, waiting_for_operand: bool, cur_val: f64, } #[derive(Clone)] enum Operator { Add, Sub, Mul, Div, } impl Calculator { fn new() -> Self { Calculator { display_value: "0".to_string(), operator: None, waiting_for_operand: false, cur_val: 0.0, } } fn formatted_display(&self) -> String { use separator::Separatable; self.display_value .parse::() .unwrap() .separated_string() } fn clear_display(&mut self) { self.display_value = "0".to_string(); } fn input_digit(&mut self, digit: u8) { let content = digit.to_string(); if self.waiting_for_operand || self.display_value == "0" { self.waiting_for_operand = false; self.display_value = content; } else { self.display_value.push_str(content.as_str()); } } fn input_dot(&mut self) { if !self.display_value.contains('.') { self.display_value.push('.'); } } fn perform_operation(&mut self) { if let Some(op) = &self.operator { let rhs = self.display_value.parse::().unwrap(); let new_val = match op { Operator::Add => self.cur_val + rhs, Operator::Sub => self.cur_val - rhs, Operator::Mul => self.cur_val * rhs, Operator::Div => self.cur_val / rhs, }; self.cur_val = new_val; self.display_value = new_val.to_string(); self.operator = None; } } fn toggle_sign(&mut self) { if self.display_value.starts_with('-') { self.display_value = self.display_value.trim_start_matches('-').to_string(); } else { self.display_value = format!("-{}", self.display_value); } } fn toggle_percent(&mut self) { self.display_value = (self.display_value.parse::().unwrap() / 100.0).to_string(); } fn backspace(&mut self) { if !self.display_value.as_str().eq("0") { self.display_value.pop(); } } fn set_operator(&mut self, operator: Operator) { self.operator = Some(operator); self.cur_val = self.display_value.parse::().unwrap(); self.waiting_for_operand = true; } fn handle_keydown(&mut self, evt: KeyboardEvent) { match evt.key() { Key::Backspace => 
self.backspace(), Key::Character(c) => match c.as_str() { "0" => self.input_digit(0), "1" => self.input_digit(1), "2" => self.input_digit(2), "3" => self.input_digit(3), "4" => self.input_digit(4), "5" => self.input_digit(5), "6" => self.input_digit(6), "7" => self.input_digit(7), "8" => self.input_digit(8), "9" => self.input_digit(9), "+" => self.operator = Some(Operator::Add), "-" => self.operator = Some(Operator::Sub), "/" => self.operator = Some(Operator::Div), "*" => self.operator = Some(Operator::Mul), _ => {} }, _ => {} } } } ================================================ FILE: examples/01-app-demos/counters.rs ================================================ //! A simple counters example that stores a list of items in a vec and then iterates over them. use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/counter.css"); fn main() { dioxus::launch(app); } fn app() -> Element { // Store the counters in a signal let mut counters = use_signal(|| vec![0, 0, 0]); // Whenever the counters change, sum them up let sum = use_memo(move || counters.read().iter().copied().sum::()); rsx! { Stylesheet { href: STYLE } div { id: "controls", button { onclick: move |_| counters.push(0), "Add counter" } button { onclick: move |_| { counters.pop(); }, "Remove counter" } } h3 { "Total: {sum}" } // Calling `iter` on a Signal> gives you a GenerationalRef to each entry in the vec // We enumerate to get the idx of each counter, which we use later to modify the vec for (i, counter) in counters.iter().enumerate() { // We need a key to uniquely identify each counter. You really shouldn't be using the index, so we're using // the counter value itself. 
// // If we used the index, and a counter is removed, dioxus would need to re-write the contents of all following // counters instead of simply removing the one that was removed // // You should use a stable identifier for the key, like a unique id or the value of the counter itself li { key: "{i}", button { onclick: move |_| counters.write()[i] -= 1, "-1" } input { r#type: "number", value: "{counter}", oninput: move |e| { if let Ok(value) = e.parsed() { counters.write()[i] = value; } } } button { onclick: move |_| counters.write()[i] += 1, "+1" } button { onclick: move |_| { counters.remove(i); }, "x" } } } } } ================================================ FILE: examples/01-app-demos/crm.rs ================================================ //! Tiny CRM - A simple CRM app using the Router component and global signals //! //! This shows how to use the `Router` component to manage different views in your app. It also shows how to use global //! signals to manage state across the entire app. //! //! We could simply pass the state as a prop to each component, but this is a good example of how to use global state //! in a way that works across pages. //! //! We implement a number of important details here too, like focusing inputs, handling form submits, navigating the router, //! platform-specific configuration, and importing 3rd party CSS libraries. use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::new() .with_cfg(desktop!({ use dioxus::desktop::{LogicalSize, WindowBuilder}; dioxus::desktop::Config::default() .with_window(WindowBuilder::new().with_inner_size(LogicalSize::new(800, 600))) })) .launch(|| { rsx! 
{ Stylesheet { href: "https://unpkg.com/purecss@2.0.6/build/pure-min.css", integrity: "sha384-Uu6IeWbM+gzNVXJcM9XV3SohHtmWE+3VGi496jvgX1jyvDTXfdK+rfZc8C1Aehk5", crossorigin: "anonymous", } Stylesheet { href: asset!("/examples/assets/crm.css") } h1 { "Dioxus CRM Example" } Router:: {} } }); } /// We only have one list of clients for the whole app, so we can use a global signal. static CLIENTS: GlobalSignal> = Signal::global(Vec::new); struct Client { first_name: String, last_name: String, description: String, } /// The pages of the app, each with a route #[derive(Routable, Clone)] enum Route { #[route("/")] List, #[route("/new")] New, #[route("/settings")] Settings, } #[component] fn List() -> Element { rsx! { h2 { "List of Clients" } Link { to: Route::New, class: "pure-button pure-button-primary", "Add Client" } Link { to: Route::Settings, class: "pure-button", "Settings" } for client in CLIENTS.read().iter() { div { class: "client", style: "margin-bottom: 50px", p { "Name: {client.first_name} {client.last_name}" } p { "Description: {client.description}" } } } } } #[component] fn New() -> Element { let mut first_name = use_signal(String::new); let mut last_name = use_signal(String::new); let mut description = use_signal(String::new); let submit_client = move |_| { // Write the client CLIENTS.write().push(Client { first_name: first_name(), last_name: last_name(), description: description(), }); // And then navigate back to the client list router().push(Route::List); }; rsx! 
{ h2 { "Add new Client" } form { class: "pure-form pure-form-aligned", onsubmit: submit_client, fieldset { div { class: "pure-control-group", label { r#for: "first_name", "First Name" } input { id: "first_name", r#type: "text", placeholder: "First Name…", required: true, value: "{first_name}", oninput: move |e| first_name.set(e.value()), // when the form mounts, focus the first name input onmounted: move |e| async move { _ = e.set_focus(true).await; }, } } div { class: "pure-control-group", label { r#for: "last_name", "Last Name" } input { id: "last_name", r#type: "text", placeholder: "Last Name…", required: true, value: "{last_name}", oninput: move |e| last_name.set(e.value()), } } div { class: "pure-control-group", label { r#for: "description", "Description" } textarea { id: "description", placeholder: "Description…", value: "{description}", oninput: move |e| description.set(e.value()), } } div { class: "pure-controls", button { r#type: "submit", class: "pure-button pure-button-primary", "Save" } Link { to: Route::List, class: "pure-button pure-button-primary red", "Cancel" } } } } } } #[component] fn Settings() -> Element { rsx! { h2 { "Settings" } button { class: "pure-button pure-button-primary red", onclick: move |_| { CLIENTS.write().clear(); dioxus::router::router().push(Route::List); }, "Remove all Clients" } Link { to: Route::List, class: "pure-button", "Go back" } } } ================================================ FILE: examples/01-app-demos/dog_app.rs ================================================ //! This example demonstrates a simple app that fetches a list of dog breeds and displays a random dog. //! //! This app combines `use_loader` and `use_action` to fetch data from the Dog API. //! - `use_loader` automatically fetches the list of dog breeds when the component mounts. //! - `use_action` fetches a random dog image whenever the `.dispatch` method is called. 
use dioxus::prelude::*; use serde::{Deserialize, Serialize}; use std::collections::HashMap; fn main() { dioxus::launch(app); } fn app() -> Element { // Fetch the list of breeds from the Dog API, using the `?` syntax to suspend or throw errors let breed_list = use_loader(move || async move { #[derive(Deserialize, Serialize, Debug, PartialEq, Clone)] struct ListBreeds { message: HashMap>, } reqwest::get("https://dog.ceo/api/breeds/list/all") .await? .json::() .await })?; // Whenever this action is called, it will re-run the future and return the result. let mut breed = use_action(move |breed| async move { #[derive(Deserialize, Serialize, Debug, PartialEq)] struct DogApi { message: String, } reqwest::get(format!("https://dog.ceo/api/breed/{breed}/images/random")) .await .unwrap() .json::() .await }); rsx! { h1 { "Doggo selector" } div { width: "400px", for cur_breed in breed_list.read().message.keys().take(20).cloned() { button { onclick: move |_| { breed.call(cur_breed.clone()); }, "{cur_breed}" } } } div { match breed.value() { None => rsx! { div { "Click the button to fetch a dog!" } }, Some(Err(_e)) => rsx! { div { "Failed to fetch a dog, please try again." } }, Some(Ok(res)) => rsx! 
{ img { max_width: "500px", max_height: "500px", src: "{res.read().message}" } }, } } } } ================================================ FILE: examples/01-app-demos/ecommerce-site/.gitignore ================================================ /target ================================================ FILE: examples/01-app-demos/ecommerce-site/Cargo.toml ================================================ [package] name = "ecommerce-site" version = "0.1.1" edition = "2021" publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] dioxus = { workspace = true, features = ["fullstack", "router"] } reqwest = { workspace = true, features = ["json"] } serde = { workspace = true } [target.'cfg(target_family = "wasm")'.dependencies] chrono = { workspace = true, features = ["serde", "wasmbind"] } [target.'cfg(not(target_family = "wasm"))'.dependencies] chrono = { workspace = true, features = ["serde"] } [features] web = ["dioxus/web"] server = ["dioxus/server"] ================================================ FILE: examples/01-app-demos/ecommerce-site/README.md ================================================ # Dioxus Example: An e-commerce site using the FakeStoreAPI This example app is a fullstack web application leveraging the FakeStoreAPI and [Tailwind CSS](https://tailwindcss.com/). ![Demo Image](demo.png) # Development 1. Run the following commands to serve the application: ```bash dx serve ``` Note that in Dioxus 0.7, the Tailwind watcher is initialized automatically if a `tailwind.css` file is found in your app's root. # Status This is a work in progress. 
The following features are currently implemented: - [x] A homepage with a list of products dynamically fetched from the FakeStoreAPI (rendered using SSR) - [x] A product detail page with details about a product (rendered using LiveView) - [ ] A cart page - [ ] A checkout page - [ ] A login page ================================================ FILE: examples/01-app-demos/ecommerce-site/public/loading.css ================================================ @keyframes spin { 0% { transform: rotate(0deg); } 100% { transform: rotate(360deg); } } .spinner { width: 10px; height: 10px; border: 4px solid #f3f3f3; border-top: 4px solid #3498db; border-radius: 50%; animation: spin 2s linear infinite; } ================================================ FILE: examples/01-app-demos/ecommerce-site/public/tailwind.css ================================================ /*! tailwindcss v4.1.0 | MIT License | https://tailwindcss.com */ @supports ((-webkit-hyphens: none) and (not (margin-trim: inline))) or ((-moz-orient: inline) and (not (color:rgb(from red r g b)))) { @layer base { *, ::before, ::after, ::backdrop { --tw-translate-x: 0; --tw-translate-y: 0; --tw-translate-z: 0; --tw-rotate-x: rotateX(0); --tw-rotate-y: rotateY(0); --tw-rotate-z: rotateZ(0); --tw-skew-x: skewX(0); --tw-skew-y: skewY(0); --tw-border-style: solid; --tw-font-weight: initial; --tw-shadow: 0 0 #0000; --tw-shadow-color: initial; --tw-shadow-alpha: 100%; --tw-inset-shadow: 0 0 #0000; --tw-inset-shadow-color: initial; --tw-inset-shadow-alpha: 100%; --tw-ring-color: initial; --tw-ring-shadow: 0 0 #0000; --tw-inset-ring-color: initial; --tw-inset-ring-shadow: 0 0 #0000; --tw-ring-inset: initial; --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-offset-shadow: 0 0 #0000; --tw-duration: initial; } } } @layer theme, base, components, utilities; @layer theme { :root, :host { --font-sans: ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; 
--font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; --color-orange-300: oklch(83.7% 0.128 66.29); --color-orange-400: oklch(75% 0.183 55.934); --color-blue-300: oklch(80.9% 0.105 251.813); --color-gray-50: oklch(98.5% 0.002 247.839); --color-gray-100: oklch(96.7% 0.003 264.542); --color-gray-200: oklch(92.8% 0.006 264.531); --color-gray-400: oklch(70.7% 0.022 261.325); --color-gray-500: oklch(55.1% 0.027 264.364); --color-gray-600: oklch(44.6% 0.03 256.802); --color-gray-700: oklch(37.3% 0.034 259.733); --color-gray-800: oklch(27.8% 0.033 256.848); --color-white: #fff; --spacing: 0.25rem; --container-sm: 24rem; --container-md: 28rem; --container-xl: 36rem; --container-2xl: 42rem; --text-xs: 0.75rem; --text-xs--line-height: calc(1 / 0.75); --text-2xl: 1.5rem; --text-2xl--line-height: calc(2 / 1.5); --text-3xl: 1.875rem; --text-3xl--line-height: calc(2.25 / 1.875); --text-5xl: 3rem; --text-5xl--line-height: 1; --text-6xl: 3.75rem; --text-6xl--line-height: 1; --font-weight-semibold: 600; --font-weight-bold: 700; --radius-md: 0.375rem; --radius-lg: 0.5rem; --default-transition-duration: 150ms; --default-transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); --default-font-family: var(--font-sans); --default-mono-font-family: var(--font-mono); } } @layer base { *, ::after, ::before, ::backdrop, ::file-selector-button { box-sizing: border-box; margin: 0; padding: 0; border: 0 solid; } html, :host { line-height: 1.5; -webkit-text-size-adjust: 100%; tab-size: 4; font-family: var(--default-font-family, ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"); font-feature-settings: var(--default-font-feature-settings, normal); font-variation-settings: var(--default-font-variation-settings, normal); -webkit-tap-highlight-color: transparent; } hr { height: 0; color: inherit; border-top-width: 1px; } abbr:where([title]) { -webkit-text-decoration: underline 
dotted; text-decoration: underline dotted; } h1, h2, h3, h4, h5, h6 { font-size: inherit; font-weight: inherit; } a { color: inherit; -webkit-text-decoration: inherit; text-decoration: inherit; } b, strong { font-weight: bolder; } code, kbd, samp, pre { font-family: var(--default-mono-font-family, ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace); font-feature-settings: var(--default-mono-font-feature-settings, normal); font-variation-settings: var(--default-mono-font-variation-settings, normal); font-size: 1em; } small { font-size: 80%; } sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } sub { bottom: -0.25em; } sup { top: -0.5em; } table { text-indent: 0; border-color: inherit; border-collapse: collapse; } :-moz-focusring { outline: auto; } progress { vertical-align: baseline; } summary { display: list-item; } ol, ul, menu { list-style: none; } img, svg, video, canvas, audio, iframe, embed, object { display: block; vertical-align: middle; } img, video { max-width: 100%; height: auto; } button, input, select, optgroup, textarea, ::file-selector-button { font: inherit; font-feature-settings: inherit; font-variation-settings: inherit; letter-spacing: inherit; color: inherit; border-radius: 0; background-color: transparent; opacity: 1; } :where(select:is([multiple], [size])) optgroup { font-weight: bolder; } :where(select:is([multiple], [size])) optgroup option { padding-inline-start: 20px; } ::file-selector-button { margin-inline-end: 4px; } ::placeholder { opacity: 1; } @supports (not (-webkit-appearance: -apple-pay-button)) or (contain-intrinsic-size: 1px) { ::placeholder { color: color-mix(in oklab, currentColor 50%, transparent); } } textarea { resize: vertical; } ::-webkit-search-decoration { -webkit-appearance: none; } ::-webkit-date-and-time-value { min-height: 1lh; text-align: inherit; } ::-webkit-datetime-edit { display: inline-flex; } 
::-webkit-datetime-edit-fields-wrapper { padding: 0; } ::-webkit-datetime-edit, ::-webkit-datetime-edit-year-field, ::-webkit-datetime-edit-month-field, ::-webkit-datetime-edit-day-field, ::-webkit-datetime-edit-hour-field, ::-webkit-datetime-edit-minute-field, ::-webkit-datetime-edit-second-field, ::-webkit-datetime-edit-millisecond-field, ::-webkit-datetime-edit-meridiem-field { padding-block: 0; } :-moz-ui-invalid { box-shadow: none; } button, input:where([type="button"], [type="reset"], [type="submit"]), ::file-selector-button { appearance: button; } ::-webkit-inner-spin-button, ::-webkit-outer-spin-button { height: auto; } [hidden]:where(:not([hidden="until-found"])) { display: none !important; } } @layer utilities { .absolute { position: absolute; } .fixed { position: fixed; } .relative { position: relative; } .inset-0 { inset: calc(var(--spacing) * 0); } .top-0 { top: calc(var(--spacing) * 0); } .top-1\/2 { top: calc(1/2 * 100%); } .right-0 { right: calc(var(--spacing) * 0); } .bottom-0 { bottom: calc(var(--spacing) * 0); } .left-0 { left: calc(var(--spacing) * 0); } .z-50 { z-index: 50; } .container { width: 100%; @media (width >= 40rem) { max-width: 40rem; } @media (width >= 48rem) { max-width: 48rem; } @media (width >= 64rem) { max-width: 64rem; } @media (width >= 80rem) { max-width: 80rem; } @media (width >= 96rem) { max-width: 96rem; } } .m-0 { margin: calc(var(--spacing) * 0); } .m-2 { margin: calc(var(--spacing) * 2); } .-mx-4 { margin-inline: calc(var(--spacing) * -4); } .mx-auto { margin-inline: auto; } .mt-2 { margin-top: calc(var(--spacing) * 2); } .mr-1 { margin-right: calc(var(--spacing) * 1); } .mr-2 { margin-right: calc(var(--spacing) * 2); } .mr-3 { margin-right: calc(var(--spacing) * 3); } .mr-6 { margin-right: calc(var(--spacing) * 6); } .mr-8 { margin-right: calc(var(--spacing) * 8); } .mr-10 { margin-right: calc(var(--spacing) * 10); } .mr-12 { margin-right: calc(var(--spacing) * 12); } .mr-14 { margin-right: calc(var(--spacing) * 14); } 
.mr-16 { margin-right: calc(var(--spacing) * 16); } .mr-auto { margin-right: auto; } .mb-4 { margin-bottom: calc(var(--spacing) * 4); } .mb-6 { margin-bottom: calc(var(--spacing) * 6); } .mb-8 { margin-bottom: calc(var(--spacing) * 8); } .mb-10 { margin-bottom: calc(var(--spacing) * 10); } .mb-12 { margin-bottom: calc(var(--spacing) * 12); } .mb-14 { margin-bottom: calc(var(--spacing) * 14); } .mb-16 { margin-bottom: calc(var(--spacing) * 16); } .mb-24 { margin-bottom: calc(var(--spacing) * 24); } .ml-8 { margin-left: calc(var(--spacing) * 8); } .block { display: block; } .flex { display: flex; } .hidden { display: none; } .inline-block { display: inline-block; } .inline-flex { display: inline-flex; } .h-2 { height: calc(var(--spacing) * 2); } .h-6 { height: calc(var(--spacing) * 6); } .h-8 { height: calc(var(--spacing) * 8); } .h-9 { height: calc(var(--spacing) * 9); } .h-40 { height: calc(var(--spacing) * 40); } .h-full { height: 100%; } .w-1\/2 { width: calc(1/2 * 100%); } .w-1\/4 { width: calc(1/4 * 100%); } .w-1\/6 { width: calc(1/6 * 100%); } .w-2 { width: calc(var(--spacing) * 2); } .w-5\/6 { width: calc(5/6 * 100%); } .w-6 { width: calc(var(--spacing) * 6); } .w-8 { width: calc(var(--spacing) * 8); } .w-12 { width: calc(var(--spacing) * 12); } .w-full { width: 100%; } .max-w-2xl { max-width: var(--container-2xl); } .max-w-md { max-width: var(--container-md); } .max-w-sm { max-width: var(--container-sm); } .max-w-xl { max-width: var(--container-xl); } .shrink-0 { flex-shrink: 0; } .translate-1\/2 { --tw-translate-x: calc(1/2 * 100%); --tw-translate-y: calc(1/2 * 100%); translate: var(--tw-translate-x) var(--tw-translate-y); } .transform { transform: var(--tw-rotate-x) var(--tw-rotate-y) var(--tw-rotate-z) var(--tw-skew-x) var(--tw-skew-y); } .cursor-pointer { cursor: pointer; } .flex-col { flex-direction: column; } .flex-row { flex-direction: row; } .flex-wrap { flex-wrap: wrap; } .place-items-center { place-items: center; } .items-center { align-items: 
center; } .justify-between { justify-content: space-between; } .self-center { align-self: center; } .overflow-y-auto { overflow-y: auto; } .rounded { border-radius: 0.25rem; } .rounded-full { border-radius: calc(infinity * 1px); } .rounded-lg { border-radius: var(--radius-lg); } .rounded-md { border-radius: var(--radius-md); } .border { border-style: var(--tw-border-style); border-width: 1px; } .border-0 { border-style: var(--tw-border-style); border-width: 0px; } .border-r { border-right-style: var(--tw-border-style); border-right-width: 1px; } .border-b { border-bottom-style: var(--tw-border-style); border-bottom-width: 1px; } .border-b-2 { border-bottom-style: var(--tw-border-style); border-bottom-width: 2px; } .border-l { border-left-style: var(--tw-border-style); border-left-width: 1px; } .border-gray-200 { border-color: var(--color-gray-200); } .border-transparent { border-color: transparent; } .bg-gray-50 { background-color: var(--color-gray-50); } .bg-gray-100 { background-color: var(--color-gray-100); } .bg-gray-800 { background-color: var(--color-gray-800); } .bg-orange-300 { background-color: var(--color-orange-300); } .bg-white { background-color: var(--color-white); } .object-cover { object-fit: cover; } .object-scale-down { object-fit: scale-down; } .p-2 { padding: calc(var(--spacing) * 2); } .p-10 { padding: calc(var(--spacing) * 10); } .px-2 { padding-inline: calc(var(--spacing) * 2); } .px-4 { padding-inline: calc(var(--spacing) * 4); } .px-6 { padding-inline: calc(var(--spacing) * 6); } .px-8 { padding-inline: calc(var(--spacing) * 8); } .px-10 { padding-inline: calc(var(--spacing) * 10); } .px-12 { padding-inline: calc(var(--spacing) * 12); } .py-2 { padding-block: calc(var(--spacing) * 2); } .py-4 { padding-block: calc(var(--spacing) * 4); } .py-5 { padding-block: calc(var(--spacing) * 5); } .py-6 { padding-block: calc(var(--spacing) * 6); } .py-8 { padding-block: calc(var(--spacing) * 8); } .py-20 { padding-block: calc(var(--spacing) * 20); } 
.pr-10 { padding-right: calc(var(--spacing) * 10); } .pb-10 { padding-bottom: calc(var(--spacing) * 10); } .pl-4 { padding-left: calc(var(--spacing) * 4); } .pl-6 { padding-left: calc(var(--spacing) * 6); } .text-center { text-align: center; } .text-left { text-align: left; } .text-2xl { font-size: var(--text-2xl); line-height: var(--tw-leading, var(--text-2xl--line-height)); } .text-3xl { font-size: var(--text-3xl); line-height: var(--tw-leading, var(--text-3xl--line-height)); } .text-5xl { font-size: var(--text-5xl); line-height: var(--tw-leading, var(--text-5xl--line-height)); } .text-xs { font-size: var(--text-xs); line-height: var(--tw-leading, var(--text-xs--line-height)); } .font-bold { --tw-font-weight: var(--font-weight-bold); font-weight: var(--font-weight-bold); } .font-semibold { --tw-font-weight: var(--font-weight-semibold); font-weight: var(--font-weight-semibold); } .text-ellipsis { text-overflow: ellipsis; } .text-blue-300 { color: var(--color-blue-300); } .text-gray-400 { color: var(--color-gray-400); } .text-gray-500 { color: var(--color-gray-500); } .text-gray-600 { color: var(--color-gray-600); } .text-white { color: var(--color-white); } .uppercase { text-transform: uppercase; } .placeholder-gray-400 { &::placeholder { color: var(--color-gray-400); } } .opacity-25 { opacity: 25%; } .shadow-2xl { --tw-shadow: 0 25px 50px -12px var(--tw-shadow-color, rgb(0 0 0 / 0.25)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } .shadow-lg { --tw-shadow: 0 10px 15px -3px var(--tw-shadow-color, rgb(0 0 0 / 0.1)), 0 4px 6px -4px var(--tw-shadow-color, rgb(0 0 0 / 0.1)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } .ring-1 { --tw-ring-shadow: var(--tw-ring-inset,) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color, currentColor); box-shadow: var(--tw-inset-shadow), 
var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } .transition { transition-property: color, background-color, border-color, outline-color, text-decoration-color, fill, stroke, --tw-gradient-from, --tw-gradient-via, --tw-gradient-to, opacity, box-shadow, transform, translate, scale, rotate, filter, -webkit-backdrop-filter, backdrop-filter; transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); transition-duration: var(--tw-duration, var(--default-transition-duration)); } .transition-all { transition-property: all; transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); transition-duration: var(--tw-duration, var(--default-transition-duration)); } .duration-200 { --tw-duration: 200ms; transition-duration: 200ms; } .hover\:bg-orange-400 { &:hover { @media (hover: hover) { background-color: var(--color-orange-400); } } } .hover\:text-gray-600 { &:hover { @media (hover: hover) { color: var(--color-gray-600); } } } .hover\:text-gray-700 { &:hover { @media (hover: hover) { color: var(--color-gray-700); } } } .hover\:shadow-2xl { &:hover { @media (hover: hover) { --tw-shadow: 0 25px 50px -12px var(--tw-shadow-color, rgb(0 0 0 / 0.25)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } } } .hover\:ring-4 { &:hover { @media (hover: hover) { --tw-ring-shadow: var(--tw-ring-inset,) 0 0 0 calc(4px + var(--tw-ring-offset-width)) var(--tw-ring-color, currentColor); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } } } .focus\:border-blue-300 { &:focus { border-color: var(--color-blue-300); } } .focus\:ring-blue-300 { &:focus { --tw-ring-color: var(--color-blue-300); } } .focus\:ring-transparent { &:focus { --tw-ring-color: transparent; } } .focus\:outline-hidden { &:focus { --tw-outline-style: 
none; outline-style: none; @media (forced-colors: active) { outline: 2px solid transparent; outline-offset: 2px; } } } .md\:mb-0 { @media (width >= 48rem) { margin-bottom: calc(var(--spacing) * 0); } } .md\:w-1\/2 { @media (width >= 48rem) { width: calc(1/2 * 100%); } } .md\:w-auto { @media (width >= 48rem) { width: auto; } } .md\:text-right { @media (width >= 48rem) { text-align: right; } } .md\:text-6xl { @media (width >= 48rem) { font-size: var(--text-6xl); line-height: var(--tw-leading, var(--text-6xl--line-height)); } } .lg\:pl-20 { @media (width >= 64rem) { padding-left: calc(var(--spacing) * 20); } } .xl\:mx-auto { @media (width >= 80rem) { margin-inline: auto; } } .xl\:mb-0 { @media (width >= 80rem) { margin-bottom: calc(var(--spacing) * 0); } } .xl\:block { @media (width >= 80rem) { display: block; } } .xl\:flex { @media (width >= 80rem) { display: flex; } } .xl\:hidden { @media (width >= 80rem) { display: none; } } .xl\:inline-block { @media (width >= 80rem) { display: inline-block; } } .xl\:w-2\/3 { @media (width >= 80rem) { width: calc(2/3 * 100%); } } } @property --tw-translate-x { syntax: "*"; inherits: false; initial-value: 0; } @property --tw-translate-y { syntax: "*"; inherits: false; initial-value: 0; } @property --tw-translate-z { syntax: "*"; inherits: false; initial-value: 0; } @property --tw-rotate-x { syntax: "*"; inherits: false; initial-value: rotateX(0); } @property --tw-rotate-y { syntax: "*"; inherits: false; initial-value: rotateY(0); } @property --tw-rotate-z { syntax: "*"; inherits: false; initial-value: rotateZ(0); } @property --tw-skew-x { syntax: "*"; inherits: false; initial-value: skewX(0); } @property --tw-skew-y { syntax: "*"; inherits: false; initial-value: skewY(0); } @property --tw-border-style { syntax: "*"; inherits: false; initial-value: solid; } @property --tw-font-weight { syntax: "*"; inherits: false; } @property --tw-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; } @property --tw-shadow-color { 
syntax: "*"; inherits: false; }
@property --tw-shadow-alpha { syntax: "<percentage>"; inherits: false; initial-value: 100%; }
@property --tw-inset-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; }
@property --tw-inset-shadow-color { syntax: "*"; inherits: false; }
@property --tw-inset-shadow-alpha { syntax: "<percentage>"; inherits: false; initial-value: 100%; }
@property --tw-ring-color { syntax: "*"; inherits: false; }
@property --tw-ring-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; }
@property --tw-inset-ring-color { syntax: "*"; inherits: false; }
@property --tw-inset-ring-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; }
@property --tw-ring-inset { syntax: "*"; inherits: false; }
@property --tw-ring-offset-width { syntax: "<length>"; inherits: false; initial-value: 0px; }
@property --tw-ring-offset-color { syntax: "*"; inherits: false; initial-value: #fff; }
@property --tw-ring-offset-shadow { syntax: "*"; inherits: false; initial-value: 0 0 #0000; }
@property --tw-duration { syntax: "*"; inherits: false; }
================================================ FILE: examples/01-app-demos/ecommerce-site/src/api.rs ================================================
use dioxus::prelude::Result;
use serde::{Deserialize, Serialize};
use std::fmt::Display;

/// Fetch a single product by id from the Fake Store API.
///
/// NOTE(review): the previous comment claimed responses were cached for 60
/// seconds, but no caching exists here — every call issues a fresh HTTP GET.
pub(crate) async fn fetch_product(product_id: usize) -> Result<Product> {
    Ok(
        reqwest::get(format!("https://fakestoreapi.com/products/{product_id}"))
            .await?
            .json()
            .await?,
    )
}

/// Fetch up to `count` products ordered according to `sort`.
///
/// Like `fetch_product`, this performs an uncached HTTP request on each call.
pub(crate) async fn fetch_products(count: usize, sort: Sort) -> Result<Vec<Product>> {
    Ok(reqwest::get(format!(
        "https://fakestoreapi.com/products/?sort={sort}&limit={count}"
    ))
    .await?
    .json()
    .await?)
}

/// A single product as returned by the Fake Store API.
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug, Default)]
pub(crate) struct Product {
    pub(crate) id: u32,
    pub(crate) title: String,
    pub(crate) price: f32,
    pub(crate) description: String,
    pub(crate) category: String,
    pub(crate) image: String,
    pub(crate) rating: Rating,
}

/// Aggregate review data for a product.
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug, Default)]
pub(crate) struct Rating {
    // Average rating; the API is expected to keep this in 0.0..=5.0,
    // but we defend against out-of-range values when rendering.
    pub(crate) rate: f32,
    // Number of ratings submitted.
    pub(crate) count: u32,
}

impl Display for Rating {
    /// Render the rating as e.g. "★★★★☆ (4.1) (220 ratings)".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Clamp the filled-star count to at most 5 so an out-of-range `rate`
        // from the API (e.g. 5.6, which rounds to 6) cannot underflow the
        // `5 - rounded` subtraction below and panic. Negative floats already
        // saturate to 0 under `as usize`.
        let rounded = (self.rate.round() as usize).min(5);
        for _ in 0..rounded {
            "★".fmt(f)?;
        }
        for _ in 0..(5 - rounded) {
            "☆".fmt(f)?;
        }
        write!(f, " ({:01}) ({} ratings)", self.rate, self.count)?;
        Ok(())
    }
}

/// Sort order forwarded to the Fake Store API's `sort` query parameter.
#[allow(unused)]
#[derive(Clone, Copy, Hash, PartialEq, Eq, PartialOrd)]
pub(crate) enum Sort {
    Descending,
    Ascending,
}

impl Display for Sort {
    // The textual form doubles as the literal query-string value.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Sort::Descending => write!(f, "desc"),
            Sort::Ascending => write!(f, "asc"),
        }
    }
}
================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/error.rs ================================================
use dioxus::prelude::*;

/// Fallback page shown when an unrecoverable error occurs.
#[component]
pub fn error_page() -> Element {
    rsx! {
        section { class: "py-20",
            div { class: "container mx-auto px-4",
                div { class: "flex flex-wrap -mx-4 mb-24 text-center", "An internal error has occurred" }
            }
        }
    }
}
================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/home.rs ================================================
// The homepage is statically rendered, so we don't need a persistent websocket connection.
use crate::{
    api::{fetch_products, Sort},
    components::nav::Nav,
    components::product_item::ProductItem,
};
use dioxus::prelude::*;

/// Home page: loads the first ten products (ascending) and renders one
/// `ProductItem` card per product. `?` suspends until the loader resolves.
pub(crate) fn Home() -> Element {
    let products = use_loader(|| fetch_products(10, Sort::Ascending))?;
    rsx!
/* Home page body: navigation bar followed by a flat list of product cards. */ { Nav {} section { class: "p-10", for product in products.iter() { ProductItem { product: product.clone() } } } } } ================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/loading.rs ================================================ use dioxus::prelude::*; /* Wraps `children` in a SuspenseBoundary that shows the CSS spinner from loading.css while any descendant is still suspended. */ #[component] pub(crate) fn ChildrenOrLoading(children: Element) -> Element { rsx! { Stylesheet { href: asset!("/public/loading.css") } SuspenseBoundary { fallback: |_| rsx! { div { class: "spinner", } }, {children} } } } ================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/nav.rs ================================================ use dioxus::prelude::*; /* Top navigation bar: desktop links, logo, search box and cart/user icons, plus a hidden mobile drawer menu (continued below). All links are placeholder "/" hrefs. */ #[component] pub fn Nav() -> Element { rsx! { section { class: "relative", nav { class: "flex justify-between border-b", div { class: "px-12 py-8 flex w-full items-center", a { class: "hidden xl:block mr-16", href: "/", icons::cart_icon {} } ul { class: "hidden xl:flex font-semibold font-heading", li { class: "mr-12", a { class: "hover:text-gray-600", href: "/", "Category" } } li { class: "mr-12", a { class: "hover:text-gray-600", href: "/", "Collection" } } li { class: "mr-12", a { class: "hover:text-gray-600", href: "/", "Story" } } li { a { class: "hover:text-gray-600", href: "/", "Brand" } } } a { class: "shrink-0 xl:mx-auto text-3xl font-bold font-heading", href: "/", img { class: "h-9", width: "auto", alt: "", src: "https://shuffle.dev/yofte-assets/logos/yofte-logo.svg", } } div { class: "hidden xl:inline-block mr-14", input { class: "py-5 px-8 w-full placeholder-gray-400 text-xs uppercase font-semibold font-heading bg-gray-50 border border-gray-200 focus:ring-blue-300 focus:border-blue-300 rounded-md", placeholder: "Search", r#type: "text", } } div { class: "hidden xl:flex items-center", a { class: "mr-10 hover:text-gray-600", href: "", icons::icon_1 {} } a { class: "flex items-center hover:text-gray-600", href: "/", icons::icon_2 {} span { class: 
"inline-block w-6 h-6 text-center bg-gray-50 rounded-full font-semibold font-heading", "3" } } } } a { class: "hidden xl:flex items-center px-12 border-l font-semibold font-heading hover:text-gray-600", href: "/", icons::icon_3 {} span { "Sign In" } } a { class: "xl:hidden flex mr-6 items-center text-gray-600", href: "/", icons::icon_4 {} span { class: "inline-block w-6 h-6 text-center bg-gray-50 rounded-full font-semibold font-heading", "3" } } a { class: "navbar-burger self-center mr-12 xl:hidden", href: "/", icons::icon_5 {} } } div { class: "hidden navbar-menu fixed top-0 left-0 bottom-0 w-5/6 max-w-sm z-50", div { class: "navbar-backdrop fixed inset-0 bg-gray-800 opacity-25", } nav { class: "relative flex flex-col py-6 px-6 w-full h-full bg-white border-r overflow-y-auto", div { class: "flex items-center mb-8", a { class: "mr-auto text-3xl font-bold font-heading", href: "/", img { class: "h-9", src: "https://shuffle.dev/yofte-assets/logos/yofte-logo.svg", width: "auto", alt: "", } } button { class: "navbar-close", icons::icon_6 {} } } div { class: "flex mb-8 justify-between", a { class: "inline-flex items-center font-semibold font-heading", href: "/", icons::icon_7 {} span { "Sign In" } } div { class: "flex items-center", a { class: "mr-10", href: "/", icons::icon_8 {} } a { class: "flex items-center", href: "/", icons::icon_9 {} span { class: "inline-block w-6 h-6 text-center bg-gray-100 rounded-full font-semibold font-heading", "3" } } } } input { class: "block mb-10 py-5 px-8 bg-gray-100 rounded-md border-transparent focus:ring-blue-300 focus:border-blue-300 focus:outline-hidden", r#type: "search", placeholder: "Search", } ul { class: "text-3xl font-bold font-heading", li { class: "mb-8", a { href: "/", "Category" } } li { class: "mb-8", a { href: "/", "Collection" } } li { class: "mb-8", a { href: "/", "Story" } } li { a { href: "/", "Brand" } } } } } } } } mod icons { use super::*; pub(super) fn cart_icon() -> Element { rsx! 
{ svg { class: "mr-3", fill: "none", xmlns: "http://www.w3.org/2000/svg", view_box: "0 0 23 23", width: "23", height: "23", path { stroke_linejoin: "round", d: "M18.1159 8.72461H2.50427C1.99709 8.72461 1.58594 9.12704 1.58594 9.62346V21.3085C1.58594 21.8049 1.99709 22.2074 2.50427 22.2074H18.1159C18.6231 22.2074 19.0342 21.8049 19.0342 21.3085V9.62346C19.0342 9.12704 18.6231 8.72461 18.1159 8.72461Z", stroke: "currentColor", stroke_linecap: "round", stroke_width: "1.5", } path { stroke: "currentColor", stroke_linecap: "round", d: "M6.34473 6.34469V4.95676C6.34473 3.85246 6.76252 2.79338 7.5062 2.01252C8.24988 1.23165 9.25852 0.792969 10.3102 0.792969C11.362 0.792969 12.3706 1.23165 13.1143 2.01252C13.858 2.79338 14.2758 3.85246 14.2758 4.95676V6.34469", stroke_width: "1.5", stroke_linejoin: "round", } } } } pub(super) fn icon_1() -> Element { rsx! { svg { xmlns: "http://www.w3.org/2000/svg", height: "20", view_box: "0 0 23 20", width: "23", fill: "none", path { d: "M11.4998 19.2061L2.70115 9.92527C1.92859 9.14433 1.41864 8.1374 1.24355 7.04712C1.06847 5.95684 1.23713 4.8385 1.72563 3.85053V3.85053C2.09464 3.10462 2.63366 2.45803 3.29828 1.96406C3.9629 1.47008 4.73408 1.14284 5.5483 1.00931C6.36252 0.875782 7.19647 0.939779 7.98144 1.19603C8.7664 1.45228 9.47991 1.89345 10.0632 2.48319L11.4998 3.93577L12.9364 2.48319C13.5197 1.89345 14.2332 1.45228 15.0182 1.19603C15.8031 0.939779 16.6371 0.875782 17.4513 1.00931C18.2655 1.14284 19.0367 1.47008 19.7013 1.96406C20.3659 2.45803 20.905 3.10462 21.274 3.85053V3.85053C21.7625 4.8385 21.9311 5.95684 21.756 7.04712C21.581 8.1374 21.071 9.14433 20.2984 9.92527L11.4998 19.2061Z", stroke: "currentColor", stroke_width: "1.5", stroke_linejoin: "round", stroke_linecap: "round", } } } } pub(super) fn icon_2() -> Element { rsx! 
{ svg { class: "mr-3", fill: "none", height: "31", xmlns: "http://www.w3.org/2000/svg", width: "32", view_box: "0 0 32 31", path { stroke_linejoin: "round", stroke_width: "1.5", d: "M16.0006 16.3154C19.1303 16.3154 21.6673 13.799 21.6673 10.6948C21.6673 7.59064 19.1303 5.07422 16.0006 5.07422C12.871 5.07422 10.334 7.59064 10.334 10.6948C10.334 13.799 12.871 16.3154 16.0006 16.3154Z", stroke_linecap: "round", stroke: "currentColor", } path { stroke_width: "1.5", d: "M24.4225 23.8963C23.6678 22.3507 22.4756 21.0445 20.9845 20.1298C19.4934 19.2151 17.7647 18.7295 15.9998 18.7295C14.2349 18.7295 12.5063 19.2151 11.0152 20.1298C9.52406 21.0445 8.33179 22.3507 7.57715 23.8963", stroke: "currentColor", stroke_linecap: "round", stroke_linejoin: "round", } } } } pub(super) fn icon_3() -> Element { rsx! { svg { class: "h-2 w-2 text-gray-500 cursor-pointer", height: "10", width: "10", xmlns: "http://www.w3.org/2000/svg", fill: "none", view_box: "0 0 10 10", path { stroke_width: "1.5", stroke_linejoin: "round", d: "M9.00002 1L1 9.00002M1.00003 1L9.00005 9.00002", stroke: "black", stroke_linecap: "round", } } } } pub(super) fn icon_4() -> Element { rsx! 
{ svg { view_box: "0 0 20 12", fill: "none", width: "20", xmlns: "http://www.w3.org/2000/svg", height: "12", path { d: "M1 2H19C19.2652 2 19.5196 1.89464 19.7071 1.70711C19.8946 1.51957 20 1.26522 20 1C20 0.734784 19.8946 0.48043 19.7071 0.292893C19.5196 0.105357 19.2652 0 19 0H1C0.734784 0 0.48043 0.105357 0.292893 0.292893C0.105357 0.48043 0 0.734784 0 1C0 1.26522 0.105357 1.51957 0.292893 1.70711C0.48043 1.89464 0.734784 2 1 2ZM19 10H1C0.734784 10 0.48043 10.1054 0.292893 10.2929C0.105357 10.4804 0 10.7348 0 11C0 11.2652 0.105357 11.5196 0.292893 11.7071C0.48043 11.8946 0.734784 12 1 12H19C19.2652 12 19.5196 11.8946 19.7071 11.7071C19.8946 11.5196 20 11.2652 20 11C20 10.7348 19.8946 10.4804 19.7071 10.2929C19.5196 10.1054 19.2652 10 19 10ZM19 5H1C0.734784 5 0.48043 5.10536 0.292893 5.29289C0.105357 5.48043 0 5.73478 0 6C0 6.26522 0.105357 6.51957 0.292893 6.70711C0.48043 6.89464 0.734784 7 1 7H19C19.2652 7 19.5196 6.89464 19.7071 6.70711C19.8946 6.51957 20 6.26522 20 6C20 5.73478 19.8946 5.48043 19.7071 5.29289C19.5196 5.10536 19.2652 5 19 5Z", fill: "#8594A5", } } } } pub(super) fn icon_5() -> Element { rsx! { svg { class: "mr-2", fill: "none", xmlns: "http://www.w3.org/2000/svg", width: "23", height: "23", view_box: "0 0 23 23", path { stroke_width: "1.5", stroke_linecap: "round", stroke_linejoin: "round", d: "M18.1159 8.72461H2.50427C1.99709 8.72461 1.58594 9.12704 1.58594 9.62346V21.3085C1.58594 21.8049 1.99709 22.2074 2.50427 22.2074H18.1159C18.6231 22.2074 19.0342 21.8049 19.0342 21.3085V9.62346C19.0342 9.12704 18.6231 8.72461 18.1159 8.72461Z", stroke: "currentColor", } path { d: "M6.34473 6.34469V4.95676C6.34473 3.85246 6.76252 2.79338 7.5062 2.01252C8.24988 1.23165 9.25852 0.792969 10.3102 0.792969C11.362 0.792969 12.3706 1.23165 13.1143 2.01252C13.858 2.79338 14.2758 3.85246 14.2758 4.95676V6.34469", stroke_linejoin: "round", stroke_width: "1.5", stroke_linecap: "round", stroke: "currentColor", } } } } pub(super) fn icon_6() -> Element { rsx! 
{ svg { class: "mr-3", height: "31", xmlns: "http://www.w3.org/2000/svg", view_box: "0 0 32 31", width: "32", fill: "none", path { stroke: "currentColor", stroke_width: "1.5", d: "M16.0006 16.3154C19.1303 16.3154 21.6673 13.799 21.6673 10.6948C21.6673 7.59064 19.1303 5.07422 16.0006 5.07422C12.871 5.07422 10.334 7.59064 10.334 10.6948C10.334 13.799 12.871 16.3154 16.0006 16.3154Z", stroke_linecap: "round", stroke_linejoin: "round", } path { stroke_linecap: "round", stroke_width: "1.5", stroke: "currentColor", stroke_linejoin: "round", d: "M24.4225 23.8963C23.6678 22.3507 22.4756 21.0445 20.9845 20.1298C19.4934 19.2151 17.7647 18.7295 15.9998 18.7295C14.2349 18.7295 12.5063 19.2151 11.0152 20.1298C9.52406 21.0445 8.33179 22.3507 7.57715 23.8963", } } } } pub(super) fn icon_7() -> Element { rsx! { svg { class: "mr-3", view_box: "0 0 23 23", fill: "none", height: "23", width: "23", xmlns: "http://www.w3.org/2000/svg", path { stroke_linecap: "round", stroke: "currentColor", stroke_width: "1.5", stroke_linejoin: "round", d: "M18.1159 8.72461H2.50427C1.99709 8.72461 1.58594 9.12704 1.58594 9.62346V21.3085C1.58594 21.8049 1.99709 22.2074 2.50427 22.2074H18.1159C18.6231 22.2074 19.0342 21.8049 19.0342 21.3085V9.62346C19.0342 9.12704 18.6231 8.72461 18.1159 8.72461Z", } path { d: "M6.34473 6.34469V4.95676C6.34473 3.85246 6.76252 2.79338 7.5062 2.01252C8.24988 1.23165 9.25852 0.792969 10.3102 0.792969C11.362 0.792969 12.3706 1.23165 13.1143 2.01252C13.858 2.79338 14.2758 3.85246 14.2758 4.95676V6.34469", stroke_width: "1.5", stroke_linecap: "round", stroke: "currentColor", stroke_linejoin: "round", } } } } pub(super) fn icon_8() -> Element { rsx! 
{ svg { height: "20", width: "23", fill: "none", view_box: "0 0 23 20", xmlns: "http://www.w3.org/2000/svg", path { d: "M11.4998 19.2061L2.70115 9.92527C1.92859 9.14433 1.41864 8.1374 1.24355 7.04712C1.06847 5.95684 1.23713 4.8385 1.72563 3.85053V3.85053C2.09464 3.10462 2.63366 2.45803 3.29828 1.96406C3.9629 1.47008 4.73408 1.14284 5.5483 1.00931C6.36252 0.875782 7.19647 0.939779 7.98144 1.19603C8.7664 1.45228 9.47991 1.89345 10.0632 2.48319L11.4998 3.93577L12.9364 2.48319C13.5197 1.89345 14.2332 1.45228 15.0182 1.19603C15.8031 0.939779 16.6371 0.875782 17.4513 1.00931C18.2655 1.14284 19.0367 1.47008 19.7013 1.96406C20.3659 2.45803 20.905 3.10462 21.274 3.85053V3.85053C21.7625 4.8385 21.9311 5.95684 21.756 7.04712C21.581 8.1374 21.071 9.14433 20.2984 9.92527L11.4998 19.2061Z", stroke_linejoin: "round", stroke: "currentColor", stroke_width: "1.5", stroke_linecap: "round", } } } } pub(super) fn icon_9() -> Element { rsx! { svg { view_box: "0 0 18 18", xmlns: "http://www.w3.org/2000/svg", width: "18", height: "18", fill: "none", path { fill: "black", d: "M18 15.4688H0V17.7207H18V15.4688Z", } path { fill: "black", d: "M11.0226 7.87402H0V10.126H11.0226V7.87402Z", } path { fill: "black", d: "M18 0.279297H0V2.53127H18V0.279297Z", } } } } } ================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/product_item.rs ================================================ use dioxus::prelude::*; use crate::api::Product; #[component] pub(crate) fn ProductItem(product: Product) -> Element { let Product { id, title, price, category, image, rating, .. } = product; rsx! 
{
    section { class: "h-40 p-2 m-2 shadow-lg ring-1 rounded-lg flex flex-row place-items-center hover:ring-4 hover:shadow-2xl transition-all duration-200",
        img { class: "object-scale-down w-1/6 h-full", src: "{image}", }
        div { class: "pl-4 text-left text-ellipsis",
            a { href: "/details/{id}", class: "w-full text-center", "{title}" }
            p { class: "w-full", "{rating}" }
            p { class: "w-full", "{category}" }
            p { class: "w-1/4", "${price}" }
        }
    }
}
}
================================================ FILE: examples/01-app-demos/ecommerce-site/src/components/product_page.rs ================================================
use std::{fmt::Display, str::FromStr};

use crate::api::{fetch_product, Product};
use dioxus::prelude::*;

/// Product detail page for the product identified by `product_id`.
#[component]
pub fn ProductPage(product_id: ReadSignal<usize>) -> Element {
    // Quantity selected in the +/- stepper; starts at 1.
    let mut quantity = use_signal(|| 1);
    // Currently selected size; defaults to `Size::Medium`.
    let mut size = use_signal(Size::default);
    // Load the product; `?` suspends/propagates until the data is available.
    let product = use_loader(move || fetch_product(product_id()))?;
    let Product {
        title,
        price,
        description,
        category,
        image,
        rating,
        ..
    } = product();
    rsx!
{ section { class: "py-20", div { class: "container mx-auto px-4", div { class: "flex flex-wrap -mx-4 mb-24", div { class: "w-full md:w-1/2 px-4 mb-8 md:mb-0", div { class: "relative mb-10", style: "height: 564px;", a { class: "absolute top-1/2 left-0 ml-8 transform translate-1/2", href: "#", icons::icon_0 {} } img { class: "object-cover w-full h-full", alt: "", src: "{image}", } a { class: "absolute top-1/2 right-0 mr-8 transform translate-1/2", href: "#", icons::icon_1 {} } } } div { class: "w-full md:w-1/2 px-4", div { class: "lg:pl-20", div { class: "mb-10 pb-10 border-b", h2 { class: "mt-2 mb-6 max-w-xl text-5xl md:text-6xl font-bold font-heading", "{title}" } div { class: "mb-8", "{rating}" } p { class: "inline-block mb-8 text-2xl font-bold font-heading text-blue-300", span { "${price}" } } p { class: "max-w-md text-gray-500", "{description}" } } div { class: "flex mb-12", div { class: "mr-6", span { class: "block mb-4 font-bold font-heading text-gray-400 uppercase", "QTY" } div { class: "inline-flex items-center px-4 font-semibold font-heading text-gray-500 border border-gray-200 focus:ring-blue-300 focus:border-blue-300 rounded-md", button { class: "py-2 hover:text-gray-700", onclick: move |_| quantity += 1, icons::icon_2 {} } input { class: "w-12 m-0 px-2 py-4 text-center md:text-right border-0 focus:ring-transparent focus:outline-hidden rounded-md", placeholder: "1", r#type: "number", value: "{quantity}", oninput: move |evt| if let Ok(as_number) = evt.value().parse() { quantity.set(as_number) }, } button { class: "py-2 hover:text-gray-700", onclick: move |_| quantity -= 1, icons::icon_3 {} } } } div { span { class: "block mb-4 font-bold font-heading text-gray-400 uppercase", "Size" } select { class: "pl-6 pr-10 py-4 font-semibold font-heading text-gray-500 border border-gray-200 focus:ring-blue-300 focus:border-blue-300 rounded-md", id: "", name: "", onchange: move |evt| { if let Ok(new_size) = evt.value().parse() { size.set(new_size); } }, option { 
// NOTE(review): these option values were previously "1"/"2"/"3", which
// `Size::from_str` can never parse (it only accepts small/medium/large), so
// the `onchange` handler's `size.set` never fired. Use the size names as the
// option values so the round-trip through `evt.value().parse()` succeeds.
value: "Medium", "Medium" } option { value: "Small", "Small" } option { value: "Large", "Large" } } } } div { class: "flex flex-wrap -mx-4 mb-14 items-center", div { class: "w-full xl:w-2/3 px-4 mb-4 xl:mb-0", a { class: "block bg-orange-300 hover:bg-orange-400 text-center text-white font-bold font-heading py-5 px-8 rounded-md uppercase transition duration-200", href: "#", "Add to cart" } } } div { class: "flex items-center", span { class: "mr-8 text-gray-500 font-bold font-heading uppercase", "SHARE IT" } a { class: "mr-1 w-8 h-8", href: "#", img { alt: "", src: "https://shuffle.dev/yofte-assets/buttons/facebook-circle.svg", } } a { class: "mr-1 w-8 h-8", href: "#", img { alt: "", src: "https://shuffle.dev/yofte-assets/buttons/instagram-circle.svg", } } a { class: "w-8 h-8", href: "#", img { src: "https://shuffle.dev/yofte-assets/buttons/twitter-circle.svg", alt: "", } } } } } } div { ul { class: "flex flex-wrap mb-16 border-b-2", li { class: "w-1/2 md:w-auto", a { class: "inline-block py-6 px-10 bg-white text-gray-500 font-bold font-heading shadow-2xl", href: "#", "Description" } } li { class: "w-1/2 md:w-auto", a { class: "inline-block py-6 px-10 text-gray-500 font-bold font-heading", href: "#", "Customer reviews" } } li { class: "w-1/2 md:w-auto", a { class: "inline-block py-6 px-10 text-gray-500 font-bold font-heading", href: "#", "Shipping & returns" } } li { class: "w-1/2 md:w-auto", a { class: "inline-block py-6 px-10 text-gray-500 font-bold font-heading", href: "#", "Brand" } } } h3 { class: "mb-8 text-3xl font-bold font-heading text-blue-300", "{category}" } p { class: "max-w-2xl text-gray-500", "{description}" } } } } } }

/// Garment size offered on the product page; `Medium` is the default.
#[derive(Default)]
enum Size {
    Small,
    #[default]
    Medium,
    Large,
}

impl Display for Size {
    // Lower-case names; these round-trip through `Size::from_str`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Size::Small => "small".fmt(f),
            Size::Medium => "medium".fmt(f),
            Size::Large => "large".fmt(f),
        }
    }
}

impl FromStr for Size {
    type Err = ();

    /// Case-insensitive parse of "small"/"medium"/"large".
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use
Size::*; match s.to_lowercase().as_str() { "small" => Ok(Small), "medium" => Ok(Medium), "large" => Ok(Large), _ => Err(()), } } } mod icons { use super::*; pub(super) fn icon_0() -> Element { rsx! { svg { class: "w-6 h-6", view_box: "0 0 24 23", xmlns: "http://www.w3.org/2000/svg", height: "23", fill: "none", width: "24", path { stroke: "black", fill: "black", d: "M2.01328 18.9877C2.05682 16.7902 2.71436 12.9275 6.3326 9.87096L6.33277 9.87116L6.33979 9.86454L6.3398 9.86452C6.34682 9.85809 8.64847 7.74859 13.4997 7.74859C13.6702 7.74859 13.8443 7.75111 14.0206 7.757L14.0213 7.75702L14.453 7.76978L14.6331 7.77511V7.59486V3.49068L21.5728 10.5736L14.6331 17.6562V13.6558V13.5186L14.4998 13.4859L14.1812 13.4077C14.1807 13.4075 14.1801 13.4074 14.1792 13.4072M2.01328 18.9877L14.1792 13.4072M2.01328 18.9877C7.16281 11.8391 14.012 13.3662 14.1792 13.4072M2.01328 18.9877L14.1792 13.4072M23.125 10.6961L23.245 10.5736L23.125 10.4512L13.7449 0.877527L13.4449 0.571334V1V6.5473C8.22585 6.54663 5.70981 8.81683 5.54923 8.96832C-0.317573 13.927 0.931279 20.8573 0.946581 20.938L0.946636 20.9383L1.15618 22.0329L1.24364 22.4898L1.47901 22.0885L2.041 21.1305L2.04103 21.1305C4.18034 17.4815 6.71668 15.7763 8.8873 15.0074C10.9246 14.2858 12.6517 14.385 13.4449 14.4935V20.1473V20.576L13.7449 20.2698L23.125 10.6961Z", stroke_width: "0.35", } } } } pub(super) fn icon_1() -> Element { rsx! 
{ svg { class: "w-6 h-6", height: "27", view_box: "0 0 27 27", fill: "none", width: "27", xmlns: "http://www.w3.org/2000/svg", path { d: "M13.4993 26.2061L4.70067 16.9253C3.9281 16.1443 3.41815 15.1374 3.24307 14.0471C3.06798 12.9568 3.23664 11.8385 3.72514 10.8505V10.8505C4.09415 10.1046 4.63318 9.45803 5.29779 8.96406C5.96241 8.47008 6.73359 8.14284 7.54782 8.00931C8.36204 7.87578 9.19599 7.93978 9.98095 8.19603C10.7659 8.45228 11.4794 8.89345 12.0627 9.48319L13.4993 10.9358L14.9359 9.48319C15.5192 8.89345 16.2327 8.45228 17.0177 8.19603C17.8026 7.93978 18.6366 7.87578 19.4508 8.00931C20.265 8.14284 21.0362 8.47008 21.7008 8.96406C22.3654 9.45803 22.9045 10.1046 23.2735 10.8505V10.8505C23.762 11.8385 23.9306 12.9568 23.7556 14.0471C23.5805 15.1374 23.0705 16.1443 22.298 16.9253L13.4993 26.2061Z", stroke: "black", stroke_width: "1.5", stroke_linecap: "round", stroke_linejoin: "round", } } } } pub(super) fn icon_2() -> Element { rsx! { svg { view_box: "0 0 12 12", height: "12", width: "12", fill: "none", xmlns: "http://www.w3.org/2000/svg", g { opacity: "0.35", rect { height: "12", x: "5", fill: "currentColor", width: "2", } rect { fill: "currentColor", width: "2", height: "12", x: "12", y: "5", transform: "rotate(90 12 5)", } } } } } pub(super) fn icon_3() -> Element { rsx! { svg { width: "12", fill: "none", view_box: "0 0 12 2", height: "2", xmlns: "http://www.w3.org/2000/svg", g { opacity: "0.35", rect { transform: "rotate(90 12 0)", height: "12", fill: "currentColor", x: "12", width: "2", } } } } } } ================================================ FILE: examples/01-app-demos/ecommerce-site/src/main.rs ================================================ #![allow(non_snake_case)] use components::home::Home; use components::loading::ChildrenOrLoading; use dioxus::prelude::*; mod components { pub mod error; pub mod home; pub mod loading; pub mod nav; pub mod product_item; pub mod product_page; } mod api; fn main() { dioxus::launch(|| { rsx! 
{
    document::Link { rel: "stylesheet", href: asset!("/public/tailwind.css") }
    ChildrenOrLoading {
        // `Router` takes the app's route enum as a type parameter; the
        // angle-bracketed generic was lost in extraction and is restored here.
        Router::<Route> {}
    }
} });
}

/// All routes of the e-commerce demo: the home page and a per-product
/// details page addressed by its numeric id.
#[derive(Clone, Routable, Debug, PartialEq)]
enum Route {
    #[route("/")]
    Home {},
    #[route("/details/:product_id")]
    Details { product_id: usize },
}

#[component]
/// Render a more sophisticated page with ssr
fn Details(product_id: usize) -> Element {
    rsx! {
        div {
            components::nav::Nav {}
            components::product_page::ProductPage { product_id }
        }
    }
}
================================================ FILE: examples/01-app-demos/ecommerce-site/tailwind.css ================================================
@import "tailwindcss";
@source "./src/**/*.{rs,html,css}";
================================================ FILE: examples/01-app-demos/file-explorer/.gitignore ================================================
# Generated by Cargo
# will have compiled files and executables
/target/
/dist/
/static/
/.dioxus/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
================================================ FILE: examples/01-app-demos/file-explorer/Cargo.toml ================================================
[package]
name = "file-explorer"
edition = "2021"
version = "0.1.0"
publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
dioxus = { workspace = true }
open = { workspace = true }
[features]
default = ["desktop"]
desktop = ["dioxus/desktop"]
native = ["dioxus/native"]
================================================ FILE: examples/01-app-demos/file-explorer/Dioxus.toml ================================================
[application]
# App (Project) Name
name = "file-explorer"
# Dioxus App Default Platform
# desktop, web
default_platform = "desktop"
# `build` & `serve` dist path
out_dir = "dist"
#
assets file folder asset_dir = "assets" [web.app] # HTML title tag content title = "file-explorer" [web.watcher] # when watcher trigger, regenerate the `index.html` reload_html = true # which files or dirs will be watcher monitoring watch_path = ["src", "assets"] # include `assets` in web platform [web.resource] # CSS style file style = [] # Javascript code file script = [] [web.resource.dev] # Javascript code file # serve: [dev-server] only script = [] ================================================ FILE: examples/01-app-demos/file-explorer/README.md ================================================ # File-explorer with Rust and Dioxus This example shows how a Dioxus App can directly leverage system calls and libraries to bridge native functionality with the WebView renderer. ![example](./assets/image.png) ## To run this example: ``` dx serve ``` ================================================ FILE: examples/01-app-demos/file-explorer/assets/fileexplorer.css ================================================ * { margin: 0; padding: 0; font-family: 'Roboto', sans-serif; user-select: none; transition: .2s all; } body { padding-top: 77px; } /* header { position: fixed; top: 0; left: 0; right: 0; z-index: 10; padding: 20px; background-color: #2196F3; color: white; } header h1 { float: left; font-size: 20px; font-weight: 400; } header .material-icons { float: right; cursor: pointer; } header .icon-menu { float: left; margin-right: 20px; } */ main { padding: 20px 50px; } .folder * { width: 100px; } .folder { float: left; width: 100px; height: 152px; /* //padding: 20px; */ margin-right: 50px; margin-bottom: 70px; border-radius: 2px; /* //overflow: hidden; */ cursor: pointer; } .folder:hover h1 { display: none; } .folder:hover p.cooltip { opacity: 1; top: 0; } .folder * { text-align: center; } .folder i { margin: 0; font-size: 100px; color: #607D8B; } .folder h1 { position: relative; display: block; top: -37px; font-size: 20px; font-weight: 400; } .folder p.cooltip { 
position: relative; top: 5px; left: -50%; margin-left: 35px; background: #212121; font-size: 15px; color: white; border-radius: 4px; padding: 10px 20px; padding-right: 30px; width: 100px; opacity: 0; } .folder p.cooltip:before { content: ''; position: absolute; display: block; top: -4px; left: 50%; margin-left: -5px; height: 10px; width: 10px; border-radius: 2px; background-color: #212121; transform: rotate(45deg); } div.properties { position: fixed; top: 0; right: 0; bottom: 0; z-index: 10; width: 300px; background-color: white; } div.properties:before { content: ''; position: fixed; top: 0; left: 0; right: 300px; bottom: 0; background-color: #212121; opacity: .5; overflow: hidden; } div.properties img { position: relative; top: -1px; left: -1px; width: 110%; height: 200px; filter: blur(2px); } div.properties h1 { position: relative; width: 100%; text-align: left; margin-left: 20px; color: white; } header { position: fixed; top: 0; left: 0; right: 0; padding: 20px; background-color: #2196F3; color: white; display: flex; align-items: center; } header h1 { font-weight: 400; } header span { flex: 1; } header i { margin: 0 10px; cursor: pointer; } header i:nth-child(1) { margin: 0 20px; } ================================================ FILE: examples/01-app-demos/file-explorer/src/main.rs ================================================ //! Example: File Explorer //! //! This is a fun little desktop application that lets you explore the file system. //! //! This example is interesting because it's mixing filesystem operations and GUI, which is typically hard for UI to do. //! We store the state entirely in a single signal, making the explorer logic fairly easy to reason about. use std::env::current_dir; use std::path::PathBuf; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut files = use_signal(Files::new); rsx! 
{ Stylesheet { href: asset!("/assets/fileexplorer.css") } Stylesheet { href: "https://fonts.googleapis.com/icon?family=Material+Icons" } div { header { i { class: "material-icons icon-menu", "menu" } h1 { "Files: " {files.read().current()} } span { } i { class: "material-icons", onclick: move |_| files.write().go_up(), "logout" } } main { for (dir_id, path) in files.read().path_names.iter().enumerate() { { let path_end = path.components().next_back().map(|p|p.as_os_str()).unwrap_or(path.as_os_str()).to_string_lossy(); let path_display = path.display(); let is_file = path.is_file(); rsx! { div { class: "folder", key: "{path_display}", i { class: "material-icons", onclick: move |_| files.write().enter_dir(dir_id), if is_file { "description" } else { "folder" } p { class: "cooltip", "0 folders / 0 files" } } h1 { "{path_end}" } } } } } if let Some(err) = files.read().err.as_ref() { div { code { "{err}" } button { onclick: move |_| files.write().clear_err(), "x" } } } } } } } /// A simple little struct to hold the file explorer state /// /// We don't use any fancy signals or memoization here - Dioxus is so fast that even a file explorer can be done with a /// single signal. 
struct Files { current_path: PathBuf, path_names: Vec, err: Option, } impl Files { fn new() -> Self { let mut files = Self { current_path: std::path::absolute(current_dir().unwrap()).unwrap(), path_names: vec![], err: None, }; files.reload_path_list(); files } fn reload_path_list(&mut self) { let paths = match std::fs::read_dir(&self.current_path) { Ok(e) => e, Err(err) => { let err = format!("An error occurred: {err:?}"); self.err = Some(err); return; } }; let collected = paths.collect::>(); // clear the current state self.clear_err(); self.path_names.clear(); for path in collected { self.path_names.push(path.unwrap().path().to_path_buf()); } } fn go_up(&mut self) { self.current_path = match self.current_path.parent() { Some(path) => path.to_path_buf(), None => { self.err = Some("Cannot go up from the root directory".to_string()); return; } }; self.reload_path_list(); } fn enter_dir(&mut self, dir_id: usize) { let path = &self.path_names[dir_id]; if !path.is_dir() { return; } self.current_path.clone_from(path); self.reload_path_list(); } fn current(&self) -> String { self.current_path.display().to_string() } fn clear_err(&mut self) { self.err = None; } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/Cargo.toml ================================================ [package] name = "geolocation-native-plugin" version = "0.1.0" authors = ["Sabin Regmi "] edition = "2021" publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] dioxus = { workspace = true, features = [] } manganis = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } thiserror = { workspace = true } [features] default = ["mobile"] web = ["dioxus/web"] desktop = ["dioxus/desktop"] mobile = ["dioxus/mobile"] ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/Dioxus.toml 
================================================ #:schema ../../../packages/cli/schema.json [bundle] identifier = "com.dioxuslabs.geolocation" publisher = "Dioxus Labs" [ios] deployment_target = "16.2" background_modes = ["location"] [ios.plist] NSSupportsLiveActivities = true [[ios.widget_extensions]] source = "src/ios/widget" display_name = "Location Widget" bundle_id_suffix = "location-widget" deployment_target = "16.2" module_name = "GeolocationPlugin" [android] min_sdk = 24 target_sdk = 34 features = ["android.hardware.location.gps"] [permissions] location = { precision = "fine", description = "Access your precise location to provide location-based services" } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/README.md ================================================ # Geolocation demo A minimal Dioxus application that implements a native plugin. The plugin demonstrated here makes it possible to access the user's geolocation. It does a few things: - Inspect and request location permissions using the native Android/iOS dialogs. - Configure one-shot position requests (high-accuracy toggle + maximum cached age). - Inspect the last reported coordinates, accuracy, altitude, heading, and speed. The example shares the same metadata pipeline as any plugin crate: the native Gradle/Swift artifacts are embedded via linker symbols and bundled automatically by `dx`. ## Running the example ```bash # Inside the repository root dx serve --project examples/01-app-demos/geolocation --platform mobile ``` For Android/iOS you’ll need the respective toolchains installed (Android SDK/NDK, Xcode) so the geolocation crate’s `build.rs` can build the native modules. The UI also works on desktop/web, but location calls will return an error because the plugin only supports mobile targets—those errors are shown inline in the demo. ## Things to try 1. Tap **Check permissions** to see the current OS state (granted/denied/prompt). 2. 
Tap **Request permissions** to trigger the native dialog from within the app. 3. Toggle *High accuracy* and set a *Max cached age* before requesting the current position. 4. Observe the coordinate grid update whenever a new reading arrives, or the error banner if the operation fails (e.g., permissions denied or running on an unsupported platform). ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/assets/main.css ================================================ body { background-color: #05060a; color: #f4f4f5; font-family: 'Inter', 'Segoe UI', sans-serif; margin: 0; min-height: 100vh; display: flex; justify-content: center; padding: calc(16px + env(safe-area-inset-top, 0px)) 0 40px; } .app { width: min(960px, 100%); padding: 0 20px; box-sizing: border-box; } .hero { display: flex; gap: 24px; align-items: center; margin-bottom: 32px; flex-wrap: wrap; } .hero img { width: 200px; max-width: 35%; border-radius: 16px; box-shadow: 0 10px 30px rgba(0, 0, 0, 0.4); } .hero__copy h1 { margin: 0 0 8px; font-size: clamp(28px, 6vw, 36px); } .hero__copy p { margin: 0; line-height: 1.5; color: #c8cad7; } .cards { display: grid; grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); gap: 24px; margin-bottom: 16px; } .card { background: linear-gradient(165deg, rgba(17, 20, 32, 0.95), rgba(6, 7, 16, 0.98)); border: 1px solid #222534; border-radius: 16px; padding: 24px; box-shadow: 0 25px 45px rgba(0, 0, 0, 0.4); } .card h2 { margin-top: 0; font-size: 1.5rem; } .muted { color: #a5a7b6; font-size: 0.95rem; } .button-row { display: flex; flex-wrap: wrap; gap: 12px; margin-top: 16px; } button { border: none; border-radius: 999px; padding: 10px 18px; font-size: 0.95rem; cursor: pointer; transition: background 0.2s ease; } button.primary, button { background: linear-gradient(135deg, #8f63ff, #4d8dff); color: white; box-shadow: 0 10px 25px rgba(77, 141, 255, 0.25); } button.secondary { background: transparent; color: #b3b7cf; 
border: 1px solid #2f3244; } button.full-width { width: 100%; margin-top: 16px; } button.toggle { width: fit-content; background: #1a1d29; border: 1px solid #2c2f40; color: #d8d9e5; } button.toggle--active { background: #23304d; border-color: #4b6cff; color: #ffffff; } .settings { display: flex; flex-direction: column; gap: 12px; margin-top: 16px; } .field { display: flex; flex-direction: column; gap: 6px; } .field input { background: #0b0d13; border: 1px solid #26293a; border-radius: 10px; padding: 10px 12px; color: white; } .status-grid { margin-top: 20px; display: grid; gap: 14px; } .permission-row { display: flex; justify-content: space-between; align-items: center; } .badge { padding: 4px 10px; border-radius: 999px; font-size: 0.85rem; text-transform: uppercase; } .badge--granted { background: rgba(70, 221, 154, 0.15); color: #7efac6; border: 1px solid rgba(70, 221, 154, 0.4); } .badge--denied { background: rgba(255, 98, 98, 0.16); color: #ff8ea0; border: 1px solid rgba(255, 98, 98, 0.4); } .badge--prompt { background: rgba(255, 205, 112, 0.16); color: #ffd27e; border: 1px solid rgba(255, 205, 112, 0.35); } .position { margin-top: 20px; } .position__grid { margin-top: 14px; display: grid; grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); gap: 10px; } .coordinate-row { display: flex; flex-direction: column; gap: 2px; padding: 10px; background: #080a11; border-radius: 12px; border: 1px solid #1c1f2b; } .error-banner { margin-top: 24px; padding: 14px 18px; background: #3c1017; border: 1px solid #a44856; border-radius: 12px; color: #ffe6ea; } @media (max-width: 640px) { .hero { flex-direction: column; text-align: center; } .hero img { max-width: 60%; } .button-row { flex-direction: column; } button { width: 100%; text-align: center; } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/android/build.gradle.kts ================================================ import 
org.gradle.api.tasks.bundling.AbstractArchiveTask

plugins {
    id("com.android.library") version "8.4.2"
    kotlin("android") version "1.9.24"
}

android {
    namespace = "com.dioxus.geolocation"
    compileSdk = 34

    defaultConfig {
        minSdk = 24
        targetSdk = 34
        consumerProguardFiles("consumer-rules.pro")
    }

    buildTypes {
        getByName("release") { isMinifyEnabled = false }
        getByName("debug") { isMinifyEnabled = false }
    }

    compileOptions {
        sourceCompatibility = JavaVersion.VERSION_17
        targetCompatibility = JavaVersion.VERSION_17
    }

    kotlinOptions { jvmTarget = "17" }
}

dependencies {
    implementation("androidx.core:core-ktx:1.12.0")
    implementation("com.google.android.gms:play-services-location:21.3.0")
}

// The reified type argument was stripped by extraction; `withType` needs the
// task type (matching the `AbstractArchiveTask` import above) to compile.
tasks.withType<AbstractArchiveTask>().configureEach { archiveBaseName.set("geolocation-plugin") }
================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/android/consumer-rules.pro ================================================
# Intentionally empty; no consumer Proguard rules required for the geolocation plugin.
================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/android/src/main/AndroidManifest.xml ================================================ ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/android/src/main/kotlin/com/dioxus/geolocation/Geolocation.kt ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT package com.dioxus.geolocation import android.annotation.SuppressLint import android.content.Context import android.location.Location import android.location.LocationManager import android.os.SystemClock import androidx.core.location.LocationManagerCompat import android.util.Log import com.google.android.gms.common.ConnectionResult import com.google.android.gms.common.GoogleApiAvailability import com.google.android.gms.location.LocationServices import com.google.android.gms.location.Priority class Geolocation(private val context: Context) { fun isLocationServicesEnabled(): Boolean { val lm = context.getSystemService(Context.LOCATION_SERVICE) as LocationManager return LocationManagerCompat.isLocationEnabled(lm) } @SuppressWarnings("MissingPermission") fun sendLocation( enableHighAccuracy: Boolean, successCallback: (location: Location) -> Unit, errorCallback: (error: String) -> Unit, ) { val resultCode = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(context) if (resultCode == ConnectionResult.SUCCESS) { val lm = context.getSystemService(Context.LOCATION_SERVICE) as LocationManager if (this.isLocationServicesEnabled()) { var networkEnabled = false try { networkEnabled = lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER) } catch (_: Exception) { Log.e("Geolocation", "isProviderEnabled failed") } val lowPrio = if (networkEnabled) Priority.PRIORITY_BALANCED_POWER_ACCURACY else Priority.PRIORITY_LOW_POWER 
val prio = if (enableHighAccuracy) Priority.PRIORITY_HIGH_ACCURACY else lowPrio Log.d("Geolocation", "Using priority $prio") LocationServices .getFusedLocationProviderClient(context) .getCurrentLocation(prio, null) .addOnFailureListener { e -> e.message?.let { errorCallback(it) } } .addOnSuccessListener { location -> if (location == null) { errorCallback("Location unavailable.") } else { successCallback(location) } } } else { errorCallback("Location disabled.") } } else { errorCallback("Google Play Services unavailable.") } } @SuppressLint("MissingPermission") fun getLastLocation(maximumAge: Long): Location? { var lastLoc: Location? = null val lm = context.getSystemService(Context.LOCATION_SERVICE) as LocationManager for (provider in lm.allProviders) { val tmpLoc = lm.getLastKnownLocation(provider) if (tmpLoc != null) { val locationAge = SystemClock.elapsedRealtimeNanos() - tmpLoc.elapsedRealtimeNanos val maxAgeNano = maximumAge * 1_000_000L if (locationAge <= maxAgeNano && (lastLoc == null || lastLoc.elapsedRealtimeNanos > tmpLoc.elapsedRealtimeNanos)) { lastLoc = tmpLoc } } } return lastLoc } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/android/src/main/kotlin/com/dioxus/geolocation/GeolocationPlugin.kt ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT package com.dioxus.geolocation import android.Manifest import android.app.Activity import android.content.pm.PackageManager import android.location.Location import android.os.Handler import android.os.Looper import android.webkit.WebView import androidx.core.app.ActivityCompat import androidx.core.content.ContextCompat import org.json.JSONObject import java.util.concurrent.CountDownLatch import java.util.concurrent.TimeUnit import java.util.Timer import kotlin.concurrent.schedule class GeolocationPlugin(private val 
activity: Activity) {
    private val geolocation = Geolocation(activity)

    // NOTE(review): the map/list type arguments below were stripped by
    // extraction (a bare `Map` / `mutableListOf()` does not compile); restored
    // from usage — keys are permission names, values the status strings that
    // permissionToStatus() produces ("granted" | "denied" | "prompt").
    fun checkPermissions(): Map<String, String> {
        val response = mutableMapOf<String, String>()
        val coarseStatus = ContextCompat.checkSelfPermission(activity, Manifest.permission.ACCESS_COARSE_LOCATION)
        val fineStatus = ContextCompat.checkSelfPermission(activity, Manifest.permission.ACCESS_FINE_LOCATION)
        response["location"] = permissionToStatus(fineStatus)
        response["coarseLocation"] = permissionToStatus(coarseStatus)
        return response
    }

    // Request any missing location permissions, then report the resulting
    // status map. When a dialog is shown, the status is re-checked after a
    // 1 s delay rather than via the activity result callback.
    fun requestPermissions(callback: (Map<String, String>) -> Unit) {
        val permissionsToRequest = mutableListOf<String>()
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            permissionsToRequest.add(Manifest.permission.ACCESS_FINE_LOCATION)
        }
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            permissionsToRequest.add(Manifest.permission.ACCESS_COARSE_LOCATION)
        }
        if (permissionsToRequest.isEmpty()) {
            callback(checkPermissions())
        } else {
            ActivityCompat.requestPermissions(activity, permissionsToRequest.toTypedArray(), 1001)
            Handler(Looper.getMainLooper()).postDelayed({ callback(checkPermissions()) }, 1000)
        }
    }

    // Resolve a single position: serve a cached fix no older than `maximumAge`
    // ms when available, otherwise request a fresh one with a timeout guard.
    fun getCurrentPosition(
        enableHighAccuracy: Boolean,
        timeout: Long,
        maximumAge: Long,
        successCallback: (Location) -> Unit,
        errorCallback: (String) -> Unit,
    ) {
        val lastLocation = geolocation.getLastLocation(maximumAge)
        if (lastLocation != null) {
            successCallback(lastLocation)
            return
        }
        val timer = Timer()
        timer.schedule(timeout) { activity.runOnUiThread { errorCallback("Timeout waiting for location.") } }
        geolocation.sendLocation(
            enableHighAccuracy,
            { location ->
                timer.cancel()
                successCallback(location)
            },
            { error ->
                timer.cancel()
                errorCallback(error)
            },
        )
    }

    // Map an Android permission result code to the web-style status string.
    private fun permissionToStatus(value: Int): String = when (value) {
        PackageManager.PERMISSION_GRANTED -> "granted"
        PackageManager.PERMISSION_DENIED -> "denied"
        else -> "prompt"
    }

    // ---- Platform bridge helpers
expected by Rust JNI layer ---- // Called by Rust after constructing the plugin. No-op placeholder to match signature. fun load(webView: WebView?) { /* no-op */ } // Serialize current permission status as JSON string fun checkPermissionsJson(): String { val status = checkPermissions() val json = JSONObject() json.put("location", status["location"]) // granted|denied|prompt json.put("coarseLocation", status["coarseLocation"]) // granted|denied|prompt return json.toString() } // Request permissions and return resulting status JSON (waits briefly for result) fun requestPermissionsJson(permissionsJson: String?): String { val latch = CountDownLatch(1) var result: String = checkPermissionsJson() requestPermissions { status -> val json = JSONObject() json.put("location", status["location"]) json.put("coarseLocation", status["coarseLocation"]) result = json.toString() latch.countDown() } // Wait up to 5 seconds for the permission result, then return whatever we have latch.await(5, TimeUnit.SECONDS) return result } // Convert a Location to the Position JSON expected by Rust side private fun locationToPositionJson(location: Location): String { val coords = JSONObject() coords.put("latitude", location.latitude) coords.put("longitude", location.longitude) coords.put("accuracy", location.accuracy.toDouble()) if (location.hasAltitude()) coords.put("altitude", location.altitude) if (android.os.Build.VERSION.SDK_INT >= 26) { val vAcc = try { location.verticalAccuracyMeters } catch (_: Exception) { null } if (vAcc != null) coords.put("altitudeAccuracy", vAcc.toDouble()) } if (location.hasSpeed()) coords.put("speed", location.speed.toDouble()) if (location.hasBearing()) coords.put("heading", location.bearing.toDouble()) val obj = JSONObject() obj.put("timestamp", System.currentTimeMillis()) obj.put("coords", coords) return obj.toString() } // Synchronous wrapper returning JSON for getCurrentPosition // Accepts a JSON string with options: {"enableHighAccuracy": bool, "timeout": 
number, "maximumAge": number} fun getCurrentPositionJson(optionsJson: String?): String { val options = try { if (optionsJson.isNullOrEmpty()) JSONObject() else JSONObject(optionsJson) } catch (e: Exception) { JSONObject() } val enableHighAccuracy = options.optBoolean("enableHighAccuracy", false) val timeout = options.optLong("timeout", 10000L) val maximumAge = options.optLong("maximumAge", 0L) var output: String? = null val latch = CountDownLatch(1) getCurrentPosition( enableHighAccuracy, timeout, maximumAge, { location -> output = locationToPositionJson(location) latch.countDown() }, { error -> output = JSONObject(mapOf("error" to error)).toString() latch.countDown() }, ) // Wait up to the timeout + 2s buffer latch.await(timeout + 2000, TimeUnit.MILLISECONDS) return output ?: JSONObject(mapOf("error" to "Timeout waiting for location.")).toString() } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/.gitignore ================================================ .DS_Store /.build /Packages /*.xcodeproj xcuserdata/ DerivedData/ .swiftpm/config/registries.json .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata .netrc Package.resolved ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/plugin/Package.swift ================================================ // swift-tools-version:5.9 // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT import PackageDescription let package = Package( name: "GeolocationPlugin", platforms: [ .iOS(.v17), // iOS 17+ for latest ActivityKit APIs .macOS(.v14), ], products: [ .library( name: "GeolocationPlugin", type: .static, targets: ["GeolocationPlugin"] ) ], dependencies: [], targets: [ .target( name: "GeolocationPlugin", path: "Sources", linkerSettings: [ .linkedFramework("CoreLocation"), .linkedFramework("Foundation"), 
.linkedFramework("ActivityKit", .when(platforms: [.iOS])), ] ) ] ) ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/plugin/Sources/GeolocationPlugin.swift ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT import CoreLocation import Foundation import Dispatch import ActivityKit /** * Simplified GeolocationPlugin for Dioxus that works without Tauri dependencies. * This can be shared with Tauri plugins with minimal changes. */ @objc(GeolocationPlugin) public class GeolocationPlugin: NSObject, CLLocationManagerDelegate { private let locationManager = CLLocationManager() private var positionCallbacks: [String: (String) -> Void] = [:] override init() { super.init() locationManager.delegate = self } /** * Get current position as JSON string (called from ObjC/Rust) */ @objc public func getCurrentPositionJson(_ optionsJson: String) -> String { // Parse options from JSON guard let optionsData = optionsJson.data(using: .utf8), let optionsDict = try? JSONSerialization.jsonObject(with: optionsData) as? [String: Any] else { let error = ["error": "Invalid options JSON"] return (try? JSONSerialization.data(withJSONObject: error))?.base64EncodedString() ?? "" } let enableHighAccuracy = optionsDict["enableHighAccuracy"] as? Bool ?? false let timeoutMs = optionsDict["timeout"] as? Double ?? 10000 let maximumAgeMs = optionsDict["maximumAge"] as? Double ?? 0 // If we have a recent cached location, return it immediately if let lastLocation = self.locationManager.location { let ageMs = Date().timeIntervalSince(lastLocation.timestamp) * 1000 if maximumAgeMs <= 0 || ageMs <= maximumAgeMs { return self.convertLocationToJson(lastLocation) } } let callbackId = UUID().uuidString let semaphore = DispatchSemaphore(value: 0) var responseJson: String? 
self.positionCallbacks[callbackId] = { result in responseJson = result semaphore.signal() } if enableHighAccuracy { self.locationManager.desiredAccuracy = kCLLocationAccuracyBest } else { self.locationManager.desiredAccuracy = kCLLocationAccuracyKilometer } if CLLocationManager.authorizationStatus() == .notDetermined { self.locationManager.requestWhenInUseAuthorization() } else { self.locationManager.requestLocation() } let timeoutSeconds = max(timeoutMs / 1000.0, 0.1) let deadline = Date().addingTimeInterval(timeoutSeconds) while responseJson == nil && Date() < deadline { let _ = RunLoop.current.run(mode: .default, before: Date().addingTimeInterval(0.05)) if semaphore.wait(timeout: .now()) == .success { break } } if let json = responseJson { return json } else { // Timed out waiting for location self.positionCallbacks.removeValue(forKey: callbackId) let error = ["error": "Timeout waiting for location"] return (try? JSONSerialization.data(withJSONObject: error)).flatMap { String(data: $0, encoding: .utf8) } ?? "{\"error\":\"Timeout waiting for location\"}" } } /** * Check permissions and return JSON string (called from ObjC/Rust) */ @objc public func checkPermissionsJson() -> String { var status: String = "" if CLLocationManager.locationServicesEnabled() { switch CLLocationManager.authorizationStatus() { case .notDetermined: status = "prompt" case .restricted, .denied: status = "denied" case .authorizedAlways, .authorizedWhenInUse: status = "granted" @unknown default: status = "prompt" } } else { let error = ["error": "Location services are not enabled"] return (try? JSONSerialization.data(withJSONObject: error))?.base64EncodedString() ?? "" } let result: [String: String] = ["location": status, "coarseLocation": status] if let jsonData = try? 
JSONSerialization.data(withJSONObject: result), let jsonString = String(data: jsonData, encoding: .utf8) { return jsonString } return "" } /** * Request permissions and return JSON string (called from ObjC/Rust) */ @objc public func requestPermissionsJson(_ permissionsJson: String) -> String { if CLLocationManager.locationServicesEnabled() { if CLLocationManager.authorizationStatus() == .notDetermined { DispatchQueue.main.async { self.locationManager.requestWhenInUseAuthorization() } // Return current status - actual result comes via delegate return self.checkPermissionsJson() } else { return self.checkPermissionsJson() } } else { let error = ["error": "Location services are not enabled"] if let jsonData = try? JSONSerialization.data(withJSONObject: error), let jsonString = String(data: jsonData, encoding: .utf8) { return jsonString } return "" } } // // CLLocationManagerDelegate methods // public func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) { let errorMessage = error.localizedDescription // Notify all position callbacks for (_, callback) in self.positionCallbacks { let errorJson = "{\"error\":\"\(errorMessage)\"}" callback(errorJson) } self.positionCallbacks.removeAll() } public func locationManager( _ manager: CLLocationManager, didUpdateLocations locations: [CLLocation] ) { guard let location = locations.last else { return } let resultJson = self.convertLocationToJson(location) // Notify all position callbacks for (_, callback) in self.positionCallbacks { callback(resultJson) } self.positionCallbacks.removeAll() } public func locationManager( _ manager: CLLocationManager, didChangeAuthorization status: CLAuthorizationStatus ) { if !self.positionCallbacks.isEmpty { self.locationManager.requestLocation() } } // // Internal/Helper methods // private func convertLocationToJson(_ location: CLLocation) -> String { var ret: [String: Any] = [:] var coords: [String: Any] = [:] coords["latitude"] = location.coordinate.latitude 
coords["longitude"] = location.coordinate.longitude coords["accuracy"] = location.horizontalAccuracy coords["altitude"] = location.altitude coords["altitudeAccuracy"] = location.verticalAccuracy coords["speed"] = location.speed coords["heading"] = location.course ret["timestamp"] = Int((location.timestamp.timeIntervalSince1970 * 1000)) ret["coords"] = coords if let jsonData = try? JSONSerialization.data(withJSONObject: ret), let jsonString = String(data: jsonData, encoding: .utf8) { return jsonString } return "{\"error\":\"Failed to serialize location\"}" } // // Live Activity methods // /// Start a Live Activity showing current location /// Returns JSON with activity ID or error @objc public func startLiveActivityJson() -> String { if #available(iOS 16.2, *) { // Check if Live Activities are enabled guard ActivityAuthorizationInfo().areActivitiesEnabled else { return "{\"error\":\"Live Activities are not enabled\"}" } // Get current location guard let location = self.locationManager.location else { return "{\"error\":\"No location available. Request location first.\"}" } let attributes = LocationPermissionAttributes(appName: "Geolocation Demo") let contentState = LocationPermissionAttributes.ContentState( latitude: location.coordinate.latitude, longitude: location.coordinate.longitude, accuracy: location.horizontalAccuracy, speed: location.speed >= 0 ? location.speed : nil, heading: location.course >= 0 ? location.course : nil, lastUpdated: Date() ) do { let activity = try Activity.request( attributes: attributes, content: .init(state: contentState, staleDate: nil), pushType: nil ) let result: [String: Any] = [ "activityId": activity.id, "latitude": location.coordinate.latitude, "longitude": location.coordinate.longitude, "accuracy": location.horizontalAccuracy ] if let jsonData = try? 
JSONSerialization.data(withJSONObject: result), let jsonString = String(data: jsonData, encoding: .utf8) { return jsonString } return "{\"error\":\"Failed to serialize result\"}" } catch { return "{\"error\":\"Failed to start Live Activity: \(error.localizedDescription)\"}" } } else { return "{\"error\":\"Live Activities require iOS 16.2+\"}" } } /// Update the Live Activity with current location @objc public func updateLiveActivityJson(_ statusJson: String) -> String { if #available(iOS 16.2, *) { // Get current location guard let location = self.locationManager.location else { return "{\"error\":\"No location available\"}" } let contentState = LocationPermissionAttributes.ContentState( latitude: location.coordinate.latitude, longitude: location.coordinate.longitude, accuracy: location.horizontalAccuracy, speed: location.speed >= 0 ? location.speed : nil, heading: location.course >= 0 ? location.course : nil, lastUpdated: Date() ) // Update all running activities of this type Task { for activity in Activity.activities { await activity.update( ActivityContent(state: contentState, staleDate: nil) ) } } let result: [String: Any] = [ "latitude": location.coordinate.latitude, "longitude": location.coordinate.longitude, "accuracy": location.horizontalAccuracy ] if let jsonData = try? 
JSONSerialization.data(withJSONObject: result), let jsonString = String(data: jsonData, encoding: .utf8) { return jsonString } return "{\"error\":\"Failed to serialize result\"}" } else { return "{\"error\":\"Live Activities require iOS 16.2+\"}" } } /// End all Live Activities @objc public func endLiveActivityJson() -> String { if #available(iOS 16.2, *) { Task { for activity in Activity.activities { await activity.end(nil, dismissalPolicy: .immediate) } } return "{\"success\":true}" } else { return "{\"error\":\"Live Activities require iOS 16.2+\"}" } } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/plugin/Sources/LocationActivityAttributes.swift ================================================ // Shared ActivityAttributes for Live Activities // This MUST be the single source of truth for both the main app and widget extension import Foundation import ActivityKit /// Live Activity attributes for displaying current location. /// /// This struct is shared between the main app (which starts/updates activities) /// and the widget extension (which renders them on the lock screen). public struct LocationPermissionAttributes: ActivityAttributes { /// Dynamic content that can be updated while the activity is running public struct ContentState: Codable, Hashable { /// Current latitude public var latitude: Double /// Current longitude public var longitude: Double /// Horizontal accuracy in meters public var accuracy: Double /// Current speed in m/s (nil if not available) public var speed: Double? /// Current heading in degrees (nil if not available) public var heading: Double? /// Last update timestamp public var lastUpdated: Date public init( latitude: Double, longitude: Double, accuracy: Double, speed: Double? = nil, heading: Double? 
= nil, lastUpdated: Date ) { self.latitude = latitude self.longitude = longitude self.accuracy = accuracy self.speed = speed self.heading = heading self.lastUpdated = lastUpdated } } /// Static data set when the activity is started (cannot change) public var appName: String public init(appName: String) { self.appName = appName } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/plugin/Tests/PluginTests/PluginTests.swift ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT import XCTest @testable import ExamplePlugin final class ExamplePluginTests: XCTestCase { func testExample() throws { let plugin = ExamplePlugin() } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/widget/Package.swift ================================================ // swift-tools-version:5.9 // Widget Extension for displaying location permission status on lock screen // // IMPORTANT: The target name MUST match the main app's Swift module name. // The plugin uses "GeolocationPlugin" as its package/target name, so the // widget must also use "GeolocationPlugin" for ActivityKit type matching. import PackageDescription let package = Package( name: "GeolocationPlugin", platforms: [ .iOS(.v17), // iOS 17+ for latest ActivityKit APIs ], products: [ // Executable name must be "widget" for the build system to find it // But the TARGET name determines the Swift module name .executable( name: "widget", targets: ["GeolocationPlugin"] ) ], dependencies: [], targets: [ // Target name = Swift module name = "GeolocationPlugin" // This MUST match the main app's Swift plugin module name! 
.executableTarget( name: "GeolocationPlugin", path: "Sources", linkerSettings: [ .linkedFramework("WidgetKit"), .linkedFramework("SwiftUI"), .linkedFramework("ActivityKit"), ] ) ] ) ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/widget/Sources/LocationActivityAttributes.swift ================================================ // Shared ActivityAttributes for Live Activities // This MUST be the single source of truth for both the main app and widget extension import Foundation import ActivityKit /// Live Activity attributes for displaying current location. /// /// This struct is shared between the main app (which starts/updates activities) /// and the widget extension (which renders them on the lock screen). public struct LocationPermissionAttributes: ActivityAttributes { /// Dynamic content that can be updated while the activity is running public struct ContentState: Codable, Hashable { /// Current latitude public var latitude: Double /// Current longitude public var longitude: Double /// Horizontal accuracy in meters public var accuracy: Double /// Current speed in m/s (nil if not available) public var speed: Double? /// Current heading in degrees (nil if not available) public var heading: Double? /// Last update timestamp public var lastUpdated: Date public init( latitude: Double, longitude: Double, accuracy: Double, speed: Double? = nil, heading: Double? 
= nil, lastUpdated: Date ) { self.latitude = latitude self.longitude = longitude self.accuracy = accuracy self.speed = speed self.heading = heading self.lastUpdated = lastUpdated } } /// Static data set when the activity is started (cannot change) public var appName: String public init(appName: String) { self.appName = appName } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/ios/widget/Sources/LocationWidget.swift ================================================ // Widget Extension for Live Activity only // Note: Having multiple widgets in the same bundle can cause Live Activities to show black // See: https://developer.apple.com/forums/thread/807726 import ActivityKit import SwiftUI import WidgetKit @main struct LocationWidgetBundle: WidgetBundle { var body: some Widget { // Only include the Live Activity - other widgets can cause rendering issues LocationPermissionLiveActivity() } } // Helper to get accuracy color func accuracyColor(_ accuracy: Double) -> Color { if accuracy < 10 { return .green } else if accuracy < 50 { return .yellow } else if accuracy < 100 { return .orange } else { return .red } } // Helper to format coordinates with degree symbol func formatCoord(_ value: Double, isLat: Bool) -> String { let direction = isLat ? (value >= 0 ? "N" : "S") : (value >= 0 ? 
"E" : "W") return String(format: "%.5f° %@", abs(value), direction) } // Live Activity widget struct LocationPermissionLiveActivity: Widget { var body: some WidgetConfiguration { ActivityConfiguration(for: LocationPermissionAttributes.self) { context in // Lock screen view - flashy gradient design ZStack { // Gradient background LinearGradient( colors: [ Color(red: 0.1, green: 0.1, blue: 0.2), Color(red: 0.05, green: 0.15, blue: 0.25) ], startPoint: .topLeading, endPoint: .bottomTrailing ) VStack(spacing: 12) { // Header with pulsing indicator HStack(spacing: 12) { // Animated location icon with glow ZStack { Circle() .fill(accuracyColor(context.state.accuracy).opacity(0.3)) .frame(width: 44, height: 44) Circle() .fill(accuracyColor(context.state.accuracy).opacity(0.6)) .frame(width: 32, height: 32) Image(systemName: "location.fill") .font(.system(size: 18, weight: .bold)) .foregroundColor(.white) } VStack(alignment: .leading, spacing: 2) { Text(context.attributes.appName) .font(.headline) .fontWeight(.bold) .foregroundColor(.white) HStack(spacing: 4) { Image(systemName: "antenna.radiowaves.left.and.right") .font(.caption2) Text("LIVE") .font(.caption2) .fontWeight(.bold) } .foregroundColor(accuracyColor(context.state.accuracy)) } Spacer() // Accuracy with animated ring VStack(spacing: 2) { Text("\(Int(context.state.accuracy))") .font(.system(size: 24, weight: .bold, design: .rounded)) .foregroundColor(accuracyColor(context.state.accuracy)) .contentTransition(.numericText()) Text("meters") .font(.caption2) .foregroundColor(.gray) } .padding(.horizontal, 12) .padding(.vertical, 8) .background( RoundedRectangle(cornerRadius: 12) .fill(accuracyColor(context.state.accuracy).opacity(0.15)) .overlay( RoundedRectangle(cornerRadius: 12) .strokeBorder(accuracyColor(context.state.accuracy).opacity(0.5), lineWidth: 1) ) ) } // Coordinates in stylish cards HStack(spacing: 8) { // Latitude card VStack(alignment: .leading, spacing: 4) { HStack(spacing: 4) { Image(systemName: 
"arrow.up.arrow.down") .font(.caption2) Text("LATITUDE") .font(.caption2) .fontWeight(.semibold) } .foregroundColor(.cyan.opacity(0.8)) Text(formatCoord(context.state.latitude, isLat: true)) .font(.system(.callout, design: .monospaced)) .fontWeight(.medium) .foregroundColor(.white) .contentTransition(.numericText()) } .frame(maxWidth: .infinity, alignment: .leading) .padding(10) .background( RoundedRectangle(cornerRadius: 10) .fill(Color.cyan.opacity(0.1)) ) // Longitude card VStack(alignment: .leading, spacing: 4) { HStack(spacing: 4) { Image(systemName: "arrow.left.arrow.right") .font(.caption2) Text("LONGITUDE") .font(.caption2) .fontWeight(.semibold) } .foregroundColor(.purple.opacity(0.8)) Text(formatCoord(context.state.longitude, isLat: false)) .font(.system(.callout, design: .monospaced)) .fontWeight(.medium) .foregroundColor(.white) .contentTransition(.numericText()) } .frame(maxWidth: .infinity, alignment: .leading) .padding(10) .background( RoundedRectangle(cornerRadius: 10) .fill(Color.purple.opacity(0.1)) ) } // Speed and heading row (if available) if let speed = context.state.speed, speed >= 0 { HStack(spacing: 16) { // Speed HStack(spacing: 6) { Image(systemName: "speedometer") .foregroundColor(.orange) Text(String(format: "%.1f m/s", speed)) .font(.system(.caption, design: .monospaced)) .fontWeight(.medium) .foregroundColor(.white) .contentTransition(.numericText()) } // Heading if available if let heading = context.state.heading, heading >= 0 { HStack(spacing: 6) { Image(systemName: "safari") .foregroundColor(.mint) .rotationEffect(.degrees(heading)) Text(String(format: "%.0f°", heading)) .font(.system(.caption, design: .monospaced)) .fontWeight(.medium) .foregroundColor(.white) .contentTransition(.numericText()) } } Spacer() // Last updated Text(context.state.lastUpdated, style: .relative) .font(.caption2) .foregroundColor(.gray) } } } .padding() } .activitySystemActionForegroundColor(.white) } dynamicIsland: { context in DynamicIsland { // 
Expanded regions DynamicIslandExpandedRegion(.leading) { VStack(alignment: .leading, spacing: 4) { ZStack { Circle() .fill(accuracyColor(context.state.accuracy).opacity(0.3)) .frame(width: 36, height: 36) Image(systemName: "location.fill") .foregroundColor(accuracyColor(context.state.accuracy)) .font(.system(size: 16, weight: .bold)) } Text("\(Int(context.state.accuracy))m") .font(.caption2) .fontWeight(.bold) .foregroundColor(accuracyColor(context.state.accuracy)) .contentTransition(.numericText()) } } DynamicIslandExpandedRegion(.center) { VStack(spacing: 6) { // Latitude HStack(spacing: 4) { Text("LAT") .font(.caption2) .foregroundColor(.cyan.opacity(0.7)) Text(String(format: "%.5f°", context.state.latitude)) .font(.system(.caption, design: .monospaced)) .fontWeight(.semibold) .foregroundColor(.cyan) .contentTransition(.numericText()) } // Longitude HStack(spacing: 4) { Text("LON") .font(.caption2) .foregroundColor(.purple.opacity(0.7)) Text(String(format: "%.5f°", context.state.longitude)) .font(.system(.caption, design: .monospaced)) .fontWeight(.semibold) .foregroundColor(.purple) .contentTransition(.numericText()) } } } DynamicIslandExpandedRegion(.trailing) { if let speed = context.state.speed, speed >= 0 { VStack(alignment: .trailing, spacing: 4) { Image(systemName: "speedometer") .foregroundColor(.orange) .font(.caption) Text(String(format: "%.1f", speed)) .font(.system(.caption, design: .rounded)) .fontWeight(.bold) .foregroundColor(.white) .contentTransition(.numericText()) Text("m/s") .font(.caption2) .foregroundColor(.gray) } } else { // Show heading compass if no speed if let heading = context.state.heading, heading >= 0 { VStack(spacing: 2) { Image(systemName: "safari") .font(.title3) .foregroundColor(.mint) .rotationEffect(.degrees(heading)) Text(String(format: "%.0f°", heading)) .font(.caption2) .foregroundColor(.white) } } } } DynamicIslandExpandedRegion(.bottom) { HStack { Text(context.attributes.appName) .font(.caption2) .foregroundColor(.gray) 
Spacer() Text(context.state.lastUpdated, style: .relative) .font(.caption2) .foregroundColor(.gray) } } } compactLeading: { ZStack { Circle() .fill(accuracyColor(context.state.accuracy).opacity(0.3)) .frame(width: 24, height: 24) Image(systemName: "location.fill") .foregroundColor(accuracyColor(context.state.accuracy)) .font(.caption) } } compactTrailing: { Text(String(format: "%.4f°", context.state.latitude)) .font(.system(.caption2, design: .monospaced)) .fontWeight(.medium) .foregroundColor(.cyan) .contentTransition(.numericText()) } minimal: { ZStack { Circle() .fill(accuracyColor(context.state.accuracy).opacity(0.3)) .frame(width: 22, height: 22) Image(systemName: "location.fill") .foregroundColor(accuracyColor(context.state.accuracy)) .font(.system(size: 10, weight: .bold)) } } } } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/main.rs ================================================ //! A simple Dioxus app demonstrating how to build a native plugin using manganis. //! //! This example shows how to use the `#[manganis::ffi]` macro to automatically generate //! FFI bindings between Rust and native platforms (Swift/Kotlin). //! //! It also demonstrates how to use the widget!() macro to bundle a Widget Extension //! for Live Activities on iOS. 
use dioxus::prelude::*; // Import the local plugin module mod plugin; #[cfg(target_os = "ios")] use plugin::LiveActivityResult; use plugin::{Geolocation, PermissionState, PermissionStatus, Position, PositionOptions}; fn main() { dioxus::launch(App); } #[component] fn App() -> Element { let mut geolocation = use_signal(Geolocation::new); let mut permission_status = use_signal(|| None::); let mut last_position = use_signal(|| None::); let mut error = use_signal(|| None::); let mut use_high_accuracy = use_signal(|| true); let mut max_age_input = use_signal(|| String::from("0")); let on_check_permissions = { move |_| match geolocation.write().check_permissions() { Ok(status) => { permission_status.set(Some(status)); error.set(None); } Err(err) => error.set(Some(err.to_string())), } }; let on_request_permissions = move |_| { let mut geo = geolocation.write(); match geo.request_permissions(None) { Ok(_) => match geo.check_permissions() { Ok(status) => { permission_status.set(Some(status)); error.set(None); } Err(err) => error.set(Some(err.to_string())), }, Err(err) => error.set(Some(err.to_string())), } }; let on_toggle_accuracy = move |_| use_high_accuracy.toggle(); let on_max_age_input = move |evt: FormEvent| max_age_input.set(evt.value()); let on_fetch_position = move |_| { let maximum_age = max_age_input.read().trim().parse::().unwrap_or(0); let options = PositionOptions { enable_high_accuracy: use_high_accuracy(), timeout: 10_000, maximum_age, }; match geolocation.write().get_current_position(Some(options)) { Ok(position) => { last_position.set(Some(position)); error.set(None); } Err(err) => error.set(Some(err.to_string())), } }; let accuracy_label = if use_high_accuracy() { "High accuracy: on" } else { "High accuracy: off" }; rsx! { Stylesheet { href: asset!("/assets/main.css") } main { class: "app", header { class: "hero", div { class: "hero__copy", h1 { "Geolocation plugin demo" } p { "One-shot location fetching through the Dioxus geolocation plugin. 
Measure permissions, request access, and inspect the last fix received from the device." } } } div { class: "cards", section { class: "card", h2 { "Permissions" } p { class: "muted", "First, inspect what the OS currently allows this app to do. \ On Android & iOS these calls talk to the native permission dialog APIs." } div { class: "button-row", button { onclick: on_check_permissions, "Check permissions" } button { class: "secondary", onclick: on_request_permissions, "Request permissions" } } match permission_status() { Some(status) => rsx! { div { class: "status-grid", PermissionBadge { label: "Location".to_string(), state: status.location } PermissionBadge { label: "Coarse location".to_string(), state: status.coarse_location } } }, None => rsx!(p { class: "muted", "Tap “Check permissions” to see the current status." }), } } section { class: "card", h2 { "Current position" } p { class: "muted", "The plugin resolves the device location once per request (no background watch). \ Configure the query and then fetch the coordinates." } div { class: "settings", button { class: if use_high_accuracy() { "toggle toggle--active" } else { "toggle" }, onclick: on_toggle_accuracy, "{accuracy_label}" } label { class: "field", span { "Max cached age (ms)" } input { r#type: "number", inputmode: "numeric", min: "0", placeholder: "0", value: "{max_age_input()}", oninput: on_max_age_input, } } } button { class: "primary full-width", onclick: on_fetch_position, "Get current position" } match last_position() { Some(position) => { let snapshot = position.clone(); let coords = snapshot.coords.clone(); rsx! 
{ div { class: "position", h3 { "Latest reading" } p { class: "muted", "Timestamp: {snapshot.timestamp} ms since Unix epoch" } div { class: "position__grid", CoordinateRow { label: "Latitude".to_string(), value: format!("{:.6}", coords.latitude) } CoordinateRow { label: "Longitude".to_string(), value: format!("{:.6}", coords.longitude) } CoordinateRow { label: "Accuracy (m)".to_string(), value: format!("{:.1}", coords.accuracy) } CoordinateRow { label: "Altitude (m)".to_string(), value: format_optional(coords.altitude) } CoordinateRow { label: "Altitude accuracy (m)".to_string(), value: format_optional(coords.altitude_accuracy) } CoordinateRow { label: "Speed (m/s)".to_string(), value: format_optional(coords.speed) } CoordinateRow { label: "Heading (°)".to_string(), value: format_optional(coords.heading) } } } } } None => rsx!(p { class: "muted", "No location fetched yet." }), } } // Live Activity card (iOS only) LiveActivityCard { geolocation, error } } if let Some(message) = error() { div { class: "error-banner", "Last error: {message}" } } } } } #[component] fn PermissionBadge(label: String, state: PermissionState) -> Element { let (text, class) = match state { PermissionState::Granted => ("Granted", "badge badge--granted"), PermissionState::Denied => ("Denied", "badge badge--denied"), PermissionState::Prompt | PermissionState::PromptWithRationale => { ("Needs prompt", "badge badge--prompt") } }; rsx! { div { class: "permission-row", span { class: "muted", "{label}" } span { class: class, "{text}" } } } } #[component] fn CoordinateRow(label: String, value: String) -> Element { rsx! 
{ div { class: "coordinate-row", span { class: "muted", "{label}" } strong { "{value}" } } } } fn format_optional(value: Option) -> String { value .map(|inner| format!("{inner:.2}")) .unwrap_or_else(|| "—".to_string()) } #[cfg(target_os = "ios")] #[component] fn LiveActivityCard( mut geolocation: Signal, mut error: Signal>, ) -> Element { let mut live_activity = use_signal(|| None::); let on_start_live_activity = move |_| match geolocation.write().start_live_activity() { Ok(result) => { live_activity.set(Some(result)); error.set(None); } Err(err) => error.set(Some(err.to_string())), }; let on_update_live_activity = move |_| match geolocation.write().update_live_activity() { Ok(_) => error.set(None), Err(err) => error.set(Some(err.to_string())), }; let on_end_live_activity = move |_| match geolocation.write().end_live_activity() { Ok(_) => { live_activity.set(None); error.set(None); } Err(err) => error.set(Some(err.to_string())), }; rsx! { section { class: "card", h2 { "Live Activity" } p { class: "muted", "Start a Live Activity to show your current location on the lock screen. \ Fetch your position first, then start the activity." } div { class: "button-row", button { onclick: on_start_live_activity, "Start Activity" } button { class: "secondary", onclick: on_update_live_activity, "Update" } button { class: "secondary", onclick: on_end_live_activity, "End" } } match live_activity() { Some(activity) => rsx! { div { class: "status-grid", div { class: "permission-row", span { class: "muted", "Activity ID" } span { "{activity.activity_id}" } } div { class: "permission-row", span { class: "muted", "Latitude" } span { "{activity.latitude:.6}" } } div { class: "permission-row", span { class: "muted", "Longitude" } span { "{activity.longitude:.6}" } } div { class: "permission-row", span { class: "muted", "Accuracy" } span { class: "badge badge--granted", "{activity.accuracy:.1}m" } } } }, None => rsx! { p { class: "muted", "No Live Activity running." 
} }, } } } } #[cfg(not(target_os = "ios"))] #[component] fn LiveActivityCard(geolocation: Signal, error: Signal>) -> Element { VNode::empty() } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/plugin/error.rs ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT use serde::{ser::Serializer, Serialize}; pub type Result = std::result::Result; #[derive(Debug, thiserror::Error)] pub enum Error { /// JSON serialization/deserialization error #[error("JSON error: {0}")] Json(#[from] serde_json::Error), /// Platform bridge error #[error("Platform bridge error: {0}")] PlatformBridge(String), /// Location unavailable #[error("Location unavailable: {0}")] LocationUnavailable(String), /// Live Activity error (iOS 16.1+) #[cfg(target_os = "ios")] #[error("Live Activity error: {0}")] LiveActivity(String), } impl Serialize for Error { fn serialize(&self, serializer: S) -> std::result::Result where S: Serializer, { serializer.serialize_str(self.to_string().as_ref()) } } impl From<&str> for Error { fn from(s: &str) -> Self { Error::PlatformBridge(s.to_string()) } } impl From for Error { fn from(s: String) -> Self { Error::PlatformBridge(s) } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/plugin/mod.rs ================================================ #![allow(non_snake_case)] // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT //! Dioxus Geolocation Plugin //! //! This plugin provides APIs for getting and tracking the device's current position //! on Android and iOS mobile platforms. //! //! This example demonstrates the use of the `#[manganis::ffi]` macro for automatic //! FFI binding generation between Rust and native platforms. 
pub use models::*; mod error; mod models; pub use error::{Error, Result}; // Note: Permissions are now declared in Dioxus.toml using the unified manifest system. // See Dioxus.toml in the project root for the permission configuration: // // [permissions] // location = { precision = "fine", description = "Access your precise location..." } // // The CLI automatically maps these to platform-specific identifiers: // - Android: ACCESS_FINE_LOCATION in AndroidManifest.xml // - iOS: NSLocationWhenInUseUsageDescription in Info.plist /// Access to the geolocation APIs. /// /// This struct provides a unified interface for accessing geolocation functionality /// on both Android and iOS platforms. It uses the `#[manganis::ffi]` macro for /// automatic FFI binding generation. /// /// # Example /// /// ```rust,no_run /// use plugin::{Geolocation, PermissionState, PositionOptions}; /// /// let mut geolocation = Geolocation::new(); /// /// // Check permissions /// let status = geolocation.check_permissions()?; /// if status.location == PermissionState::Prompt { /// let new_status = geolocation.request_permissions(None)?; /// } /// /// // Get current position /// let options = PositionOptions { /// enable_high_accuracy: true, /// timeout: 10000, /// maximum_age: 0, /// }; /// let position = geolocation.get_current_position(Some(options))?; /// println!("Latitude: {}, Longitude: {}", position.coords.latitude, position.coords.longitude); /// /// # Ok::<(), plugin::Error>(()) /// ``` pub struct Geolocation { plugin: Option, } impl Geolocation { /// Create a new Geolocation instance pub fn new() -> Self { Self { plugin: None } } /// Get or initialize the plugin instance fn get_plugin(&mut self) -> Result<&GeolocationPlugin> { if self.plugin.is_none() { self.plugin = Some(GeolocationPlugin::new()?); } Ok(self.plugin.as_ref().unwrap()) } /// Get the device's current position. /// /// # Arguments /// /// * `options` - Optional position options. If `None`, default options are used. 
/// /// # Returns /// /// Returns the current position or an error if the location cannot be obtained. pub fn get_current_position(&mut self, options: Option) -> Result { let options = options.unwrap_or_default(); let options_json = serde_json::to_string(&options).map_err(Error::Json)?; let plugin = self.get_plugin()?; let result_json = getCurrentPositionJson(plugin, options_json)?; // Check for error in response let json_value: serde_json::Value = serde_json::from_str(&result_json).map_err(Error::Json)?; if let Some(error_msg) = json_value.get("error") { return Err(Error::LocationUnavailable( error_msg.as_str().unwrap_or("Unknown error").to_string(), )); } let position: Position = serde_json::from_str(&result_json).map_err(Error::Json)?; Ok(position) } /// Check the current permission status. /// /// # Returns /// /// Returns the permission status for location and coarse location permissions. pub fn check_permissions(&mut self) -> Result { let plugin = self.get_plugin()?; let result_json = checkPermissionsJson(plugin)?; let status: PermissionStatus = serde_json::from_str(&result_json).map_err(Error::Json)?; Ok(status) } /// Request location permissions from the user. /// /// # Arguments /// /// * `permissions` - Optional list of specific permission types to request. /// If `None`, requests all location permissions. /// /// # Returns /// /// Returns the permission status after the user responds to the permission request. 
pub fn request_permissions( &mut self, permissions: Option>, ) -> Result { let perms_json = serde_json::to_string(&permissions).map_err(Error::Json)?; let plugin = self.get_plugin()?; let result_json = requestPermissionsJson(plugin, perms_json)?; let status: PermissionStatus = serde_json::from_str(&result_json).map_err(Error::Json)?; Ok(status) } // ========================================================================= // Live Activity methods (iOS 16.1+) // ========================================================================= /// Start a Live Activity showing the current permission status. /// /// Live Activities appear on the lock screen and Dynamic Island (on supported devices). /// Requires iOS 16.1+ and a Widget Extension for the UI (see docs). /// /// # Returns /// /// Returns the activity ID and current permission status, or an error. #[cfg(target_os = "ios")] pub fn start_live_activity(&mut self) -> Result { let plugin = self.get_plugin()?; let result_json = startLiveActivityJson(plugin)?; // Check for error in response let json_value: serde_json::Value = serde_json::from_str(&result_json).map_err(Error::Json)?; if let Some(error_msg) = json_value.get("error") { return Err(Error::LiveActivity( error_msg.as_str().unwrap_or("Unknown error").to_string(), )); } let result: LiveActivityResult = serde_json::from_str(&result_json).map_err(Error::Json)?; Ok(result) } /// Update the Live Activity with the current permission status. /// /// Call this after permission changes to reflect the new state. 
#[cfg(target_os = "ios")] pub fn update_live_activity(&mut self) -> Result { let plugin = self.get_plugin()?; let result_json = updateLiveActivityJson(plugin, "{}".to_string())?; let json_value: serde_json::Value = serde_json::from_str(&result_json).map_err(Error::Json)?; if let Some(error_msg) = json_value.get("error") { return Err(Error::LiveActivity( error_msg.as_str().unwrap_or("Unknown error").to_string(), )); } let result: LiveActivityUpdate = serde_json::from_str(&result_json).map_err(Error::Json)?; Ok(result) } /// End all Live Activities for this app. #[cfg(target_os = "ios")] pub fn end_live_activity(&mut self) -> Result<()> { let plugin = self.get_plugin()?; let result_json = endLiveActivityJson(plugin)?; let json_value: serde_json::Value = serde_json::from_str(&result_json).map_err(Error::Json)?; if let Some(error_msg) = json_value.get("error") { return Err(Error::LiveActivity( error_msg.as_str().unwrap_or("Unknown error").to_string(), )); } Ok(()) } } impl Default for Geolocation { fn default() -> Self { Self::new() } } /// iOS/macOS native bindings - the macro generates all FFI code automatically. 
/// The path "src/ios/plugin" points to the SwiftPM package containing GeolocationPlugin.swift #[cfg(any(target_os = "ios", target_os = "macos"))] #[manganis::ffi("src/ios/plugin")] extern "Swift" { /// The native GeolocationPlugin class pub type GeolocationPlugin; /// Get current position as JSON string /// Swift signature: func getCurrentPositionJson(_ optionsJson: String) -> String pub fn getCurrentPositionJson(this: &GeolocationPlugin, optionsJson: String) -> String; /// Check permissions and return status as JSON /// Swift signature: func checkPermissionsJson() -> String pub fn checkPermissionsJson(this: &GeolocationPlugin) -> String; /// Request permissions with optional types list as JSON, return status as JSON /// Swift signature: func requestPermissionsJson(_ permissionsJson: String) -> String pub fn requestPermissionsJson(this: &GeolocationPlugin, permissionsJson: String) -> String; /// Start a Live Activity showing permission status (iOS 16.1+) /// Swift signature: func startLiveActivityJson() -> String pub fn startLiveActivityJson(this: &GeolocationPlugin) -> String; /// Update the Live Activity with current permission status /// Swift signature: func updateLiveActivityJson(_ statusJson: String) -> String pub fn updateLiveActivityJson(this: &GeolocationPlugin, statusJson: String) -> String; /// End all Live Activities /// Swift signature: func endLiveActivityJson() -> String pub fn endLiveActivityJson(this: &GeolocationPlugin) -> String; } /// Android native bindings - the macro generates all JNI code automatically. 
/// The path "src/android" points to the Gradle project containing GeolocationPlugin.kt #[cfg(target_os = "android")] #[manganis::ffi("src/android")] extern "Kotlin" { /// The native GeolocationPlugin class pub type GeolocationPlugin; /// Get current position as JSON string /// Kotlin signature: fun getCurrentPositionJson(optionsJson: String): String pub fn getCurrentPositionJson(this: &GeolocationPlugin, optionsJson: String) -> String; /// Check permissions and return status as JSON /// Kotlin signature: fun checkPermissionsJson(): String pub fn checkPermissionsJson(this: &GeolocationPlugin) -> String; /// Request permissions with optional types list as JSON, return status as JSON /// Kotlin signature: fun requestPermissionsJson(permissionsJson: String): String pub fn requestPermissionsJson(this: &GeolocationPlugin, permissionsJson: String) -> String; } // ============================================================================= // Stub for non-native platforms (web, Linux desktop, etc.) 
// ============================================================================= #[cfg(not(any( all(any(target_os = "ios", target_os = "macos")), all(target_os = "android") )))] use fallback::*; #[cfg(not(any( all(any(target_os = "ios", target_os = "macos")), all(target_os = "android") )))] mod fallback { #![allow(non_snake_case)] use super::{Error, Result}; pub struct GeolocationPlugin; impl GeolocationPlugin { pub fn new() -> Result { Err(Error::PlatformBridge( "Geolocation is only supported on Android, iOS, and macOS".to_string(), )) } } pub fn getCurrentPositionJson(_: &GeolocationPlugin, _: String) -> Result { Err(Error::PlatformBridge( "Geolocation is only supported on Android, iOS, and macOS".to_string(), )) } pub fn checkPermissionsJson(_: &GeolocationPlugin) -> Result { Err(Error::PlatformBridge( "Geolocation is only supported on Android, iOS, and macOS".to_string(), )) } pub fn requestPermissionsJson(_: &GeolocationPlugin, _: String) -> Result { Err(Error::PlatformBridge( "Geolocation is only supported on Android, iOS, and macOS".to_string(), )) } } ================================================ FILE: examples/01-app-demos/geolocation-native-plugin/src/plugin/models.rs ================================================ // Copyright 2019-2023 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT use serde::{Deserialize, Serialize}; /// Permission state for geolocation permissions #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] #[derive(Default)] pub enum PermissionState { /// Permission not yet determined (user hasn't been asked) #[default] Prompt, /// Permission prompt shown with rationale (Android 12+) PromptWithRationale, /// Permission granted Granted, /// Permission denied Denied, } #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PermissionStatus { /// Permission state for the 
location alias. /// /// On Android it requests/checks both ACCESS_COARSE_LOCATION and ACCESS_FINE_LOCATION permissions. /// /// On iOS it requests/checks location permissions. pub location: PermissionState, /// Permissions state for the coarseLocation alias. /// /// On Android it requests/checks ACCESS_COARSE_LOCATION. /// /// On Android 12+, users can choose between Approximate location (ACCESS_COARSE_LOCATION) and Precise location (ACCESS_FINE_LOCATION). /// /// On iOS it will have the same value as the `location` alias. pub coarse_location: PermissionState, } #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PositionOptions { /// High accuracy mode (such as GPS, if available) /// Will be ignored on Android 12+ if users didn't grant the ACCESS_FINE_LOCATION permission. pub enable_high_accuracy: bool, /// The maximum wait time in milliseconds for location updates. /// Default: 10000 /// On Android the timeout gets ignored for getCurrentPosition. /// Ignored on iOS. // TODO: Handle Infinity and default to it. // TODO: Should be u64+ but specta doesn't like that? pub timeout: u32, /// The maximum age in milliseconds of a possible cached position that is acceptable to return. /// Default: 0 /// Ignored on iOS. // TODO: Handle Infinity. // TODO: Should be u64+ but specta doesn't like that? pub maximum_age: u32, } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum PermissionType { Location, CoarseLocation, } #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Coordinates { /// Latitude in decimal degrees. pub latitude: f64, /// Longitude in decimal degrees. pub longitude: f64, /// Accuracy level of the latitude and longitude coordinates in meters. pub accuracy: f64, /// Accuracy level of the altitude coordinate in meters, if available. /// Available on all iOS versions and on Android 8 and above. 
pub altitude_accuracy: Option, /// The altitude the user is at, if available. pub altitude: Option, // The speed the user is traveling, if available. pub speed: Option, /// The heading the user is facing, if available. pub heading: Option, } #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Position { /// Creation time for these coordinates. // TODO: Check if we're actually losing precision. pub timestamp: u64, /// The GPS coordinates along with the accuracy of the data. pub coords: Coordinates, } // ============================================================================= // Live Activity types (iOS 16.1+) // ============================================================================= /// Result from starting a Live Activity #[cfg(target_os = "ios")] #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct LiveActivityResult { /// Unique identifier for the activity pub activity_id: String, /// Current latitude displayed in the activity pub latitude: f64, /// Current longitude displayed in the activity pub longitude: f64, /// Horizontal accuracy in meters pub accuracy: f64, } /// Result from updating a Live Activity #[cfg(target_os = "ios")] #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct LiveActivityUpdate { /// Current latitude after update pub latitude: f64, /// Current longitude after update pub longitude: f64, /// Horizontal accuracy in meters pub accuracy: f64, } ================================================ FILE: examples/01-app-demos/hackernews/.gitignore ================================================ /static /dist ================================================ FILE: examples/01-app-demos/hackernews/Cargo.toml ================================================ [package] name = "fullstack-hackernews-example" version = "0.1.0" authors = ["Evan Almloff "] edition = "2021" publish = false # See more keys and their 
definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] dioxus = { workspace = true, features = ["fullstack", "router"] } chrono = { workspace = true, features = ["serde"] } reqwest = { workspace= true, features = ["json"] } serde = { workspace = true, features = ["derive"] } [features] default = [] server = ["dioxus/server"] web = ["dioxus/web"] ================================================ FILE: examples/01-app-demos/hackernews/assets/hackernews.css ================================================ @keyframes spin { 0% { transform: rotate(0deg); } 100% { transform: rotate(360deg); } } .spinner { width: 10px; height: 10px; border: 4px solid #f3f3f3; border-top: 4px solid #3498db; border-radius: 50%; animation: spin 2s linear infinite; } ================================================ FILE: examples/01-app-demos/hackernews/src/main.rs ================================================ #![allow(non_snake_case, unused)] use dioxus::prelude::*; // Define the Hackernews API and types use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::{ fmt::{Display, Formatter}, num::ParseIntError, str::FromStr, }; use svg_attributes::to; fn main() { LaunchBuilder::new() .with_cfg(server_only! { dioxus::server::ServeConfig::builder().enable_out_of_order_streaming() }) .launch(|| { rsx! { Stylesheet { href: asset!("/assets/hackernews.css") } Router:: {} } }); } #[derive(Clone, Routable)] enum Route { #[route("/story&:story")] StoryPreview { story: Option }, } #[component] fn StoryPreview(story: ReadSignal>) -> Element { rsx! { div { display: "flex", flex_direction: "row", width: "100%", div { width: "50%", SuspenseBoundary { fallback: |context| rsx! { "Loading..." }, Stories {} } } div { width: "50%", SuspenseBoundary { fallback: |context| rsx! { "Loading preview..." 
}, if let Some(story) = story() { Preview { story_id: story } } else { div { padding: "0.5rem", "Select a story to preview" } } } } } } } #[component] fn Stories() -> Element { let stories = use_loader(move || async move { let stories_ids = reqwest::get(&format!("{}topstories.json", BASE_API_URL)) .await? .json::>() .await? .into_iter() .take(30) .collect::>(); dioxus::Ok(stories_ids) })?; rsx! { div { for story in stories() { ChildrenOrLoading { key: "{story}", StoryListing { story } } } } } } #[component] fn StoryListing(story: ReadSignal) -> Element { let story = use_loader(move || get_story(story()))?; let StoryItem { title, url, by, score, time, kids, id, .. } = story().item; let url = url.as_deref().unwrap_or_default(); let hostname = url .trim_start_matches("https://") .trim_start_matches("http://") .trim_start_matches("www."); let score = format!("{score} {}", if score == 1 { " point" } else { " points" }); let comments = format!( "{} {}", kids.len(), if kids.len() == 1 { " comment" } else { " comments" } ); let time = time.format("%D %l:%M %p"); rsx! { div { padding: "0.5rem", position: "relative", div { font_size: "1.5rem", Link { to: Route::StoryPreview { story: Some(id) }, "{title}" } a { color: "gray", href: "https://news.ycombinator.com/from?site={hostname}", text_decoration: "none", " ({hostname})" } } div { display: "flex", flex_direction: "row", color: "gray", div { "{score}" } div { padding_left: "0.5rem", "by {by}" } div { padding_left: "0.5rem", "{time}" } div { padding_left: "0.5rem", "{comments}" } } } } } #[component] fn Preview(story_id: ReadSignal) -> Element { let story = use_loader(move || get_story(story_id()))?.cloned(); rsx! 
{ div { padding: "0.5rem", div { font_size: "1.5rem", a { href: story.item.url, "{story.item.title}" } } if let Some(text) = &story.item.text { div { dangerous_inner_html: "{text}" } } for comment in story.item.kids.iter().copied() { ChildrenOrLoading { key: "{comment}", Comment { comment } } } } } } #[component] fn Comment(comment: ReadSignal) -> Element { let comment = use_loader(move || async move { let mut comment = reqwest::get(&format!("{}{}{}.json", BASE_API_URL, ITEM_API, comment)) .await? .json::() .await?; dioxus::Ok(comment) })?; let CommentData { by, time, text, id, kids, .. } = comment(); rsx! { div { padding: "0.5rem", div { color: "gray", "by {by}" } div { dangerous_inner_html: "{text}" } for comment in kids.iter().copied() { ChildrenOrLoading { key: "{comment}", Comment { comment } } } } } } pub static BASE_API_URL: &str = "https://hacker-news.firebaseio.com/v0/"; pub static ITEM_API: &str = "item/"; pub static USER_API: &str = "user/"; const COMMENT_DEPTH: i64 = 1; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StoryPageData { #[serde(flatten)] pub item: StoryItem, #[serde(default)] pub comments: Vec, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CommentData { pub id: i64, /// there will be no by field if the comment was deleted #[serde(default)] pub by: String, #[serde(default)] pub text: String, #[serde(with = "chrono::serde::ts_seconds")] pub time: DateTime, #[serde(default)] pub kids: Vec, pub r#type: String, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StoryItem { pub id: i64, pub title: String, pub url: Option, pub text: Option, #[serde(default)] pub by: String, #[serde(default)] pub score: i64, #[serde(default)] pub descendants: i64, #[serde(with = "chrono::serde::ts_seconds")] pub time: DateTime, #[serde(default)] pub kids: Vec, pub r#type: String, } pub async fn get_story(id: i64) -> Result { Ok( reqwest::get(&format!("{}{}{}.json", BASE_API_URL, ITEM_API, id)) 
.await? .json::() .await?, ) } #[component] fn ChildrenOrLoading(children: Element) -> Element { rsx! { SuspenseBoundary { fallback: |_| rsx! { div { class: "spinner", } }, children } } } ================================================ FILE: examples/01-app-demos/hello_world.rs ================================================ //! The simplest example of a Dioxus app. //! //! In this example we: //! - import a number of important items from the prelude (launch, Element, rsx, div, etc.) //! - define a main function that calls the launch function with our app function //! - define an app function that returns a div element with the text "Hello, world!" //! //! The `launch` function is the entry point for all Dioxus apps. It takes a function that returns an Element. This function //! calls "launch" on the currently-configured renderer you have. So if the `web` feature is enabled, it will launch a web //! app, and if the `desktop` feature is enabled, it will launch a desktop app. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { div { "Hello, world!" 
} } } ================================================ FILE: examples/01-app-demos/hotdog/.gitignore ================================================ hotdogdb/* ================================================ FILE: examples/01-app-demos/hotdog/Cargo.toml ================================================ [package] name = "hotdog" version = "0.1.0" authors = ["Dioxus Labs"] edition = "2021" publish = false [dependencies] dioxus = { workspace = true, features = ["fullstack", "router"] } reqwest = { workspace = true, features = ["json"] } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } rusqlite = { version = "0.32.0", optional = true } anyhow = { workspace = true } [features] default = ["web", "server"] web = ["dioxus/web"] desktop = ["dioxus/desktop"] native = ["dioxus/native"] mobile = ["dioxus/mobile"] server = ["dioxus/server", "dep:rusqlite"] ================================================ FILE: examples/01-app-demos/hotdog/Dioxus.toml ================================================ [application] # App (Project) Name name = "hot_dog" [bundle] identifier = "com.dioxuslabs" publisher = "Dioxus Labs" ================================================ FILE: examples/01-app-demos/hotdog/Dockerfile ================================================ FROM rust:1 AS chef RUN cargo install cargo-chef WORKDIR /app FROM chef AS planner COPY . . RUN cargo chef prepare --recipe-path recipe.json FROM chef AS builder COPY --from=planner /app/recipe.json recipe.json RUN cargo chef cook --release --recipe-path recipe.json COPY . . 
RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/DioxusLabs/dioxus/refs/heads/main/.github/install.sh | bash RUN /.cargo/bin/dx bundle --platform web FROM chef AS runtime COPY --from=builder /app/target/dx/hotdog/release/web/ /usr/local/app ENV PORT=8080 ENV IP=0.0.0.0 EXPOSE 8080 WORKDIR /usr/local/app ENTRYPOINT [ "/usr/local/app/server" ] ================================================ FILE: examples/01-app-demos/hotdog/README.md ================================================ # Hot diggity dog! A Dioxus demo app for the new tutorial! ![Demo](assets/screenshot.png) ## To run Make sure you cd to this directory (dioxus/hotdog) and then `serve` any platform: ```rust dx serve --platform web dx serve --platform desktop dx serve --platform ios dx serve --platform android ``` ================================================ FILE: examples/01-app-demos/hotdog/assets/main.css ================================================ /* App-wide styling */ html, body { background-color: #0e0e0e; color: white; font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif; height: 100%; width: 100%; overflow: hidden; margin: 0; } #main { display: flex; flex-direction: column; height: 100%; justify-content: space-between; } #dogview { max-height: 80vh; flex-grow: 1; width: 100%; display: flex; flex-direction: column; align-items: center; justify-content: center; } #dogimg { display: block; max-width: 50%; max-height: 50%; transform: scale(1.8); border-radius: 5px; border: 1px solid rgb(233, 233, 233); box-shadow: 0px 0px 5px 1px rgb(216, 216, 216, 0.5); } #title { text-align: center; padding-top: 10px; border-bottom: 1px solid #a8a8a8; display: flex; flex-direction: row; justify-content: space-evenly; align-items: center; } #title a { text-decoration: none; color: white; } #heart { background-color: white; padding: 5px; border-radius: 5px; } #title span { width: 20px; } #title h1 { margin: 0.25em; font-style: italic; } #buttons { display: flex; 
flex-direction: row; justify-content: center; gap: 20px; /* padding-top: 20px; */ padding-bottom: 20px; } #skip { background-color: gray } #save { background-color: green; } #skip, #save { padding: 5px 30px 5px 30px; border-radius: 3px; font-size: 2rem; font-weight: bold; color: rgb(230, 230, 230) } #navbar { border: 1px solid rgb(233, 233, 233); border-width: 1px 0px 0px 0px; display: flex; flex-direction: row; justify-content: space-evenly; padding: 20px; gap: 20px; } #navbar a { background-color: #a8a8a8; border-radius: 5px; border: 1px solid black; text-decoration: none; color: black; padding: 10px 30px 10px 30px; } #favorites { flex-grow: 1; overflow: hidden; display: flex; flex-direction: column; padding: 10px; } #favorites-container { overflow-y: auto; overflow-x: hidden; display: flex; flex-direction: row; flex-wrap: wrap; justify-content: center; gap: 10px; padding: 10px; } .favorite-dog { max-height: 180px; max-width: 60%; position: relative; } .favorite-dog img { max-height: 150px; border-radius: 5px; margin: 5px; } .favorite-dog:hover button { display: block; } .favorite-dog button { display: none; position: absolute; bottom: 10px; left: 10px; z-index: 10; } ================================================ FILE: examples/01-app-demos/hotdog/fly.toml ================================================ # fly.toml app configuration file generated for hot-dog on 2024-12-19T18:23:45-08:00 # # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 
# app = 'hot-dog' primary_region = 'sjc' [build] [http_service] internal_port = 8080 force_https = true auto_stop_machines = 'stop' auto_start_machines = true min_machines_running = 0 processes = ['app'] [[vm]] memory = '1gb' cpu_kind = 'shared' cpus = 1 [mounts] source = "hotdogdb" destination = "/usr/local/app/hotdogdb" ================================================ FILE: examples/01-app-demos/hotdog/src/backend.rs ================================================ use anyhow::Result; use dioxus::prelude::*; #[cfg(feature = "server")] thread_local! { static DB: std::sync::LazyLock = std::sync::LazyLock::new(|| { std::fs::create_dir("hotdogdb").unwrap(); let conn = rusqlite::Connection::open("hotdogdb/hotdog.db").expect("Failed to open database"); conn.execute_batch( "CREATE TABLE IF NOT EXISTS dogs ( id INTEGER PRIMARY KEY, url TEXT NOT NULL );", ) .unwrap(); conn }); } #[get("/api/dogs")] pub async fn list_dogs() -> Result> { DB.with(|db| { Ok(db .prepare("SELECT id, url FROM dogs ORDER BY id DESC LIMIT 10")? .query_map([], |row| Ok((row.get(0)?, row.get(1)?)))? .collect::, rusqlite::Error>>()?) }) } #[delete("/api/dogs/{id}")] pub async fn remove_dog(id: usize) -> Result<()> { DB.with(|db| db.execute("DELETE FROM dogs WHERE id = ?1", [id]))?; Ok(()) } #[post("/api/dogs")] pub async fn save_dog(image: String) -> Result<()> { DB.with(|db| db.execute("INSERT INTO dogs (url) VALUES (?1)", [&image]))?; Ok(()) } ================================================ FILE: examples/01-app-demos/hotdog/src/frontend.rs ================================================ use dioxus::prelude::*; use serde::{Deserialize, Serialize}; use crate::{ backend::{list_dogs, remove_dog, save_dog}, Route, }; #[component] pub fn Favorites() -> Element { let mut favorites = use_loader(list_dogs)?; rsx! 
{ div { id: "favorites", for (id , url) in favorites.cloned() { div { class: "favorite-dog", key: "{id}", img { src: "{url}" } button { onclick: move |_| async move { _ = remove_dog(id).await; favorites.restart(); }, "❌" } } } } } } #[component] pub fn NavBar() -> Element { rsx! { div { id: "title", span {} Link { to: Route::DogView, h1 { "🌭 HotDog! " } } Link { to: Route::Favorites, id: "heart", "♥️" } } Outlet:: {} } } #[component] pub fn DogView() -> Element { let mut img_src = use_loader(|| async move { #[derive(Deserialize, Serialize, Debug, PartialEq)] struct DogApi { message: String, } let json = reqwest::get("https://dog.ceo/api/breeds/image/random") .await? .json::() .await?; let url = json.message; dioxus::Ok(url) })?; rsx! { div { id: "dogview", img { id: "dogimg", src: "{img_src}" } } div { id: "buttons", button { id: "skip", onclick: move |_| img_src.restart(), "skip" } button { id: "save", onclick: move |_| async move { _ = save_dog(img_src()).await }, "save!" } } } } ================================================ FILE: examples/01-app-demos/hotdog/src/main.rs ================================================ mod backend; mod frontend; use dioxus::prelude::*; use frontend::*; #[derive(Routable, PartialEq, Clone)] enum Route { #[layout(NavBar)] #[route("/")] DogView, #[route("/favorites")] Favorites, } fn main() { #[cfg(not(feature = "server"))] dioxus::fullstack::set_server_url("https://hot-dog.fly.dev"); dioxus::launch(app); } fn app() -> Element { rsx! 
{ Stylesheet { href: asset!("/assets/main.css") } Router:: {} } } ================================================ FILE: examples/01-app-demos/image_generator_openai.rs ================================================ use dioxus::prelude::*; fn main() { dioxus::launch(app) } fn app() -> Element { let mut api_key = use_signal(|| "".to_string()); let mut prompt = use_signal(|| "".to_string()); let mut num_images = use_signal(|| 1.to_string()); let mut image = use_action(move || async move { #[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Props, Clone, Default)] struct ImageResponse { created: i32, data: Vec, } #[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Props, Clone)] struct UrlImage { url: String, } if api_key.peek().is_empty() || prompt.peek().is_empty() || num_images.peek().is_empty() { return dioxus::Ok(ImageResponse::default()); } let res = reqwest::Client::new() .post("https://api.openai.com/v1/images/generations") .json(&serde_json::json!({ "prompt": prompt.cloned(), "n": num_images.cloned().parse::().unwrap_or(1), "size":"1024x1024", })) .bearer_auth(api_key) .send() .await? .json::() .await?; Ok(res) }); rsx! 
{ Stylesheet { href: "https://unpkg.com/bulma@0.9.0/css/bulma.min.css" } div { class: "container", div { class: "columns", div { class: "column", input { class: "input is-primary mt-4", value: "{api_key}", r#type: "text", placeholder: "Your OpenAI API Key", oninput: move |evt| api_key.set(evt.value()), } input { class: "input is-primary mt-4", placeholder: "MAX 1000 Dgts", r#type: "text", value:"{prompt}", oninput: move |evt| prompt.set(evt.value()) } input { class: "input is-primary mt-4", r#type: "number", min:"1", max:"10", value:"{num_images}", oninput: move |evt| num_images.set(evt.value()), } } } button { class: "button is-primary", class: if image.pending() { "is-loading" }, onclick: move |_| { image.call(); }, "Generate image" } if let Some(Ok(image)) = image.value() { for image in image.read().data.as_slice() { section { class: "is-flex", div { class: "container is-fluid", div { class: "container has-text-centered", div { class: "is-justify-content-center", div { class: "level", div { class: "level-item", figure { class: "image", img { alt: "", src: "{image.url}", } } } } } } } } } } } } } ================================================ FILE: examples/01-app-demos/repo_readme.rs ================================================ //! The example from the readme! //! //! This example demonstrates how to create a simple counter app with dioxus. The `Signal` type wraps inner values, //! making them `Copy`, allowing them to be freely used in closures and async functions. `Signal` also provides //! helper methods like AddAssign, SubAssign, toggle, etc, to make it easy to update the value without running //! into lock issues. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut count = use_signal(|| 0); rsx! { h1 { "High-Five counter: {count}" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" 
} } } ================================================ FILE: examples/01-app-demos/todomvc.rs ================================================ //! The typical TodoMVC app, implemented in Dioxus. use dioxus::prelude::*; use std::collections::HashMap; const STYLE: Asset = asset!("/examples/assets/todomvc.css"); fn main() { dioxus::launch(app); } #[derive(PartialEq, Eq, Clone, Copy)] enum FilterState { All, Active, Completed, } struct TodoItem { checked: bool, contents: String, } fn app() -> Element { // We store the todos in a HashMap in a Signal. // Each key is the id of the todo, and the value is the todo itself. let mut todos = use_signal(HashMap::::new); let filter = use_signal(|| FilterState::All); // We use a simple memoized signal to calculate the number of active todos. // Whenever the todos change, the active_todo_count will be recalculated. let active_todo_count = use_memo(move || todos.read().values().filter(|item| !item.checked).count()); // We use a memoized signal to filter the todos based on the current filter state. // Whenever the todos or filter change, the filtered_todos will be recalculated. // Note that we're only storing the IDs of the todos, not the todos themselves. let filtered_todos = use_memo(move || { let mut filtered_todos = todos .read() .iter() .filter(|(_, item)| match filter() { FilterState::All => true, FilterState::Active => !item.checked, FilterState::Completed => item.checked, }) .map(|f| *f.0) .collect::>(); filtered_todos.sort_unstable(); filtered_todos }); // Toggle all the todos to the opposite of the current state. // If all todos are checked, uncheck them all. If any are unchecked, check them all. let toggle_all = move |_| { let check = active_todo_count() != 0; for (_, item) in todos.write().iter_mut() { item.checked = check; } }; rsx! 
{ Stylesheet { href: STYLE } section { class: "todoapp", TodoHeader { todos } section { class: "main", if !todos.read().is_empty() { input { id: "toggle-all", class: "toggle-all", r#type: "checkbox", onchange: toggle_all, checked: active_todo_count() == 0 } label { r#for: "toggle-all" } } // Render the todos using the filtered_todos signal // We pass the ID into the TodoEntry component so it can access the todo from the todos signal. // Since we store the todos in a signal too, we also need to send down the todo list ul { class: "todo-list", for id in filtered_todos() { TodoEntry { key: "{id}", id, todos } } } // We only show the footer if there are todos. if !todos.read().is_empty() { ListFooter { active_todo_count, todos, filter } } } } // A simple info footer footer { class: "info", p { "Double-click to edit a todo" } p { "Created by " a { href: "http://github.com/jkelleyrtp/", "jkelleyrtp" } } p { "Part of " a { href: "http://todomvc.com", "TodoMVC" } } } } } #[component] fn TodoHeader(mut todos: WriteSignal>) -> Element { let mut draft = use_signal(|| "".to_string()); let mut todo_id = use_signal(|| 0); let onkeydown = move |evt: KeyboardEvent| { if evt.key() == Key::Enter && !draft.is_empty() { let id = todo_id(); let todo = TodoItem { checked: false, contents: draft.to_string(), }; todos.insert(id, todo); todo_id += 1; draft.set("".to_string()); } }; rsx! 
{ header { class: "header", h1 { "todos" } input { class: "new-todo", placeholder: "What needs to be done?", value: "{draft}", autofocus: "true", oninput: move |evt| draft.set(evt.value()), onkeydown } } } } /// A single todo entry /// This takes the ID of the todo and the todos signal as props /// We can use these together to memoize the todo contents and checked state #[component] fn TodoEntry(mut todos: WriteSignal>, id: u32) -> Element { let mut is_editing = use_signal(|| false); // To avoid re-rendering this component when the todo list changes, we isolate our reads to memos // This way, the component will only re-render when the contents of the todo change, or when the editing state changes. // This does involve taking a local clone of the todo contents, but it allows us to prevent this component from re-rendering let checked = use_memo(move || todos.read().get(&id).unwrap().checked); let contents = use_memo(move || todos.read().get(&id).unwrap().contents.clone()); rsx! { li { // Dioxus lets you use if statements in rsx to conditionally render attributes // These will get merged into a single class attribute class: if checked() { "completed" }, class: if is_editing() { "editing" }, // Some basic controls for the todo div { class: "view", input { class: "toggle", r#type: "checkbox", id: "cbg-{id}", checked: "{checked}", oninput: move |evt| todos.get_mut(&id).unwrap().checked = evt.checked() } label { r#for: "cbg-{id}", ondoubleclick: move |_| is_editing.set(true), onclick: |evt| evt.prevent_default(), "{contents}" } button { class: "destroy", onclick: move |evt| { evt.prevent_default(); todos.remove(&id); }, } } // Only render the actual input if we're editing if is_editing() { input { class: "edit", value: "{contents}", oninput: move |evt| todos.get_mut(&id).unwrap().contents = evt.value(), autofocus: "true", onfocusout: move |_| is_editing.set(false), onkeydown: move |evt| { match evt.key() { Key::Enter | Key::Escape | Key::Tab => is_editing.set(false), _ => 
{} } } } } } } } #[component] fn ListFooter( mut todos: WriteSignal>, active_todo_count: ReadSignal, mut filter: WriteSignal, ) -> Element { // We use a memoized signal to calculate whether we should show the "Clear completed" button. // This will recompute whenever the todos change, and if the value is true, the button will be shown. let show_clear_completed = use_memo(move || todos.read().values().any(|todo| todo.checked)); rsx! { footer { class: "footer", span { class: "todo-count", strong { "{active_todo_count} " } span { match active_todo_count() { 1 => "item", _ => "items", } " left" } } ul { class: "filters", for (state , state_text , url) in [ (FilterState::All, "All", "#/"), (FilterState::Active, "Active", "#/active"), (FilterState::Completed, "Completed", "#/completed"), ] { li { a { href: url, class: if filter() == state { "selected" }, onclick: move |evt| { evt.prevent_default(); filter.set(state) }, {state_text} } } } } if show_clear_completed() { button { class: "clear-completed", onclick: move |_| todos.retain(|_, todo| !todo.checked), "Clear completed" } } } } } ================================================ FILE: examples/01-app-demos/todomvc_store.rs ================================================ //! The typical TodoMVC app, implemented in Dioxus with stores. Stores let us //! share nested reactive state between components. They let us keep our todomvc //! state in a single struct without wrapping every type in a signal while still //! maintaining fine grained reactivity. use dioxus::prelude::*; use std::{collections::HashMap, vec}; const STYLE: Asset = asset!("/examples/assets/todomvc.css"); /// Deriving the store macro on a struct will automatically generate an extension trait /// for Store with method to zoom into the fields of the struct. 
/// /// For this struct, the macro derives the following methods for Store: /// - `todos(self) -> Store, _>` /// - `filter(self) -> Store` #[derive(Store, PartialEq, Clone, Debug)] struct TodoState { todos: HashMap, filter: FilterState, } // We can also add custom methods to the store by using the `store` attribute on an impl block. // The store attribute turns the impl block into an extension trait for Store. // Methods that take &self will automatically get a bound that Lens: Readable // Methods that take &mut self will automatically get a bound that Lens: Writable #[store] impl Store { fn active_items(&self) -> Vec { let filter = self.filter().cloned(); let mut active_ids: Vec = self .todos() .iter() .filter_map(|(id, item)| item.active(filter).then_some(id)) .collect(); active_ids.sort_unstable(); active_ids } fn incomplete_count(&self) -> usize { self.todos() .values() .filter(|item| item.incomplete()) .count() } fn toggle_all(&mut self) { let check = self.incomplete_count() != 0; for item in self.todos().values() { item.checked().set(check); } } fn has_todos(&self) -> bool { !self.todos().is_empty() } } #[derive(PartialEq, Eq, Clone, Copy, Debug)] enum FilterState { All, Active, Completed, } #[derive(Store, PartialEq, Clone, Debug)] struct TodoItem { checked: bool, contents: String, } impl TodoItem { fn new(contents: impl ToString) -> Self { Self { checked: false, contents: contents.to_string(), } } } #[store] impl Store { fn complete(&self) -> bool { self.checked().cloned() } fn incomplete(&self) -> bool { !self.complete() } fn active(&self, filter: FilterState) -> bool { match filter { FilterState::All => true, FilterState::Active => self.incomplete(), FilterState::Completed => self.complete(), } } } fn main() { dioxus::launch(app); } fn app() -> Element { // We store the state of our todo list in a store to use throughout the app. 
let mut todos = use_store(|| TodoState { todos: HashMap::new(), filter: FilterState::All, }); // We use a simple memo to calculate the number of active todos. // Whenever the todos change, the active_todo_count will be recalculated. let active_todo_count = use_memo(move || todos.incomplete_count()); // We use a memo to filter the todos based on the current filter state. // Whenever the todos or filter change, the filtered_todos will be recalculated. // Note that we're only storing the IDs of the todos, not the todos themselves. let filtered_todos = use_memo(move || todos.active_items()); // Toggle all the todos to the opposite of the current state. // If all todos are checked, uncheck them all. If any are unchecked, check them all. let toggle_all = move |_| { todos.toggle_all(); }; rsx! { Stylesheet { href: STYLE } section { class: "todoapp", TodoHeader { todos } section { class: "main", if todos.has_todos() { input { id: "toggle-all", class: "toggle-all", r#type: "checkbox", onchange: toggle_all, checked: active_todo_count() == 0 } label { r#for: "toggle-all" } } // Render the todos using the filtered_todos memo // We pass the ID along with the hashmap into the TodoEntry component so it can access the todo from the todos store. ul { class: "todo-list", for id in filtered_todos() { TodoEntry { key: "{id}", id, todos } } } // We only show the footer if there are todos. 
if todos.has_todos() { ListFooter { active_todo_count, todos } } } } // A simple info footer footer { class: "info", p { "Double-click to edit a todo" } p { "Created by " a { href: "http://github.com/jkelleyrtp/", "jkelleyrtp" } } p { "Part of " a { href: "http://todomvc.com", "TodoMVC" } } } } } #[component] fn TodoHeader(mut todos: Store) -> Element { let mut draft = use_signal(|| "".to_string()); let mut todo_id = use_signal(|| 0); let onkeydown = move |evt: KeyboardEvent| { if evt.key() == Key::Enter && !draft.is_empty() { let id = todo_id(); let todo = TodoItem::new(draft.take()); todos.todos().insert(id, todo); todo_id += 1; } }; rsx! { header { class: "header", h1 { "todos" } input { class: "new-todo", placeholder: "What needs to be done?", value: "{draft}", autofocus: "true", oninput: move |evt| draft.set(evt.value()), onkeydown } } } } /// A single todo entry /// This takes the ID of the todo and the todos store as props /// We can use these together to memoize the todo contents and checked state #[component] fn TodoEntry(mut todos: Store, id: u32) -> Element { let mut is_editing = use_signal(|| false); // When we get an item out of the store, it will only subscribe to that specific item. // Since we only get the single todo item, the component will only rerender when that item changes. let entry = todos.todos().get(id).unwrap(); let checked = entry.checked(); let contents = entry.contents(); rsx! 
{ li { // Dioxus lets you use if statements in rsx to conditionally render attributes // These will get merged into a single class attribute class: if checked() { "completed" }, class: if is_editing() { "editing" }, // Some basic controls for the todo div { class: "view", input { class: "toggle", r#type: "checkbox", id: "cbg-{id}", checked: "{checked}", oninput: move |evt| entry.checked().set(evt.checked()) } label { r#for: "cbg-{id}", ondoubleclick: move |_| is_editing.set(true), onclick: |evt| evt.prevent_default(), "{contents}" } button { class: "destroy", onclick: move |evt| { evt.prevent_default(); todos.todos().remove(&id); }, } } // Only render the actual input if we're editing if is_editing() { input { class: "edit", value: "{contents}", oninput: move |evt| entry.contents().set(evt.value()), autofocus: "true", onfocusout: move |_| is_editing.set(false), onkeydown: move |evt| { match evt.key() { Key::Enter | Key::Escape | Key::Tab => is_editing.set(false), _ => {} } } } } } } } #[component] fn ListFooter(mut todos: Store, active_todo_count: ReadSignal) -> Element { // We use a memo to calculate whether we should show the "Clear completed" button. // This will recompute whenever the number of todos change or the checked state of an existing // todo changes let show_clear_completed = use_memo(move || todos.todos().values().any(|todo| todo.complete())); let mut filter = todos.filter(); rsx! 
{ footer { class: "footer", span { class: "todo-count", strong { "{active_todo_count} " } span { match active_todo_count() { 1 => "item", _ => "items", } " left" } } ul { class: "filters", for (state , state_text , url) in [ (FilterState::All, "All", "#/"), (FilterState::Active, "Active", "#/active"), (FilterState::Completed, "Completed", "#/completed"), ] { li { a { href: url, class: if filter() == state { "selected" }, onclick: move |evt| { evt.prevent_default(); filter.set(state) }, {state_text} } } } } if show_clear_completed() { button { class: "clear-completed", onclick: move |_| todos.todos().retain(|_, todo| !todo.checked), "Clear completed" } } } } } ================================================ FILE: examples/01-app-demos/weather_app.rs ================================================ #![allow(non_snake_case)] use dioxus::{fullstack::Loading, prelude::*}; use serde::{Deserialize, Serialize}; use std::fmt::Display; fn main() { dioxus::launch(app); } fn app() -> Element { let country = use_signal(|| WeatherLocation { name: "Berlin".to_string(), country: "Germany".to_string(), latitude: 52.5244, longitude: 13.4105, id: 2950159, }); let current_weather = use_loader(move || get_weather(country())); rsx! { Stylesheet { href: asset!("/examples/assets/weatherapp.css") } div { class: "mx-auto p-4 bg-gray-100 h-screen flex justify-center", div { class: "flex items-center justify-center flex-row", div { class: "flex items-start justify-center flex-row", SearchBox { country } div { class: "flex flex-wrap w-full px-2", div { class: "bg-gray-900 text-white relative min-w-0 break-words rounded-lg overflow-hidden shadow-sm mb-4 w-full dark:bg-gray-600", div { class: "px-6 py-6 relative", match current_weather { Ok(weather) => rsx! { CountryData { country: country.read().clone(), weather: weather.cloned(), } Forecast { weather: weather.cloned() } div { height: "20px", margin_top: "10px", if weather.loading() { "Fetching weather data..." 
} } },
Err(Loading::Pending(_)) => rsx! { div { "Loading weather data..." } },
Err(Loading::Failed(_)) => rsx! { div { "Failed to load weather data." } }
} } } } } } } } }

/// Card header: shows the selected location's name plus today's min/max temperature.
///
/// NOTE(review): indexes `daily.temperature_2m_*[0]` directly — assumes the
/// Open-Meteo response always contains at least one daily entry; confirm against
/// the API contract before hardening.
#[allow(non_snake_case)]
#[component]
fn CountryData(weather: WeatherResponse, country: WeatherLocation) -> Element {
    let today = "Today";
    let max_temp = weather.daily.temperature_2m_max[0];
    let min_temp = weather.daily.temperature_2m_min[0];
    rsx! {
        div { class: "flex mb-4 justify-between items-center",
            div {
                h5 { class: "mb-0 font-medium text-xl", "{country.name} 🏞️" }
                h6 { class: "mb-0", "{today}" }
            }
            div {
                div { class: "flex items-center",
                    span { "Temp min" }
                    span { class: "px-2 inline-block", "👉 {min_temp}°" }
                }
                div { class: "flex items-center",
                    span { "Temp max" }
                    span { class: "px-2 inline-block ", "👉 {max_temp}º" }
                }
            }
        }
    }
}

/// Three-day strip showing each day's mean temperature.
///
/// BUG FIX: each mean was previously computed as `(max + max) / 2.0`, which
/// algebraically collapses to just the max — the `temperature_2m_min` term was
/// mistakenly written as `temperature_2m_max`. The daily mean is now
/// `(max + min) / 2.0` for indices 0 (today), 1 (tomorrow), and 2.
#[allow(non_snake_case)]
#[component]
fn Forecast(weather: WeatherResponse) -> Element {
    let today = (weather.daily.temperature_2m_max[0] + weather.daily.temperature_2m_min[0]) / 2.0;
    let tomorrow = (weather.daily.temperature_2m_max[1] + weather.daily.temperature_2m_min[1]) / 2.0;
    let past_tomorrow =
        (weather.daily.temperature_2m_max[2] + weather.daily.temperature_2m_min[2]) / 2.0;
    rsx!
{ div { class: "px-6 pt-4 relative", div { class: "w-full h-px bg-gray-100 mb-4" } div { p { class: "text-center w-full mb-4", "👇 Forecast 📆" } } div { class: "text-center justify-between items-center flex", div { class: "text-center mb-0 flex items-center justify-center flex-col mx-4 w-16", span { class: "block my-1", "Today" } span { class: "block my-1", "{today}°" } } div { class: "text-center mb-0 flex items-center justify-center flex-col mx-8 w-16", span { class: "block my-1", "Tomorrow" } span { class: "block my-1", "{tomorrow}°" } } div { class: "text-center mb-0 flex items-center justify-center flex-col mx-2 w-30", span { class: "block my-1", "Past Tomorrow" } span { class: "block my-1", "{past_tomorrow}°" } } } } } } #[component] fn SearchBox(mut country: WriteSignal) -> Element { let mut input = use_signal(|| "".to_string()); let locations = use_loader(move || get_locations(input())); rsx! { div { div { class: "inline-flex flex-col justify-center relative text-gray-500", div { class: "relative", input { class: "p-2 pl-8 rounded-lg border border-gray-200 bg-gray-200 focus:bg-white focus:outline-none focus:ring-2 focus:ring-yellow-600 focus:border-transparent", placeholder: "Country name", "type": "text", autofocus: true, oninput: move |e: FormEvent| input.set(e.value()) } svg { class: "w-4 h-4 absolute left-2.5 top-3.5", "viewBox": "0 0 24 24", fill: "none", stroke: "currentColor", xmlns: "http://www.w3.org/2000/svg", path { d: "M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z", "stroke-linejoin": "round", "stroke-linecap": "round", "stroke-width": "2" } } } ul { class: "bg-white border border-gray-100 w-full mt-2 max-h-72 overflow-auto", match locations { Ok(locs) if locs.is_empty() => rsx! { li { class: "pl-8 pr-2 py-1 border-b-2 border-gray-100 relative", "No locations found" } }, Ok(locs) => rsx! 
{ for wl in locs.read().iter().take(5).cloned() { li { class: "pl-8 pr-2 py-1 border-b-2 border-gray-100 relative cursor-pointer hover:bg-yellow-50 hover:text-gray-900", onclick: move |_| country.set(wl.clone()), MapIcon {} b { "{wl.name}" } " · {wl.country}" } } }, Err(Loading::Pending(_)) => rsx! { li { class: "pl-8 pr-2 py-1 border-b-2 border-gray-100 relative", "Searching..." } }, Err(Loading::Failed(handle)) => rsx! { li { class: "pl-8 pr-2 py-1 border-b-2 border-gray-100 relative", "Failed to search: {handle.error():?}" } } } } } } } } fn MapIcon() -> Element { rsx! { svg { class: "stroke-current absolute w-4 h-4 left-2 top-2", stroke: "currentColor", xmlns: "http://www.w3.org/2000/svg", "viewBox": "0 0 24 24", fill: "none", path { "stroke-linejoin": "round", "stroke-width": "2", "stroke-linecap": "round", d: "M17.657 16.657L13.414 20.9a1.998 1.998 0 01-2.827 0l-4.244-4.243a8 8 0 1111.314 0z" } path { "stroke-linecap": "round", "stroke-linejoin": "round", d: "M15 11a3 3 0 11-6 0 3 3 0 016 0z", "stroke-width": "2" } } } } #[derive(Debug, Default, Serialize, Deserialize, PartialEq, Clone)] struct WeatherLocation { id: usize, name: String, latitude: f32, longitude: f32, country: String, } type WeatherLocations = Vec; #[derive(Debug, Default, Serialize, Deserialize)] struct SearchResponse { #[serde(default)] results: WeatherLocations, } async fn get_locations(input: impl Display) -> Result { let res = reqwest::get(&format!( "https://geocoding-api.open-meteo.com/v1/search?name={input}" )) .await? 
.json::() .await?; Ok(res.results) } #[derive(Debug, Default, Serialize, Deserialize, PartialEq, Clone)] struct WeatherResponse { daily: DailyWeather, hourly: HourlyWeather, } #[derive(Debug, Default, Serialize, Deserialize, PartialEq, Clone)] struct HourlyWeather { time: Vec, temperature_2m: Vec, } #[derive(Debug, Default, Serialize, Deserialize, PartialEq, Clone)] struct DailyWeather { temperature_2m_min: Vec, temperature_2m_max: Vec, } async fn get_weather(location: WeatherLocation) -> reqwest::Result { reqwest::get(&format!("https://api.open-meteo.com/v1/forecast?latitude={}&longitude={}&hourly=temperature_2m&daily=temperature_2m_max,temperature_2m_min,apparent_temperature_max,apparent_temperature_min&timezone=GMT", location.latitude, location.longitude)) .await? .json::() .await } ================================================ FILE: examples/01-app-demos/websocket_chat.rs ================================================ //! A websocket chat demo using Dioxus' built-in websocket support. //! //! We setup an endpoint at `/api/chat` that accepts a `name` and `user_id` query parameter. //! Each client connects to that endpoint, and we use a `tokio::broadcast` channel //! to send messages to all connected clients. //! //! In practice, you'd use a distributed messaging system (Redis PubSub / Kafka / etc) to coordinate //! between multiple server instances and an additional database to persist chat history. 
use dioxus::fullstack::{WebSocketOptions, Websocket, use_websocket}; use dioxus::prelude::*; use serde::{Deserialize, Serialize}; use uuid::Uuid; fn main() { dioxus::launch(app); } fn app() -> Element { // store the user's current input let mut input = use_signal(|| "".to_string()); // Select a unique id for the user, and then use that entropy to pick a random name let user_id = use_signal(uuid::Uuid::new_v4); let user_name = use_signal(|| { match user_id.read().as_bytes()[0] % 7 { 0 => "Alice", 1 => "Bob", 2 => "Eve", 3 => "Mallory", 4 => "Trent", 5 => "Peggy", 6 => "Victor", _ => "Charlie", } .to_string() }); // Store the messages we've received from the server let mut message_list = use_signal(Vec::::new); // Connect to the websocket endpoint let mut socket = use_websocket(move || uppercase_ws(user_name(), user_id(), Default::default())); use_future(move || async move { while let Ok(msg) = socket.recv().await { match msg { ServerEvent::ReceiveMessage(message) => message_list.push(message), ServerEvent::Connected { messages } => message_list.set(messages), } } }); rsx! 
{ h1 { "WebSocket Chat" } p { "Connection status: {socket.status():?} as {user_name}" } input { placeholder: "Type a message", value: "{input}", oninput: move |e| async move { input.set(e.value()) }, onkeydown: move |e| async move { if e.key() == Key::Enter { _ = socket.send(ClientEvent::SendMessage(input.read().clone())).await; input.set("".to_string()); } } } div { for message in message_list.read().iter().rev() { pre { "{message.name}: {message.message}" } } } } } /// The events that the client can send to the server #[derive(Serialize, Deserialize, Debug)] enum ClientEvent { SendMessage(String), } /// The events that the server can send to the client #[derive(Serialize, Deserialize, Debug)] enum ServerEvent { Connected { messages: Vec }, ReceiveMessage(ChatMessage), } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] struct ChatMessage { user_id: Uuid, name: String, message: String, } #[get("/api/chat?name&user_id")] async fn uppercase_ws( name: String, user_id: Uuid, options: WebSocketOptions, ) -> Result> { use std::sync::LazyLock; use tokio::sync::{ Mutex, broadcast::{self, Sender}, }; // Every chat app needs a chat room! For this demo, we just use a tokio broadcast channel and a mutex-protected // list of messages to store chat history. // // We place these types in the body of this serverfn since they're not used on the client, only the server. 
static MESSAGES: LazyLock>> = LazyLock::new(|| Mutex::new(Vec::new())); static BROADCAST: LazyLock> = LazyLock::new(|| broadcast::channel(100).0); Ok(options.on_upgrade(move |mut socket| async move { // Send back all the messages from the room to the new client let messages = MESSAGES.lock().await.clone(); _ = socket.send(ServerEvent::Connected { messages }).await; // Subscriber to the broadcast channel let sender = BROADCAST.clone(); let mut broadcast = sender.subscribe(); // Announce that we've joined let _ = sender.send(ChatMessage { message: format!("{name} has connected."), user_id, name: "[CONSOLE]".to_string(), }); // Loop poll the broadcast receiver and the websocket for new messages // If we receive a message from the broadcast channel, send it to the client // If we receive a message from the client, broadcast it to all other clients and save it to the message list loop { tokio::select! { Ok(msg) = broadcast.recv() => { let _ = socket.send(ServerEvent::ReceiveMessage(msg)).await; } Ok(ClientEvent::SendMessage(message)) = socket.recv() => { let chat_message = ChatMessage { user_id, name: name.clone(), message, }; let _ = sender.send(chat_message.clone()); MESSAGES.lock().await.push(chat_message.clone()); }, else => break, } } _ = sender.send(ChatMessage { name: "[CONSOLE]".to_string(), message: format!("{name} has disconnected."), user_id, }); })) } ================================================ FILE: examples/02-building-ui/disabled.rs ================================================ //! A simple demonstration of how to set attributes on buttons to disable them. //! //! This example also showcases the shorthand syntax for attributes, and how signals themselves implement IntoAttribute use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut disabled = use_signal(|| false); rsx! 
{
    div {
        text_align: "center",
        margin: "20px",
        display: "flex",
        flex_direction: "column",
        align_items: "center",
        button {
            onclick: move |_| disabled.toggle(),
            // rsx! allows inline if/else fragments inside text content: the label
            // flips between "enable" and "disable" as the `disabled` signal changes.
            "click to "
            if disabled() { "enable" } else { "disable" }
            " the lower button"
        }
        // Shorthand attribute syntax: bare `disabled` expands to `disabled: disabled`,
        // passing the signal itself so the attribute updates reactively.
        button { disabled, "lower button" }
    }
}
}
================================================ FILE: examples/02-building-ui/nested_listeners.rs ================================================
//! Nested Listeners
//!
//! This example showcases how to control event bubbling from child to parents.
//!
//! Both web and desktop support bubbling and bubble cancellation.

use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    rsx! {
        // Clicks on any child bubble up to this handler unless a child calls
        // `stop_propagation` on the event first.
        div {
            onclick: move |_| println!("clicked! top"),
            "- div"
            // This click bubbles: both the button's and the div's handlers fire.
            button { onclick: move |_| println!("clicked! bottom propagate"), "Propagate" }
            button {
                onclick: move |evt| {
                    println!("clicked! bottom no bubbling");
                    // Cancel bubbling so the parent div's handler does not fire.
                    evt.stop_propagation();
                },
                "Dont propagate"
            }
            // No handler of its own: clicks here reach only the parent div's handler.
            button { "Does not handle clicks - only propagate" }
        }
    }
}
================================================ FILE: examples/02-building-ui/svg.rs ================================================
//! Thanks to @japsu and their project https://github.com/japsu/jatsi for the example!
//!
//! This example shows how to create a simple dice rolling app using SVG and Dioxus.
//! The `svg` element and its children have a custom namespace, and are attached using different methods than regular
//! HTML elements. Any element can specify a custom namespace by using the `namespace` meta attribute.
//!
//! If you `go-to-definition` on the `svg` element, you'll see its custom namespace.

use dioxus::prelude::*;
use rand::{Rng, rng};

fn main() {
    dioxus::launch(|| {
        rsx!
{ div { user_select: "none", webkit_user_select: "none", margin_left: "10%", margin_right: "10%", h1 { "Click die to generate a new value" } div { cursor: "pointer", height: "100%", width: "100%", Dice {} } } } }); } #[component] fn Dice() -> Element { const Y: bool = true; const N: bool = false; const DOTS: [(i64, i64); 7] = [(-1, -1), (-1, -0), (-1, 1), (1, -1), (1, 0), (1, 1), (0, 0)]; const DOTS_FOR_VALUE: [[bool; 7]; 6] = [ [N, N, N, N, N, N, Y], [N, N, Y, Y, N, N, N], [N, N, Y, Y, N, N, Y], [Y, N, Y, Y, N, Y, N], [Y, N, Y, Y, N, Y, Y], [Y, Y, Y, Y, Y, Y, N], ]; let mut value = use_signal(|| 5); let active_dots = use_memo(move || &DOTS_FOR_VALUE[(value() - 1) as usize]); rsx! { svg { view_box: "-1000 -1000 2000 2000", onclick: move |event| { event.prevent_default(); value.set(rng().random_range(1..=6)) }, rect { x: -1000, y: -1000, width: 2000, height: 2000, rx: 200, fill: "#aaa" } for ((x, y), _) in DOTS.iter().zip(active_dots.read().iter()).filter(|(_, active)| **active) { circle { cx: *x * 600, cy: *y * 600, r: 200, fill: "#333" } } } } } ================================================ FILE: examples/03-assets-styling/css_modules.rs ================================================ //! This example shows how to use css modules with the `css_module` macro. Css modules convert css //! class names to unique names to avoid class name collisions. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { // Each `css_module` macro will expand the annotated struct in the current scope #[css_module("/examples/assets/css_module1.css")] struct Styles; #[css_module( "/examples/assets/css_module2.css", // `css_module` can take `AssetOptions` as well AssetOptions::css_module() .with_minify(true) .with_preload(false) )] struct OtherStyles; rsx! { div { class: Styles::container, div { class: OtherStyles::test, "Hello, world!" 
} div { class: OtherStyles::highlight, "This is highlighted" } div { class: Styles::global_class, "This uses a global class (no hash)" } } } } ================================================ FILE: examples/03-assets-styling/custom_assets.rs ================================================ //! A simple example on how to use assets loading from the filesystem. //! //! Dioxus provides the asset!() macro which is a convenient way to load assets from the filesystem. //! This ensures the asset makes it into the bundle through dependencies and is accessible in environments //! like web and android where assets are lazily loaded using platform-specific APIs. use dioxus::prelude::*; static ASSET_PATH: Asset = asset!("/examples/assets/logo.png"); fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { div { h1 { "This should show an image:" } img { src: ASSET_PATH } } } } ================================================ FILE: examples/03-assets-styling/dynamic_assets.rs ================================================ //! This example shows how to load in custom assets with the use_asset_handler hook. //! //! This hook is currently only available on desktop and allows you to intercept any request made by the webview //! and respond with your own data. You could use this to load in custom videos, streams, stylesheets, images, //! or any asset that isn't known at compile time. use dioxus::desktop::{use_asset_handler, wry::http::Response}; use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/custom_assets.css"); fn main() { dioxus::LaunchBuilder::desktop().launch(app); } fn app() -> Element { use_asset_handler("logos", |request, response| { // We get the original path - make sure you handle that! if request.uri().path() != "/logos/logo.png" { return; } response.respond(Response::new(include_bytes!("../assets/logo.png").to_vec())); }); rsx! 
{
    Stylesheet { href: STYLE }
    h1 { "Dynamic Assets" }
    // Served by the `use_asset_handler("logos", ...)` interceptor registered above,
    // not by the static asset bundle.
    img { src: "/logos/logo.png" }
}
}
================================================ FILE: examples/03-assets-styling/meta.rs ================================================
//! This example shows how to add metadata to the page with the Meta component

use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    rsx! {
        // You can use the Meta component to render a meta tag into the head of the page
        // Meta tags are useful to provide information about the page to search engines and social media sites
        // This example sets up meta tags for the open graph protocol for social media previews
        document::Meta { property: "og:title", content: "My Site", }
        document::Meta { property: "og:type", content: "website", }
        document::Meta { property: "og:url", content: "https://www.example.com", }
        document::Meta { property: "og:image", content: "https://example.com/image.jpg", }
        document::Meta { name: "description", content: "My Site is a site", }
    }
}
================================================ FILE: examples/03-assets-styling/meta_elements.rs ================================================
//! This example shows how to add metadata to the page with the Meta component

use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    rsx!
{ // You can use the Meta component to render a meta tag into the head of the page // Meta tags are useful to provide information about the page to search engines and social media sites // This example sets up meta tags for the open graph protocol for social media previews Meta { property: "og:title", content: "My Site" } Meta { property: "og:type", content: "website" } Meta { property: "og:url", content: "https://www.example.com" } Meta { property: "og:image", content: "https://example.com/image.jpg" } Meta { name: "description", content: "My Site is a site" } } } ================================================ FILE: examples/04-managing-state/context_api.rs ================================================ //! Demonstrates cross-component state sharing using Dioxus' Context API //! //! Features: //! - Context provider initialization //! - Nested component consumption //! - Reactive state updates //! - Error handling for missing context //! - Platform-agnostic implementation use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/context_api.css"); fn main() { launch(app); } #[component] fn app() -> Element { // Provide theme context at root level use_context_provider(|| Signal::new(Theme::Light)); rsx! { Stylesheet { href: STYLE } main { class: "main-container", h1 { "Theme Switcher" } ThemeControls {} ThemeDisplay {} } } } #[derive(Clone, Copy, PartialEq, Debug)] enum Theme { Light, Dark, } impl Theme { fn stylesheet(&self) -> &'static str { match self { Theme::Light => "light-theme", Theme::Dark => "dark-theme", } } } #[component] fn ThemeControls() -> Element { let mut theme = use_theme_context(); let current_theme = *theme.read(); rsx! 
{ div { class: "controls", button { class: "btn", onclick: move |_| theme.set(Theme::Light), disabled: current_theme== Theme::Light, "Switch to Light" } button { class: "btn", onclick: move |_| theme.set(Theme::Dark), disabled: current_theme == Theme::Dark, "Switch to Dark" } } } } #[component] fn ThemeDisplay() -> Element { let theme = use_theme_context(); rsx! { div { class: "display {theme.read().stylesheet()}", p { "Current theme: {theme:?}" } p { "Try switching themes using the buttons above!" } } } } fn use_theme_context() -> Signal { try_use_context::>() .expect("Theme context not found. Ensure is the root component.") } ================================================ FILE: examples/04-managing-state/error_handling.rs ================================================ //! This example showcases how to use the ErrorBoundary component to handle errors in your app. //! //! The ErrorBoundary component is a special component that can be used to catch panics and other errors that occur. //! By default, Dioxus will catch panics during rendering, async, and handlers, and bubble them up to the nearest //! error boundary. If no error boundary is present, it will be caught by the root error boundary and the app will //! render the error message as just a string. //! //! NOTE: In wasm, panics can currently not be caught by the error boundary. This is a limitation of WASM in rust. #![allow(non_snake_case)] use dioxus::prelude::*; fn main() { dioxus::launch(|| rsx! { Router:: {} }); } /// You can use an ErrorBoundary to catch errors in children and display a warning fn Simple() -> Element { rsx! { GoBackButton { "Home" } ErrorBoundary { handle_error: |error: ErrorContext| rsx! { h1 { "An error occurred" } pre { "{error:#?}" } }, ParseNumber {} } } } #[component] fn ParseNumber() -> Element { rsx! 
{ h1 { "Error handler demo" } button { onclick: move |_| { // You can return a result from an event handler which lets you easily quit rendering early if something fails let data: i32 = "0.5".parse()?; println!("parsed {data}"); Ok(()) }, "Click to throw an error" } } } // You can provide additional context for the Error boundary to visualize fn Show() -> Element { rsx! { GoBackButton { "Home" } div { ErrorBoundary { handle_error: |errors: ErrorContext| { rsx! { for error in errors.error() { // You can downcast the error to see if it's a specific type and render something specific for it if let Some(_error) = error.downcast_ref::() { div { background_color: "red", border: "black", border_width: "2px", border_radius: "5px", p { "Failed to parse data" } Link { to: Route::Home {}, "Go back to the homepage" } } } else { pre { color: "red", "{error}" } } } } }, ParseNumberWithShow {} } } } } #[component] fn ParseNumberWithShow() -> Element { rsx! { h1 { "Error handler demo" } button { onclick: move |_| { let request_data = "0.5"; let data: i32 = request_data.parse()?; println!("parsed {data}"); Ok(()) }, "Click to throw an error" } } } // On desktop, dioxus will catch panics in components and insert an error automatically fn Panic() -> Element { rsx! { GoBackButton { "Home" } ErrorBoundary { handle_error: |errors: ErrorContext| rsx! { h1 { "Another error occurred" } pre { "{errors:#?}" } }, ComponentPanic {} } } } #[component] fn ComponentPanic() -> Element { panic!("This component panics") } #[derive(Routable, Clone, Debug, PartialEq)] enum Route { #[route("/")] Home {}, #[route("/simple")] Simple {}, #[route("/panic")] Panic {}, #[route("/show")] Show {}, } fn Home() -> Element { rsx! 
{ ul { li { Link { to: Route::Simple {}, "Simple errors" } } li { Link { to: Route::Panic {}, "Capture panics" } } li { Link { to: Route::Show {}, "Show errors" } } } } } ================================================ FILE: examples/04-managing-state/global.rs ================================================ //! Example: Global signals and memos //! //! This example demonstrates how to use global signals and memos to share state across your app. //! Global signals are simply signals that live on the root of your app and are accessible from anywhere. To access a //! global signal, simply use its methods like a regular signal. Calls to `read` and `write` will be forwarded to the //! signal at the root of your app using the `static`'s address. use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/counter.css"); static COUNT: GlobalSignal = Signal::global(|| 0); static DOUBLED_COUNT: GlobalMemo = Memo::global(|| COUNT() * 2); fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { Stylesheet { href: STYLE } Increment {} Decrement {} Reset {} Display {} } } #[component] fn Increment() -> Element { rsx! { button { onclick: move |_| *COUNT.write() += 1, "Up high!" } } } #[component] fn Decrement() -> Element { rsx! { button { onclick: move |_| *COUNT.write() -= 1, "Down low!" } } } #[component] fn Display() -> Element { rsx! { p { "Count: {COUNT}" } p { "Doubled: {DOUBLED_COUNT}" } } } #[component] fn Reset() -> Element { // Not all write methods are available on global signals since `write` requires a mutable reference. In these cases, // We can simply pull out the actual signal using the signal() method. let mut as_signal = use_hook(|| COUNT.resolve()); rsx! { button { onclick: move |_| as_signal.set(0), "Reset" } } } ================================================ FILE: examples/04-managing-state/memo_chain.rs ================================================ //! 
This example shows how you can chain memos together to create a tree of memoized values. //! //! Memos will also pause when their parent component pauses, so if you have a memo that depends on a signal, and the //! signal pauses, the memo will pause too. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut value = use_signal(|| 0); let mut depth = use_signal(|| 0_usize); let items = use_memo(move || (0..depth()).map(|f| f as _).collect::>()); let state = use_memo(move || value() + 1); println!("rendering app"); rsx! { button { onclick: move |_| value += 1, "Increment" } button { onclick: move |_| depth += 1, "Add depth" } button { onclick: move |_| depth -= 1, "Remove depth" } if depth() > 0 { Child { depth, items, state } } } } #[component] fn Child(state: Memo, items: Memo>, depth: ReadSignal) -> Element { // These memos don't get re-computed when early returns happen let state = use_memo(move || state() + 1); let item = use_memo(move || items()[depth() - 1]); let depth = use_memo(move || depth() - 1); println!("rendering child: {}", depth()); rsx! { h3 { "Depth({depth})-Item({item}): {state}"} if depth() > 0 { Child { depth, state, items } } } } ================================================ FILE: examples/04-managing-state/reducer.rs ================================================ //! Example: Reducer Pattern //! ----------------- //! //! This example shows how to encapsulate state in dioxus components with the reducer pattern. //! This pattern is very useful when a single component can handle many types of input that can //! be represented by an enum. use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/radio.css"); fn main() { dioxus::launch(app); } fn app() -> Element { let mut state = use_signal(|| PlayerState { is_playing: false }); rsx!( Stylesheet { href: STYLE } h1 {"Select an option"} // Add some cute animations if the radio is playing! 
// Conditional class: the "bounce" animation is applied only while playing.
div { class: if state.read().is_playing { "bounce" }, "The radio is... " {state.read().is_playing()} "!" }
// BUG FIX: the element ids were swapped — the button labeled "Pause" (which
// dispatches PlayerAction::Pause) carried id "play", and vice versa. Each id
// now matches its button's action and label.
button { id: "pause", onclick: move |_| state.write().reduce(PlayerAction::Pause), "Pause" }
button { id: "play", onclick: move |_| state.write().reduce(PlayerAction::Play), "Play" }
)
}

/// Inputs the player "reducer" can receive.
enum PlayerAction {
    Pause,
    Play,
}

/// The single piece of state mutated by `PlayerAction`s.
#[derive(Clone)]
struct PlayerState {
    is_playing: bool,
}

impl PlayerState {
    /// Apply an action to the state — the "reduce" step of the reducer pattern.
    fn reduce(&mut self, action: PlayerAction) {
        match action {
            PlayerAction::Pause => self.is_playing = false,
            PlayerAction::Play => self.is_playing = true,
        }
    }

    /// Human-readable description of the current playback state.
    fn is_playing(&self) -> &'static str {
        match self.is_playing {
            true => "currently playing!",
            false => "not currently playing",
        }
    }
}
================================================ FILE: examples/04-managing-state/signals.rs ================================================
//! A simple example demonstrating how to use signals to modify state from several different places.
//!
//! This simple example implements a counter that can be incremented, decremented, and paused. It also demonstrates
//! that background tasks in use_future can modify the value as well.
//!
//! Most signals implement Into<ReadSignal<T>>, making ReadSignal<T> a good default type when building new
//! library components that don't need to modify their values.
use async_std::task::sleep; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut running = use_signal(|| true); let mut count = use_signal(|| 0); let mut saved_values = use_signal(|| vec![0.to_string()]); // use_memo will recompute the value of the signal whenever the captured signals change let doubled_count = use_memo(move || count() * 2); // use_effect will subscribe to any changes in the signal values it captures // effects will always run after first mount and then whenever the signal values change use_effect(move || println!("Count changed to {count}")); // We can do early returns and conditional rendering which will pause all futures that haven't been polled if count() > 30 { return rsx! { h1 { "Count is too high!" } button { onclick: move |_| count.set(0), "Press to reset" } }; } // use_future will spawn an infinitely running future that can be started and stopped use_future(move || async move { loop { if running() { count += 1; } sleep(std::time::Duration::from_millis(400)).await; } }); // use_resource will spawn a future that resolves to a value let _slow_count = use_resource(move || async move { sleep(std::time::Duration::from_millis(200)).await; count() * 2 }); rsx! { h1 { "High-Five counter: {count}" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" } button { onclick: move |_| running.toggle(), "Toggle counter" } button { onclick: move |_| saved_values.push(count.to_string()), "Save this value" } button { onclick: move |_| saved_values.clear(), "Clear saved values" } // We can do boolean operations on the current signal value if count() > 5 { h2 { "High five!" 
} } // We can cleanly map signals with iterators for value in saved_values.iter() { h3 { "Saved value: {value}" } } // We can also use the signal value as a slice if let [first, .., last] = saved_values.read().as_slice() { li { "First and last: {first}, {last}" } } else { "No saved values" } // You can pass a value directly to any prop that accepts a signal Child { count: doubled_count() } Child { count: doubled_count } } } #[component] fn Child(mut count: ReadSignal) -> Element { println!("rendering child with count {count}"); rsx! { h1 { "{count}" } } } ================================================ FILE: examples/05-using-async/backgrounded_futures.rs ================================================ //! Backgrounded futures example //! //! This showcases how use_future, use_memo, and use_effect will stop running if the component returns early. //! Generally you should avoid using early returns around hooks since most hooks are not properly designed to //! handle early returns. However, use_future *does* pause the future when the component returns early, and so //! hooks that build on top of it like use_memo and use_effect will also pause. //! //! This example is more of a demonstration of the behavior than a practical use case, but it's still interesting to see. use async_std::task::sleep; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut show_child = use_signal(|| true); let mut count = use_signal(|| 0); let child = use_memo(move || { rsx! { Child { count } } }); rsx! { // Some toggle/controls to show the child or increment the count button { onclick: move |_| show_child.toggle(), "Toggle child" } button { onclick: move |_| count += 1, "Increment count" } if show_child() { {child()} } } } #[component] fn Child(count: WriteSignal) -> Element { let mut early_return = use_signal(|| false); let early = rsx! 
{ button { onclick: move |_| early_return.toggle(), "Toggle {early_return} early return" } }; if early_return() { return early; } use_future(move || async move { loop { sleep(std::time::Duration::from_millis(100)).await; println!("Child") } }); use_effect(move || println!("Child count: {}", count())); rsx! { div { "Child component" {early} } } } ================================================ FILE: examples/05-using-async/clock.rs ================================================ //! A simple little clock that updates the time every few milliseconds. use async_std::task::sleep; use dioxus::prelude::*; use web_time::Instant; fn main() { dioxus::launch(app); } fn app() -> Element { let mut millis = use_signal(|| 0); use_future(move || async move { // Save our initial time let start = Instant::now(); loop { sleep(std::time::Duration::from_millis(27)).await; // Update the time, using a more precise approach of getting the duration since we started the timer millis.set(start.elapsed().as_millis() as i64); } }); // Format the time as a string // This is rather cheap so it's fine to leave it in the render function let time = format!( "{:02}:{:02}:{:03}", millis() / 1000 / 60 % 60, millis() / 1000 % 60, millis() % 1000 ); rsx! { document::Stylesheet { href: asset!("/examples/assets/clock.css") } div { id: "app", div { id: "title", "Carpe diem 🎉" } div { id: "clock-display", "{time}" } } } } ================================================ FILE: examples/05-using-async/future.rs ================================================ //! A simple example that shows how to use the use_future hook to run a background task. //! //! use_future won't return a value, analogous to use_effect. //! If you want to return a value from a future, use use_resource instead. use async_std::task::sleep; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut count = use_signal(|| 0); // use_future is a non-reactive hook that simply runs a future in the background. 
// You can use the UseFuture handle to pause, resume, restart, or cancel the future. use_future(move || async move { loop { sleep(std::time::Duration::from_millis(200)).await; count += 1; } }); // use_effect is a reactive hook that runs a future when signals captured by its reactive context // are modified. This is similar to use_effect in React and is useful for running side effects // that depend on the state of your component. // // Generally, we recommend performing async work in event as a reaction to a user event. use_effect(move || { spawn(async move { sleep(std::time::Duration::from_secs(5)).await; count.set(100); }); }); // You can run futures directly from event handlers as well. Note that if the event handler is // fired multiple times, the future will be spawned multiple times. rsx! { h1 { "Current count: {count}" } button { onclick: move |_| async move { sleep(std::time::Duration::from_millis(200)).await; count.set(0); }, "Slowly reset the count" } } } ================================================ FILE: examples/05-using-async/streams.rs ================================================ //! Handle async streams using use_future and awaiting the next value. use async_std::task::sleep; use dioxus::prelude::*; use futures_util::{Stream, StreamExt, future, stream}; fn main() { dioxus::launch(app); } fn app() -> Element { let mut count = use_signal(|| 10); use_future(move || async move { // Create the stream. // This could be a network request, a file read, or any other async operation. let mut stream = some_stream(); // Await the next value from the stream. while let Some(second) = stream.next().await { count.set(second); } }); rsx! 
{ h1 { "High-Five counter: {count}" } } } fn some_stream() -> std::pin::Pin>> { Box::pin( stream::once(future::ready(0)).chain(stream::iter(1..).then(|second| async move { sleep(std::time::Duration::from_secs(1)).await; second })), ) } ================================================ FILE: examples/05-using-async/suspense.rs ================================================ //! Suspense in Dioxus //! //! Suspense allows components to bubble up loading states to parent components, simplifying data fetching. use dioxus::prelude::*; fn main() { dioxus::launch(app) } fn app() -> Element { rsx! { div { h1 { "Dogs are very important" } p { "The dog or domestic dog (Canis familiaris[4][5] or Canis lupus familiaris[5])" "is a domesticated descendant of the wolf which is characterized by an upturning tail." "The dog derived from an ancient, extinct wolf,[6][7] and the modern grey wolf is the" "dog's nearest living relative.[8] The dog was the first species to be domesticated,[9][8]" "by hunter–gatherers over 15,000 years ago,[7] before the development of agriculture.[1]" } h3 { "Illustrious Dog Photo" } ErrorBoundary { handle_error: |_| rsx! { p { "Error loading doggos" } }, SuspenseBoundary { fallback: move |_| rsx! { "Loading doggos..." }, Doggo {} } } } } } #[component] fn Doggo() -> Element { // `use_loader` returns a Result, Loading>. Loading can either be "Pending" or "Failed". // When we use the `?` operator, the pending/error state will be thrown to the nearest Suspense or Error boundary. // // During SSR, `use_loader` will serialize the contents of the fetch, and during hydration, the client will // use the pre-fetched data instead of re-fetching to render. let mut dog = use_loader(move || async move { #[derive(serde::Deserialize, serde::Serialize, PartialEq)] struct DogApi { message: String, } reqwest::get("https://dog.ceo/api/breeds/image/random/") .await? .json::() .await })?; rsx! 
{ button { onclick: move |_| dog.restart(), "Click to fetch another doggo" } div { img { max_width: "500px", max_height: "500px", src: "{dog.read().message}" } } } } ================================================ FILE: examples/06-routing/flat_router.rs ================================================ //! This example shows how to use the `Router` component to create a simple navigation system. //! The more complex router example uses all of the router features, while this simple example showcases //! just the `Layout` and `Route` features. //! //! Layouts let you wrap chunks of your app with a component. This is useful for things like a footers, heeaders, etc. //! Routes are enum variants with that match the name of a component in scope. This way you can create a new route //! in your app simply by adding the variant to the enum and creating a new component with the same name. You can //! override this of course. use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/flat_router.css"); fn main() { dioxus::launch(|| { rsx! { Stylesheet { href: STYLE } Router:: {} } }) } #[derive(Routable, Clone)] #[rustfmt::skip] enum Route { #[layout(Footer)] // wrap the entire app in a footer #[route("/")] Home {}, #[route("/games")] Games {}, #[route("/play")] Play {}, #[route("/settings")] Settings {}, } #[component] fn Footer() -> Element { rsx! { nav { Link { to: Route::Home {}, class: "nav-btn", "Home" } Link { to: Route::Games {}, class: "nav-btn", "Games" } Link { to: Route::Play {}, class: "nav-btn", "Play" } Link { to: Route::Settings {}, class: "nav-btn", "Settings" } } div { id: "content", Outlet:: {} } } } #[component] fn Home() -> Element { rsx!( h1 { "Home" } p { "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." 
} ) } #[component] fn Games() -> Element { rsx!( h1 { "Games" } // Dummy text that talks about video games p { "Lorem games are sit amet Sed do eiusmod tempor et dolore magna aliqua." } ) } #[component] fn Play() -> Element { rsx!( h1 { "Play" } p { "Always play with your full heart adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." } ) } #[component] fn Settings() -> Element { rsx!( h1 { "Settings" } p { "Settings are consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." } ) } ================================================ FILE: examples/06-routing/hash_fragment_state.rs ================================================ //! This example shows how to use the hash segment to store state in the url. //! //! You can set up two way data binding between the url hash and signals. //! //! Run this example on desktop with //! ```sh //! dx serve --example hash_fragment_state --features=ciborium,base64 //! ``` //! Or on web with //! ```sh //! dx serve --platform web --features web --example hash_fragment_state --features=ciborium,base64 -- --no-default-features //! ``` use std::{fmt::Display, str::FromStr}; use base64::engine::general_purpose::STANDARD; use base64::Engine; use dioxus::prelude::*; use serde::{Deserialize, Serialize}; fn main() { dioxus::launch(|| { rsx! 
{ Router:: {} } }); } #[derive(Routable, Clone, Debug, PartialEq)] #[rustfmt::skip] enum Route { #[route("/#:url_hash")] Home { url_hash: State, }, } // You can use a custom type with the hash segment as long as it implements Display, FromStr and Default #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)] struct State { counters: Vec, } // Display the state in a way that can be parsed by FromStr impl Display for State { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut serialized = Vec::new(); if ciborium::into_writer(self, &mut serialized).is_ok() { write!(f, "{}", STANDARD.encode(serialized))?; } Ok(()) } } enum StateParseError { DecodeError(base64::DecodeError), CiboriumError(ciborium::de::Error), } impl std::fmt::Display for StateParseError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::DecodeError(err) => write!(f, "Failed to decode base64: {}", err), Self::CiboriumError(err) => write!(f, "Failed to deserialize: {}", err), } } } // Parse the state from a string that was created by Display impl FromStr for State { type Err = StateParseError; fn from_str(s: &str) -> Result { let decompressed = STANDARD .decode(s.as_bytes()) .map_err(StateParseError::DecodeError)?; let parsed = ciborium::from_reader(std::io::Cursor::new(decompressed)) .map_err(StateParseError::CiboriumError)?; Ok(parsed) } } #[component] fn Home(url_hash: ReadSignal) -> Element { // The initial state of the state comes from the url hash let mut state = use_signal(&*url_hash); // Change the state signal when the url hash changes use_memo(move || { if *state.peek() != *url_hash.read() { state.set(url_hash()); } }); // Change the url hash when the state changes use_memo(move || { if *state.read() != *url_hash.peek() { navigator().replace(Route::Home { url_hash: state() }); } }); rsx! 
{ button { onclick: move |_| state.write().counters.clear(), "Reset" } button { onclick: move |_| { state.write().counters.push(0); }, "Add Counter" } for counter in 0..state.read().counters.len() { div { button { onclick: move |_| { state.write().counters.remove(counter); }, "Remove" } button { onclick: move |_| { state.write().counters[counter] += 1; }, "Count: {state.read().counters[counter]}" } } } } } ================================================ FILE: examples/06-routing/link.rs ================================================ //! How to use links in Dioxus //! //! The `router` crate gives us a `Link` component which is a much more powerful version of the standard HTML link. //! However, you can use the traditional `` tag if you want to build your own `Link` component. //! //! The `Link` component integrates with the Router and is smart enough to detect if the link is internal or external. //! It also allows taking any `Route` as a target, making your links typesafe use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/links.css"); fn main() { dioxus::launch(app); } fn app() -> Element { rsx! ( Stylesheet { href: STYLE } Router:: {} ) } #[derive(Routable, Clone)] #[rustfmt::skip] enum Route { #[layout(Header)] #[route("/")] Home {}, #[route("/default-links")] DefaultLinks {}, #[route("/settings")] Settings {}, } #[component] fn Header() -> Element { rsx! { h1 { "Your app here" } nav { id: "nav", Link { to: Route::Home {}, "home" } Link { to: Route::DefaultLinks {}, "default links" } Link { to: Route::Settings {}, "settings" } } Outlet:: {} } } #[component] fn Home() -> Element { rsx!( h1 { "Home" } ) } #[component] fn Settings() -> Element { rsx!( h1 { "Settings" } ) } #[component] fn DefaultLinks() -> Element { rsx! 
{ // Just some default links div { id: "external-links", // This link will open in a webbrowser a { href: "http://dioxuslabs.com/", "Default link - links outside of your app" } // This link will do nothing - we're preventing the default behavior // It will just log "Hello Dioxus" to the console a { href: "http://dioxuslabs.com/", onclick: |event| { event.prevent_default(); println!("Hello Dioxus") }, "Custom event link - links inside of your app" } } } } ================================================ FILE: examples/06-routing/query_segment_search.rs ================================================ //! This example shows how to access and use query segments present in an url on the web. //! //! The enum router makes it easy to use your route as state in your app. This example shows how to use the router to encode search text into the url and decode it back into a string. //! //! Run this example on desktop with //! ```sh //! dx serve --example query_segment_search //! ``` //! Or on web with //! ```sh //! dx serve --platform web --features web --example query_segment_search -- --no-default-features //! ``` use dioxus::prelude::*; fn main() { dioxus::launch(|| { rsx! { Router:: {} } }); } #[derive(Routable, Clone, Debug, PartialEq)] #[rustfmt::skip] enum Route { #[route("/")] Home {}, // The each query segment must implement and Display. // You can use multiple query segments separated by `&`s. #[route("/search?:query&:word_count")] Search { query: String, word_count: usize, }, } #[component] fn Home() -> Element { // Display a list of example searches in the home page rsx! 
{ ul { li { Link { to: Route::Search { query: "hello".to_string(), word_count: 1 }, "Search for results containing 'hello' and at least one word" } } li { Link { to: Route::Search { query: "dioxus".to_string(), word_count: 2 }, "Search for results containing 'dioxus' and at least two word" } } } } } // Instead of accepting String and usize directly, we use ReadSignal to make the parameters `Copy` and let us subscribe to them automatically inside the meme #[component] fn Search(query: ReadSignal, word_count: ReadSignal) -> Element { const ITEMS: &[&str] = &[ "hello", "world", "hello world", "hello dioxus", "hello dioxus-router", ]; // Find all results that contain the query and the right number of words // This memo will automatically rerun when the query or word count changes because we read the signals inside the closure let results = use_memo(move || { ITEMS .iter() .filter(|item| { item.contains(&*query.read()) && item.split_whitespace().count() >= word_count() }) .collect::>() }); rsx! { h1 { "Search for {query}" } input { oninput: move |e| { // Every time the query changes, we change the current route to the new query navigator().replace(Route::Search { query: e.value(), word_count: word_count(), }); }, value: "{query}", } input { r#type: "number", oninput: move |e| { // Every time the word count changes, we change the current route to the new query if let Ok(word_count) = e.value().parse() { navigator().replace(Route::Search { query: query(), word_count, }); } }, value: "{word_count}", } for result in results.read().iter() { div { "{result}" } } } } ================================================ FILE: examples/06-routing/router.rs ================================================ //! An advanced usage of the router with nested routes and redirects. //! //! Dioxus implements an enum-based router, which allows you to define your routes in a type-safe way. //! However, since we need to bake quite a bit of logic into the enum, we have to add some extra syntax. 
//! //! Note that you don't need to use advanced features like nest, redirect, etc, since these can all be implemented //! manually, but they are provided as a convenience. use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/router.css"); fn main() { dioxus::launch(|| { rsx! { Stylesheet { href: STYLE } Router:: {} } }); } // Turn off rustfmt since we're doing layouts and routes in the same enum #[derive(Routable, Clone, Debug, PartialEq)] #[rustfmt::skip] #[allow(clippy::empty_line_after_outer_attr)] enum Route { // Wrap Home in a Navbar Layout #[layout(NavBar)] // The default route is always "/" unless otherwise specified #[route("/")] Home {}, // Wrap the next routes in a layout and a nest #[nest("/blog")] #[layout(Blog)] // At "/blog", we want to show a list of blog posts #[route("/")] BlogList {}, // At "/blog/:name", we want to show a specific blog post, using the name slug #[route("/:name")] BlogPost { name: String }, // We need to end the blog layout and nest // Note we don't need either - we could've just done `/blog/` and `/blog/:name` without nesting, // but it's a bit cleaner this way #[end_layout] #[end_nest] // And the regular page layout #[end_layout] // Add some redirects for the `/myblog` route #[nest("/myblog")] #[redirect("/", || Route::BlogList {})] #[redirect("/:name", |name: String| Route::BlogPost { name })] #[end_nest] // Finally, we need to handle the 404 page #[route("/:..route")] PageNotFound { route: Vec, }, } #[component] fn NavBar() -> Element { rsx! { nav { id: "navbar", Link { to: Route::Home {}, "Home" } Link { to: Route::BlogList {}, "Blog" } } Outlet:: {} } } #[component] fn Home() -> Element { rsx! { h1 { "Welcome to the Dioxus Blog!" } } } #[component] fn Blog() -> Element { rsx! { h1 { "Blog" } Outlet:: {} } } #[component] fn BlogList() -> Element { rsx! 
{ h2 { "Choose a post" } div { id: "blog-list", Link { to: Route::BlogPost { name: "Blog post 1".into() }, "Read the first blog post" } Link { to: Route::BlogPost { name: "Blog post 2".into() }, "Read the second blog post" } } } } // We can use the `name` slug to show a specific blog post // In theory we could read from the filesystem or a database here #[component] fn BlogPost(name: String) -> Element { let contents = match name.as_str() { "Blog post 1" => "This is the first blog post. It's not very interesting.", "Blog post 2" => "This is the second blog post. It's not very interesting either.", _ => "This blog post doesn't exist.", }; rsx! { h2 { "{name}" } p { "{contents}" } } } #[component] fn PageNotFound(route: Vec) -> Element { rsx! { h1 { "Page not found" } p { "We are terribly sorry, but the page you requested doesn't exist." } pre { color: "red", "log:\nattempted to navigate to: {route:?}" } } } ================================================ FILE: examples/06-routing/router_resource.rs ================================================ //! Example: Updating components with use_resource //! ----------------- //! //! This example shows how to use ReadSignal to make props reactive //! when linking to it from the same component, when using use_resource use dioxus::prelude::*; #[derive(Clone, Routable, Debug, PartialEq)] enum Route { #[route("/")] Home {}, #[route("/blog/:id")] Blog { id: i32 }, } fn main() { dioxus::launch(App); } #[component] fn App() -> Element { rsx! { Router:: {} } } // We use id: ReadSignal instead of id: i32 to make id work with reactive hooks // Any i32 we pass in will automatically be converted into a ReadSignal #[component] fn Blog(id: ReadSignal) -> Element { async fn future(n: i32) -> i32 { n } // Because we accept ReadSignal instead of i32, the resource will automatically subscribe to the id when we read it let res = use_resource(move || future(id())); match res() { Some(id) => rsx! 
{ div { "Blog post {id}" } for i in 0..10 { div { Link { to: Route::Blog { id: i }, "Go to Blog {i}" } } } }, None => rsx! {}, } } #[component] fn Home() -> Element { rsx! { Link { to: Route::Blog { id: 0 }, "Go to blog" } } } ================================================ FILE: examples/06-routing/router_restore_scroll.rs ================================================ use std::rc::Rc; use dioxus::html::geometry::PixelsVector2D; use dioxus::prelude::*; #[derive(Clone, Routable, Debug, PartialEq)] enum Route { #[route("/")] Home {}, #[route("/blog/:id")] Blog { id: i32 }, } fn main() { dioxus::launch(App); } #[component] fn App() -> Element { use_context_provider(|| Signal::new(Scroll::default())); rsx! { Router:: {} } } #[component] fn Blog(id: i32) -> Element { rsx! { GoBackButton { "Go back" } div { "Blog post {id}" } } } type Scroll = Option; #[component] fn Home() -> Element { let mut element: Signal>> = use_signal(|| None); let mut scroll = use_context::>(); _ = use_resource(move || async move { if let (Some(element), Some(scroll)) = (element.read().as_ref(), *scroll.peek()) { element .scroll(scroll, ScrollBehavior::Instant) .await .unwrap(); } }); rsx! { div { height: "300px", overflow_y: "auto", border: "1px solid black", onmounted: move |event| element.set(Some(event.data())), onscroll: move |_| async move { if let Some(element) = element.cloned() { scroll.set(Some(element.get_scroll_offset().await.unwrap())) } }, for i in 0..100 { div { height: "20px", Link { to: Route::Blog { id: i }, "Blog {i}" } } } } } } ================================================ FILE: examples/06-routing/simple_router.rs ================================================ //! A simple example of a router with a few routes and a nav bar. use dioxus::prelude::*; fn main() { // launch the router, using our `Route` component as the generic type // This will automatically boot the app to "/" unless otherwise specified dioxus::launch(|| rsx! 
{ Router:: {} }); } /// By default, the Routable derive will use the name of the variant as the route /// You can also specify a specific component by adding the Component name to the `#[route]` attribute #[rustfmt::skip] #[derive(Routable, Clone, PartialEq)] enum Route { // Wrap the app in a Nav layout #[layout(Nav)] #[route("/")] Homepage {}, #[route("/blog/:id")] Blog { id: String }, } #[component] fn Homepage() -> Element { rsx! { h1 { "Welcome home" } } } #[component] fn Blog(id: String) -> Element { rsx! { h1 { "How to make: " } p { "{id}" } } } /// A simple nav bar that links to the homepage and blog pages /// /// The `Route` enum gives up typesafe routes, allowing us to rename routes and serialize them automatically #[component] fn Nav() -> Element { rsx! { nav { li { Link { to: Route::Homepage {}, "Go home" } } li { Link { to: Route::Blog { id: "Brownies".to_string(), }, onclick: move |_| { println!("Clicked on Brownies") }, "Learn Brownies" } } li { Link { to: Route::Blog { id: "Cookies".to_string(), }, "Learn Cookies" } } } div { Outlet:: {} } } } ================================================ FILE: examples/07-fullstack/auth/Cargo.toml ================================================ [package] name = "fullstack-auth-example" version = "0.1.0" edition = "2021" publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] dioxus-web = { workspace = true, features = ["hydrate"], optional = true } dioxus = { features = ["fullstack"], workspace = true } axum = { workspace = true, optional = true, features = ["macros"]} tokio = { workspace = true, features = ["full"], optional = true } tower-http = { workspace = true, features = ["auth"], optional = true } async-trait = { version = "0.1.88", optional = true } sqlx = { version = "0.8.6", features = [ "macros", "migrate", "postgres", "sqlite", "_unstable-all-types", "tls-native-tls", "runtime-tokio", ], optional = true } http = { workspace = 
true, optional = true } tower = { workspace = true, optional = true } execute = "0.2.13" serde = { workspace = true } anyhow = { workspace = true } [dependencies.axum_session] workspace = true optional = true [dependencies.axum_session_auth] workspace = true optional = true [dependencies.axum_session_sqlx] workspace = true features = ["sqlite"] optional = true [features] default = ["web", "server"] server = [ "dioxus/server", "dep:axum", "dep:tokio", "dep:tower-http", "dep:async-trait", "dep:sqlx", "dep:axum_session", "dep:axum_session_auth", "dep:axum_session_sqlx", "dep:http", "dep:tower", ] web = ["dioxus/web", "dep:dioxus-web"] ================================================ FILE: examples/07-fullstack/auth/src/auth.rs ================================================ //! The code here is pulled from the `axum-session-auth` crate examples, requiring little to no //! modification to work with dioxus fullstack. use async_trait::async_trait; use axum_session_auth::*; use axum_session_sqlx::SessionSqlitePool; use serde::{Deserialize, Serialize}; use sqlx::sqlite::SqlitePool; use std::collections::HashSet; pub(crate) type Session = axum_session_auth::AuthSession; pub(crate) type AuthLayer = axum_session_auth::AuthSessionLayer; #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct User { pub id: i32, pub anonymous: bool, pub username: String, pub permissions: HashSet, } #[derive(sqlx::FromRow, Clone)] pub(crate) struct SqlPermissionTokens { pub token: String, } #[async_trait] impl Authentication for User { async fn load_user(userid: i64, pool: Option<&SqlitePool>) -> Result { let db = pool.unwrap(); #[derive(sqlx::FromRow, Clone)] struct SqlUser { id: i32, anonymous: bool, username: String, } let sqluser = sqlx::query_as::<_, SqlUser>("SELECT * FROM users WHERE id = $1") .bind(userid) .fetch_one(db) .await .unwrap(); //lets just get all the tokens the user can use, we will only use the full permissions if modifying them. 
let sql_user_perms = sqlx::query_as::<_, SqlPermissionTokens>( "SELECT token FROM user_permissions WHERE user_id = $1;", ) .bind(userid) .fetch_all(db) .await .unwrap(); Ok(User { id: sqluser.id, anonymous: sqluser.anonymous, username: sqluser.username, permissions: sql_user_perms.into_iter().map(|x| x.token).collect(), }) } fn is_authenticated(&self) -> bool { !self.anonymous } fn is_active(&self) -> bool { !self.anonymous } fn is_anonymous(&self) -> bool { self.anonymous } } #[async_trait] impl HasPermission for User { async fn has(&self, perm: &str, _pool: &Option<&SqlitePool>) -> bool { self.permissions.contains(perm) } } ================================================ FILE: examples/07-fullstack/auth/src/main.rs ================================================ //! This example showcases how to use the `axum-session-auth` crate with Dioxus fullstack. //! We add the `auth::Session` extractor to our server functions to get access to the current user session. //! //! To initialize the axum router, we use `dioxus::serve` to spawn a custom axum server that creates //! our database, session store, and authentication layer. //! //! The `.serve_dioxus_application` method is used to mount our Dioxus app as a fallback service to //! handle HTML rendering and static assets. //! //! We easily share the "permissions" between the server and client by using a `HashSet` //! which is serialized to/from JSON automatically by the server function system. use std::collections::HashSet; use dioxus::prelude::*; #[cfg(feature = "server")] mod auth; fn main() { // On the client, we simply launch the app as normal, taking over the main thread #[cfg(not(feature = "server"))] dioxus::launch(app); // On the server, we can use `dioxus::serve` to create a server that serves our app. // // The `serve` function takes a closure that returns a `Future` which resolves to an `axum::Router`. 
// // We return a `Router` such that dioxus sets up logging, hot-reloading, devtools, and wires up the // IP and PORT environment variables to our server. #[cfg(feature = "server")] dioxus::serve(|| async { use crate::auth::*; use axum_session::{SessionConfig, SessionLayer, SessionStore}; use axum_session_auth::AuthConfig; use axum_session_sqlx::SessionSqlitePool; use sqlx::{sqlite::SqlitePoolOptions, Executor}; // Create an in-memory SQLite database and set up our tables let db = SqlitePoolOptions::new() .max_connections(20) .connect_with("sqlite::memory:".parse()?) .await?; // Create the tables (sessions, users) db.execute(r#"CREATE TABLE IF NOT EXISTS users ( "id" INTEGER PRIMARY KEY, "anonymous" BOOLEAN NOT NULL, "username" VARCHAR(256) NOT NULL )"#,) .await?; db.execute(r#"CREATE TABLE IF NOT EXISTS user_permissions ( "user_id" INTEGER NOT NULL, "token" VARCHAR(256) NOT NULL)"#,) .await?; // Insert in some test data for two users (one anonymous, one normal) db.execute(r#"INSERT INTO users (id, anonymous, username) SELECT 1, true, 'Guest' ON CONFLICT(id) DO UPDATE SET anonymous = EXCLUDED.anonymous, username = EXCLUDED.username"#,) .await?; db.execute(r#"INSERT INTO users (id, anonymous, username) SELECT 2, false, 'Test' ON CONFLICT(id) DO UPDATE SET anonymous = EXCLUDED.anonymous, username = EXCLUDED.username"#,) .await?; // Make sure our test user has the ability to view categories db.execute(r#"INSERT INTO user_permissions (user_id, token) SELECT 2, 'Category::View'"#) .await?; // Create an axum router that dioxus will attach the app to Ok(dioxus::server::router(app) .layer( AuthLayer::new(Some(db.clone())) .with_config(AuthConfig::::default().with_anonymous_user_id(Some(1))), ) .layer(SessionLayer::new( SessionStore::::new( Some(db.into()), SessionConfig::default().with_table_name("test_table"), ) .await?, ))) }); } /// The UI for our app - is just a few buttons to call our server functions and display the results. 
fn app() -> Element { let mut login = use_action(login); let mut user_name = use_action(get_user_name); let mut permissions = use_action(get_permissions); let mut logout = use_action(logout); let fetch_new = move |_| async move { user_name.call().await; permissions.call().await; }; rsx! { div { button { onclick: move |_| async move { login.call().await; }, "Login Test User" } button { onclick: move |_| async move { logout.call().await; }, "Logout" } button { onclick: fetch_new, "Fetch User Info" } pre { "Logged in: {login.value():?}" } pre { "User name: {user_name.value():?}" } pre { "Permissions: {permissions.value():?}" } } } } /// We use the `auth::Session` extractor to get access to the current user session. /// This lets us modify the user session, log in/out, and access the current user. #[post("/api/user/login", auth: auth::Session)] pub async fn login() -> Result<()> { auth.login_user(2); Ok(()) } /// Just like `login`, but this time we log out the user. #[post("/api/user/logout", auth: auth::Session)] pub async fn logout() -> Result<()> { auth.logout_user(); Ok(()) } /// We can access the current user via `auth.current_user`. /// We can have both anonymous user (id 1) and a logged in user (id 2). /// /// Logged-in users will have more permissions which we can modify. #[post("/api/user/name", auth: auth::Session)] pub async fn get_user_name() -> Result { Ok(auth.current_user.unwrap().username) } /// Get the current user's permissions, guarding the endpoint with the `Auth` validator. /// If this returns false, we use the `or_unauthorized` extension to return a 401 error. 
#[get("/api/user/permissions", auth: auth::Session)] pub async fn get_permissions() -> Result> { use crate::auth::User; use axum_session_auth::{Auth, Rights}; let user = auth.current_user.unwrap(); Auth::::build([axum::http::Method::GET], false) .requires(Rights::any([ Rights::permission("Category::View"), Rights::permission("Admin::View"), ])) .validate(&user, &axum::http::Method::GET, None) .await .or_unauthorized("You do not have permission to view categories")?; Ok(user.permissions) } ================================================ FILE: examples/07-fullstack/custom_axum_serve.rs ================================================ //! This example demonstrates how to use `dioxus::serve` with a custom Axum router. //! //! By default, `dioxus::launch` takes over the main thread and runs the Dioxus application. //! However, if you want to integrate Dioxus into an existing web server or use a custom router, //! you can use `dioxus::serve` to create a server that serves your Dioxus application alongside //! other routes. //! //! `dioxus::serve` sets up an async runtime, logging, hot-reloading, crash handling, and more. //! You can then use the `.serve_dioxus_application` method on your router to serve the Dioxus app. //! //! `dioxus::serve` is most useful for customizing the server setup, such as adding middleware, //! custom routes, or integrating with existing axum backend code. //! //! Note that `dioxus::serve` is accepts a Router from `axum`. Dioxus will use the IP and PORT //! environment variables to determine where to bind the server. To customize the port, use environment //! variables or a `.env` file. //! //! On other platforms (like desktop or mobile), you'll want to use `dioxus::launch` instead and then //! handle async loading of data through hooks like `use_future` or `use_resource` and give the user //! a loading state while data is being fetched. use dioxus::prelude::*; fn main() { // On the client we just launch the app as normal. 
#[cfg(not(feature = "server"))] dioxus::launch(app); // On the server, we can use `dioxus::serve` and `.serve_dioxus_application` to serve our app with routing. // The `dioxus::server::router` function creates a new axum Router with the necessary routes to serve the Dioxus app. #[cfg(feature = "server")] dioxus::serve(|| async move { use dioxus::server::axum::routing::{get, post}; Ok(dioxus::server::router(app) .route("/submit", post(|| async { "Form submitted!" })) .route("/about", get(|| async { "About us" })) .route("/contact", get(|| async { "Contact us" }))) }); } fn app() -> Element { rsx! { div { "Hello from Dioxus!" } } } ================================================ FILE: examples/07-fullstack/custom_error_page.rs ================================================ //! To render custom error pages, you can create a layout component that captures errors from routes //! with an `ErrorBoundary` and display different content based on the error type. //! //! While capturing the error, we set the appropriate HTTP status code using `FullstackContext::commit_error_status`. //! The router will then use this status code when doing server-side rendering (SSR). //! //! Any errors not captured by an error boundary will be handled by dioxus-ssr itself, which will render //! a generic error page instead. use dioxus::prelude::*; use dioxus_fullstack::{FullstackContext, StatusCode}; fn main() { dioxus::launch(|| { rsx! { Router:: {} } }); } #[derive(Routable, PartialEq, Clone, Debug)] enum Route { #[layout(ErrorLayout)] #[route("/")] Home, #[route("/blog/:id")] Blog { id: u32 }, } #[component] fn Home() -> Element { rsx! { div { "Welcome to the home page!" 
}
div {
    display: "flex",
    flex_direction: "column",
    Link { to: Route::Blog { id: 1 }, "Go to blog post 1" }
    Link { to: Route::Blog { id: 2 }, "Go to blog post 2" }
    Link { to: Route::Blog { id: 3 }, "Go to blog post 3 (error)" }
    Link { to: Route::Blog { id: 4 }, "Go to blog post 4 (not found)" }
}
}
}

#[component]
fn Blog(id: u32) -> Element {
    match id {
        1 => rsx! { div { "Blog post 1" } },
        2 => rsx! { div { "Blog post 2" } },
        3 => dioxus_core::bail!("An error occurred while loading the blog post!"),
        _ => HttpError::not_found("Blog post not found")?,
    }
}

/// In our `ErrorLayout` component, we wrap the `Outlet` in an `ErrorBoundary`. This lets us attempt
/// to downcast the error to an `HttpError` and set the appropriate status code.
///
/// The `commit_error_status` function will attempt to downcast the error to an `HttpError` and
/// set the status code accordingly. Note that you can commit any status code you want with `commit_http_status`.
///
/// The router will automatically set the HTTP status code when doing SSR.
#[component]
fn ErrorLayout() -> Element {
    rsx! {
        ErrorBoundary {
            handle_error: move |err: ErrorContext| {
                let http_error = FullstackContext::commit_error_status(err.error().unwrap());
                match http_error.status {
                    StatusCode::NOT_FOUND => rsx! { div { "404 - Page not found" } },
                    StatusCode::UNAUTHORIZED => rsx! { div { "401 - Unauthorized" } },
                    StatusCode::INTERNAL_SERVER_ERROR => rsx! { div { "500 - Internal Server Error" } },
                    _ => rsx!
{ div { "An unknown error occurred" } }, } }, Outlet:: {} } } } ================================================ FILE: examples/07-fullstack/desktop/Cargo.toml ================================================ [package] name = "fullstack-desktop-example" version = "0.1.0" edition = "2021" publish = false [dependencies] dioxus = { workspace = true, features = ["launch", "fullstack"] } serde = { workspace = true } [features] default = [] server = ["dioxus/server"] desktop = ["dioxus/desktop"] ================================================ FILE: examples/07-fullstack/desktop/src/main.rs ================================================ #![allow(non_snake_case)] use dioxus::prelude::*; fn main() { // Make sure to set the url of the server where server functions are hosted - they aren't always at localhost #[cfg(not(feature = "server"))] dioxus::fullstack::set_server_url("http://127.0.0.1:8080"); dioxus::launch(app); } pub fn app() -> Element { let mut count = use_signal(|| 0); let mut text = use_signal(|| "...".to_string()); rsx! { h1 { "High-Five counter: {count}" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" } button { onclick: move |_| async move { let data = get_server_data().await?; println!("Client received: {}", data); text.set(data.clone()); post_server_data(data).await?; Ok(()) }, "Run a server function" } "Server said: {text}" } } #[post("/api/data")] async fn post_server_data(data: String) -> ServerFnResult { println!("Server received: {}", data); Ok(()) } #[get("/api/data")] async fn get_server_data() -> ServerFnResult { Ok("Hello from the server!".to_string()) } ================================================ FILE: examples/07-fullstack/dog_app_self_hosted.rs ================================================ //! This example showcases a fullstack variant of the "dog app" demo, but with the loader and actions //! self-hosted instead of using the Dog API. 
use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    // Fetch the list of breeds from the Dog API, using the `?` syntax to suspend or throw errors
    let breed_list = use_loader(list_breeds)?;

    // Whenever this action is called, it will re-run the future and return the result.
    let mut breed = use_action(get_random_breed_image);

    rsx! {
        h1 { "Doggo selector" }
        div { width: "400px",
            for cur_breed in breed_list.read().iter().take(20).cloned() {
                button {
                    onclick: move |_| {
                        breed.call(cur_breed.clone());
                    },
                    "{cur_breed}"
                }
            }
        }
        div {
            match breed.value() {
                None => rsx! { div { "Click the button to fetch a dog!" } },
                Some(Err(_e)) => rsx! { div { "Failed to fetch a dog, please try again." } },
                Some(Ok(res)) => rsx! {
                    img { max_width: "500px", max_height: "500px", src: "{res}" }
                },
            }
        }
    }
}

/// Return the fixed list of breeds this example hosts itself.
// NOTE(review): the stripped return type is restored as `Vec<String>` to match the
// `vec![... .into()]` body below.
#[get("/api/breeds/list/all")]
async fn list_breeds() -> Result<Vec<String>> {
    Ok(vec!["bulldog".into(), "labrador".into(), "poodle".into()])
}

/// Map a breed name to a hosted image URL, or return a 404 for unknown breeds.
#[get("/api/breed/{breed}/images/random")]
async fn get_random_breed_image(breed: String) -> Result<String> {
    match breed.as_str() {
        "bulldog" => Ok("https://images.dog.ceo/breeds/buhund-norwegian/hakon3.jpg".into()),
        "labrador" => Ok("https://images.dog.ceo/breeds/labrador/n02099712_2501.jpg".into()),
        "poodle" => Ok("https://images.dog.ceo/breeds/poodle-standard/n02113799_5973.jpg".into()),
        _ => HttpError::not_found("Breed not found")?,
    }
}

================================================
FILE: examples/07-fullstack/full_request_access.rs
================================================
//! This example shows how to get access to the full axum request in a handler.
//!
//! The extra arguments in the `post` macro are passed to the handler function, but not exposed
//! to the client. This means we can still call the endpoint from the client, but have full access
//! to the request on the server.

use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    let mut file_id = use_action(full_request);

    rsx!
{
        div { "Access to full axum request" }
        button { onclick: move |_| file_id.call(), "Upload file" }
    }
}

/// Example of accessing the full axum request in a handler
///
/// The `request: axum_core::extract::Request` argument is placed in the handler function, but not
/// exposed to the client.
#[post("/api/full_request_access", request: axum_core::extract::Request)]
async fn full_request() -> Result<()> {
    let headers = request.headers();
    if headers.contains_key("x-api-key") {
        println!("API key found");
    } else {
        println!("No API key found");
    }
    Ok(())
}

================================================
FILE: examples/07-fullstack/fullstack_hello_world.rs
================================================
//! A simple example using Dioxus Fullstack to call a server action.
//!
//! the `get`, `post`, `put`, `delete`, etc macros are used to define server actions that can be
//! called from the client. The action can take arguments and return a value, and the client
//! will automatically serialize and deserialize the data.

use dioxus::prelude::*;

fn main() {
    dioxus::launch(|| {
        let mut message = use_action(get_message);

        rsx! {
            h1 { "Server says: "}
            pre { "{message:?}"}
            button { onclick: move |_| message.call("world".into(), 30), "Click me!" }
        }
    });
}

// NOTE(review): the stripped return type is restored as `String` to match the `format!`
// value returned below.
#[get("/api/:name/?age")]
async fn get_message(name: String, age: i32) -> Result<String> {
    Ok(format!("Hello {}, you are {} years old!", name, age))
}

================================================
FILE: examples/07-fullstack/handling_errors.rs
================================================
//! An example of handling errors from server functions.
//!
//! This example showcases a few important error handling patterns when using Dioxus Fullstack.
//!
//! Run with:
//!
//! ```sh
//! dx serve --web
//! ```
//!
//! What this example shows:
//! - You can return `anyhow::Result<T>` (i.e. `Result` without an `E`) for
//!   untyped errors with rich context (converted to HTTP 500 responses by default).
//!
- You can return `Result` where `E` is one of: //! - `HttpError` (convenience for returning HTTP status codes) //! - `StatusCode` (return raw status codes) //! - a custom error type that implements `From` or //! is `Serialize`/`Deserialize` so it can be sent to the client. //! - This file demonstrates external API errors, custom typed errors, explicit //! HTTP errors, and basic success cases. The UI uses `use_action` to call //! server functions and shows loading/result states simply. //! //! Try running requests against the endpoints directly with `curl` or `postman` to see the actual HTTP responses! use dioxus::fullstack::{AsStatusCode, Json, StatusCode}; use dioxus::prelude::*; use serde::{Deserialize, Serialize}; fn main() { dioxus::launch(|| { let mut dog_data = use_action(get_dog_data); let mut dog_data_err = use_action(get_dog_data_err); let mut ip_data = use_action(get_ip_data); let mut custom_data = use_action(move || { get_custom_encoding(Json(serde_json::json!({ "example": "data", "number": 123, "array": [1, 2, 3], }))) }); let mut error_data = use_action(get_throws_error); let mut typed_error_data = use_action(get_throws_typed_error); let mut throws_ok_data = use_action(get_throws_ok); let mut http_error_data = use_action(throws_http_error); let mut http_error_context_data = use_action(throws_http_error_context); rsx! 
{
            button { onclick: move |_| { dog_data.call(); }, "Fetch dog data" }
            button { onclick: move |_| { ip_data.call(); }, "Fetch IP data" }
            button { onclick: move |_| { custom_data.call(); }, "Fetch custom encoded data" }
            button { onclick: move |_| { error_data.call(); }, "Fetch error data" }
            button { onclick: move |_| { typed_error_data.call(); }, "Fetch typed error data" }
            button { onclick: move |_| { dog_data_err.call(); }, "Fetch dog error data" }
            button { onclick: move |_| { throws_ok_data.call(); }, "Fetch throws ok data" }
            button { onclick: move |_| { http_error_data.call(); }, "Fetch HTTP 400" }
            button { onclick: move |_| { http_error_context_data.call(); }, "Fetch HTTP 400 (context)" }
            button {
                onclick: move |_| {
                    ip_data.reset();
                    dog_data.reset();
                    custom_data.reset();
                    error_data.reset();
                    typed_error_data.reset();
                    dog_data_err.reset();
                    throws_ok_data.reset();
                    http_error_data.reset();
                    http_error_context_data.reset();
                },
                "Clear data"
            }
            div { display: "flex", flex_direction: "column", gap: "8px",
                pre { "Dog data: {dog_data.value():#?}" }
                pre { "IP data: {ip_data.value():#?}" }
                pre { "Custom encoded data: {custom_data.value():#?}" }
                pre { "Error data: {error_data.value():#?}" }
                pre { "Typed error data: {typed_error_data.value():#?}" }
                pre { "HTTP 400 data: {http_error_data.value():#?}" }
                pre { "HTTP 400 (context) data: {http_error_context_data.value():#?}" }
                pre { "Dog error data: {dog_data_err.value():#?}" }
                pre { "Throws ok data: {throws_ok_data.value():#?}" }
            }
        }
    });
}

/// Simple POST endpoint used to show a successful server function that returns `StatusCode`.
#[post("/api/data")]
async fn post_server_data(data: String) -> Result<(), StatusCode> {
    println!("Server received: {}", data);
    Ok(())
}

/// Fetches IP info from an external service. Demonstrates propagation of external errors.
// NOTE(review): the stripped return type is restored as `serde_json::Value`, which also
// supplies the deserialization target for `.json()` — confirm against the upstream example.
#[get("/api/ip-data")]
async fn get_ip_data() -> Result<serde_json::Value> {
    Ok(reqwest::get("https://httpbin.org/ip").await?.json().await?)
}

/// Fetches a random dog image (successful external API example).
#[get("/api/dog-data")]
async fn get_dog_data() -> Result<serde_json::Value> {
    Ok(reqwest::get("https://dog.ceo/api/breeds/image/random")
        .await?
        .json()
        .await?)
}

/// Calls the Dog API with an invalid breed to trigger an external API error (e.g. 404).
#[get("/api/dog-data-err")]
async fn get_dog_data_err() -> Result<serde_json::Value> {
    Ok(
        reqwest::get("https://dog.ceo/api/breed/NOT_A_REAL_DOG/images")
            .await?
            .json()
            .await?,
    )
}

/// Accepts JSON and returns a custom-encoded JSON response.
#[post("/api/custom-encoding")]
async fn get_custom_encoding(takes: Json<serde_json::Value>) -> Result<serde_json::Value> {
    Ok(serde_json::json!({
        "message": "This response was encoded with a custom encoder!",
        "success": true,
        "you sent": takes.0,
    }))
}

/// Returns an untyped `anyhow` error with context (results in HTTP 500).
#[get("/api/untyped-error")]
async fn get_throws_error() -> Result<()> {
    Err(None.context("This is an example error using anyhow::Error")?)
}

/// Demonstrates returning an explicit HTTP error (400 Bad Request) using `HttpError`.
#[get("/api/throws-http-error")]
async fn throws_http_error() -> Result<()> {
    HttpError::bad_request("Bad request example")?;
    Ok(())
}

/// Convenience example: handles an Option and returns HTTP 400 with a message if None.
// NOTE(review): the stripped return type is restored as `String` — confirm against the
// upstream example; any concrete type would drive the `None` inference here.
#[get("/api/throws-http-error-context")]
async fn throws_http_error_context() -> Result<String> {
    let res = None.or_bad_request("Value was None")?;
    Ok(res)
}

/// A simple server function that always succeeds.
#[get("/api/throws-ok")]
async fn get_throws_ok() -> Result<()> {
    Ok(())
}

#[derive(thiserror::Error, Debug, Serialize, Deserialize)]
enum MyCustomError {
    #[error("bad request")]
    BadRequest { custom_name: String },
    #[error("not found")]
    NotFound,
    #[error("internal server error: {0}")]
    ServerFnError(#[from] ServerFnError),
}

impl AsStatusCode for MyCustomError {
    /// Map each error variant to the HTTP status code it should produce.
    fn as_status_code(&self) -> StatusCode {
        match self {
            MyCustomError::BadRequest { ..
} => StatusCode::BAD_REQUEST,
MyCustomError::NotFound => StatusCode::NOT_FOUND,
MyCustomError::ServerFnError(e) => e.as_status_code(),
}
}
}

/// Returns a custom typed error (serializable) so clients can handle specific cases.
///
/// Our custom error must implement `AsStatusCode` so it can properly set the outgoing HTTP status code.
#[get("/api/typed-error")]
async fn get_throws_typed_error() -> Result<(), MyCustomError> {
    Err(MyCustomError::BadRequest {
        custom_name: "Invalid input".into(),
    })
}

/// Simple POST endpoint used to show a successful server function that returns `StatusCode`.
// NOTE(review): this route path duplicates `post_server_data`'s `/api/data` above and the
// doc comment looks copy-pasted — confirm the intended path for this endpoint.
#[post("/api/data")]
async fn get_throws_serverfn_error() -> Result<(), ServerFnError> {
    Err(ServerFnError::ServerError {
        message: "Unauthorized access".to_string(),
        code: StatusCode::UNAUTHORIZED.as_u16(),
        details: None,
    })
}

================================================
FILE: examples/07-fullstack/header_map.rs
================================================
//! This example shows how you can extract a HeaderMap from requests to read custom headers.
//!
//! The extra arguments in the `#[get(...)]` macro are passed to the underlying axum handler,
//! and only visible on the server. This lets you run normal axum extractors like `HeaderMap`,
//! `TypedHeader`, `Query`, etc.
//!
//! Note that headers returned by the server are not always visible to the client due to CORS.
//! Headers like `Set-Cookie` are hidden by default, and need to be explicitly allowed
//! by the server using the `Access-Control-Expose-Headers` header (which dioxus-fullstack does not
//! currently expose directly).

use dioxus::prelude::*;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    let mut headers = use_action(get_headers);

    rsx!
{
        h1 { "Header Map Example" }
        button { onclick: move |_| headers.call(), "Get Headers" }
        if let Some(Ok(headers)) = headers.value() {
            p { "Response from server:" }
            pre { "{headers}" }
        } else {
            p { "No headers yet" }
        }
    }
}

/// Format the incoming request headers for display on the client.
// NOTE(review): the stripped return type is restored as `String` to match `format!` below.
#[get("/api/example", headers: dioxus::fullstack::HeaderMap)]
async fn get_headers() -> Result<String> {
    Ok(format!("{:#?}", headers))
}

================================================
FILE: examples/07-fullstack/hello-world/Cargo.toml
================================================
[package]
name = "fullstack-hello-world-example"
version = "0.1.0"
edition = "2021"
publish = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
dioxus = { workspace = true, features = ["fullstack"]}
serde = { workspace = true }
reqwest = { workspace = true, features = ["json"] }
serde_json = { workspace = true }
anyhow = { workspace = true }
thiserror = { workspace = true }

[features]
default = []
server = ["dioxus/server"]
web = ["dioxus/web"]

================================================
FILE: examples/07-fullstack/hello-world/assets/hello.css
================================================
h1 {
    font-family: monospace;
}

================================================
FILE: examples/07-fullstack/hello-world/src/main.rs
================================================
//! A simple hello world example for Dioxus fullstack
//!
//! Run with:
//!
//! ```sh
//! dx serve --web
//! ```
//!
//! This example demonstrates a simple Dioxus fullstack application with a client-side counter
//! and a server function that returns a greeting message.
//!
//! The `use_action` hook makes it easy to call async work (like server functions) from the client side
//! and handle loading and error states.

use dioxus::prelude::*;
use dioxus_fullstack::get;

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    let mut count = use_signal(|| 0);
    let mut message = use_action(get_greeting);

    rsx!
{ div { style: "padding: 2rem; font-family: Arial, sans-serif;", h1 { "Hello, Dioxus Fullstack!" } // Client-side counter - you can use any client functionality in your app! div { style: "margin: 1rem 0;", h2 { "Client Counter: {count}" } button { onclick: move |_| count += 1, "Increment" } button { onclick: move |_| count -= 1, "Decrement" } } // We can handle the action result and display loading state div { style: "margin: 1rem 0;", h2 { "Server Greeting" } button { onclick: move |_| message.call("World".to_string(), 30), "Get Server Greeting" } if message.pending() { p { "Loading..." } } p { "{message:#?}" } } } } } /// A simple server function that returns a greeting /// /// Our server function takes a name as a path and query parameters as inputs and returns a greeting message. #[get("/api/greeting/{name}/{age}")] async fn get_greeting(name: String, age: i32) -> Result { Ok(format!( "Hello from the server, {}! You are {} years old. 🚀", name, age )) } ================================================ FILE: examples/07-fullstack/login_form.rs ================================================ //! This example demonstrates how to use types like `Form`, `SetHeader`, and `TypedHeader` //! to create a simple login form that sets a cookie in the browser and uses it for authentication //! on a protected endpoint. //! //! For more information on handling forms in general, see the multipart_form example. //! //! The intent with this example is to show how to use the building blocks like `Form` and `SetHeader` //! to roll a simple authentication system. use dioxus::fullstack::{Form, SetCookie, SetHeader}; use dioxus::prelude::*; use serde::{Deserialize, Serialize}; #[cfg(feature = "server")] use { dioxus::fullstack::{Cookie, TypedHeader}, std::sync::LazyLock, uuid::Uuid, }; fn main() { dioxus::launch(app); } fn app() -> Element { let mut fetch_login = use_action(login); let mut fetch_sensitive = use_action(sensitive); rsx! 
{ h1 { "Login Form Demo" } button { onclick: move |_| async move { fetch_sensitive.call(); }, "Get Sensitive Data", } pre { "Response from locked API: {fetch_sensitive.value():?}"} form { onsubmit: move |evt: FormEvent| async move { // Prevent the browser from navigating away. evt.prevent_default(); // Extract the form values into our `LoginForm` struct. The `.parsed_values` method // is provided by Dioxus and works with any form element that has `name` attributes. let values: LoginForm = evt.parsed_values().unwrap(); // Call our server function with the form values wrapped in `Form`. The `SetHeader` // response will set a cookie in the browser if the login is successful. fetch_login.call(Form(values)).await; // Now that we're logged in, we can call our sensitive endpoint. fetch_sensitive.call().await; }, input { r#type: "text", id: "username", name: "username" } label { "Username" } input { r#type: "password", id: "password", name: "password" } label { "Password" } button { "Login" } } } } #[derive(Deserialize, Serialize)] pub struct LoginForm { username: String, password: String, } /// A static session ID for demonstration purposes. This forces all previous logins to be invalidated /// when the server restarts. #[cfg(feature = "server")] static THIS_SESSION_ID: LazyLock = LazyLock::new(Uuid::new_v4); /// In our `login` form, we'll return a `SetCookie` header if the login is successful. /// /// This will set a cookie in the user's browser that can be used for subsequent authenticated requests. /// The `SetHeader::new()` method takes anything that can be converted into a `HeaderValue`. /// /// We can set multiple headers by returning a tuple of `SetHeader` types, or passing in a tuple /// of headers to `SetHeader::new()`. #[post("/api/login")] async fn login(form: Form) -> Result> { // Verify the username and password. In a real application, you'd check these against a database. 
if form.0.username == "admin" && form.0.password == "password" {
    return Ok(SetHeader::new(format!("auth-demo={};", &*THIS_SESSION_ID))?);
}

HttpError::unauthorized("Invalid username or password")?
}

/// We'll use the `TypedHeader` extractor on the server to get the cookie from the request.
// NOTE(review): the stripped extractor type argument is restored as `TypedHeader<Cookie>`,
// matching the `Cookie` import and the `.get("auth-demo")` call below — confirm.
#[get("/api/sensitive", header: TypedHeader<Cookie>)]
async fn sensitive() -> Result<String> {
    // Extract the cookie from the request headers and use `.eq` to verify its value.
    // The `or_unauthorized` works on boolean values, returning a 401 if the condition is false.
    header
        .get("auth-demo")
        .or_unauthorized("Missing auth-demo cookie")?
        .eq(THIS_SESSION_ID.to_string().as_str())
        .or_unauthorized("Invalid auth-demo cookie")?;

    Ok("Sensitive data".to_string())
}

================================================
FILE: examples/07-fullstack/middleware.rs
================================================
//! This example shows how to use middleware in a fullstack Dioxus app.
//!
//! Dioxus supports two ways of middleware:
//! - Applying layers to the top-level axum router
//! - Apply `#[middleware]` attributes to individual handlers

use dioxus::prelude::*;

#[cfg(feature = "server")]
use {std::time::Duration, tower_http::timeout::TimeoutLayer};

fn main() {
    #[cfg(not(feature = "server"))]
    dioxus::launch(app);

    #[cfg(feature = "server")]
    dioxus::serve(|| async move {
        use axum::{extract::Request, middleware::Next};
        use dioxus::server::axum;

        Ok(dioxus::server::router(app)
            // we can apply a layer to the entire router using axum's `.layer` method
            .layer(axum::middleware::from_fn(
                |request: Request, next: Next| async move {
                    println!("Request: {} {}", request.method(), request.uri().path());
                    let res = next.run(request).await;
                    println!("Response: {}", res.status());
                    res
                },
            )))
    });
}

fn app() -> Element {
    let mut per_route = use_action(per_route_middleware);

    rsx!
{
        h1 { "Fullstack Middleware Example" }
        button { onclick: move |_| per_route.call(), "Fetch Data" }
        pre { "{per_route.value():#?}" }
    }
}

// We can use the `#[middleware]` attribute to apply middleware to individual handlers.
//
// Here, we're applying a timeout to the `per_route_middleware` handler, which will return a 504
// if the handler takes longer than 3 seconds to complete.
//
// To add multiple middleware layers, simply stack multiple `#[middleware]` attributes.
// NOTE(review): the comment above says 504, but the layer below is built with status 408 —
// confirm which status the timeout actually produces.
#[get("/api/count")]
#[middleware(TimeoutLayer::with_status_code(408.try_into().unwrap(), Duration::from_secs(3)))]
async fn per_route_middleware() -> Result<String> {
    tokio::time::sleep(Duration::from_secs(5)).await;
    Ok("Hello, world!".to_string())
}

================================================
FILE: examples/07-fullstack/multipart_form.rs
================================================
//! This example showcases how to handle multipart form data uploads in Dioxus.
//!
//! Dioxus provides the `MultipartFormData` type to allow converting from the websys `FormData`
//! type directly into a streaming multipart form data handler.

use dioxus::{fullstack::MultipartFormData, prelude::*};

fn main() {
    dioxus::launch(app);
}

fn app() -> Element {
    // The `MultipartFormData` type can be used to handle multipart form data uploads.
    // We can convert into it by using `.into()` on the `FormEvent`'s data, or by crafting
    // a `MultipartFormData` instance manually.
    let mut upload_as_multipart = use_action(move |event: FormEvent| upload(event.into()));

    rsx!
{
        Stylesheet { href: asset!("/examples/assets/file_upload.css") }
        img { src: asset!("/examples/assets/logo.png"), width: "200px" }
        div {
            h3 { "Upload as Multipart" }
            p { "Use the built-in multipart form handling" }
            form {
                display: "flex",
                flex_direction: "column",
                gap: "8px",
                onsubmit: move |evt| async move {
                    evt.prevent_default();
                    upload_as_multipart.call(evt).await;
                },
                label { r#for: "headshot", "Photos" }
                input { r#type: "file", name: "headshot", multiple: true, accept: ".png,.jpg,.jpeg" }
                label { r#for: "resume", "Resume" }
                input { r#type: "file", name: "resume", multiple: false, accept: ".pdf" }
                label { r#for: "name", "Name" }
                input { r#type: "text", name: "name", placeholder: "Name" }
                label { r#for: "age", "Age" }
                input { r#type: "number", name: "age", placeholder: "Age" }
                input { r#type: "submit", name: "submit", value: "Submit your resume" }
            }
        }
    }
}

/// Upload a form as multipart form data.
///
/// MultipartFormData is typed over the form data structure, allowing us to extract
/// both files and other form fields in a type-safe manner.
///
/// On the server, we have access to axum's `Multipart` extractor
#[post("/api/upload-multipart")]
async fn upload(mut form: MultipartFormData) -> Result<()> {
    while let Ok(Some(field)) = form.next_field().await {
        let name = field.name().unwrap_or("").to_string();
        let file_name = field.file_name().unwrap_or("").to_string();
        let content_type = field.content_type().unwrap_or("").to_string();
        let size = field.bytes().await.unwrap().len();

        info!(
            "Field name: {:?}, filename: {:?}, content_type: {:?}, size: {:?}",
            name, file_name, content_type, size
        );
    }

    Ok(())
}

================================================
FILE: examples/07-fullstack/query_params.rs
================================================
//! An example showcasing query parameters in Dioxus Fullstack server functions.
//!
//! The query parameter syntax mostly follows axum, but with a few extra conveniences.
//!
- can rename parameters in the function signature with `?age=age_in_years` where `age_in_years` is Rust variable name //! - can absorb all query params with `?{object}` directly into a struct implementing `Deserialize` use dioxus::prelude::*; fn main() { dioxus::launch(|| { let mut message = use_action(get_message); let mut message_rebind = use_action(get_message_rebind); let mut message_all = use_action(get_message_all); rsx! { h1 { "Server says: "} div { button { onclick: move |_| message.call(22), "Single" } pre { "{message:?}"} } div { button { onclick: move |_| message_rebind.call(25), "Rebind" } pre { "{message_rebind:?}"} } div { button { onclick: move |_| message_all.call(Params { age: 30, name: "world".into() }), "Bind all" } pre { "{message_all:?}"} } } }); } #[get("/api/message/?age")] async fn get_message(age: i32) -> Result { Ok(format!("You are {} years old!", age)) } #[get("/api/rebind/?age=age_in_years")] async fn get_message_rebind(age_in_years: i32) -> Result { Ok(format!("You are {} years old!", age_in_years)) } #[derive(serde::Deserialize, serde::Serialize, Debug)] struct Params { age: i32, name: String, } #[get("/api/all/?{query}")] async fn get_message_all(query: Params) -> Result { Ok(format!( "Hello {}, you are {} years old!", query.name, query.age )) } ================================================ FILE: examples/07-fullstack/redirect.rs ================================================ //! This example shows how to use the axum `Redirect` type to redirect the client to a different URL. //! //! On the web, a redirect will not be handled directly by JS, but instead the browser will automatically //! follow the redirect. This is useful for redirecting to different pages after a form submission. //! //! Note that redirects returned to the client won't navigate the SPA to a new page automatically. //! For managing a session or auth with client side routing, you'll need to handle that in the SPA itself. 
use dioxus::{fullstack::Redirect, prelude::*}; fn main() { dioxus::launch(|| { rsx! { Router:: {} } }); } #[derive(Clone, PartialEq, Routable)] enum Route { #[route("/")] Home, #[route("/blog")] Blog, } #[component] fn Home() -> Element { rsx! { h1 { "Welcome home" } form { method: "post", action: "/api/old-blog", button { "Go to blog" } } } } #[component] fn Blog() -> Element { rsx! { h1 { "Welcome to the blog!" } } } #[post("/api/old-blog")] async fn redirect_to_blog() -> Result { Ok(Redirect::to("/blog")) } ================================================ FILE: examples/07-fullstack/router/Cargo.toml ================================================ [package] name = "fullstack-router-example" version = "0.1.0" edition = "2021" publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] dioxus = { workspace = true, features = ["fullstack", "router"] } axum = { workspace = true, optional = true } tokio = { workspace = true, features = ["full"], optional = true } serde = { workspace = true, features = ["derive"] } [features] default = [] server = ["axum", "dioxus/server"] web = ["dioxus/web"] ================================================ FILE: examples/07-fullstack/router/src/main.rs ================================================ //! Run with: //! //! ```sh //! dx serve --platform web //! ``` use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::new() .with_cfg(server_only!(ServeConfig::builder().incremental( dioxus::server::IncrementalRendererConfig::default() .invalidate_after(std::time::Duration::from_secs(120)), ))) .launch(app); } fn app() -> Element { rsx! { Router:: {} } } #[derive(Clone, Routable, Debug, PartialEq, serde::Serialize, serde::Deserialize)] enum Route { #[route("/")] Home {}, #[route("/blog/:id/")] Blog { id: i32 }, } #[component] fn Blog(id: i32) -> Element { rsx! 
{ Link { to: Route::Home {}, "Go to counter" } table { tbody { for _ in 0..id { tr { for _ in 0..id { td { "hello world!" } } } } } } } } #[component] fn Home() -> Element { let mut count = use_signal(|| 0); let mut text = use_signal(|| "...".to_string()); rsx! { Link { to: Route::Blog { id: count() }, "Go to blog" } div { h1 { "High-Five counter: {count}" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" } button { onclick: move |_| async move { let data = get_server_data().await?; println!("Client received: {}", data); text.set(data.clone()); post_server_data(data).await?; Ok(()) }, "Run server function!" } "Server said: {text}" } } } #[post("/api/data")] async fn post_server_data(data: String) -> ServerFnResult { println!("Server received: {}", data); Ok(()) } #[get("/api/data")] async fn get_server_data() -> ServerFnResult { Ok("Hello from the server!".to_string()) } ================================================ FILE: examples/07-fullstack/server_functions.rs ================================================ //! This example is a simple showcase of Dioxus Server Functions. //! //! The other examples in this folder showcase advanced features of server functions like custom //! data types, error handling, websockets, and more. //! //! This example is meant to just be a simple starting point to show how server functions work. //! //! ## Server Functions //! //! In Dioxus, Server Functions are `axum` backend endpoints that can be called directly from the client //! as if you were simply calling a local Rust function. You can do anything with a server function //! that an Axum handler can do like extracting path, query, headers, and body parameters. //! //! ## Server Function Arguments //! //! Unlike Axum handlers, the arguments of the server functions have some special magic enabled by //! the accompanying `#[get]`/`#[post]` attributes. This magic enables you to choose between //! 
arguments that are purely serializable (i.e. `String`, `i32`, `Vec`, etc) as the JSON body of //! //! the request *or* arguments that implement Axum's `FromRequest` trait. This magic enables simple //! RPC functions but also complex extractors for things like auth, sessions, cookies, and more. //! //! ## Server Function Return Types //! //! The return type of the server function is also somewhat magical. Unlike Axum handlers, all server //! functions must return a `Result` type, giving the client an opportunity to handle errors properly. //! //! The `Ok` type can be anything that implements `Serialize + DeserializeOwned` so it can be sent //! to the client as JSON, or it can be anything that implements `IntoResponse` just like an Axum handler. //! //! ## Error Types //! //! The `Err` type of the server function return type is also somewhat special. The `Err` type can be: //! - `anyhow::Error` (the `dioxus_core::Err` type alias) for untyped errors with rich context. Note //! that these errors will always downcast to `ServerFnError` on the client, losing the original //! error stack and type. //! - `ServerFnError` for typed errors with a status code and optional message. //! - `StatusCode` for returning raw HTTP status codes. //! - `HttpError` for returning HTTP status codes with custom messages. //! - Any custom errors that implement `From` and are `Serialize`/`Deserialize` //! //! The only way to set the HTTP status code of the response is to use one of the above error types, //! or to implement a custom `IntoResponse` type that sets the status code manually. //! //! The `anyhow::Error` type is the best choice for rapid development, but is somewhat limited when //! handling specific error cases on the client since all errors are downcast to `ServerFnError`. //! //! ## Calling Server Functions from the Client //! //! Server functions can be called from the client by simply importing the function and calling it //! like a normal Rust function. 
Unlike regular axum handlers, Dioxus server functions have a few //! non-obvious restrictions. //! //! Most importantly, the arguments to the server function must implement either `Deserialize` *or* //! `IntoRequest`. The `IntoRequest` trait is a Dioxus abstraction that represents the "inverse" of the //! Axum `FromRequest` trait. Anything that is sent to the server from the client must be both extractable //! with `FromRequest` on the server *and* constructible with `IntoRequest` on the client. //! //! Types like `WebsocketOptions` implement `IntoRequest` and pass along things like upgrade headers //! to the server so that the server can properly upgrade the connection. //! //! When receiving data from the server, the return type must implement `Deserialize` *or* `FromResponse`. //! The `FromResponse` trait is the inverse of Axum's `IntoResponse` trait, and is implemented //! for types like `Websocket` where the raw HTTP response is needed to complete the construction //! of the type. //! //! ## Server-only Extractors //! //! Because the arguments of the server function define the structure of the public API, some extractors //! might not make sense to expose directly, nor would they be possible to construct on the client. //! For example, on the web, you typically don't work directly with cookies since the browser handles //! them for you. In these cases, the client would omit the `Cookie` header entirely, and we would need to //! "hoist" our extractor into a "server-only extractor". //! //! Server-only extractors are function arguments placed after the path in the `#[get]`/`#[post]` attribute. //! These arguments are extracted on the server, but not passed in from the client. This lets the //! server function remain callable from the client, while still allowing full access to axum's //! extractors. //! //! ``` //! #[post("/api/authenticate", auth: AuthCookie)] //! async fn authenticate() -> Result { /* ... */ } //! ``` //! //! ## Automatic Registration //! //! 
Unlike axum handlers, server functions do not need to be manually registered with a router. //! By default, *all* server functions in your app will be automatically registered with the //! server when you call `dioxus::launch` or create a router manually with `dioxus::server::router()`. //! //! However, not all server functions are automatically registered by default. Server functions that //! take a `State` extractor cannot be automatically added to the router since the dioxus router //! type does not know how to construct the `T` type. //! //! These server functions will be registered once the `ServerState` layer is added to the app with //! `router = router.layer(ServerState::new(your_state))`. //! //! ## Middleware //! //! Middleware can be added to server functions using the `#[middleware(MiddlewareType)]` attribute. //! Middleware will be applied in the order they are specified, and will be applied before any //! server-only extractors. //! //! To add router-level middleware, you can customize the axum `Router` using layers and extensions //! as you would in a normal axum app. //! //! ## Anonymous Server Functions //! //! The `#[server]` attribute can be used without a path to create an anonymous server function. //! These functions are still exposed as HTTP endpoints, but their names are procedurally generated //! from the module path, function name, and a hash of the function signature. This makes it hard to //! call these functions with `curl` or `postman`, but saves you the trouble of coming up with unique //! names for simple functions that are only called from your Dioxus app. //! //! If you're shipping desktop/mobile apps, we don't recommend using anonymous server functions //! since the function names could change between builds and thus make older versions of your app //! incompatible with newer versions of your server. //! //! ## Cross-platform Clients //! //! 
Server functions can be called from any platform (web, desktop, mobile, etc) and use the best //! underlying `fetch` implementation available. //! //! ## More examples //! //! With Dioxus Fullstack 0.7, pretty much anything you can do with an Axum handler, you can do with //! a server function. More advanced examples can be found in this folder showcasing custom data types, //! error handling, websockets, and more. use axum_core::response::IntoResponse; use dioxus::prelude::*; use dioxus_fullstack::FromResponse; use dioxus_fullstack::http::StatusCode; use serde::{Deserialize, Serialize}; fn main() { dioxus::launch(app); } fn app() -> Element { let mut echo_action = use_action(echo); let mut chat_action = use_action(chat); let mut dog_data = use_action(get_data); let mut custom_data = use_action(get_custom_data); let mut anonymous_action = use_action(anonymous); let mut custom_anonymous_action = use_action(custom_anonymous); let mut custom_response_action = use_action(get_custom_response); rsx! 
{ h1 { "Server Functions Example" } div { display: "flex", flex_direction: "column", gap: "8px", button { onclick: move |_| echo_action.call("Hello from client".into()), "Echo: Hello" } button { onclick: move |_| chat_action.call(42u32, Some(7u32)), "Chat (user 42, room 7)" } button { onclick: move |_| dog_data.call(), "Get dog data" } button { onclick: move |_| custom_data.call(), "Get custom data" } button { onclick: move |_| anonymous_action.call(), "Call anonymous" } button { onclick: move |_| custom_anonymous_action.call(), "Call custom anonymous" } button { onclick: move |_| custom_response_action.call(), "Get custom response" } button { onclick: move |_| { echo_action.reset(); chat_action.reset(); dog_data.reset(); custom_data.reset(); anonymous_action.reset(); custom_anonymous_action.reset(); custom_response_action.reset(); }, "Clear results" } pre { "Echo result: {echo_action.value():#?}" } pre { "Chat result: {chat_action.value():#?}" } pre { "Dog data: {dog_data.value():#?}" } pre { "Custom data: {custom_data.value():#?}" } pre { "Anonymous: {anonymous_action.value():#?}" } pre { "Custom anonymous: {custom_anonymous_action.value():#?}" } pre { "Custom response: {custom_response_action.value():#?}" } } } } /// A plain server function at a `POST` endpoint that takes a string and returns it. /// Here, we use the `Result` return type which is an alias to `Result`. #[post("/api/echo")] async fn echo(body: String) -> Result { Ok(body) } /// A Server function that takes path and query parameters, as well as a server-only extractor. #[post("/api/{user_id}/chat?room_id", headers: dioxus_fullstack::HeaderMap)] async fn chat(user_id: u32, room_id: Option) -> Result { Ok(format!( "User ID: {}, Room ID: {} - Headers: {:#?}", user_id, room_id.map_or("None".to_string(), |id| id.to_string()), headers )) } /// A plain server function at a `GET` endpoint that returns some JSON data. 
Because `DogData` is /// `Serialize` and `Deserialize`, it can be sent to the client as JSON automatically. /// /// You can `curl` this endpoint and it will return a 200 status code with a JSON body: /// /// ```json /// { /// "name": "Fido", /// "age": 4 /// } /// ``` #[get("/api/dog")] async fn get_data() -> Result { Ok(DogData { name: "Fido".to_string(), age: 4, }) } #[derive(Serialize, Deserialize, Debug)] struct DogData { name: String, age: u8, } /// A server function that returns a custom struct as JSON. #[get("/api/custom")] async fn get_custom_data() -> Result { Ok(CustomData { message: "Hello from the server!".to_string(), }) } #[derive(Debug)] struct CustomData { message: String, } impl IntoResponse for CustomData { fn into_response(self) -> axum_core::response::Response { axum_core::response::Response::builder() .status(StatusCode::ACCEPTED) .body(serde_json::to_string(&self.message).unwrap().into()) .unwrap() } } impl FromResponse for CustomData { async fn from_response(res: dioxus_fullstack::ClientResponse) -> Result { let message = res.json::().await?; Ok(CustomData { message }) } } /// A server function that returns an axum type directly. /// /// When make these endpoints, we need to use the `axum::response::Response` type and then call `into_response` /// on the return value to convert it into a response. #[get("/api/custom_response")] async fn get_custom_response() -> Result { Ok(axum_core::response::Response::builder() .status(StatusCode::CREATED) .body("Created!".to_string()) .unwrap() .into_response()) } /// An anonymous server function - the url path is generated from the module path and function name. /// /// This will end up as `/api/anonymous_` where `` is a hash of the function signature. #[server] async fn anonymous() -> Result { Ok("Hello from an anonymous server function!".to_string()) } /// An anonymous server function with a custom prefix and a fixed endpoint name. 
/// /// This is less preferred over the `#[get]`/`#[post]` syntax but is still functional for backwards /// compatibility. Previously, only the `#[server]` attribute was available, but as of Dioxus 0.7, /// the `#[get]`/`#[post]` attributes are preferred for new code. /// /// You can also use server-only extractors here as well, provided they come after the configuration. #[server(prefix = "/api/custom", endpoint = "my_anonymous", headers: dioxus_fullstack::HeaderMap)] async fn custom_anonymous() -> Result { Ok(format!( "Hello from a custom anonymous server function! -> {:#?}", headers )) } ================================================ FILE: examples/07-fullstack/server_sent_events.rs ================================================ //! This example demonstrates server-sent events (SSE) using Dioxus Fullstack. //! //! Server-sent events allow the server to push updates to the client over a single HTTP connection. //! This is useful for real-time updates, notifications, or any scenario where the server needs to //! send data to the client without the client explicitly requesting it. //! //! SSE is a simpler alternative to WebSockets, not requiring a full-duplex, stateful connection with //! the server. Instead, it uses a single long-lived HTTP connection to stream events from the server to the client. //! //! This means that SSE messages are stringly encoded, and thus binary data must be base64 encoded. //! If you need to send binary data, consider using the `Streaming` type instead, which lets //! you send raw bytes over a streaming HTTP response with a custom encoding. You'd reach for SSE //! when dealing with clients that might not support custom streaming protocols. //! //! Calling an SSE endpoint is as simple as calling any other server function. The return type of an //! SSE endpoint is a `ServerEvents` where `T` is the type of event you want to send to the client. //! //! On the client, the `ServerEvents` type implements `Stream>` //! 
so you can use it with async streams to get new events as they arrive. //! //! `T` must be serializable and deserializable, so anything that implements `Serialize` and `Deserialize` //! can be used as an event type. Calls to `.recv()` will wait for the next event to arrive and //! deserialize it into the correct type. use dioxus::prelude::*; use dioxus_fullstack::ServerEvents; fn main() { dioxus::launch(app); } fn app() -> Element { let mut events = use_signal(Vec::new); use_future(move || async move { // Call the SSE endpoint to get a stream of events let mut stream = listen_for_changes().await?; // And then poll it for new events, adding them to our signal while let Some(Ok(event)) = stream.recv().await { events.push(event); } dioxus::Ok(()) }); rsx! { h1 { "Events from server: " } for msg in events.read().iter().rev() { pre { "{msg:?}" } } } } /// We can send anything that's serializable as a server event - strings, numbers, structs, enums, etc. #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] enum MyServerEvent { Yay { message: String }, Nay { error: String }, } /// Our SSE endpoint, when called, will return the ServerEvents handle which streams events to the client. /// On the client, we can interact with this stream object to get new events as they arrive. #[get("/api/sse")] async fn listen_for_changes() -> Result> { use std::time::Duration; Ok(ServerEvents::new(|mut tx| async move { let mut count = 1; loop { // Create our serializable message let msg = if count % 5 == 0 { MyServerEvent::Nay { error: "An error occurred".into(), } } else { MyServerEvent::Yay { message: format!("Hello number {count}"), } }; // Send the message to the client. If it errors, the client has disconnected if tx.send(msg).await.is_err() { // client disconnected, do some cleanup break; } count += 1; // Poll some data source here, subscribe to changes, maybe call an LLM? 
tokio::time::sleep(Duration::from_secs(1)).await; } })) } ================================================ FILE: examples/07-fullstack/server_state.rs ================================================ //! This example shows how to use global state to maintain state between server functions. use std::rc::Rc; use axum_core::extract::{FromRef, FromRequest}; use dioxus::{ fullstack::{FullstackContext, extract::State}, prelude::*, }; use reqwest::header::HeaderMap; #[cfg(feature = "server")] use { dioxus::fullstack::Lazy, dioxus::fullstack::axum, futures::lock::Mutex, sqlx::{Executor, Row}, std::sync::LazyLock, }; /* Option 1: For simple, synchronous, thread-safe data, we can use statics with atomic types or mutexes. The `LazyLock` type from the standard library is a great choice for simple, synchronous data */ #[cfg(feature = "server")] static MESSAGES: LazyLock>> = LazyLock::new(|| Mutex::new(Vec::new())); #[post("/api/messages")] async fn add_message() -> Result<()> { MESSAGES.lock().await.push("New message".to_string()); Ok(()) } #[get("/api/messages")] async fn read_messages() -> Result> { Ok(MESSAGES.lock().await.clone()) } /* Option 2: For complex async data, we can use the `Lazy` type from Dioxus Fullstack. The `Lazy` type provides an interface like `once_cell::Lazy` but supports async initialization. When reading the value from a `Lazy`, the value will be initialized synchronously, blocking the current task until the value is ready. Alternatively, you can create a `Lazy` with `Lazy::lazy` and then initialize it later with `Lazy::initialize`. */ #[cfg(feature = "server")] static DATABASE: Lazy = Lazy::new(|| async move { use sqlx::sqlite::SqlitePoolOptions; dioxus::Ok( SqlitePoolOptions::new() .max_connections(5) .connect_with("sqlite::memory:".parse().unwrap()) .await?, ) }); /// When using the `Lazy` type, it implements `Deref`, so you can use it like a normal reference. 
#[get("/api/users")] async fn get_users() -> Result> { let users = DATABASE .fetch_all(sqlx::query("SELECT name FROM users")) .await? .iter() .map(|row| row.get::("name")) .collect::>(); Ok(users) } /* Option 3: For data that needs to be provided per-request, we can use axum's `Extension` type to provide data to our app. This is useful for things like request-scoped data or data that needs to be initialized per-request */ #[cfg(feature = "server")] type BroadcastExtension = axum::Extension>; #[post("/api/broadcast", ext: BroadcastExtension)] async fn broadcast_message() -> Result<()> { let rt = Rc::new("asdasd".to_string()); ext.send("New broadcast message".to_string())?; tokio::time::sleep(std::time::Duration::from_millis(100)).await; println!("rt: {}", rt); Ok(()) } /* Option 4: You can use Axum's `State` extractor to provide custom application state to your server functions. All ServerFunctions pull in `FullstackContext`, so you need to implement `FromRef` for your custom state type. To add your state to your app, you can use `.register_server_functions()` on a router for a given state type, which will automatically add your state into the `FullstackContext` used by your server functions. There are two details to note here: - You need to implement `FromRef` for your custom state type. - Custom extractors need to implement `FromRequest` where `S` is the state type that implements `FromRef`. 
*/ #[derive(Clone)] struct MyAppState { abc: i32, } impl FromRef for MyAppState { fn from_ref(state: &FullstackContext) -> Self { state.extension::().unwrap() } } struct CustomExtractor { abc: i32, headermap: HeaderMap, } impl FromRequest for CustomExtractor where MyAppState: FromRef, S: Send + Sync, { type Rejection = (); async fn from_request( _req: axum::extract::Request, state: &S, ) -> std::result::Result { let state = MyAppState::from_ref(state); Ok(CustomExtractor { abc: state.abc, headermap: HeaderMap::new(), }) } } #[post("/api/stateful", state: State, ex: CustomExtractor)] async fn app_state() -> Result<()> { println!("abc: {}", state.abc); println!("state abc: {:?}", ex.abc); println!("headermap: {:?}", ex.headermap); Ok(()) } fn main() { #[cfg(not(feature = "server"))] dioxus::launch(app); // When using `Lazy` items, or axum `Extension`s, we need to initialize them in `dioxus::serve` // before launching our app. #[cfg(feature = "server")] dioxus::serve(|| async move { use dioxus::server::axum::Extension; // For axum `Extension`s, we can use the `layer` method to add them to our router. let router = dioxus::server::router(app) .layer(Extension(tokio::sync::broadcast::channel::(16).0)); // To use our custom app state with `State`, we need to register it // as an extension since our `FromRef` implementation relies on it. let router = router.layer(Extension(MyAppState { abc: 42 })); Ok(router) }); } fn app() -> Element { let mut users = use_action(get_users); let mut messages = use_action(read_messages); let mut broadcast = use_action(broadcast_message); let mut add = use_action(add_message); rsx! 
{ div { button { onclick: move |_| users.call(), "Get Users" } pre { "{users.value():?}" } button { onclick: move |_| messages.call(), "Get Messages" } pre { "{messages.value():?}" } button { onclick: move |_| broadcast.call(), "Broadcast Message" } pre { "{broadcast.value():?}" } button { onclick: move |_| add.call(), "Add Message" } pre { "{add.value():?}" } } } } ================================================ FILE: examples/07-fullstack/ssr-only/Cargo.toml ================================================ [package] name = "ssr-only" edition = "2024" version.workspace = true publish = false [dependencies] dioxus = { workspace = true, features = ["fullstack", "router"] } [features] default = ["server"] server = ["dioxus/server"] ================================================ FILE: examples/07-fullstack/ssr-only/src/main.rs ================================================ //! This example showcases how to use Fullstack in a server-side rendering only context. //! //! This means we have no client-side bundle at all, and *everything* is rendered on the server. //! You can still use signals, resources, etc, but they won't be reactive on the client. //! //! This is useful for static site generation, or if you want to use Dioxus Fullstack as a server-side //! framework without the `rsx! {}` markup. //! //! To run this example, simply run `cargo run --package ssr-only` and navigate to `http://localhost:8080`. use dioxus::prelude::*; fn main() { dioxus::launch(|| rsx! { Router:: { } }); } #[derive(Routable, Clone, Debug, PartialEq)] enum Route { #[route("/")] Home, #[route("/post/:id")] Post { id: u32 }, } #[component] fn Home() -> Element { rsx! { h1 { "home" } ul { li { a { href: "/post/1", "Post 1" } } li { a { href: "/post/2", "Post 2" } } li { a { href: "/post/3", "Post 3 (404)" } } } } } #[component] fn Post(id: ReadSignal) -> Element { // You can return `HttpError` to return a specific HTTP status code and message. 
// `404 Not Found` will cause the server to return a 404 status code. // // `use_loader` will suspend the server-side rendering until the future resolves. let post_data = use_loader(move || get_post(id()))?; rsx! { h1 { "Post {id}" } p { "{post_data}" } } } #[get("/api/post/{id}")] async fn get_post(id: u32) -> Result { match id { 1 => Ok("first post".to_string()), 2 => Ok("second post".to_string()), _ => HttpError::not_found("Post not found")?, } } ================================================ FILE: examples/07-fullstack/streaming.rs ================================================ //! This example shows how to use the `Streaming` type to send streaming responses from the //! server to the client (and the client to the server!). //! //! The `Streaming` type automatically coordinates sending and receiving streaming data over HTTP. //! The `T` type parameter is the type of data being sent, and the `E` type parameter is the encoding //! used to serialize and deserialize the data. //! //! Dioxus Fullstack provides several built-in encodings: //! - JsonEncoding: the default, uses JSON for serialization //! - CborEncoding: uses CBOR for binary serialization //! - PostcardEncoding: uses Postcard for binary serialization //! - MsgPackEncoding: uses MessagePack for binary serialization //! - RkyvEncoding: uses Rkyv for zero-copy binary serialization //! //! The default encoding is `JsonEncoding`, which works well for most use cases and can be used by //! most clients. If you need a more efficient binary encoding, consider using one of the //! binary encodings. 
use bytes::Bytes; use dioxus::{ fullstack::{JsonEncoding, Streaming, TextStream}, prelude::*, }; fn main() { dioxus::launch(app) } fn app() -> Element { let mut text_responses = use_signal(String::new); let mut json_responses = use_signal(Vec::new); let mut start_text_stream = use_action(move || async move { text_responses.clear(); let mut stream = text_stream(Some(100)).await?; while let Some(Ok(text)) = stream.next().await { text_responses.push_str(&text); text_responses.push('\n'); } dioxus::Ok(()) }); let mut start_json_stream = use_action(move || async move { json_responses.clear(); let mut stream = json_stream().await?; while let Some(Ok(dog)) = stream.next().await { json_responses.push(dog); } dioxus::Ok(()) }); rsx! { div { button { onclick: move |_| start_text_stream.call(), "Start text stream" } button { onclick: move |_| start_text_stream.cancel(), "Stop text stream" } pre { "{text_responses}" } } div { button { onclick: move |_| start_json_stream.call(), "Start JSON stream" } button { onclick: move |_| start_json_stream.cancel(), "Stop JSON stream" } for dog in json_responses.read().iter() { pre { "{dog:?}" } } } } } /// The `TextStream` type is an alias for `Streaming` with a text/plain encoding. /// /// The `TextStream::new()` method takes anything that implements `Stream`, so /// we can use a channel to send strings from a background task. #[get("/api/test_stream?start")] async fn text_stream(start: Option) -> Result { let (tx, rx) = futures::channel::mpsc::unbounded(); tokio::spawn(async move { let mut count = start.unwrap_or(0); loop { let message = format!("Hello, world! {}", count); if tx.unbounded_send(message).is_err() { break; } count += 1; tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; } }); Ok(Streaming::new(rx)) } #[derive(serde::Serialize, serde::Deserialize, Debug)] struct Dog { name: String, age: u8, } /// A custom `Streaming` endpoint that streams JSON-encoded `Dog` structs to the client. 
/// /// Dioxus provides the `JsonEncoding` type which can be used to encode and decode JSON data. #[get("/api/json_stream")] async fn json_stream() -> Result> { let (tx, rx) = futures::channel::mpsc::unbounded(); tokio::spawn(async move { let mut count = 0; loop { let dog = Dog { name: format!("Dog {}", count), age: (count % 10) as u8, }; if tx.unbounded_send(dog).is_err() { // If the channel is closed, stop sending chunks break; } count += 1; tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; } }); Ok(Streaming::new(rx)) } /// An example of streaming raw bytes to the client using `Streaming`. /// This is useful for sending binary data, such as images, files, or zero-copy data. #[get("/api/byte_stream")] async fn byte_stream() -> Result> { let (tx, rx) = futures::channel::mpsc::unbounded(); tokio::spawn(async move { let mut count = 0; loop { let bytes = vec![count; 10]; if tx.unbounded_send(bytes.into()).is_err() { break; } count = (count + 1) % 255; tokio::time::sleep(tokio::time::Duration::from_millis(500)).await; } }); Ok(Streaming::new(rx)) } ================================================ FILE: examples/07-fullstack/streaming_file_upload.rs ================================================ //! This example showcases how to upload files from the client to the server. //! //! We can use the `FileStream` type to handle file uploads in a streaming fashion. //! This allows us to handle large files without loading them entirely into memory. //! //! `FileStream` and `FileDownload` are built on multi-part form data and streams, which we //! also showcase here. use dioxus::{ fullstack::{ByteStream, FileStream}, prelude::*, }; use dioxus_html::{FileData, HasFileData}; use futures::StreamExt; fn main() { dioxus::launch(app); } fn app() -> Element { // Dioxus provides the `FileStream` type for efficiently uploading files in a streaming fashion. 
// This approach automatically automatically sets relevant metadata such as headers like // Content-Type, Content-Length, and Content-Disposition. // // The `FileStream` type can be created from a `FileData` instance using `.into()`. // This approach is better suited for public-facing APIs where standard headers are expected. // // `FileStream` uses the platform's native file streaming capabilities when available, // making it more efficient than manually streaming bytes. let mut upload_as_file_upload = use_action(move |files: Vec| async move { for file in files { upload_file_as_filestream(file.into()).await?; } dioxus::Ok(()) }); // We can upload files by directly using the `ByteStream` type. With this approach, we need to // specify the file name and size as query parameters since its an opaque stream. // // The `FileData` type has a `byte_stream` method which returns a `Pin + Send>>` // that we can turn into a `ByteStream` with `.into()`. // // In WASM, this will buffer the entire file in memory, so it's not the most efficient way to upload files. // This approach is best suited for data created by the user in the browser. let mut upload_files_as_bytestream = use_action(move |files: Vec| async move { info!("Uploading {} files", files.len()); for file in files { upload_as_bytestream(file.name(), file.size(), file.byte_stream().into()).await?; } dioxus::Ok(()) }); let mut download_file = use_action(move || async move { let mut file = download_as_filestream().await?; let mut bytes = vec![]; info!("Downloaded file: {:?}", file); while let Some(Ok(chunk)) = file.next().await { bytes.extend_from_slice(&chunk); } dioxus::Ok(String::from_utf8_lossy(&bytes).to_string()) }); rsx! 
{ Stylesheet { href: asset!("/examples/assets/file_upload.css") } div { max_width: "600px", margin: "auto", h1 { "File upload example" } div { h3 { "Upload as FileUpload" } div { class: "drop-zone", ondragover: move |evt| evt.prevent_default(), ondrop: move |evt| async move { evt.prevent_default(); upload_as_file_upload.call(evt.files()).await; }, "Drop files here" } pre { "{upload_as_file_upload.value():?}" } } div { h3 { "Upload as ByteStream" } div { class: "drop-zone", ondragover: move |evt| evt.prevent_default(), ondrop: move |evt| async move { evt.prevent_default(); upload_files_as_bytestream.call(evt.files()).await; }, "Drop files here" } } div { h3 { "Download a file from the server" } button { onclick: move |_| download_file.call(), "Download file" } if let Some(Ok(content)) = &download_file.value() { pre { "{content}" } } else if let Some(Err(e)) = &download_file.value() { pre { "Error downloading file: {e}" } } } } } } /// Upload a file using the `FileStream` type which automatically sets relevant metadata /// as headers like Content-Type, Content-Length, and Content-Disposition. #[post("/api/upload_as_file_stream")] async fn upload_file_as_filestream(mut upload: FileStream) -> Result { use futures::StreamExt; use std::env::temp_dir; use tokio::io::AsyncWriteExt; info!("Received file upload: {:?}", upload); // Create a temporary file to write the uploaded data to. let upload_file = std::path::absolute(temp_dir().join(upload.file_name()))?; // Reject paths that are outside the temp directory for security reasons. if !upload_file.starts_with(temp_dir()) { HttpError::bad_request("Invalid file path")?; } info!( "Uploading bytes of {:?} file to {:?}", upload.size(), upload_file ); // Open the file for writing. tokio::fs::create_dir_all(upload_file.parent().unwrap()).await?; let mut file = tokio::fs::File::create(&upload_file).await?; let expected = upload.size(); // Stream the data from the request body to the file. 
let mut uploaded: u64 = 0; let mut errored = false; while let Some(chunk) = upload.next().await { match chunk { Ok(bytes) => { uploaded += bytes.len() as u64; if file.write_all(&bytes).await.is_err() { errored = true; break; } // 1GB max file size or attempting to upload more than expected. if uploaded > expected.unwrap_or(1024 * 1024 * 1024) { errored = true; break; } } Err(_) => { errored = true; break; } } } // Clean up the file if there was an error during upload. if errored { _ = file.sync_data().await; let _ = tokio::fs::remove_file(&upload_file).await; HttpError::internal_server_error("Failed to upload file")?; } Ok(uploaded as u32) } /// Upload a file as a raw byte stream. This requires us to specify the file name and size /// as query parameters since the `ByteStream` type is an opaque stream without metadata. /// /// We could also use custom headers to pass metadata if we wanted to avoid query parameters. #[post("/api/upload_as_bytestream?name&size")] async fn upload_as_bytestream(name: String, size: u64, mut stream: ByteStream) -> Result<()> { let mut collected = 0; while let Some(chunk) = stream.next().await { let chunk = chunk?; collected += chunk.len() as u64; info!("Received {} bytes for file {}", chunk.len(), name); if collected > size { HttpError::bad_request("Received more data than expected")?; } } Ok(()) } /// Download a file from the server as a `FileStream`. This automatically sets relevant /// headers like Content-Type, Content-Length, and Content-Disposition. /// /// This endpoint is nice because 3rd-party clients can visit it directly and download the file! /// Try visiting this endpoint directly in your browser. #[get("/api/download_as_filestream")] async fn download_as_filestream() -> Result { Ok(FileStream::from_path(file!()).await?) } ================================================ FILE: examples/07-fullstack/through_reqwest.rs ================================================ //! 
This example demonstrates that dioxus server functions can be called directly as a Rust //! function or via an HTTP request using reqwest. //! //! Dioxus server functions generated a REST endpoint that can be called using any HTTP client. //! By default, they also support different serialization formats like JSON and CBOR. Try changing //! your `accept` header to see the different formats. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut user_from_server_fn = use_action(get_user); let mut user_from_reqwest = use_action(move |id: i32| async move { let port = dioxus::cli_config::server_port().unwrap_or(8080); reqwest::get(&format!("http://localhost:{}/api/user/{}", port, id)) .await? .json::() .await }); rsx! { button { onclick: move |_| user_from_server_fn.call(123), "Fetch Data" } button { onclick: move |_| user_from_reqwest.call(456), "Fetch From Endpoint" } div { display: "flex", flex_direction: "column", pre { "User from server: {user_from_server_fn.value():?}", } pre { "User from server: {user_from_reqwest.value():?}", } } } } #[derive(serde::Serialize, serde::Deserialize, Debug)] struct User { id: String, name: String, } #[get("/api/user/{id}")] async fn get_user(id: i32) -> Result { Ok(User { id: id.to_string(), name: "John Doe".into(), }) } ================================================ FILE: examples/07-fullstack/websocket.rs ================================================ //! This example showcases the built-in websocket functionality in Dioxus Fullstack. //! //! We can create a new websocket endpoint that takes the WebSocketOptions as a body and returns //! a `Websocket` instance that the client uses to communicate with the server. //! //! The `Websocket` type is generic over the message types and the encoding used to serialize the messages. //! //! By default, we use `JsonEncoding`, but in this example, we use `CborEncoding` to demonstrate that //! binary encodings also work. //! //! 
The `use_websocket` hook wraps the `Websocket` instance and provides a reactive interface to the //! state of the connection, as well as methods to send and receive messages. //! //! Because the websocket is generic over the message types, calls to `.recv()` and `.send()` are //! strongly typed, making it easy to send and receive messages without having to manually //! serialize and deserialize them. use dioxus::{fullstack::CborEncoding, prelude::*}; use dioxus_fullstack::{WebSocketOptions, Websocket, use_websocket}; use serde::{Deserialize, Serialize}; fn main() { dioxus::launch(app); } fn app() -> Element { let mut messages = use_signal(std::vec::Vec::new); // This signal is read inside the use_websocket closure, making it a reactive dependency. // Whenever it changes, the websocket will automatically re-connect. let mut name = use_signal(|| "John Doe".to_string()); let mut socket = use_websocket(move || uppercase_ws(name.cloned(), 30, WebSocketOptions::new())); use_future(move || async move { loop { // Wait for the socket to connect _ = socket.connect().await; // Loop poll with recv. Throws an error when the connection closes, making it possible // to run code before the socket re-connects when the name input changes while let Ok(msg) = socket.recv().await { messages.push(msg); } } }); rsx! { h1 { "WebSocket Example" } p { "Type a message and see it echoed back in uppercase!" 
} p { "Connection status: {socket.status():?}" } p { "Change your name to trigger a websocket re-connect" } input { placeholder: "Your name", value: "{name}", oninput: move |e| name.set(e.value()), } input { placeholder: "Type a message", oninput: move |e| async move { _ = socket.send(ClientEvent::TextInput(e.value())).await; }, } button { onclick: move |_| messages.clear(), "Clear messages" } for message in messages.read().iter().rev() { pre { "{message:?}" } } } } #[derive(Serialize, Deserialize, Debug)] enum ClientEvent { TextInput(String), } #[derive(Serialize, Deserialize, Debug)] enum ServerEvent { Uppercase(String), } #[get("/api/uppercase_ws?name&age")] async fn uppercase_ws( name: String, age: i32, options: WebSocketOptions, ) -> Result> { Ok(options.on_upgrade(move |mut socket| async move { // send back a greeting message _ = socket .send(ServerEvent::Uppercase(format!( "First message from server: Hello, {}! You are {} years old.", name, age ))) .await; // Loop and echo back uppercase messages while let Ok(ClientEvent::TextInput(next)) = socket.recv().await { _ = socket.send(ServerEvent::Uppercase(next)).await; } })) } ================================================ FILE: examples/08-apis/control_focus.rs ================================================ //! Managing focus //! //! This example shows how to manage focus in a Dioxus application. We implement a "roulette" that focuses on each input //! in the grid every few milliseconds until the user interacts with the inputs. 
use std::rc::Rc; use async_std::task::sleep; use dioxus::prelude::*; const STYLE: Asset = asset!("/examples/assets/roulette.css"); fn main() { dioxus::launch(app); } fn app() -> Element { // Element data is stored as Rc so we can clone it and pass it around let mut elements = use_signal(Vec::>::new); let mut running = use_signal(|| true); use_future(move || async move { let mut focused = 0; loop { sleep(std::time::Duration::from_millis(50)).await; if !running() { continue; } if let Some(element) = elements.with(|f| f.get(focused).cloned()) { _ = element.set_focus(true).await; } else { focused = 0; } focused += 1; } }); rsx! { Stylesheet { href: STYLE } h1 { "Input Roulette" } button { onclick: move |_| running.toggle(), "Toggle roulette" } div { id: "roulette-grid", // Restart the roulette if the user presses escape onkeydown: move |event| { if event.code().to_string() == "Escape" { running.set(true); } }, // Draw the grid of inputs for i in 0..100 { input { r#type: "number", value: "{i}", onmounted: move |cx| elements.push(cx.data()), oninput: move |_| running.set(false), } } } } } ================================================ FILE: examples/08-apis/custom_html.rs ================================================ //! This example shows how to use a custom index.html and custom extensions //! to add things like stylesheets, scripts, and third-party JS libraries. use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::new() .with_cfg( dioxus::desktop::Config::new().with_custom_index( r#" Dioxus app

External HTML

"# .into(), ), ) .launch(app); } fn app() -> Element { rsx! { h1 { "Custom HTML!" } } } ================================================ FILE: examples/08-apis/custom_menu.rs ================================================ //! This example shows how to use a custom menu bar with Dioxus desktop. //! This example is not supported on the mobile or web renderers. use dioxus::desktop::{muda::*, use_muda_event_handler}; use dioxus::prelude::*; fn main() { // Create a menu bar that only contains the edit menu let menu = Menu::new(); let edit_menu = Submenu::new("Edit", true); edit_menu .append_items(&[ &PredefinedMenuItem::undo(None), &PredefinedMenuItem::redo(None), &PredefinedMenuItem::separator(), &PredefinedMenuItem::cut(None), &PredefinedMenuItem::copy(None), &PredefinedMenuItem::paste(None), &PredefinedMenuItem::select_all(None), &MenuItem::with_id("switch-text", "Switch text", true, None), ]) .unwrap(); menu.append(&edit_menu).unwrap(); // Create a desktop config that overrides the default menu with the custom menu let config = dioxus::desktop::Config::new().with_menu(menu); // Launch the app with the custom menu dioxus::LaunchBuilder::new().with_cfg(config).launch(app) } fn app() -> Element { let mut text = use_signal(String::new); // You can use the `use_muda_event_handler` hook to run code when a menu event is triggered. use_muda_event_handler(move |muda_event| { if muda_event.id() == "switch-text" { text.set("Switched to text".to_string()); } }); rsx! { div { h1 { "Custom Menu" } p { "Text: {text}" } } } } ================================================ FILE: examples/08-apis/drag_and_drop.rs ================================================ //! This example shows how to implement a simple drag-and-drop kanban board using Dioxus. //! You can drag items between different categories and edit their contents. //! //! This example uses the `.data_transfer()` API to handle drag-and-drop events. When an item is dragged, //! 
its ID is stored in the data transfer object. When the item is dropped into a new category, its ID is retrieved //! from the data transfer object and used to update the item's category. //! //! Note that in a real-world application, you'll want more sophisticated drop handling, such as visual //! feedback during dragging, and better drop-zone detection to allow dropping *between* items. use dioxus::prelude::*; fn main() { dioxus::launch(app); } struct Item { id: usize, name: String, category: String, contents: String, } fn app() -> Element { let mut items = use_signal(initial_kanban_data); rsx! { div { display: "flex", gap: "20px", flex_direction: "row", for category in ["A", "B", "C"] { div { class: "category", display: "flex", flex_direction: "column", gap: "10px", padding: "10px", flex_grow: "1", border: "2px solid black", min_height: "300px", background_color: "#f0f0f0", ondragover: |e| e.prevent_default(), ondrop: move |e| { if let Some(item_id) = e.data_transfer().get_data("text/plain").and_then(|data| data.parse::().ok()) { if let Some(pos) = items.iter().position(|item| item.id == item_id) { items.write()[pos].category = category.to_string(); } } }, h2 { "Category: {category}" } for (index, item) in items.iter().enumerate().filter(|item| item.1.category == category) { div { key: "{item.id}", width: "200px", height: "50px", border: "1px solid black", padding: "10px", class: "item", draggable: "true", background: "white", cursor: "grab", ondragstart: move |e| { let id = items.read()[index].id.to_string(); e.data_transfer().set_data("text/plain", &id).unwrap(); }, pre { webkit_user_select: "none", "{item.name}" } input { r#type: "text", value: "{item.contents}", oninput: move |e| { items.write()[index].contents = e.value(); } } } } } } } } } fn initial_kanban_data() -> Vec { vec![ Item { id: 1, name: "Item 1".into(), category: "A".into(), contents: "This is item 1".into(), }, Item { id: 2, name: "Item 2".into(), category: "A".into(), contents: "This is item 
2".into(), }, Item { id: 3, name: "Item 3".into(), category: "A".into(), contents: "This is item 3".into(), }, Item { id: 4, name: "Item 4".into(), category: "B".into(), contents: "This is item 4".into(), }, Item { id: 5, name: "Item 5".into(), category: "B".into(), contents: "This is item 5".into(), }, Item { id: 6, name: "Item 6".into(), category: "C".into(), contents: "This is item 6".into(), }, ] } ================================================ FILE: examples/08-apis/eval.rs ================================================ //! This example shows how to use the `eval` function to run JavaScript code in the webview. //! //! Eval will only work with renderers that support javascript - so currently only the web and desktop/mobile renderers //! that use a webview. Native renderers will throw "unsupported" errors when calling `eval`. use async_std::task::sleep; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { // Create a future that will resolve once the javascript has been successfully executed. let future = use_resource(move || async move { // Wait a little bit just to give the appearance of a loading screen sleep(std::time::Duration::from_secs(1)).await; // The `eval` is available in the prelude - and simply takes a block of JS. // Dioxus' eval is interesting since it allows sending messages to and from the JS code using the `await dioxus.recv()` // builtin function. This allows you to create a two-way communication channel between Rust and JS. let mut eval = document::eval( r#" dioxus.send("Hi from JS!"); let msg = await dioxus.recv(); console.log(msg); return "hi from JS!"; "#, ); // Send a message to the JS code. eval.send("Hi from Rust!").unwrap(); // Our line on the JS side will log the message and then return "hello world". let res: String = eval.recv().await.unwrap(); // This will print "Hi from JS!" and "Hi from Rust!". 
println!("{:?}", eval.await); res }); match future.value().as_ref() { Some(v) => rsx!( p { "{v}" } ), _ => rsx!( p { "waiting.." } ), } } ================================================ FILE: examples/08-apis/file_upload.rs ================================================ //! This example shows how to use the `file` methods on FormEvent and DragEvent to handle file uploads and drops. //! //! Dioxus intercepts these events and provides a Rusty interface to the file data. Since we want this interface to //! be crossplatform, use dioxus::html::HasFileData; use dioxus::prelude::*; use dioxus_html::FileData; const STYLE: Asset = asset!("/examples/assets/file_upload.css"); fn main() { dioxus::launch(app); } struct UploadedFile { name: String, contents: String, } fn app() -> Element { let mut enable_directory_upload = use_signal(|| false); let mut files_uploaded = use_signal(|| Vec::new() as Vec); let mut hovered = use_signal(|| false); let upload_files = move |files: Vec| async move { for file in files { let filename = file.name(); if let Ok(contents) = file.read_string().await { files_uploaded.push(UploadedFile { name: filename, contents, }); } else { files_uploaded.push(UploadedFile { name: filename, contents: "Failed to read file".into(), }); } } }; rsx! 
{ Stylesheet { href: STYLE } h1 { "File Upload Example" } p { "Drop a .txt, .rs, or .js file here to read it" } button { onclick: move |_| files_uploaded.clear(), "Clear files" } div { label { r#for: "directory-upload", "Enable directory upload" } input { r#type: "checkbox", id: "directory-upload", checked: enable_directory_upload, oninput: move |evt| enable_directory_upload.set(evt.checked()), } } div { label { r#for: "textreader", "Upload text/rust files and read them" } input { r#type: "file", accept: ".txt,.rs,.js", multiple: true, name: "textreader", directory: enable_directory_upload, onchange: move |evt| async move { upload_files(evt.files()).await }, } } div { id: "drop-zone", background_color: if hovered() { "lightblue" } else { "lightgray" }, ondragover: move |evt| { evt.prevent_default(); hovered.set(true) }, ondragleave: move |_| hovered.set(false), ondrop: move |evt| async move { evt.prevent_default(); hovered.set(false); upload_files(evt.files()).await; }, "Drop files here" } ul { for file in files_uploaded.read().iter().rev() { li { span { "{file.name}" } pre { "{file.contents}" } } } } } } ================================================ FILE: examples/08-apis/form.rs ================================================ //! Forms //! //! Dioxus forms deviate slightly from html, automatically returning all named inputs //! in the "values" field. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut values = use_signal(Vec::new); let mut submitted_values = use_signal(Vec::new); rsx! { div { style: "display: flex", div { style: "width: 50%", h1 { "Form" } if !submitted_values.read().is_empty() { h2 { "Submitted! 
✅" } } // The form element is used to create an HTML form for user input // You can attach regular attributes to it form { id: "cool-form", style: "display: flex; flex-direction: column;", // You can attach a handler to the entire form oninput: move |ev| { println!("Input event: {:#?}", ev); values.set(ev.values()); }, // On desktop/liveview, the form will not navigate the page - the expectation is that you handle // The form event. // However, if your form doesn't have a submit handler, it might navigate the page depending on the webview. // We suggest always attaching a submit handler to the form. onsubmit: move |ev| { println!("Submit event: {:#?}", ev); submitted_values.set(ev.values()); }, // Regular text inputs with handlers label { r#for: "username", "Username" } input { r#type: "text", name: "username", oninput: move |ev| { println!("setting username"); values.set(ev.values()); } } // And then the various inputs that might exist // Note for a value to be returned in .values(), it must be named! 
label { r#for: "full-name", "Full Name" } input { r#type: "text", name: "full-name" } input { r#type: "text", name: "full-name" } label { r#for: "email", "Email (matching @example.com)" } input { r#type: "email", size: "30", id: "email", name: "email" } label { r#for: "password", "Password" } input { r#type: "password", name: "password" } label { r#for: "color", "Color" } input { r#type: "radio", checked: true, name: "color", value: "red" } input { r#type: "radio", name: "color", value: "blue" } input { r#type: "radio", name: "color", value: "green" } // Select multiple comes in as a comma separated list of selected values // You should split them on the comma to get the values manually label { r#for: "country", "Country" } select { name: "country", multiple: true, oninput: move |ev| { println!("Input event: {:#?}", ev); println!("Values: {:#?}", ev.value().split(',').collect::>()); }, option { value: "usa", "USA" } option { value: "canada", "Canada" } option { value: "mexico", "Mexico" } } // Safari can be quirky with color inputs on mac. // We recommend always providing a text input for color as a fallback. label { r#for: "color", "Color" } input { r#type: "color", value: "#000002", name: "head", id: "head" } // Dates! 
input { min: "2018-01-01", value: "2018-07-22", r#type: "date", name: "trip-start", max: "2025-12-31", id: "start" } // CHekcboxes label { r#for: "cbox", "Color" } div { label { r#for: "cbox-red", "red" } input { r#type: "checkbox", checked: true, name: "cbox", value: "red", id: "cbox-red" } } div { label { r#for: "cbox-blue", "blue" } input { r#type: "checkbox", name: "cbox", value: "blue", id: "cbox-blue" } } div { label { r#for: "cbox-green", "green" } input { r#type: "checkbox", name: "cbox", value: "green", id: "cbox-green" } } div { label { r#for: "cbox-yellow", "yellow" } input { r#type: "checkbox", name: "cbox", value: "yellow", id: "cbox-yellow" } } // File input label { r#for: "headshot", "Headshot" } input { r#type: "file", name: "headshot", id: "headshot", multiple: true, accept: ".png,.jpg,.jpeg" } // Buttons will submit your form by default. button { r#type: "submit", value: "Submit", "Submit the form" } } } div { style: "width: 50%", h1 { "Oninput Values" } pre { "{values:#?}" } } } button { onclick: move |_| { println!("Values: {:#?}", values.read()); }, "Log values" } } } ================================================ FILE: examples/08-apis/logging.rs ================================================ //! Dioxus ships out-of-the-box with tracing hooks that integrate with the Dioxus-CLI. //! //! The built-in tracing-subscriber automatically sets up a wasm panic hook and wires up output //! to be consumed in a machine-readable format when running under `dx`. //! //! You can disable the built-in tracing-subscriber or customize the log level yourself. //! //! By default: //! - in `dev` mode, the default log output is `debug` //! - in `release` mode, the default log output is `info` //! //! 
To use the dioxus logger in your app, simply call any of the tracing functions (info!(), warn!(), error!()) use dioxus::logger::tracing::{Level, debug, error, info, warn}; use dioxus::prelude::*; fn main() { // `dioxus::logger::init` is optional and called automatically by `dioxus::launch`. // In development mode, the `Debug` tracing level is set, and in release only the `Info` level is set. // You can call it yourself manually in the cases you: // - want to customize behavior // - aren't using `dioxus::launch` (i.e. custom fullstack setups) but want the integration. // The Tracing crate is the logging interface that the dioxus-logger uses. dioxus::logger::init(Level::INFO).expect("Failed to initialize logger"); dioxus::launch(app); } fn app() -> Element { rsx! { div { h1 { "Logger demo" } button { onclick: move |_| warn!("Here's a warning!"), "Warn!" } button { onclick: move |_| error!("Here's an error!"), "Error!" } button { onclick: move |_| { debug!("Here's a debug"); warn!("The log level is set to info so there should not be a debug message") }, "Debug!" } button { onclick: move |_| info!("Here's an info!"), "Info!" } } } } ================================================ FILE: examples/08-apis/multiwindow.rs ================================================ //! Multiwindow example //! //! This example shows how to implement a simple multiwindow application using dioxus. //! This works by spawning a new window when the user clicks a button. We have to build a new virtualdom which has its //! own context, root elements, etc. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let onclick = move |_| { dioxus::desktop::window().new_window(VirtualDom::new(popup), Default::default()); }; rsx! { button { onclick, "New Window" } } } fn popup() -> Element { let mut count = use_signal(|| 0); rsx! 
{ div { h1 { "Popup Window" } p { "Count: {count}" } button { onclick: move |_| count += 1, "Increment" } } } } ================================================ FILE: examples/08-apis/multiwindow_with_tray_icon.rs ================================================ //! Multiwindow with tray icon example //! //! This example shows how to implement a simple multiwindow application and tray icon using dioxus. //! This works by spawning a new window when the user clicks a button. We have to build a new virtualdom which has its //! own context, root elements, etc. //! //! This is useful for apps that incorporate settings panels or persistent windows like Raycast. use dioxus::desktop::{ WindowCloseBehaviour, trayicon::{default_tray_icon, init_tray_icon}, window, }; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { use_hook(|| { // Set the close behavior for the main window // This will hide the window instead of closing it when the user clicks the close button window().set_close_behavior(WindowCloseBehaviour::WindowHides); // Initialize the tray icon with a default icon and no menu // This will provide the tray into context for the application init_tray_icon(default_tray_icon(), None) }); rsx! { button { onclick: move |_| { window().new_window(VirtualDom::new(popup), Default::default()); }, "New Window" } } } fn popup() -> Element { rsx! { div { "This is a popup window!" } } } ================================================ FILE: examples/08-apis/on_resize.rs ================================================ //! Run a callback //! //! Whenever an Element is finally mounted to the Dom, its data is available to be read. //! These fields can typically only be read asynchronously, since various renderers need to release the main thread to //! perform layout and painting. 
use dioxus::prelude::*; use dioxus_elements::geometry::euclid::Size2D; fn main() { dioxus::launch(app); } fn app() -> Element { let mut dimensions = use_signal(Size2D::zero); rsx!( Stylesheet { href: asset!("/examples/assets/read_size.css") } div { width: "50%", height: "50%", background_color: "red", onresize: move |evt| dimensions.set(evt.data().get_content_box_size().unwrap()), "This element is {dimensions():?}" } ) } ================================================ FILE: examples/08-apis/on_visible.rs ================================================ //! Port of the https://codepen.io/ryanfinni/pen/VwZeGxN example use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut animated_classes = use_signal(|| ["animated-text", ""]); rsx! { Stylesheet { href: asset!("/examples/assets/visible.css") } div { class: "container", p { "Scroll to the bottom of the page. The text will transition in when it becomes visible in the viewport." } p { "First, let's create a new project for our hacker news app. We can use the CLI to create a new project. You can select a platform of your choice or view the getting started guide for more information on each option. If you aren't sure what platform to try out, we recommend getting started with web or desktop:" } p { "The template contains some boilerplate to help you get started. For this guide, we will be rebuilding some of the code from scratch for learning purposes. You can clear the src/main.rs file. We will be adding new code in the next sections." } p { "Next, let's setup our dependencies. We need to set up a few dependencies to work with the hacker news API: " } p { "First, let's create a new project for our hacker news app. We can use the CLI to create a new project. You can select a platform of your choice or view the getting started guide for more information on each option. 
If you aren't sure what platform to try out, we recommend getting started with web or desktop:" } p { "The template contains some boilerplate to help you get started. For this guide, we will be rebuilding some of the code from scratch for learning purposes. You can clear the src/main.rs file. We will be adding new code in the next sections." } p { "Next, let's setup our dependencies. We need to set up a few dependencies to work with the hacker news API: " } p { "First, let's create a new project for our hacker news app. We can use the CLI to create a new project. You can select a platform of your choice or view the getting started guide for more information on each option. If you aren't sure what platform to try out, we recommend getting started with web or desktop:" } p { "The template contains some boilerplate to help you get started. For this guide, we will be rebuilding some of the code from scratch for learning purposes. You can clear the src/main.rs file. We will be adding new code in the next sections." } p { "Next, let's setup our dependencies. We need to set up a few dependencies to work with the hacker news API: " } h2 { class: animated_classes().join(" "), onvisible: move |evt| { let data = evt.data(); if let Ok(is_intersecting) = data.is_intersecting() { animated_classes.write()[1] = if is_intersecting { "visible" } else { "" }; } }, "Animated Text" } } } } ================================================ FILE: examples/08-apis/overlay.rs ================================================ //! This example demonstrates how to create an overlay window with dioxus. //! //! Basically, we just create a new window with a transparent background and no decorations, size it to the screen, and //! then we can draw whatever we want on it. In this case, we're drawing a simple overlay with a draggable header. //! //! We also add a global shortcut to toggle the overlay on and off, so you could build a raycast-type app with this. 
use dioxus::desktop::{ HotKeyState, LogicalSize, WindowBuilder, tao::dpi::PhysicalPosition, use_global_shortcut, }; use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::desktop() .with_cfg(make_config()) .launch(app); } fn app() -> Element { let mut show_overlay = use_signal(|| true); _ = use_global_shortcut("cmd+g", move |state| { if state == HotKeyState::Pressed { show_overlay.toggle(); } }); rsx! { Stylesheet { href: asset!("/examples/assets/overlay.css") } if show_overlay() { div { width: "100%", height: "100%", background_color: "red", border: "1px solid black", div { width: "100%", height: "10px", background_color: "black", onmousedown: move |_| dioxus::desktop::window().drag(), } "This is an overlay!" } } } } fn make_config() -> dioxus::desktop::Config { dioxus::desktop::Config::default().with_window(make_window()) } fn make_window() -> WindowBuilder { WindowBuilder::new() .with_transparent(true) .with_decorations(false) .with_resizable(false) .with_always_on_top(true) .with_position(PhysicalPosition::new(0, 0)) .with_max_inner_size(LogicalSize::new(100000, 50)) } ================================================ FILE: examples/08-apis/read_size.rs ================================================ //! Read the size of elements using the MountedData struct. //! //! Whenever an Element is finally mounted to the Dom, its data is available to be read. //! These fields can typically only be read asynchronously, since various renderers need to release the main thread to //! perform layout and painting. 
use std::rc::Rc; use dioxus::{html::geometry::euclid::Rect, prelude::*}; fn main() { dioxus::launch(app); } fn app() -> Element { let mut div_element = use_signal(|| None as Option>); let mut dimensions = use_signal(Rect::zero); let read_dims = move |_| async move { let read = div_element.read(); let client_rect = read.as_ref().map(|el| el.get_client_rect()); if let Some(client_rect) = client_rect { if let Ok(rect) = client_rect.await { dimensions.set(rect); } } }; rsx! { Stylesheet { href: asset!("/examples/assets/read_size.css") } div { width: "50%", height: "50%", background_color: "red", onmounted: move |cx| div_element.set(Some(cx.data())), "This element is {dimensions():?}" } button { onclick: read_dims, "Read dimensions" } } } ================================================ FILE: examples/08-apis/scroll_to_offset.rs ================================================ //! Scroll elements using their MountedData //! //! Dioxus exposes a few helpful APIs around elements (mimicking the DOM APIs) to allow you to interact with elements //! across the renderers. This includes scrolling, reading dimensions, and more. //! //! In this example we demonstrate how to scroll to a given y offset of the scrollable parent using the `scroll` method on the `MountedData` use dioxus::html::geometry::PixelsVector2D; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { ScrollToCoordinates {} ScrollToCoordinates {} } } #[component] fn ScrollToCoordinates() -> Element { let mut element = use_signal(|| None); rsx! 
{ div { border: "1px solid black", position: "relative", div { height: "300px", overflow_y: "auto", onmounted: move |event| element.set(Some(event.data())), for i in 0..100 { div { height: "20px", "Item {i}" } } } div { position: "absolute", top: 0, right: 0, input { r#type: "number", min: "0", max: "99", oninput: move |event| async move { if let Some(ul) = element.cloned() { let data = event.data(); if let Ok(value) = data.parsed::() { ul.scroll(PixelsVector2D::new(0.0, 20.0 * value), ScrollBehavior::Smooth) .await .unwrap(); } } }, } } } } } ================================================ FILE: examples/08-apis/scroll_to_top.rs ================================================ //! Scroll elements using their MountedData //! //! Dioxus exposes a few helpful APIs around elements (mimicking the DOM APIs) to allow you to interact with elements //! across the renderers. This includes scrolling, reading dimensions, and more. //! //! In this example we demonstrate how to scroll to the top of the page using the `scroll_to` method on the `MountedData` use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut header_element = use_signal(|| None); rsx! { div { h1 { onmounted: move |cx| header_element.set(Some(cx.data())), "Scroll to top example" } for i in 0..100 { div { "Item {i}" } } button { onclick: move |_| async move { if let Some(header) = header_element.cloned() { header.scroll_to(ScrollBehavior::Smooth).await.unwrap(); } }, "Scroll to top" } } } } ================================================ FILE: examples/08-apis/shortcut.rs ================================================ //! Add global shortcuts to your app while a component is active //! //! This demo shows how to add a global shortcut to your app that toggles a signal. You could use this to implement //! a raycast-type app, or to add a global shortcut to your app that toggles a component on and off. //! //! 
These are *global* shortcuts, so they will work even if your app is not in focus. use dioxus::desktop::{HotKeyState, use_global_shortcut}; use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::desktop().launch(app); } fn app() -> Element { let mut toggled = use_signal(|| false); _ = use_global_shortcut("ctrl+s", move |state| { if state == HotKeyState::Pressed { toggled.toggle(); } }); rsx!("toggle: {toggled}") } ================================================ FILE: examples/08-apis/ssr.rs ================================================ //! Example: SSR //! //! This example shows how we can render the Dioxus Virtualdom using SSR. //! Dioxus' SSR is quite comprehensive and can generate a number of utility markers for things like hydration. //! //! You can also render without any markers to get a clean HTML output. use dioxus::prelude::*; fn main() { // We can render VirtualDoms let vdom = VirtualDom::prebuilt(app); println!("{}", dioxus_ssr::render(&vdom)); // Or we can render rsx! calls themselves println!( "{}", dioxus_ssr::render_element(rsx! { div { h1 { "Hello, world!" } } }) ); // We can configure the SSR rendering to add ids for rehydration println!("{}", dioxus_ssr::pre_render(&vdom)); // We can render to a buf directly too let mut file = String::new(); let mut renderer = dioxus_ssr::Renderer::default(); renderer.render_to(&mut file, &vdom).unwrap(); println!("{file}"); } fn app() -> Element { rsx!( div { h1 { "Title" } p { "Body" } } ) } ================================================ FILE: examples/08-apis/title.rs ================================================ //! This example shows how to set the title of the page or window with the Title component use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut count = use_signal(|| 0); rsx! { div { // You can set the title of the page with the Title component // In web applications, this sets the title in the head. 
// On desktop, it sets the window title Title { "My Application (Count {count})" } button { onclick: move |_| count += 1, "Up high!" } button { onclick: move |_| count -= 1, "Down low!" } } } } ================================================ FILE: examples/08-apis/video_stream.rs ================================================ //! Using `wry`'s http module, we can stream a video file from the local file system. //! //! You could load in any file type, but this example uses a video file. use dioxus::desktop::wry::http; use dioxus::desktop::wry::http::Response; use dioxus::desktop::{AssetRequest, use_asset_handler}; use dioxus::prelude::*; use http::{header::*, response::Builder as ResponseBuilder, status::StatusCode}; use std::{io::SeekFrom, path::PathBuf}; use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}; const VIDEO_PATH: &str = "./examples/assets/test_video.mp4"; fn main() { // For the sake of this example, we will download the video file if it doesn't exist ensure_video_is_loaded(); dioxus::launch(app); } fn app() -> Element { // Any request to /videos will be handled by this handler use_asset_handler("videos", move |request, responder| { // Using spawn works, but is slower than a dedicated thread tokio::task::spawn(async move { let video_file = PathBuf::from(VIDEO_PATH); let mut file = tokio::fs::File::open(&video_file).await.unwrap(); match get_stream_response(&mut file, &request).await { Ok(response) => responder.respond(response), Err(err) => eprintln!("Error: {}", err), } }); }); rsx! 
{ div { video { src: "/videos/test_video.mp4", autoplay: true, controls: true, width: 640, height: 480 } } } } /// This was taken from wry's example async fn get_stream_response( asset: &mut (impl tokio::io::AsyncSeek + tokio::io::AsyncRead + Unpin + Send + Sync), request: &AssetRequest, ) -> Result>, Box> { // get stream length let len = { let old_pos = asset.stream_position().await?; let len = asset.seek(SeekFrom::End(0)).await?; asset.seek(SeekFrom::Start(old_pos)).await?; len }; let mut resp = ResponseBuilder::new().header(CONTENT_TYPE, "video/mp4"); // if the webview sent a range header, we need to send a 206 in return // Actually only macOS and Windows are supported. Linux will ALWAYS return empty headers. let http_response = if let Some(range_header) = request.headers().get("range") { let not_satisfiable = || { ResponseBuilder::new() .status(StatusCode::RANGE_NOT_SATISFIABLE) .header(CONTENT_RANGE, format!("bytes */{len}")) .body(vec![]) }; // parse range header let ranges = if let Ok(ranges) = http_range::HttpRange::parse(range_header.to_str()?, len) { ranges .iter() // map the output back to spec range , example: 0-499 .map(|r| (r.start, r.start + r.length - 1)) .collect::>() } else { return Ok(not_satisfiable()?); }; /// The Maximum bytes we send in one range const MAX_LEN: u64 = 1000 * 1024; if ranges.len() == 1 { let &(start, mut end) = ranges.first().unwrap(); // check if a range is not satisfiable // // this should be already taken care of by HttpRange::parse // but checking here again for extra assurance if start >= len || end >= len || end < start { return Ok(not_satisfiable()?); } // adjust end byte for MAX_LEN end = start + (end - start).min(len - start).min(MAX_LEN - 1); // calculate number of bytes needed to be read let bytes_to_read = end + 1 - start; // allocate a buf with a suitable capacity let mut buf = Vec::with_capacity(bytes_to_read as usize); // seek the file to the starting byte asset.seek(SeekFrom::Start(start)).await?; // read the 
needed bytes asset.take(bytes_to_read).read_to_end(&mut buf).await?; resp = resp.header(CONTENT_RANGE, format!("bytes {start}-{end}/{len}")); resp = resp.header(CONTENT_LENGTH, end + 1 - start); resp = resp.status(StatusCode::PARTIAL_CONTENT); resp.body(buf) } else { let mut buf = Vec::new(); let ranges = ranges .iter() .filter_map(|&(start, mut end)| { // filter out unsatisfiable ranges // // this should be already taken care of by HttpRange::parse // but checking here again for extra assurance if start >= len || end >= len || end < start { None } else { // adjust end byte for MAX_LEN end = start + (end - start).min(len - start).min(MAX_LEN - 1); Some((start, end)) } }) .collect::>(); let boundary = format!("{:x}", rand::random::()); let boundary_sep = format!("\r\n--{boundary}\r\n"); let boundary_closer = format!("\r\n--{boundary}\r\n"); resp = resp.header( CONTENT_TYPE, format!("multipart/byteranges; boundary={boundary}"), ); for (end, start) in ranges { // a new range is being written, write the range boundary buf.write_all(boundary_sep.as_bytes()).await?; // write the needed headers `Content-Type` and `Content-Range` buf.write_all(format!("{CONTENT_TYPE}: video/mp4\r\n").as_bytes()) .await?; buf.write_all(format!("{CONTENT_RANGE}: bytes {start}-{end}/{len}\r\n").as_bytes()) .await?; // write the separator to indicate the start of the range body buf.write_all("\r\n".as_bytes()).await?; // calculate number of bytes needed to be read let bytes_to_read = end + 1 - start; let mut local_buf = vec![0_u8; bytes_to_read as usize]; asset.seek(SeekFrom::Start(start)).await?; asset.read_exact(&mut local_buf).await?; buf.extend_from_slice(&local_buf); } // all ranges have been written, write the closing boundary buf.write_all(boundary_closer.as_bytes()).await?; resp.body(buf) } } else { resp = resp.header(CONTENT_LENGTH, len); let mut buf = Vec::with_capacity(len as usize); asset.read_to_end(&mut buf).await?; resp.body(buf) }; http_response.map_err(Into::into) } fn 
ensure_video_is_loaded() { let video_file = PathBuf::from(VIDEO_PATH); if !video_file.exists() { tokio::runtime::Runtime::new() .unwrap() .block_on(async move { println!("Downloading video file..."); let video_url = "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4"; let mut response = reqwest::get(video_url).await.unwrap(); let mut file = tokio::fs::File::create(&video_file).await.unwrap(); while let Some(chunk) = response.chunk().await.unwrap() { file.write_all(&chunk).await.unwrap(); } }); } } ================================================ FILE: examples/08-apis/wgpu_child_window.rs ================================================ //! Demonstrate how to use dioxus as a child window for use in alternative renderers like wgpu. //! //! The code here is borrowed from wry's example: //! https://github.com/tauri-apps/wry/blob/dev/examples/wgpu.rs //! //! To use this feature set `with_as_child_window()` on your desktop config which will then let you use dioxus::prelude::*; use dioxus::{ desktop::tao::{event::Event as WryEvent, window::Window}, desktop::{Config, tao::window::WindowBuilder, use_wry_event_handler, window}, }; use std::sync::Arc; fn main() { let config = Config::new() .with_window(WindowBuilder::new().with_transparent(true)) .with_on_window(|window, dom| { let resources = Arc::new(pollster::block_on(async { let resource = GraphicsContextAsyncBuilder { desktop: window, resources_builder: |ctx| Box::pin(GraphicsResources::new(ctx.clone())), } .build() .await; resource.with_resources(|resources| resources.render()); resource })); dom.provide_root_context(resources); }) .with_as_child_window(); dioxus::LaunchBuilder::desktop() .with_cfg(config) .launch(app); } fn app() -> Element { let graphics_resources = consume_context::>(); // on first render request a redraw use_effect(|| { window().window.request_redraw(); }); use_wry_event_handler(move |event, _| { use dioxus::desktop::tao::event::WindowEvent; if let 
WryEvent::WindowEvent { event: WindowEvent::Resized(new_size), .. } = event { graphics_resources.with_resources(|srcs| { let mut cfg = srcs.config.clone(); cfg.width = new_size.width; cfg.height = new_size.height; srcs.surface.configure(&srcs.device, &cfg); }); window().window.request_redraw(); } }); rsx! { div { color: "blue", width: "100vw", height: "100vh", display: "flex", justify_content: "center", align_items: "center", font_size: "20px", div { "text overlaid on a wgpu surface!" } } } } /// This borrows from the `window` which is contained within an `Arc` so we need to wrap it in a self-borrowing struct /// to be able to borrow the window for the wgpu::Surface #[ouroboros::self_referencing] struct GraphicsContext { desktop: Arc, #[borrows(desktop)] #[not_covariant] resources: GraphicsResources<'this>, } struct GraphicsResources<'a> { surface: wgpu::Surface<'a>, device: wgpu::Device, pipeline: wgpu::RenderPipeline, queue: wgpu::Queue, config: wgpu::SurfaceConfiguration, } impl<'a> GraphicsResources<'a> { async fn new(window: Arc) -> Self { let size = window.inner_size(); let instance = wgpu::Instance::default(); let surface: wgpu::Surface<'a> = instance.create_surface(window).unwrap(); let adapter = instance .request_adapter(&wgpu::RequestAdapterOptions { power_preference: wgpu::PowerPreference::default(), force_fallback_adapter: false, // Request an adapter which can render to our surface compatible_surface: Some(&surface), }) .await .expect("Failed to find an appropriate adapter"); // Create the logical device and command queue let (device, queue) = adapter .request_device(&wgpu::DeviceDescriptor { label: None, required_features: wgpu::Features::empty(), // Make sure we use the texture resolution limits from the adapter, so we can support images the size of the swapchain. 
required_limits: wgpu::Limits::downlevel_webgl2_defaults() .using_resolution(adapter.limits()), memory_hints: wgpu::MemoryHints::default(), ..Default::default() }) .await .expect("Failed to create device"); // Load the shaders from disk let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { label: None, source: wgpu::ShaderSource::Wgsl( r#" @vertex fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> @builtin(position) vec4 { let x = f32(i32(in_vertex_index) - 1); let y = f32(i32(in_vertex_index & 1u) * 2 - 1); return vec4(x, y, 0.0, 1.0); } @fragment fn fs_main() -> @location(0) vec4 { return vec4(1.0, 0.0, 0.0, 1.0); } "# .into(), ), }); let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { label: None, bind_group_layouts: &[], push_constant_ranges: &[], }); let swapchain_capabilities = surface.get_capabilities(&adapter); let swapchain_format = swapchain_capabilities.formats[0]; let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor { label: None, layout: Some(&pipeline_layout), vertex: wgpu::VertexState { module: &shader, entry_point: Some("vs_main"), buffers: &[], compilation_options: wgpu::PipelineCompilationOptions::default(), }, fragment: Some(wgpu::FragmentState { module: &shader, entry_point: Some("fs_main"), targets: &[Some(swapchain_format.into())], compilation_options: wgpu::PipelineCompilationOptions::default(), }), primitive: wgpu::PrimitiveState::default(), depth_stencil: None, multisample: wgpu::MultisampleState::default(), multiview: None, cache: None, }); let config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: swapchain_format, width: size.width, height: size.height, present_mode: wgpu::PresentMode::Fifo, desired_maximum_frame_latency: 2, alpha_mode: wgpu::CompositeAlphaMode::PostMultiplied, view_formats: vec![], }; surface.configure(&device, &config); GraphicsResources { surface, device, pipeline, queue, config, } } fn 
render(&self) { let GraphicsResources { surface, device, pipeline, queue, .. } = self; let frame = surface .get_current_texture() .expect("Failed to acquire next swap chain texture"); let view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None }); { let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { label: None, color_attachments: &[Some(wgpu::RenderPassColorAttachment { view: &view, resolve_target: None, ops: wgpu::Operations { load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT), store: wgpu::StoreOp::Store, }, depth_slice: None, })], depth_stencil_attachment: None, timestamp_writes: None, occlusion_query_set: None, }); rpass.set_pipeline(pipeline); rpass.draw(0..3, 0..1); } queue.submit(Some(encoder.finish())); frame.present(); } } ================================================ FILE: examples/08-apis/window_event.rs ================================================ //! This example demonstrates how to handle window events and change window properties. //! //! We're able to do things like: //! - implement window dragging //! - toggle fullscreen //! - toggle always on top //! - toggle window decorations //! - change the window title //! //! The entire featuresuite of wry and tao is available to you use dioxus::desktop::{Config, WindowBuilder, window}; use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::desktop() .with_cfg( Config::new().with_window( WindowBuilder::new() .with_title("Borderless Window") .with_decorations(false), ), ) .launch(app) } fn app() -> Element { rsx!( document::Link { href: "https://unpkg.com/tailwindcss@^2/dist/tailwind.min.css", rel: "stylesheet" } Header {} div { class: "container mx-auto", div { class: "grid grid-cols-5", SetOnTop {} SetDecorations {} SetTitle {} } } ) } #[component] fn Header() -> Element { let mut fullscreen = use_signal(|| false); rsx! 
{ header { class: "text-gray-400 bg-gray-900 body-font", onmousedown: move |_| window().drag(), div { class: "container mx-auto flex flex-wrap p-5 flex-col md:flex-row items-center", a { class: "flex title-font font-medium items-center text-white mb-4 md:mb-0", span { class: "ml-3 text-xl", "Dioxus" } } nav { class: "md:ml-auto flex flex-wrap items-center text-base justify-center" } // Set the window to minimized button { class: "inline-flex items-center bg-gray-800 border-0 py-1 px-3 focus:outline-none hover:bg-gray-700 rounded text-base mt-4 md:mt-0", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| window().set_minimized(true), "Minimize" } // Toggle fullscreen button { class: "inline-flex items-center bg-gray-800 border-0 py-1 px-3 focus:outline-none hover:bg-gray-700 rounded text-base mt-4 md:mt-0", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| { window().set_fullscreen(!fullscreen()); window().set_resizable(fullscreen()); fullscreen.toggle(); }, "Fullscreen" } // Close the window // If the window is the last window open, the app will close, if you configured the close behavior to do so button { class: "inline-flex items-center bg-gray-800 border-0 py-1 px-3 focus:outline-none hover:bg-gray-700 rounded text-base mt-4 md:mt-0", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| window().close(), "Close" } } } } } #[component] fn SetOnTop() -> Element { let mut always_on_top = use_signal(|| false); rsx! { div { button { class: "inline-flex items-center text-white bg-green-500 border-0 py-1 px-3 hover:bg-green-700 rounded", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| { window().set_always_on_top(!always_on_top()); always_on_top.toggle(); }, "Always On Top" } } } } #[component] fn SetDecorations() -> Element { let mut decorations = use_signal(|| false); rsx! 
{ div { button { class: "inline-flex items-center text-white bg-blue-500 border-0 py-1 px-3 hover:bg-green-700 rounded", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| { window().set_decorations(!decorations()); decorations.toggle(); }, "Set Decorations" } } } } #[component] fn SetTitle() -> Element { rsx! { div { button { class: "inline-flex items-center text-white bg-blue-500 border-0 py-1 px-3 hover:bg-green-700 rounded", onmousedown: |evt| evt.stop_propagation(), onclick: move |_| window().set_title("Dioxus Application"), "Change Title" } } } } ================================================ FILE: examples/08-apis/window_focus.rs ================================================ //! Listen for window focus events using a wry event handler //! //! This example shows how to use the use_wry_event_handler hook to listen for window focus events. //! We can intercept any Wry event, but in this case we're only interested in the WindowEvent::Focused event. //! //! This lets you do things like backgrounding tasks, pausing animations, or changing the UI when the window is focused or not. use dioxus::desktop::tao::event::Event as WryEvent; use dioxus::desktop::tao::event::WindowEvent; use dioxus::desktop::use_wry_event_handler; use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut focused = use_signal(|| true); use_wry_event_handler(move |event, _| { if let WryEvent::WindowEvent { event: WindowEvent::Focused(new_focused), .. } = event { focused.set(*new_focused) } }); rsx! { div { width: "100%", height: "100%", display: "flex", flex_direction: "column", align_items: "center", if focused() { "This window is focused!" } else { "This window is not focused!" } } } } ================================================ FILE: examples/08-apis/window_popup.rs ================================================ //! This example shows how to create a popup window and send data back to the parent window. //! 
Currently Dioxus doesn't support nested renderers, hence the need to create popups as separate windows. use dioxus::prelude::*; use std::rc::Rc; fn main() { dioxus::LaunchBuilder::desktop().launch(app); } fn app() -> Element { let mut emails_sent = use_signal(|| Vec::new() as Vec); // Wait for responses to the compose channel, and then push them to the emails_sent signal. let handle = use_coroutine(move |mut rx: UnboundedReceiver| async move { use futures_util::StreamExt; while let Some(message) = rx.next().await { emails_sent.push(message); } }); let open_compose_window = move |_evt: MouseEvent| { let tx = handle.tx(); dioxus::desktop::window().new_window( VirtualDom::new_with_props(popup, Rc::new(move |s| tx.unbounded_send(s).unwrap())), Default::default(), ); }; rsx! { h1 { "This is your email" } button { onclick: open_compose_window, "Click to compose a new email" } ul { for message in emails_sent.read().iter() { li { h3 { "email" } span { "{message}" } } } } } } fn popup(send: Rc) -> Element { let mut user_input = use_signal(String::new); let window = dioxus::desktop::use_window(); let close_window = move |_| { println!("Attempting to close Window B"); window.close(); }; rsx! { div { h1 { "Compose a new email" } button { onclick: close_window, "Close Window B (button)" } button { onclick: move |_| { send(user_input.cloned()); dioxus::desktop::window().close(); }, "Send" } input { oninput: move |e| user_input.set(e.value()), value: "{user_input}" } } } } ================================================ FILE: examples/08-apis/window_zoom.rs ================================================ //! Adjust the zoom of a desktop app //! //! This example shows how to adjust the zoom of a desktop app using the webview.zoom method. use dioxus::prelude::*; fn main() { dioxus::LaunchBuilder::desktop().launch(app); } fn app() -> Element { let mut level = use_signal(|| 1.0); rsx! 
{ h1 { "Zoom level: {level}" } p { "Change the zoom level of the webview by typing a number in the input below." } input { r#type: "number", value: "{level}", oninput: move |e| { if let Ok(new_zoom) = e.value().parse::() { level.set(new_zoom); _ = dioxus::desktop::window().webview.zoom(new_zoom); } } } } } ================================================ FILE: examples/09-reference/all_events.rs ================================================ //! This example shows how to listen to all events on a div and log them to the console. //! //! The primary demonstration here is the properties on the events themselves, hoping to give you some inspiration //! on adding interactivity to your own application. use dioxus::prelude::*; use std::{collections::VecDeque, fmt::Debug, rc::Rc}; const STYLE: Asset = asset!("/examples/assets/events.css"); fn main() { dioxus::launch(app); } fn app() -> Element { // Using a VecDeque so its cheap to pop old events off the front let mut events = use_signal(VecDeque::new); // All events and their data implement Debug, so we can re-cast them as Rc instead of their specific type let mut log_event = move |event: Rc| { // Only store the last 20 events if events.read().len() >= 20 { events.write().pop_front(); } events.write().push_back(event); }; let random_text = "This is some random repeating text. ".repeat(1000); rsx! 
{ Stylesheet { href: STYLE } div { id: "container", // focusing is necessary to catch keyboard events div { id: "receiver", tabindex: 0, onmousemove: move |event| log_event(event.data()), onclick: move |event| log_event(event.data()), ondoubleclick: move |event| log_event(event.data()), onmousedown: move |event| log_event(event.data()), onmouseup: move |event| log_event(event.data()), onwheel: move |event| log_event(event.data()), onkeydown: move |event| log_event(event.data()), onkeyup: move |event| log_event(event.data()), onkeypress: move |event| log_event(event.data()), onfocusin: move |event| log_event(event.data()), onfocusout: move |event| log_event(event.data()), "Hover, click, type or scroll to see the info down below" } div { style: "padding: 50px;", div { style: "text-align: center; padding: 20px; font-family: sans-serif; overflow: auto; height: 400px;", onscroll: move |event: Event| { log_event(event.data()); }, div { style: "margin: 20px; padding: 15px; border: 1px solid #ccc; border-radius: 5px;", p { "{random_text}" } } } } div { id: "log", for event in events.read().iter() { div { "{event:?}" } } } } } } ================================================ FILE: examples/09-reference/generic_component.rs ================================================ //! This example demonstrates how to create a generic component in Dioxus. //! //! Generic components can be useful when you want to create a component that renders differently depending on the type //! of data it receives. In this particular example, we're just using a type that implements `Display` and `PartialEq`, use dioxus::prelude::*; use std::fmt::Display; fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { generic_child { data: 0 } } } #[derive(PartialEq, Props, Clone)] struct GenericChildProps { data: T, } fn generic_child(props: GenericChildProps) -> Element { rsx! 
{ div { "{props.data}" } } } ================================================ FILE: examples/09-reference/optional_props.rs ================================================ //! Optional props //! //! This example demonstrates how to use optional props in your components. The `Button` component has several props, //! and we use a variety of attributes to set them. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { // We can set some of the props, and the rest will be filled with their default values // By default `c` can take a `None` value, but `d` is required to wrap a `Some` value Button { a: "asd".to_string(), // b can be omitted, and it will be filled with its default value c: "asd".to_string(), d: Some("asd".to_string()), e: Some("asd".to_string()), } Button { a: "asd".to_string(), b: "asd".to_string(), // We can omit the `Some` on `c` since Dioxus automatically transforms Option into optional c: "asd".to_string(), d: Some("asd".to_string()), e: "asd".to_string(), } // `b` and `e` are omitted Button { a: "asd".to_string(), c: "asd".to_string(), d: Some("asd".to_string()), } } } #[derive(Props, PartialEq, Clone)] struct ButtonProps { a: String, #[props(default)] b: String, c: Option, #[props(!optional)] d: Option, #[props(optional)] e: SthElse, } type SthElse = Option; #[allow(non_snake_case)] fn Button(props: ButtonProps) -> Element { rsx! { button { "{props.a} | " "{props.b:?} | " "{props.c:?} | " "{props.d:?} | " "{props.e:?}" } } } ================================================ FILE: examples/09-reference/rsx_usage.rs ================================================ //! A tour of the rsx! macro //! ------------------------ //! //! This example serves as an informal quick reference of all the things that the rsx! macro can do. //! //! A full in-depth reference guide is available at: https://www.notion.so/rsx-macro-basics-ef6e367dec124f4784e736d91b0d0b19 //! //! ### Elements //! - Create any element from its tag //! 
- Accept compile-safe attributes for each tag //! - Display documentation for elements //! - Arguments instead of String //! - Text //! - Inline Styles //! //! ## General Concepts //! - Iterators //! - Keys //! - Match statements //! - Conditional Rendering //! //! ### Events //! - Handle events with the "onXYZ" syntax //! - Closures can capture their environment with the 'static lifetime //! //! //! ### Components //! - Components can be made by specifying the name //! - Components can be referenced by path //! - Components may have optional parameters //! - Components may have their properties specified by spread syntax //! - Components may accept child nodes //! - Components that accept "onXYZ" get those closures bump allocated //! //! ### Fragments //! - Allow fragments using the built-in `Fragment` component //! - Accept a list of vnodes as children for a Fragment component //! - Allow keyed fragments in iterators //! - Allow top-level fragments fn main() { dioxus::launch(app) } use core::{fmt, str::FromStr}; use std::fmt::Display; use baller::Baller; use dioxus::prelude::*; fn app() -> Element { let formatting = "formatting!"; let formatting_tuple = ("a", "b"); let lazy_fmt = format_args!("lazily formatted text"); let asd = 123; rsx! 
{ div { // Elements div {} h1 {"Some text"} h1 {"Some text with {formatting}"} h1 {"Formatting basic expressions {formatting_tuple.0} and {formatting_tuple.1}"} h1 {"Formatting without interpolation " {formatting_tuple.0} "and" {formatting_tuple.1} } h2 { "Multiple" "Text" "Blocks" "Use comments as separators in html" } div { h1 {"multiple"} h2 {"nested"} h3 {"elements"} } div { class: "my special div", h1 {"Headers and attributes!"} } div { h1 {"Style attributes!"} p { "hello" b { "world" } i { "foo" } span { style: "color: red;font-style:italic", "red" } span { color: "blue", font_weight: "bold", "attr_blue" } } } div { // pass simple rust expressions in class: "{lazy_fmt}", id: format_args!("attributes can be passed lazily with std::fmt::Arguments"), class: "asd", class: "{asd}", // if statements can be used to conditionally render attributes class: if formatting.contains("form") { "{asd}" }, // longer if chains also work class: if formatting.contains("form") { "{asd}" } else if formatting.contains("my other form") { "{asd}" }, class: if formatting.contains("form") { "{asd}" } else if formatting.contains("my other form") { "{asd}" } else { "{asd}" }, div { class: format_args!("Arguments can be passed in through curly braces for complex {asd}") } } // dangerous_inner_html for both html and svg div { dangerous_inner_html: "

hello dangerous inner html

" } svg { dangerous_inner_html: "" } // Built-in idents can be used use {} link { as: "asd" } // Expressions can be used in element position too: {rsx!(p { "More templating!" })} // Iterators {(0..10).map(|i| rsx!(li { "{i}" }))} // Iterators within expressions { let data = std::collections::HashMap::<&'static str, &'static str>::new(); // Iterators *should* have keys when you can provide them. // Keys make your app run faster. Make sure your keys are stable, unique, and predictable. // Using an "ID" associated with your data is a good idea. data.into_iter().map(|(k, v)| rsx!(li { key: "{k}", "{v}" })) } // Matching match true { true => rsx!( h1 {"Top text"}), false => rsx!( h1 {"Bottom text"}) } // Conditional rendering // Dioxus conditional rendering is based around None/Some. We have no special syntax for conditionals. // You can convert a bool condition to rsx! with .then and .or {true.then(|| rsx!(div {}))} // Alternatively, you can use the "if" syntax - but both branches must be resolve to Element if false { h1 {"Top text"} } else { h1 {"Bottom text"} } // Using optionals for diverging branches // Note that since this is wrapped in curlies, it's interpreted as an expression {if true { Some(rsx!(h1 {"Top text"})) } else { None }} // returning "None" without a diverging branch is a bit noisy... but rare in practice {None as Option<()>} // can also just use empty fragments Fragment {} // Fragments let you insert groups of nodes without a parent. // This lets you make components that insert elements as siblings without a container. 
div {"A"} Fragment { div {"B"} div {"C"} Fragment { "D" Fragment { "E" "F" } } } // Components // Can accept any paths // Notice how you still get syntax highlighting and IDE support :) Baller {} baller::Baller {} crate::baller::Baller {} // Can take properties Taller { a: "asd" } // Can take optional properties Taller { a: "asd" } // Can pass in props directly as an expression { let props = TallerProps {a: "hello", children: VNode::empty() }; rsx!(Taller { ..props }) } // Spreading can also be overridden manually Taller { a: "not ballin!", ..TallerProps { a: "ballin!", children: VNode::empty() } } // Can take children too! Taller { a: "asd", div {"hello world!"} } // This component's props are defined *inline* with the `component` macro WithInline { text: "using functionc all syntax" } // Components can be generic too // This component takes i32 type to give you typed input TypedInput:: {} // Type inference can be used too TypedInput { initial: 10.0 } // generic with the `component` macro Label { text: "hello generic world!" } Label { text: 99.9 } // Lowercase components work too, as long as they are access using a path baller::lowercase_component {} // For in-scope lowercase components, use the `self` keyword self::lowercase_helper {} // helper functions // Anything that implements IntoVnode can be dropped directly into Rsx {helper("hello world!")} // Strings can be supplied directly {String::from("Hello world!")} // So can format_args // todo(jon): this is broken in edition 2024 // {format_args!("Hello {}!", "world")} // Or we can shell out to a helper function {format_dollars(10, 50)} } } } fn format_dollars(dollars: u32, cents: u32) -> String { format!("${dollars}.{cents:02}") } fn helper(text: &str) -> Element { rsx! { p { "{text}" } } } // no_case_check disables PascalCase checking if you *really* want a snake_case component. 
// This will likely be deprecated/removed in a future update that will introduce a more polished linting system, // something like Clippy. #[component(no_case_check)] fn lowercase_helper() -> Element { rsx! { "asd" } } mod baller { use super::*; #[component] /// This component totally balls pub fn Baller() -> Element { rsx! { "ballin'" } } // no_case_check disables PascalCase checking if you *really* want a snake_case component. // This will likely be deprecated/removed in a future update that will introduce a more polished linting system, // something like Clippy. #[component(no_case_check)] pub fn lowercase_component() -> Element { rsx! { "look ma, no uppercase" } } } /// Documentation for this component is visible within the rsx macro #[component] pub fn Taller( /// Fields are documented and accessible in rsx! a: &'static str, children: Element, ) -> Element { rsx! { {&children} } } #[derive(Props, Clone, PartialEq, Eq)] pub struct TypedInputProps { #[props(optional, default)] initial: Option, } #[allow(non_snake_case)] pub fn TypedInput(props: TypedInputProps) -> Element where T: FromStr + fmt::Display + PartialEq + Clone + 'static, ::Err: std::fmt::Display, { if let Some(props) = props.initial { return rsx! { "{props}" }; } VNode::empty() } #[component] fn WithInline(text: String) -> Element { rsx! { p { "{text}" } } } #[component] fn Label(text: T) -> Element { rsx! { p { "{text}" } } } ================================================ FILE: examples/09-reference/shorthand.rs ================================================ //! Dioxus supports shorthand syntax for creating elements and components. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let a = 123; let b = 456; let c = 789; let class = "class"; let id = "id"; // todo: i'd like it for children on elements to be inferred as the children of the element // also should shorthands understand references/dereferences? // ie **a, *a, &a, &mut a, etc let children = rsx! 
{ "Child" }; let onclick = move |_| println!("Clicked!"); rsx! { div { class, id, {&children} } Component { a, b, c, children, onclick } Component { a, ..ComponentProps { a: 1, b: 2, c: 3, children: VNode::empty(), onclick: Default::default() } } } } #[component] fn Component( a: i32, b: i32, c: i32, children: Element, onclick: EventHandler, ) -> Element { rsx! { div { "{a}" } div { "{b}" } div { "{c}" } div { {children} } div { onclick } } } ================================================ FILE: examples/09-reference/simple_list.rs ================================================ //! A few ways of mapping elements into rsx! syntax //! //! Rsx allows anything that's an iterator where the output type implements Into, so you can use any of the following: use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { rsx!( div { // Use Map directly to lazily pull elements {(0..10).map(|f| rsx! { "{f}" })} // Collect into an intermediate collection if necessary, and call into_iter {["a", "b", "c", "d", "e", "f"] .into_iter() .map(|f| rsx! { "{f}" }) .collect::>() .into_iter()} // Use optionals {Some(rsx! { "Some" })} // use a for loop where the body itself is RSX for name in 0..10 { div { "{name}" } } // Or even use an unterminated conditional if true { "hello world!" } } ) } ================================================ FILE: examples/09-reference/spread.rs ================================================ //! This example demonstrates how to use the spread operator to pass attributes to child components. //! //! This lets components like the `Link` allow the user to extend the attributes of the underlying `a` tag. //! These attributes are bundled into a `Vec` which can be spread into the child component using the `..` operator. use dioxus::prelude::*; fn main() { let dom = VirtualDom::prebuilt(app); let html = dioxus_ssr::render(&dom); println!("{}", html); } fn app() -> Element { rsx! 
{ SpreadableComponent { width: "10px", extra_data: "hello{1}", extra_data2: "hello{2}", height: "10px", left: 1, "data-custom-attribute": "value", } } } #[derive(Props, PartialEq, Clone)] struct Props { #[props(extends = GlobalAttributes)] attributes: Vec, extra_data: String, extra_data2: String, } #[component] fn SpreadableComponent(props: Props) -> Element { rsx! { audio { ..props.attributes, "1: {props.extra_data}\n2: {props.extra_data2}" } } } ================================================ FILE: examples/09-reference/web_component.rs ================================================ //! Dioxus allows webcomponents to be created with a simple syntax. //! //! Read more about webcomponents [here](https://developer.mozilla.org/en-US/docs/Web/Web_Components) //! //! We typically suggest wrapping webcomponents in a strongly typed interface using a component. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { rsx! { div { h1 { "Web Components" } CoolWebComponent { my_prop: "Hello, world!".to_string() } } } } /// A web-component wrapped with a strongly typed interface using a component #[component] fn CoolWebComponent(my_prop: String) -> Element { rsx! { // rsx! takes a webcomponent as long as its tag name is separated with dashes web-component { // Since web-components don't have built-in attributes, the attribute names must be passed as a string "my-prop": my_prop, } } } ================================================ FILE: examples/09-reference/xss_safety.rs ================================================ //! XSS Safety //! //! This example proves that Dioxus is safe from XSS attacks. use dioxus::prelude::*; fn main() { dioxus::launch(app); } fn app() -> Element { let mut contents = use_signal(|| String::from("")); rsx! 
{ div { h1 {"Dioxus is XSS-Safe"} h3 { "{contents}" } input { value: "{contents}", r#type: "text", oninput: move |e| contents.set(e.value()), } } } } ================================================ FILE: examples/10-integrations/bevy/Cargo.toml ================================================ [package] name = "bevy-example" version = "0.0.0" edition = "2021" license = "MIT" publish = false [features] tracing = ["dep:tracing-subscriber", "dioxus-native/tracing"] [dependencies] bevy = { workspace = true } dioxus-native = { workspace = true, features = ["prelude"] } wgpu = { workspace = true } color = "0.3" tracing-subscriber = { workspace = true, optional = true } ================================================ FILE: examples/10-integrations/bevy/src/bevy_renderer.rs ================================================ use crate::bevy_scene_plugin::BevyScenePlugin; use bevy::{ camera::{ManualTextureViewHandle, RenderTarget}, prelude::*, render::{ render_resource::TextureFormat, renderer::{ RenderAdapter, RenderAdapterInfo, RenderDevice, RenderInstance, RenderQueue, WgpuWrapper, }, settings::{RenderCreation, RenderResources}, texture::ManualTextureView, RenderPlugin, }, }; use dioxus_native::{CustomPaintCtx, DeviceHandle, TextureHandle}; use std::sync::Arc; #[derive(Resource, Default)] pub struct UIData { pub color: [f32; 3], } pub struct BevyRenderer { app: App, wgpu_device: wgpu::Device, last_texture_size: (u32, u32), texture_handle: Option, manual_texture_view_handle: Option, } impl BevyRenderer { pub fn new(device_handle: &DeviceHandle) -> Self { // Create a headless Bevy App. let mut app = App::new(); app.add_plugins( DefaultPlugins .set(RenderPlugin { // Reuse the render resources from the Dioxus native renderer. 
render_creation: RenderCreation::Manual(RenderResources( RenderDevice::new(WgpuWrapper::new(device_handle.device.clone())), RenderQueue(Arc::new(WgpuWrapper::new(device_handle.queue.clone()))), RenderAdapterInfo(WgpuWrapper::new(device_handle.adapter.get_info())), RenderAdapter(Arc::new(WgpuWrapper::new(device_handle.adapter.clone()))), RenderInstance(Arc::new(WgpuWrapper::new(device_handle.instance.clone()))), )), synchronous_pipeline_compilation: true, ..default() }) .set(WindowPlugin { primary_window: None, exit_condition: bevy::window::ExitCondition::DontExit, close_when_requested: false, ..Default::default() }) .disable::(), ); // Setup the rendering to texture. app.insert_resource(ManualTextureViews::default()); // Add data from the UI. app.insert_resource(UIData::default()); // Add the Bevy scene. app.add_plugins(BevyScenePlugin {}); // Initialize the app to set up the render world properly. app.finish(); app.cleanup(); Self { app, wgpu_device: device_handle.device.clone(), last_texture_size: (0, 0), texture_handle: None, manual_texture_view_handle: None, } } pub fn render( &mut self, ctx: CustomPaintCtx<'_>, color: [f32; 3], width: u32, height: u32, _start_time: &std::time::Instant, ) -> Option { // Update the UI data. if let Some(mut ui) = self.app.world_mut().get_resource_mut::() { ui.color = color; } // Init self.texture_handle if None or if width/height changed. self.init_texture(ctx, width, height); // Run one frame of the Bevy app to render the 3D scene. self.app.update(); self.texture_handle.clone() } fn init_texture(&mut self, mut ctx: CustomPaintCtx<'_>, width: u32, height: u32) { // Reuse self.texture_handle if already initialized to the correct size. let current_size = (width, height); if self.texture_handle.is_some() && self.last_texture_size == current_size { return; } let world = self.app.world_mut(); // Skip if no camera. 
if world.query::<&Camera>().single(world).is_err() { return; } if let Some(mut manual_texture_views) = world.get_resource_mut::() { // Clean previous texture if any. if self.texture_handle.is_some() { ctx.unregister_texture(self.texture_handle.take().unwrap()); } if let Some(old_handle) = self.manual_texture_view_handle { manual_texture_views.remove(&old_handle); self.manual_texture_view_handle = None; } // Create the texture for the camera target and the CustomPaintCtx. let format = TextureFormat::Rgba8UnormSrgb; let wgpu_texture = self.wgpu_device.create_texture(&wgpu::TextureDescriptor { label: None, size: wgpu::Extent3d { width, height, depth_or_array_layers: 1, }, mip_level_count: 1, sample_count: 1, dimension: wgpu::TextureDimension::D2, format, usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC, view_formats: &[], }); let wgpu_texture_view = wgpu_texture.create_view(&wgpu::TextureViewDescriptor::default()); let manual_texture_view = ManualTextureView { texture_view: wgpu_texture_view.into(), size: bevy::math::UVec2::new(width, height), format, }; let manual_texture_view_handle = ManualTextureViewHandle(0); manual_texture_views.insert(manual_texture_view_handle, manual_texture_view); if let Ok(mut camera) = world.query::<&mut Camera>().single_mut(world) { camera.target = RenderTarget::TextureView(manual_texture_view_handle); self.last_texture_size = current_size; self.manual_texture_view_handle = Some(manual_texture_view_handle); self.texture_handle = Some(ctx.register_texture(wgpu_texture)); } } } } ================================================ FILE: examples/10-integrations/bevy/src/bevy_scene_plugin.rs ================================================ use crate::bevy_renderer::UIData; use bevy::prelude::*; #[derive(Component)] pub struct DynamicColoredCube; pub struct BevyScenePlugin {} impl Plugin for BevyScenePlugin { fn build(&self, app: &mut App) { 
app.insert_resource(ClearColor(bevy::color::Color::srgba(0.0, 0.0, 0.0, 0.0))); app.add_systems(Startup, setup); app.add_systems(Update, (animate, update_cube_color)); } } fn setup( mut commands: Commands, mut meshes: ResMut>, mut materials: ResMut>, ) { commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), MeshMaterial3d(materials.add(StandardMaterial { base_color: bevy::color::Color::srgb(1.0, 0.0, 0.0), metallic: 0.0, perceptual_roughness: 0.5, ..default() })), Transform::from_xyz(0.0, 0.0, 0.0), DynamicColoredCube, )); commands.spawn(( DirectionalLight { color: bevy::color::Color::WHITE, illuminance: 10000.0, shadows_enabled: false, ..default() }, Transform::from_xyz(1.0, 1.0, 1.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.insert_resource(AmbientLight { color: bevy::color::Color::WHITE, brightness: 100.0, affects_lightmapped_meshes: true, }); commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 0.0, 3.0).looking_at(Vec3::new(0.0, 0.0, 0.0), Vec3::Y), Name::new("MainCamera"), )); } fn animate(time: Res
================================================ FILE: packages/cli/assets/web/prod.index.html ================================================ {app_title}
================================================ FILE: packages/cli/build.rs ================================================ fn main() { built::write_built_file().expect("Failed to acquire build-time information"); } ================================================ FILE: packages/cli/schema.json ================================================ { "$schema": "http://json-schema.org/draft-07/schema#", "title": "DioxusConfig", "type": "object", "properties": { "android": { "description": "Android-specific configuration.", "$ref": "#/definitions/AndroidConfig" }, "application": { "$ref": "#/definitions/ApplicationConfig" }, "background": { "description": "Unified background mode configuration. Background capabilities declared here are mapped to platform-specific configurations. Use `[ios]`, `[android]` sections for overrides.", "$ref": "#/definitions/BackgroundConfig" }, "bundle": { "$ref": "#/definitions/BundleConfig" }, "components": { "$ref": "#/definitions/ComponentConfig" }, "deep_links": { "description": "Unified deep linking configuration. URL schemes and universal links declared here are mapped to platform-specific configurations. Use `[ios]`, `[android]`, `[macos]` sections for overrides.", "$ref": "#/definitions/DeepLinkConfig" }, "ios": { "description": "iOS-specific configuration.", "$ref": "#/definitions/IosConfig" }, "linux": { "description": "Linux-specific configuration.", "$ref": "#/definitions/LinuxConfig" }, "macos": { "description": "macOS-specific configuration.", "$ref": "#/definitions/MacosConfig" }, "permissions": { "description": "Unified permissions configuration. 
Permissions declared here are automatically mapped to platform-specific identifiers (AndroidManifest.xml, Info.plist, etc.)", "$ref": "#/definitions/PermissionsConfig" }, "web": { "$ref": "#/definitions/WebConfig" }, "windows": { "description": "Windows-specific configuration.", "$ref": "#/definitions/WindowsConfig" } }, "definitions": { "AndroidApplicationConfig": { "type": "object", "properties": { "large_heap": { "description": "Enable large heap.", "type": [ "boolean", "null" ] }, "supports_rtl": { "description": "RTL layout support.", "type": [ "boolean", "null" ] }, "theme": { "description": "Application theme.", "type": [ "string", "null" ] }, "uses_cleartext_traffic": { "description": "Enable cleartext (HTTP) traffic.", "type": [ "boolean", "null" ] } } }, "AndroidConfig": { "description": "Android-specific configuration.\n\nExample: ```toml [android] min_sdk = 24 target_sdk = 34 identifier = \"com.example.myapp.android\" # Override bundle.identifier for Android features = [\"android.hardware.location.gps\"]\n\n# Android signing configuration (previously in [bundle.android]) [android.signing] jks_file = \"keystore.jks\" jks_password = \"password\" key_alias = \"mykey\" key_password = \"keypassword\"\n\n[android.permissions] \"android.permission.FOREGROUND_SERVICE\" = { description = \"Background service\" } ```", "type": "object", "properties": { "application": { "description": "Application-level config.", "$ref": "#/definitions/AndroidApplicationConfig" }, "category": { "description": "App category. Overrides `bundle.category` for Android builds.", "type": [ "string", "null" ] }, "compile_sdk": { "description": "Compile SDK version.", "type": [ "integer", "null" ], "format": "uint32", "minimum": 0.0 }, "copyright": { "description": "Copyright notice. 
Overrides `bundle.copyright` for Android builds.", "type": [ "string", "null" ] }, "features": { "description": "Hardware/software features required.", "type": "array", "items": { "type": "string" } }, "foreground_service_types": { "description": "Foreground service types for background operations. Valid values: \"camera\", \"connectedDevice\", \"dataSync\", \"health\", \"location\", \"mediaPlayback\", \"mediaProjection\", \"microphone\", \"phoneCall\", \"remoteMessaging\", \"shortService\", \"specialUse\", \"systemExempted\"", "type": "array", "items": { "type": "string" } }, "gradle_dependencies": { "description": "Gradle dependencies to add.", "type": "array", "items": { "type": "string" } }, "gradle_plugins": { "description": "Gradle plugins to apply.", "type": "array", "items": { "type": "string" } }, "icon": { "description": "Icons for the app. Overrides `bundle.icon` for Android builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "identifier": { "description": "The app's identifier (e.g., \"com.example.myapp\"). Overrides `bundle.identifier` for Android builds.", "type": [ "string", "null" ] }, "intent_filters": { "description": "Intent filters for deep linking. These extend the unified `[deep_links]` configuration with Android-specific options.", "type": "array", "items": { "$ref": "#/definitions/AndroidIntentFilter" } }, "long_description": { "description": "Long description. 
Overrides `bundle.long_description` for Android builds.", "type": [ "string", "null" ] }, "manifest": { "description": "Path to custom AndroidManifest.xml to merge.", "type": [ "string", "null" ] }, "min_sdk": { "description": "Minimum SDK version.", "type": [ "integer", "null" ], "format": "uint32", "minimum": 0.0 }, "permissions": { "description": "Additional Android permissions not in unified config.", "type": "object", "additionalProperties": { "$ref": "#/definitions/RawPermission" } }, "proguard_rules": { "description": "ProGuard rule files.", "type": "array", "items": { "type": "string" } }, "publisher": { "description": "The app's publisher. Overrides `bundle.publisher` for Android builds.", "type": [ "string", "null" ] }, "queries": { "description": "Queries for package visibility (required for Android 11+). Specify packages or intents your app needs to query.", "$ref": "#/definitions/AndroidQueries" }, "raw": { "description": "Raw XML injection points.", "$ref": "#/definitions/AndroidRawConfig" }, "resources": { "description": "Additional resources to bundle. Overrides `bundle.resources` for Android builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "short_description": { "description": "Short description. Overrides `bundle.short_description` for Android builds.", "type": [ "string", "null" ] }, "signing": { "description": "Android signing configuration for release builds. This replaces the deprecated `[bundle.android]` section.", "anyOf": [ { "$ref": "#/definitions/AndroidSigningConfig" }, { "type": "null" } ] }, "target_sdk": { "description": "Target SDK version.", "type": [ "integer", "null" ], "format": "uint32", "minimum": 0.0 }, "url_schemes": { "description": "Additional URL schemes beyond unified `[deep_links]`.schemes. 
These are merged with the unified schemes.", "type": "array", "items": { "type": "string" } } } }, "AndroidIntentData": { "description": "Android intent data specification.", "type": "object", "properties": { "host": { "description": "Host (e.g., \"example.com\").", "type": [ "string", "null" ] }, "mime_type": { "description": "MIME type.", "type": [ "string", "null" ] }, "path": { "description": "Path (exact match).", "type": [ "string", "null" ] }, "path_pattern": { "description": "Path pattern (with wildcards).", "type": [ "string", "null" ] }, "path_prefix": { "description": "Path prefix.", "type": [ "string", "null" ] }, "port": { "description": "Port number.", "type": [ "string", "null" ] }, "scheme": { "description": "URL scheme (e.g., \"https\", \"myapp\").", "type": [ "string", "null" ] } } }, "AndroidIntentFilter": { "description": "Android intent filter for deep linking.", "type": "object", "properties": { "actions": { "description": "Actions (e.g., \"android.intent.action.VIEW\").", "type": "array", "items": { "type": "string" } }, "auto_verify": { "description": "Auto-verify for App Links (requires HTTPS and assetlinks.json).", "type": "boolean" }, "categories": { "description": "Categories (e.g., \"android.intent.category.DEFAULT\", \"android.intent.category.BROWSABLE\").", "type": "array", "items": { "type": "string" } }, "data": { "description": "Data specifications.", "type": "array", "items": { "$ref": "#/definitions/AndroidIntentData" } } } }, "AndroidQueries": { "description": "Android package visibility queries.", "type": "object", "properties": { "intents": { "description": "Intent actions to query.", "type": "array", "items": { "$ref": "#/definitions/AndroidQueryIntent" } }, "packages": { "description": "Package names to query.", "type": "array", "items": { "type": "string" } } } }, "AndroidQueryIntent": { "description": "Android query intent specification.", "type": "object", "required": [ "action" ], "properties": { "action": { 
"description": "Action (e.g., \"android.intent.action.SEND\").", "type": "string" }, "mime_type": { "description": "MIME type (e.g., \"text/plain\").", "type": [ "string", "null" ] }, "scheme": { "description": "Data scheme (e.g., \"mailto\").", "type": [ "string", "null" ] } } }, "AndroidRawConfig": { "type": "object", "properties": { "application": { "description": "Raw XML inside `` element.", "type": [ "string", "null" ] }, "application_attrs": { "description": "Raw attributes for `` element.", "type": [ "string", "null" ] }, "manifest": { "description": "Raw XML to inject into manifest (after permissions).", "type": [ "string", "null" ] } } }, "AndroidSettings": { "type": "object", "required": [ "jks_file", "jks_password", "key_alias", "key_password" ], "properties": { "jks_file": { "type": "string" }, "jks_password": { "type": "string" }, "key_alias": { "type": "string" }, "key_password": { "type": "string" } } }, "AndroidSigningConfig": { "description": "Android signing configuration for release builds.", "type": "object", "required": [ "jks_file", "jks_password", "key_alias", "key_password" ], "properties": { "jks_file": { "description": "Path to the Java keystore file.", "type": "string" }, "jks_password": { "description": "Password for the keystore.", "type": "string" }, "key_alias": { "description": "Alias of the key in the keystore.", "type": "string" }, "key_password": { "description": "Password for the key.", "type": "string" } } }, "ApplicationConfig": { "type": "object", "properties": { "android_main_activity": { "description": "Use this file for the MainActivity.kt associated with the Android app.", "type": [ "string", "null" ] }, "android_manifest": { "description": "Use this file for the AndroidManifest.xml associated with the Android app. 
`dx` will merge any required settings into this file required to build the app", "type": [ "string", "null" ] }, "android_min_sdk_version": { "description": "Specified minimum sdk version for gradle to build the app with.", "type": [ "integer", "null" ], "format": "uint32", "minimum": 0.0 }, "asset_dir": { "description": "The path where global assets will be added when components are added with `dx components add`", "type": [ "string", "null" ] }, "ios_entitlements": { "description": "Use this file for the entitlements.plist associated with the iOS app.", "type": [ "string", "null" ] }, "ios_info_plist": { "description": "Use this file for the info.plist associated with the iOS app. `dx` will merge any required settings into this file required to build the app", "type": [ "string", "null" ] }, "macos_entitlements": { "description": "Use this file for the entitlements.plist associated with the macOS app.", "type": [ "string", "null" ] }, "macos_info_plist": { "description": "Use this file for the info.plist associated with the macOS app. `dx` will merge any required settings into this file required to build the app", "type": [ "string", "null" ] }, "out_dir": { "type": [ "string", "null" ] }, "public_dir": { "type": [ "string", "null" ] }, "tailwind_input": { "type": [ "string", "null" ] }, "tailwind_output": { "type": [ "string", "null" ] } } }, "BackgroundConfig": { "description": "Unified background execution configuration.\n\nThis provides a cross-platform interface for background capabilities. Platform-specific overrides can be configured in `[ios]` and `[android]` sections.\n\nExample: ```toml [background] location = true audio = true fetch = true ```", "type": "object", "properties": { "audio": { "description": "Background audio playback. iOS: UIBackgroundModes \"audio\" Android: FOREGROUND_SERVICE_MEDIA_PLAYBACK", "type": "boolean" }, "bluetooth": { "description": "Bluetooth LE accessories. 
iOS: UIBackgroundModes \"bluetooth-central\" and \"bluetooth-peripheral\" Android: FOREGROUND_SERVICE_CONNECTED_DEVICE", "type": "boolean" }, "external-accessory": { "description": "External accessory communication. iOS: UIBackgroundModes \"external-accessory\"", "type": "boolean" }, "fetch": { "description": "Background data fetch. iOS: UIBackgroundModes \"fetch\" Android: WorkManager or foreground service", "type": "boolean" }, "location": { "description": "Background location updates. iOS: UIBackgroundModes \"location\" Android: ACCESS_BACKGROUND_LOCATION permission", "type": "boolean" }, "processing": { "description": "Background processing tasks. iOS: UIBackgroundModes \"processing\" Android: WorkManager", "type": "boolean" }, "remote-notifications": { "description": "Remote push notifications. iOS: UIBackgroundModes \"remote-notification\" Android: Firebase Cloud Messaging", "type": "boolean" }, "voip": { "description": "VoIP calls. iOS: UIBackgroundModes \"voip\" Android: FOREGROUND_SERVICE_PHONE_CALL", "type": "boolean" } } }, "BundleConfig": { "type": "object", "properties": { "android": { "anyOf": [ { "$ref": "#/definitions/AndroidSettings" }, { "type": "null" } ] }, "category": { "type": [ "string", "null" ] }, "copyright": { "type": [ "string", "null" ] }, "deb": { "anyOf": [ { "$ref": "#/definitions/DebianSettings" }, { "type": "null" } ] }, "external_bin": { "type": [ "array", "null" ], "items": { "type": "string" } }, "icon": { "type": [ "array", "null" ], "items": { "type": "string" } }, "identifier": { "type": [ "string", "null" ] }, "long_description": { "type": [ "string", "null" ] }, "macos": { "anyOf": [ { "$ref": "#/definitions/MacOsSettings" }, { "type": "null" } ] }, "publisher": { "type": [ "string", "null" ] }, "resources": { "type": [ "array", "null" ], "items": { "type": "string" } }, "short_description": { "type": [ "string", "null" ] }, "windows": { "anyOf": [ { "$ref": "#/definitions/WindowsSettings" }, { "type": "null" } ] } } }, 
"ComponentConfig": { "description": "Configuration for the `dioxus component` commands", "type": "object", "properties": { "components_dir": { "description": "The path where components are stored when adding or removing components", "type": [ "string", "null" ] }, "registry": { "description": "The component registry to default to when adding components", "$ref": "#/definitions/ComponentRegistry" } } }, "ComponentRegistry": { "description": "Arguments for a component registry Either a path to a local directory or a remote git repo (with optional rev)", "type": "object", "properties": { "git": { "description": "The url of the component registry", "type": [ "string", "null" ] }, "path": { "description": "The path to the components directory", "type": [ "string", "null" ] }, "rev": { "description": "The revision of the component registry", "type": [ "string", "null" ] } } }, "CustomSignCommandSettings": { "type": "object", "required": [ "args", "cmd" ], "properties": { "args": { "description": "The arguments to pass to the command.\n\n\"%1\" will be replaced with the path to the binary to be signed.", "type": "array", "items": { "type": "string" } }, "cmd": { "description": "The command to run to sign the binary.", "type": "string" } } }, "DebianSettings": { "type": "object", "properties": { "changelog": { "description": "Path of the uncompressed Changelog file, to be stored at /usr/share/doc/package-name/changelog.gz. 
See ", "type": [ "string", "null" ] }, "conflicts": { "description": "the list of package conflicts.", "type": [ "array", "null" ], "items": { "type": "string" } }, "depends": { "description": "the list of debian dependencies.", "type": [ "array", "null" ], "items": { "type": "string" } }, "desktop_template": { "description": "Path to a custom desktop file Handlebars template.\n\nAvailable variables: `categories`, `comment` (optional), `exec`, `icon` and `name`.", "type": [ "string", "null" ] }, "files": { "description": "List of custom files to add to the deb package. Maps the path on the debian package to the path of the file to include (relative to the current working directory).", "type": "object", "additionalProperties": { "type": "string" } }, "post_install_script": { "description": "Path to script that will be executed after the package is unpacked. See ", "type": [ "string", "null" ] }, "post_remove_script": { "description": "Path to script that will be executed after the package is removed. See ", "type": [ "string", "null" ] }, "pre_install_script": { "description": "Path to script that will be executed before the package is unpacked. See ", "type": [ "string", "null" ] }, "pre_remove_script": { "description": "Path to script that will be executed before the package is removed. See ", "type": [ "string", "null" ] }, "priority": { "description": "Change the priority of the Debian Package. By default, it is set to `optional`. 
Recognized Priorities as of now are : `required`, `important`, `standard`, `optional`, `extra`", "type": [ "string", "null" ] }, "provides": { "description": "the list of dependencies the package provides.", "type": [ "array", "null" ], "items": { "type": "string" } }, "recommends": { "description": "the list of recommended debian dependencies.", "type": [ "array", "null" ], "items": { "type": "string" } }, "replaces": { "description": "the list of package replaces.", "type": [ "array", "null" ], "items": { "type": "string" } }, "section": { "description": "Define the section in Debian Control file. See : ", "type": [ "string", "null" ] } } }, "DeepLinkConfig": { "description": "Unified deep linking configuration.\n\nThis provides a cross-platform interface for URL schemes and universal/app links. Platform-specific overrides can be configured in `[ios]` and `[android]` sections.\n\nExample: ```toml [deep_links] schemes = [\"myapp\", \"com.example.myapp\"] hosts = [\"example.com\", \"*.example.com\"] ```", "type": "object", "properties": { "hosts": { "description": "Universal link / App link hosts (e.g., \"example.com\"). Maps to Associated Domains on iOS and App Links on Android. Supports wildcards like \"*.example.com\".", "type": "array", "items": { "type": "string" } }, "paths": { "description": "Path patterns for universal/app links (e.g., \"/app/*\", \"/share/*\"). If empty, all paths are matched.", "type": "array", "items": { "type": "string" } }, "schemes": { "description": "Custom URL schemes (e.g., \"myapp\" for myapp://path). 
Maps to CFBundleURLSchemes on iOS/macOS and intent-filter on Android.", "type": "array", "items": { "type": "string" } } } }, "IosConfig": { "description": "iOS-specific configuration.\n\nExample: ```toml [ios] deployment_target = \"15.0\" identifier = \"com.example.myapp.ios\" # Override bundle.identifier for iOS\n\n[ios.entitlements] app-groups = [\"group.com.example.app\"]\n\n[ios.plist] UIBackgroundModes = [\"location\", \"fetch\"] ```", "type": "object", "properties": { "background_modes": { "description": "Additional background modes beyond unified `[background]`. Valid values: \"audio\", \"location\", \"voip\", \"fetch\", \"remote-notification\", \"newsstand-content\", \"external-accessory\", \"bluetooth-central\", \"bluetooth-peripheral\", \"processing\"", "type": "array", "items": { "type": "string" } }, "category": { "description": "App category. Overrides `bundle.category` for iOS builds.", "type": [ "string", "null" ] }, "copyright": { "description": "Copyright notice. Overrides `bundle.copyright` for iOS builds.", "type": [ "string", "null" ] }, "deployment_target": { "description": "Minimum iOS deployment target (e.g., \"15.0\").", "type": [ "string", "null" ] }, "document_types": { "description": "Document types the app can open.", "type": "array", "items": { "$ref": "#/definitions/IosDocumentType" } }, "entitlements": { "description": "iOS entitlements configuration.", "$ref": "#/definitions/IosEntitlements" }, "exported_type_identifiers": { "description": "Exported type identifiers (custom UTIs).", "type": "array", "items": { "$ref": "#/definitions/IosTypeIdentifier" } }, "icon": { "description": "Icons for the app. Overrides `bundle.icon` for iOS builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "identifier": { "description": "The app's identifier (e.g., \"com.example.myapp\"). 
Overrides `bundle.identifier` for iOS builds.", "type": [ "string", "null" ] }, "imported_type_identifiers": { "description": "Imported type identifiers.", "type": "array", "items": { "$ref": "#/definitions/IosTypeIdentifier" } }, "info_plist": { "description": "Path to custom Info.plist to merge with generated.", "type": [ "string", "null" ] }, "long_description": { "description": "Long description. Overrides `bundle.long_description` for iOS builds.", "type": [ "string", "null" ] }, "plist": { "description": "Additional Info.plist keys to merge.", "type": "object", "additionalProperties": true }, "publisher": { "description": "The app's publisher. Overrides `bundle.publisher` for iOS builds.", "type": [ "string", "null" ] }, "raw": { "description": "Raw XML injection points.", "$ref": "#/definitions/IosRawConfig" }, "resources": { "description": "Additional resources to bundle. Overrides `bundle.resources` for iOS builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "short_description": { "description": "Short description. Overrides `bundle.short_description` for iOS builds.", "type": [ "string", "null" ] }, "url_schemes": { "description": "Additional URL schemes beyond unified `[deep_links]`.schemes. These are merged with the unified schemes.", "type": "array", "items": { "type": "string" } }, "widget_extensions": { "description": "Widget extensions to compile and bundle. 
Each entry defines a Swift-based widget extension (.appex) that will be compiled and installed into the app's PlugIns folder.", "type": "array", "items": { "$ref": "#/definitions/WidgetExtensionConfig" } } } }, "IosDocumentType": { "description": "iOS document type declaration.", "type": "object", "required": [ "name" ], "properties": { "extensions": { "description": "File extensions (e.g., [\"txt\", \"md\"]).", "type": "array", "items": { "type": "string" } }, "icon": { "description": "Icon file name.", "type": [ "string", "null" ] }, "mime_types": { "description": "MIME types.", "type": "array", "items": { "type": "string" } }, "name": { "description": "Document type name.", "type": "string" }, "role": { "description": "Role: \"Editor\", \"Viewer\", \"Shell\", or \"None\".", "type": [ "string", "null" ] }, "types": { "description": "UTI types.", "type": "array", "items": { "type": "string" } } } }, "IosEntitlements": { "type": "object", "properties": { "app-groups": { "description": "App groups for shared data.", "type": "array", "items": { "type": "string" } }, "apple-pay": { "description": "Enable Apple Pay.", "type": "boolean" }, "aps-environment": { "description": "Push notification environment: \"development\" or \"production\".", "type": [ "string", "null" ] }, "associated-domains": { "description": "Associated domains for universal links.", "type": "array", "items": { "type": "string" } }, "healthkit": { "description": "Enable HealthKit.", "type": "boolean" }, "homekit": { "description": "Enable HomeKit.", "type": "boolean" }, "icloud": { "description": "Enable iCloud container support.", "type": "boolean" }, "keychain-access-groups": { "description": "Keychain access groups.", "type": "array", "items": { "type": "string" } } }, "additionalProperties": true }, "IosRawConfig": { "type": "object", "properties": { "entitlements": { "description": "Raw XML to inject into entitlements.plist.", "type": [ "string", "null" ] }, "info_plist": { "description": "Raw 
XML to inject into Info.plist.", "type": [ "string", "null" ] } } }, "IosTypeIdentifier": { "description": "iOS Uniform Type Identifier declaration.", "type": "object", "required": [ "identifier" ], "properties": { "conforms_to": { "description": "Conforms to these UTIs.", "type": "array", "items": { "type": "string" } }, "description": { "description": "Human-readable description.", "type": [ "string", "null" ] }, "extensions": { "description": "File extensions.", "type": "array", "items": { "type": "string" } }, "identifier": { "description": "UTI identifier (e.g., \"com.example.myformat\").", "type": "string" }, "mime_types": { "description": "MIME types.", "type": "array", "items": { "type": "string" } } } }, "LinuxConfig": { "description": "Linux-specific configuration.\n\nExample: ```toml [linux] identifier = \"com.example.myapp.linux\" # Override bundle.identifier for Linux categories = [\"Utility\"]\n\n# Debian package settings (previously in [bundle.deb]) [linux.deb] depends = [\"libwebkit2gtk-4.0-37\"] section = \"utils\" ```", "type": "object", "properties": { "categories": { "description": "Desktop entry categories.", "type": "array", "items": { "type": "string" } }, "category": { "description": "App category. Overrides `bundle.category` for Linux builds.", "type": [ "string", "null" ] }, "copyright": { "description": "Copyright notice. Overrides `bundle.copyright` for Linux builds.", "type": [ "string", "null" ] }, "dbus_access": { "description": "D-Bus interfaces to access.", "type": "array", "items": { "type": "string" } }, "deb": { "description": "Debian-specific package settings.", "anyOf": [ { "$ref": "#/definitions/LinuxDebSettings" }, { "type": "null" } ] }, "flatpak_permissions": { "description": "Flatpak sandbox permissions.", "type": "array", "items": { "type": "string" } }, "icon": { "description": "Icons for the app. 
Overrides `bundle.icon` for Linux builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "identifier": { "description": "The app's identifier (e.g., \"com.example.myapp\"). Overrides `bundle.identifier` for Linux builds.", "type": [ "string", "null" ] }, "keywords": { "description": "Desktop entry keywords.", "type": "array", "items": { "type": "string" } }, "long_description": { "description": "Long description. Overrides `bundle.long_description` for Linux builds.", "type": [ "string", "null" ] }, "mime_types": { "description": "MIME types the app can handle.", "type": "array", "items": { "type": "string" } }, "publisher": { "description": "The app's publisher. Overrides `bundle.publisher` for Linux builds.", "type": [ "string", "null" ] }, "resources": { "description": "Additional resources to bundle. Overrides `bundle.resources` for Linux builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "short_description": { "description": "Short description. Overrides `bundle.short_description` for Linux builds.", "type": [ "string", "null" ] } } }, "LinuxDebSettings": { "description": "Debian package settings.", "type": "object", "properties": { "changelog": { "description": "Path to changelog file.", "type": [ "string", "null" ] }, "conflicts": { "description": "Package conflicts.", "type": [ "array", "null" ], "items": { "type": "string" } }, "depends": { "description": "Package dependencies.", "type": [ "array", "null" ], "items": { "type": "string" } }, "desktop_template": { "description": "Path to custom desktop template.", "type": [ "string", "null" ] }, "files": { "description": "Additional files to include. 
Maps package path to source path.", "type": "object", "additionalProperties": { "type": "string" } }, "post_install_script": { "description": "Post-install script path.", "type": [ "string", "null" ] }, "post_remove_script": { "description": "Post-remove script path.", "type": [ "string", "null" ] }, "pre_install_script": { "description": "Pre-install script path.", "type": [ "string", "null" ] }, "pre_remove_script": { "description": "Pre-remove script path.", "type": [ "string", "null" ] }, "priority": { "description": "Package priority (\"required\", \"important\", \"standard\", \"optional\", \"extra\").", "type": [ "string", "null" ] }, "provides": { "description": "Packages this provides.", "type": [ "array", "null" ], "items": { "type": "string" } }, "recommends": { "description": "Recommended packages.", "type": [ "array", "null" ], "items": { "type": "string" } }, "replaces": { "description": "Packages this replaces.", "type": [ "array", "null" ], "items": { "type": "string" } }, "section": { "description": "Debian section (e.g., \"utils\", \"web\").", "type": [ "string", "null" ] } } }, "LocationPermission": { "description": "Location permission with precision control.", "type": "object", "required": [ "description" ], "properties": { "description": { "description": "User-facing description shown in permission dialogs.", "type": "string" }, "precision": { "description": "Precision level: \"fine\" (GPS) or \"coarse\" (network-based).", "$ref": "#/definitions/LocationPrecision" } } }, "LocationPrecision": { "type": "string", "enum": [ "fine", "coarse" ] }, "MacOsSettings": { "type": "object", "properties": { "bundle_name": { "type": [ "string", "null" ] }, "bundle_version": { "type": [ "string", "null" ] }, "entitlements": { "type": [ "string", "null" ] }, "exception_domain": { "type": [ "string", "null" ] }, "files": { "description": "List of custom files to add to the application bundle. 
Maps the path in the Contents directory in the app to the path of the file to include (relative to the current working directory).", "type": "object", "additionalProperties": { "type": "string" } }, "frameworks": { "type": [ "array", "null" ], "items": { "type": "string" } }, "hardened_runtime": { "description": "Preserve the hardened runtime version flag, see \n\nSettings this to `false` is useful when using an ad-hoc signature, making it less strict.", "type": "boolean" }, "info_plist_path": { "type": [ "string", "null" ] }, "license": { "type": [ "string", "null" ] }, "minimum_system_version": { "type": [ "string", "null" ] }, "provider_short_name": { "type": [ "string", "null" ] }, "signing_identity": { "type": [ "string", "null" ] } } }, "MacosConfig": { "description": "macOS-specific configuration.\n\nExample: ```toml [macos] minimum_system_version = \"11.0\" identifier = \"com.example.myapp.macos\" # Override bundle.identifier for macOS\n\n# macOS signing (previously in [bundle.macos]) signing_identity = \"Developer ID Application: My Company\" provider_short_name = \"MYCOMPANY\" ```", "type": "object", "properties": { "bundle_name": { "description": "The bundle short version string (CFBundleShortVersionString).", "type": [ "string", "null" ] }, "bundle_version": { "description": "The bundle version string (CFBundleVersion).", "type": [ "string", "null" ] }, "category": { "description": "App category for the Mac App Store. E.g., \"public.app-category.productivity\"", "type": [ "string", "null" ] }, "copyright": { "description": "Copyright notice. 
Overrides `bundle.copyright` for macOS builds.", "type": [ "string", "null" ] }, "document_types": { "description": "Document types the app can open (uses same format as iOS).", "type": "array", "items": { "$ref": "#/definitions/IosDocumentType" } }, "entitlements": { "description": "macOS entitlements.", "$ref": "#/definitions/MacosEntitlements" }, "entitlements_file": { "description": "Path to custom entitlements file for code signing. This overrides the generated entitlements.", "type": [ "string", "null" ] }, "exception_domain": { "description": "Exception domain for App Transport Security.", "type": [ "string", "null" ] }, "exported_type_identifiers": { "description": "Exported type identifiers (custom UTIs).", "type": "array", "items": { "$ref": "#/definitions/IosTypeIdentifier" } }, "files": { "description": "Additional files to include in the app bundle. Maps the path in the Contents directory to the source file path.", "type": "object", "additionalProperties": { "type": "string" } }, "frameworks": { "description": "Frameworks to embed.", "type": "array", "items": { "type": "string" } }, "hardened_runtime": { "description": "Preserve the hardened runtime version flag. Setting this to false is useful when using an ad-hoc signature.", "type": [ "boolean", "null" ] }, "icon": { "description": "Icons for the app. Overrides `bundle.icon` for macOS builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "identifier": { "description": "The app's identifier (e.g., \"com.example.myapp\"). 
Overrides `bundle.identifier` for macOS builds.", "type": [ "string", "null" ] }, "imported_type_identifiers": { "description": "Imported type identifiers.", "type": "array", "items": { "$ref": "#/definitions/IosTypeIdentifier" } }, "info_plist": { "description": "Path to custom Info.plist.", "type": [ "string", "null" ] }, "license": { "description": "License file to include in DMG.", "type": [ "string", "null" ] }, "long_description": { "description": "Long description. Overrides `bundle.long_description` for macOS builds.", "type": [ "string", "null" ] }, "minimum_system_version": { "description": "Minimum macOS version (e.g., \"11.0\").", "type": [ "string", "null" ] }, "plist": { "description": "Additional Info.plist keys.", "type": "object", "additionalProperties": true }, "provider_short_name": { "description": "The provider short name for notarization.", "type": [ "string", "null" ] }, "publisher": { "description": "The app's publisher. Overrides `bundle.publisher` for macOS builds.", "type": [ "string", "null" ] }, "raw": { "description": "Raw injection points.", "$ref": "#/definitions/MacosRawConfig" }, "resources": { "description": "Additional resources to bundle. Overrides `bundle.resources` for macOS builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "short_description": { "description": "Short description. Overrides `bundle.short_description` for macOS builds.", "type": [ "string", "null" ] }, "signing_identity": { "description": "The signing identity to use for code signing. E.g., \"Developer ID Application: My Company (TEAMID)\"", "type": [ "string", "null" ] }, "url_schemes": { "description": "Additional URL schemes beyond unified `[deep_links]`.schemes. 
These are merged with the unified schemes.", "type": "array", "items": { "type": "string" } } } }, "MacosEntitlements": { "type": "object", "properties": { "addressbook": { "description": "Address book access.", "type": [ "boolean", "null" ] }, "allow-jit": { "description": "Allow JIT.", "type": [ "boolean", "null" ] }, "allow-unsigned-executable-memory": { "description": "Allow unsigned executable memory.", "type": [ "boolean", "null" ] }, "app-sandbox": { "description": "Enable App Sandbox.", "type": [ "boolean", "null" ] }, "bluetooth": { "description": "Bluetooth access.", "type": [ "boolean", "null" ] }, "calendars": { "description": "Calendars access.", "type": [ "boolean", "null" ] }, "camera": { "description": "Camera access.", "type": [ "boolean", "null" ] }, "disable-library-validation": { "description": "Disable library validation.", "type": [ "boolean", "null" ] }, "files-user-selected": { "description": "User-selected file access (read-write).", "type": [ "boolean", "null" ] }, "files-user-selected-readonly": { "description": "User-selected file access (read-only).", "type": [ "boolean", "null" ] }, "location": { "description": "Location services.", "type": [ "boolean", "null" ] }, "microphone": { "description": "Microphone access.", "type": [ "boolean", "null" ] }, "network-client": { "description": "Outgoing network connections.", "type": [ "boolean", "null" ] }, "network-server": { "description": "Incoming network connections.", "type": [ "boolean", "null" ] }, "print": { "description": "Printing.", "type": [ "boolean", "null" ] }, "usb": { "description": "USB access.", "type": [ "boolean", "null" ] } }, "additionalProperties": true }, "MacosRawConfig": { "type": "object", "properties": { "entitlements": { "description": "Raw XML to inject into entitlements.plist.", "type": [ "string", "null" ] }, "info_plist": { "description": "Raw XML to inject into Info.plist.", "type": [ "string", "null" ] } } }, "NSISInstallerMode": { "type": "string", "enum": 
[ "CurrentUser", "PerMachine", "Both" ] }, "NsisSettings": { "type": "object", "properties": { "custom_language_files": { "type": [ "object", "null" ], "additionalProperties": { "type": "string" } }, "display_language_selector": { "type": "boolean" }, "header_image": { "type": [ "string", "null" ] }, "install_mode": { "$ref": "#/definitions/NSISInstallerMode" }, "installer_hooks": { "type": [ "string", "null" ] }, "installer_icon": { "type": [ "string", "null" ] }, "languages": { "type": [ "array", "null" ], "items": { "type": "string" } }, "license": { "type": [ "string", "null" ] }, "minimum_webview2_version": { "description": "Try to ensure that the WebView2 version is equal to or newer than this version, if the user's WebView2 is older than this version, the installer will try to trigger a WebView2 update.", "type": [ "string", "null" ] }, "sidebar_image": { "type": [ "string", "null" ] }, "start_menu_folder": { "type": [ "string", "null" ] }, "template": { "type": [ "string", "null" ] } } }, "PermissionsConfig": { "description": "Unified permission configuration that maps to platform-specific identifiers.\n\nExample: ```toml [permissions] location = { precision = \"fine\", description = \"Track your runs\" } camera = { description = \"Take photos for your profile\" } ```", "type": "object", "properties": { "background-location": { "description": "Background location updates.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "biometrics": { "description": "Biometric authentication (Face ID, fingerprint).", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "bluetooth": { "description": "Bluetooth connectivity.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "calendar": { "description": "Calendar access.", "anyOf": [ { "$ref": "#/definitions/StoragePermission" }, { "type": "null" } ] }, "camera": { "description": "Camera access permission.", "anyOf": [ { "$ref": 
"#/definitions/SimplePermission" }, { "type": "null" } ] }, "contacts": { "description": "Contacts access.", "anyOf": [ { "$ref": "#/definitions/StoragePermission" }, { "type": "null" } ] }, "health": { "description": "Health data access.", "anyOf": [ { "$ref": "#/definitions/StoragePermission" }, { "type": "null" } ] }, "homekit": { "description": "HomeKit integration (iOS only).", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "local-network": { "description": "Local network access.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "location": { "description": "Location permission with precision level. Maps to ACCESS_FINE_LOCATION/ACCESS_COARSE_LOCATION on Android, NSLocationWhenInUseUsageDescription on iOS/macOS.", "anyOf": [ { "$ref": "#/definitions/LocationPermission" }, { "type": "null" } ] }, "media-library": { "description": "Media library access.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "microphone": { "description": "Microphone access permission.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "motion": { "description": "Motion and fitness data.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "nearby-wifi": { "description": "Nearby Wi-Fi devices (Android).", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "nfc": { "description": "NFC access.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "notifications": { "description": "Push notifications permission.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "photos": { "description": "Photo library access.", "anyOf": [ { "$ref": "#/definitions/StoragePermission" }, { "type": "null" } ] }, "siri": { "description": "Siri integration (iOS only).", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] }, "speech": { 
"description": "Speech recognition.", "anyOf": [ { "$ref": "#/definitions/SimplePermission" }, { "type": "null" } ] } } }, "RawPermission": { "description": "Raw platform permission entry.", "type": "object", "required": [ "description" ], "properties": { "description": { "type": "string" } } }, "SimplePermission": { "description": "Simple permission with just a description.", "type": "object", "required": [ "description" ], "properties": { "description": { "description": "User-facing description shown in permission dialogs.", "type": "string" } } }, "StorageAccess": { "type": "string", "enum": [ "read", "write", "read-write" ] }, "StoragePermission": { "description": "Storage permission with access level control.", "type": "object", "required": [ "description" ], "properties": { "access": { "description": "Access level: \"read\", \"write\", or \"read-write\".", "$ref": "#/definitions/StorageAccess" }, "description": { "description": "User-facing description shown in permission dialogs.", "type": "string" } } }, "WasmOptConfig": { "description": "The wasm-opt configuration", "type": "object", "properties": { "debug": { "description": "Keep debug symbols in the wasm file", "type": "boolean" }, "extra_features": { "description": "Extra arguments to pass to wasm-opt\n\nFor example, to enable simd, you can set this to `[\"--enable-simd\"]`.\n\nYou can also disable features by prefixing them with `--disable-`, e.g. 
`[\"--disable-bulk-memory\"]`.\n\nCurrently only --enable and --disable flags are supported.", "type": "array", "items": { "type": "string" } }, "keep_names": { "description": "Keep the wasm name section, useful for profiling and debugging\n\nUnlike `debug` which preserves DWARF debug symbols (requiring a browser extension to read), the name section allows tools like `console_error_panic_hook` to print backtraces with human-readable function names without any browser extension.", "type": "boolean" }, "level": { "description": "The wasm-opt level to use for release builds [default: s] Options: - z: optimize aggressively for size - s: optimize for size - 1: optimize for speed - 2: optimize more for speed - 3: optimize even more for speed - 4: optimize aggressively for speed", "$ref": "#/definitions/WasmOptLevel" }, "memory_packing": { "description": "Enable memory packing", "type": "boolean" } } }, "WasmOptLevel": { "description": "The wasm-opt level to use for release web builds [default: Z]", "oneOf": [ { "description": "Optimize aggressively for size", "type": "string", "enum": [ "z" ] }, { "description": "Optimize for size", "type": "string", "enum": [ "s" ] }, { "description": "Don't optimize", "type": "string", "enum": [ "0" ] }, { "description": "Optimize for speed", "type": "string", "enum": [ "1" ] }, { "description": "Optimize more for speed", "type": "string", "enum": [ "2" ] }, { "description": "Optimize even more for speed", "type": "string", "enum": [ "3" ] }, { "description": "Optimize aggressively for speed", "type": "string", "enum": [ "4" ] } ] }, "WebAppConfig": { "type": "object", "properties": { "base_path": { "type": [ "string", "null" ] }, "title": { "type": "string" } } }, "WebConfig": { "type": "object", "properties": { "app": { "$ref": "#/definitions/WebAppConfig" }, "https": { "$ref": "#/definitions/WebHttpsConfig" }, "pre_compress": { "description": "Whether to enable pre-compression of assets and wasm during a web build in 
release mode", "type": "boolean" }, "proxy": { "type": "array", "items": { "$ref": "#/definitions/WebProxyConfig" } }, "resource": { "$ref": "#/definitions/WebResourceConfig" }, "wasm_opt": { "description": "The wasm-opt configuration", "$ref": "#/definitions/WasmOptConfig" }, "watcher": { "$ref": "#/definitions/WebWatcherConfig" } } }, "WebDevResourceConfig": { "type": "object", "properties": { "script": { "type": "array", "items": { "type": "string" } }, "style": { "type": "array", "items": { "type": "string" } } } }, "WebHttpsConfig": { "type": "object", "properties": { "cert_path": { "type": [ "string", "null" ] }, "enabled": { "type": [ "boolean", "null" ] }, "key_path": { "type": [ "string", "null" ] }, "mkcert": { "type": [ "boolean", "null" ] } } }, "WebProxyConfig": { "type": "object", "required": [ "backend" ], "properties": { "backend": { "type": "string" } } }, "WebResourceConfig": { "type": "object", "required": [ "dev" ], "properties": { "dev": { "$ref": "#/definitions/WebDevResourceConfig" }, "script": { "type": [ "array", "null" ], "items": { "type": "string" } }, "style": { "type": [ "array", "null" ], "items": { "type": "string" } } } }, "WebWatcherConfig": { "type": "object", "properties": { "index_on_404": { "type": "boolean" }, "reload_html": { "type": "boolean" }, "watch_path": { "type": "array", "items": { "type": "string" } } } }, "WebviewInstallMode": { "oneOf": [ { "type": "string", "enum": [ "Skip" ] }, { "type": "object", "required": [ "DownloadBootstrapper" ], "properties": { "DownloadBootstrapper": { "type": "object", "required": [ "silent" ], "properties": { "silent": { "type": "boolean" } } } }, "additionalProperties": false }, { "type": "object", "required": [ "EmbedBootstrapper" ], "properties": { "EmbedBootstrapper": { "type": "object", "required": [ "silent" ], "properties": { "silent": { "type": "boolean" } } } }, "additionalProperties": false }, { "type": "object", "required": [ "OfflineInstaller" ], "properties": { 
"OfflineInstaller": { "type": "object", "required": [ "silent" ], "properties": { "silent": { "type": "boolean" } } } }, "additionalProperties": false }, { "type": "object", "required": [ "FixedRuntime" ], "properties": { "FixedRuntime": { "type": "object", "required": [ "path" ], "properties": { "path": { "type": "string" } } } }, "additionalProperties": false } ] }, "WidgetExtensionConfig": { "description": "Configuration for an iOS Widget Extension.\n\nWidget extensions are compiled as Swift executables and bundled as .appex bundles in the app's PlugIns folder.\n\nExample in Dioxus.toml: ```toml [[ios.widget_extensions]] source = \"src/ios/widget\" display_name = \"Location Widget\" bundle_id_suffix = \"location-widget\" deployment_target = \"16.2\" module_name = \"GeolocationPlugin\" ```", "type": "object", "required": [ "bundle_id_suffix", "display_name", "module_name", "source" ], "properties": { "bundle_id_suffix": { "description": "Bundle ID suffix appended to the app's bundle identifier. For example, if the app is \"com.example.app\" and suffix is \"location-widget\", the widget bundle ID will be \"com.example.app.location-widget\".", "type": "string" }, "deployment_target": { "description": "Minimum deployment target (e.g., \"16.2\"). Defaults to the app's iOS deployment target if not specified.", "type": [ "string", "null" ] }, "display_name": { "description": "Display name for the widget (shown in system UI).", "type": "string" }, "module_name": { "description": "Swift module name for the widget. 
This MUST match the module name used by the main app's Swift plugin for ActivityKit type matching to work.", "type": "string" }, "source": { "description": "Path to the Swift package source directory (relative to project root).", "type": "string" } } }, "WindowsConfig": { "description": "Windows-specific configuration.\n\nExample: ```toml [windows] identifier = \"com.example.myapp.windows\" # Override bundle.identifier for Windows\n\n# Windows installer settings (previously in [bundle.windows]) [windows.nsis] install_mode = \"PerMachine\"\n\n[windows.wix] language = [[\"en-US\", null]] ```", "type": "object", "properties": { "allow_downgrades": { "description": "Allow downgrades when installing.", "type": [ "boolean", "null" ] }, "capabilities": { "description": "UWP/MSIX capabilities.", "type": "array", "items": { "type": "string" } }, "category": { "description": "App category. Overrides `bundle.category` for Windows builds.", "type": [ "string", "null" ] }, "certificate_thumbprint": { "description": "Certificate thumbprint for code signing.", "type": [ "string", "null" ] }, "copyright": { "description": "Copyright notice. Overrides `bundle.copyright` for Windows builds.", "type": [ "string", "null" ] }, "device_capabilities": { "description": "Device capabilities.", "type": "array", "items": { "type": "string" } }, "digest_algorithm": { "description": "Digest algorithm for code signing.", "type": [ "string", "null" ] }, "icon": { "description": "Icons for the app. Overrides `bundle.icon` for Windows builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "icon_path": { "description": "Path to custom Windows icon.", "type": [ "string", "null" ] }, "identifier": { "description": "The app's identifier (e.g., \"com.example.myapp\"). Overrides `bundle.identifier` for Windows builds.", "type": [ "string", "null" ] }, "long_description": { "description": "Long description. 
Overrides `bundle.long_description` for Windows builds.", "type": [ "string", "null" ] }, "nsis": { "description": "NSIS installer settings.", "anyOf": [ { "$ref": "#/definitions/WindowsNsisSettings" }, { "type": "null" } ] }, "publisher": { "description": "The app's publisher. Overrides `bundle.publisher` for Windows builds.", "type": [ "string", "null" ] }, "resources": { "description": "Additional resources to bundle. Overrides `bundle.resources` for Windows builds.", "type": [ "array", "null" ], "items": { "type": "string" } }, "restricted_capabilities": { "description": "Restricted capabilities.", "type": "array", "items": { "type": "string" } }, "short_description": { "description": "Short description. Overrides `bundle.short_description` for Windows builds.", "type": [ "string", "null" ] }, "sign_command": { "description": "Custom sign command.", "anyOf": [ { "$ref": "#/definitions/WindowsSignCommand" }, { "type": "null" } ] }, "timestamp_url": { "description": "Timestamp server URL for code signing.", "type": [ "string", "null" ] }, "tsp": { "description": "Use TSP (RFC 3161) timestamp.", "type": [ "boolean", "null" ] }, "webview_install_mode": { "description": "WebView2 installation mode.", "anyOf": [ { "$ref": "#/definitions/WindowsWebviewInstallMode" }, { "type": "null" } ] }, "wix": { "description": "WiX installer settings.", "anyOf": [ { "$ref": "#/definitions/WindowsWixSettings" }, { "type": "null" } ] } } }, "WindowsNsisSettings": { "description": "NSIS installer settings.", "type": "object", "properties": { "custom_language_files": { "description": "Custom language files.", "type": [ "object", "null" ], "additionalProperties": { "type": "string" } }, "display_language_selector": { "description": "Display language selector.", "type": [ "boolean", "null" ] }, "header_image": { "description": "Header image path.", "type": [ "string", "null" ] }, "install_mode": { "description": "Installation mode: \"CurrentUser\", \"PerMachine\", or \"Both\".", "type": 
[ "string", "null" ] }, "installer_hooks": { "description": "Installer hooks script path.", "type": [ "string", "null" ] }, "installer_icon": { "description": "Installer icon path.", "type": [ "string", "null" ] }, "languages": { "description": "Languages to include.", "type": [ "array", "null" ], "items": { "type": "string" } }, "license": { "description": "License file path.", "type": [ "string", "null" ] }, "minimum_webview2_version": { "description": "Minimum WebView2 version required.", "type": [ "string", "null" ] }, "sidebar_image": { "description": "Sidebar image path.", "type": [ "string", "null" ] }, "start_menu_folder": { "description": "Start menu folder name.", "type": [ "string", "null" ] }, "template": { "description": "Path to custom NSIS template.", "type": [ "string", "null" ] } } }, "WindowsSettings": { "type": "object", "properties": { "allow_downgrades": { "type": "boolean" }, "certificate_thumbprint": { "type": [ "string", "null" ] }, "digest_algorithm": { "type": [ "string", "null" ] }, "icon_path": { "type": [ "string", "null" ] }, "nsis": { "anyOf": [ { "$ref": "#/definitions/NsisSettings" }, { "type": "null" } ] }, "sign_command": { "description": "Specify a custom command to sign the binaries. 
This command needs to have a `%1` in it which is just a placeholder for the binary path, which we will detect and replace before calling the command.\n\nExample: ```text sign-cli --arg1 --arg2 %1 ```\n\nBy Default we use `signtool.exe` which can be found only on Windows so if you are on another platform and want to cross-compile and sign you will need to use another tool like `osslsigncode`.", "anyOf": [ { "$ref": "#/definitions/CustomSignCommandSettings" }, { "type": "null" } ] }, "timestamp_url": { "type": [ "string", "null" ] }, "tsp": { "type": "boolean" }, "webview_fixed_runtime_path": { "type": [ "string", "null" ] }, "webview_install_mode": { "$ref": "#/definitions/WebviewInstallMode" }, "wix": { "anyOf": [ { "$ref": "#/definitions/WixSettings" }, { "type": "null" } ] } } }, "WindowsSignCommand": { "description": "Custom sign command for Windows code signing.", "type": "object", "required": [ "args", "cmd" ], "properties": { "args": { "description": "Command arguments. Use \"%1\" as placeholder for binary path.", "type": "array", "items": { "type": "string" } }, "cmd": { "description": "The command to run.", "type": "string" } } }, "WindowsWebviewInstallMode": { "description": "WebView2 installation mode.", "oneOf": [ { "description": "Skip WebView2 installation.", "type": "object", "required": [ "type" ], "properties": { "type": { "type": "string", "enum": [ "Skip" ] } } }, { "description": "Download bootstrapper.", "type": "object", "required": [ "type" ], "properties": { "silent": { "type": "boolean" }, "type": { "type": "string", "enum": [ "DownloadBootstrapper" ] } } }, { "description": "Embed bootstrapper.", "type": "object", "required": [ "type" ], "properties": { "silent": { "type": "boolean" }, "type": { "type": "string", "enum": [ "EmbedBootstrapper" ] } } }, { "description": "Use offline installer.", "type": "object", "required": [ "type" ], "properties": { "silent": { "type": "boolean" }, "type": { "type": "string", "enum": [ "OfflineInstaller" ] 
} } }, { "description": "Use fixed runtime from path.", "type": "object", "required": [ "path", "type" ], "properties": { "path": { "type": "string" }, "type": { "type": "string", "enum": [ "FixedRuntime" ] } } } ] }, "WindowsWixSettings": { "description": "WiX installer settings.", "type": "object", "properties": { "banner_path": { "description": "Banner image path.", "type": [ "string", "null" ] }, "component_group_refs": { "description": "Component group references.", "type": "array", "items": { "type": "string" } }, "component_refs": { "description": "Component references.", "type": "array", "items": { "type": "string" } }, "dialog_image_path": { "description": "Dialog image path.", "type": [ "string", "null" ] }, "enable_elevated_update_task": { "description": "Enable elevated update task.", "type": [ "boolean", "null" ] }, "feature_group_refs": { "description": "Feature group references.", "type": "array", "items": { "type": "string" } }, "feature_refs": { "description": "Feature references.", "type": "array", "items": { "type": "string" } }, "fips_compliant": { "description": "FIPS compliant mode.", "type": [ "boolean", "null" ] }, "fragment_paths": { "description": "WiX fragment files to include.", "type": "array", "items": { "type": "string" } }, "language": { "description": "Languages and their locale paths.", "type": "array", "items": { "type": "array", "items": [ { "type": "string" }, { "type": [ "string", "null" ] } ], "maxItems": 2, "minItems": 2 } }, "license": { "description": "License file path.", "type": [ "string", "null" ] }, "merge_refs": { "description": "Merge module references.", "type": "array", "items": { "type": "string" } }, "skip_webview_install": { "description": "Skip WebView2 installation.", "type": [ "boolean", "null" ] }, "template": { "description": "Path to custom WiX template.", "type": [ "string", "null" ] }, "upgrade_code": { "description": "MSI upgrade code (GUID).", "type": [ "string", "null" ] }, "version": { "description": 
"MSI version string.", "type": [ "string", "null" ] } } }, "WixSettings": { "type": "object", "properties": { "banner_path": { "type": [ "string", "null" ] }, "component_group_refs": { "type": "array", "items": { "type": "string" } }, "component_refs": { "type": "array", "items": { "type": "string" } }, "dialog_image_path": { "type": [ "string", "null" ] }, "enable_elevated_update_task": { "type": "boolean" }, "feature_group_refs": { "type": "array", "items": { "type": "string" } }, "feature_refs": { "type": "array", "items": { "type": "string" } }, "fips_compliant": { "type": "boolean" }, "fragment_paths": { "type": "array", "items": { "type": "string" } }, "language": { "type": "array", "items": { "type": "array", "items": [ { "type": "string" }, { "type": [ "string", "null" ] } ], "maxItems": 2, "minItems": 2 } }, "license": { "type": [ "string", "null" ] }, "merge_refs": { "type": "array", "items": { "type": "string" } }, "skip_webview_install": { "type": "boolean" }, "template": { "type": [ "string", "null" ] }, "upgrade_code": { "description": "A GUID upgrade code for MSI installer. This code **_must stay the same across all of your updates_**, otherwise, Windows will treat your update as a different app and your users will have duplicate versions of your app.\n\nBy default, tauri generates this code by generating a Uuid v5 using the string `.exe.app.x64` in the DNS namespace. 
You can use Tauri's CLI to generate and print this code for you by running `tauri inspect wix-upgrade-code`.\n\nIt is recommended that you set this value in your tauri config file to avoid accidental changes in your upgrade code whenever you want to change your product name.", "type": [ "string", "null" ] }, "version": { "description": "MSI installer version in the format `major.minor.patch.build` (build is optional).\n\nBecause a valid version is required for MSI installer, it will be derived from [`tauri_bundler::PackageSettings::version`] if this field is not set.\n\nThe first field is the major version and has a maximum value of 255. The second field is the minor version and has a maximum value of 255. The third and fourth fields have a maximum value of 65,535.\n\nSee for more info.", "type": [ "string", "null" ] } } } } } ================================================ FILE: packages/cli/src/build/assets.rs ================================================ //! The dioxus asset system. //! //! This module provides functionality for extracting assets from a binary file and then writing back //! their asset hashes directly into the binary file. Previously, we performed asset hashing in the //! `asset!()` macro. The new system, implemented here, instead performs the hashing at build time, //! which provides more flexibility in the asset processing pipeline. //! //! We chose to implement this approach since assets might reference each other which means we minimally //! need to parse the asset to create a unique hash for each asset before they are used in the application. //! The hashes are used both for cache busting the asset in the browser and to cache the asset optimization //! process in the build system. //! //! We use the same lessons learned from the hot-patching engine which parses the binary file and its //! symbol table to find symbols that match the `__ASSETS__` prefix. These symbols are ideally data //! 
symbols and contain the BundledAsset data type which implements ConstSerialize and ConstDeserialize. //! //! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __ASSETS__ //! symbols, which we process here. After reading the metadata directly from the executable, we then //! hash it and write the hash directly into the binary file. //! //! During development, we can skip this step for most platforms since local paths are sufficient //! for asset loading. However, for WASM and for production builds, we need to ensure that assets //! can be found relative to the current exe. Unfortunately, on android, the `current_exe` path is wrong, //! so the assets are resolved against the "asset root" - which is covered by the asset loader crate. //! //! Finding the __ASSETS__ symbols is not quite straightforward when hotpatching, especially on WASM //! since we build and link the module as relocatable, which is not a stable WASM proposal. In this //! implementation, we handle both the non-PIE *and* PIC cases which are rather bespoke to our whole //! build system. use std::{ io::{Cursor, Read, Seek, Write}, path::{Path, PathBuf}, }; use crate::Result; use anyhow::{bail, Context}; use const_serialize::{deserialize_const, serialize_const, ConstVec}; use dioxus_cli_opt::AssetManifest; use manganis::{AssetOptions, AssetVariant, BundledAsset, ImageFormat, ImageSize}; use manganis_core::{AndroidArtifactMetadata, SwiftPackageMetadata, SymbolData}; use object::{File, Object, ObjectSection, ObjectSymbol, ReadCache, ReadRef, Section, Symbol}; use pdb::FallibleIterator; use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; /// Extract all manganis symbols and their sections from the given object file. 
fn manganis_symbols<'a, 'b, R: ReadRef<'a>>( file: &'b File<'a, R>, ) -> impl Iterator, Section<'a, 'b, R>)> + 'b { file.symbols().filter_map(move |symbol| { let name = symbol.name().ok()?; let version = looks_like_manganis_symbol(name)?; let section_index = symbol.section_index()?; let section = file.section_by_index(section_index).ok()?; Some((version, symbol, section)) }) } #[derive(Copy, Clone)] enum ManganisVersion { /// The legacy version of the manganis format published with 0.7.0 and 0.7.1 Legacy, /// The new version of the manganis format 0.7.2 onward /// This now includes both assets (old BundledAsset format) and permissions (SymbolData format) New, } impl ManganisVersion { fn size(&self) -> usize { match self { ManganisVersion::Legacy => { ::MEMORY_LAYOUT.size() } // For new format, we use a larger buffer size to accommodate variable-length CBOR // The actual size will be determined by CBOR deserialization ManganisVersion::New => 4096, } } /// Deserialize data, trying multiple formats for backward compatibility /// /// Tries in order: /// 1. SymbolData (new unified format) - can contain Asset or Permission /// 2. 
BundledAsset (old asset format) - for backward compatibility fn deserialize(&self, data: &[u8]) -> Option { match self { ManganisVersion::Legacy => { let buffer = const_serialize_07::ConstReadBuffer::new(data); let (_, legacy_asset) = const_serialize_07::deserialize_const!(manganis_core_07::BundledAsset, buffer)?; Some(SymbolDataOrAsset::Asset(legacy_asset_to_modern_asset( &legacy_asset, ))) } ManganisVersion::New => { // First try SymbolData (new format with enum variant) // const-serialize deserialization returns (remaining_bytes, value) // We accept if remaining is empty or contains only padding (zeros) if let Some((remaining, symbol_data)) = deserialize_const!(SymbolData, data) { // Check if remaining bytes are all zeros (padding) or empty // This handles the case where the linker section is larger than the actual data // Be very lenient with padding - as long as we successfully deserialized, accept it // The padding is just zeros added to fill the buffer size let is_valid = remaining.is_empty() || remaining.iter().all(|&b| b == 0) || remaining.len() <= data.len(); // Allow any amount of padding as long as it's not larger than data if is_valid { return Some(SymbolDataOrAsset::SymbolData(Box::new(symbol_data))); } else { tracing::debug!( "SymbolData deserialized but invalid padding: {} remaining bytes out of {} total (first few bytes: {:?})", remaining.len(), data.len(), &data[..data.len().min(32)] ); } } else { tracing::debug!( "Failed to deserialize as SymbolData. 
Data length: {}, first few bytes: {:?}", data.len(), &data[..data.len().min(32)] ); } // Fallback: try BundledAsset (direct format - assets are now serialized this way) // This handles assets that were serialized directly as BundledAsset (not wrapped in SymbolData) if let Some((remaining, asset)) = deserialize_const!(BundledAsset, data) { // Check if remaining bytes are all zeros (padding) or empty // Accept any amount of padding as long as it's all zeros (which is what we pad with) let is_valid = remaining.is_empty() || remaining.iter().all(|&b| b == 0); if is_valid { tracing::debug!( "Successfully deserialized BundledAsset, remaining padding: {} bytes", remaining.len() ); return Some(SymbolDataOrAsset::Asset(asset)); } else { tracing::warn!( "BundledAsset deserialized but remaining bytes are not all zeros: {} remaining bytes, first few: {:?}", remaining.len(), &remaining[..remaining.len().min(16)] ); } } else { tracing::warn!( "Failed to deserialize as BundledAsset. Data length: {}, first 32 bytes: {:?}", data.len(), &data[..data.len().min(32)] ); } None } } } fn serialize_asset(&self, asset: &BundledAsset) -> Vec { match self { ManganisVersion::Legacy => { let legacy_asset = modern_asset_to_legacy_asset(asset); let buffer = const_serialize_07::serialize_const( &legacy_asset, const_serialize_07::ConstVec::new(), ); buffer.as_ref().to_vec() } ManganisVersion::New => { // New format: serialize as BundledAsset directly (backward compatible) // Pad to 4096 bytes to match the linker output size let buffer = serialize_const(asset, ConstVec::new()); let mut data = buffer.as_ref().to_vec(); if data.len() < 4096 { data.resize(4096, 0); } data } } } fn serialize_symbol_data(&self, data: &SymbolData) -> Option> { match self { ManganisVersion::Legacy => None, ManganisVersion::New => { let buffer = serialize_const(data, ConstVec::new()); let mut bytes = buffer.as_ref().to_vec(); if bytes.len() < 4096 { bytes.resize(4096, 0); } Some(bytes) } } } } /// Result of deserializing a 
symbol - can be either SymbolData or legacy Asset #[derive(Debug, Clone)] #[allow(clippy::large_enum_variant)] enum SymbolDataOrAsset { /// New unified format (can contain Asset or Permission) SymbolData(Box), /// Old asset format (backward compatibility) Asset(BundledAsset), } #[derive(Clone, Copy)] struct AssetWriteEntry { symbol: ManganisSymbolOffset, asset_index: usize, representation: AssetRepresentation, } impl AssetWriteEntry { fn new( symbol: ManganisSymbolOffset, asset_index: usize, representation: AssetRepresentation, ) -> Self { Self { symbol, asset_index, representation, } } } #[derive(Clone, Copy)] enum AssetRepresentation { /// Serialized as a raw BundledAsset (legacy or new format) RawBundled, /// Serialized as SymbolData::Asset (new CBOR format) SymbolData, } fn legacy_asset_to_modern_asset( legacy_asset: &manganis_core_07::BundledAsset, ) -> manganis_core::BundledAsset { let bundled_path = legacy_asset.bundled_path(); let absolute_path = legacy_asset.absolute_source_path(); let legacy_options = legacy_asset.options(); let add_hash = legacy_options.hash_suffix(); let options = match legacy_options.variant() { manganis_core_07::AssetVariant::Image(image) => { let format = match image.format() { manganis_core_07::ImageFormat::Png => ImageFormat::Png, manganis_core_07::ImageFormat::Jpg => ImageFormat::Jpg, manganis_core_07::ImageFormat::Webp => ImageFormat::Webp, manganis_core_07::ImageFormat::Avif => ImageFormat::Avif, manganis_core_07::ImageFormat::Unknown => ImageFormat::Unknown, }; let size = match image.size() { manganis_core_07::ImageSize::Automatic => ImageSize::Automatic, manganis_core_07::ImageSize::Manual { width, height } => { ImageSize::Manual { width, height } } }; let preload = image.preloaded(); AssetOptions::image() .with_format(format) .with_size(size) .with_preload(preload) .with_hash_suffix(add_hash) .into_asset_options() } manganis_core_07::AssetVariant::Folder(_) => AssetOptions::folder() .with_hash_suffix(add_hash) 
.into_asset_options(), manganis_core_07::AssetVariant::Css(css) => AssetOptions::css() .with_hash_suffix(add_hash) .with_minify(css.minified()) .with_preload(css.preloaded()) .with_static_head(css.static_head()) .into_asset_options(), manganis_core_07::AssetVariant::CssModule(css_module) => AssetOptions::css_module() .with_hash_suffix(add_hash) .with_minify(css_module.minified()) .with_preload(css_module.preloaded()) .into_asset_options(), manganis_core_07::AssetVariant::Js(js) => AssetOptions::js() .with_hash_suffix(add_hash) .with_minify(js.minified()) .with_preload(js.preloaded()) .with_static_head(js.static_head()) .into_asset_options(), _ => AssetOptions::builder() .with_hash_suffix(add_hash) .into_asset_options(), }; BundledAsset::new(absolute_path, bundled_path, options) } fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07::BundledAsset { let bundled_path = modern_asset.bundled_path(); let absolute_path = modern_asset.absolute_source_path(); let legacy_options = modern_asset.options(); let add_hash = legacy_options.hash_suffix(); let options = match legacy_options.variant() { AssetVariant::Image(image) => { let format = match image.format() { ImageFormat::Png => manganis_core_07::ImageFormat::Png, ImageFormat::Jpg => manganis_core_07::ImageFormat::Jpg, ImageFormat::Webp => manganis_core_07::ImageFormat::Webp, ImageFormat::Avif => manganis_core_07::ImageFormat::Avif, ImageFormat::Unknown => manganis_core_07::ImageFormat::Unknown, }; let size = match image.size() { ImageSize::Automatic => manganis_core_07::ImageSize::Automatic, ImageSize::Manual { width, height } => { manganis_core_07::ImageSize::Manual { width, height } } }; let preload = image.preloaded(); manganis_core_07::AssetOptions::image() .with_format(format) .with_size(size) .with_preload(preload) .with_hash_suffix(add_hash) .into_asset_options() } AssetVariant::Folder(_) => manganis_core_07::AssetOptions::folder() .with_hash_suffix(add_hash) .into_asset_options(), 
AssetVariant::Css(css) => manganis_core_07::AssetOptions::css() .with_hash_suffix(add_hash) .with_minify(css.minified()) .with_preload(css.preloaded()) .with_static_head(css.static_head()) .into_asset_options(), AssetVariant::CssModule(css_module) => manganis_core_07::AssetOptions::css_module() .with_hash_suffix(add_hash) .with_minify(css_module.minified()) .with_preload(css_module.preloaded()) .into_asset_options(), AssetVariant::Js(js) => manganis_core_07::AssetOptions::js() .with_hash_suffix(add_hash) .with_minify(js.minified()) .with_preload(js.preloaded()) .with_static_head(js.static_head()) .into_asset_options(), _ => manganis_core_07::AssetOptions::builder() .with_hash_suffix(add_hash) .into_asset_options(), }; manganis_core_07::BundledAsset::new(absolute_path, bundled_path, options) } fn looks_like_manganis_symbol(name: &str) -> Option { if name.contains("__MANGANIS__") { Some(ManganisVersion::Legacy) } else if name.contains("__ASSETS__") { Some(ManganisVersion::New) } else { None } } /// An asset offset in the binary #[derive(Clone, Copy)] struct ManganisSymbolOffset { version: ManganisVersion, offset: u64, } impl ManganisSymbolOffset { fn new(version: ManganisVersion, offset: u64) -> Self { Self { version, offset } } } /// Find the offsets of any manganis symbols in the given file. fn find_symbol_offsets<'a, R: ReadRef<'a>>( path: &Path, file_contents: &[u8], file: &File<'a, R>, ) -> Result> { let pdb_file = find_pdb_file(path); match file.format() { // We need to handle dynamic offsets in wasm files differently object::BinaryFormat::Wasm => find_wasm_symbol_offsets(file_contents, file), // Windows puts the symbol information in a PDB file alongside the executable. // If this is a windows PE file and we found a PDB file, we will use that to find the symbol offsets. object::BinaryFormat::Pe if pdb_file.is_some() => { find_pdb_symbol_offsets(&pdb_file.unwrap()) } // Otherwise, look for manganis symbols in the object file. 
_ => find_native_symbol_offsets(file), } } /// Find the pdb file matching the executable file. fn find_pdb_file(path: &Path) -> Option { let mut pdb_file = path.with_extension("pdb"); // Also try to find it in the same directory as the executable with _'s instead of -'s if let Some(file_name) = pdb_file.file_name() { let new_file_name = file_name.to_string_lossy().replace('-', "_"); let altrnate_pdb_file = pdb_file.with_file_name(new_file_name); // Keep the most recent pdb file match (pdb_file.metadata(), altrnate_pdb_file.metadata()) { (Ok(pdb_metadata), Ok(alternate_metadata)) => { if let (Ok(pdb_modified), Ok(alternate_modified)) = (pdb_metadata.modified(), alternate_metadata.modified()) { if pdb_modified < alternate_modified { pdb_file = altrnate_pdb_file; } } } (Err(_), Ok(_)) => { pdb_file = altrnate_pdb_file; } _ => {} } } if pdb_file.exists() { Some(pdb_file) } else { None } } /// Find the offsets of any manganis symbols in a pdb file. fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { let pdb_file_handle = std::fs::File::open(pdb_file)?; let mut pdb_file = pdb::PDB::open(pdb_file_handle).context("Failed to open PDB file")?; let Ok(Some(sections)) = pdb_file.sections() else { tracing::error!("Failed to read sections from PDB file"); return Ok(Vec::new()); }; let global_symbols = pdb_file .global_symbols() .context("Failed to read global symbols from PDB file")?; let address_map = pdb_file .address_map() .context("Failed to read address map from PDB file")?; let mut symbols = global_symbols.iter(); let mut addresses = Vec::new(); while let Ok(Some(symbol)) = symbols.next() { let Ok(pdb::SymbolData::Public(data)) = symbol.parse() else { continue; }; let Some(rva) = data.offset.to_section_offset(&address_map) else { continue; }; let name = data.name.to_string(); if let Some(version) = looks_like_manganis_symbol(&name) { let section = sections .get(rva.section as usize - 1) .expect("Section index out of bounds"); addresses.push(ManganisSymbolOffset::new( 
version, (section.pointer_to_raw_data + rva.offset) as u64, )); } } Ok(addresses) } /// Find the offsets of any manganis symbols in a native object file. fn find_native_symbol_offsets<'a, R: ReadRef<'a>>( file: &File<'a, R>, ) -> Result> { let mut offsets = Vec::new(); for (version, symbol, section) in manganis_symbols(file) { let virtual_address = symbol.address(); let Some((section_range_start, _)) = section.file_range() else { tracing::error!( "Found __ASSETS__ symbol {:?} in section {}, but the section has no file range", symbol.name(), section.index() ); continue; }; // Translate the section_relative_address to the file offset let section_relative_address: u64 = (virtual_address as i128 - section.address() as i128) .try_into() .expect("Virtual address should be greater than or equal to section address"); let file_offset = section_range_start + section_relative_address; offsets.push(ManganisSymbolOffset::new(version, file_offset)); } Ok(offsets) } /// Evaluate a walrus global expression to get its value. fn eval_walrus_global_expr(module: &walrus::Module, expr: &walrus::ConstExpr) -> Option { match expr { walrus::ConstExpr::Value(walrus::ir::Value::I32(value)) => Some(*value as u64), walrus::ConstExpr::Value(walrus::ir::Value::I64(value)) => Some(*value as u64), walrus::ConstExpr::Global(id) => { let global = module.globals.get(*id); if let walrus::GlobalKind::Local(pointer) = &global.kind { eval_walrus_global_expr(module, pointer) } else { None } } _ => None, } } /// Find the value of a global export by name. fn find_global_export_value(module: &walrus::Module, name: &str) -> Option { for export in module.exports.iter() { if export.name == name { if let walrus::ExportItem::Global(g) = export.item { if let walrus::GlobalKind::Local(expr) = &module.globals.get(g).kind { return eval_walrus_global_expr(module, expr); } } } } None } /// Find the offsets of any manganis symbols in the wasm file. 
/// /// This handles both standard WASM builds and builds with advanced features like: /// - Bulk memory operations (passive data segments) /// - Thread Local Storage (TLS) /// - Atomics and shared memory fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( file_contents: &[u8], file: &File<'a, R>, ) -> Result> { let Some(section) = file .sections() .find(|section| section.name() == Ok("")) else { tracing::error!("Failed to find section in WASM file"); return Ok(Vec::new()); }; let Some((_, section_range_end)) = section.file_range() else { tracing::error!("Failed to find file range for section in WASM file"); return Ok(Vec::new()); }; let section_size = section.data()?.len() as u64; let section_start = section_range_end - section_size; // Parse data segments with wasmparser to get file offsets. // Walrus doesn't expose file offset information, so we need wasmparser for this. // With bulk memory operations, there may be multiple data segments. let reader = wasmparser::DataSectionReader::new(wasmparser::BinaryReader::new( &file_contents[section_start as usize..section_range_end as usize], 0, )) .context("Failed to create WASM data section reader")?; // Collect all data segments with their file offsets and sizes let mut segment_file_info: Vec<(u64, u64)> = Vec::new(); for segment in reader.into_iter() { let segment = segment.context("Failed to read data segment")?; segment_file_info.push(( (segment.data.as_ptr() as u64) .checked_sub(file_contents.as_ptr() as u64) .expect("Data segment should be within file contents"), segment.data.len() as u64, )); } if segment_file_info.is_empty() { return Ok(Vec::new()); } // Parse the wasm file with walrus to find globals and exports let module = walrus::Module::from_buffer(file_contents) .context("Failed to parse WASM module with walrus")?; // Determine the memory base address for symbol lookup let main_memory_walrus = module .data .iter() .next() .context("Failed to find main memory in WASM module")?; let main_memory_offset = match 
&main_memory_walrus.kind { walrus::DataKind::Active { offset, .. } => { // Active segments have an explicit offset expression eval_walrus_global_expr(&module, offset).unwrap_or_default() } walrus::DataKind::Passive => { // For passive segments (bulk memory operations), there's no static offset. // The memory.init instruction determines placement at runtime. // // Try to find the actual memory base from linker exports: // - __memory_base: Set by the linker for bulk-memory builds // - Falls back to 0x100000 (Rust/LLVM default for static data) // // With TLS support, the linker calculates symbol addresses as if TLS data // is at the base address followed by main data. But at runtime, TLS is stored // separately per-thread via __wasm_init_tls. We detect TLS by looking for // __tls_size and adjust accordingly. // // IMPORTANT: The linker aligns main data to a 4-byte boundary after TLS. // This alignment padding exists in MEMORY but NOT in the FILE. We must // use the aligned TLS size for base calculation, but the file segments // are stored without this padding. let memory_base = find_global_export_value(&module, "__memory_base"); let tls_size = find_global_export_value(&module, "__tls_size").unwrap_or(0); // If TLS is present and segment 0 matches TLS size, remove TLS segment // from our file info since it's not where data symbols point if tls_size > 0 && !segment_file_info.is_empty() && segment_file_info[0].1 == tls_size { segment_file_info.remove(0); } // Align TLS size up to 4 bytes to match linker's memory layout. // The linker aligns main data to a 4-byte boundary after TLS, so symbol // addresses are calculated from (memory_base + aligned_tls_size). // However, file segments are stored without this alignment padding. 
let tls_aligned = (tls_size + 3) & !3; // Use __memory_base if available (set by linker in release builds), // otherwise fall back to 0x100000 (debug builds default) memory_base.unwrap_or(0x100000u64) + tls_aligned } }; // Find all manganis symbols and calculate their file offsets let mut offsets = Vec::new(); for export in module.exports.iter() { let Some(version) = looks_like_manganis_symbol(&export.name) else { continue; }; let walrus::ExportItem::Global(global) = export.item else { continue; }; let global_data = module.globals.get(global); let walrus::GlobalKind::Local(pointer) = global_data.kind else { continue; }; let Some(virtual_address) = eval_walrus_global_expr(&module, &pointer) else { tracing::error!( "Found __ASSETS__ symbol {:?} in WASM file, but the global expression could not be evaluated", export.name ); continue; }; // Calculate offset relative to the data base address let data_relative_offset = match (virtual_address as i128).checked_sub(main_memory_offset as i128) { Some(offset) if offset >= 0 => offset as u64, _ => { tracing::error!( "Virtual address 0x{:x} is below main memory offset 0x{:x}", virtual_address, main_memory_offset ); continue; } }; // Find which segment this offset falls into. // Segments are laid out contiguously in memory. 
// (tail of `find_symbol_offsets`) Map the data-relative offset back to a file offset.
// Segments are laid out contiguously in memory, so we accumulate sizes until we find
// the segment that contains this offset.
let mut cumulative_offset = 0u64;
let mut file_offset = None;
for (seg_file_offset, seg_size) in segment_file_info.iter() {
    // The symbol lives in this segment if its offset falls before the segment's end.
    if data_relative_offset < cumulative_offset + seg_size {
        let offset_in_segment = data_relative_offset - cumulative_offset;
        file_offset = Some(seg_file_offset + offset_in_segment);
        break;
    }
    cumulative_offset += seg_size;
}

// A symbol that points past every segment is unrecoverable — log and skip it.
let Some(file_offset) = file_offset else {
    tracing::error!(
        "Virtual address 0x{:x} is beyond all data segments",
        virtual_address
    );
    continue;
};

offsets.push(ManganisSymbolOffset::new(version, file_offset));
}

Ok(offsets)
}

/// Result of extracting symbols from a binary file
// NOTE(review): the element type parameters on the `Vec` fields below appear to have
// been stripped by extraction (`Vec,`) — confirm against the upstream source.
#[derive(Debug, Clone)]
pub(crate) struct SymbolExtractionResult {
    /// Assets found in the binary
    pub assets: Vec,
    /// Android plugin artifacts discovered in the binary
    pub android_artifacts: Vec,
    /// Swift packages discovered in the binary
    pub swift_packages: Vec,
}

/// Find all assets in the given file, hash them, and write them back to the file.
/// Also extracts Android/Swift plugin metadata for FFI bindings.
// NOTE(review): several generic parameters in this block appear stripped by extraction
// (e.g. `impl AsRef,` and the bare `Result`) — confirm against the upstream source.
pub(crate) async fn extract_symbols_from_file(
    path: impl AsRef,
) -> Result {
    let path = path.as_ref();

    // Open read+write: we read the whole binary to locate symbols, then write the
    // hashed asset data back in place. The timeout helper retries on Windows if
    // another process (e.g. antivirus) briefly holds the file.
    let mut file = open_file_for_writing_with_timeout(
        path,
        std::fs::OpenOptions::new().write(true).read(true),
    )
    .await?;
    let mut file_contents = Vec::new();
    file.read_to_end(&mut file_contents)?;

    // Parse the binary with `object` via a ReadCache over the in-memory bytes.
    let mut reader = Cursor::new(&file_contents);
    let read_cache = ReadCache::new(&mut reader);
    let object_file = object::File::parse(&read_cache)?;

    let offsets = find_symbol_offsets(path, &file_contents, &object_file)?;

    let mut assets = Vec::new();
    let mut android_artifacts = Vec::new();
    let mut swift_packages = Vec::new();
    // Entries describing where (and in which representation) each asset must be
    // written back after hashing.
    let mut write_entries = Vec::new();

    // Read each symbol from the data section using the offsets
    for symbol in offsets.iter().copied() {
        let version = symbol.version;
        let offset = symbol.offset;

        // Read data from file_contents (already loaded into memory)
        // Use a large buffer for variable length data, but don't exceed file size
        let buffer_size = version
            .size()
            .min(file_contents.len().saturating_sub(offset as usize));

        if buffer_size == 0 {
            tracing::warn!("Symbol at offset {offset} is beyond file size");
            continue;
        }

        let data_in_range = if (offset as usize) + buffer_size <= file_contents.len() {
            &file_contents[offset as usize..(offset as usize) + buffer_size]
        } else {
            &file_contents[offset as usize..]
        };

        // Try to deserialize - const-serialize will handle variable-length data correctly
        // The deserialization should work even with padding (zeros) at the end
        if let Some(result) = version.deserialize(data_in_range) {
            match result {
                // Newer format: a tagged SymbolData wrapper that can carry assets,
                // Android artifacts, or Swift package declarations.
                SymbolDataOrAsset::SymbolData(symbol_data) => match *symbol_data {
                    SymbolData::Asset(asset) => {
                        tracing::debug!(
                            "Found asset (via SymbolData) at offset {offset}: {:?}",
                            asset.absolute_source_path()
                        );
                        let asset_index = assets.len();
                        assets.push(asset);
                        write_entries.push(AssetWriteEntry::new(
                            symbol,
                            asset_index,
                            AssetRepresentation::SymbolData,
                        ));
                    }
                    SymbolData::AndroidArtifact(meta) => {
                        tracing::debug!(
                            "Found Android artifact declaration for plugin {}",
                            meta.plugin_name.as_str()
                        );
                        android_artifacts.push(meta);
                    }
                    SymbolData::SwiftPackage(meta) => {
                        tracing::debug!(
                            "Found Swift package declaration for plugin {}",
                            meta.plugin_name.as_str()
                        );
                        swift_packages.push(meta);
                    }
                    _ => {}
                },
                // Older format: a bare asset with no SymbolData wrapper.
                SymbolDataOrAsset::Asset(asset) => {
                    tracing::debug!(
                        "Found asset (old format) at offset {offset}: {:?}",
                        asset.absolute_source_path()
                    );
                    let asset_index = assets.len();
                    assets.push(asset);
                    write_entries.push(AssetWriteEntry::new(
                        symbol,
                        asset_index,
                        AssetRepresentation::RawBundled,
                    ));
                }
            }
        } else {
            tracing::warn!("Found a symbol at offset {offset} that could not be deserialized. 
This may be caused by a mismatch between your dioxus and dioxus-cli versions, or the symbol may be in an unsupported format.");
        }
    }

    // Add the hash to each asset in parallel
    assets
        .par_iter_mut()
        .for_each(dioxus_cli_opt::add_hash_to_asset);

    // Write back only assets to the binary file (permissions are not modified)
    for entry in write_entries {
        let version = entry.symbol.version;
        let offset = entry.symbol.offset;
        let asset = assets
            .get(entry.asset_index)
            .copied()
            .expect("asset index collected from symbol scan");
        match entry.representation {
            AssetRepresentation::RawBundled => {
                tracing::debug!("Writing asset to offset {offset}: {:?}", asset);
                let new_data = version.serialize_asset(&asset);
                // The serialized asset must fit in the original symbol's buffer; if it
                // doesn't, warn and let write_serialized_bytes truncate.
                if new_data.len() > version.size() {
                    tracing::warn!(
                        "Asset at offset {offset} serialized to {} bytes, but buffer is only {} bytes. Truncating output.",
                        new_data.len(),
                        version.size()
                    );
                }
                write_serialized_bytes(&mut file, offset, &new_data, version.size())?;
            }
            AssetRepresentation::SymbolData => {
                tracing::debug!("Writing asset (SymbolData) to offset {offset}: {:?}", asset);
                // Older binary formats may not support the SymbolData wrapper at all.
                let Some(new_data) = version.serialize_symbol_data(&SymbolData::Asset(asset))
                else {
                    tracing::warn!(
                        "Symbol at offset {offset} was stored as SymbolData but the binary format only supports raw assets"
                    );
                    continue;
                };
                if new_data.len() > version.size() {
                    tracing::warn!(
                        "SymbolData asset at offset {offset} serialized to {} bytes, but buffer is only {} bytes. 
Truncating output.",
                        new_data.len(),
                        version.size()
                    );
                }
                write_serialized_bytes(&mut file, offset, &new_data, version.size())?;
            }
        }
    }

    // Ensure the file is flushed to disk
    file.sync_all()
        .context("Failed to sync file after writing assets")?;

    // If the file is a macos binary, we need to re-sign the modified binary
    if object_file.format() == object::BinaryFormat::MachO && !assets.is_empty() {
        // Spawn the codesign command to re-sign the binary
        let output = std::process::Command::new("codesign")
            .arg("--force")
            .arg("--sign")
            .arg("-") // Sign with an empty identity
            .arg(path)
            .output()
            .context("Failed to run codesign - is `codesign` in your path?")?;
        if !output.status.success() {
            bail!(
                "Failed to re-sign the binary with codesign after finalizing the assets: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
    }

    Ok(SymbolExtractionResult {
        assets,
        android_artifacts,
        swift_packages,
    })
}

/// Find all assets in the given file, hash them, and write them back to the file.
/// Then return an `AssetManifest` containing all the assets found in the file.
///
/// This is a convenience function that extracts symbols and returns only assets.
pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result {
    let result = extract_symbols_from_file(path).await?;
    let mut manifest = AssetManifest::default();
    for asset in result.assets {
        manifest.insert_asset(asset);
    }
    Ok(manifest)
}

/// Try to open a file for writing, retrying if the file is already open by another process.
///
/// This is useful on windows where antivirus software might grab the executable before we have a chance to read it.
async fn open_file_for_writing_with_timeout( file: &Path, options: &mut std::fs::OpenOptions, ) -> Result { let start_time = std::time::Instant::now(); let timeout = std::time::Duration::from_secs(5); loop { match options.open(file) { Ok(file) => return Ok(file), Err(e) => { if cfg!(windows) && e.raw_os_error() == Some(32) && start_time.elapsed() < timeout { // File is already open, wait and retry tracing::trace!( "Failed to open file because another process is using it. Retrying..." ); tokio::time::sleep(std::time::Duration::from_millis(50)).await; } else { return Err(e.into()); } } } } } fn write_serialized_bytes( file: &mut std::fs::File, offset: u64, data: &[u8], buffer_size: usize, ) -> Result<()> { use std::io::SeekFrom; file.seek(SeekFrom::Start(offset))?; if data.len() <= buffer_size { file.write_all(data)?; if data.len() < buffer_size { let padding = vec![0; buffer_size - data.len()]; file.write_all(&padding)?; } } else { file.write_all(&data[..buffer_size])?; } Ok(()) } ================================================ FILE: packages/cli/src/build/builder.rs ================================================ use crate::{ build::cache::ObjectCache, serve::WebServer, verbosity_or_default, BuildArtifacts, BuildRequest, BuildStage, BuilderUpdate, BundleFormat, ProgressRx, ProgressTx, Result, RustcArgs, StructuredOutput, }; use anyhow::{bail, Context, Error}; use dioxus_cli_opt::process_file_to; use futures_util::{future::OptionFuture, pin_mut, FutureExt}; use itertools::Itertools; use std::{ collections::HashSet, env, time::{Duration, Instant, SystemTime}, }; use std::{ net::SocketAddr, path::{Path, PathBuf}, process::Stdio, }; use subsecond_types::JumpTable; use target_lexicon::Architecture; use tokio::{ io::{AsyncBufReadExt, BufReader, Lines}, process::{Child, ChildStderr, ChildStdout, Command}, task::JoinHandle, }; use tokio_stream::wrappers::UnboundedReceiverStream; use super::{BuildContext, BuildId, BuildMode, HotpatchModuleCache}; /// The component of the 
serve engine that watches ongoing builds and manages their state, open handle,
/// and progress.
///
/// Previously, the builder allowed multiple apps to be built simultaneously, but this newer design
/// simplifies the code and allows only one app and its server to be built at a time.
///
/// Here, we track the number of crates being compiled, assets copied, the times of these events, and
/// other metadata that gives us useful indicators for the UI.
///
/// A handle to a running app.
///
/// The actual child processes might not be present (web) or running (died/killed).
///
/// The purpose of this struct is to accumulate state about the running app and its server, like
/// any runtime information needed to hotreload the app or send it messages.
///
/// We might want to bring in websockets here too, so we know the exact channels the app is using to
/// communicate with the devserver. Currently that's a broadcast-type system, so this struct isn't super
/// duper useful.
///
/// todo: restructure this such that "open" is a running task instead of blocking the main thread
// NOTE(review): generic type parameters on several fields below appear to have been
// stripped during extraction (e.g. `JoinHandle>`, bare `Option`/`Vec`/`HashSet`) —
// confirm against the upstream source before relying on these declarations.
pub(crate) struct AppBuilder {
    // Progress channel: `tx` is handed to build tasks, `rx` is polled in `wait`
    pub tx: ProgressTx,
    pub rx: ProgressRx,

    // The original request with access to its build directory
    pub build: BuildRequest,

    // Ongoing build task, if any
    pub build_task: JoinHandle>,

    // If a build has already finished, we'll have its artifacts (rustc, link args, etc) to work with
    pub artifacts: Option,

    /// The aslr offset of this running app
    pub aslr_reference: Option,

    /// The list of patches applied to the app, used to know which ones to reapply and/or iterate from.
    pub patches: Vec,
    pub patch_cache: Option,

    /// The virtual directory that assets will be served from
    /// Used mostly for apk/ipa builds since they live in simulator
    pub runtime_asset_dir: Option,

    // These might be None if the app died or the user did not specify a server
    pub child: Option,

    // stdio for the app so we can read its stdout/stderr
    // we don't map stdin today (todo) but most apps don't need it
    pub stdout: Option>>,
    pub stderr: Option>>,

    // Android logcat stream (treated as stderr for error/warn levels)
    pub adb_logcat_stdout: Option>,

    /// Handle to the task that's monitoring the child process
    pub spawn_handle: Option>>,

    /// The executables but with some extra entropy in their name so we can run two instances of the
    /// same app without causing collisions on the filesystem.
    pub entropy_app_exe: Option,
    // Incremented each time `open` launches the app successfully
    pub builds_opened: usize,

    // Metadata about the build that needs to be managed by watching build updates
    // used to render the TUI
    pub stage: BuildStage,
    pub compiled_crates: usize,
    pub expected_crates: usize,
    pub bundling_progress: f64,
    pub compile_start: Option,
    pub compile_end: Option,
    pub bundle_start: Option,
    pub bundle_end: Option,

    /// The debugger for the app - must be enabled with the `d` key
    pub(crate) pid: Option,

    /// Cumulative set of workspace crates modified since the last fat build.
    /// Each patch includes objects from ALL crates in this set.
    pub modified_crates: HashSet,

    /// Cache of the latest `.rcgu.o` files for each modified workspace crate.
    pub object_cache: ObjectCache,
}

impl AppBuilder {
    /// Create a new `AppBuilder` and immediately start a build process.
    ///
    /// This method initializes the builder with the provided `BuildRequest` and spawns an asynchronous
    /// task (`build_task`) to handle the build process. The build process involves several stages:
    ///
    /// 1. **Tooling Verification**: Ensures that the necessary tools are available for the build.
    /// 2.
**Build Directory Preparation**: Sets up the directory structure required for the build. /// 3. **Build Execution**: Executes the build process asynchronously. /// 4. **Bundling**: Packages the built artifacts into a final bundle. /// /// The `build_task` is a Tokio task that runs the build process in the background. It uses a /// `BuildContext` to manage the build state and communicate progress or errors via a message /// channel (`tx`). /// /// The builder is initialized with default values for various fields, such as the build stage, /// progress metrics, and optional runtime configurations. /// /// # Notes /// /// - The `build_task` is immediately spawned and will run independently of the caller. /// - The caller can use other methods on the `AppBuilder` to monitor the build progress or handle /// updates (e.g., `wait`, `finish_build`). /// - The build process is designed to be cancellable and restartable using methods like `abort_all` /// or `rebuild`. pub(crate) fn new(request: &BuildRequest) -> Result { let (tx, rx) = futures_channel::mpsc::unbounded(); Ok(Self { build: request.clone(), stage: BuildStage::Initializing, build_task: tokio::task::spawn(std::future::pending()), tx, rx, patches: vec![], compiled_crates: 0, expected_crates: 1, bundling_progress: 0.0, builds_opened: 0, compile_start: Some(Instant::now()), aslr_reference: None, compile_end: None, bundle_start: None, bundle_end: None, runtime_asset_dir: None, child: None, stderr: None, stdout: None, adb_logcat_stdout: None, spawn_handle: None, entropy_app_exe: None, artifacts: None, patch_cache: None, pid: None, modified_crates: HashSet::new(), object_cache: ObjectCache::new(&request.session_cache_dir()), }) } /// Create a new `AppBuilder` and immediately start a build process. 
pub fn started(request: &BuildRequest, mode: BuildMode, build_id: BuildId) -> Result { let mut builder = Self::new(request)?; builder.start(mode, build_id); Ok(builder) } pub(crate) fn start(&mut self, mode: BuildMode, build_id: BuildId) { self.build_task = tokio::spawn({ let request = self.build.clone(); let tx = self.tx.clone(); async move { let ctx = BuildContext { mode, build_id, tx: tx.clone(), }; request.verify_tooling(&ctx).await?; request.prebuild(&ctx).await?; request.build(&ctx).await } }); } /// Wait for any new updates to the builder - either it completed or gave us a message etc pub(crate) async fn wait(&mut self) -> BuilderUpdate { use futures_util::StreamExt; use BuilderUpdate::*; // Wait for the build to finish or for it to emit a status message let update = tokio::select! { Some(progress) = self.rx.next() => progress, bundle = (&mut self.build_task) => { // Replace the build with an infinitely pending task so we can select it again without worrying about deadlocks/spins self.build_task = tokio::task::spawn(std::future::pending()); match bundle { Ok(Ok(bundle)) => BuilderUpdate::BuildReady { bundle }, Ok(Err(err)) => BuilderUpdate::BuildFailed { err }, Err(err) => BuilderUpdate::BuildFailed { err: anyhow::anyhow!("Build panicked! 
{err:#?}") }, } }, Some(Ok(Some(msg))) = OptionFuture::from(self.stdout.as_mut().map(|f| f.next_line())) => { StdoutReceived { msg } }, Some(Ok(Some(msg))) = OptionFuture::from(self.stderr.as_mut().map(|f| f.next_line())) => { StderrReceived { msg } }, Some(msg) = OptionFuture::from(self.spawn_handle.as_mut()) => { // Prevent re-polling the spawn future, similar to above self.spawn_handle = None; match msg { Ok(Ok(_)) => StdoutReceived { msg: "Finished launching app".to_string() }, Ok(Err(err)) => StderrReceived { msg: err.to_string() }, Err(err) => StderrReceived { msg: err.to_string() } } }, Some(Some(msg)) = OptionFuture::from(self.adb_logcat_stdout.as_mut().map(|s| s.next())) => { // Send as stderr for errors/warnings, stdout for info/debug // Parse the priority level from a logcat line // // Logcat brief format: "I/TAG(12345): message" // Returns the priority char (V, D, I, W, E, F) if matches!(msg.chars().next().unwrap_or('I'), 'E' | 'W' | 'F') { StderrReceived { msg } } else { StdoutReceived { msg } } }, Some(status) = OptionFuture::from(self.child.as_mut().map(|f| f.wait())) => { match status { Ok(status) => { self.child = None; ProcessExited { status } }, Err(err) => { let () = futures_util::future::pending().await; ProcessWaitFailed { err } } } } }; // Update the internal stage of the build so the UI can render it // *VERY IMPORTANT* - DO NOT AWAIT HERE // doing so will cause the changes to be lost since this wait call is called under a cancellable task // todo - move this handling to a separate function that won't be cancelled match &update { BuilderUpdate::Progress { stage } => { // Prevent updates from flowing in after the build has already finished if !self.is_finished() { self.stage = stage.clone(); match stage { BuildStage::Initializing => { self.compiled_crates = 0; self.bundling_progress = 0.0; } BuildStage::Starting { crate_count, .. 
} => { self.expected_crates = *crate_count.max(&1); } BuildStage::InstallingTooling => {} BuildStage::Compiling { current, total, .. } => { self.compiled_crates = *current; self.expected_crates = *total.max(&1); if self.compile_start.is_none() { self.compile_start = Some(Instant::now()); } } BuildStage::Bundling => { self.complete_compile(); self.bundling_progress = 0.0; self.bundle_start = Some(Instant::now()); } BuildStage::OptimizingWasm => {} BuildStage::CopyingAssets { current, total, .. } => { self.bundling_progress = *current as f64 / *total as f64; } BuildStage::Success => { self.compiled_crates = self.expected_crates; self.bundling_progress = 1.0; } BuildStage::Failed => { self.compiled_crates = self.expected_crates; self.bundling_progress = 1.0; } BuildStage::Aborted => {} BuildStage::Restarting => { self.compiled_crates = 0; self.expected_crates = 1; self.bundling_progress = 0.0; } BuildStage::RunningBindgen => {} _ => {} } } } BuilderUpdate::CompilerMessage { .. } => {} BuilderUpdate::BuildReady { .. } => { self.compiled_crates = self.expected_crates; self.bundling_progress = 1.0; self.stage = BuildStage::Success; self.complete_compile(); self.bundle_end = Some(Instant::now()); } BuilderUpdate::BuildFailed { .. } => { tracing::debug!("Setting builder to failed state"); self.stage = BuildStage::Failed; } StdoutReceived { .. } => {} StderrReceived { .. } => {} ProcessExited { .. } => {} ProcessWaitFailed { .. } => {} } update } pub(crate) fn patch_rebuild( &mut self, changed_files: Vec, changed_crates: Vec, build_id: BuildId, ) { // We need the rustc args from the original build to pass to the new build let Some(artifacts) = self.artifacts.as_ref().cloned() else { tracing::warn!( "Ignoring patch rebuild for {build_id:?} since there is no existing build." ); return; }; // On web, our patches are fully relocatable, so we don't need to worry about ASLR, but // for all other platforms, we need to use the ASLR reference to know where to insert the patch. 
let aslr_reference = match self.aslr_reference { Some(val) => val, None if matches!( self.build.triple.architecture, Architecture::Wasm32 | Architecture::Wasm64 ) => { 0 } None => { tracing::warn!( "Ignoring hotpatch since there is no ASLR reference. Is the client connected?" ); return; } }; let cache = artifacts .patch_cache .clone() .context("Failed to get patch cache") .unwrap(); // Pre-compute the cumulative modified_crates set. Every patch includes objects from // ALL crates modified since the fat build. We compute the full cascade closure here // (while we have &mut self) so it doesn't need to be round-tripped through BuildArtifacts. // // Note: compile_workspace_deps() independently computes which crates to compile for THIS // patch (starting from changed_crates + cascade). That serves a different purpose — it only // compiles what changed now, not everything ever modified. Both use workspace_dependents_of // for the BFS, so they stay in sync automatically. let tip_crate_name = self.build.main_target.replace('-', "_"); self.modified_crates.insert(tip_crate_name.clone()); // Add changed crates and their transitive workspace dependents (cascade). 
let mut to_visit: Vec = changed_crates.clone(); let mut visited = HashSet::new(); while let Some(c) = to_visit.pop() { if !visited.insert(c.clone()) { continue; } self.modified_crates.insert(c.clone()); for dep in self.build.workspace_dependents_of(&c) { if dep != tip_crate_name && !visited.contains(&dep) { to_visit.push(dep); } } } tracing::debug!( "Patch rebuild: changed_crates={:?}, modified_crates={:?}", changed_crates, self.modified_crates, ); // Abort all the ongoing builds, cleaning up any loose artifacts and waiting to cleanly exit self.abort_all(BuildStage::Restarting); self.build_task = tokio::spawn({ let request = self.build.clone(); let ctx = BuildContext { build_id, tx: self.tx.clone(), mode: BuildMode::Thin { changed_files, changed_crates, modified_crates: self.modified_crates.clone(), workspace_rustc_args: artifacts.workspace_rustc_args, aslr_reference, cache, object_cache: self.object_cache.clone(), }, }; async move { request.build(&ctx).await } }); } /// Restart this builder with new build arguments. pub(crate) fn start_rebuild(&mut self, mode: BuildMode, build_id: BuildId) { // Abort all the ongoing builds, cleaning up any loose artifacts and waiting to cleanly exit // And then start a new build, resetting our progress/stage to the beginning and replacing the old tokio task self.abort_all(BuildStage::Restarting); self.artifacts.take(); self.patch_cache.take(); // A full rebuild resets all accumulated hotpatch state — the fat binary is a clean baseline. 
self.modified_crates.clear(); self.object_cache = ObjectCache::new(&self.build.session_cache_dir()); self.build_task = tokio::spawn({ let request = self.build.clone(); let ctx = BuildContext { tx: self.tx.clone(), mode, build_id, }; async move { request.build(&ctx).await } }); } /// Shutdown the current build process /// /// todo: might want to use a cancellation token here to allow cleaner shutdowns pub(crate) fn abort_all(&mut self, stage: BuildStage) { self.stage = stage; self.compiled_crates = 0; self.expected_crates = 1; self.bundling_progress = 0.0; self.compile_start = None; self.bundle_start = None; self.bundle_end = None; self.compile_end = None; self.build_task.abort(); } /// Wait for the build to finish, returning the final bundle /// Should only be used by code that's not interested in the intermediate updates and only cares about the final bundle /// /// todo(jon): maybe we want to do some logging here? The build/bundle/run screens could be made to /// use the TUI output for prettier outputs. pub(crate) async fn finish_build(&mut self) -> Result { loop { match self.wait().await { BuilderUpdate::Progress { stage } => { match &stage { BuildStage::Compiling { current, total, krate, .. 
} => { tracing::info!("Compiled [{current:>3}/{total}]: {krate}"); } BuildStage::RunningBindgen => tracing::info!("Running wasm-bindgen..."), BuildStage::CopyingAssets { current, total, path, } => { tracing::info!( "Copying asset ({}/{total}): {}", current + 1, path.display() ); } BuildStage::Bundling => tracing::info!("Bundling app..."), BuildStage::CodeSigning => tracing::info!("Code signing app..."), _ => {} } tracing::info!(json = %StructuredOutput::BuildUpdate { stage: stage.clone() }); } BuilderUpdate::CompilerMessage { message } => { tracing::info!(json = %StructuredOutput::RustcOutput { message: message.clone() }, %message); } BuilderUpdate::BuildReady { bundle } => { tracing::debug!(json = %StructuredOutput::BuildFinished { artifacts: bundle.clone().into_structured_output(), }); return Ok(bundle); } BuilderUpdate::BuildFailed { err } => { // Flush remaining compiler messages while let Ok(Some(msg)) = self.rx.try_next() { if let BuilderUpdate::CompilerMessage { message } = msg { tracing::info!(json = %StructuredOutput::RustcOutput { message: message.clone() }, %message); } } return Err(err); } BuilderUpdate::StdoutReceived { .. } => {} BuilderUpdate::StderrReceived { .. } => {} BuilderUpdate::ProcessExited { .. } => {} BuilderUpdate::ProcessWaitFailed { .. } => {} } } } /// Create a list of environment variables that the child process will use /// /// We try to emulate running under `cargo` as much as possible, carrying over vars like `CARGO_MANIFEST_DIR`. /// Previously, we didn't want to emulate this behavior, but now we do in order to be a good /// citizen of the Rust ecosystem and allow users to use `cargo` features like `CARGO_MANIFEST_DIR`. /// /// Note that Dioxus apps *should not* rely on this vars being set, but libraries like Bevy do. 
pub(crate) fn child_environment_variables( &mut self, devserver_ip: Option, start_fullstack_on_address: Option, always_on_top: bool, build_id: BuildId, ) -> Vec<(String, String)> { let krate = &self.build; // Set the env vars that the clients will expect // These need to be stable within a release version (ie 0.6.0) let mut envs: Vec<(String, String)> = vec![ ( dioxus_cli_config::CLI_ENABLED_ENV.into(), "true".to_string(), ), ( dioxus_cli_config::APP_TITLE_ENV.into(), krate.config.web.app.title.clone(), ), ( dioxus_cli_config::SESSION_CACHE_DIR.into(), self.build.session_cache_dir().display().to_string(), ), (dioxus_cli_config::BUILD_ID.into(), build_id.0.to_string()), ( dioxus_cli_config::ALWAYS_ON_TOP_ENV.into(), always_on_top.to_string(), ), ]; if let Some(devserver_ip) = devserver_ip { envs.push(( dioxus_cli_config::DEVSERVER_IP_ENV.into(), devserver_ip.ip().to_string(), )); envs.push(( dioxus_cli_config::DEVSERVER_PORT_ENV.into(), devserver_ip.port().to_string(), )); } if verbosity_or_default().verbose { envs.push(("RUST_BACKTRACE".into(), "1".to_string())); } if let Some(base_path) = krate.trimmed_base_path() { envs.push(( dioxus_cli_config::ASSET_ROOT_ENV.into(), base_path.to_string(), )); } if let Some(env_filter) = env::var_os("RUST_LOG").and_then(|e| e.into_string().ok()) { envs.push(("RUST_LOG".into(), env_filter)); } // Launch the server if we were given an address to start it on, and the build includes a server. After we // start the server, consume its stdout/stderr. if let Some(addr) = start_fullstack_on_address { envs.push(( dioxus_cli_config::SERVER_IP_ENV.into(), addr.ip().to_string(), )); envs.push(( dioxus_cli_config::SERVER_PORT_ENV.into(), addr.port().to_string(), )); } // If there's any CARGO vars in the rustc_wrapper files, push those too. // Read from any per-crate args file in the directory (they all share the same CARGO_ envs). 
if let Ok(entries) = std::fs::read_dir(self.build.rustc_wrapper_args_dir()) { for entry in entries.flatten() { let path = entry.path(); if path.extension().is_some_and(|e| e == "json") { if let Ok(contents) = std::fs::read_to_string(&path) { if let Ok(args) = serde_json::from_str::(&contents) { for (key, value) in args.envs { if key.starts_with("CARGO_") { envs.push((key, value)); } } break; // Only need one file for CARGO_ env vars } } } } } envs } #[allow(clippy::too_many_arguments)] pub(crate) async fn open( &mut self, devserver_ip: SocketAddr, open_address: Option, start_fullstack_on_address: Option, open_browser: bool, always_on_top: bool, build_id: BuildId, args: &[String], ) -> Result<()> { let envs = self.child_environment_variables( Some(devserver_ip), start_fullstack_on_address, always_on_top, build_id, ); // We try to use stdin/stdout to communicate with the app match self.build.bundle { // Unfortunately web won't let us get a proc handle to it (to read its stdout/stderr) so instead // use use the websocket to communicate with it. I wish we could merge the concepts here, // like say, opening the socket as a subprocess, but alas, it's simpler to do that somewhere else. BundleFormat::Web => { // Only the first build we open the web app, after that the user knows it's running if open_browser { self.open_web(open_address.unwrap_or(devserver_ip)); } } BundleFormat::Ios => { if let Some(device) = self.build.device_name.to_owned() { self.open_ios_device(&device).await? } else { self.open_ios_sim(envs).await? 
} } BundleFormat::Android => { self.open_android(false, devserver_ip, envs, self.build.device_name.clone()) .await?; } // These are all just basically running the main exe, but with slightly different resource dir paths BundleFormat::Server | BundleFormat::MacOS | BundleFormat::Windows | BundleFormat::Linux => self.open_with_main_exe(envs, args)?, }; self.builds_opened += 1; Ok(()) } /// Gracefully kill the process and all of its children /// /// Uses the `SIGTERM` signal on unix and `taskkill` on windows. /// This complex logic is necessary for things like window state preservation to work properly. /// /// Also wipes away the entropy executables if they exist. pub(crate) async fn soft_kill(&mut self) { use futures_util::FutureExt; // Kill any running executables on Windows let Some(mut process) = self.child.take() else { return; }; let Some(pid) = process.id() else { _ = process.kill().await; return; }; // on unix, we can send a signal to the process to shut down #[cfg(unix)] { _ = Command::new("kill") .args(["-s", "TERM", &pid.to_string()]) .spawn(); } // on windows, use the `taskkill` command #[cfg(windows)] { _ = Command::new("taskkill") .args(["/PID", &pid.to_string()]) .spawn(); } // join the wait with a 100ms timeout futures_util::select! 
{ _ = process.wait().fuse() => {} _ = tokio::time::sleep(std::time::Duration::from_millis(1000)).fuse() => {} }; // Wipe out the entropy executables if they exist if let Some(entropy_app_exe) = self.entropy_app_exe.take() { _ = std::fs::remove_file(entropy_app_exe); } // Abort the spawn handle monitoring task if it exists if let Some(spawn_handle) = self.spawn_handle.take() { spawn_handle.abort(); } } pub(crate) async fn hotpatch( &mut self, res: &BuildArtifacts, cache: &HotpatchModuleCache, ) -> Result { let original = self.build.main_exe(); let new = self.build.patch_exe(res.time_start); let asset_dir = self.build.asset_dir(); // Hotpatch asset!() calls for bundled in res.assets.unique_assets() { let original_artifacts = self .artifacts .as_mut() .context("No artifacts to hotpatch")?; if original_artifacts.assets.contains(bundled) { continue; } // If this is a new asset, insert it into the artifacts so we can track it when hot reloading original_artifacts.assets.insert_asset(*bundled); let from = dunce::canonicalize(PathBuf::from(bundled.absolute_source_path()))?; let to = asset_dir.join(bundled.bundled_path()); tracing::debug!("Copying asset from patch: {}", from.display()); if let Err(e) = dioxus_cli_opt::process_file_to(bundled.options(), &from, &to) { tracing::error!("Failed to copy asset: {e}"); continue; } // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext` if self.build.bundle == BundleFormat::Android { let bundled_name = PathBuf::from(bundled.bundled_path()); _ = self.copy_file_to_android_tmp(&from, &bundled_name).await; } } // Make sure to add `include!()` calls to the watcher so we can watch changes as they evolve for file in res.depinfo.files.iter() { let original_artifacts = self .artifacts .as_mut() .context("No artifacts to hotpatch")?; if !original_artifacts.depinfo.files.contains(file) { original_artifacts.depinfo.files.push(file.clone()); } } tracing::debug!("Patching 
{} -> {}", original.display(), new.display()); let mut jump_table = self.build.create_jump_table(&new, cache)?; // If it's android, we need to copy the assets to the device and then change the location of the patch if self.build.bundle == BundleFormat::Android { jump_table.lib = self .copy_file_to_android_tmp(&new, &(PathBuf::from(new.file_name().unwrap()))) .await?; } let changed_files = match &res.mode { BuildMode::Thin { changed_files, .. } => changed_files.clone(), _ => vec![], }; use crate::styles::{GLOW_STYLE, NOTE_STYLE}; let changed_file = changed_files.first().unwrap(); tracing::info!( "Hot-patching: {NOTE_STYLE}{}{NOTE_STYLE:#} took {GLOW_STYLE}{:?}ms{GLOW_STYLE:#}", changed_file .strip_prefix(self.build.workspace_dir()) .unwrap_or(changed_file) .display(), SystemTime::now() .duration_since(res.time_start) .unwrap() .as_millis() ); // Commit this patch self.patches.push(jump_table.clone()); // Sync the updated object cache back from the build artifacts. self.object_cache = res.object_cache.clone(); Ok(jump_table) } /// Hotreload an asset in the running app. /// /// This will modify the build dir in place! Be careful! We generally assume you want all bundles /// to reflect the latest changes, so we will modify the bundle. /// /// However, not all platforms work like this, so we might also need to update a separate asset /// dir that the system simulator might be providing. We know this is the case for ios simulators /// and haven't yet checked for android. /// /// This will return the bundled name of the assets such that we can send it to the clients letting /// them know what to reload. It's not super important that this is robust since most clients will /// kick all stylsheets without necessarily checking the name. pub(crate) async fn hotreload_bundled_assets( &self, changed_file: &PathBuf, ) -> Option> { let artifacts = self.artifacts.as_ref()?; // Use the build dir if there's no runtime asset dir as the override. 
For the case of ios apps, // we won't actually be using the build dir. let asset_dir = match self.runtime_asset_dir.as_ref() { Some(dir) => dir.to_path_buf().join("assets/"), None => self.build.asset_dir(), }; // Canonicalize the path as Windows may use long-form paths "\\\\?\\C:\\". let changed_file = dunce::canonicalize(changed_file) .inspect_err(|e| tracing::debug!("Failed to canonicalize hotreloaded asset: {e}")) .ok()?; // The asset might've been renamed thanks to the manifest, let's attempt to reload that too let resources = artifacts.assets.get_assets_for_source(&changed_file)?; let mut bundled_names = Vec::new(); for resource in resources { let output_path = asset_dir.join(resource.bundled_path()); tracing::debug!("Hotreloading asset {changed_file:?} in target {asset_dir:?}"); // Remove the old asset if it exists _ = std::fs::remove_file(&output_path); // And then process the asset with the options into the **old** asset location. If we recompiled, // the asset would be in a new location because the contents and hash have changed. Since we are // hotreloading, we need to use the old asset location it was originally written to. let options = *resource.options(); let res = process_file_to(&options, &changed_file, &output_path); let bundled_name = PathBuf::from(resource.bundled_path()); if let Err(e) = res { tracing::debug!("Failed to hotreload asset {e}"); } // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext` if self.build.bundle == BundleFormat::Android { _ = self .copy_file_to_android_tmp(&changed_file, &bundled_name) .await; } bundled_names.push(bundled_name); } Some(bundled_names) } /// Copy this file to the tmp folder on the android device, returning the path to the copied file /// /// When we push patches (.so), the runtime will dlopen the file from the tmp folder by first copying /// it to shared memory. 
This is a workaround since not all android devices will be rooted and we /// can't drop the file into the `/data/data/com.org.app/lib/` directory. pub(crate) async fn copy_file_to_android_tmp( &self, changed_file: &Path, bundled_name: &Path, ) -> Result { let target = dioxus_cli_config::android_session_cache_dir().join(bundled_name); tracing::debug!("Pushing asset to device: {target:?}"); let res = Command::new(&self.build.workspace.android_tools()?.adb) .arg("push") .arg(changed_file) .arg(&target) .output() .await .context("Failed to push asset to device"); if let Err(e) = res { tracing::debug!("Failed to push asset to device: {e}"); } Ok(target) } /// Open the native app simply by running its main exe /// /// Eventually, for mac, we want to run the `.app` with `open` to fix issues with `dylib` paths, /// but for now, we just run the exe directly. Very few users should be caring about `dylib` search /// paths right now, but they will when we start to enable things like swift integration. /// /// Server/liveview/desktop are all basically the same, though fn open_with_main_exe(&mut self, envs: Vec<(String, String)>, args: &[String]) -> Result<()> { let main_exe = self.app_exe(); tracing::debug!("Opening app with main exe: {main_exe:?}"); let mut child = Command::new(main_exe) .args(args) .envs(envs) .stderr(Stdio::piped()) .stdout(Stdio::piped()) .kill_on_drop(true) .spawn()?; let stdout = BufReader::new(child.stdout.take().unwrap()); let stderr = BufReader::new(child.stderr.take().unwrap()); self.stdout = Some(stdout.lines()); self.stderr = Some(stderr.lines()); self.child = Some(child); Ok(()) } /// Open the web app by opening the browser to the given address. /// Check if we need to use https or not, and if so, add the protocol. /// Go to the basepath if that's set too. 
fn open_web(&self, address: SocketAddr) { let base_path = self.build.base_path(); let https = self.build.config.web.https.enabled.unwrap_or_default(); let protocol = if https { "https" } else { "http" }; let base_path = match base_path { Some(base_path) => format!("/{}", base_path.trim_matches('/')), None => "".to_owned(), }; _ = open::that_detached(format!("{protocol}://{address}{base_path}")); } /// Use `xcrun` to install the app to the simulator /// With simulators, we're free to basically do anything, so we don't need to do any fancy codesigning /// or entitlements, or anything like that. /// /// However, if there's no simulator running, this *might* fail. /// /// TODO(jon): we should probably check if there's a simulator running before trying to install, /// and open the simulator if we have to. async fn open_ios_sim(&mut self, envs: Vec<(String, String)>) -> Result<()> { tracing::debug!("Installing app to simulator {:?}", self.build.root_dir()); let res = Command::new("xcrun") .arg("simctl") .arg("install") .arg("booted") .arg(self.build.root_dir()) .output() .await?; tracing::debug!("Installed app to simulator with exit code: {res:?}"); // Remap the envs to the correct simctl env vars // iOS sim lets you pass env vars but they need to be in the format "SIMCTL_CHILD_XXX=XXX" let ios_envs = envs .iter() .map(|(k, v)| (format!("SIMCTL_CHILD_{k}"), v.clone())); let mut child = Command::new("xcrun") .arg("simctl") .arg("launch") .arg("--console") .arg("booted") .arg(self.build.bundle_identifier()) .envs(ios_envs) .stderr(Stdio::piped()) .stdout(Stdio::piped()) .kill_on_drop(true) .spawn()?; let stdout = BufReader::new(child.stdout.take().unwrap()); let stderr = BufReader::new(child.stderr.take().unwrap()); self.stdout = Some(stdout.lines()); self.stderr = Some(stderr.lines()); self.child = Some(child); Ok(()) } /// Upload the app to the device and launch it async fn open_ios_device(&mut self, device_query: &str) -> Result<()> { let device_query = 
device_query.to_string(); let root_dir = self.build.root_dir().clone(); let application_id = self.build.bundle_identifier(); self.spawn_handle = Some(tokio::task::spawn(async move { // 1. Find an active device let device_uuid = Self::get_ios_device_uuid(&device_query).await?; tracing::info!("Uploading app to iOS device, this might take a while..."); // 2. Install the app to the device Self::install_ios_app(&device_uuid, &root_dir).await?; // 3. Launch the app into the background, paused Self::launch_ios_app_paused(&device_uuid, &application_id).await?; Result::Ok(()) as Result<()> })); Ok(()) } /// Parse the xcrun output to get the device based on its name and connected state. /// /// ```json, ignore /// "connectionProperties" : { /// "authenticationType" : "manualPairing", /// "isMobileDeviceOnly" : false, /// "lastConnectionDate" : "2025-08-15T01:46:43.182Z", /// "pairingState" : "paired", /// "potentialHostnames" : [ /// "00008130-0002058401E8001C.coredevice.local", /// "67054C13-C6C8-5AC2-B967-24C040AD3F17.coredevice.local" /// ], /// "transportType" : "localNetwork", /// "tunnelState" : "disconnected", /// "tunnelTransportProtocol" : "tcp" /// }, /// "deviceProperties" : { /// "bootedFromSnapshot" : true, /// "bootedSnapshotName" : "com.apple.os.update-A771E2B3E8C155D1B1188896B3247851B64737ACDE91A5B6F6C1F03A541406AA", /// "ddiServicesAvailable" : false, /// "developerModeStatus" : "enabled", /// "hasInternalOSBuild" : false, /// "name" : "Jon’s iPhone (2)", /// "osBuildUpdate" : "22G86", /// "osVersionNumber" : "18.6", /// "rootFileSystemIsWritable" : false /// } /// ``` async fn get_ios_device_uuid(device_name_query: &str) -> Result { use serde_json::Value; let tmpfile = tempfile::NamedTempFile::new() .context("Failed to create temporary file for device list")?; Command::new("xcrun") .args([ "devicectl".to_string(), "list".to_string(), "devices".to_string(), "--json-output".to_string(), tmpfile.path().to_str().unwrap().to_string(), ]) .output() .await?; let 
json: Value = serde_json::from_str(&std::fs::read_to_string(tmpfile.path())?) .context("Failed to parse xcrun output")?; let devices = json .get("result") .context("Failed to parse xcrun output")? .get("devices") .context("Failed to parse xcrun output")? .as_array() .context("Failed to get devices from xcrun output")?; // by default, we just pick the first available device and then look for better fits. let mut device_idx = 0; match device_name_query.is_empty() { // If the user provided a query, then we look through the device list looking for the right one. // This searches both UUIDs and names, making it possible to paste an ID or a name. false => { use nucleo::{chars, Config, Matcher, Utf32Str}; let normalize = |c: char| chars::to_lower_case(chars::normalize(c)); let mut matcher = Matcher::new(Config::DEFAULT); let mut best_score = 0; let needle = device_name_query.chars().map(normalize).collect::(); for (idx, device) in devices.iter().enumerate() { let device_name = device .get("deviceProperties") .and_then(|f| f.get("name")) .and_then(|n| n.as_str()) .unwrap_or_default(); let device_uuid = device .get("identifier") .and_then(|n| n.as_str()) .unwrap_or_default(); let haystack = format!("{device_name} {device_uuid}") .chars() .map(normalize) .collect::(); let name_score = matcher.fuzzy_match( Utf32Str::Ascii(haystack.as_bytes()), Utf32Str::Ascii(needle.as_bytes()), ); if let Some(score) = name_score { if score > best_score { best_score = score; device_idx = idx; } } } if best_score == 0 { tracing::warn!( "No device found matching query: {device_name_query}. Using first available device." 
); } } // If the query is empty, then we just find the first connected/available device // This is somewhat based on the bundle format, since we don't want to accidentally upload // iOS apps to watches/tvs true => { for (idx, device) in devices.iter().enumerate() { let is_paired = device .get("connectionProperties") .and_then(|g| g.get("pairingState")) .map(|s| s.as_str() == Some("paired")) .unwrap_or(false); let is_ios_device = matches!( device .get("hardwareProperties") .and_then(|h| h.get("deviceType")) .and_then(|s| s.as_str()), Some("iPhone") | Some("iPad") | Some("iPod") ); let is_available = device .get("connectionProperties") .and_then(|c| c.get("tunnelState")) .and_then(|s| s.as_str()) != Some("unavailable"); if is_paired && is_ios_device && is_available { device_idx = idx; break; } } } } devices .get(device_idx) .context("No devices found")? .get("identifier") .and_then(|id| id.as_str()) .map(|s| s.to_string()) .context("Failed to extract device UUID") } async fn install_ios_app(device_uuid: &str, app_path: &Path) -> Result<()> { let tmpfile = tempfile::NamedTempFile::new() .context("Failed to create temporary file for device list")?; // xcrun devicectl device install app --device --path --json-output let output = Command::new("xcrun") .args([ "devicectl", "device", "install", "app", "--device", device_uuid, &app_path.display().to_string(), "--json-output", ]) .arg(tmpfile.path()) .output() .await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); if stderr.contains("DeviceLocked") || stderr.contains("device is locked") { bail!( "Failed to install app: your device is locked.\n\ Unlock your iPhone/iPad and try again." 
); } if stderr.contains("cannot be installed on this device") || stderr.contains("0xe8008012") { bail!( "Failed to install app: your device is not registered in the provisioning profile.\n\n\ Your device UDID needs to be added to your Apple Developer account and the \ provisioning profile regenerated.\n\n\ To fix this:\n \ 1. Accept the latest Program License Agreement at:\n \ https://developer.apple.com/account\n \ 2. Register your device at:\n \ https://developer.apple.com/account/resources/devices\n \ 3. Regenerate your provisioning profile to include the new device\n \ 4. Or open any project in Xcode, select your device, and build —\n \ Xcode will update the profile automatically\n\n\ Raw error: {stderr}" ); } if stderr.contains("provisioning profile") || stderr.contains("ApplicationVerificationFailed") || stderr.contains("code signature") { bail!( "Failed to install app: code signing error.\n\ A valid provisioning profile was not found for this app.\n\n\ To fix this:\n \ 1. Accept the latest Program License Agreement at:\n \ https://developer.apple.com/account\n \ 2. Open the project in Xcode, select your device, and build once —\n \ Xcode will set up signing and provisioning automatically\n \ 3. Ensure your device is registered in your Apple Developer account\n\n\ Raw error: {stderr}" ); } bail!("Failed to install app to device {device_uuid}: {stderr}"); } Ok(()) } async fn launch_ios_app_paused(device_uuid: &str, application_id: &str) -> Result<()> { let tmpfile = tempfile::NamedTempFile::new() .context("Failed to create temporary file for device list")?; let output = Command::new("xcrun") .args([ "devicectl", "device", "process", "launch", "--no-activate", "--verbose", "--device", device_uuid, application_id, "--json-output", ]) .arg(tmpfile.path()) .output() .await?; if !output.status.success() { bail!("Failed to launch app: {output:?}"); } let json: serde_json::Value = serde_json::from_str(&std::fs::read_to_string(tmpfile.path())?) 
.context("Failed to parse xcrun output")?; let status_pid = json["result"]["process"]["processIdentifier"] .as_u64() .context("Failed to extract process identifier")?; let output = Command::new("xcrun") .args([ "devicectl", "device", "process", "resume", "--device", device_uuid, "--pid", &status_pid.to_string(), ]) .output() .await?; if !output.status.success() { bail!("Failed to resume app: {output:?}"); } Ok(()) } /// Launch the Android simulator and deploy the application. /// /// This function handles the process of starting the Android simulator, installing the APK, /// forwarding the development server port, and launching the application on the simulator. /// /// The following `adb` commands are executed: /// /// 1. **Enable Root Access**: /// - `adb root`: Enables root access on the Android simulator, allowing for advanced operations like pushing files to restricted directories. /// /// 2. **Port Forwarding**: /// - `adb reverse tcp: tcp:`: Forwards the development server port from the host /// machine to the Android simulator, enabling communication between the app and the dev server. /// /// 3. **APK Installation**: /// - `adb install -r `: Installs the APK onto the Android simulator. The `-r` flag /// ensures that any existing installation of the app is replaced. /// /// 4. **Environment Variables**: /// - Writes environment variables to a `.env` file in the session cache directory. /// - `adb push `: Pushes the `.env` file to the Android device /// to configure runtime environment variables for the app. /// /// 5. **App Launch**: /// - `adb shell am start -n /`: Launches the app on the Android /// simulator. The `` and `` are derived from the app's configuration. /// /// # Notes /// /// - This function is asynchronous and spawns a background task to handle the simulator setup and app launch. /// - The Android tools (`adb`) must be available in the system's PATH for this function to work. 
/// - If the app fails to launch, errors are logged for debugging purposes. /// /// # Resources: /// - async fn open_android( &mut self, root: bool, devserver_socket: SocketAddr, envs: Vec<(String, String)>, device_name_query: Option, ) -> Result<()> { let apk_path = self.build.debug_apk_path(); let session_cache = self.build.session_cache_dir(); let application_id = self.build.bundle_identifier(); let adb = self.build.workspace.android_tools()?.adb.clone(); let (stdout_tx, stdout_rx) = tokio::sync::mpsc::unbounded_channel::(); // Start backgrounded since .open() is called while in the arm of the top-level match let task = tokio::task::spawn(async move { // call `adb root` so we can push patches to the device if root { if let Err(e) = Command::new(&adb).arg("root").output().await { tracing::error!("Failed to run `adb root`: {e}"); } } // Try to get the transport ID for the device in case there are multiple specified devices // All future commands should use this since its the most recent. let transport_id_args = Self::get_android_device_transport_id(&adb, device_name_query.as_deref()).await; // Wait for device to be ready let cmd = Command::new(&adb) .args(transport_id_args) .arg("wait-for-device") .arg("shell") .arg(r#"while [[ -z $(getprop sys.boot_completed) ]]; do sleep 1; done;"#) .output(); let cmd_future = cmd.fuse(); pin_mut!(cmd_future); tokio::select! 
{ _ = &mut cmd_future => {} _ = tokio::time::sleep(Duration::from_millis(50)) => { tracing::info!("Waiting for android emulator to be ready..."); _ = cmd_future.await; } } let port = devserver_socket.port(); if let Err(e) = Command::new(&adb) .arg("reverse") .arg(format!("tcp:{port}")) .arg(format!("tcp:{port}")) .output() .await { tracing::error!("failed to forward port {port}: {e}"); } // Install // adb install -r app-debug.apk let res = Command::new(&adb) .arg("install") .arg("-r") .arg(apk_path) .output() .await?; let std_err = String::from_utf8_lossy(&res.stderr); if !std_err.is_empty() { tracing::error!("Failed to install apk with `adb`: {std_err}"); } // Clear the session cache dir on the device Command::new(&adb) .arg("shell") .arg("rm") .arg("-rf") .arg(dioxus_cli_config::android_session_cache_dir()) .output() .await?; // Write the env vars to a .env file in our session cache let env_file = session_cache.join(".env"); _ = std::fs::write( &env_file, envs.iter() .map(|(key, value)| format!("{key}={value}")) .collect::>() .join("\n"), ); // Push the env file to the device Command::new(&adb) .arg("push") .arg(env_file) .arg(dioxus_cli_config::android_session_cache_dir().join(".env")) .output() .await?; // eventually, use the user's MainActivity, not our MainActivity // adb shell am start -n dev.dioxus.main/dev.dioxus.main.MainActivity let activity_name = format!("{application_id}/dev.dioxus.main.MainActivity"); let res = Command::new(&adb) .arg("shell") .arg("am") .arg("start") .arg("-n") .arg(activity_name) .output() .await?; let std_err = String::from_utf8_lossy(res.stderr.trim_ascii()); if !std_err.is_empty() { tracing::error!("Failed to start app with `adb`: {std_err}"); } // Try to get the transport ID for the device let transport_id_args = Self::get_android_device_transport_id(&adb, device_name_query.as_deref()).await; // Get the app's PID with retries // Retry up to 10 times (10 seconds total) since app launch is asynchronous let mut pid: Option = None; 
for attempt in 1..=10 { match Self::get_android_app_pid(&adb, &application_id, &transport_id_args).await { Ok(p) => { pid = Some(p); break; } Err(_) if attempt < 10 => { tracing::debug!( "App PID not found yet, retrying in 1 second... (attempt {}/10)", attempt ); tokio::time::sleep(Duration::from_secs(1)).await; } Err(e) => { return Err(e).context( "Failed to get app PID after 10 attempts - app may not have started", ); } } } let pid = pid.context("Failed to get app PID")?; // Spawn logcat with filtering // By default: show only RustStdoutStderr (app Rust logs) and fatal errors // With tracing enabled: show all logs from the app process // Note: We always capture at DEBUG level, then filter in Rust based on trace flag let mut child = Command::new(&adb) .args(&transport_id_args) .arg("logcat") .arg("-v") .arg("brief") .arg("--pid") .arg(&pid) .arg("*:D") // Capture all logs at DEBUG level (filtered in Rust) .stdout(Stdio::piped()) .stderr(Stdio::null()) .kill_on_drop(true) .spawn()?; let stdout = child.stdout.take().unwrap(); let mut reader = BufReader::new(stdout).lines(); while let Ok(Some(line)) = reader.next_line().await { _ = stdout_tx.send(line); } Ok::<(), Error>(()) }); self.spawn_handle = Some(task); self.adb_logcat_stdout = Some(UnboundedReceiverStream::new(stdout_rx)); Ok(()) } fn make_entropy_path(exe: &PathBuf) -> PathBuf { let id = uuid::Uuid::new_v4(); let name = id.to_string(); let some_entropy = name.split('-').next().unwrap(); // Split up the exe into the file stem and extension let extension = exe.extension().unwrap_or_default(); let file_stem = exe.file_stem().unwrap().to_str().unwrap(); // Make a copy of the server exe with a new name let entropy_server_exe = exe .with_file_name(format!("{}-{}", file_stem, some_entropy)) .with_extension(extension); std::fs::copy(exe, &entropy_server_exe).unwrap(); entropy_server_exe } fn app_exe(&mut self) -> PathBuf { let mut main_exe = self.build.main_exe(); // The requirement here is based on the platform, 
not necessarily our current architecture. let requires_entropy = match self.build.bundle { // When running "bundled", we don't need entropy BundleFormat::Web | BundleFormat::MacOS | BundleFormat::Ios | BundleFormat::Android => { false } // But on platforms that aren't running as "bundled", we do. BundleFormat::Windows | BundleFormat::Linux | BundleFormat::Server => true, }; if requires_entropy || crate::devcfg::should_force_entropy() { // If we already have an entropy app exe, return it - this is useful for re-opening the same app if let Some(existing_app_exe) = self.entropy_app_exe.clone() { return existing_app_exe; } let entropy_app_exe = Self::make_entropy_path(&main_exe); self.entropy_app_exe = Some(entropy_app_exe.clone()); main_exe = entropy_app_exe; } main_exe } fn complete_compile(&mut self) { if self.compile_end.is_none() { self.compiled_crates = self.expected_crates; self.compile_end = Some(Instant::now()); } } /// Get the total duration of the build, if all stages have completed pub(crate) fn total_build_time(&self) -> Option { Some(self.compile_duration()? + self.bundle_duration()?) 
} pub(crate) fn compile_duration(&self) -> Option { Some( self.compile_end .unwrap_or_else(Instant::now) .duration_since(self.compile_start?), ) } pub(crate) fn bundle_duration(&self) -> Option { Some( self.bundle_end .unwrap_or_else(Instant::now) .duration_since(self.bundle_start?), ) } /// Return a number between 0 and 1 representing the progress of the app build pub(crate) fn compile_progress(&self) -> f64 { self.compiled_crates as f64 / self.expected_crates as f64 } pub(crate) fn bundle_progress(&self) -> f64 { self.bundling_progress } pub(crate) fn is_finished(&self) -> bool { match self.stage { BuildStage::Success => true, BuildStage::Failed => true, BuildStage::Aborted => true, BuildStage::Restarting => false, _ => false, } } /// Check if the queued build is blocking hotreloads pub(crate) fn can_receive_hotreloads(&self) -> bool { matches!(&self.stage, BuildStage::Success | BuildStage::Failed) } pub(crate) async fn open_debugger(&mut self, server: &WebServer) -> Result<()> { let url = match self.build.bundle { BundleFormat::MacOS | BundleFormat::Windows | BundleFormat::Linux | BundleFormat::Server => { let Some(Some(pid)) = self.child.as_mut().map(|f| f.id()) else { tracing::warn!("No process to attach debugger to"); return Ok(()); }; format!( "vscode://vadimcn.vscode-lldb/launch/config?{{'request':'attach','pid':{pid}}}" ) } BundleFormat::Web => { // code --open-url "vscode://DioxusLabs.dioxus/debugger?uri=http://127.0.0.1:8080" // todo - debugger could open to the *current* page afaik we don't have a way to have that info let address = server.devserver_address(); let base_path = self.build.base_path(); let https = self.build.config.web.https.enabled.unwrap_or_default(); let protocol = if https { "https" } else { "http" }; let base_path = match base_path { Some(base_path) => format!("/{}", base_path.trim_matches('/')), None => "".to_owned(), }; format!("vscode://DioxusLabs.dioxus/debugger?uri={protocol}://{address}{base_path}") } BundleFormat::Ios => { let 
Some(pid) = self.pid else { tracing::warn!("No process to attach debugger to"); return Ok(()); }; format!( "vscode://vadimcn.vscode-lldb/launch/config?{{'request':'attach','pid':{pid}}}" ) } // https://stackoverflow.com/questions/53733781/how-do-i-use-lldb-to-debug-c-code-on-android-on-command-line/64997332#64997332 // https://android.googlesource.com/platform/development/+/refs/heads/main/scripts/gdbclient.py // run lldbserver on the device and then connect // // # TODO: https://code.visualstudio.com/api/references/vscode-api#debug and // # https://code.visualstudio.com/api/extension-guides/debugger-extension and // # https://github.com/vadimcn/vscode-lldb/blob/6b775c439992b6615e92f4938ee4e211f1b060cf/extension/pickProcess.ts#L6 // // res = { // "name": "(lldbclient.py) Attach {} (port: {})".format(binary_name.split("/")[-1], port), // "type": "lldb", // "request": "custom", // "relativePathBase": root, // "sourceMap": { "/b/f/w" : root, '': root, '.': root }, // "initCommands": ['settings append target.exec-search-paths {}'.format(' '.join(solib_search_path))], // "targetCreateCommands": ["target create {}".format(binary_name), // "target modules search-paths add / {}/".format(sysroot)], // "processCreateCommands": ["gdb-remote {}".format(str(port))] // } // // https://github.com/vadimcn/codelldb/issues/213 // // lots of pain to figure this out: // // (lldb) image add target/dx/tw6/debug/android/app/app/src/main/jniLibs/arm64-v8a/libdioxusmain.so // (lldb) settings append target.exec-search-paths target/dx/tw6/debug/android/app/app/src/main/jniLibs/arm64-v8a/libdioxusmain.so // (lldb) process handle SIGSEGV --pass true --stop false --notify true (otherwise the java threads cause crash) // BundleFormat::Android => { // adb push ./sdk/ndk/29.0.13113456/toolchains/llvm/prebuilt/darwin-x86_64/lib/clang/20/lib/linux/aarch64/lldb-server /tmp // adb shell "/tmp/lldb-server --server --listen ..." 
// "vscode://vadimcn.vscode-lldb/launch/config?{{'request':'connect','port': {}}}", // format!( // "vscode://vadimcn.vscode-lldb/launch/config?{{'request':'attach','pid':{pid}}}" // ) let tools = &self.build.workspace.android_tools()?; // get the pid of the app let pid = Command::new(&tools.adb) .arg("shell") .arg("pidof") .arg(self.build.bundle_identifier()) .output() .await .ok() .and_then(|output| String::from_utf8(output.stdout).ok()) .and_then(|s| s.trim().parse::().ok()) .unwrap(); // copy the lldb-server to the device let lldb_server = tools .android_tools_dir() .parent() .unwrap() .join("lib") .join("clang") .join("20") .join("lib") .join("linux") .join("aarch64") .join("lldb-server"); tracing::info!("Copying lldb-server to device: {lldb_server:?}"); _ = Command::new(&tools.adb) .arg("push") .arg(lldb_server) .arg("/tmp/lldb-server") .output() .await; // Forward requests on 10086 to the device _ = Command::new(&tools.adb) .arg("forward") .arg("tcp:10086") .arg("tcp:10086") .output() .await; // start the server - running it multiple times will make the subsequent ones fail (which is fine) _ = Command::new(&tools.adb) .arg("shell") .arg(r#"cd /tmp && ./lldb-server platform --server --listen '*:10086'"#) .kill_on_drop(false) .stdin(Stdio::null()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn(); let program_path = self.build.main_exe(); format!( r#"vscode://vadimcn.vscode-lldb/launch/config?{{ 'name':'Attach to Android', 'type':'lldb', 'request':'attach', 'pid': '{pid}', 'processCreateCommands': [ 'platform select remote-android', 'platform connect connect://localhost:10086', 'settings set target.inherit-env false', 'settings set target.inline-breakpoint-strategy always', 'settings set target.process.thread.step-avoid-regexp \"JavaBridge|JDWP|Binder|ReferenceQueueDaemon\"', 'process handle SIGSEGV --pass true --stop false --notify true"', 'settings append target.exec-search-paths {program_path}', 'attach --pid {pid}', 'continue' ] }}"#, program_path = 
program_path.display(), ) .lines() .map(|line| line.trim()) .join("") } }; tracing::info!("Opening debugger for [{}]: {url}", self.build.bundle); _ = tokio::process::Command::new("code") .arg("--open-url") .arg(url) .spawn(); Ok(()) } async fn get_android_device_transport_id( adb: &PathBuf, device_name_query: Option<&str>, ) -> Vec { // If there are multiple devices, we pick the one matching the query let mut device_specifier_args = vec![]; if let Some(device_name_query) = device_name_query { if let Ok(res) = Command::new(adb).arg("devices").arg("-l").output().await { let devices = String::from_utf8_lossy(&res.stdout); let mut best_score = 0; let mut device_identifier = "".to_string(); use nucleo::{chars, Config, Matcher, Utf32Str}; let mut matcher = Matcher::new(Config::DEFAULT); let normalize = |c: char| chars::to_lower_case(chars::normalize(c)); let needle = device_name_query.chars().map(normalize).collect::>(); for line in devices.lines() { let device_name = line.split_whitespace().next().unwrap_or(""); let Some(transport_id) = line .split_whitespace() .find(|s| s.starts_with("transport_id:")) .map(|s| s.trim_start_matches("transport_id:")) else { continue; }; let device_name = device_name.chars().map(normalize).collect::>(); let score = matcher .fuzzy_match(Utf32Str::Unicode(&device_name), Utf32Str::Unicode(&needle)); if let Some(score) = score { if score > best_score { best_score = score; device_identifier = transport_id.to_string(); } } } if best_score != 0 { device_specifier_args.push("-t".to_string()); device_specifier_args.push(device_identifier.to_string()); } } if device_specifier_args.is_empty() { tracing::warn!( "No device found matching query: {device_name_query}. Using default transport ID." 
); } } device_specifier_args } /// Get the PID of the running Android app async fn get_android_app_pid( adb: &Path, application_id: &str, transport_id_args: &[String], ) -> Result { let output = Command::new(adb) .args(transport_id_args) .arg("shell") .arg("pidof") .arg(application_id) .output() .await?; let pid = String::from_utf8(output.stdout)?.trim().to_string(); if pid.is_empty() { anyhow::bail!("App process not found - may not have started yet"); } Ok(pid) } } ================================================ FILE: packages/cli/src/build/cache.rs ================================================ //! Object file cache for workspace hotpatching. //! //! Maintains the latest `.rcgu.o` files for each crate in the cumulative `modified_crates` set. //! Used for accumulated relinking: combining objects from all modified crates into a single //! patch dylib. //! //! Objects are stored on disk under `session_cache_dir/object_cache/{crate_name}/` so they //! persist across patches without holding file contents in memory. The session cache dir //! lives in `/tmp/` and is cleaned up by the OS. //! //! - **Dep crates:** objects are extracted from their rlib in `target/deps/`. //! - **Tip crate:** objects are copied from linker arg paths since incremental compilation //! overwrites them in place. use std::collections::HashMap; use std::io::Read; use std::path::{Path, PathBuf}; /// Cache of compiled object files on disk, keyed by crate name. /// /// After each compilation, the cache is updated for the compiled crate by extracting /// objects to `dir/{crate_name}/`. On relink, paths from all crates in `modified_crates` /// are passed directly to the linker — no intermediate copy needed. 
#[derive(Clone, Debug, Default, PartialEq)]
pub struct ObjectCache {
    /// Root directory: `session_cache_dir/object_cache/`
    dir: PathBuf,
    /// crate_name -> object file paths on disk
    // NOTE(review): generic parameters were stripped by extraction; reconstructed from
    // `get()` returning `Option<&Vec<PathBuf>>` and the `Vec<PathBuf>` built in the cachers.
    objects: HashMap<String, Vec<PathBuf>>,
}

impl ObjectCache {
    /// Create an empty cache rooted at `session_cache_dir/object_cache/`.
    ///
    /// The directory itself is created lazily by the `cache_from_*` methods.
    pub fn new(session_cache_dir: &Path) -> Self {
        let dir = session_cache_dir.join("object_cache");
        Self {
            dir,
            objects: HashMap::new(),
        }
    }

    /// Extract `.rcgu.o` files from an rlib archive and write them to
    /// `dir/{crate_name}/`. Replaces any previously cached objects for this crate.
    pub fn cache_from_rlib(&mut self, crate_name: &str, rlib_path: &Path) -> anyhow::Result<()> {
        let crate_dir = self.dir.join(crate_name);

        // Clear previous objects for this crate so stale objects never leak into a patch.
        if crate_dir.exists() {
            std::fs::remove_dir_all(&crate_dir)?;
        }
        std::fs::create_dir_all(&crate_dir)?;

        let rlib_contents = std::fs::read(rlib_path)?;
        let mut reader = ar::Archive::new(std::io::Cursor::new(rlib_contents));
        let mut paths = Vec::new();

        while let Some(Ok(mut entry)) = reader.next_entry() {
            let name = std::str::from_utf8(entry.header().identifier())
                .unwrap_or_default()
                .to_string();

            // Skip rmeta and empty entries
            if name.ends_with(".rmeta") || entry.header().size() == 0 {
                continue;
            }

            // Only keep object files
            if !name.ends_with(".o") {
                continue;
            }

            let mut data = Vec::with_capacity(entry.header().size() as usize);
            entry.read_to_end(&mut data)?;

            let obj_path = crate_dir.join(&name);
            std::fs::write(&obj_path, &data)?;
            paths.push(obj_path);
        }

        self.objects.insert(crate_name.to_string(), paths);

        Ok(())
    }

    /// Cache tip crate objects by copying them from their filesystem paths.
    ///
    /// Tip crate `.rcgu.o` files get overwritten on recompilation, so we copy
    /// them into our cache directory for stable access.
pub fn cache_from_paths( &mut self, crate_name: &str, object_paths: &[impl AsRef], ) -> anyhow::Result<()> { let crate_dir = self.dir.join(crate_name); if crate_dir.exists() { std::fs::remove_dir_all(&crate_dir)?; } std::fs::create_dir_all(&crate_dir)?; let mut paths = Vec::with_capacity(object_paths.len()); for path in object_paths { let path = path.as_ref(); let name = path .file_name() .and_then(|n| n.to_str()) .unwrap_or_default(); let dest = crate_dir.join(name); std::fs::copy(path, &dest)?; paths.push(dest); } self.objects.insert(crate_name.to_string(), paths); Ok(()) } /// Get cached object file paths for a crate. pub fn get(&self, crate_name: &str) -> Option<&Vec> { self.objects.get(crate_name) } } ================================================ FILE: packages/cli/src/build/context.rs ================================================ //! Report progress about the build to the user. We use channels to report progress back to the CLI. use super::BuildMode; use crate::{BuildArtifacts, BuildStage, Error, TraceSrc}; use cargo_metadata::diagnostic::Diagnostic; use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender}; use serde::{Deserialize, Serialize}; use std::{path::PathBuf, process::ExitStatus}; /// The context of the build process. While the BuildRequest is a "plan" for the build, the BuildContext /// provides some dynamic configuration that is only known at runtime. For example, the Progress channel /// and the BuildMode can change while serving. /// /// The structure of this is roughly taken from cargo itself which uses a similar pattern. 
#[derive(Debug, Clone)] pub struct BuildContext { pub tx: ProgressTx, pub mode: BuildMode, pub build_id: BuildId, } pub type ProgressTx = UnboundedSender; pub type ProgressRx = UnboundedReceiver; #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] pub struct BuildId(pub(crate) usize); impl BuildId { pub const PRIMARY: Self = Self(0); pub const SECONDARY: Self = Self(1); } #[allow(clippy::large_enum_variant)] pub enum BuilderUpdate { Progress { stage: BuildStage, }, CompilerMessage { message: Diagnostic, }, /// The build completed successfully and the artifacts are ready. The artifacts are dependent on /// the build mode (fat vs thin vs base). BuildReady { bundle: BuildArtifacts, }, /// The build failed. This might be because of a compilation error, or an error internal to DX. BuildFailed { err: Error, }, /// A running process has received a stdout. /// May or may not be a complete line - do not treat it as a line. It will include a line if it is a complete line. /// /// We will poll lines and any content in a 50ms interval StdoutReceived { msg: String, }, /// A running process has received a stderr. /// May or may not be a complete line - do not treat it as a line. It will include a line if it is a complete line. /// /// We will poll lines and any content in a 50ms interval StderrReceived { msg: String, }, /// The running app (DUT) has exited and is no longer running. ProcessExited { status: ExitStatus, }, /// Waiting for the process failed. This might be because it's hung or being debugged. /// This is not the same as the process exiting, so it should just be logged but not treated as an error. ProcessWaitFailed { err: std::io::Error, }, } impl BuildContext { /// Returns true if this is a client build - basically, is this the primary build? /// We try not to duplicate work between client and server builds, like asset copying. 
pub(crate) fn is_primary_build(&self) -> bool { self.build_id == BuildId::PRIMARY } pub(crate) fn status_wasm_bindgen_start(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::RunningBindgen, }); } pub(crate) fn status_splitting_bundle(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::SplittingBundle, }); } pub(crate) fn status_start_bundle(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::Bundling, }); } pub(crate) fn status_running_gradle(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::RunningGradle, }) } pub(crate) fn status_compiling_native_plugins(&self, detail: impl Into) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::CompilingNativePlugins { detail: detail.into(), }, }); } pub(crate) fn status_codesigning(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::CodeSigning, }); } pub(crate) fn status_build_diagnostic(&self, message: Diagnostic) { _ = self .tx .unbounded_send(BuilderUpdate::CompilerMessage { message }); } pub(crate) fn status_build_error(&self, line: String) { tracing::warn!(dx_src = ?TraceSrc::Cargo, "{line}"); } pub(crate) fn status_build_message(&self, line: String) { tracing::trace!(dx_src = ?TraceSrc::Cargo, "{line}"); } pub(crate) fn status_build_progress(&self, count: usize, total: usize, name: String) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::Compiling { current: count, total, krate: name, }, }); } pub(crate) fn status_starting_build(&self, crate_count: usize) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::Starting { patch: matches!(self.mode, BuildMode::Thin { .. 
}), crate_count, }, }); } pub(crate) fn status_starting_link(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::Linking, }); } pub(crate) fn status_copied_asset( progress: &UnboundedSender, current: usize, total: usize, path: PathBuf, ) { _ = progress.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::CopyingAssets { current, total, path, }, }); } pub(crate) fn status_optimizing_wasm(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::OptimizingWasm, }); } pub(crate) fn status_hotpatching(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::Hotpatching, }); } pub(crate) fn status_installing_tooling(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::InstallingTooling, }); } pub(crate) fn status_compressing_assets(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::CompressingAssets, }); } pub(crate) fn status_extracting_assets(&self) { _ = self.tx.unbounded_send(BuilderUpdate::Progress { stage: BuildStage::ExtractingAssets, }); } } ================================================ FILE: packages/cli/src/build/ios_swift.rs ================================================ //! iOS/macOS Swift package manifest helpers and compilation. use crate::Result; use anyhow::Context; use manganis_core::SwiftPackageMetadata; use std::path::{Path, PathBuf}; use target_lexicon::{OperatingSystem, Triple}; use tokio::process::Command; /// Create a proper framework bundle from a dylib for iOS/macOS. /// /// iOS uses a flat structure while macOS uses a versioned structure. /// Both require an Info.plist for proper App Store submission. 
pub async fn create_framework_bundle( dylib_path: &Path, framework_name: &str, output_dir: &Path, target_triple: &Triple, bundle_identifier: &str, ) -> Result { let is_ios = matches!(target_triple.operating_system, OperatingSystem::IOS(_)); let min_os_version = if is_ios { "13.0" } else { "11.0" }; let framework_dir = output_dir.join(format!("{}.framework", framework_name)); // Remove existing framework if present if framework_dir.exists() { std::fs::remove_dir_all(&framework_dir)?; } if is_ios { // iOS uses flat structure: Framework.framework/FrameworkName + Info.plist std::fs::create_dir_all(&framework_dir)?; // Copy dylib as the framework executable (no extension) let exec_path = framework_dir.join(framework_name); std::fs::copy(dylib_path, &exec_path)?; // Set the install name using install_name_tool let output = Command::new("xcrun") .arg("install_name_tool") .arg("-id") .arg(format!( "@rpath/{}.framework/{}", framework_name, framework_name )) .arg(&exec_path) .output() .await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); anyhow::bail!("install_name_tool failed: {}", stderr); } // Create Info.plist let info_plist = format!( r#" CFBundleDevelopmentRegion en CFBundleExecutable {framework_name} CFBundleIdentifier {bundle_identifier} CFBundleInfoDictionaryVersion 6.0 CFBundleName {framework_name} CFBundlePackageType FMWK CFBundleShortVersionString 1.0 CFBundleVersion 1 MinimumOSVersion {min_os_version} CFBundleSupportedPlatforms iPhoneOS "# ); std::fs::write(framework_dir.join("Info.plist"), info_plist)?; } else { // macOS uses versioned structure with symlinks let versions_a = framework_dir.join("Versions").join("A"); let resources_dir = versions_a.join("Resources"); std::fs::create_dir_all(&resources_dir)?; // Copy dylib as the framework executable let exec_path = versions_a.join(framework_name); std::fs::copy(dylib_path, &exec_path)?; // Set install name let output = Command::new("xcrun") .arg("install_name_tool") 
.arg("-id") .arg(format!( "@rpath/{}.framework/Versions/A/{}", framework_name, framework_name )) .arg(&exec_path) .output() .await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); anyhow::bail!("install_name_tool failed: {}", stderr); } // Create Info.plist in Resources let info_plist = format!( r#" CFBundleDevelopmentRegion en CFBundleExecutable {framework_name} CFBundleIdentifier {bundle_identifier} CFBundleInfoDictionaryVersion 6.0 CFBundleName {framework_name} CFBundlePackageType FMWK CFBundleShortVersionString 1.0 CFBundleVersion 1 LSMinimumSystemVersion {min_os_version} "# ); std::fs::write(resources_dir.join("Info.plist"), info_plist)?; // Create symbolic links (required for macOS framework structure) let versions_dir = framework_dir.join("Versions"); #[cfg(unix)] { std::os::unix::fs::symlink("A", versions_dir.join("Current"))?; std::os::unix::fs::symlink( format!("Versions/Current/{}", framework_name), framework_dir.join(framework_name), )?; std::os::unix::fs::symlink( "Versions/Current/Resources", framework_dir.join("Resources"), )?; } } tracing::debug!( "Created {} framework bundle: {}", if is_ios { "iOS" } else { "macOS" }, framework_dir.display() ); Ok(framework_dir) } /// Compile Swift sources and return the path to the dynamic framework bundle. /// /// This function: /// 1. Generates an umbrella Package.swift that includes all Swift plugins /// 2. Runs `swift build` to compile into a dynamic library /// 3. Wraps the dylib in a proper .framework bundle for iOS/macOS /// 4. 
Returns the path to the resulting `.framework` bundle pub async fn compile_swift_sources( swift_sources: &[SwiftPackageMetadata], target_triple: &Triple, build_dir: &Path, release: bool, ) -> Result> { if swift_sources.is_empty() { return Ok(None); } tracing::debug!( "Compiling {} Swift plugin(s) for {}", swift_sources.len(), target_triple ); // Create the plugins build directory let plugins_dir = build_dir.join("swift-plugins"); std::fs::create_dir_all(&plugins_dir)?; // Copy and prepare all Swift source packages let mut plugin_paths = Vec::new(); for source in swift_sources { let source_path = PathBuf::from(source.package_path.as_str()); let plugin_name = source.plugin_name.as_str(); let product_name = source.product.as_str(); if !source_path.exists() { tracing::warn!( "Swift package path does not exist: {} (for plugin {})", source_path.display(), plugin_name ); continue; } let dest_path = plugins_dir.join(plugin_name); if dest_path.exists() { std::fs::remove_dir_all(&dest_path)?; } copy_dir_recursive(&source_path, &dest_path)?; // Modify the Package.swift to produce a dynamic library if let Err(e) = modify_package_for_dynamic_library(&dest_path, product_name) { tracing::warn!("Failed to modify Package.swift for dynamic library: {}", e); } plugin_paths.push((plugin_name.to_string(), product_name.to_string(), dest_path)); tracing::debug!( "Copied Swift plugin '{}' from {} to {}", plugin_name, source_path.display(), plugins_dir.join(plugin_name).display() ); } if plugin_paths.is_empty() { tracing::warn!("No valid Swift packages found to compile"); return Ok(None); } // Determine Swift target triple and SDK let (swift_triple, sdk_name) = swift_target_and_sdk(target_triple)?; let sdk_path = lookup_sdk_path(&sdk_name).await?; // Build configuration let configuration = if release { "release" } else { "debug" }; // Build each plugin package individually for (plugin_name, product_name, package_path) in &plugin_paths { tracing::debug!( "Building Swift plugin '{}' 
(product: {})", plugin_name, product_name ); let build_path = package_path.join(".build"); let mut cmd = Command::new("xcrun"); cmd.args(["swift", "build"]) .arg("--package-path") .arg(package_path) .arg("--configuration") .arg(configuration) .arg("--triple") .arg(&swift_triple) .arg("--sdk") .arg(&sdk_path) .arg("--product") .arg(product_name) .arg("--build-path") .arg(&build_path); tracing::debug!("Running: xcrun swift build for {}", product_name); let output = cmd.output().await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); anyhow::bail!( "Swift build failed for plugin '{}':\n{}\n{}", plugin_name, stdout, stderr ); } if !output.stderr.is_empty() { let stderr = String::from_utf8_lossy(&output.stderr); tracing::debug!("Swift build warnings for {}:\n{}", plugin_name, stderr); } } // Find the output dynamic library for each plugin // Swift puts the output in .build///lib.dylib // or .build//lib.dylib depending on the version let mut all_dylibs = Vec::new(); for (_, product_name, package_path) in &plugin_paths { let build_path = package_path.join(".build"); let lib_name = format!("lib{}.dylib", product_name); let lib_search_paths = [ build_path.join(&swift_triple).join(configuration), build_path.join(configuration), build_path.clone(), ]; let mut found = false; for search_path in &lib_search_paths { let lib_path = search_path.join(&lib_name); if lib_path.exists() { tracing::debug!("Found Swift dynamic library: {}", lib_path.display()); all_dylibs.push((product_name.clone(), lib_path)); found = true; break; } } if !found { tracing::warn!( "Could not find compiled Swift dynamic library for product '{}' (expected {})", product_name, lib_name ); } } if all_dylibs.is_empty() { tracing::warn!("No Swift dynamic libraries were compiled successfully"); return Ok(None); } // For dynamic libraries, we need to wrap each in a framework bundle // If there's only one library, create a 
single framework // If there are multiple, we'll create frameworks for each (they're independent) // The first one is the "primary" framework that gets returned let (_primary_name, primary_dylib) = all_dylibs.remove(0); // Create the framework bundle from the dylib // Use "DioxusSwiftPlugins" as the umbrella framework name let framework_name = "DioxusSwiftPlugins"; let bundle_identifier = "com.dioxus.swift.plugins"; let framework_path = create_framework_bundle( &primary_dylib, framework_name, build_dir, target_triple, bundle_identifier, ) .await?; // If there are additional dylibs, create separate framework bundles for them for (name, dylib_path) in all_dylibs { let extra_framework = create_framework_bundle( &dylib_path, &name, build_dir, target_triple, &format!("com.dioxus.swift.{}", name.to_lowercase()), ) .await?; tracing::debug!( "Created additional framework: {}", extra_framework.display() ); } Ok(Some(framework_path)) } /// Modify a Package.swift to produce a dynamic library instead of static. /// This is needed for runtime class lookup via NSClassFromString. 
fn modify_package_for_dynamic_library(package_path: &Path, product_name: &str) -> Result<()> { let package_swift_path = package_path.join("Package.swift"); if !package_swift_path.exists() { anyhow::bail!( "Package.swift not found at {}", package_swift_path.display() ); } let content = std::fs::read_to_string(&package_swift_path)?; // Replace .static with .dynamic for the library type let modified = content .replace("type: .static", "type: .dynamic") .replace("type:.static", "type: .dynamic"); // If no library type was specified, we need to add it // Look for .library(name: "ProductName", targets: [...]) and change to // .library(name: "ProductName", type: .dynamic, targets: [...]) let pattern = format!( r#".library\s*\(\s*name\s*:\s*"{}"\s*,\s*targets"#, regex::escape(product_name) ); let replacement = format!( r#".library(name: "{}", type: .dynamic, targets"#, product_name ); let modified = if let Ok(re) = regex::Regex::new(&pattern) { re.replace_all(&modified, replacement.as_str()).to_string() } else { modified }; std::fs::write(&package_swift_path, modified)?; Ok(()) } /// Convert a Rust target triple to Swift target triple and SDK name fn swift_target_and_sdk(triple: &Triple) -> Result<(String, String)> { use target_lexicon::{Architecture, Environment, OperatingSystem}; // Check if this is a simulator target using the environment field let is_simulator = triple.environment == Environment::Sim; let swift_triple = match (&triple.architecture, &triple.operating_system) { (Architecture::Aarch64(_), OperatingSystem::IOS(_)) => { if is_simulator { "arm64-apple-ios-simulator" } else { "arm64-apple-ios" } } (Architecture::Aarch64(_), OperatingSystem::MacOSX { .. } | OperatingSystem::Darwin(_)) => { "arm64-apple-macosx" } (Architecture::X86_64, OperatingSystem::IOS(_)) => "x86_64-apple-ios-simulator", (Architecture::X86_64, OperatingSystem::MacOSX { .. 
} | OperatingSystem::Darwin(_)) => { "x86_64-apple-macosx" } _ => anyhow::bail!("Unsupported target for Swift compilation: {}", triple), }; let sdk_name = match &triple.operating_system { OperatingSystem::IOS(_) => { // Check if this is a simulator target using the environment field if is_simulator { "iphonesimulator" } else { "iphoneos" } } OperatingSystem::MacOSX { .. } | OperatingSystem::Darwin(_) => "macosx", _ => anyhow::bail!( "Unsupported operating system for Swift compilation: {:?}", triple.operating_system ), }; Ok((swift_triple.to_string(), sdk_name.to_string())) } /// Look up the SDK path using xcrun async fn lookup_sdk_path(sdk_name: &str) -> Result { let output = Command::new("xcrun") .args(["--sdk", sdk_name, "--show-sdk-path"]) .output() .await .context("Failed to run xcrun to find SDK path")?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); anyhow::bail!("Failed to find SDK '{}': {}", sdk_name, stderr); } let sdk_path = String::from_utf8(output.stdout) .context("Invalid UTF-8 in SDK path")? .trim() .to_string(); if sdk_path.is_empty() { anyhow::bail!("SDK path for '{}' is empty", sdk_name); } Ok(sdk_path) } /// Recursively copy a directory fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> { std::fs::create_dir_all(dst)?; for entry in std::fs::read_dir(src)? 
{ let entry = entry?; let ty = entry.file_type()?; let src_path = entry.path(); let dst_path = dst.join(entry.file_name()); if ty.is_dir() { // Skip .build directories if entry.file_name() == ".build" { continue; } copy_dir_recursive(&src_path, &dst_path)?; } else { std::fs::copy(&src_path, &dst_path)?; } } Ok(()) } /// Extract Swift metadata from object files in link arguments pub fn extract_swift_metadata_from_link_args( link_args: &[String], workspace_dir: &Path, ) -> Vec { let mut swift_packages = Vec::new(); // Look through rlibs and object files for Swift metadata for arg in link_args { let path = PathBuf::from(arg); // Only process files in our workspace if !path.starts_with(workspace_dir) { continue; } // Check for .rlib files if arg.ends_with(".rlib") { if let Ok(swift_meta) = extract_swift_from_rlib(&path) { swift_packages.extend(swift_meta); } } // Check for .o files else if arg.ends_with(".o") || arg.ends_with(".obj") { if let Ok(swift_meta) = extract_swift_from_object(&path) { swift_packages.extend(swift_meta); } } } // Deduplicate by plugin name swift_packages.sort_by(|a, b| a.plugin_name.as_str().cmp(b.plugin_name.as_str())); swift_packages.dedup_by(|a, b| a.plugin_name.as_str() == b.plugin_name.as_str()); swift_packages } /// Extract Swift metadata from an rlib file fn extract_swift_from_rlib(rlib_path: &Path) -> Result> { let mut results = Vec::new(); let rlib_contents = std::fs::read(rlib_path)?; let mut reader = ar::Archive::new(std::io::Cursor::new(rlib_contents)); while let Some(Ok(entry)) = reader.next_entry() { let name = std::str::from_utf8(entry.header().identifier()).unwrap_or_default(); // Only process .o files if !name.ends_with(".rcgu.o") && !name.ends_with(".obj") { continue; } // Read the object file contents let mut obj_contents = Vec::new(); std::io::Read::read_to_end(&mut std::io::BufReader::new(entry), &mut obj_contents)?; if let Ok(swift_meta) = extract_swift_from_bytes(&obj_contents) { results.extend(swift_meta); } } Ok(results) 
} /// Extract Swift metadata from an object file fn extract_swift_from_object(obj_path: &Path) -> Result> { let obj_contents = std::fs::read(obj_path)?; extract_swift_from_bytes(&obj_contents) } /// Extract Swift metadata from raw object file bytes fn extract_swift_from_bytes(bytes: &[u8]) -> Result> { use manganis_core::SymbolData; use object::{Object, ObjectSection, ObjectSymbol}; let mut results = Vec::new(); let file = match object::File::parse(bytes) { Ok(f) => f, Err(_) => return Ok(results), }; // Look for __ASSETS__ symbols for symbol in file.symbols() { let name = match symbol.name() { Ok(n) => n, Err(_) => continue, }; if !name.starts_with("__ASSETS__") { continue; } // Try to get the symbol's data if let Some(section_idx) = symbol.section().index() { if let Ok(section) = file.section_by_index(section_idx) { if let Ok(data) = section.data() { // Try to find the symbol data in the section let addr = symbol.address(); let section_addr = section.address(); let offset = (addr - section_addr) as usize; if offset < data.len() { let symbol_data = &data[offset..]; // Try to deserialize as SymbolData if let Some((_, SymbolData::SwiftPackage(meta))) = const_serialize::deserialize_const!(SymbolData, symbol_data) { results.push(meta); } } } } } } Ok(results) } /// Recursively collect all Swift source files from a directory fn collect_swift_files(dir: &Path) -> Result> { let mut swift_files = Vec::new(); if !dir.exists() { return Ok(swift_files); } for entry in std::fs::read_dir(dir)? 
{ let entry = entry?; let path = entry.path(); if path.is_dir() { // Recursively collect from subdirectories swift_files.extend(collect_swift_files(&path)?); } else if path.extension().is_some_and(|ext| ext == "swift") { swift_files.push(path); } } Ok(swift_files) } /// Information about an Apple Widget Extension to compile pub struct AppleWidgetSource { /// Path to the Swift package source directory pub source_path: PathBuf, /// Display name for the widget (shown in system UI) pub display_name: String, /// Bundle ID suffix (appended to app bundle ID) pub bundle_id_suffix: String, /// Minimum deployment target (e.g., "16.0") pub deployment_target: String, /// Swift module name for the widget. /// This MUST match the module name used by the main app's Swift plugin /// for ActivityKit type matching to work (e.g., both must define /// `ModuleName.LocationPermissionAttributes` as the same type). pub module_name: String, } /// Compile an Apple Widget Extension from a Swift package source. /// /// Widget Extensions are compiled as executables (not libraries) and bundled /// as .appex bundles which are installed in the app's PlugIns folder. /// /// **Important**: Widget extensions are XPC services that require special initialization. /// We use `-e _NSExtensionMain` as the entry point instead of the default `_main` that /// Swift generates with `@main`. The `_NSExtensionMain` entry point (provided by Foundation): /// 1. Sets up the XPC listener /// 2. Initializes ExtensionFoundation's `_EXRunningExtension` singleton /// 3. Registers with PlugInKit /// 4. 
Then calls your Widget code /// /// # Arguments /// * `widget` - Widget extension source configuration /// * `target_triple` - The target platform (e.g., aarch64-apple-ios) /// * `build_dir` - Directory for intermediate build files /// * `app_bundle_id` - The main app's bundle identifier (widget ID is derived from this) /// * `release` - Whether to build in release mode /// /// # Returns /// Path to the compiled .appex bundle, ready to be installed to PlugIns/ pub async fn compile_apple_widget( widget: &AppleWidgetSource, target_triple: &Triple, build_dir: &Path, app_bundle_id: &str, release: bool, ) -> Result { use target_lexicon::OperatingSystem; // Validate we're on an Apple platform let is_ios = matches!(target_triple.operating_system, OperatingSystem::IOS(_)); let is_macos = matches!( target_triple.operating_system, OperatingSystem::MacOSX { .. } | OperatingSystem::Darwin(_) ); if !is_ios && !is_macos { anyhow::bail!( "Apple Widget Extensions are only supported on iOS and macOS, not {:?}", target_triple.operating_system ); } // Validate source path exists if !widget.source_path.exists() { anyhow::bail!( "Widget Extension source path does not exist: {}", widget.source_path.display() ); } tracing::debug!( "Compiling Apple Widget Extension '{}' for {}", widget.display_name, target_triple ); // Create the widget build directory let widget_build_dir = build_dir.join("widget-extensions"); std::fs::create_dir_all(&widget_build_dir)?; // Copy the Swift package to build directory // Use the bundle_id_suffix as a unique name since the folder name might just be "widget" let widget_name = widget.bundle_id_suffix.replace("-", "_"); let source_dir = widget_build_dir.join(format!("{}_src", widget_name)); if source_dir.exists() { std::fs::remove_dir_all(&source_dir)?; } copy_dir_recursive(&widget.source_path, &source_dir)?; // Get Swift target triple and SDK let (swift_triple, sdk_name) = swift_target_and_sdk(target_triple)?; // Collect all Swift source files from the Sources 
directory let swift_sources_dir = source_dir.join("Sources"); let swift_files = collect_swift_files(&swift_sources_dir)?; if swift_files.is_empty() { anyhow::bail!( "No Swift source files found in widget extension Sources directory: {}", swift_sources_dir.display() ); } tracing::debug!( "Found {} Swift files for widget: {:?}", swift_files.len(), swift_files ); // Build output path let exec_path = widget_build_dir.join(&widget_name); // Compile the widget extension using swiftc directly // Widget extensions are XPC services that require _NSExtensionMain as the entry point let mut cmd = Command::new("xcrun"); cmd.arg("--sdk").arg(&sdk_name).arg("swiftc"); // Add all Swift source files for swift_file in &swift_files { cmd.arg(swift_file); } // Output executable cmd.arg("-o").arg(&exec_path); // Target triple with proper iOS version // Format: arm64-apple-ios17.0 or arm64-apple-ios17.0-simulator let is_simulator = swift_triple.contains("simulator"); let base_triple = swift_triple.replace("-simulator", ""); let swift_target = if is_simulator { format!("{}{}-simulator", base_triple, widget.deployment_target) } else { format!("{}{}", base_triple, widget.deployment_target) }; cmd.arg("-target").arg(&swift_target); // Module name - use a consistent name that matches the main app's plugin module // This is critical for ActivityKit type matching between app and widget cmd.arg("-module-name").arg(&widget.module_name); // Optimization flags if release { cmd.arg("-O").arg("-whole-module-optimization"); } // Extension-specific flags cmd.arg("-application-extension"); // Critical: Use _NSExtensionMain as the entry point for widget extensions // Without this, the widget crashes because ExtensionFoundation's singleton isn't initialized cmd.arg("-Xlinker") .arg("-e") .arg("-Xlinker") .arg("_NSExtensionMain"); // Link Objective-C runtime (required for Swift/ObjC interop) cmd.arg("-lobjc"); // Link required frameworks cmd.arg("-framework").arg("Foundation"); 
cmd.arg("-framework").arg("SwiftUI"); cmd.arg("-framework").arg("WidgetKit"); cmd.arg("-framework").arg("ActivityKit"); tracing::debug!("Running swiftc for widget: {:?}", cmd); let output = cmd.output().await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); anyhow::bail!( "Swift compilation failed for widget extension '{}':\n{}\n{}", widget_name, stdout, stderr ); } tracing::debug!("Compiled widget executable: {}", exec_path.display()); // Create the .appex bundle let appex_name = format!("{}.appex", widget_name); let appex_dir = widget_build_dir.join(&appex_name); // Remove existing appex if present if appex_dir.exists() { std::fs::remove_dir_all(&appex_dir)?; } std::fs::create_dir_all(&appex_dir)?; // Copy the executable into the appex bundle let bundle_exec = appex_dir.join(&widget_name); std::fs::copy(&exec_path, &bundle_exec)?; // Create Info.plist for the widget extension let widget_bundle_id = format!("{}.{}", app_bundle_id, widget.bundle_id_suffix); let min_os_version = &widget.deployment_target; let platform_info = if is_ios { format!( r#" MinimumOSVersion {min_os_version} CFBundleSupportedPlatforms iPhoneOS UIDeviceFamily 1 2 "# ) } else { format!( r#" LSMinimumSystemVersion {min_os_version} CFBundleSupportedPlatforms MacOSX "# ) }; let info_plist = format!( r#" CFBundleDevelopmentRegion en CFBundleDisplayName {display_name} CFBundleExecutable {widget_name} CFBundleIdentifier {widget_bundle_id} CFBundleInfoDictionaryVersion 6.0 CFBundleName {widget_name} CFBundlePackageType XPC! 
CFBundleShortVersionString 1.0 CFBundleVersion 1 {platform_info} NSExtension NSExtensionPointIdentifier com.apple.widgetkit-extension NSSupportsLiveActivities "#, display_name = widget.display_name, widget_name = widget_name, widget_bundle_id = widget_bundle_id, platform_info = platform_info, ); std::fs::write(appex_dir.join("Info.plist"), info_plist)?; tracing::debug!("Created Widget Extension bundle: {}", appex_dir.display()); Ok(appex_dir) } ================================================ FILE: packages/cli/src/build/manifest.rs ================================================ //! The build manifest for `dx` applications, containing metadata about the build including //! the CLI version, Rust version, and all bundled assets. //! //! We eventually plan to use this manifest to support tighter integration with deployment platforms //! and CDNs. //! //! This manifest contains the list of assets, rust version, and cli version used to build the app. //! Eventually, we might want to expand this to include more metadata about the build, including //! build time, target platform, etc. use dioxus_cli_opt::AssetManifest; use serde::{Deserialize, Serialize}; #[derive(Default, Serialize, Deserialize)] pub struct AppManifest { /// Stable since 0.7.0 pub cli_version: String, /// Stable since 0.7.0 pub rust_version: String, /// Stable since 0.7.0 pub assets: AssetManifest, } ================================================ FILE: packages/cli/src/build/manifest_mapper.rs ================================================ //! Maps unified Dioxus.toml config to platform-specific manifest data. //! //! This module converts cross-platform declarations (permissions, deep links, //! background modes) into platform-specific identifiers: //! - Android: `` entries, intent filters, foreground service types //! 
//! - iOS/macOS: Info.plist keys, URL schemes, UIBackgroundModes

use crate::config::{
    AndroidConfig, BackgroundConfig, DeepLinkConfig, IosConfig, LocationPrecision, MacosConfig,
    PermissionsConfig, StorageAccess,
};

/// Android permission entry for AndroidManifest.xml
#[derive(Debug, Clone)]
pub struct AndroidPermissionEntry {
    /// Full Android permission string (e.g., "android.permission.CAMERA")
    pub permission: String,
    /// User-facing description (used for documentation)
    pub description: String,
}

/// iOS/macOS plist entry for Info.plist
#[derive(Debug, Clone)]
pub struct PlistEntry {
    /// Plist key (e.g., "NSCameraUsageDescription")
    pub key: String,
    /// User-facing description shown in permission dialogs
    pub value: String,
}

/// Maps unified permissions, deep links, and background modes to platform-specific identifiers.
///
/// NOTE(review): the element types of the `Vec` fields below appear to have
/// been stripped during extraction (`Vec,`). From usage in the methods below,
/// `android_permissions` holds `AndroidPermissionEntry` and the plist-entry
/// fields hold `PlistEntry`; the others look like `String` collections, and
/// `android_intent_filters` comes straight from config — confirm the element
/// types against the upstream file.
#[derive(Debug, Default)]
pub struct ManifestMapper {
    // pushes `AndroidPermissionEntry` values — TODO confirm element type
    pub android_permissions: Vec,
    // extended from `android.features.clone()` — element type not visible here
    pub android_features: Vec,
    // pushes `PlistEntry` values — TODO confirm element type
    pub ios_plist_entries: Vec,
    // pushes `PlistEntry` values — TODO confirm element type
    pub macos_plist_entries: Vec,
    /// URL schemes for iOS CFBundleURLTypes (merged from deep_links.schemes + ios.url_schemes)
    pub ios_url_schemes: Vec,
    /// URL schemes for macOS CFBundleURLTypes (merged from deep_links.schemes + macos.url_schemes)
    pub macos_url_schemes: Vec,
    /// URL schemes for Android intent-filter (merged from deep_links.schemes + android.url_schemes)
    pub android_url_schemes: Vec,
    /// Associated domains for iOS (from deep_links.hosts → "applinks:host")
    pub ios_associated_domains: Vec,
    /// Android intent filters from config (android.intent_filters)
    pub android_intent_filters: Vec,
    /// App link hosts for Android auto-verify (from deep_links.hosts)
    pub android_app_link_hosts: Vec,
    /// iOS UIBackgroundModes (merged from BackgroundConfig + ios.background_modes)
    pub ios_background_modes: Vec,
    /// Android foreground service types (from BackgroundConfig + android.foreground_service_types)
    pub android_foreground_service_types: Vec,
}

impl ManifestMapper {
    /// Create a new permission mapper from the unified config.
    ///
    /// Runs every `map_*` translator over the unified `permissions`, merges
    /// raw platform-specific entries from `android`, then maps deep links and
    /// background modes. Mapped results are logged at debug level.
    pub fn from_config(
        permissions: &PermissionsConfig,
        deep_links: &DeepLinkConfig,
        background: &BackgroundConfig,
        android: &AndroidConfig,
        ios: &IosConfig,
        macos: &MacosConfig,
    ) -> Self {
        let mut mapper = Self::default();
        // Map unified permissions — each helper translates one cross-platform
        // permission into its Android permission / iOS-macOS plist equivalents.
        mapper.map_location(permissions);
        mapper.map_camera(permissions);
        mapper.map_microphone(permissions);
        mapper.map_notifications(permissions);
        mapper.map_photos(permissions);
        mapper.map_bluetooth(permissions);
        mapper.map_background_location(permissions);
        mapper.map_contacts(permissions);
        mapper.map_calendar(permissions);
        mapper.map_biometrics(permissions);
        mapper.map_nfc(permissions);
        mapper.map_motion(permissions);
        mapper.map_health(permissions);
        mapper.map_speech(permissions);
        mapper.map_media_library(permissions);
        mapper.map_siri(permissions);
        mapper.map_homekit(permissions);
        mapper.map_local_network(permissions);
        mapper.map_nearby_wifi(permissions);
        // Add raw Android permissions declared verbatim in config
        for (perm, config) in &android.permissions {
            mapper.android_permissions.push(AndroidPermissionEntry {
                permission: perm.clone(),
                description: config.description.clone(),
            });
        }
        // Add Android features
        mapper.android_features.extend(android.features.clone());
        // Map deep links
        mapper.map_deep_links(deep_links, android, ios, macos);
        // Map background modes
        mapper.map_background_modes(background, android, ios);
        // Log mapped permissions for debugging
        for perm in &mapper.android_permissions {
            tracing::debug!(
                "Android permission: {} - {}",
                perm.permission,
                perm.description
            );
        }
        for entry in &mapper.ios_plist_entries {
            tracing::debug!("iOS plist: {} = {}", entry.key, entry.value);
        }
        mapper
    }

    /// Map the unified `location` permission to platform identifiers.
    fn map_location(&mut self, permissions: &PermissionsConfig) {
        if let Some(loc) = &permissions.location {
            // Precision selects fine vs coarse on Android.
            let android_perm = match loc.precision {
                LocationPrecision::Fine => "android.permission.ACCESS_FINE_LOCATION",
                LocationPrecision::Coarse => "android.permission.ACCESS_COARSE_LOCATION",
            };
            self.android_permissions.push(AndroidPermissionEntry {
                permission: android_perm.to_string(),
                description: loc.description.clone(),
            });
            // For fine location, also add coarse as it's often needed
            if loc.precision == LocationPrecision::Fine {
                self.android_permissions.push(AndroidPermissionEntry {
                    permission: "android.permission.ACCESS_COARSE_LOCATION".to_string(),
                    description: loc.description.clone(),
                });
            }
            self.ios_plist_entries.push(PlistEntry {
                key: "NSLocationWhenInUseUsageDescription".to_string(),
                value: loc.description.clone(),
            });
            self.macos_plist_entries.push(PlistEntry {
                key: "NSLocationUsageDescription".to_string(),
                value: loc.description.clone(),
            });
        }
    }

    /// Map the unified `camera` permission to platform identifiers.
    fn map_camera(&mut self, permissions: &PermissionsConfig) {
        if let Some(cam) = &permissions.camera {
            self.android_permissions.push(AndroidPermissionEntry {
                permission: "android.permission.CAMERA".to_string(),
                description: cam.description.clone(),
            });
            self.ios_plist_entries.push(PlistEntry {
                key: "NSCameraUsageDescription".to_string(),
                value: cam.description.clone(),
            });
            self.macos_plist_entries.push(PlistEntry {
                key: "NSCameraUsageDescription".to_string(),
                value: cam.description.clone(),
            });
        }
    }

    /// Map the unified `microphone` permission to platform identifiers.
    fn map_microphone(&mut self, permissions: &PermissionsConfig) {
        if let Some(mic) = &permissions.microphone {
            self.android_permissions.push(AndroidPermissionEntry {
                permission: "android.permission.RECORD_AUDIO".to_string(),
                description: mic.description.clone(),
            });
            self.ios_plist_entries.push(PlistEntry {
                key: "NSMicrophoneUsageDescription".to_string(),
                value: mic.description.clone(),
            });
            self.macos_plist_entries.push(PlistEntry {
                key: "NSMicrophoneUsageDescription".to_string(),
                value: mic.description.clone(),
            });
        }
    }

    /// Map the unified `notifications` permission (Android-only entry).
    fn map_notifications(&mut self, permissions: &PermissionsConfig) {
        if let Some(notif) = &permissions.notifications {
            self.android_permissions.push(AndroidPermissionEntry {
                permission: "android.permission.POST_NOTIFICATIONS".to_string(),
                description: notif.description.clone(),
            });
            // iOS notifications are handled at runtime, no plist entry needed
        }
    }

    // (next method continues past this excerpt)
    fn
map_photos(&mut self, permissions: &PermissionsConfig) { if let Some(photos) = &permissions.photos { match photos.access { StorageAccess::Read => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_MEDIA_IMAGES".to_string(), description: photos.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSPhotoLibraryUsageDescription".to_string(), value: photos.description.clone(), }); } StorageAccess::Write => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_EXTERNAL_STORAGE".to_string(), description: photos.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSPhotoLibraryAddUsageDescription".to_string(), value: photos.description.clone(), }); } StorageAccess::ReadWrite => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_MEDIA_IMAGES".to_string(), description: photos.description.clone(), }); self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_EXTERNAL_STORAGE".to_string(), description: photos.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSPhotoLibraryUsageDescription".to_string(), value: photos.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSPhotoLibraryAddUsageDescription".to_string(), value: photos.description.clone(), }); } } self.macos_plist_entries.push(PlistEntry { key: "NSPhotoLibraryUsageDescription".to_string(), value: photos.description.clone(), }); } } fn map_bluetooth(&mut self, permissions: &PermissionsConfig) { if let Some(bt) = &permissions.bluetooth { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.BLUETOOTH_CONNECT".to_string(), description: bt.description.clone(), }); self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.BLUETOOTH_SCAN".to_string(), description: bt.description.clone(), }); 
self.ios_plist_entries.push(PlistEntry { key: "NSBluetoothAlwaysUsageDescription".to_string(), value: bt.description.clone(), }); self.macos_plist_entries.push(PlistEntry { key: "NSBluetoothAlwaysUsageDescription".to_string(), value: bt.description.clone(), }); } } fn map_background_location(&mut self, permissions: &PermissionsConfig) { if let Some(bg_loc) = &permissions.background_location { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.ACCESS_BACKGROUND_LOCATION".to_string(), description: bg_loc.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSLocationAlwaysAndWhenInUseUsageDescription".to_string(), value: bg_loc.description.clone(), }); } } fn map_contacts(&mut self, permissions: &PermissionsConfig) { if let Some(contacts) = &permissions.contacts { match contacts.access { StorageAccess::Read => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_CONTACTS".to_string(), description: contacts.description.clone(), }); } StorageAccess::Write => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_CONTACTS".to_string(), description: contacts.description.clone(), }); } StorageAccess::ReadWrite => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_CONTACTS".to_string(), description: contacts.description.clone(), }); self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_CONTACTS".to_string(), description: contacts.description.clone(), }); } } self.ios_plist_entries.push(PlistEntry { key: "NSContactsUsageDescription".to_string(), value: contacts.description.clone(), }); self.macos_plist_entries.push(PlistEntry { key: "NSContactsUsageDescription".to_string(), value: contacts.description.clone(), }); } } fn map_calendar(&mut self, permissions: &PermissionsConfig) { if let Some(cal) = &permissions.calendar { match cal.access { StorageAccess::Read 
=> { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_CALENDAR".to_string(), description: cal.description.clone(), }); } StorageAccess::Write => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_CALENDAR".to_string(), description: cal.description.clone(), }); } StorageAccess::ReadWrite => { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_CALENDAR".to_string(), description: cal.description.clone(), }); self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.WRITE_CALENDAR".to_string(), description: cal.description.clone(), }); } } self.ios_plist_entries.push(PlistEntry { key: "NSCalendarsUsageDescription".to_string(), value: cal.description.clone(), }); self.macos_plist_entries.push(PlistEntry { key: "NSCalendarsUsageDescription".to_string(), value: cal.description.clone(), }); } } fn map_biometrics(&mut self, permissions: &PermissionsConfig) { if let Some(bio) = &permissions.biometrics { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.USE_BIOMETRIC".to_string(), description: bio.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSFaceIDUsageDescription".to_string(), value: bio.description.clone(), }); } } fn map_nfc(&mut self, permissions: &PermissionsConfig) { if let Some(nfc) = &permissions.nfc { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.NFC".to_string(), description: nfc.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NFCReaderUsageDescription".to_string(), value: nfc.description.clone(), }); } } fn map_motion(&mut self, permissions: &PermissionsConfig) { if let Some(motion) = &permissions.motion { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.ACTIVITY_RECOGNITION".to_string(), description: motion.description.clone(), }); 
self.ios_plist_entries.push(PlistEntry { key: "NSMotionUsageDescription".to_string(), value: motion.description.clone(), }); } } fn map_health(&mut self, permissions: &PermissionsConfig) { if let Some(health) = &permissions.health { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.BODY_SENSORS".to_string(), description: health.description.clone(), }); match health.access { StorageAccess::Read => { self.ios_plist_entries.push(PlistEntry { key: "NSHealthShareUsageDescription".to_string(), value: health.description.clone(), }); } StorageAccess::Write => { self.ios_plist_entries.push(PlistEntry { key: "NSHealthUpdateUsageDescription".to_string(), value: health.description.clone(), }); } StorageAccess::ReadWrite => { self.ios_plist_entries.push(PlistEntry { key: "NSHealthShareUsageDescription".to_string(), value: health.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSHealthUpdateUsageDescription".to_string(), value: health.description.clone(), }); } } } } fn map_speech(&mut self, permissions: &PermissionsConfig) { if let Some(speech) = &permissions.speech { // Speech recognition uses microphone on Android self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.RECORD_AUDIO".to_string(), description: speech.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSSpeechRecognitionUsageDescription".to_string(), value: speech.description.clone(), }); } } fn map_media_library(&mut self, permissions: &PermissionsConfig) { if let Some(media) = &permissions.media_library { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.READ_MEDIA_AUDIO".to_string(), description: media.description.clone(), }); self.ios_plist_entries.push(PlistEntry { key: "NSAppleMusicUsageDescription".to_string(), value: media.description.clone(), }); } } fn map_siri(&mut self, permissions: &PermissionsConfig) { if let Some(siri) = &permissions.siri { // Siri 
is iOS only self.ios_plist_entries.push(PlistEntry { key: "NSSiriUsageDescription".to_string(), value: siri.description.clone(), }); } } fn map_homekit(&mut self, permissions: &PermissionsConfig) { if let Some(homekit) = &permissions.homekit { // HomeKit is iOS only self.ios_plist_entries.push(PlistEntry { key: "NSHomeKitUsageDescription".to_string(), value: homekit.description.clone(), }); } } fn map_local_network(&mut self, permissions: &PermissionsConfig) { if let Some(network) = &permissions.local_network { // Local network is iOS only self.ios_plist_entries.push(PlistEntry { key: "NSLocalNetworkUsageDescription".to_string(), value: network.description.clone(), }); } } fn map_nearby_wifi(&mut self, permissions: &PermissionsConfig) { if let Some(wifi) = &permissions.nearby_wifi { // Nearby WiFi is Android only self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.NEARBY_WIFI_DEVICES".to_string(), description: wifi.description.clone(), }); } } /// Map deep link config to platform-specific URL schemes, associated domains, and intent filters fn map_deep_links( &mut self, deep_links: &DeepLinkConfig, android: &AndroidConfig, ios: &IosConfig, macos: &MacosConfig, ) { // Merge unified schemes with platform-specific overrides let mut ios_schemes: Vec = deep_links.schemes.clone(); ios_schemes.extend(ios.url_schemes.clone()); ios_schemes.dedup(); self.ios_url_schemes = ios_schemes; let mut macos_schemes: Vec = deep_links.schemes.clone(); macos_schemes.extend(macos.url_schemes.clone()); macos_schemes.dedup(); self.macos_url_schemes = macos_schemes; let mut android_schemes: Vec = deep_links.schemes.clone(); android_schemes.extend(android.url_schemes.clone()); android_schemes.dedup(); self.android_url_schemes = android_schemes; // Map universal link hosts to iOS associated domains for host in &deep_links.hosts { self.ios_associated_domains.push(format!("applinks:{host}")); } // Store app link hosts for Android auto-verify intent filters 
self.android_app_link_hosts = deep_links.hosts.clone(); // Add explicit Android intent filters from config self.android_intent_filters = android.intent_filters.clone(); } /// Map background mode config to platform-specific background capabilities fn map_background_modes( &mut self, background: &BackgroundConfig, android: &AndroidConfig, ios: &IosConfig, ) { // Build iOS UIBackgroundModes from unified config let mut ios_modes: Vec = Vec::new(); if background.location { ios_modes.push("location".to_string()); } if background.audio { ios_modes.push("audio".to_string()); } if background.fetch { ios_modes.push("fetch".to_string()); } if background.remote_notifications { ios_modes.push("remote-notification".to_string()); } if background.voip { ios_modes.push("voip".to_string()); } if background.bluetooth { ios_modes.push("bluetooth-central".to_string()); ios_modes.push("bluetooth-peripheral".to_string()); } if background.external_accessory { ios_modes.push("external-accessory".to_string()); } if background.processing { ios_modes.push("processing".to_string()); } // Merge platform-specific overrides for mode in &ios.background_modes { if !ios_modes.contains(mode) { ios_modes.push(mode.clone()); } } self.ios_background_modes = ios_modes; // Build Android foreground service types and permissions let mut android_types: Vec = Vec::new(); if background.location { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.ACCESS_BACKGROUND_LOCATION".to_string(), description: "Background location updates".to_string(), }); } if background.audio { android_types.push("mediaPlayback".to_string()); } if background.voip { android_types.push("phoneCall".to_string()); } if background.bluetooth { android_types.push("connectedDevice".to_string()); } // Merge platform-specific overrides for stype in &android.foreground_service_types { if !android_types.contains(stype) { android_types.push(stype.clone()); } } // If we have any foreground service types, add the 
FOREGROUND_SERVICE permission if !android_types.is_empty() { self.android_permissions.push(AndroidPermissionEntry { permission: "android.permission.FOREGROUND_SERVICE".to_string(), description: "Run foreground services".to_string(), }); } self.android_foreground_service_types = android_types; } } #[cfg(test)] mod tests { use super::*; use crate::config::{LocationPermission, SimplePermission}; #[test] fn test_location_permission_mapping() { let permissions = PermissionsConfig { location: Some(LocationPermission { precision: LocationPrecision::Fine, description: "Track your runs".to_string(), }), ..Default::default() }; let mapper = ManifestMapper::from_config( &permissions, &DeepLinkConfig::default(), &BackgroundConfig::default(), &AndroidConfig::default(), &IosConfig::default(), &MacosConfig::default(), ); // Should have both fine and coarse for Android assert!(mapper .android_permissions .iter() .any(|p| p.permission == "android.permission.ACCESS_FINE_LOCATION")); assert!(mapper .android_permissions .iter() .any(|p| p.permission == "android.permission.ACCESS_COARSE_LOCATION")); // Should have iOS location plist entry assert!(mapper .ios_plist_entries .iter() .any(|e| e.key == "NSLocationWhenInUseUsageDescription")); } #[test] fn test_camera_permission_mapping() { let permissions = PermissionsConfig { camera: Some(SimplePermission { description: "Take photos".to_string(), }), ..Default::default() }; let mapper = ManifestMapper::from_config( &permissions, &DeepLinkConfig::default(), &BackgroundConfig::default(), &AndroidConfig::default(), &IosConfig::default(), &MacosConfig::default(), ); assert!(mapper .android_permissions .iter() .any(|p| p.permission == "android.permission.CAMERA")); assert!(mapper .ios_plist_entries .iter() .any(|e| e.key == "NSCameraUsageDescription")); } #[test] fn test_android_camera_permission_data() { let permissions = PermissionsConfig { camera: Some(SimplePermission { description: "Take photos".to_string(), }), ..Default::default() }; let 
mapper = ManifestMapper::from_config( &permissions, &DeepLinkConfig::default(), &BackgroundConfig::default(), &AndroidConfig::default(), &IosConfig::default(), &MacosConfig::default(), ); assert!(mapper .android_permissions .iter() .any(|p| p.permission == "android.permission.CAMERA")); } } ================================================ FILE: packages/cli/src/build/mod.rs ================================================ //! The core build module for `dx`, enabling building, bundling, and runtime hot-patching of Rust //! applications. This module defines the entire end-to-end build process, including bundling for //! all major platforms including Mac, Windows, Linux, iOS, Android, and WebAssembly. //! //! The bulk of the builder code is contained within the [`request`] module which establishes the //! arguments and flow of the build process. The [`context`] module contains the context for the build //! including status updates and build customization. The [`patch`] module contains the logic for //! hot-patching Rust code through binary analysis and a custom linker. The [`builder`] module contains //! the management of the ongoing build and methods to open the build as a running app. 
mod assets; mod builder; mod cache; mod context; mod ios_swift; mod manifest; mod manifest_mapper; mod patch; mod pre_render; mod request; mod tools; pub(crate) use assets::*; pub(crate) use builder::*; pub(crate) use cache::*; pub(crate) use context::*; pub(crate) use manifest::*; pub(crate) use patch::*; pub(crate) use pre_render::*; pub(crate) use request::*; pub(crate) use tools::*; ================================================ FILE: packages/cli/src/build/patch.rs ================================================ use anyhow::Context; use itertools::Itertools; use object::{ macho::{self}, read::File, write::{MachOBuildVersion, SectionId, StandardSection, Symbol, SymbolId, SymbolSection}, Endianness, Object, ObjectSection, ObjectSymbol, SymbolFlags, SymbolKind, SymbolScope, }; use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use std::{ collections::{BTreeMap, HashMap, HashSet}, ops::{Deref, Range}, path::Path, path::PathBuf, sync::{Arc, RwLock}, }; use subsecond_types::{AddressMap, JumpTable}; use target_lexicon::{Architecture, OperatingSystem, PointerWidth, Triple}; use thiserror::Error; use walrus::{ ConstExpr, DataKind, ElementItems, ElementKind, FunctionBuilder, FunctionId, FunctionKind, ImportKind, Module, ModuleConfig, TableId, }; use wasmparser::{ BinaryReader, BinaryReaderError, Linking, LinkingSectionReader, Payload, SymbolInfo, }; type Result = std::result::Result; #[derive(Debug, Error)] pub enum PatchError { #[error("Failed to read file: {0}")] ReadFs(#[from] std::io::Error), #[error("No debug symbols in the patch output. 
Check your profile's `opt-level` and debug symbols config.")] MissingSymbols, #[error("Failed to parse wasm section: {0}")] ParseSection(#[from] wasmparser::BinaryReaderError), #[error("Failed to parse object file, {0}")] ParseObjectFile(#[from] object::read::Error), #[error("Failed to write object file: {0}")] WriteObjectFIle(#[from] object::write::Error), #[error("Failed to emit module: {0}")] RuntimeError(#[from] anyhow::Error), #[error("Failed to read module's PDB file: {0}")] PdbLoadError(#[from] pdb::Error), #[error("{0}")] InvalidModule(String), #[error("Unsupported platform: {0}")] UnsupportedPlatform(String), } /// A cache for the hotpatching engine that stores the original module's parsed symbol table. /// For large projects, this can shave up to 50% off the total patching time. Since we compile the base /// module with every symbol in it, it can be quite large (hundreds of MB), so storing this here lets /// us avoid re-parsing the module every time we want to patch it. /// /// On the Dioxus Docsite, it dropped the patch time from 3s to 1.1s (!) #[derive(Default)] pub struct HotpatchModuleCache { pub path: PathBuf, // .... wasm stuff pub symbol_ifunc_map: HashMap, pub old_wasm: Module, pub old_bytes: Vec, pub old_exports: HashSet, pub old_imports: HashSet, // ... native stuff pub symbol_table: HashMap, /// Contents of the .tdata section from the original binary (TLS initialization image). /// Used to provide correct init data for TLS symbol stubs instead of garbage addresses. pub tls_init_data: Vec, /// Map from `$tlv$init` symbol name to (offset_in_tdata, computed_size). /// On macOS, Mach-O nlist doesn't carry symbol sizes, so we compute them from /// adjacent symbol addresses in the `__thread_data` section. This lets us provide /// correctly-sized TLS init data in stubs instead of defaulting to pointer_width. 
pub tls_init_sizes: HashMap, } pub struct CachedSymbol { pub address: u64, pub kind: SymbolKind, pub is_undefined: bool, pub is_weak: bool, pub size: u64, pub flags: SymbolFlags, } impl PartialEq for HotpatchModuleCache { fn eq(&self, other: &Self) -> bool { self.path == other.path } } impl std::fmt::Debug for HotpatchModuleCache { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("HotpatchModuleCache") .field("_path", &self.path) .finish() } } impl HotpatchModuleCache { /// This caching step is crucial for performance on large projects. The original module can be /// quite large (hundreds of MB), so this step drastically speeds it up. pub fn new(original: &Path, triple: &Triple) -> Result { let cache = match triple.operating_system { OperatingSystem::Windows => { use pdb::FallibleIterator; // due to lifetimes, this code is unfortunately duplicated. // the pdb crate doesn't bind the lifetime of the items in the iterator to the symbol table, // so we're stuck with local lifetime.s let old_pdb_file = original.with_extension("pdb"); let old_pdb_file_handle = std::fs::File::open(old_pdb_file)?; let mut pdb_file = pdb::PDB::open(old_pdb_file_handle)?; let global_symbols = pdb_file.global_symbols()?; let address_map = pdb_file.address_map()?; let mut symbol_table = HashMap::new(); let mut symbols = global_symbols.iter(); while let Ok(Some(symbol)) = symbols.next() { match symbol.parse() { Ok(pdb::SymbolData::Public(data)) => { let rva = data.offset.to_rva(&address_map); let is_undefined = rva.is_none(); // treat undefined symbols as 0 to match macho/elf let rva = rva.unwrap_or_default(); symbol_table.insert( data.name.to_string().to_string(), CachedSymbol { address: rva.0 as u64, kind: if data.function { SymbolKind::Text } else { SymbolKind::Data }, is_undefined, is_weak: false, size: 0, flags: SymbolFlags::None, }, ); } Ok(pdb::SymbolData::Data(data)) => { let rva = data.offset.to_rva(&address_map); let is_undefined = rva.is_none(); // 
treat undefined symbols as 0 to match macho/elf let rva = rva.unwrap_or_default(); symbol_table.insert( data.name.to_string().to_string(), CachedSymbol { address: rva.0 as u64, kind: SymbolKind::Data, is_undefined, is_weak: false, size: 0, flags: SymbolFlags::None, }, ); } _ => {} } } HotpatchModuleCache { symbol_table, path: original.to_path_buf(), ..Default::default() } } // We need to load the ifunc table from the original module since that gives us the map // of name to address (since ifunc entries are also pointers in wasm - ie 0x30 is the 30th // entry in the ifunc table) // // One detail here is that with high optimization levels, the names of functions in the ifunc // table will be smaller than the total number of functions in the module. This is because // in high opt-levels, functions are merged. Fortunately, the symbol table remains intact // and functions with different names point to the same function index (not to be confused // with the function index in the module!). // // We need to take an extra step to account for merged functions by mapping function index // to a set of functions that point to the same index. _ if triple.architecture == Architecture::Wasm32 => { let bytes = std::fs::read(original)?; let ParsedModule { module, symbols, .. 
} = parse_module_with_ids(&bytes)?; if symbols.symbols.is_empty() { return Err(PatchError::MissingSymbols); } let name_to_ifunc_old = collect_func_ifuncs(&module); // These are the "real" bindings for functions in the module // Basically a map between a function's index and its real name let func_to_index = module .funcs .par_iter() .filter_map(|f| { let name = f.name.as_deref()?; Some((*symbols.code_symbol_map.get(name)?, name)) }) .collect::>(); // Find the corresponding function that shares the same index, but in the ifunc table let name_to_ifunc_old: HashMap<_, _> = symbols .code_symbol_map .par_iter() .filter_map(|(name, idx)| { let new_modules_unified_function = func_to_index.get(idx)?; let offset = name_to_ifunc_old.get(new_modules_unified_function)?; Some((*name, *offset)) }) .collect(); let symbol_ifunc_map = name_to_ifunc_old .par_iter() .map(|(name, idx)| (name.to_string(), *idx)) .collect::>(); let old_exports = module .exports .iter() .map(|e| e.name.to_string()) .collect::>(); let old_imports = module .imports .iter() .map(|i| i.name.to_string()) .collect::>(); HotpatchModuleCache { path: original.to_path_buf(), old_bytes: bytes, symbol_ifunc_map, old_exports, old_imports, old_wasm: module, ..Default::default() } } _ => { let old_bytes = std::fs::read(original)?; let obj = File::parse(&old_bytes as &[u8])?; let symbol_table = obj .symbols() .filter_map(|s| { let flags = match s.flags() { SymbolFlags::None => SymbolFlags::None, SymbolFlags::Elf { st_info, st_other } => { SymbolFlags::Elf { st_info, st_other } } SymbolFlags::MachO { n_desc } => SymbolFlags::MachO { n_desc }, _ => SymbolFlags::None, }; Some(( s.name().ok()?.to_string(), CachedSymbol { address: s.address(), is_undefined: s.is_undefined(), is_weak: s.is_weak(), kind: s.kind(), size: s.size(), flags, }, )) }) .collect::>(); // Extract TLS initialization data and section metadata. 
// This is used to correctly initialize TLS symbols in the stub // instead of writing bogus absolute addresses into .tdata. let tls_section = obj .sections() .find(|s| matches!(s.name(), Ok(".tdata" | "__thread_data"))); let tls_init_data = tls_section .as_ref() .and_then(|s| s.data().ok()) .unwrap_or(&[]) .to_vec(); // Build TLS init size map for macOS. Mach-O nlist doesn't carry symbol // sizes, so we compute them from adjacent symbols in __thread_data. // LLVM/rustc names init data symbols as `FOO$tlv$init` in __thread_data. let tls_data_addr = tls_section.as_ref().map(|s| s.address()).unwrap_or(0); let tls_data_size = tls_section.as_ref().map(|s| s.size()).unwrap_or(0); let tls_section_index = tls_section.as_ref().map(|s| s.index()); let mut tls_init_syms: Vec<(u64, String)> = Vec::new(); for sym in obj.symbols() { if let (Some(section_idx), Ok(sname)) = (sym.section_index(), sym.name()) { if Some(section_idx) == tls_section_index { let offset = sym.address().saturating_sub(tls_data_addr); tls_init_syms.push((offset, sname.to_string())); } } } tls_init_syms.sort_by_key(|(addr, _)| *addr); tls_init_syms.dedup_by_key(|(addr, _)| *addr); let mut tls_init_sizes: HashMap = HashMap::new(); for (i, (offset, sname)) in tls_init_syms.iter().enumerate() { let size = if i + 1 < tls_init_syms.len() { tls_init_syms[i + 1].0 - offset } else { tls_data_size.saturating_sub(*offset) }; tls_init_sizes.insert(sname.clone(), (*offset, size)); } HotpatchModuleCache { symbol_table, path: original.to_path_buf(), old_bytes, tls_init_data, tls_init_sizes, ..Default::default() } } }; Ok(cache) } } pub fn create_windows_jump_table(patch: &Path, cache: &HotpatchModuleCache) -> Result { use pdb::FallibleIterator; let old_name_to_addr = &cache.symbol_table; let mut new_name_to_addr = HashMap::new(); let new_pdb_file_handle = std::fs::File::open(patch.with_extension("pdb"))?; let mut pdb_file = pdb::PDB::open(new_pdb_file_handle)?; let symbol_table = pdb_file.global_symbols()?; let 
address_map = pdb_file.address_map()?; let mut symbol_iter = symbol_table.iter(); while let Ok(Some(symbol)) = symbol_iter.next() { if let Ok(pdb::SymbolData::Public(data)) = symbol.parse() { let rva = data.offset.to_rva(&address_map); if let Some(rva) = rva { new_name_to_addr.insert(data.name.to_string(), rva.0 as u64); } } } let mut map = AddressMap::default(); for (new_name, new_addr) in new_name_to_addr.iter() { if let Some(old_addr) = old_name_to_addr.get(new_name.as_ref()) { map.insert(old_addr.address, *new_addr); } } let new_base_address = new_name_to_addr .get("main") .cloned() .context("failed to find 'main' symbol in patch")?; let aslr_reference = old_name_to_addr .get("main") .map(|s| s.address) .context("failed to find '_main' symbol in original module")?; Ok(JumpTable { lib: patch.to_path_buf(), map, new_base_address, aslr_reference, ifunc_count: 0, }) } /// Assemble a jump table for "nix" architectures. This uses the `object` crate to parse both /// executable's symbol tables and then creates a mapping between the two. Unlike windows, the symbol /// tables are stored within the binary itself, so we can use the `object` crate to parse them. /// /// We use the `_aslr_reference` as a reference point in the base program to calculate the aslr slide /// both at compile time and at runtime. /// /// This does not work for WASM since the `object` crate does not support emitting the WASM format, /// and because WASM requires more logic to handle the wasm-bindgen transformations. 
pub fn create_native_jump_table( patch: &Path, triple: &Triple, cache: &HotpatchModuleCache, ) -> Result { let old_name_to_addr = &cache.symbol_table; let obj2_bytes = std::fs::read(patch)?; let obj2 = File::parse(&obj2_bytes as &[u8])?; let mut map = AddressMap::default(); let new_syms = obj2.symbol_map(); let new_name_to_addr = new_syms .symbols() .par_iter() .map(|s| (s.name(), s.address())) .collect::>(); for (new_name, new_addr) in new_name_to_addr.iter() { if let Some(old_addr) = old_name_to_addr.get(*new_name) { map.insert(old_addr.address, *new_addr); } } let sentinel = main_sentinel(triple); let new_base_address = new_name_to_addr .get(sentinel) .cloned() .context("failed to find 'main' symbol in base - are deubg symbols enabled?")?; let aslr_reference = old_name_to_addr .get(sentinel) .map(|s| s.address) .context("failed to find 'main' symbol in original module - are debug symbols enabled?")?; Ok(JumpTable { lib: patch.to_path_buf(), map, new_base_address, aslr_reference, ifunc_count: 0, }) } /// In the web, our patchable functions are actually ifuncs /// /// We need to line up the ifuncs from the main module to the ifuncs in the patch. /// /// According to the dylink spec, there will be two sets of entries: /// /// - got.func: functions in the indirect function table /// - got.mem: data objects in the data segments /// /// It doesn't seem like we can compile the base module to export these, sadly, so we're going /// to manually satisfy them here, removing their need to be imported. 
/// /// pub fn create_wasm_jump_table(patch: &Path, cache: &HotpatchModuleCache) -> Result { let name_to_ifunc_old = &cache.symbol_ifunc_map; let old = &cache.old_wasm; let old_symbols = parse_bytes_to_data_segment(&cache.old_bytes).context("Failed to parse data segment")?; let new_bytes = std::fs::read(patch).context("Could not read patch file")?; let mut new = Module::from_buffer(&new_bytes)?; let mut got_mems = vec![]; let mut got_funcs = vec![]; let mut wbg_funcs = vec![]; let mut env_funcs = vec![]; // Collect all the GOT entries from the new module. // The GOT imports come from the wasm-ld implementation of the dynamic linking spec // // https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md#imports // // Normally, the base module would synthesize these as exports, but we're not compiling the base // module with `--pie` (nor does wasm-bindgen support it yet), so we need to manually satisfy them. // // One thing to watch out for here is that GOT.func entries have no visibility to any de-duplication // or merging, so we need to take great care in the base module to export *every* symbol even if // they point to the same function. // // The other thing to watch out for here is the __wbindgen_placeholder__ entries. These are meant // to be satisfied by wasm-bindgen via manual code generation, but we can't run wasm-bindgen on the // patch, so we need to do it ourselves. This involves preventing their elimination in the base module // by prefixing them with `__saved_wbg_`. When handling the imports here, we need modify the imported // name to match the prefixed export name in the base module. 
for import in new.imports.iter() { match import.module.as_str() { "GOT.func" => { let Some(entry) = name_to_ifunc_old.get(import.name.as_str()).cloned() else { return Err(PatchError::InvalidModule(format!( "Expected to find GOT.func entry in ifunc table: {}", import.name.as_str() ))); }; got_funcs.push((import.id(), entry)); } "GOT.mem" => got_mems.push(import.id()), "env" => env_funcs.push(import.id()), "__wbindgen_placeholder__" => wbg_funcs.push(import.id()), m => tracing::trace!("Unknown import: {m}:{}", import.name), } } // We need to satisfy the GOT.func imports of this side module. The GOT imports come from the wasm-ld // implementation of the dynamic linking spec // // https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md#imports // // Most importantly, these functions are functions meant to be called indirectly. In normal wasm // code generation, only functions that Rust code references via pointers are given a slot in // the indirection function table. The optimization here traditionally meaning that if a function // can be called directly, then it doesn't need to be referenced indirectly and potentially inlined // or dissolved during LTO. // // In our "fat build" setup, we aggregated all symbols from dependencies into a `dependencies.ar` file. // By promoting these functions to the dynamic scope, we also prevent their inlining because the // linker can still expect some form of interposition to happen, requiring the symbol *actually* // exists. // // Our technique here takes advantage of that and the [`prepare_wasm_base_module`] function promotes // every possible function to the indirect function table. This means that the GOT imports that // `relocation-model=pic` synthesizes can reference the functions via the indirect function table // even if they are not normally synthesized in regular wasm code generation. 
// // Normally, the dynamic linker setup would resolve GOT.func against the same GOT.func export in // the main module, but we don't have that. Instead, we simply re-parse the main module, aggregate // its ifunc table, and then resolve directly to the index in that table. for (import_id, ifunc_index) in got_funcs { let import = new.imports.get(import_id); let ImportKind::Global(id) = import.kind else { return Err(PatchError::InvalidModule(format!( "Expected GOT.func import to be a global: {}", import.name ))); }; // "satisfying" the import means removing it from the import table and replacing its target // value with a local global. new.imports.delete(import_id); new.globals.get_mut(id).kind = walrus::GlobalKind::Local(ConstExpr::Value(walrus::ir::Value::I32(ifunc_index))); } // We need to satisfy the GOT.mem imports of this side module. The GOT.mem imports come from the wasm-ld // implementation of the dynamic linking spec // // https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md#imports // // Unlike the ifunc table, the GOT.mem imports do not need any additional post-processing of the // base module to satisfy. Since our patching approach works but leveraging the experimental dynamic // PIC support in rustc[wasm] and wasm-ld, we are using the GOT.mem imports as a way of identifying // data segments that are present in the base module. // // Normally, the dynamic linker would synthesize corresponding GOT.mem exports in the main module, // but since we're patching on-the-fly, this table will always be out-of-date. // // Instead, we use the symbol table from the base module to find the corresponding data symbols // and then resolve the offset of the data segment in the main module. Using the symbol table // can be somewhat finicky if the user compiled the code with a high-enough opt level that nukes // the names of the data segments, but otherwise this system works well. 
// // We simply use the name of the import as a key into the symbol table and then its offset into // its data segment as the value within the global. for mem in got_mems { let import = new.imports.get(mem); let data_symbol_idx = *old_symbols .data_symbol_map .get(import.name.as_str()) .with_context(|| { format!("Failed to find GOT.mem import by its name: {}", import.name) })?; let data_symbol = old_symbols .data_symbols .get(&data_symbol_idx) .context("Failed to find data symbol by its index")?; let data = old .data .iter() .nth(data_symbol.which_data_segment) .context("Missing data segment in the main module")?; let offset = match data.kind { DataKind::Active { offset: ConstExpr::Value(walrus::ir::Value::I32(idx)), .. } => idx, DataKind::Active { offset: ConstExpr::Value(walrus::ir::Value::I64(idx)), .. } => idx as i32, _ => { return Err(PatchError::InvalidModule(format!( "Data segment of invalid table: {:?}", data.kind ))); } }; let ImportKind::Global(global_id) = import.kind else { return Err(PatchError::InvalidModule( "Expected GOT.mem import to be a global".to_string(), )); }; // "satisfying" the import means removing it from the import table and replacing its target // value with a local global. new.imports.delete(mem); new.globals.get_mut(global_id).kind = walrus::GlobalKind::Local(ConstExpr::Value( walrus::ir::Value::I32(offset + data_symbol.segment_offset as i32), )); } // wasm-bindgen has a limit on the number of exports a module can have, so we need to call the main // module's functions indirectly. This is done by dropping the env import and replacing it with a // local function that calls the indirect function from the table. // // https://github.com/emscripten-core/emscripten/issues/22863 let ifunc_table_initializer = new .elements .iter() .find_map(|e| match e.kind { ElementKind::Active { table, .. 
} => Some(table), _ => None, }) .context("Missing ifunc table")?; for env_func_import in env_funcs { let import = new.imports.get(env_func_import); let ImportKind::Function(func_id) = import.kind else { continue; }; if cache.old_exports.contains(import.name.as_str()) || cache.old_imports.contains(import.name.as_str()) { continue; } let name = import.name.as_str().to_string(); if let Some(table_idx) = name_to_ifunc_old.get(import.name.as_str()) { new.imports.delete(env_func_import); convert_func_to_ifunc_call( &mut new, ifunc_table_initializer, func_id, *table_idx, name.clone(), ); continue; } if name_is_bindgen_symbol(&name) { new.imports.delete(env_func_import); convert_func_to_ifunc_call(&mut new, ifunc_table_initializer, func_id, 0, name); continue; } tracing::warn!("[hotpatching]: Symbol slipped through the cracks: {}", name); } // Wire up the preserved intrinsic functions that we saved before running wasm-bindgen to the expected // imports from the patch. for import_id in wbg_funcs { let import = new.imports.get_mut(import_id); let ImportKind::Function(func_id) = import.kind else { continue; }; import.module = "env".into(); import.name = format!("__saved_wbg_{}", import.name); if name_is_bindgen_symbol(&import.name) { let name = import.name.as_str().to_string(); new.imports.delete(import_id); convert_func_to_ifunc_call(&mut new, ifunc_table_initializer, func_id, 0, name); } } // Rewrite the wbg_cast functions to call the indirect functions from the original module. // This is necessary because wasm-bindgen uses these calls to perform dynamic type casting through // the JS layer. If we don't rewrite these, they end up as calls to `breaks_if_inlined` functions // which are no-ops and get rewritten by the wbindgen post-processing step. // // Here, we find the corresponding wbg_cast function in the old module by name and then rewrite // the patch module's cast function to call the indirect function from the original module. 
// // See the wbg_cast implementation in wasm-bindgen for more details: // let new_func_ids = new.funcs.iter().map(|f| f.id()).collect::>(); for func_id in new_func_ids { let Some(name) = new.funcs.get(func_id).name.as_deref() else { continue; }; if name.contains("wasm_bindgen4__rt8wbg_cast") && !name.contains("breaks_if_inline") { let name = name.to_string(); let old_idx = name_to_ifunc_old .get(&name) .copied() .ok_or_else(|| anyhow::anyhow!("Could not find matching wbg_cast function for [{name}] - must generate new JS bindings."))?; convert_func_to_ifunc_call(&mut new, ifunc_table_initializer, func_id, old_idx, name); } } // Wipe away the unnecessary sections let customs = new.customs.iter().map(|f| f.0).collect::>(); for custom_id in customs { if let Some(custom) = new.customs.get_mut(custom_id) { if custom.name().contains("manganis") || custom.name().contains("__wasm_bindgen") { new.customs.delete(custom_id); } } } // Clear the start function from the patch - we don't want any code automatically running! new.start = None; // Update the wasm module on the filesystem to use the newly lifted version let lib = patch.to_path_buf(); std::fs::write(&lib, new.emit_wasm())?; // And now assemble the jump table by mapping the old ifunc table to the new one, by name // // The ifunc_count will be passed to the dynamic loader so it can allocate the right amount of space // in the indirect function table when loading the patch. 
let name_to_ifunc_new = collect_func_ifuncs(&new); let ifunc_count = name_to_ifunc_new.len() as u64; let mut map = AddressMap::default(); for (name, idx) in name_to_ifunc_new.iter() { // Find the corresponding ifunc in the old module by name if let Some(old_idx) = name_to_ifunc_old.get(*name) { map.insert(*old_idx as u64, *idx as u64); continue; } } Ok(JumpTable { map, lib, ifunc_count, aslr_reference: 0, new_base_address: 0, }) } fn convert_func_to_ifunc_call( new: &mut Module, ifunc_table_initializer: TableId, func_id: FunctionId, table_idx: i32, name: String, ) { use walrus::ir; let func = new.funcs.get_mut(func_id); let ty_id = func.ty(); // Convert the import function to a local function that calls the indirect function from the table let ty = new.types.get(ty_id); let params = ty.params().to_vec(); let results = ty.results().to_vec(); let locals: Vec<_> = params.iter().map(|ty| new.locals.add(*ty)).collect(); // New function that calls the indirect function let mut builder = FunctionBuilder::new(&mut new.types, ¶ms, &results); let mut body = builder.name(name).func_body(); // Push the params onto the stack for arg in locals.iter() { body.local_get(*arg); } // And then the address of the indirect function body.instr(ir::Instr::Const(ir::Const { value: ir::Value::I32(table_idx), })); // And call it body.instr(ir::Instr::CallIndirect(ir::CallIndirect { ty: ty_id, table: ifunc_table_initializer, })); new.funcs.get_mut(func_id).kind = FunctionKind::Local(builder.local_func(locals)); } fn collect_func_ifuncs(m: &Module) -> HashMap<&str, i32> { // Collect all the functions in the module that are ifuncs let mut func_to_offset = HashMap::new(); for el in m.elements.iter() { let ElementKind::Active { offset, .. 
} = &el.kind else { continue; }; let offset = match offset { // Handle explicit offsets ConstExpr::Value(value) => match value { walrus::ir::Value::I32(idx) => *idx, walrus::ir::Value::I64(idx) => *idx as i32, _ => continue, }, // Globals are usually imports and thus don't add a specific offset // ie the ifunc table is offset by a global, so we don't need to push the offset out ConstExpr::Global(_) => 0, _ => continue, }; match &el.items { ElementItems::Functions(ids) => { for (idx, id) in ids.iter().enumerate() { if let Some(name) = m.funcs.get(*id).name.as_deref() { func_to_offset.insert(name, offset + idx as i32); } } } ElementItems::Expressions(_ref_type, _const_exprs) => {} } } func_to_offset } /// Resolve the undefined symbols in the incrementals against the original binary, returning an object /// file that can be linked along the incrementals. /// /// This makes it possible to dlopen the resulting object file and use the original binary's symbols /// bypassing the dynamic linker. /// /// This is very similar to malware :) but it's not! /// /// Note - this function is not defined to run on WASM binaries. The `object` crate does not /// /// todo... 
we need to wire up the cache pub fn create_undefined_symbol_stub( cache: &HotpatchModuleCache, incrementals: &[PathBuf], triple: &Triple, aslr_reference: u64, ) -> Result> { let sorted: Vec<_> = incrementals.iter().sorted().collect(); // Find all the undefined symbols in the incrementals let mut undefined_symbols = HashSet::new(); let mut defined_symbols = HashSet::new(); for path in sorted { let bytes = std::fs::read(path).with_context(|| format!("failed to read {path:?}"))?; let file = File::parse(bytes.deref() as &[u8])?; for symbol in file.symbols() { if symbol.is_undefined() { undefined_symbols.insert(symbol.name()?.to_string()); } else if symbol.is_global() { defined_symbols.insert(symbol.name()?.to_string()); } } } let undefined_symbols: Vec<_> = undefined_symbols .difference(&defined_symbols) .cloned() .collect(); tracing::trace!("Undefined symbols: {:#?}", undefined_symbols); // Create a new object file (architecture doesn't matter much for our purposes) let mut obj = object::write::Object::new( match triple.binary_format { target_lexicon::BinaryFormat::Elf => object::BinaryFormat::Elf, target_lexicon::BinaryFormat::Macho => object::BinaryFormat::MachO, target_lexicon::BinaryFormat::Coff => object::BinaryFormat::Coff, target_lexicon::BinaryFormat::Wasm => object::BinaryFormat::Wasm, target_lexicon::BinaryFormat::Xcoff => object::BinaryFormat::Xcoff, _ => return Err(PatchError::UnsupportedPlatform(triple.to_string())), }, match triple.architecture { Architecture::Aarch64(_) => object::Architecture::Aarch64, Architecture::Wasm32 => object::Architecture::Wasm32, Architecture::X86_64 => object::Architecture::X86_64, _ => return Err(PatchError::UnsupportedPlatform(triple.to_string())), }, match triple.endianness() { Ok(target_lexicon::Endianness::Little) => Endianness::Little, Ok(target_lexicon::Endianness::Big) => Endianness::Big, _ => Endianness::Little, }, ); // Write the headers so we load properly in ios/macos #[allow(clippy::identity_op)] match 
triple.operating_system { OperatingSystem::Darwin(_) => { obj.set_macho_build_version({ let mut build_version = MachOBuildVersion::default(); build_version.platform = macho::PLATFORM_MACOS; build_version.minos = (11 << 16) | (0 << 8) | 0; // 11.0.0 build_version.sdk = (11 << 16) | (0 << 8) | 0; // SDK 11.0.0 build_version }); } OperatingSystem::IOS(_) => { obj.set_macho_build_version({ let mut build_version = MachOBuildVersion::default(); build_version.platform = match triple.environment { target_lexicon::Environment::Sim => macho::PLATFORM_IOSSIMULATOR, _ => macho::PLATFORM_IOS, }; build_version.minos = (14 << 16) | (0 << 8) | 0; // 14.0.0 build_version.sdk = (14 << 16) | (0 << 8) | 0; // SDK 14.0.0 build_version }); } _ => {} } // Get the offset from the main module and adjust the addresses by the slide; let aslr_ref_address = cache .symbol_table .get(main_sentinel(triple)) .context("failed to find '_main' symbol in patch")? .address; if aslr_reference < aslr_ref_address { return Err(PatchError::InvalidModule( format!( "ASLR reference is less than the main module's address - is there a `main`?. {aslr_reference:x} < {aslr_ref_address:x}" ) )); } let aslr_offset = aslr_reference - aslr_ref_address; // we need to assemble a PLT/GOT so direct calls to the patch symbols work // for each symbol we either write the address directly (as a symbol) or create a PLT/GOT entry let text_section = obj.section_id(StandardSection::Text); for name in undefined_symbols { let Some(sym) = cache .symbol_table .get(name.as_str().trim_start_matches("__imp_")) else { tracing::debug!("Symbol not found: {}", name); continue; }; // Undefined symbols tend to be import symbols (darwin gives them an address of 0 until defined). // If we fail to skip these, then we end up with stuff like alloc at 0x0 which is quite bad! if sym.is_undefined { continue; } // ld64 likes to prefix symbols in intermediate object files with an underscore, but our symbol // table doesn't, so we need to strip it off. 
let name_offset = match triple.operating_system { OperatingSystem::MacOSX(_) | OperatingSystem::Darwin(_) | OperatingSystem::IOS(_) => 1, _ => 0, }; let abs_addr = sym.address + aslr_offset; match sym.kind { // Handle synthesized window linker cross-dll statics. // // The `__imp_` prefix is a rather poorly documented feature of link.exe that makes it possible // to reference statics in DLLs via text sections. The linker will synthesize a function // that returns the address of the static, so calling that function will return the address. // We want to satisfy it by creating a data symbol with the contents of the *actual* symbol // in the original binary. // // We ca't use the `__imp_` from the original binary because it was not properly compiled // with this in mind. Instead we have to create the new symbol. // // This is currently only implemented for 64bit architectures (haven't tested 32bit yet). // // https://stackoverflow.com/questions/5159353/how-can-i-get-rid-of-the-imp-prefix-in-the-linker-in-vc _ if name.starts_with("__imp_") => { let data_section = obj.section_id(StandardSection::Data); // Add a pointer to the resolved address let offset = obj.append_section_data( data_section, &abs_addr.to_le_bytes(), 8, // Use proper alignment ); // Add the symbol as a data symbol in our data section obj.add_symbol(Symbol { name: name.as_bytes().to_vec(), value: offset, // Offset within the data section size: 8, // Size of pointer scope: SymbolScope::Linkage, kind: SymbolKind::Data, // Always Data for IAT entries weak: false, section: SymbolSection::Section(data_section), flags: SymbolFlags::None, }); } // Text symbols are normal code symbols. We need to assemble stubs that resolve the undefined // symbols and jump to the original address in the original binary. // // Unfortunately this isn't simply cross-platform, so we need to handle Unix and Windows // calling conventions separately. It also depends on the architecture, making it even more // complicated. 
SymbolKind::Text => { let jump_asm = match triple.operating_system { // The windows ABI and calling convention is different than the SystemV ABI. OperatingSystem::Windows => match triple.architecture { Architecture::X86_64 => { // Windows x64 has specific requirements for alignment and position-independent code let mut code = vec![ 0x48, 0xB8, // movabs RAX, imm64 (move 64-bit immediate to RAX) ]; // Append the absolute 64-bit address code.extend_from_slice(&abs_addr.to_le_bytes()); // jmp RAX (jump to the address in RAX) code.extend_from_slice(&[0xFF, 0xE0]); code } Architecture::X86_32(_) => { // On Windows 32-bit, we can use direct jump but need proper alignment let mut code = vec![ 0xB8, // mov EAX, imm32 (move immediate value to EAX) ]; // Append the absolute 32-bit address code.extend_from_slice(&(abs_addr as u32).to_le_bytes()); // jmp EAX (jump to the address in EAX) code.extend_from_slice(&[0xFF, 0xE0]); code } Architecture::Aarch64(_) => { // Use MOV/MOVK sequence to load 64-bit address into X16 // This is more reliable than ADRP+LDR for direct hotpatching let mut code = Vec::new(); // MOVZ X16, #imm16_0 (bits 0-15 of address) let imm16_0 = (abs_addr & 0xFFFF) as u16; let movz = 0xD2800010u32 | ((imm16_0 as u32) << 5); code.extend_from_slice(&movz.to_le_bytes()); // MOVK X16, #imm16_1, LSL #16 (bits 16-31 of address) let imm16_1 = ((abs_addr >> 16) & 0xFFFF) as u16; let movk1 = 0xF2A00010u32 | ((imm16_1 as u32) << 5); code.extend_from_slice(&movk1.to_le_bytes()); // MOVK X16, #imm16_2, LSL #32 (bits 32-47 of address) let imm16_2 = ((abs_addr >> 32) & 0xFFFF) as u16; let movk2 = 0xF2C00010u32 | ((imm16_2 as u32) << 5); code.extend_from_slice(&movk2.to_le_bytes()); // MOVK X16, #imm16_3, LSL #48 (bits 48-63 of address) let imm16_3 = ((abs_addr >> 48) & 0xFFFF) as u16; let movk3 = 0xF2E00010u32 | ((imm16_3 as u32) << 5); code.extend_from_slice(&movk3.to_le_bytes()); // BR X16 (Branch to address in X16) code.extend_from_slice(&[0x00, 0x02, 0x1F, 0xD6]); code 
} Architecture::Arm(_) => { // For Windows 32-bit ARM, we need a different approach let mut code = Vec::new(); // LDR r12, [pc, #8] ; Load the address into r12 code.extend_from_slice(&[0x08, 0xC0, 0x9F, 0xE5]); // BX r12 ; Branch to the address in r12 code.extend_from_slice(&[0x1C, 0xFF, 0x2F, 0xE1]); // 4-byte alignment padding code.extend_from_slice(&[0x00, 0x00, 0x00, 0x00]); // Store the 32-bit address - 4-byte aligned code.extend_from_slice(&(abs_addr as u32).to_le_bytes()); code } _ => return Err(PatchError::UnsupportedPlatform(triple.to_string())), }, _ => match triple.architecture { Architecture::X86_64 => { // Use JMP instruction to absolute address: FF 25 followed by 32-bit offset // Then the 64-bit absolute address let mut code = vec![0xFF, 0x25, 0x00, 0x00, 0x00, 0x00]; // jmp [rip+0] // Append the 64-bit address code.extend_from_slice(&abs_addr.to_le_bytes()); code } Architecture::X86_32(_) => { // For 32-bit Intel, use JMP instruction with absolute address let mut code = vec![0xE9]; // jmp rel32 let rel_addr = abs_addr as i32 - 5; // Relative address (offset from next instruction) code.extend_from_slice(&rel_addr.to_le_bytes()); code } Architecture::Aarch64(_) => { // For ARM64, we load the address into a register and branch let mut code = Vec::new(); // LDR X16, [PC, #0] ; Load from the next instruction code.extend_from_slice(&[0x50, 0x00, 0x00, 0x58]); // BR X16 ; Branch to the address in X16 code.extend_from_slice(&[0x00, 0x02, 0x1F, 0xD6]); // Store the 64-bit address code.extend_from_slice(&abs_addr.to_le_bytes()); code } Architecture::Arm(_) => { // For 32-bit ARM, use LDR PC, [PC, #-4] to load the address and branch let mut code = Vec::new(); // LDR PC, [PC, #-4] ; Load the address into PC (branching to it) code.extend_from_slice(&[0x04, 0xF0, 0x1F, 0xE5]); // Store the 32-bit address code.extend_from_slice(&(abs_addr as u32).to_le_bytes()); code } _ => return Err(PatchError::UnsupportedPlatform(triple.to_string())), }, }; let offset = 
obj.append_section_data(text_section, &jump_asm, 8); obj.add_symbol(Symbol { name: name.as_bytes()[name_offset..].to_vec(), value: offset, size: jump_asm.len() as u64, scope: SymbolScope::Linkage, kind: SymbolKind::Text, weak: false, section: SymbolSection::Section(text_section), flags: SymbolFlags::None, // ignore for these stubs }); } // Rust code typically generates Tls accessors as functions (text), but they are referenced // indirectly as data symbols. We end up handling this by adding the TLS symbol as a data // symbol with the initializer as the address of the original tls initializer. That way // if new TLS are added at runtime, they get initialized properly, but otherwise, the // tls initialization check (cbz) properly skips re-initialization on patches. // // ``` // __ZN17crossbeam_channel5waker17current_thread_id9THREAD_ID29_$u7b$$u7b$constant$u7d$$u7d$28_$u7b$$u7b$closure$u7d$$u7d$17h33618d877d86bb77E: // stp x20, x19, [sp, #-0x20]! // stp x29, x30, [sp, #0x10] // add x29, sp, #0x10 // adrp x19, 21603 ; 0x1054bd000 // add x19, x19, #0x998 // ldr x20, [x19] // mov x0, x19 // blr x20 // ldr x8, [x0] // cbz x8, 0x10005acc0 // mov x0, x19 // blr x20 // ldp x29, x30, [sp, #0x10] // ldp x20, x19, [sp], #0x20 // ret // mov x0, x19 // blr x20 // bl __ZN3std3sys12thread_local6native4lazy20Storage$LT$T$C$D$GT$10initialize17h818476638edff4e6E // b 0x10005acac // ``` SymbolKind::Tls => { let tls_section = obj.section_id(StandardSection::Tls); let pointer_width = match triple.pointer_width().unwrap() { PointerWidth::U16 => 2, PointerWidth::U32 => 4, PointerWidth::U64 => 8, }; // Resolve the TLS init data offset and size. // // On ELF: sym.address IS the TLS offset and sym.size is the data size. // On Mach-O: sym.address points to __thread_vars (TLV descriptor), NOT // __thread_data. Mach-O nlist has no size field (always 0). We look up // the corresponding $tlv$init symbol (LLVM convention) to get the real // offset and size within __thread_data. 
// // Note: each patch gets its own TLS copy (not shared with the main exe). // TLS variables reset to their initial value on patch. // Use the full name (with Mach-O `_` prefix) since tls_init_sizes // keys come from the same symbol table and include the prefix. let init_key = format!("{}$tlv$init", name); let (tls_offset, size) = if let Some(&(offset, size)) = cache.tls_init_sizes.get(&init_key) { // macOS: found the $tlv$init symbol with correct offset and size (offset, size) } else if sym.size > 0 { // ELF: sym.address is the TLS offset, sym.size is the data size (sym.address, sym.size) } else if !cache.tls_init_sizes.is_empty() { // macOS fallback: $tlv$init not found but map isn't empty (binary // might be partially stripped). Use entire tdata as upper bound. (0, cache.tls_init_data.len() as u64) } else { // Last resort (ELF with size=0): use pointer width (sym.address, pointer_width) }; let align = size.min(pointer_width).next_power_of_two(); let start = tls_offset as usize; let end = start + size as usize; let init = if end <= cache.tls_init_data.len() { cache.tls_init_data[start..end].to_vec() } else { // Beyond .tdata bounds (.tbss) or Mach-O fallback: zero-init vec![0u8; size as usize] }; // Use add_symbol_data() so the object crate's Mach-O writer auto-creates // __thread_vars TLV descriptors (via macho_add_thread_var). Without this, // the symbol stays in __thread_data and the runtime misinterprets raw init // bytes as a TLV descriptor — first 8 bytes become the thunk pointer. let sym_id = obj.add_symbol(Symbol { name: name.as_bytes()[name_offset..].to_vec(), value: 0, size: 0, scope: SymbolScope::Linkage, kind: SymbolKind::Tls, weak: false, section: SymbolSection::Undefined, flags: SymbolFlags::None, }); obj.add_symbol_data(sym_id, tls_section, &init, align); } // We just assume all non-text symbols are data (globals, statics, etc) _ => { // darwin statics show up as "unknown" symbols even though they are data symbols. 
                // Symbols parsed with an unknown kind are emitted as data symbols so the
                // object writer still gives them a concrete type.
                let kind = match sym.kind {
                    SymbolKind::Unknown => SymbolKind::Data,
                    k => k,
                };

                // plain linux *wants* these flags, but android doesn't.
                // unsure what's going on here, but this is special cased for now.
                // I think the more advanced linkers don't want these flags, but the default linux linker (ld) does.
                let flags = match triple.environment {
                    target_lexicon::Environment::Android => SymbolFlags::None,
                    _ => sym.flags,
                };

                // Emit the symbol at its absolute address so patches can resolve it directly.
                obj.add_symbol(Symbol {
                    name: name.as_bytes()[name_offset..].to_vec(),
                    value: abs_addr,
                    size: 0,
                    scope: SymbolScope::Linkage,
                    kind,
                    weak: sym.is_weak,
                    section: SymbolSection::Absolute,
                    flags,
                });
            }
        }
    }

    Ok(obj.write()?)
}

/// Prepares the base module before running wasm-bindgen.
///
/// This tries to work around how wasm-bindgen works by intelligently promoting non-wasm-bindgen functions
/// to the export table.
///
/// It also moves all functions and memories to be callable indirectly.
pub fn prepare_wasm_base_module(bytes: &[u8]) -> Result> {
    let ParsedModule {
        mut module,
        ids,
        symbols,
        ..
    } = parse_module_with_ids(bytes)?;

    // Due to monomorphizations, functions will get merged and multiple names will point to the same function.
    // Walrus loses this information, so we need to manually parse the names table to get the indices
    // and names of these functions.
    //
    // Unfortunately, the indices it gives us ARE NOT VALID.
    // We need to work around it by using the FunctionId from the module as a link between the merged function names.
    let ifunc_map = collect_func_ifuncs(&module);
    let ifuncs = module
        .funcs
        .par_iter()
        .filter_map(|f| ifunc_map.get(f.name.as_deref()?).map(|_| f.id()))
        .collect::>();

    // Pair each imported function's FunctionId with its ImportId so both can be
    // looked up in the loop below.
    let imported_funcs = module
        .imports
        .iter()
        .filter_map(|i| match i.kind {
            ImportKind::Function(id) => Some((id, i.id())),
            _ => None,
        })
        .collect::>();

    // Tracks which `__saved_wbg_*` export names were already added, to avoid duplicates.
    let mut exported = HashSet::new();

    // Wasm-bindgen will synthesize imports to satisfy its external calls. This facilitates things
    // like inline-js, snippets, and literally the `#[wasm_bindgen]` macro. All calls to JS are
    // just `extern "wbg"` blocks!
    //
    // However, wasm-bindgen will run a GC pass on the module, removing any unused imports.
    let mut make_indirect = vec![];
    for (imported_func, importid) in imported_funcs {
        let import = module.imports.get(importid);
        let name_is_wbg =
            import.name.starts_with("__wbindgen") || import.name.starts_with("__wbg_");
        if name_is_wbg && !name_is_bindgen_symbol(import.name.as_str()) {
            // Build a local trampoline that forwards its arguments to the imported
            // function, then export it under a `__saved_wbg_` name so wasm-bindgen's
            // GC pass can't strip the import out from under us.
            let func = module.funcs.get(imported_func);
            let ty = module.types.get(func.ty());
            let params = ty.params().to_vec();
            let results = ty.results().to_vec();
            let mut builder = FunctionBuilder::new(&mut module.types, ¶ms, &results);
            let mut body = builder
                .name(format!("__saved_wbg_{}", import.name))
                .func_body();
            let locals = params
                .iter()
                .map(|ty| module.locals.add(*ty))
                .collect::>();
            for l in locals.iter() {
                body.local_get(*l);
            }
            body.call(imported_func);
            let new_func_id = module.funcs.add_local(builder.local_func(locals));
            let saved_name = format!("__saved_wbg_{}", import.name);
            if exported.insert(saved_name.clone()) {
                module.exports.add(&saved_name, new_func_id);
            }
            make_indirect.push(new_func_id);
        }
    }

    for (name, index) in symbols.code_symbol_map.iter() {
        if name_is_bindgen_symbol(name) {
            continue;
        }

        let func = module.funcs.get(ids[*index]);

        // We want to preserve the intrinsics from getting gc-ed out.
        //
        // These will create corresponding shim functions in the main module, that the patches will
        // then call. Wasm-bindgen doesn't actually check if anyone uses the `__wbindgen` exports and
        // forcefully deletes them literally by checking for symbols that start with `__wbindgen`. We
        // preserve these symbols by naming them `__saved_wbg_` and then exporting them.
        //
        // When wasm-bindgen runs, it will wrap these intrinsics with an `externref shim`, but we
        // want to preserve the actual underlying function so side modules can call them directly.
        //
        // https://github.com/rustwasm/wasm-bindgen/blob/c35cc9369d5e0dc418986f7811a0dd702fb33ef9/crates/cli-support/src/wit/mod.rs#L1505
        if name.starts_with("__wbindgen") {
            let saved_name = format!("__saved_wbg_{}", name);
            if exported.insert(saved_name.clone()) {
                module.exports.add(&saved_name, func.id());
            }
        }

        // This is basically `--export-all` but designed to work around wasm-bindgen not properly gc-ing
        // imports like __wbindgen_placeholder__ and __wbindgen_externref__
        //
        // We only export local functions, and then make sure they can be accessible indirectly.
        // If we weren't dealing with PIC code, then we could just create local ifuncs in the patch that
        // call the original function directly. Unfortunately, this would require adding a new relocation
        // to corresponding GOT.func entry, which we don't want to deal with.
        //
        // Note that we don't export via the export table, but rather the ifunc table. This is to work
        // around issues on large projects where we hit the maximum number of exports.
        //
        // https://github.com/emscripten-core/emscripten/issues/22863
        if let FunctionKind::Local(_) = &func.kind {
            if !ifuncs.contains(&func.id()) {
                make_indirect.push(func.id());
            }
        }
    }

    // Now we need to make sure to add the new ifuncs to the ifunc segment initializer.
    // We just assume the last segment is the safest one we can add to which is common practice.
    let segment = module
        .elements
        .iter_mut()
        .last()
        .context("Missing ifunc table")?;
    let make_indirect_count = make_indirect.len() as u64;
    let ElementItems::Functions(segment_ids) = &mut segment.items else {
        return Err(PatchError::InvalidModule(
            "Expected ifunc table to be a function table".into(),
        ));
    };
    for func in make_indirect {
        segment_ids.push(func);
    }

    // Grow the backing table (and its maximum, if bounded) to make room for the
    // entries just appended to the active element segment.
    if let ElementKind::Active { table, .. } = segment.kind {
        let table = module.tables.get_mut(table);
        table.initial += make_indirect_count;
        if let Some(max) = table.maximum {
            table.maximum = Some(max + make_indirect_count);
        }
    }

    Ok(module.emit_wasm())
}

/// Check if the name is a wasm-bindgen symbol
///
/// todo(jon): I believe we can just look at all the functions the wasm_bindgen describe export references.
/// this is kinda hacky and slow.
///
/// Uses the heuristics from the wasm-bindgen source code itself:
///
///
fn name_is_bindgen_symbol(name: &str) -> bool {
    name.contains("__wbindgen_describe")
        || name.contains("__wbindgen_externref")
        || name.contains("wasm_bindgen8describe6inform")
        || name.contains("wasm_bindgen..describe..WasmDescribe")
        || name.contains("wasm_bindgen..closure..WasmClosure$GT$8describe")
        || name.contains("wasm_bindgen7closure16Closure$LT$T$GT$4wrap8describe")
}

/// Manually parse the data section from a wasm module
///
/// We need to do this for data symbols because walrus doesn't provide the right range and offset
/// information for data segments. Fortunately, it provides it for code sections, so we only need to
/// do a small amount of extra parsing here.
fn parse_bytes_to_data_segment(bytes: &[u8]) -> Result> {
    let parser = wasmparser::Parser::new(0);
    let mut parser = parser.parse_all(bytes);
    let mut segments = vec![];
    let mut data_range = 0..0;
    let mut symbols = vec![];

    // Process the payloads in the raw wasm file so we can extract the specific sections we need
    while let Some(Ok(payload)) = parser.next() {
        match payload {
            Payload::DataSection(section) => {
                data_range = section.range();
                segments = section
                    .into_iter()
                    .collect::, BinaryReaderError>>()?
            }
            // The "linking" custom section carries the symbol table we need.
            Payload::CustomSection(section) if section.name() == "linking" => {
                let reader = BinaryReader::new(section.data(), 0);
                let reader = LinkingSectionReader::new(reader)?;
                for subsection in reader.subsections() {
                    if let Linking::SymbolTable(map) = subsection?
                    {
                        symbols = map.into_iter().collect::, _>>()?;
                    }
                }
            }
            Payload::CustomSection(section) => {
                tracing::trace!("Skipping Custom section: {:?}", section.name());
            }
            _ => {}
        }
    }

    // Accumulate the data symbols into a btreemap for later use
    let mut data_symbols = BTreeMap::new();
    let mut data_symbol_map = HashMap::new();
    let mut code_symbol_map = BTreeMap::new();
    for (index, symbol) in symbols.iter().enumerate() {
        // Code symbols only need their name -> index mapping recorded.
        if let SymbolInfo::Func { name, index, .. } = symbol {
            if let Some(name) = name {
                code_symbol_map.insert(*name, *index as usize);
            }
            continue;
        }

        // Only defined data symbols are interesting beyond this point.
        let SymbolInfo::Data {
            symbol: Some(symbol),
            name,
            ..
        } = symbol
        else {
            continue;
        };

        data_symbol_map.insert(*name, index);

        let data_segment = segments
            .get(symbol.index as usize)
            .context("Failed to find data segment")?;

        // Translate the symbol's segment-relative offset into an absolute offset
        // within the module's data section.
        let offset: usize =
            data_segment.range.end - data_segment.data.len() + (symbol.offset as usize);
        let range = offset..(offset + symbol.size as usize);

        data_symbols.insert(
            index,
            DataSymbol {
                _index: index,
                _range: range,
                segment_offset: symbol.offset as usize,
                _symbol_size: symbol.size as usize,
                which_data_segment: symbol.index as usize,
            },
        );
    }

    Ok(RawDataSection {
        _data_range: data_range,
        symbols,
        data_symbols,
        data_symbol_map,
        code_symbol_map,
    })
}

/// The raw data section of a wasm module together with the symbol tables needed
/// to map names to code/data indices.
struct RawDataSection<'a> {
    _data_range: Range,
    symbols: Vec>,
    code_symbol_map: BTreeMap<&'a str, usize>,
    data_symbols: BTreeMap,
    data_symbol_map: HashMap<&'a str, usize>,
}

/// A single data symbol resolved against the data segment that contains it.
#[derive(Debug)]
struct DataSymbol {
    _index: usize,
    _range: Range,
    // Offset of the symbol within its owning data segment
    segment_offset: usize,
    _symbol_size: usize,
    // Index of the data segment this symbol lives in
    which_data_segment: usize,
}

/// A walrus module plus the original function-index ordering and the parsed symbol tables.
struct ParsedModule<'a> {
    module: Module,
    ids: Vec,
    symbols: RawDataSection<'a>,
}

/// Parse a module and return the mapping of index to FunctionID.
/// We'll use this mapping to remap ModuleIDs
fn parse_module_with_ids(bindgened: &[u8]) -> Result> {
    // Collected inside walrus's parse callback since that is the only place the
    // original index -> FunctionId ordering is observable.
    let ids = Arc::new(RwLock::new(Vec::new()));
    let ids_ = ids.clone();
    let module = Module::from_buffer_with_config(
        bindgened,
        ModuleConfig::new().on_parse(move |_m, our_ids| {
            let mut ids = ids_.write().expect("No shared writers");
            let mut idx = 0;
            while let Ok(entry) = our_ids.get_func(idx) {
                ids.push(entry);
                idx += 1;
            }
            Ok(())
        }),
    )?;

    // Move the collected ids back out of the Arc/RwLock without cloning.
    let mut ids_ = ids.write().expect("No shared writers");
    let mut ids = vec![];
    std::mem::swap(&mut ids, &mut *ids_);

    let symbols = parse_bytes_to_data_segment(bindgened).context("Failed to parse data segment")?;

    Ok(ParsedModule {
        module,
        ids,
        symbols,
    })
}

/// Get the main sentinel symbol for the given target triple
///
/// We need to special case darwin since `main` is the entrypoint but `_main` is the actual symbol.
/// The entrypoint ends up outside the text section, seemingly, and breaks our aslr detection.
fn main_sentinel(triple: &Triple) -> &'static str {
    match triple.operating_system {
        // The symbol in the symtab is called "_main" but in the dysymtab it is called "main"
        OperatingSystem::MacOSX(_) | OperatingSystem::Darwin(_) | OperatingSystem::IOS(_) => {
            "_main"
        }
        _ => "main",
    }
}


================================================
FILE: packages/cli/src/build/pre_render.rs
================================================
use anyhow::Context;
use dioxus_cli_config::{server_ip, server_port};
use dioxus_dx_wire_format::BuildStage;
use futures_util::{stream::FuturesUnordered, StreamExt};
use std::{
    net::{IpAddr, Ipv4Addr, SocketAddr},
    time::Duration,
};
use tokio::process::Command;

use crate::BuildId;

use super::{AppBuilder, BuilderUpdate};

/// Pre-render the static routes, performing static-site generation
pub(crate) async fn pre_render_static_routes(
    devserver_ip: Option,
    builder: &mut AppBuilder,
    updates: Option<&futures_channel::mpsc::UnboundedSender>,
) -> anyhow::Result<()> {
    // Let any listeners know we've entered the prerendering stage.
    if let Some(updates) = updates {
        updates
            .unbounded_send(BuilderUpdate::Progress {
                stage: BuildStage::Prerendering,
            })
            .unwrap();
    }

    let server_exe = builder.build.main_exe();

    // Use the address passed in through environment variables or default to localhost:9999. We need
    // to default to a value that is different than the CLI default address to avoid conflicts
    let ip = server_ip().unwrap_or_else(|| IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)));
    let port = server_port().unwrap_or(9999);
    let fullstack_address = SocketAddr::new(ip, port);
    let address = fullstack_address.ip().to_string();
    let port = fullstack_address.port().to_string();

    // Borrow port and address so we can easily move them into multiple tasks below
    let address = &address;
    let port = &port;

    tracing::info!("Running SSG at http://{address}:{port} for {server_exe:?}");

    // Environment for the spawned server process, pointing it at the resolved address.
    let vars = builder.child_environment_variables(
        devserver_ip,
        Some(fullstack_address),
        false,
        BuildId::SECONDARY,
    );

    // Run the server executable
    let _child = Command::new(&server_exe)
        .envs(vars)
        .current_dir(server_exe.parent().unwrap())
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .kill_on_drop(true)
        .spawn()?;

    // Borrow reqwest_client so we only move the reference into the futures
    let reqwest_client = reqwest::Client::new();
    let reqwest_client = &reqwest_client;

    // Get the routes from the `/static_routes` endpoint
    let mut routes = None;

    // The server may take a few seconds to start up. Try fetching the route up to 5 times with a one second delay
    const RETRY_ATTEMPTS: usize = 5;
    for i in 0..=RETRY_ATTEMPTS {
        tracing::debug!(
            "Attempting to get static routes from server. Attempt {i} of {RETRY_ATTEMPTS}"
        );

        let request = reqwest_client
            .post(format!("http://{address}:{port}/api/static_routes"))
            .body("{}".to_string())
            .send()
            .await;
        match request {
            Ok(request) => {
                routes = Some(request
                    .json::>()
                    .await
                    .inspect(|text| tracing::debug!("Got static routes: {text:?}"))
                    .context("Failed to parse static routes from the server. Make sure your server function returns Vec with the (default) json encoding")?);
                break;
            }
            Err(err) => {
                // If the request fails, try up to 5 times with a one second delay
                // If it fails 5 times, return the error
                if i == RETRY_ATTEMPTS {
                    return Err(err).context("Failed to get static routes from server. Make sure you have a server function at the `/api/static_routes` endpoint that returns Vec of static routes.");
                }
                tokio::time::sleep(std::time::Duration::from_secs(1)).await;
            }
        }
    }

    let routes = routes.expect(
        "static routes should exist or an error should have been returned on the last attempt",
    );

    // Create a pool of futures that cache each route
    let mut resolved_routes = routes
        .into_iter()
        .map(|route| async move {
            tracing::info!("Rendering {route} for SSG");
            // For each route, ping the server to force it to cache the response for ssg
            let request = reqwest_client
                .get(format!("http://{address}:{port}{route}"))
                .header("Accept", "text/html")
                .send()
                .await?;

            // If it takes longer than 30 seconds to resolve the route, log a warning
            let warning_task = tokio::spawn({
                let route = route.clone();
                async move {
                    tokio::time::sleep(Duration::from_secs(30)).await;
                    tracing::warn!("Route {route} has been rendering for 30 seconds");
                }
            });

            // Wait for the streaming response to completely finish before continuing. We don't use the html it returns directly
            // because it may contain artifacts of intermediate streaming steps while the page is loading. The SSG app should write
            // the final clean HTML to the disk automatically after the request completes.
            let _html = request.text().await?;

            // Cancel the warning task if it hasn't already run
            warning_task.abort();

            Ok::<_, reqwest::Error>(route)
        })
        .collect::>();

    // Drain the futures, logging per-route success/failure without aborting the whole pass.
    while let Some(route) = resolved_routes.next().await {
        match route {
            Ok(route) => tracing::debug!("ssg success: {route:?}"),
            Err(err) => tracing::error!("ssg error: {err:?}"),
        }
    }

    tracing::info!("SSG complete");

    // Explicitly drop (and thus kill, via kill_on_drop) the spawned server now that prerendering is done.
    drop(_child);

    Ok(())
}


================================================
FILE: packages/cli/src/build/request.rs
================================================
//! # [`BuildRequest`] - the core of the build process
//!
//! The [`BuildRequest`] object is the core of the build process. It contains all the resolved arguments
//! flowing in from the CLI, dioxus.toml, env vars, and the workspace.
//!
//! Every BuildRequest is tied to a given workspace and BuildArgs. For simplicity's sake, the BuildArgs
//! struct is used to represent the CLI arguments and all other configuration is basically just
//! extra CLI arguments, but in a configuration format.
//!
//! When [`BuildRequest::build`] is called, it will prepare its work directory in the target folder
//! and then start running the build process. A [`BuildContext`] is required to customize this
//! build process, containing a channel for progress updates and the build mode.
//!
//! The [`BuildMode`] is extremely important since it influences how the build is performed. Most
//! "normal" builds just use [`BuildMode::Base`], but we also support [`BuildMode::Fat`] and
//! [`BuildMode::Thin`]. These builds are used together to power the hot-patching and fast-linking
//! engine.
//! - BuildMode::Base: A normal build generated using `cargo rustc`
//! - BuildMode::Fat: A "fat" build where all dependency rlibs are merged into a static library
//! - BuildMode::Thin: A "thin" build that dynamically links against the artifacts produced by the "fat" build
//!
//! The BuildRequest is also responsible for writing the final build artifacts to disk. This includes
//!
//! - Writing the executable
//! - Processing assets from the artifact
//! - Writing any metadata or configuration files (Info.plist, AndroidManifest.xml)
//! - Bundle splitting (for wasm) and wasm-bindgen
//!
//! In some cases, the BuildRequest also handles the linking of the final executable. Specifically,
//! - For Android, we use `dx` as an opaque linker to dynamically find the true android linker
//! - For hotpatching, the CLI manually links the final executable with a stub file
//!
//! ## Build formats:
//!
//! We support building for the most popular platforms:
//! - Web via wasm-bindgen
//! - macOS via app-bundle
//! - iOS via app-bundle
//! - Android via gradle
//! - Linux via app-image
//! - Windows via exe, msi/msix
//!
//! Note that we are missing some setups that we *should* support:
//! - PWAs, WebWorkers, ServiceWorkers
//! - Web Extensions
//! - Linux via flatpak/snap
//!
//! There are some less popular formats that we might want to support eventually:
//! - TVOS, watchOS
//! - OpenHarmony
//!
//! Also, some deploy platforms have their own bespoke formats:
//! - Cloudflare workers
//! - AWS Lambda
//!
//! Currently, we defer most of our deploy-based bundling to Tauri bundle, though we should migrate
//! to just bundling everything ourselves. This would require us to implement code-signing which
//! is a bit of a pain, but fortunately a solved process ().
//!
//! ## Build Structure
//!
//! Builds generally follow the same structure everywhere:
//! - A main executable
//! - Sidecars (alternate entrypoints, framework plugins, etc)
//! - Assets (images, fonts, etc)
//! - Metadata (Info.plist, AndroidManifest.xml)
//! - Glue code (java, kotlin, javascript etc)
//! - Entitlements for code-signing and verification
//!
//! We need to be careful to not try and put a "round peg in a square hole," but most platforms follow
//! the same pattern.
//!
//! As such, we try to assemble a build directory that's somewhat sensible:
//!
- A main "staging" dir for a given app
//! - Per-profile dirs (debug/release)
//! - A platform dir (ie web/desktop/android/ios)
//! - The "bundle" dir which is basically the `.app` format or `www` dir.
//! - The "executable" dir where the main exe is housed
//! - The "assets" dir where the assets are housed
//! - The "meta" dir where stuff like Info.plist, AndroidManifest.xml, etc are housed
//!
//! There's also some "quirky" folders that need to be stable between builds but don't influence the
//! bundle itself:
//! - session_cache_dir which stores stuff like window position
//!
//! ### Web:
//!
//! Create a folder that is somewhat similar to an app-image (exe + asset)
//! The server is dropped into the `web` folder, even if there's no `public` folder.
//! If there's no server (SPA), we still use the `web` folder, but it only contains the
//! public folder.
//!
//! ```
//! web/
//!     server
//!     assets/
//!     public/
//!         index.html
//!         wasm/
//!             app.wasm
//!             glue.js
//!             snippets/
//!                 ...
//!         assets/
//!             logo.png
//! ```
//!
//! ### Linux:
//!
//!
//! current_exe.join("Assets")
//! ```
//! app.appimage/
//!     AppRun
//!     app.desktop
//!     package.json
//!     assets/
//!         logo.png
//! ```
//!
//! ### Macos
//!
//! We simply use the macos format where binaries are in `Contents/MacOS` and assets are in `Contents/Resources`
//! We put assets in an assets dir such that it generally matches every other platform and we can
//! output `/assets/blah` from manganis.
//! ```
//! App.app/
//!     Contents/
//!         Info.plist
//!         MacOS/
//!         Frameworks/
//!         Resources/
//!             assets/
//!                 blah.icns
//!                 blah.png
//!         CodeResources
//!         _CodeSignature/
//! ```
//!
//! ### iOS
//!
//! Not the same as mac! ios apps are a bit "flattened" in comparison. simpler format, presumably
//! since most ios apps don't ship frameworks/plugins and such.
//!
//! todo(jon): include the signing and entitlements in this format diagram.
//! ```
//! App.app/
//!     main
//!     assets/
//! ```
//!
//! ### Android:
//!
//!
Currently we need to generate a `src` type structure, not a pre-packaged apk structure, since //! we need to compile kotlin and java. This pushes us into using gradle and following a structure //! similar to that of cargo mobile2. Eventually I'd like to slim this down (drop buildSrc) and //! drive the kotlin build ourselves. This would let us drop gradle (yay! no plugins!) but requires //! us to manage dependencies (like kotlinc) ourselves (yuck!). //! //! //! //! Unfortunately, it seems that while we can drop the `android` build plugin, we still will need //! gradle since kotlin is basically gradle-only. //! //! Pre-build: //! ``` //! app.apk/ //! .gradle //! app/ //! src/ //! main/ //! assets/ //! jniLibs/ //! java/ //! kotlin/ //! res/ //! AndroidManifest.xml //! build.gradle.kts //! proguard-rules.pro //! buildSrc/ //! build.gradle.kts //! src/ //! main/ //! kotlin/ //! BuildTask.kt //! build.gradle.kts //! gradle.properties //! gradlew //! gradlew.bat //! settings.gradle //! ``` //! //! Final build: //! ``` //! app.apk/ //! AndroidManifest.xml //! classes.dex //! assets/ //! logo.png //! lib/ //! armeabi-v7a/ //! libmyapp.so //! arm64-v8a/ //! libmyapp.so //! x86/ //! libmyapp.so //! x86_64/ //! libmyapp.so //! ``` //! Notice that we *could* feasibly build this ourselves :) //! //! ### Windows: //! //! Windows does not provide an AppImage format, so instead we're going build the same folder //! structure as an AppImage, but when distributing, we'll create a .exe that embeds the resources //! as an embedded .zip file. When the app runs, it will implicitly unzip its resources into the //! Program Files folder. Any subsequent launches of the parent .exe will simply call the AppRun.exe //! entrypoint in the associated Program Files folder. //! //! This is, in essence, the same as an installer, so we might eventually just support something like msi/msix //! which functionally do the same thing but with a sleeker UI. //! //! 
This means no installers are required and we can bake an updater into the host exe.
//!
//! ## Handling asset lookups:
//! current_exe.join("assets")
//! ```
//! app.appimage/
//!     main.exe
//!     main.desktop
//!     package.json
//!     assets/
//!         logo.png
//! ```
//!
//! Since we support just a few locations, we could just search for the first that exists
//! - usr
//! - ../Resources
//! - assets
//! - Assets
//! - $cwd/assets
//!
//! ```
//! assets::root() ->
//!     mac -> ../Resources/
//!     ios -> ../Resources/
//!     android -> assets/
//!     server -> assets/
//!     liveview -> assets/
//!     web -> /assets/
//! root().join(bundled)
//! ```
//!
//! Every dioxus app can have an optional server executable which will influence the final bundle.
//! This is built in parallel with the app executable during the `build` phase and the progress/status
//! of the build is aggregated.
//!
//! The server will *always* be dropped into the `web` folder since it is considered "web" in nature,
//! and will likely need to be combined with the public dir to be useful.
//!
//! We do our best to assemble ready-to-go bundles here, such that the "bundle" step for each platform
//! can just use the build dir
//!
//! When we write the AppBundle to a folder, it'll contain each bundle for each platform under the app's name:
//! ```
//! dog-app/
//!   build/
//!       web/
//!         server.exe
//!         assets/
//!           some-secret-asset.txt (a server-side asset)
//!         public/
//!           index.html
//!           assets/
//!             logo.png
//!       desktop/
//!          App.app
//!          App.appimage
//!          App.exe
//!          server/
//!              server
//!              assets/
//!                some-secret-asset.txt (a server-side asset)
//!       ios/
//!          App.app
//!          App.ipa
//!       android/
//!          App.apk
//!   bundle/
//!       build.json
//!       Desktop.app
//!       Mobile_x64.ipa
//!       Mobile_arm64.ipa
//!       Mobile_rosetta.ipa
//!       web.appimage
//!       web/
//!         server.exe
//!         assets/
//!             some-secret-asset.txt
//!         public/
//!             index.html
//!             assets/
//!                 logo.png
//!                 style.css
//! ```
//!
//!
When deploying, the build.json file will provide all the metadata that dx-deploy will use to //! push the app to stores, set up infra, manage versions, etc. //! //! The format of each build will follow the name plus some metadata such that when distributing you //! can easily trim off the metadata. //! //! The idea here is that we can run any of the programs in the same way that they're deployed. //! //! ## Bundle structure links //! - apple: //! - appimage: //! //! ## Extra links //! - xbuild: use super::HotpatchModuleCache; use crate::{ AndroidTools, AppManifest, BuildContext, BuildId, BundleFormat, DioxusConfig, Error, LinkAction, LinkerFlavor, ObjectCache, Platform, Renderer, Result, RustcArgs, TargetArgs, TraceSrc, WasmBindgen, WasmOptConfig, Workspace, DX_RUSTC_WRAPPER_ENV_VAR, }; use anyhow::{bail, Context}; use cargo_metadata::diagnostic::Diagnostic; use cargo_toml::{Profile, Profiles, StripSetting}; use depinfo::RustcDepInfo; use dioxus_cli_config::{format_base_path_meta_element, PRODUCT_NAME_ENV}; use dioxus_cli_config::{APP_TITLE_ENV, ASSET_ROOT_ENV}; use dioxus_cli_opt::{process_file_to, AssetManifest}; use itertools::Itertools; use krates::{cm::TargetKind, NodeId}; use manganis::{AssetOptions, BundledAsset, SwiftPackageMetadata}; use manganis_core::{AndroidArtifactMetadata, AssetVariant}; use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, ffi::OsString}; use std::{ collections::{BTreeMap, HashMap, HashSet}, io::Write, path::{Path, PathBuf}, process::Stdio, sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, time::{SystemTime, UNIX_EPOCH}, }; use subsecond_types::JumpTable; use target_lexicon::{Architecture, OperatingSystem, Triple}; use tempfile::TempDir; use tokio::{io::AsyncBufReadExt, process::Command}; use uuid::Uuid; /// This struct is used to plan the build process. 
///
/// The point here is to be able to take in the user's config from the CLI without modifying the
/// arguments in place. Creating a buildplan "resolves" their config into a build plan that can be
/// introspected. For example, the users might not specify a "Triple" in the CLI but the triple will
/// be guaranteed to be resolved here.
///
/// Creating a buildplan also lets us introspect build requests and modularize our build process.
/// This will, however, lead to duplicate fields between the CLI and the build engine. This is fine
/// since we have the freedom to evolve the schema internally without breaking the API.
///
/// All updates from the build will be sent on a global "BuildProgress" channel.
#[derive(Clone)]
pub(crate) struct BuildRequest {
    // Resolved workspace and crate/target selection
    pub(crate) workspace: Arc,
    pub(crate) config: DioxusConfig,
    pub(crate) crate_package: NodeId,
    pub(crate) crate_target: krates::cm::Target,

    // Cargo profile resolution
    pub(crate) profile: String,
    pub(crate) release: bool,

    // Resolved "super triple": bundle format + target triple + device
    pub(crate) bundle: BundleFormat,
    pub(crate) triple: Triple,
    pub(crate) device_name: Option,
    pub(crate) should_codesign: bool,

    // Resolved package/target names
    pub(crate) package: String,
    pub(crate) main_target: String,

    // Extra cargo/rustc configuration forwarded to the build
    pub(crate) features: Vec,
    pub(crate) rustflags: cargo_config2::Flags,
    pub(crate) extra_cargo_args: Vec,
    pub(crate) extra_rustc_args: Vec,
    pub(crate) no_default_features: bool,
    pub(crate) all_features: bool,
    pub(crate) target_dir: PathBuf,

    // Asset/wasm post-processing options
    pub(crate) skip_assets: bool,
    pub(crate) wasm_split: bool,
    pub(crate) debug_symbols: bool,
    pub(crate) keep_names: bool,
    pub(crate) inject_loading_scripts: bool,

    // Linking and serving configuration
    pub(crate) custom_linker: Option,
    pub(crate) base_path: Option,
    pub(crate) using_dioxus_explicitly: bool,

    // Apple code-signing configuration
    pub(crate) apple_entitlements: Option,
    pub(crate) apple_team_id: Option,

    // Stable-between-builds state (eg window position) and output tweaks
    pub(crate) session_cache_dir: PathBuf,
    pub(crate) raw_json_diagnostics: bool,
    pub(crate) windows_subsystem: Option,
}

/// dx can produce different "modes" of a build. A "regular" build is a "base" build.
The Fat and Thin
/// modes are used together to achieve binary patching and linking.
///
/// Guide:
/// ----------
/// - Base: A normal build generated using `cargo rustc`, intended for production use cases
///
/// - Fat: A "fat" build with -Wl,-all_load and no_dead_strip, keeping *every* symbol in the binary.
///   Intended for development for larger up-front builds with faster link times and the ability
///   to binary patch the final binary. On WASM, this also forces wasm-bindgen to generate all
///   JS-WASM bindings, saving us the need to re-wasmbindgen the final binary.
///
/// - Thin: A "thin" build that dynamically links against the dependencies produced by the "fat" build.
///   This is generated by calling rustc *directly* and might be more fragile to construct, but
///   generates *much* faster than a regular base or fat build.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, Debug, PartialEq)]
pub enum BuildMode {
    /// A normal build generated using `cargo rustc`
    ///
    /// "run" indicates whether this build is intended to be run immediately after building.
    /// This means we try to capture the build environment, saving vars like `CARGO_MANIFEST_DIR`
    /// for the running executable.
    Base { run: bool },

    /// A "Fat" build generated with cargo rustc and dx as a custom linker without -Wl,-dead-strip
    Fat,

    /// A "thin" build generated with `rustc` directly and dx as a custom linker
    Thin {
        /// List of changed files causing this rebuild. Mostly used for diagnostics
        changed_files: Vec,

        /// Which workspace crates had source file changes in this edit.
        changed_crates: Vec,

        /// The ASLR slide of the running program, used to hardcode symbol jumps
        aslr_reference: u64,

        /// The captured RustcArgs for every crate in the workspace, collected by RUSTC_WORKSPACE_WRAPPER
        /// This is used for replaying rustc invocations for workspace hotpatching
        workspace_rustc_args: HashMap,

        /// Cumulative set of all workspace crates modified since the fat build.
        modified_crates: HashSet,

        /// Cache of compiled objects from previous thin builds, used by future re-linking
        object_cache: ObjectCache,

        /// Cache of initial binary parsing which speeds up stub creation
        cache: Arc,
    },
}

/// The end result of a build.
///
/// Contains the final asset manifest, the executable, and metadata about the build.
/// Note that the `exe` might be stale and/or overwritten by the time you read it!
///
/// The patch cache is only populated on fat builds and then used for thin builds (see `BuildMode::Thin`).
#[derive(Clone, Debug)]
pub struct BuildArtifacts {
    pub(crate) root_dir: PathBuf,
    pub(crate) exe: PathBuf,
    pub(crate) workspace_rustc_args: HashMap,
    pub(crate) time_start: SystemTime,
    pub(crate) time_end: SystemTime,
    pub(crate) assets: AssetManifest,
    pub(crate) android_artifacts: Vec,
    pub(crate) swift_sources: Vec,
    pub(crate) mode: BuildMode,
    pub(crate) patch_cache: Option>,
    pub(crate) depinfo: RustcDepInfo,
    pub(crate) build_id: BuildId,
    pub(crate) object_cache: ObjectCache,
}

impl BuildRequest {
    /// Create a new build request.
    ///
    /// This method consolidates various inputs into a single source of truth. It combines:
    /// - Command-line arguments provided by the user.
    /// - The crate's `Cargo.toml`.
    /// - The `dioxus.toml` configuration file.
    /// - User-specific CLI settings.
    /// - The workspace metadata.
    /// - Host-specific details (e.g., Android tools, installed frameworks).
    /// - The intended target platform.
    ///
    /// Fields may be duplicated from the inputs to allow for autodetection and resolution.
    ///
    /// Autodetection is performed for unspecified fields where possible.
    ///
    /// Note: Build requests are typically created only when the CLI is invoked or when significant
    /// changes are detected in the `Cargo.toml` (e.g., features added or removed).
pub(crate) async fn new(args: &TargetArgs, workspace: Arc) -> Result { let crate_package = workspace.find_main_package(args.package.clone())?; let target_kind = match args.example.is_some() { true => TargetKind::Example, false => TargetKind::Bin, }; let main_package = &workspace.krates[crate_package]; let target_name = args .example .clone() .or(args.bin.clone()) .or_else(|| { if let Some(default_run) = &main_package.default_run { return Some(default_run.to_string()); } let bin_count = main_package .targets .iter() .filter(|x| x.kind.contains(&target_kind)) .count(); if bin_count != 1 { return None; } main_package.targets.iter().find_map(|x| { if x.kind.contains(&target_kind) { Some(x.name.clone()) } else { None } }) }) .unwrap_or(workspace.krates[crate_package].name.clone()); // Use the main_target for the client + server build if it is set, otherwise use the target name for this // specific build. This is important for @client @server syntax so we use the client's output directory for the bundle. let main_target = args.client_target.clone().unwrap_or(target_name.clone()); let crate_target = main_package .targets .iter() .find(|target| { target_name == target.name.as_str() && target.kind.contains(&target_kind) }) .with_context(|| { let target_of_kind = |kind|-> String { let filtered_packages = main_package .targets .iter() .filter_map(|target| { target.kind.contains(kind).then_some(target.name.as_str()) }).collect::>(); filtered_packages.join(", ")}; if let Some(example) = &args.example { let examples = target_of_kind(&TargetKind::Example); format!("Failed to find example {example}. \nAvailable examples are:\n{examples}") } else if let Some(bin) = &args.bin { let binaries = target_of_kind(&TargetKind::Bin); format!("Failed to find binary {bin}. \nAvailable binaries are:\n{binaries}") } else { format!("Failed to find target {target_name}. \nIt looks like you are trying to build dioxus in a library crate. 
\ You either need to run dx from inside a binary crate or build a specific example with the `--example` flag. \ Available examples are:\n{}", target_of_kind(&TargetKind::Example)) } })? .clone(); // Load config from Dioxus.toml and/or inline config in the target's source file. // Inline config in doc comments takes precedence over Dioxus.toml. let config = workspace .load_dioxus_config(crate_package, Some(crate_target.src_path.as_std_path()))? .unwrap_or_default(); // We usually use the simulator unless --device is passed *or* a device is detected by probing. // For now, though, since we don't have probing, it just defaults to false // Tools like xcrun/adb can detect devices let device = args.device.clone(); let using_dioxus_explicitly = main_package .dependencies .iter() .any(|dep| dep.name == "dioxus"); /* Determine which features, triple, profile, etc to pass to the build. Most of the time, users should use `dx serve --` where the platform name directly corresponds to the feature in their cargo.toml. So, - `dx serve --web` will enable the `web` feature - `dx serve --mobile` will enable the `mobile` feature - `dx serve --desktop` will enable the `desktop` feature In this case, we set default-features to false and then add back the default features that aren't renderers, and then add the feature for the given renderer (ie web/desktop/mobile). We call this "no-default-features-stripped." There are a few cases where the user doesn't need to pass a platform. - they selected one via `dioxus = { features = ["web"] }` - they have a single platform in their default features `default = ["web"]` - there is only a single non-server renderer as a feature `web = ["dioxus/web"], server = ["dioxus/server"]` - they compose the super triple via triple + bundleformat + features Note that we only use the names of the features to correspond with the platform. 
Platforms are "super triples", meaning they contain information about - bundle format - target triple - how to serve - enabled features By default, the --platform presets correspond to: - web: bundle(web), triple(wasm32), serve(http-serve), features("web") - desktop: alias to mac/win/linux - mac: bundle(mac), triple(host), serve(appbundle-open), features("desktop") - windows: bundle(exefolder), triple(host), serve(run-exe), features("desktop") - linux: bundle(appimage), triple(host), serve(run-exe), features("desktop") - ios: bundle(ios), triple(arm64-apple-ios), serve(ios-simulator/xcrun), features("mobile") - android: bundle(android), triple(arm64-apple-ios), serve(android-emulator/adb), features("mobile") - server: bundle(server), triple(host), serve(run-exe), features("server") (and disables the client) - liveview: bundle(liveview), triple(host), serve(run-exe), features("liveview") - unknown: Fullstack usage is inferred from the presence of the fullstack feature or --fullstack. */ let mut features = args.features.clone(); let no_default_features = args.no_default_features; let all_features = args.all_features; let mut triple = args.target.clone(); let mut renderer = args.renderer; let mut bundle_format = args.bundle; let mut platform = args.platform; // the crate might be selecting renderers but the user also passes a renderer. this is weird // ie dioxus = { features = ["web"] } but also --platform desktop // anyways, we collect it here in the event we need it if platform is not specified. 
let dioxus_direct_renderer = Self::renderer_enabled_by_dioxus_dependency(main_package); let known_features_as_renderers = Self::features_that_enable_renderers(main_package); // The crate might enable multiple platforms or no platforms at // We collect all the platforms it enables first and then select based on the --platform arg let enabled_renderers = if no_default_features { vec![] } else { Self::enabled_cargo_toml_default_features_renderers(main_package) }; // Try the easy autodetects. // - if the user has `dioxus = { features = ["web"] }` // - if the `default =["web"]` or `default = ["dioxus/web"]` // - if there's only one non-server platform ie `web = ["dioxus/web"], server = ["dioxus/server"]` // Only do this if we're explicitly using dioxus if matches!(platform, Platform::Unknown) && using_dioxus_explicitly { let auto = dioxus_direct_renderer .or_else(|| { if enabled_renderers.len() == 1 { Some(enabled_renderers[0].clone()) } else { None } }) .or_else(|| { // If multiple renderers are enabled, pick the first non-server one if enabled_renderers.len() == 2 && enabled_renderers .iter() .any(|f| matches!(f.0, Renderer::Server)) { return Some( enabled_renderers .iter() .find(|f| !matches!(f.0, Renderer::Server)) .cloned() .unwrap(), ); } None }) .or_else(|| { // Pick the first non-server feature in the cargo.toml let non_server_features = known_features_as_renderers .iter() .filter(|f| f.1.as_str() != "server") .collect::>(); if non_server_features.len() == 1 { Some(non_server_features[0].clone()) } else { None } }); if let Some((direct, feature)) = auto { match direct { _ if feature == "mobile" || feature == "dioxus/mobile" => { bail!( "Could not autodetect mobile platform. Use --ios or --android instead." 
); } Renderer::Webview | Renderer::Native => { if cfg!(target_os = "macos") { platform = Platform::MacOS; } else if cfg!(target_os = "linux") { platform = Platform::Linux; } else if cfg!(target_os = "windows") { platform = Platform::Windows; } } Renderer::Server => platform = Platform::Server, Renderer::Liveview => platform = Platform::Liveview, Renderer::Web => platform = Platform::Web, } renderer = renderer.or(Some(direct)); } } // Set the super triple from the platform if it's provided. // Otherwise, we attempt to guess it from the rest of their inputs. match platform { Platform::Unknown => {} Platform::Web => { if main_package.features.contains_key("web") && renderer.is_none() { features.push("web".into()); } renderer = renderer.or(Some(Renderer::Web)); bundle_format = bundle_format.or(Some(BundleFormat::Web)); triple = triple.or(Some("wasm32-unknown-unknown".parse()?)); } Platform::MacOS => { if main_package.features.contains_key("desktop") && renderer.is_none() { features.push("desktop".into()); } renderer = renderer.or(Some(Renderer::Webview)); bundle_format = bundle_format.or(Some(BundleFormat::MacOS)); triple = triple.or(Some(Triple::host())); } Platform::Windows => { if main_package.features.contains_key("desktop") && renderer.is_none() { features.push("desktop".into()); } renderer = renderer.or(Some(Renderer::Webview)); bundle_format = bundle_format.or(Some(BundleFormat::Windows)); triple = triple.or(Some(Triple::host())); } Platform::Linux => { if main_package.features.contains_key("desktop") && renderer.is_none() { features.push("desktop".into()); } renderer = renderer.or(Some(Renderer::Webview)); bundle_format = bundle_format.or(Some(BundleFormat::Linux)); triple = triple.or(Some(Triple::host())); } Platform::Ios => { if main_package.features.contains_key("mobile") && renderer.is_none() { features.push("mobile".into()); } renderer = renderer.or(Some(Renderer::Webview)); bundle_format = bundle_format.or(Some(BundleFormat::Ios)); match device.is_some() 
{ // If targeting device, we want to build for the device which is always aarch64 true => triple = triple.or(Some("aarch64-apple-ios".parse()?)), // If the host is aarch64, we assume the user wants to build for iOS simulator false if matches!(Triple::host().architecture, Architecture::Aarch64(_)) => { triple = triple.or(Some("aarch64-apple-ios-sim".parse()?)) } // Otherwise, it's the x86_64 simulator, which is just x86_64-apple-ios _ => triple = triple.or(Some("x86_64-apple-ios".parse()?)), } } Platform::Android => { if main_package.features.contains_key("mobile") && renderer.is_none() { features.push("mobile".into()); } renderer = renderer.or(Some(Renderer::Webview)); bundle_format = bundle_format.or(Some(BundleFormat::Android)); // maybe probe adb? if let Some(_device_name) = device.as_ref() { if triple.is_none() { triple = Some( crate::get_android_tools() .context("Failed to get android tools")? .autodetect_android_device_triple() .await, ); } } else { triple = triple.or(Some({ match Triple::host().architecture { Architecture::X86_32(_) => "i686-linux-android".parse()?, Architecture::X86_64 => "x86_64-linux-android".parse()?, Architecture::Aarch64(_) => "aarch64-linux-android".parse()?, _ => "aarch64-linux-android".parse()?, } })); } } Platform::Server => { if main_package.features.contains_key("server") && renderer.is_none() { features.push("server".into()); } renderer = renderer.or(Some(Renderer::Server)); bundle_format = bundle_format.or(Some(BundleFormat::Server)); triple = triple.or(Some(Triple::host())); } Platform::Liveview => { if main_package.features.contains_key("liveview") && renderer.is_none() { features.push("liveview".into()); } renderer = renderer.or(Some(Renderer::Liveview)); bundle_format = bundle_format.or(Some(BundleFormat::Server)); triple = triple.or(Some(Triple::host())); } } // If default features are enabled, we need to add the default features // which don't enable a renderer if !no_default_features { 
features.extend(Self::rendererless_features(main_package)); features.dedup(); features.sort(); } // The triple will be the triple passed or the host if using dioxus. let triple = if using_dioxus_explicitly { triple.context("Could not automatically detect target triple")? } else { triple.unwrap_or(Triple::host()) }; // The bundle format will be the bundle format passed or the host. let bundle = if using_dioxus_explicitly { bundle_format.context("Could not automatically detect bundle format")? } else { bundle_format.unwrap_or(BundleFormat::host()) }; // Add any features required to turn on the client if let Some(renderer) = renderer { if let Some(feature) = Self::feature_for_platform_and_renderer(main_package, &triple, renderer) { features.push(feature); features.dedup(); } } // Set the profile of the build if it's not already set // This is mostly used for isolation of builds (preventing thrashing) but also useful to have multiple performance profiles // We might want to move some of these profiles into dioxus.toml and make them "virtual". let profile = match args.profile.clone() { Some(profile) => profile, None => bundle.profile_name(args.release), }; // Determine if we should codesign let should_codesign = args.codesign || device.is_some() || args.apple_entitlements.is_some(); // Determining release mode is based on the profile, actually, so we need to check that let release = workspace.is_release_profile(&profile); // Determine the --package we'll pass to cargo. // todo: I think this might be wrong - we don't want to use main_package necessarily... let package = args .package .clone() .unwrap_or_else(|| main_package.name.clone()); // Somethings we override are also present in the user's config. 
// If we can't get them by introspecting cargo, then we need to get them from the config // // This involves specifically two fields: // - The linker since we override it for Android and hotpatching // - RUSTFLAGS since we also override it for Android and hotpatching let cargo_config = cargo_config2::Config::load().unwrap(); let mut custom_linker = cargo_config.linker(triple.to_string()).ok().flatten(); let mut rustflags = cargo_config2::Flags::default(); // Make sure to take into account the RUSTFLAGS env var and the CARGO_TARGET__RUSTFLAGS for env in [ "RUSTFLAGS".to_string(), format!("CARGO_TARGET_{triple}_RUSTFLAGS"), ] { if let Ok(flags) = std::env::var(env) { rustflags .flags .extend(cargo_config2::Flags::from_space_separated(&flags).flags); } } // Use the user's linker if the specify it at the target level if let Ok(target) = cargo_config.target(triple.to_string()) { if let Some(flags) = target.rustflags { rustflags.flags.extend(flags.flags); } } // When we do android builds we need to make sure we link against the android libraries // We also `--export-dynamic` to make sure we can do shenanigans like `dlsym` the `main` symbol if matches!(bundle, BundleFormat::Android) { rustflags.flags.extend([ "-Clink-arg=-landroid".to_string(), "-Clink-arg=-llog".to_string(), "-Clink-arg=-lOpenSLES".to_string(), "-Clink-arg=-lc++abi".to_string(), "-Clink-arg=-Wl,--export-dynamic".to_string(), format!( "-Clink-arg=-Wl,--sysroot={}", workspace.android_tools()?.sysroot().display() ), ]); } // Make sure we set the sysroot for ios builds in the event the user doesn't have it set if matches!(bundle, BundleFormat::Ios) && matches!( triple.operating_system, target_lexicon::OperatingSystem::IOS(_) ) { let xcode_path = Workspace::get_xcode_path() .await .unwrap_or_else(|| "/Applications/Xcode.app".to_string().into()); let sysroot_location = match triple.environment { target_lexicon::Environment::Sim => xcode_path 
.join("Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk"), _ => xcode_path.join("Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk"), }; if sysroot_location.exists() && !rustflags.flags.iter().any(|f| f == "-isysroot") { rustflags.flags.extend([ "-Clink-arg=-isysroot".to_string(), format!("-Clink-arg={}", sysroot_location.display()), ]); } } // automatically set the getrandom backend for web builds if the user requested it if matches!(bundle, BundleFormat::Web) && args.wasm_js_cfg { rustflags.flags.extend( cargo_config2::Flags::from_space_separated(r#"--cfg getrandom_backend="wasm_js""#) .flags, ); } // If no custom linker is set, then android falls back to us as the linker if custom_linker.is_none() && bundle == BundleFormat::Android { let min_sdk_version = config.application.android_min_sdk_version.unwrap_or(28); custom_linker = Some( workspace .android_tools()? .android_cc(&triple, min_sdk_version), ); } let target_dir = std::env::var("CARGO_TARGET_DIR") .ok() .map(PathBuf::from) .or_else(|| cargo_config.build.target_dir.clone()) .unwrap_or_else(|| workspace.workspace_root().join("target")); // If the user provided a profile and wasm_split is enabled, we should check that LTO=true and debug=true if args.wasm_split { if let Some(profile_data) = workspace.cargo_toml.profile.custom.get(&profile) { use cargo_toml::{DebugSetting, LtoSetting}; if matches!(profile_data.lto, Some(LtoSetting::None) | None) { tracing::warn!("wasm-split requires LTO to be enabled in the profile. \ Please set `lto = true` in the `[profile.{profile}]` section of your Cargo.toml"); } if matches!(profile_data.debug, Some(DebugSetting::None) | None) { tracing::warn!("wasm-split requires debug symbols to be enabled in the profile. 
\ Please set `debug = true` in the `[profile.{profile}]` section of your Cargo.toml"); } } } #[allow(deprecated)] let session_cache_dir = args .session_cache_dir .clone() .unwrap_or_else(|| TempDir::new().unwrap().into_path()); let extra_rustc_args = shell_words::split(&args.rustc_args.clone().unwrap_or_default()) .context("Failed to parse rustc args")?; let extra_cargo_args = shell_words::split(&args.cargo_args.clone().unwrap_or_default()) .context("Failed to parse cargo args")?; tracing::debug!( r#"Target Info: • features: {features:?} • triple: {triple} • bundle format: {bundle:?} • session cache dir: {session_cache_dir:?} • linker: {custom_linker:?} • target_dir: {target_dir:?}"#, ); Ok(Self { features, bundle, // We hardcode passing `--no-default-features` to Cargo because dx manually enables // the default features we want. no_default_features: true, all_features, crate_package, crate_target, profile, triple, device_name: device, workspace, config, target_dir, custom_linker, extra_rustc_args, extra_cargo_args, release, package, main_target, rustflags, using_dioxus_explicitly, should_codesign, session_cache_dir, skip_assets: args.skip_assets, base_path: args.base_path.clone(), wasm_split: args.wasm_split, debug_symbols: args.debug_symbols, keep_names: args.keep_names, inject_loading_scripts: args.inject_loading_scripts, apple_entitlements: args.apple_entitlements.clone(), apple_team_id: args.apple_team_id.clone(), raw_json_diagnostics: args.raw_json_diagnostics, windows_subsystem: args.windows_subsystem.clone(), }) } pub(crate) async fn prebuild(&self, ctx: &BuildContext) -> Result<()> { // Create the session cache directory let cache_dir = self.session_cache_dir(); _ = std::fs::create_dir_all(&cache_dir); _ = std::fs::create_dir_all(self.rustc_wrapper_args_dir()); _ = std::fs::File::create_new(self.link_err_file()); _ = std::fs::File::create_new(self.link_args_file()); _ = std::fs::File::create_new(self.windows_command_file()); if !matches!(ctx.mode, 
BuildMode::Thin { .. }) { self.prepare_build_dir(ctx)?; } if !ctx.is_primary_build() { return Ok(()); } // Run the tailwind build before bundling anything else _ = crate::TailwindCli::run_once( self.package_manifest_dir(), self.config.application.tailwind_input.clone(), self.config.application.tailwind_output.clone(), ) .await; // We want to copy over the prebuilt OpenSSL binaries to ~/.dx/prebuilt/openssl- if self.bundle == BundleFormat::Android { AndroidTools::unpack_prebuilt_openssl()?; } Ok(()) } pub(crate) async fn build(&self, ctx: &BuildContext) -> Result { let time_start = SystemTime::now(); // If we forget to do this, then we won't get the linker args since rust skips the full build // We need to make sure to not react to this though, so the filemap must cache it _ = self.bust_fingerprint(ctx); // Run the cargo build to produce our artifacts. // For thin builds this also pre-compiles workspace dep crates before the tip. let mut artifacts = self.cargo_build(ctx).await?; // Write the build artifacts to the bundle on the disk match &ctx.mode { BuildMode::Thin { aslr_reference, cache, modified_crates, .. } => { self.write_patch(ctx, *aslr_reference, &mut artifacts, cache, modified_crates) .await?; } BuildMode::Base { .. 
} | BuildMode::Fat => {
                ctx.status_start_bundle();

                // Strip and write the main executable plus its frameworks/assets/metadata.
                self.strip_binary(&artifacts).await?;
                self.write_executable(ctx, &artifacts.exe, &mut artifacts.assets)
                    .await
                    .context("Failed to write executable")?;
                self.write_frameworks(ctx, &artifacts)
                    .await
                    .context("Failed to write frameworks")?;
                self.write_assets(ctx, &artifacts.assets)
                    .await
                    .context("Failed to write assets")?;
                self.write_metadata()
                    .await
                    .context("Failed to write metadata")?;

                // Install prebuilt Android plugin artifacts (AARs + Gradle deps)
                if self.bundle == BundleFormat::Android && !artifacts.android_artifacts.is_empty() {
                    let names: Vec<_> = artifacts
                        .android_artifacts
                        .iter()
                        .map(|a| a.plugin_name.as_str().to_string())
                        .collect();
                    ctx.status_compiling_native_plugins(format!(
                        "Kotlin build: {}",
                        names.join(", ")
                    ));
                    self.install_android_artifacts(&artifacts.android_artifacts)
                        .context("Failed to install Android plugin artifacts")?;
                }

                // Swift plugin sources only apply to Apple bundle formats.
                if matches!(self.bundle, BundleFormat::Ios | BundleFormat::MacOS)
                    && !artifacts.swift_sources.is_empty()
                {
                    let names: Vec<_> = artifacts
                        .swift_sources
                        .iter()
                        .map(|s| s.plugin_name.as_str().to_string())
                        .collect();
                    ctx.status_compiling_native_plugins(format!(
                        "Swift build: {}",
                        names.join(", ")
                    ));

                    // Compile Swift packages from source
                    self.compile_swift_sources(&artifacts.swift_sources)
                        .await
                        .context("Failed to compile Swift packages")?;

                    // Then embed Swift standard libraries
                    self.embed_swift_stdlibs(&artifacts.swift_sources)
                        .await
                        .context("Failed to embed Swift standard libraries")?;
                }

                // Compile and install Apple Widget Extensions from Dioxus.toml config
                if matches!(self.bundle, BundleFormat::Ios | BundleFormat::MacOS)
                    && !self.config.ios.widget_extensions.is_empty()
                {
                    let names: Vec<_> = self
                        .config
                        .ios
                        .widget_extensions
                        .iter()
                        .map(|w| w.display_name.clone())
                        .collect();
                    ctx.status_compiling_native_plugins(format!(
                        "Widget build: {}",
                        names.join(", ")
                    ));
                    self.compile_widget_extensions()
                        .await
                        .context("Failed to compile widget extensions")?;
                }

                self.optimize(ctx)
                    .await
                    .context("Failed to optimize build")?;
                self.assemble(ctx)
                    .await
                    .context("Failed to assemble build")?;

                // Populate the patch cache if we're in fat mode
                if matches!(ctx.mode, BuildMode::Fat) {
                    artifacts.patch_cache = Some(Arc::new(self.create_patch_cache(&artifacts.exe)?));
                }

                tracing::debug!("Bundle created at {}", self.root_dir().display());
            }
        }

        // Record the build duration as a telemetry event
        self.record_build_duration(time_start, ctx);

        Ok(artifacts)
    }

    /// Run the cargo build by assembling the build command and executing it.
    ///
    /// This method needs to be very careful with processing output since errors being swallowed will
    /// be very confusing to the user.
    ///
    /// NOTE(review): the return type's generic parameter was lost in transit; restored to
    /// `Result<BuildArtifacts>` to match the `Ok(BuildArtifacts { .. })` this function
    /// returns — confirm against the original file.
    async fn cargo_build(&self, ctx: &BuildContext) -> Result<BuildArtifacts> {
        let time_start = SystemTime::now();

        // For thin builds, compile workspace dep crates before the tip.
        // This updates dep rlibs on disk so cargo links the tip against fresh code.
        let object_cache = self.compile_workspace_deps(ctx).await?;

        // Extract the unit count of the crate graph so build_cargo has more accurate data
        // "Thin" builds only build the final exe, so we only need to build one crate
        let crate_count = match ctx.mode {
            BuildMode::Thin { ..
} => 1,
            _ => self.get_unit_count_estimate(&ctx.mode).await,
        };

        // Update the status to show that we're starting the build and how many crates we expect to build
        ctx.status_starting_build(crate_count);

        let mut cmd = self.build_command(&ctx.mode)?;

        tracing::debug!(dx_src = ?TraceSrc::Build, "Executing cargo for {} using {}", self.bundle, self.triple);

        let mut child = cmd
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .context("Failed to spawn cargo build")?;

        let stdout = tokio::io::BufReader::new(child.stdout.take().unwrap());
        let stderr = tokio::io::BufReader::new(child.stderr.take().unwrap());

        // NOTE(review): the `Option`'s generic parameter was lost in transit; restored to
        // `Option<PathBuf>` since it is assigned from `artifact.artifact: PathBuf` and
        // `artifact.executable.map(Into::into)` below — confirm against the original file.
        let mut output_location: Option<PathBuf> = None;
        let mut stdout = stdout.lines();
        let mut stderr = stderr.lines();
        let mut units_compiled = 0;
        let mut emitting_error = false;

        // Drain stdout/stderr line-by-line until both streams are exhausted, parsing
        // each line as a cargo JSON message.
        loop {
            use cargo_metadata::Message;

            let line = tokio::select! {
                Ok(Some(line)) = stdout.next_line() => line,
                Ok(Some(line)) = stderr.next_line() => line,
                else => break,
            };

            // If raw JSON diagnostics are requested, relay the line directly
            if self.raw_json_diagnostics {
                println!("{}", line);
            }

            let Some(Ok(message)) = Message::parse_stream(std::io::Cursor::new(line)).next() else {
                continue;
            };

            match message {
                Message::BuildScriptExecuted(_) => units_compiled += 1,
                Message::CompilerMessage(msg) => ctx.status_build_diagnostic(msg.message),
                Message::TextLine(line) => {
                    // Handle the case where we're getting lines directly from rustc.
                    // These are in a different format than the normal cargo output, though I imagine
                    // this parsing code is quite fragile/sensitive to changes in cargo, cargo_metadata, rustc, etc.
                    #[derive(Deserialize)]
                    struct RustcArtifact {
                        artifact: PathBuf,
                        emit: String,
                    }

                    // These outputs look something like:
                    //
                    // { "artifact":"target/debug/deps/libdioxus_core-4f2a0b3c1e5f8b7c.rlib", "emit":"link" }
                    //
                    // There are other outputs like depinfo that we might be interested in the future.
                    //
                    // NOTE(review): the turbofish targets below were lost in transit; restored to
                    // `RustcArtifact` (local struct above) and `cargo_metadata::diagnostic::Diagnostic`
                    // (the type `status_build_diagnostic` receives via `msg.message` above) — confirm.
                    if let Ok(artifact) = serde_json::from_str::<RustcArtifact>(&line) {
                        if artifact.emit == "link" {
                            output_location = Some(artifact.artifact);
                        }
                    }

                    // Handle direct rustc diagnostics
                    if let Ok(diag) = serde_json::from_str::<cargo_metadata::diagnostic::Diagnostic>(&line) {
                        ctx.status_build_diagnostic(diag);
                    }

                    // For whatever reason, if there's an error while building, we still receive the TextLine
                    // instead of an "error" message. However, the following messages *also* tend to
                    // be the error message, and don't start with "error:". So we'll check if we've already
                    // emitted an error message and if so, we'll emit all following messages as errors too.
                    //
                    // todo: This can lead to some really ugly output though, so we might want to look
                    // into a more reliable way to detect errors propagating out of the compiler. If
                    // we always wrapped rustc, then we could store this data somewhere in a much more
                    // reliable format.
                    if line.trim_start().starts_with("error:") {
                        emitting_error = true;
                    }

                    // Note that previous text lines might have set emitting_error to true
                    match emitting_error {
                        true => ctx.status_build_error(line),
                        false => ctx.status_build_message(line),
                    }
                }
                Message::CompilerArtifact(artifact) => {
                    units_compiled += 1;
                    ctx.status_build_progress(units_compiled, crate_count, artifact.target.name);
                    output_location = artifact.executable.map(Into::into);
                }
                // todo: this can occasionally swallow errors, so we should figure out what exactly is going wrong
                //       since that is a really bad user experience.
                Message::BuildFinished(finished) => {
                    if !finished.success {
                        bail!(
                            "cargo build finished with errors for target: {} [{}]",
                            self.main_target,
                            self.triple
                        );
                    }
                }
                _ => {}
            }
        }

        // Load per-crate rustc args from the wrapper directory.
        // Each workspace crate compiled through the wrapper has its own JSON file:
        // "{crate_name}.lib.json" (key: "{crate_name}.lib") for lib targets and
        // "{crate_name}.bin.json" (key: "{crate_name}.bin") for bin targets.
        //
        // NOTE(review): the deserialization target below was lost in transit; restored to
        // `RustcArgs` based on the `.link_args` field accessed further down — confirm.
        let mut workspace_rustc_args = HashMap::new();
        let args_dir = self.rustc_wrapper_args_dir();
        if let Ok(entries) = std::fs::read_dir(&args_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.extension().is_some_and(|e| e == "json") {
                    if let Ok(contents) = std::fs::read_to_string(&path) {
                        if let Ok(args) = serde_json::from_str::<RustcArgs>(&contents) {
                            if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
                                workspace_rustc_args.insert(stem.to_string(), args);
                            }
                        }
                    }
                }
            }
        }

        tracing::trace!(
            "Loaded workspace rustc args from {}: keys={:?}",
            args_dir.display(),
            workspace_rustc_args.keys().collect::<Vec<_>>(),
        );

        // If there's any warnings from the linker, we should print them out
        if let Ok(linker_warnings) = std::fs::read_to_string(self.link_err_file()) {
            if !linker_warnings.is_empty() {
                if output_location.is_none() {
                    tracing::error!("Linker warnings: {}", linker_warnings);
                } else {
                    tracing::debug!("Linker warnings: {}", linker_warnings);
                }
            }
        }

        // Collect the linker args and attach them to the tip crate's bin entry
        let tip_crate_name = self.tip_crate_name();
        let tip_bin_key = format!("{tip_crate_name}.bin");
        if let Some(tip_args) = workspace_rustc_args.get_mut(&tip_bin_key) {
            tip_args.link_args = std::fs::read_to_string(self.link_args_file())
                .context("Failed to read link args from file")?
                .lines()
                .map(|s| s.to_string())
                .collect();
        }

        let exe = output_location.context("Cargo build failed - no output location. Toggle tracing mode (press `t`) for more information.")?;

        // Fat builds need to be linked with the fat linker.
// Would also like to link here for thin builds
        if matches!(ctx.mode, BuildMode::Fat) {
            ctx.status_starting_link();
            let link_start = SystemTime::now();
            self.run_fat_link(
                &exe,
                &workspace_rustc_args
                    .get(&tip_bin_key)
                    .cloned()
                    .unwrap_or_default(),
            )
            .await?;
            tracing::debug!(
                "Fat linking completed in {}us",
                SystemTime::now()
                    .duration_since(link_start)
                    .unwrap()
                    .as_micros()
            );
        }

        // Extract all linker metadata (assets, Android/iOS plugins, widget extensions) in a single pass.
        let (assets, android_artifacts, swift_sources) =
            self.collect_assets_and_metadata(&exe, ctx).await?;

        let time_end = SystemTime::now();
        let mode = ctx.mode.clone();
        let depinfo = RustcDepInfo::from_file(&exe.with_extension("d")).unwrap_or_default();
        tracing::debug!(
            "Build completed successfully in {}us: {:?}",
            time_end.duration_since(time_start).unwrap().as_micros(),
            exe
        );

        Ok(BuildArtifacts {
            time_end,
            exe,
            workspace_rustc_args,
            time_start,
            assets,
            android_artifacts,
            swift_sources,
            mode,
            depinfo,
            root_dir: self.root_dir(),
            patch_cache: None,
            build_id: ctx.build_id,
            object_cache,
        })
    }

    /// For thin builds, compile workspace dep crates BEFORE the tip crate.
    ///
    /// This updates dep rlibs on disk so cargo links the tip against fresh code. Handles cascade
    /// (recompiling workspace dependents for SVH consistency) and lib+bin tip targets.
    ///
    /// Returns the updated `ObjectCache` — defaulting to empty for non-thin builds.
    ///
    /// NOTE(review): the return type's generic parameter was lost in transit; restored to
    /// `Result<ObjectCache>` to match both `Ok(ObjectCache::new(..))` and `Ok(object_cache)`
    /// below — confirm against the original file.
    async fn compile_workspace_deps(&self, ctx: &BuildContext) -> Result<ObjectCache> {
        let BuildMode::Thin {
            workspace_rustc_args,
            changed_crates,
            object_cache,
            ..
        } = &ctx.mode
        else {
            return Ok(ObjectCache::new(&self.session_cache_dir()));
        };

        let tip_name = self.tip_crate_name();
        let mut object_cache = object_cache.clone();

        // Compile workspace dep crates with cascade. Start with the explicitly changed dep
        // crates (already in leaf-first order from handle_file_change). As we compile each,
        // add the crate's workspace dependents so their rlibs have consistent SVH references.
        let mut crates_to_compile: Vec<String> = changed_crates
            .iter()
            .filter(|c| *c != &tip_name)
            .cloned()
            .collect();

        let mut compiled = HashSet::new();
        let mut idx = 0;
        // Worklist loop: `crates_to_compile` grows as dependents are discovered, so we
        // iterate by index instead of a `for` loop over the vec.
        while idx < crates_to_compile.len() {
            let crate_name = crates_to_compile[idx].clone();
            idx += 1;
            if !compiled.insert(crate_name.clone()) || crate_name == tip_name {
                continue;
            }
            let Some(rustc_args) = workspace_rustc_args.get(&format!("{crate_name}.lib")) else {
                tracing::warn!("No captured rustc args for workspace crate {crate_name}, skipping");
                continue;
            };
            tracing::debug!("Compiling workspace dep crate: {crate_name}");
            self.compile_dep_crate(&crate_name, rustc_args)
                .await
                .with_context(|| format!("Failed to compile workspace dep crate '{crate_name}'"))?;
            // Cache the freshly built objects so thin-link can reuse them.
            if let Some(rlib_path) = self.find_rlib_for_crate(&crate_name, rustc_args) {
                if let Err(e) = object_cache.cache_from_rlib(&crate_name, &rlib_path) {
                    tracing::warn!("Failed to cache objects from rlib for {crate_name}: {e}");
                }
            }
            for dependent in self.workspace_dependents_of(&crate_name) {
                if dependent != tip_name && !compiled.contains(&dependent) {
                    tracing::debug!(
                        "Cascade: recompiling {dependent} (depends on recompiled {crate_name})"
                    );
                    crates_to_compile.push(dependent);
                }
            }
        }

        // If the tip crate has a lib target (src/lib.rs + src/main.rs), compile it
        // before the bin target so the bin links against the fresh lib rlib.
        let lib_key = format!("{tip_name}.lib");
        if let Some(lib_args) = workspace_rustc_args.get(&lib_key) {
            // Snapshot the rlib mtime so we can tell whether the compile actually rebuilt it.
            let rlib_pre = self.find_rlib_for_crate(&tip_name, lib_args);
            let pre_modified = rlib_pre
                .as_ref()
                .and_then(|p| std::fs::metadata(p).ok())
                .and_then(|m| m.modified().ok());
            tracing::info!("Compiling tip lib target: {lib_key}");
            if let Err(e) = self.compile_dep_crate(&tip_name, lib_args).await {
                tracing::warn!("Failed to compile tip lib target: {e}");
            } else if let Some(rlib_path) = self.find_rlib_for_crate(&tip_name, lib_args) {
                let post_modified = std::fs::metadata(&rlib_path)
                    .ok()
                    .and_then(|m| m.modified().ok());
                let rlib_changed = match (pre_modified, post_modified) {
                    (Some(pre), Some(post)) => post > pre,
                    _ => true,
                };
                tracing::info!(
                    "Found lib rlib at: {} (modified={})",
                    rlib_path.display(),
                    rlib_changed,
                );
                match object_cache.cache_from_rlib(&lib_key, &rlib_path) {
                    Ok(()) => {
                        let count = object_cache.get(&lib_key).map(|v| v.len()).unwrap_or(0);
                        tracing::info!("Cached {count} objects from tip lib rlib");
                    }
                    Err(e) => tracing::warn!("Failed to cache tip lib objects: {e}"),
                }
            } else {
                tracing::warn!("Could not find rlib for tip lib target {tip_name}");
            }
        } else {
            tracing::debug!(
                "No lib target for tip crate (key '{lib_key}' not in workspace_rustc_args, keys={:?})",
                workspace_rustc_args.keys().collect::<Vec<_>>()
            );
        }

        Ok(object_cache)
    }

    /// Collect assets and plugin metadata from the final executable in one pass
    ///
    /// This method extracts assets and FFI plugin metadata (Android/Swift) from the
    /// binary. Permissions are now read from Dioxus.toml, not extracted from the binary.
async fn collect_assets_and_metadata( &self, exe: &Path, ctx: &BuildContext, ) -> Result<( AssetManifest, Vec, Vec, )> { use super::assets::extract_symbols_from_file; let skip_assets = self.skip_assets; let needs_android_artifacts = self.bundle == BundleFormat::Android; let needs_swift_packages = matches!(self.bundle, BundleFormat::Ios | BundleFormat::MacOS); if skip_assets && !needs_android_artifacts && !needs_swift_packages { return Ok((AssetManifest::default(), Vec::new(), Vec::new())); } ctx.status_extracting_assets(); let super::assets::SymbolExtractionResult { assets: extracted_assets, android_artifacts, swift_packages, } = extract_symbols_from_file(exe).await?; let asset_manifest = if skip_assets { AssetManifest::default() } else { let mut manifest = AssetManifest::default(); for asset in extracted_assets { manifest.insert_asset(asset); } if matches!(self.bundle, BundleFormat::Web) && matches!(ctx.mode, BuildMode::Base { .. } | BuildMode::Fat) { if let Some(dir) = self.user_public_dir() { for entry in walkdir::WalkDir::new(&dir) .into_iter() .filter_map(|e| e.ok()) .filter(|e| e.file_type().is_file()) { let from = entry.path().to_path_buf(); let relative_path = from.strip_prefix(&dir).unwrap(); let to = format!("../{}", relative_path.display()); manifest.insert_asset(BundledAsset::new( from.to_string_lossy().as_ref(), to.as_str(), manganis_core::AssetOptions::builder() .with_hash_suffix(false) .into_asset_options(), )); } } } manifest }; if !android_artifacts.is_empty() { tracing::debug!( "Found {} Android artifact declaration(s)", android_artifacts.len() ); for artifact in android_artifacts.iter() { tracing::debug!( " Plugin: {} Artifact: {}", artifact.plugin_name.as_str(), artifact.artifact_path.as_str() ); } } if !swift_packages.is_empty() { tracing::debug!( "Found {} Swift package declaration(s) for {:?}", swift_packages.len(), self.bundle ); for source in &swift_packages { tracing::debug!( " Plugin: {} (Swift package path={} product={})", 
source.plugin_name.as_str(), source.package_path.as_str(), source.product.as_str() ); } } Ok((asset_manifest, android_artifacts, swift_packages)) } /// Install Android plugin artifacts by bundling source folders as Gradle submodules. /// /// This function handles both prebuilt AARs and source folders: /// - If `artifact_path` is a file (ends in .aar), copy it to libs/ and add file dependency /// - If `artifact_path` is a directory, copy it as a Gradle submodule and add project dependency /// /// All sources are bundled first, then a single Gradle build compiles everything in `assemble()`. fn install_android_artifacts( &self, android_artifacts: &[AndroidArtifactMetadata], ) -> Result<()> { let libs_dir = self.root_dir().join("app").join("libs"); std::fs::create_dir_all(&libs_dir)?; let plugins_dir = self.root_dir().join("plugins"); let build_gradle = self.root_dir().join("app").join("build.gradle.kts"); let settings_gradle = self.root_dir().join("settings.gradle"); for artifact in android_artifacts { let artifact_path = PathBuf::from(artifact.artifact_path.as_str()); let plugin_name = artifact.plugin_name.as_str(); if artifact_path.is_dir() { // It's a source folder - copy it as a Gradle submodule tracing::debug!( "Bundling Android plugin '{}' from source: {}", plugin_name, artifact_path.display() ); // Create module directory let module_dir = plugins_dir.join(plugin_name); self.copy_build_dir_recursive(&artifact_path, &module_dir)?; // Strip version specifiers from build.gradle.kts to avoid conflicts with parent project self.strip_gradle_plugin_versions(&module_dir)?; // Add to settings.gradle self.ensure_settings_gradle_include(&settings_gradle, plugin_name)?; // Add project dependency to app/build.gradle.kts let dep_line = format!("implementation(project(\":plugins:{}\"))", plugin_name); self.ensure_gradle_dependency(&build_gradle, &dep_line)?; tracing::debug!( "Added Android plugin module :plugins:{} from {}", plugin_name, artifact_path.display() ); } else if 
artifact_path.extension().is_some_and(|ext| ext == "aar") {
                // It's a prebuilt AAR - copy directly to libs
                if !artifact_path.exists() {
                    anyhow::bail!(
                        "Android plugin artifact not found: {}",
                        artifact_path.display()
                    );
                }
                let filename = artifact_path
                    .file_name()
                    .ok_or_else(|| {
                        anyhow::anyhow!(
                            "Android plugin artifact path has no filename: {}",
                            artifact_path.display()
                        )
                    })?
                    .to_owned();
                let dest_file = libs_dir.join(&filename);
                std::fs::copy(&artifact_path, &dest_file)?;
                tracing::debug!(
                    "Copied Android artifact {} -> {}",
                    artifact_path.display(),
                    dest_file.display()
                );
                // Reference the copied AAR from app/build.gradle.kts as a file dependency.
                let dep_line = format!(
                    "implementation(files(\"libs/{}\"))",
                    filename.to_string_lossy()
                );
                self.ensure_gradle_dependency(&build_gradle, &dep_line)?;
            } else {
                // Anything that is neither a directory nor an .aar is a hard error.
                anyhow::bail!(
                    "Android artifact path is neither a directory nor an AAR file: {}",
                    artifact_path.display()
                );
            }

            // Add any extra Gradle dependencies specified by the plugin
            // (one dependency declaration per non-empty line).
            for dependency in artifact
                .gradle_dependencies
                .as_str()
                .lines()
                .map(str::trim)
                .filter(|line| !line.is_empty())
            {
                self.ensure_gradle_dependency(&build_gradle, dependency)?;
            }
        }

        Ok(())
    }

    /// Recursively copy a directory and its contents.
    ///
    /// Skips "build", ".gradle", and any other hidden (dot-prefixed) directories so
    /// stale Gradle output never lands in the bundled module.
    #[allow(clippy::only_used_in_recursion)]
    fn copy_build_dir_recursive(&self, src: &Path, dst: &Path) -> Result<()> {
        std::fs::create_dir_all(dst)?;
        for entry in std::fs::read_dir(src)? {
            let entry = entry?;
            let src_path = entry.path();
            let dst_path = dst.join(entry.file_name());
            if src_path.is_dir() {
                // Skip build directories and hidden folders
                let name = entry.file_name();
                let name_str = name.to_string_lossy();
                if name_str == "build" || name_str == ".gradle" || name_str.starts_with('.') {
                    continue;
                }
                self.copy_build_dir_recursive(&src_path, &dst_path)?;
            } else {
                std::fs::copy(&src_path, &dst_path)?;
            }
        }
        Ok(())
    }

    /// Strip version specifiers from build.gradle.kts plugins block.
    ///
    /// When a plugin module is included as a subproject, having version specifiers in the
    /// plugins block causes conflicts because the parent project already has the plugins
    /// on the classpath. This function removes version specifications like:
    /// - `version "8.4.2"` or `version "1.9.24"`
    /// - Entire version calls from plugin declarations
    fn strip_gradle_plugin_versions(&self, module_dir: &Path) -> Result<()> {
        use std::fs;

        // Missing build.gradle.kts is fine — nothing to strip.
        let build_gradle = module_dir.join("build.gradle.kts");
        if !build_gradle.exists() {
            return Ok(());
        }

        let contents = fs::read_to_string(&build_gradle)?;

        // Remove version specifications from plugin declarations
        // Matches: id("com.android.library") version "8.4.2" -> id("com.android.library")
        // Matches: kotlin("android") version "1.9.24" -> kotlin("android")
        // NOTE(review): the pattern matches ` version "…"` anywhere in the file, not only
        // inside the plugins block — assumed acceptable for typical plugin build scripts.
        let version_pattern = regex::Regex::new(r#"\s+version\s+"[^"]+""#).expect("Invalid regex");
        let cleaned = version_pattern.replace_all(&contents, "");

        // Only rewrite the file when something actually changed, to keep mtimes stable.
        if cleaned != contents {
            fs::write(&build_gradle, cleaned.as_ref())?;
            tracing::debug!(
                "Stripped version specifiers from {}",
                build_gradle.display()
            );
        }

        Ok(())
    }

    /// Add a module include to settings.gradle if not already present.
    ///
    /// Idempotent: if the exact include line is already in the file, nothing is written.
    fn ensure_settings_gradle_include(
        &self,
        settings_gradle: &Path,
        plugin_name: &str,
    ) -> Result<()> {
        use std::fs;

        let include_line = format!("include ':plugins:{}'", plugin_name);

        let mut contents = fs::read_to_string(settings_gradle)?;
        if contents.contains(&include_line) {
            return Ok(());
        }

        // Add the include at the end
        contents.push_str(&format!("\n{}\n", include_line));
        fs::write(settings_gradle, contents)?;

        Ok(())
    }

    /// Bundle and compile Swift packages from source into dynamic frameworks.
    ///
    /// This function:
    /// 1. Calls ios_swift::compile_swift_sources to compile Swift packages
    /// 2. The function creates proper .framework bundles from the dylibs
    /// 3. Installs the frameworks to the app's Frameworks folder
    async fn compile_swift_sources(&self, swift_sources: &[SwiftPackageMetadata]) -> Result<()> {
        if swift_sources.is_empty() {
            return Ok(());
        }

        // Intermediate compile artifacts go under the target dir, not the bundle.
        let build_dir = self.target_dir.join("swift-build");
        std::fs::create_dir_all(&build_dir)?;

        // Compile Swift sources and get the framework bundle path
        let framework_path = super::ios_swift::compile_swift_sources(
            swift_sources,
            &self.triple,
            &build_dir,
            self.release,
        )
        .await?;

        // If a framework was created, install it to the Frameworks folder
        if let Some(framework) = framework_path {
            self.install_swift_framework(&framework).await?;
        }

        Ok(())
    }

    /// Install a Swift framework bundle into the app's Frameworks directory.
    ///
    /// Replaces any previously-installed framework of the same name wholesale.
    async fn install_swift_framework(&self, framework_path: &Path) -> Result<()> {
        let frameworks_dir = self.frameworks_folder();
        std::fs::create_dir_all(&frameworks_dir)?;

        let framework_name = framework_path
            .file_name()
            .ok_or_else(|| anyhow::anyhow!("Invalid framework path: no filename"))?;
        let dest = frameworks_dir.join(framework_name);

        // Remove existing framework if present
        if dest.exists() {
            std::fs::remove_dir_all(&dest)?;
        }

        // Copy the entire framework bundle
        self.copy_build_dir_recursive(framework_path, &dest)?;

        tracing::debug!(
            "Installed Swift framework '{}' to {}",
            framework_name.to_string_lossy(),
            frameworks_dir.display()
        );

        Ok(())
    }

    /// Embed Swift standard libraries into the app bundle when Swift plugins are present.
    async fn embed_swift_stdlibs(&self, swift_sources: &[SwiftPackageMetadata]) -> Result<()> {
        // No Swift plugins means nothing links against the Swift runtime.
        if swift_sources.is_empty() {
            return Ok(());
        }

        // Map our target to the SDK platform name swift-stdlib-tool expects.
        // Non-Apple bundles have no Swift stdlib to embed.
        let platform_flag = match self.bundle {
            BundleFormat::Ios => {
                let triple_str = self.triple.to_string();
                // NOTE(review): x86_64 iOS is assumed to always mean the simulator — confirm.
                if triple_str.contains("sim") || triple_str.contains("x86_64") {
                    "iphonesimulator"
                } else {
                    "iphoneos"
                }
            }
            BundleFormat::MacOS => "macosx",
            _ => return Ok(()),
        };

        let frameworks_dir = self.frameworks_folder();
        std::fs::create_dir_all(&frameworks_dir)?;

        let exe_path = self.main_exe();
        if !exe_path.exists() {
            anyhow::bail!(
                "Expected executable at {} when embedding Swift stdlibs",
                exe_path.display()
            );
        }

        // Use swift-stdlib-tool to copy Swift runtime libraries needed by:
        // 1. The main executable (--scan-executable)
        // 2. Any Swift frameworks in the Frameworks folder (--scan-folder)
        let output = Command::new("xcrun")
            .arg("swift-stdlib-tool")
            .arg("--copy")
            .arg("--platform")
            .arg(platform_flag)
            .arg("--scan-executable")
            .arg(&exe_path)
            .arg("--scan-folder")
            .arg(&frameworks_dir)
            .arg("--destination")
            .arg(&frameworks_dir)
            .output()
            .await?;

        // Surface both streams on failure — the tool sometimes reports useful
        // context on stdout rather than stderr.
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            let stdout = String::from_utf8_lossy(&output.stdout);
            anyhow::bail!(
                "swift-stdlib-tool failed: {}{}",
                stderr.trim(),
                if stdout.trim().is_empty() {
                    "".to_string()
                } else {
                    format!(" | {}", stdout.trim())
                }
            );
        }

        Ok(())
    }

    /// Compile and install Apple Widget Extensions from Dioxus.toml config.
    ///
    /// This processes widget extensions declared in `[[ios.widget_extensions]]` by:
    /// 1. Compiling the Swift package as a Widget Extension executable
    /// 2. Creating the .appex bundle structure with Info.plist
    /// 3. Installing to the app's PlugIns folder
    async fn compile_widget_extensions(&self) -> Result<()> {
        let widget_configs = &self.config.ios.widget_extensions;
        if widget_configs.is_empty() {
            return Ok(());
        }

        tracing::debug!(
            "Compiling {} Apple Widget Extension(s)",
            widget_configs.len()
        );

        let build_dir = self.target_dir.join("widget-build");
        std::fs::create_dir_all(&build_dir)?;

        let app_bundle_id = self.bundle_identifier();
        // Per-widget deployment targets fall back to the app-wide iOS setting, then "16.0".
        let default_deployment_target = self
            .config
            .ios
            .deployment_target
            .as_deref()
            .unwrap_or("16.0");

        let plugins_dir = self.plugins_folder();
        std::fs::create_dir_all(&plugins_dir)?;

        for widget_config in widget_configs {
            // Widget sources are declared relative to the package manifest dir.
            let source_path = self.package_manifest_dir().join(&widget_config.source);

            let deployment_target = widget_config
                .deployment_target
                .as_deref()
                .unwrap_or(default_deployment_target);

            let widget_source = super::ios_swift::AppleWidgetSource {
                source_path,
                display_name: widget_config.display_name.clone(),
                bundle_id_suffix: widget_config.bundle_id_suffix.clone(),
                deployment_target: deployment_target.to_string(),
                module_name: widget_config.module_name.clone(),
            };

            let appex_path = super::ios_swift::compile_apple_widget(
                &widget_source,
                &self.triple,
                &build_dir,
                &app_bundle_id,
                self.release,
            )
            .await
            .with_context(|| {
                format!(
                    "Failed to compile widget extension '{}'",
                    widget_source.display_name
                )
            })?;

            // Install the .appex bundle to PlugIns/, replacing any previous install.
            let appex_name = appex_path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_else(|| "Widget.appex".to_string());
            let dest_path = plugins_dir.join(&appex_name);

            if dest_path.exists() {
                std::fs::remove_dir_all(&dest_path)?;
            }

            self.copy_build_dir_recursive(&appex_path, &dest_path)?;

            tracing::debug!(
                "Installed widget extension '{}' to {}",
                widget_source.display_name,
                dest_path.display()
            );
        }

        Ok(())
    }

    /// Take the output of rustc and make it into the main exe of the bundle
    ///
    /// For wasm, we'll want to run `wasm-bindgen` to make it a wasm binary along with some other optimizations
    /// Other platforms we might do some stripping or other optimizations
    /// Move the executable to the workdir
    async fn write_executable(
        &self,
        ctx: &BuildContext,
        exe: &Path,
        assets: &mut AssetManifest,
    ) -> Result<()> {
        match self.bundle {
            // Run wasm-bindgen on the wasm binary and set its output to be in the bundle folder
            // Also run wasm-opt on the wasm binary, and sets the index.html since that's also the "executable".
            //
            // The wasm stuff will be in a folder called "wasm" in the workdir.
            //
            // Final output format:
            // ```
            // dx/
            //     app/
            //         web/
            //             bundle/
            //             build/
            //                 server.exe
            //                 public/
            //                     index.html
            //                     wasm/
            //                         app.wasm
            //                         glue.js
            //                         snippets/
            //                             ...
            //                     assets/
            //                         logo.png
            // ```
            BundleFormat::Web => {
                self.bundle_web(ctx, exe, assets).await?;
            }

            // this will require some extra oomf to get the multi architecture builds...
            // for now, we just copy the exe into the current arch (which, sorry, is hardcoded for my m1)
            // we'll want to do multi-arch builds in the future, so there won't be *one* exe dir to worry about
            // eventually `exe_dir` and `main_exe` will need to take in an arch and return the right exe path
            //
            // todo(jon): maybe just symlink this rather than copy it?
            // we might want to eventually use the objcopy logic to handle this
            //
            // https://github.com/rust-mobile/xbuild/blob/master/xbuild/template/lib.rs
            // https://github.com/rust-mobile/xbuild/blob/master/apk/src/lib.rs#L19
            //
            // These are all super simple, just copy the exe into the folder
            // eventually, perhaps, maybe strip + encrypt the exe?
BundleFormat::Android | BundleFormat::MacOS | BundleFormat::Windows | BundleFormat::Linux | BundleFormat::Ios | BundleFormat::Server => { std::fs::create_dir_all(self.exe_dir())?; std::fs::copy(exe, self.main_exe())?; } } Ok(()) } async fn write_frameworks( &self, _ctx: &BuildContext, artifacts: &BuildArtifacts, ) -> Result<()> { let framework_dir = self.frameworks_folder(); // We use the rustc for the tip crate `main.rs` because that's where the linking happens let direct_rustc = artifacts .workspace_rustc_args .get(&format!("{}.bin", self.tip_crate_name())) .cloned() .unwrap_or_default(); // We have some prebuilt stuff that needs to be copied into the framework dir let openssl_dir = AndroidTools::openssl_lib_dir(&self.triple); let openssl_dir_disp = openssl_dir.display().to_string(); for arg in &direct_rustc.link_args { // todo - how do we handle windows dlls? we don't want to bundle the system dlls // for now, we don't do anything with dlls, and only use .dylibs and .so files // Write dylibs and dlls to the frameworks folder if arg.ends_with(".dylib") | arg.ends_with(".so") { let from = PathBuf::from(arg); let to = framework_dir.join(from.file_name().unwrap()); _ = std::fs::remove_file(&to); tracing::debug!("Copying framework from {from:?} to {to:?}"); _ = std::fs::create_dir_all(&framework_dir); // in dev and on normal oses, we want to symlink the file // otherwise, just copy it (since in release you want to distribute the framework) if cfg!(any(windows, unix)) && !self.release { #[cfg(windows)] std::os::windows::fs::symlink_file(from, to).with_context(|| { "Failed to symlink framework into bundle: {from:?} -> {to:?}" })?; #[cfg(unix)] std::os::unix::fs::symlink(from, to).with_context(|| { "Failed to symlink framework into bundle: {from:?} -> {to:?}" })?; } else { std::fs::copy(from, to)?; } } // Always create the framework dir for android if self.bundle == BundleFormat::Android { _ = std::fs::create_dir_all(&framework_dir); } // On android, the c++_shared flag 
means we need to copy the libc++_shared.so precompiled // library to the jniLibs folder if self.bundle == BundleFormat::Android && arg.contains("-lc++_shared") { std::fs::copy( self.workspace.android_tools()?.libcpp_shared(&self.triple), framework_dir.join("libc++_shared.so"), ) .with_context(|| "Failed to copy libc++_shared.so into bundle")?; } // Copy over libssl and libcrypto if they are present in the link args if self.bundle == BundleFormat::Android && arg.contains(openssl_dir_disp.as_str()) { let libssl_source = openssl_dir.join("libssl.so"); let libcrypto_source = openssl_dir.join("libcrypto.so"); let libssl_target = framework_dir.join("libssl.so"); let libcrypto_target = framework_dir.join("libcrypto.so"); std::fs::copy(&libssl_source, &libssl_target).with_context(|| { format!("Failed to copy libssl.so into bundle\nfrom {libssl_source:?}\nto {libssl_target:?}") })?; std::fs::copy(&libcrypto_source, &libcrypto_target).with_context( || format!("Failed to copy libcrypto.so into bundle\nfrom {libcrypto_source:?}\nto {libcrypto_target:?}"), )?; } } Ok(()) } fn frameworks_folder(&self) -> PathBuf { match self.triple.operating_system { OperatingSystem::Darwin(_) | OperatingSystem::MacOSX(_) => { self.root_dir().join("Contents").join("Frameworks") } OperatingSystem::IOS(_) => self.root_dir().join("Frameworks"), OperatingSystem::Linux if self.bundle == BundleFormat::Android => { let arch = match self.triple.architecture { Architecture::Aarch64(_) => "arm64-v8a", Architecture::Arm(_) => "armeabi-v7a", Architecture::X86_32(_) => "x86", Architecture::X86_64 => "x86_64", _ => panic!( "Unsupported architecture for Android: {:?}", self.triple.architecture ), }; self.root_dir() .join("app") .join("src") .join("main") .join("jniLibs") .join(arch) } OperatingSystem::Linux | OperatingSystem::Windows => self.root_dir(), _ => self.root_dir(), } } /// Get the folder where Apple Widget Extensions (.appex bundles) are installed. 
/// This is only applicable to iOS and macOS bundles. fn plugins_folder(&self) -> PathBuf { match self.triple.operating_system { OperatingSystem::Darwin(_) | OperatingSystem::MacOSX(_) => { self.root_dir().join("Contents").join("PlugIns") } OperatingSystem::IOS(_) => self.root_dir().join("PlugIns"), _ => self.root_dir().join("PlugIns"), } } /// Copy the assets out of the manifest and into the target location /// /// Should be the same on all platforms - just copy over the assets from the manifest into the output directory async fn write_assets(&self, ctx: &BuildContext, assets: &AssetManifest) -> Result<()> { // Server doesn't need assets - web will provide them if !ctx.is_primary_build() { return Ok(()); } let asset_dir = self.asset_dir(); // First, clear the asset dir of any files that don't exist in the new manifest _ = std::fs::create_dir_all(&asset_dir); // Create a set of all the paths that new files will be bundled to let mut keep_bundled_output_paths: HashSet<_> = assets .unique_assets() .map(|a| asset_dir.join(a.bundled_path())) .collect(); // The CLI creates a .manifest.json file in the asset dir to keep track of the assets and // other build metadata. If we can't parse this file (or the CLI version changed), then we // want to re-copy all the assets rather than trying to do an incremental update. 
let clear_cache = self .load_manifest() .map(|manifest| manifest.cli_version != crate::VERSION.as_str()) .unwrap_or(true); if clear_cache { keep_bundled_output_paths.clear(); } tracing::trace!( "Keeping bundled output paths: {:#?}", keep_bundled_output_paths ); // todo(jon): we also want to eventually include options for each asset's optimization and compression, which we currently aren't let mut assets_to_transfer = vec![]; // Queue the bundled assets (skip sidecar assets that require special processing) for bundled in assets.unique_assets() { let from = PathBuf::from(bundled.absolute_source_path()); let to = asset_dir.join(bundled.bundled_path()); // prefer to log using a shorter path relative to the workspace dir by trimming the workspace dir let from_ = from .strip_prefix(self.workspace_dir()) .unwrap_or(from.as_path()); let to_ = from .strip_prefix(self.workspace_dir()) .unwrap_or(to.as_path()); tracing::debug!("Copying asset {from_:?} to {to_:?}"); assets_to_transfer.push((from, to, *bundled.options())); } let asset_count = assets_to_transfer.len(); let started_processing = AtomicUsize::new(0); let copied = AtomicUsize::new(0); // Parallel Copy over the assets and keep track of progress with an atomic counter let progress = ctx.tx.clone(); let ws_dir = self.workspace_dir(); // Optimizing assets is expensive and blocking, so we do it in a tokio spawn blocking task tokio::task::spawn_blocking(move || { assets_to_transfer .par_iter() .try_for_each(|(from, to, options)| { let processing = started_processing.fetch_add(1, Ordering::SeqCst); let from_ = from.strip_prefix(&ws_dir).unwrap_or(from); tracing::trace!( "Starting asset copy {processing}/{asset_count} from {from_:?}" ); let res = process_file_to(options, from, to); if let Err(err) = res.as_ref() { tracing::error!("Failed to copy asset {from:?}: {err}"); } let finished = copied.fetch_add(1, Ordering::SeqCst); BuildContext::status_copied_asset( &progress, finished, asset_count, from.to_path_buf(), ); 
res.map(|_| ()) }) }) .await .map_err(|e| anyhow::anyhow!("A task failed while trying to copy assets: {e}"))??; // Remove the wasm dir if we packaged it to an "asset"-type app if self.should_bundle_to_asset() { _ = std::fs::remove_dir_all(self.wasm_bindgen_out_dir()); } // Write the version file so we know what version of the optimizer we used self.write_app_manifest(assets).await?; Ok(()) } /// Run our custom linker setup to generate a patch file in the right location /// /// This should be the only case where the cargo output is a "dummy" file and requires us to /// manually do any linking. /// /// We also run some post processing steps here, like extracting out any new assets. /// /// `extra_objects` contains additional object file paths from compiled workspace dep crates /// that should be included in the patch dylib. These are combined with the tip crate's /// `.rcgu.o` files extracted from linker args, creating a self-contained patch. async fn write_patch( &self, ctx: &BuildContext, aslr_reference: u64, artifacts: &mut BuildArtifacts, cache: &Arc, modified_crates: &HashSet, ) -> Result<()> { ctx.status_hotpatching(); let tip_name = self.tip_crate_name(); // Cache tip crate objects from the FRESH linker args (from the just-completed // thin build, not the stale ones from ctx.mode's fat build). 
let tip_bin_key = format!("{tip_name}.bin"); let args = artifacts .workspace_rustc_args .get(&tip_bin_key) .cloned() .with_context(|| { format!( "Missing rustc args for tip bin target '{tip_bin_key}' \ (available keys: {:?})", artifacts.workspace_rustc_args.keys().collect::>() ) })?; // Collect objs from tip and re-cache them in the obj cache map let tip_object_paths: Vec = args .link_args .iter() .filter(|arg| arg.ends_with(".rcgu.o")) .map(PathBuf::from) .collect(); if !tip_object_paths.is_empty() { artifacts .object_cache .cache_from_paths(&tip_name, &tip_object_paths) .context("Failed to cache objs during patch")?; } // Collect cached object paths from all modified dep crates. // Objects are already on disk in the object cache directory. // These must NOT be deleted after linking — they persist across patches. let mut cached_objects: Vec = Vec::new(); for dep_name in modified_crates.iter().filter(|c| *c != &tip_name) { if let Some(paths) = artifacts.object_cache.get(dep_name) { cached_objects.extend(paths.iter().cloned()); } } // If the tip has a lib target (lib+bin crate), include its cached objects too. let lib_key = format!("{tip_name}.lib"); if let Some(paths) = artifacts.object_cache.get(&lib_key) { cached_objects.extend(paths.iter().cloned()); } // Extract out the incremental object files. // // This is sadly somewhat of a hack, but it might be a moderately reliable hack. // // When rustc links your project, it passes the args as how a linker would expect, but with // a somewhat reliable ordering. These are all internal details to cargo/rustc, so we can't // rely on them *too* much, but the *are* fundamental to how rust compiles your projects, and // linker interfaces probably won't change drastically for another 40 years. // // We need to tear apart this command and only pass the args that are relevant to our thin link. // Mainly, we don't want any rlibs to be linked. 
Occasionally some libraries like objc_exception // export a folder with their artifacts - unsure if we actually need to include them. Generally // you can err on the side that most *libraries* don't need to be linked here since dlopen // satisfies those symbols anyways when the binary is loaded. // // Many args are passed twice, too, which can be confusing, but generally don't have any real // effect. Note that on macos/ios, there's a special macho header that needs to be set, otherwise // dyld will complain. // // Also, some flags in darwin land might become deprecated, need to be super conservative: // - https://developer.apple.com/forums/thread/773907 // // The format of this command roughly follows: // ``` // clang // /dioxus/target/debug/subsecond-cli // /var/folders/zs/gvrfkj8x33d39cvw2p06yc700000gn/T/rustcAqQ4p2/symbols.o // /dioxus/target/subsecond-dev/deps/subsecond_harness-acfb69cb29ffb8fa.05stnb4bovskp7a00wyyf7l9s.rcgu.o // /dioxus/target/subsecond-dev/deps/subsecond_harness-acfb69cb29ffb8fa.08rgcutgrtj2mxoogjg3ufs0g.rcgu.o // /dioxus/target/subsecond-dev/deps/subsecond_harness-acfb69cb29ffb8fa.0941bd8fa2bydcv9hfmgzzne9.rcgu.o // /dioxus/target/subsecond-dev/deps/libbincode-c215feeb7886f81b.rlib // /dioxus/target/subsecond-dev/deps/libanyhow-e69ac15c094daba6.rlib // /dioxus/target/subsecond-dev/deps/libratatui-c3364579b86a1dfc.rlib // /.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/aarch64-apple-darwin/lib/libstd-019f0f6ae6e6562b.rlib // /.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/aarch64-apple-darwin/lib/libpanic_unwind-7387d38173a2eb37.rlib // /.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/aarch64-apple-darwin/lib/libobject-2b03cf6ece171d21.rlib // -framework AppKit // -lc // -framework Foundation // -framework Carbon // -lSystem // -framework CoreFoundation // -lobjc // -liconv // -lm // -arch arm64 // -mmacosx-version-min=11.0.0 // -L /dioxus/target/subsecond-dev/build/objc_exception-dc226cad0480ea65/out // -o 
/dioxus/target/subsecond-dev/deps/subsecond_harness-acfb69cb29ffb8fa // -nodefaultlibs // -Wl,-all_load // ``` let mut dylibs = vec![]; // Tip objects from link_args are temps — safe to delete after linking. let temp_objects: Vec = args .link_args .iter() .filter(|arg| arg.ends_with(".rcgu.o")) .sorted() .map(PathBuf::from) .collect(); // Merge both sets for the linker. let mut object_files: Vec = Vec::with_capacity(cached_objects.len() + temp_objects.len()); object_files.append(&mut cached_objects); object_files.extend(temp_objects.iter().cloned()); // On non-wasm platforms, we generate a special shim object file which converts symbols from // fat binary into direct addresses from the running process. // // Our wasm approach is quite specific to wasm. We don't need to resolve any missing symbols // there since wasm is relocatable, but there is considerable pre and post processing work to // satisfy undefined symbols that we do by munging the binary directly. // // todo: can we adjust our wasm approach to also use a similar system? // todo: don't require the aslr reference and just patch the got when loading. // // Requiring the ASLR offset here is necessary but unfortunately might be flakey in practice. // Android apps can take a long time to open, and a hot patch might've been issued in the interim, // making this hotpatch a failure. if !self.is_wasm_or_wasi() { let stub_bytes = crate::build::create_undefined_symbol_stub( cache, &object_files, &self.triple, aslr_reference, ) .expect("failed to resolve patch symbols"); // Currently we're dropping stub.o in the exe dir, but should probably just move to a tempfile? let patch_file = self.main_exe().with_file_name("stub.o"); std::fs::write(&patch_file, stub_bytes)?; object_files.push(patch_file); // Add the dylibs/sos to the linker args // Make sure to use the one in the bundle, not the ones in the target dir or system. 
for arg in &args.link_args { if arg.ends_with(".dylib") || arg.ends_with(".so") { let path = PathBuf::from(arg); dylibs.push(self.frameworks_folder().join(path.file_name().unwrap())); } } } // And now we can run the linker with our new args let linker = self.select_linker()?; let out_exe = self.patch_exe(artifacts.time_start); let out_arg = match self.triple.operating_system { OperatingSystem::Windows => vec![format!("/OUT:{}", out_exe.display())], _ => vec!["-o".to_string(), out_exe.display().to_string()], }; tracing::trace!("Linking with {:?} using args: {:#?}", linker, object_files); let mut out_args: Vec = vec![]; out_args.extend(object_files.iter().map(Into::into)); out_args.extend(dylibs.iter().map(Into::into)); out_args.extend(self.thin_link_args(&args.link_args)?.iter().map(Into::into)); out_args.extend(out_arg.iter().map(Into::into)); if cfg!(windows) { let cmd_contents: String = out_args .iter() .map(|s| format!("\"{}\"", s.to_string_lossy())) .join(" "); std::fs::write(self.windows_command_file(), cmd_contents) .context("Failed to write linker command file")?; out_args = vec![format!("@{}", self.windows_command_file().display()).into()]; } // Add more search paths for the linker let mut command_envs = args.envs.clone(); // On linux, we need to set a more complete PATH for the linker to find its libraries if cfg!(target_os = "linux") { command_envs.push(("PATH".to_string(), std::env::var("PATH").unwrap())); } // Run the linker directly! // // We dump its output directly into the patch exe location which is different than how rustc // does it since it uses llvm-objcopy into the `target/debug/` folder. 
let res = Command::new(linker) .args(out_args) .env_clear() .envs(command_envs) .output() .await?; if !res.stderr.is_empty() { let errs = String::from_utf8_lossy(&res.stderr); if !self.patch_exe(artifacts.time_start).exists() || !res.status.success() { tracing::error!( telemetry = %serde_json::json!({ "event": "hotpatch_linker_failed" }), "Failed to generate patch: {}", errs.trim() ); } else { tracing::trace!("Linker output during thin linking: {}", errs.trim()); } } // For some really weird reason that I think is because of dlopen caching, future loads of the // jump library will fail if we don't remove the original fat file. I think this could be // because of library versioning and namespaces, but really unsure. // // The errors if you forget to do this are *extremely* cryptic - missing symbols that never existed. // // Fortunately, this binary exists in two places - the deps dir and the target out dir. We // can just remove the one in the deps dir and the problem goes away. if let Some(idx) = args.link_args.iter().position(|arg| *arg == "-o") { _ = std::fs::remove_file(PathBuf::from(args.link_args[idx + 1].as_str())); } // Now extract linker metadata from the fat binary (assets, plugin data) let (assets, android_artifacts, swift_sources) = self .collect_assets_and_metadata(&self.patch_exe(artifacts.time_start), ctx) .await?; artifacts.assets = assets; artifacts.android_artifacts = android_artifacts; artifacts.swift_sources = swift_sources; // If this is a web build, reset the index.html file in case it was modified by SSG self.write_index_html(&artifacts.assets) .context("Failed to write index.html")?; // Clean up temp object files (tip incremental objects + stub.o). // Cached dep objects in object_cache/ are NOT deleted — they persist across patches. for file in &temp_objects { _ = std::fs::remove_file(file); } Ok(()) } /// Take the original args passed to the "fat" build and then create the "thin" variant. 
/// /// This is basically just stripping away the rlibs and other libraries that will be satisfied /// by our stub step. fn thin_link_args(&self, original_args: &[String]) -> Result> { let mut out_args = vec![]; match self.linker_flavor() { // wasm32-unknown-unknown -> use wasm-ld (gnu-lld) // // We need to import a few things - namely the memory and ifunc table. // // We can safely export everything, I believe, though that led to issues with the "fat" // binaries that also might lead to issues here too. wasm-bindgen chokes on some symbols // and the resulting JS has issues. // // We turn on both --pie and --experimental-pic but I think we only need --pie. // // We don't use *any* of the original linker args since they do lots of custom exports // and other things that we don't need. // // The trickiest one here is -Crelocation-model=pic, which forces data symbols // into a GOT, making it possible to import them from the main module. // // I think we can make relocation-model=pic work for non-wasm platforms, enabling // fully relocatable modules with no host coordination in lieu of sending out // the aslr slide at runtime. LinkerFlavor::WasmLld => { out_args.extend([ "--fatal-warnings".to_string(), "--verbose".to_string(), "--import-memory".to_string(), "--import-table".to_string(), "--growable-table".to_string(), "--export".to_string(), "main".to_string(), "--allow-undefined".to_string(), "--no-demangle".to_string(), "--no-entry".to_string(), "--pie".to_string(), "--experimental-pic".to_string(), ]); // retain exports so post-processing has hooks to work with for (idx, arg) in original_args.iter().enumerate() { if *arg == "--export" { out_args.push(arg.to_string()); out_args.push(original_args[idx + 1].to_string()); } } } // This uses "cc" and these args need to be ld compatible // // Most importantly, we want to pass `-dylib` to both CC and the linker to indicate that // we want to generate the shared library instead of an executable. 
LinkerFlavor::Darwin => { out_args.extend(["-Wl,-dylib".to_string()]); // Preserve the original args. We only preserve: // -framework // -arch // -lxyz // There might be more, but some flags might break our setup. for (idx, arg) in original_args.iter().enumerate() { if *arg == "-framework" || *arg == "-arch" || *arg == "-L" || *arg == "-target" || (*arg == "-isysroot" && matches!( self.triple.operating_system, target_lexicon::OperatingSystem::IOS(_) )) { out_args.push(arg.to_string()); out_args.push(original_args[idx + 1].to_string()); } if arg.starts_with("-l") || arg.starts_with("-m") || arg.starts_with("-nodefaultlibs") { out_args.push(arg.to_string()); } } } // android/linux need to be compatible with lld // // android currently drags along its own libraries and other zany flags LinkerFlavor::Gnu => { out_args.extend([ "-shared".to_string(), "-Wl,--eh-frame-hdr".to_string(), "-Wl,-z,noexecstack".to_string(), "-Wl,-z,relro,-z,now".to_string(), "-nodefaultlibs".to_string(), "-Wl,-Bdynamic".to_string(), ]); // Preserve the original args. We only preserve: // -L // -arch // -lxyz // There might be more, but some flags might break our setup. 
for (idx, arg) in original_args.iter().enumerate() { if *arg == "-L" { out_args.push(arg.to_string()); out_args.push(original_args[idx + 1].to_string()); } if arg.starts_with("-l") || arg.starts_with("-m") || arg.starts_with("-Wl,--target=") || arg.starts_with("-Wl,-fuse-ld") || arg.starts_with("-fuse-ld") || arg.contains("-ld-path") { out_args.push(arg.to_string()); } } } LinkerFlavor::Msvc => { out_args.extend([ "shlwapi.lib".to_string(), "kernel32.lib".to_string(), "advapi32.lib".to_string(), "ntdll.lib".to_string(), "userenv.lib".to_string(), "ws2_32.lib".to_string(), "dbghelp.lib".to_string(), "/defaultlib:msvcrt".to_string(), "/DLL".to_string(), "/DEBUG".to_string(), "/PDBALTPATH:%_PDB%".to_string(), "/EXPORT:main".to_string(), "/HIGHENTROPYVA:NO".to_string(), ]); } LinkerFlavor::Unsupported => { bail!("Unsupported platform for thin linking") } } let extract_value = |arg: &str| -> Option { original_args .iter() .position(|a| *a == arg) .map(|i| original_args[i + 1].to_string()) }; if let Some(vale) = extract_value("-target") { out_args.push("-target".to_string()); out_args.push(vale); } if let Some(vale) = extract_value("-isysroot") { if matches!( self.triple.operating_system, target_lexicon::OperatingSystem::IOS(_) ) { out_args.push("-isysroot".to_string()); out_args.push(vale); } } Ok(out_args) } /// Patches are stored in the same directory as the main executable, but with a name based on the /// time the patch started compiling. /// /// - lib{name}-patch-{time}.(so/dll/dylib) (next to the main exe) /// /// Note that weirdly enough, the name of dylibs can actually matter. In some environments, libs /// can override each other with symbol interposition. /// /// Also, on Android - and some Linux, we *need* to start the lib name with `lib` for the dynamic /// loader to consider it a shared library. /// /// todo: the time format might actually be problematic if two platforms share the same build folder. 
pub(crate) fn patch_exe(&self, time_start: SystemTime) -> PathBuf { let path = self.main_exe().with_file_name(format!( "lib{}-patch-{}", self.executable_name(), time_start .duration_since(UNIX_EPOCH) .map(|f| f.as_millis()) .unwrap_or(0), )); let extension = match self.linker_flavor() { LinkerFlavor::Darwin => "dylib", LinkerFlavor::Gnu => "so", LinkerFlavor::WasmLld => "wasm", LinkerFlavor::Msvc => "dll", LinkerFlavor::Unsupported => "", }; path.with_extension(extension) } /// When we link together the fat binary, we need to make sure every `.o` file in *every* rlib /// is taken into account. This is the same work that the rust compiler does when assembling /// staticlibs. /// /// /// /// Since we're going to be passing these to the linker, we need to make sure and not provide any /// weird files (like the rmeta) file that rustc generates. /// /// We discovered the need for this after running into issues with wasm-ld not being able to /// handle the rmeta file. /// /// /// /// Also, crates might not drag in all their dependent code. The monorphizer won't lift trait-based generics: /// /// /// /// When Rust normally handles this, it uses the +whole-archive directive which adjusts how the rlib /// is written to disk. /// /// Since creating this object file can be a lot of work, we cache it in the target dir by hashing /// the names of the rlibs in the command and storing it in the target dir. That way, when we run /// this command again, we can just used the cached object file. /// /// In theory, we only need to do this for every crate accessible by the current crate, but that's /// hard acquire without knowing the exported symbols from each crate. 
///
/// todo: I think we can traverse our immediate dependencies and inspect their symbols, unless they `pub use` a crate
/// todo: we should try and make this faster with memmapping
pub(crate) async fn run_fat_link(&self, exe: &Path, rustc_args: &RustcArgs) -> Result<()> {
    // Filter out the rlib files from the arguments
    let rlibs = rustc_args
        .link_args
        .iter()
        .filter(|arg| arg.ends_with(".rlib"))
        .map(PathBuf::from)
        .collect::>();

    // Acquire a hash from the rlib names, sizes, modified times, and dx's git commit hash
    // This ensures that any changes in dx or the rlibs will cause a new hash to be generated
    // The hash relies on both dx and rustc hashes, so it should be thoroughly unique. Keep it
    // short to avoid long file names.
    //
    // A v5 UUID is deterministic for the same inputs; we keep only the first 8 chars so the
    // cache file name stays short.
    let hash_id = Uuid::new_v5(
        &Uuid::NAMESPACE_OID,
        rlibs
            .iter()
            .map(|p| {
                format!(
                    "{}-{}-{}-{}",
                    p.file_name().unwrap().to_string_lossy(),
                    p.metadata().map(|m| m.len()).unwrap_or_default(),
                    p.metadata()
                        .ok()
                        .and_then(|m| m.modified().ok())
                        .and_then(|f| f.duration_since(UNIX_EPOCH).map(|f| f.as_secs()).ok())
                        .unwrap_or_default(),
                    crate::dx_build_info::GIT_COMMIT_HASH.unwrap_or_default()
                )
            })
            .collect::()
            .as_bytes(),
    )
    .to_string()
    .chars()
    .take(8)
    .collect::();

    // Check if we already have a cached object file
    let out_ar_path = exe.with_file_name(format!("libdeps-{hash_id}.a",));
    let out_rlibs_list = exe.with_file_name(format!("rlibs-{hash_id}.txt"));
    let mut archive_has_contents = out_ar_path.exists();

    // Use the rlibs list if it exists
    let mut compiler_rlibs = std::fs::read_to_string(&out_rlibs_list)
        .ok()
        .map(|s| s.lines().map(PathBuf::from).collect::>())
        .unwrap_or_default();

    // Create it by dumping all the rlibs into it
    // This will include the std rlibs too, which can severely bloat the size of the archive
    //
    // The nature of this process involves making extremely fat archives, so we should try and
    // speed up the future linking process by caching the archive.
    //
    // Since we're using the git hash for the CLI entropy, debug builds should always regenerate
    // the archive since their hash might not change, but the logic might.
    if !archive_has_contents || cfg!(debug_assertions) {
        compiler_rlibs.clear();

        let mut bytes = vec![];
        let mut out_ar = ar::Builder::new(&mut bytes);
        for rlib in &rlibs {
            // Skip compiler rlibs since they're missing bitcode
            //
            // https://github.com/rust-lang/rust/issues/94232#issuecomment-1048342201
            //
            // if the rlib is not in the target directory, we skip it.
            if !rlib.starts_with(self.workspace_dir()) {
                compiler_rlibs.push(rlib.clone());
                tracing::trace!("Skipping rlib: {:?}", rlib);
                continue;
            }

            tracing::trace!("Adding rlib to staticlib: {:?}", rlib);

            let rlib_contents = std::fs::read(rlib)?;
            let mut reader = ar::Archive::new(std::io::Cursor::new(rlib_contents));
            let mut keep_linker_rlib = false;
            while let Some(Ok(object_file)) = reader.next_entry() {
                let name = std::str::from_utf8(object_file.header().identifier()).unwrap();

                // rmeta entries are rustc metadata, not linkable objects.
                if name.ends_with(".rmeta") {
                    continue;
                }

                // Empty entries have nothing for the linker anyway.
                if object_file.header().size() == 0 {
                    continue;
                }

                // rlibs might contain dlls/sos/lib files which we don't want to include
                //
                // This catches .dylib, .so, .dll, .lib, .o, etc files that are not compatible with
                // our "fat archive" linking process.
                //
                // We only trust `.rcgu.o` files to make it into the --all_load archive.
                // This is a temporary stopgap to prevent issues with libraries that generate
                // object files that are not compatible with --all_load.
                // see https://github.com/DioxusLabs/dioxus/issues/4237
                if !(name.ends_with(".rcgu.o") || name.ends_with(".obj")) {
                    keep_linker_rlib = true;
                    continue;
                }

                archive_has_contents = true;
                out_ar
                    .append(&object_file.header().clone(), object_file)
                    .context("Failed to add object file to archive")?;
            }

            // Some rlibs contain weird artifacts that we don't want to include in the fat archive.
            // However, we still want them around in the linker in case the regular linker can handle them.
            if keep_linker_rlib {
                compiler_rlibs.push(rlib.clone());
            }
        }

        let bytes = out_ar.into_inner().context("Failed to finalize archive")?;
        std::fs::write(&out_ar_path, bytes).context("Failed to write archive")?;
        tracing::debug!("Wrote fat archive to {:?}", out_ar_path);

        // Run the ranlib command to index the archive. This slows down this process a bit,
        // but is necessary for some linkers to work properly.
        // We ignore its error in case it doesn't recognize the architecture
        if self.linker_flavor() == LinkerFlavor::Darwin {
            if let Some(ranlib) = Workspace::select_ranlib() {
                _ = Command::new(ranlib).arg(&out_ar_path).output().await;
            }
        }
    }

    // Note: Vec::dedup only removes *consecutive* duplicates, which is what we rely on here
    // since the list is built in insertion order.
    compiler_rlibs.dedup();

    // We're going to replace the first rlib in the args with our fat archive
    // And then remove the rest of the rlibs
    //
    // We also need to insert the -force_load flag to force the linker to load the archive
    let mut args: Vec<_> = rustc_args.link_args.clone();
    if let Some(last_object) = args.iter().rposition(|arg| arg.ends_with(".o")) {
        if archive_has_contents {
            match self.linker_flavor() {
                LinkerFlavor::WasmLld => {
                    args.insert(last_object, "--whole-archive".to_string());
                    args.insert(last_object + 1, out_ar_path.display().to_string());
                    args.insert(last_object + 2, "--no-whole-archive".to_string());
                    args.retain(|arg| !arg.ends_with(".rlib"));
                    // Insert at a fixed index while iterating in reverse so the compiler rlibs
                    // end up in their original relative order.
                    for rlib in compiler_rlibs.iter().rev() {
                        args.insert(last_object + 3, rlib.display().to_string());
                    }
                }
                LinkerFlavor::Gnu => {
                    args.insert(last_object, "-Wl,--whole-archive".to_string());
                    args.insert(last_object + 1, out_ar_path.display().to_string());
                    args.insert(last_object + 2, "-Wl,--no-whole-archive".to_string());
                    args.retain(|arg| !arg.ends_with(".rlib"));
                    for rlib in compiler_rlibs.iter().rev() {
                        args.insert(last_object + 3, rlib.display().to_string());
                    }
                }
                LinkerFlavor::Darwin => {
                    args.insert(last_object, "-Wl,-force_load".to_string());
                    args.insert(last_object + 1, out_ar_path.display().to_string());
                    args.retain(|arg| !arg.ends_with(".rlib"));
                    for rlib in compiler_rlibs.iter().rev() {
                        args.insert(last_object + 2, rlib.display().to_string());
                    }
                }
                LinkerFlavor::Msvc => {
                    args.insert(
                        last_object,
                        format!("/WHOLEARCHIVE:{}", out_ar_path.display()),
                    );
                    args.retain(|arg| !arg.ends_with(".rlib"));
                    for rlib in compiler_rlibs.iter().rev() {
                        args.insert(last_object + 1, rlib.display().to_string());
                    }
                }
                LinkerFlavor::Unsupported => {
                    tracing::error!("Unsupported platform for fat linking: {}", self.triple);
                }
            };
        }
    }

    // Add custom args to the linkers
    match self.linker_flavor() {
        LinkerFlavor::Gnu => {
            // Export `main` so subsecond can use it for a reference point
            args.push("-Wl,--export-dynamic-symbol,main".to_string());
        }
        LinkerFlavor::Darwin => {
            args.push("-Wl,-exported_symbol,_main".to_string());
        }
        LinkerFlavor::Msvc => {
            // Prevent alsr from overflowing 32 bits
            args.push("/HIGHENTROPYVA:NO".to_string());

            // Export `main` so subsecond can use it for a reference point
            args.push("/EXPORT:main".to_string());
        }
        LinkerFlavor::WasmLld | LinkerFlavor::Unsupported => {}
    }

    // We also need to remove the `-o` flag since we want the linker output to end up in the
    // rust exe location, not in the deps dir as it normally would.
    // Remove the value first, then the flag, so the index stays valid.
    if let Some(idx) = args
        .iter()
        .position(|arg| *arg == "-o" || *arg == "--output")
    {
        args.remove(idx + 1);
        args.remove(idx);
    }

    // same but windows support (`/OUT:path` is a single argument, so one removal suffices)
    if let Some(idx) = args.iter().position(|arg| arg.starts_with("/OUT")) {
        args.remove(idx);
    }

    // We want to go through wasm-ld directly, so we need to remove the -flavor flag
    if let Some(flavor_idx) = args.iter().position(|arg| *arg == "-flavor") {
        args.remove(flavor_idx + 1);
        args.remove(flavor_idx);
    }

    // Note: Swift sources are now compiled as dynamic frameworks during the main build flow.
    // Dynamic frameworks are loaded at runtime, not linked statically, so we don't add
    // them to the linker args here. The framework will be installed to the Frameworks
    // folder by compile_swift_sources() in the main bundle creation phase.
    if matches!(
        self.triple.operating_system,
        OperatingSystem::IOS(_) | OperatingSystem::MacOSX { .. } | OperatingSystem::Darwin(_)
    ) {
        let workspace_dir = self.workspace_dir();
        let swift_sources = super::ios_swift::extract_swift_metadata_from_link_args(
            &rustc_args.link_args,
            &workspace_dir,
        );
        if !swift_sources.is_empty() {
            tracing::debug!(
                "Found {} Swift plugin source(s) - will be compiled as dynamic framework during bundle creation",
                swift_sources.len()
            );
        }
    }

    // Set the output file
    match self.triple.operating_system {
        OperatingSystem::Windows => args.push(format!("/OUT:{}", exe.display())),
        _ => args.extend(["-o".to_string(), exe.display().to_string()]),
    }

    // And now we can run the linker with our new args
    let linker = self.select_linker()?;

    tracing::trace!("Fat linking with args: {:?} {:#?}", linker, args);
    tracing::trace!("Fat linking with env:");
    for e in rustc_args.envs.iter() {
        tracing::trace!(" {}={}", e.0, e.1);
    }

    // Handle windows command files
    let mut out_args = args.clone();
    if cfg!(windows) {
        // Windows has a short command-line length limit, so pass args via an @-response file.
        let cmd_contents: String = out_args.iter().map(|f| format!("\"{f}\"")).join(" ");
        std::fs::write(self.windows_command_file(), cmd_contents)
            .context("Failed to write linker command file")?;
        out_args = vec![format!("@{}", self.windows_command_file().display())];
    }

    // Add more search paths for the linker
    let mut command_envs = rustc_args.envs.clone();

    // On linux, we need to set a more complete PATH for the linker to find its libraries
    if cfg!(target_os = "linux") {
        command_envs.push(("PATH".to_string(), std::env::var("PATH").unwrap()));
    }

    // Run the linker directly!
    let res = Command::new(linker)
        .args(out_args)
        .env_clear()
        .envs(command_envs)
        .output()
        .await?;

    if !res.stderr.is_empty() {
        let errs = String::from_utf8_lossy(&res.stderr);
        if !res.status.success() {
            tracing::error!(
                telemetry = %serde_json::json!({
                    "event": "hotpatch_fat_binary_generation_failed"
                }),
                "Failed to generate fat binary: {}",
                errs.trim()
            );
        } else {
            tracing::trace!("Warnings during fat linking: {}", errs.trim());
        }
    }

    if !res.stdout.is_empty() {
        let out = String::from_utf8_lossy(&res.stdout);
        tracing::trace!("Output from fat linking: {}", out.trim());
    }

    // Clean up the temps manually
    for f in args.iter().filter(|arg| arg.ends_with(".rcgu.o")) {
        _ = std::fs::remove_file(f);
    }

    // Cache the rlibs list
    _ = std::fs::write(
        &out_rlibs_list,
        compiler_rlibs
            .into_iter()
            .map(|s| s.display().to_string())
            .join("\n"),
    );

    Ok(())
}

/// Create the jump table for a freshly-linked patch, dispatching on the target platform.
pub(crate) fn create_jump_table(
    &self,
    patch: &Path,
    cache: &HotpatchModuleCache,
) -> Result {
    use crate::build::patch::{
        create_native_jump_table, create_wasm_jump_table, create_windows_jump_table,
    };

    let root_dir = self.root_dir();
    let base_path = self.base_path();
    let triple = &self.triple;

    // Symbols are stored differently based on the platform, so we need to handle them differently.
    // - Wasm requires the walrus crate and actually modifies the patch file
    // - windows requires the pdb crate and pdb files
    // - nix requires the object crate
    let mut jump_table = match triple.operating_system {
        OperatingSystem::Windows => create_windows_jump_table(patch, cache)?,
        _ if triple.architecture == Architecture::Wasm32 => {
            create_wasm_jump_table(patch, cache)?
}
        _ => create_native_jump_table(patch, triple, cache)?,
    };

    // root_dir: &Path,
    // base_path: Option<&str>,
    // Rebase the wasm binary to be relocatable once the jump table is generated
    if triple.architecture == target_lexicon::Architecture::Wasm32 {
        // Make sure we use the dir relative to the public dir, so the web can load it as a proper URL
        //
        // ie we would've shipped `/Users/foo/Projects/dioxus/target/dx/project/debug/web/public/wasm/lib.wasm`
        // but we want to ship `/wasm/lib.wasm`
        jump_table.lib = PathBuf::from(
            "/".to_string() + base_path.unwrap_or_default().trim_start_matches('/'),
        )
        .join(jump_table.lib.strip_prefix(root_dir).unwrap())
    }

    Ok(jump_table)
}

/// Automatically detect the linker flavor based on the target triple and any custom linkers.
///
/// This tries to replicate what rustc does when selecting the linker flavor based on the linker
/// and triple.
fn linker_flavor(&self) -> LinkerFlavor {
    // A custom linker's file name is the strongest signal: match well-known linker binaries first.
    if let Some(custom) = self.custom_linker.as_ref() {
        let name = custom.file_name().unwrap().to_ascii_lowercase();
        match name.to_str() {
            Some("lld-link") => return LinkerFlavor::Msvc,
            Some("lld-link.exe") => return LinkerFlavor::Msvc,
            Some("wasm-ld") => return LinkerFlavor::WasmLld,
            Some("ld64.lld") => return LinkerFlavor::Darwin,
            Some("ld.lld") => return LinkerFlavor::Gnu,
            Some("ld.gold") => return LinkerFlavor::Gnu,
            Some("mold") => return LinkerFlavor::Gnu,
            Some("sold") => return LinkerFlavor::Gnu,
            Some("wild") => return LinkerFlavor::Gnu,
            _ => {}
        }
    }

    // Otherwise infer from the triple: environment first, then OS, then architecture.
    match self.triple.environment {
        target_lexicon::Environment::Gnu
        | target_lexicon::Environment::Gnuabi64
        | target_lexicon::Environment::Gnueabi
        | target_lexicon::Environment::Gnueabihf
        | target_lexicon::Environment::GnuLlvm => LinkerFlavor::Gnu,
        target_lexicon::Environment::Musl => LinkerFlavor::Gnu,
        target_lexicon::Environment::Android => LinkerFlavor::Gnu,
        target_lexicon::Environment::Msvc => LinkerFlavor::Msvc,
        target_lexicon::Environment::Macabi => LinkerFlavor::Darwin,
        _ => match self.triple.operating_system {
            OperatingSystem::Darwin(_) => LinkerFlavor::Darwin,
            OperatingSystem::IOS(_) => LinkerFlavor::Darwin,
            OperatingSystem::MacOSX(_) => LinkerFlavor::Darwin,
            OperatingSystem::Linux => LinkerFlavor::Gnu,
            OperatingSystem::Windows => LinkerFlavor::Msvc,
            _ => match self.triple.architecture {
                target_lexicon::Architecture::Wasm32 => LinkerFlavor::WasmLld,
                target_lexicon::Architecture::Wasm64 => LinkerFlavor::WasmLld,
                _ => LinkerFlavor::Unsupported,
            },
        },
    }
}

/// Select the linker to use for this platform.
///
/// We prefer to use the rust-lld linker when we can since it's usually there.
/// On macos, we use the system linker since macho files can be a bit finicky.
///
/// This means we basically ignore the linker flavor that the user configured, which could
/// cause issues with a custom linker setup. In theory, rust translates most flags to the right
/// linker format.
fn select_linker(&self) -> Result {
    // An explicitly-configured linker always wins.
    if let Some(linker) = self.custom_linker.clone() {
        return Ok(linker);
    }

    let cc = match self.linker_flavor() {
        LinkerFlavor::WasmLld => self.workspace.wasm_ld(),

        // On macOS, we use the system linker since it's usually there.
        // We could also use `lld` here, but it might not be installed by default.
        //
        // Note that this is *clang*, not `lld`.
        LinkerFlavor::Darwin => self.workspace.cc(),

        // On Linux, we use the system linker since it's usually there.
        LinkerFlavor::Gnu => self.workspace.cc(),

        // On windows, instead of trying to find the system linker, we just go with the lld.link
        // that rustup provides. It's faster and more stable than relying on link.exe in path.
        LinkerFlavor::Msvc => self.workspace.lld_link(),

        // The rest of the platforms use `cc` as the linker which should be available in your path,
        // provided you have build-tools setup. On mac/linux this is the default, but on Windows
        // it requires msvc or gnu downloaded, which is a requirement to use rust anyways.
//
        // The default linker might actually be slow though, so we could consider using lld or rust-lld
        // since those are shipping by default on linux as of 1.86. Window's linker is the really slow one.
        //
        // https://blog.rust-lang.org/2024/05/17/enabling-rust-lld-on-linux.html
        //
        // Note that "cc" is *not* a linker. It's a compiler! The arguments we pass need to be in
        // the form of `-Wl,` for them to make it to the linker. This matches how rust does it
        // which is confusing.
        LinkerFlavor::Unsupported => self.workspace.cc(),
    };

    Ok(cc)
}

/// Assemble the `cargo rustc` / `rustc` command
///
/// When building fat/base binaries, we use `cargo rustc`.
/// When building thin binaries, we use `rustc` directly.
///
/// When processing the output of this command, you need to make sure to handle both cases which
/// both have different formats (but with json output for both).
fn build_command(&self, build_mode: &BuildMode) -> Result {
    match build_mode {
        // We're assembling rustc directly, so we need to be *very* careful. Cargo sets rustc's
        // env up very particularly, and we want to match it 1:1 but with some changes.
        //
        // To do this, we reset the env completely, and then pass every env var that the original
        // rustc process had 1:1.
        //
        // We need to unset a few things, like the RUSTC wrappers and then our special env var
        // indicating that dx itself is the compiler. If we forget to do this, then the compiler
        // ends up doing some recursive nonsense and dx is trying to link instead of compiling.
        //
        // todo: maybe rustc needs to be found on the FS instead of using the one in the path?
        BuildMode::Thin {
            workspace_rustc_args,
            ..
        } => {
            let rustc_args = workspace_rustc_args
                .get(&format!("{}.bin", self.tip_crate_name()))
                .context("Missing rustc args for tip crate")?;

            let mut cmd = Command::new("rustc");
            cmd.current_dir(self.workspace_dir());
            cmd.env_clear();
            // NOTE(review): args[0] of the captured invocation is skipped here — presumably the
            // rustc program name itself; confirm against the wrapper that records these args.
            cmd.args(rustc_args.args[1..].iter());
            cmd.env_remove("RUSTC_WORKSPACE_WRAPPER");
            cmd.env_remove("RUSTC_WRAPPER");
            cmd.env_remove(DX_RUSTC_WRAPPER_ENV_VAR);
            cmd.envs(
                self.cargo_build_env_vars(build_mode)?
                    .iter()
                    .map(|(k, v)| (k.as_ref(), v)),
            );
            // dx itself acts as the linker for thin builds.
            cmd.arg(format!("-Clinker={}", Workspace::path_to_dx()?.display()));

            if self.is_wasm_or_wasi() {
                cmd.arg("-Crelocation-model=pic");
            }

            cmd.envs(rustc_args.envs.iter().cloned());

            Ok(cmd)
        }

        // For Base and Fat builds, we use a regular cargo setup, but we intercept rustc for
        // workspace member crates to capture their args/envs for hot-patching.
        //
        // We use RUSTC_WORKSPACE_WRAPPER which wraps only workspace member crates, letting us
        // capture per-crate args without interfering with external dependency compilation.
        //
        // We've also had a number of issues with incorrect canonicalization when passing paths
        // through envs on windows, hence the frequent use of dunce::canonicalize.
        _ => {
            let mut cmd = Command::new("cargo");
            let env = self.cargo_build_env_vars(build_mode)?;
            let args = self.cargo_build_arguments(build_mode);

            tracing::trace!("Building with cargo rustc");
            for e in env.iter() {
                tracing::trace!(": {}={}", e.0, e.1.to_string_lossy());
            }
            for a in args.iter() {
                tracing::trace!(": {}", a);
            }

            cmd.arg("rustc")
                .current_dir(self.crate_dir())
                .arg("--message-format")
                .arg("json-diagnostic-rendered-ansi")
                .args(args)
                .envs(env.iter().map(|(k, v)| (k.as_ref(), v)));

            if matches!(build_mode, BuildMode::Fat | BuildMode::Base { run: true }) {
                let args_dir = self.rustc_wrapper_args_dir();
                std::fs::create_dir_all(&args_dir)
                    .context("Failed to create rustc wrapper args directory")?;
                cmd.env(
                    DX_RUSTC_WRAPPER_ENV_VAR,
                    dunce::canonicalize(&args_dir)
                        .context("Failed to canonicalize rustc wrapper args dir")?
                        .display()
                        .to_string(),
                );
                cmd.env(
                    "RUSTC_WORKSPACE_WRAPPER",
                    Workspace::path_to_dx()?.display().to_string(),
                );
            }

            Ok(cmd)
        }
    }
}

/// Create a list of arguments for cargo builds
///
/// We always use `cargo rustc` *or* `rustc` directly. This means we can pass extra flags like
/// `-C` arguments directly to the compiler.
#[allow(clippy::vec_init_then_push)]
pub(crate) fn cargo_build_arguments(&self, build_mode: &BuildMode) -> Vec {
    let mut cargo_args = Vec::with_capacity(4);

    // Set the `--config profile.{profile}.{key}={value}` flags for the profile, filling in adhoc profile
    cargo_args.extend(self.profile_args());

    // Add required profile flags. --release overrides any custom profiles.
    cargo_args.push("--profile".to_string());
    cargo_args.push(self.profile.to_string());

    // Pass the appropriate target to cargo.
// We *always* specify a target which is somewhat helpful for preventing thrashing
    cargo_args.push("--target".to_string());
    cargo_args.push(self.triple.to_string());

    // We always run in verbose since the CLI itself is the one doing the presentation
    cargo_args.push("--verbose".to_string());

    if self.no_default_features {
        cargo_args.push("--no-default-features".to_string());
    }

    if self.all_features {
        cargo_args.push("--all-features".to_string());
    }

    if !self.features.is_empty() {
        cargo_args.push("--features".to_string());
        // cargo accepts a single space-separated feature list as one argument
        cargo_args.push(self.features.join(" "));
    }

    // We *always* set the package since that's discovered from cargo metadata
    cargo_args.push(String::from("-p"));
    cargo_args.push(self.package.clone());

    // Set the executable
    match self.executable_type() {
        TargetKind::Bin => cargo_args.push("--bin".to_string()),
        TargetKind::Lib => cargo_args.push("--lib".to_string()),
        TargetKind::Example => cargo_args.push("--example".to_string()),
        _ => {}
    };
    cargo_args.push(self.executable_name().to_string());

    // Set offline/locked/frozen
    let lock_opts = crate::verbosity_or_default();
    if lock_opts.frozen {
        cargo_args.push("--frozen".to_string());
    }
    if lock_opts.locked {
        cargo_args.push("--locked".to_string());
    }
    if lock_opts.offline {
        cargo_args.push("--offline".to_string());
    }

    // Merge in extra args. Order shouldn't really matter.
    cargo_args.extend(self.extra_cargo_args.clone());

    // Everything after `--` goes to rustc, not cargo.
    cargo_args.push("--".to_string());

    cargo_args.extend(self.extra_rustc_args.clone());

    // On windows, we pass /SUBSYSTEM:WINDOWS to prevent a console from appearing
    if matches!(self.bundle, BundleFormat::Windows)
        && !self
            .rustflags
            .flags
            .iter()
            .any(|f| f.starts_with("-Clink-arg=/SUBSYSTEM:"))
    {
        let subsystem = self
            .windows_subsystem
            .clone()
            .unwrap_or_else(|| "WINDOWS".to_string());
        cargo_args.push(format!("-Clink-arg=/SUBSYSTEM:{}", subsystem));

        // We also need to set the entry point to mainCRTStartup to avoid windows looking
        // for a WinMain function
        cargo_args.push("-Clink-arg=/ENTRY:mainCRTStartup".to_string());
    }

    // The bundle splitter needs relocation data to create a call-graph.
    // This will automatically be erased by wasm-opt during the optimization step.
    if self.bundle == BundleFormat::Web && self.wasm_split {
        cargo_args.push("-Clink-args=--emit-relocs".to_string());
    }

    // dx links android, thin builds, and fat builds with a custom linker.
    // Note: We don't intercept Darwin Base builds since Swift plugins are compiled as dynamic
    // frameworks that load at runtime, not linked statically into the binary.
    let use_dx_linker = self.custom_linker.is_some()
        || matches!(build_mode, BuildMode::Thin { .. } | BuildMode::Fat);
    if use_dx_linker {
        cargo_args.push(format!(
            "-Clinker={}",
            Workspace::path_to_dx().expect("can't find dx").display()
        ));
    }

    // for debuggability, we need to make sure android studio can properly understand our build
    // https://stackoverflow.com/questions/68481401/debugging-a-prebuilt-shared-library-in-android-studio
    if self.bundle == BundleFormat::Android {
        cargo_args.push("-Clink-arg=-Wl,--build-id=sha1".to_string());
    }

    // Handle frameworks/dylibs by setting the rpath
    // This is dependent on the bundle structure - iOS uses a flat structure while macOS uses nested
    // todo: we need to figure out what to do for windows
    match self.triple.operating_system {
        OperatingSystem::Darwin(_) | OperatingSystem::MacOSX { .. } => {
            // macOS: App.app/Contents/MacOS/exe -> ../Frameworks/
            cargo_args.push("-Clink-arg=-Wl,-rpath,@executable_path/../Frameworks".to_string());
            cargo_args.push("-Clink-arg=-Wl,-rpath,@executable_path".to_string());
        }
        OperatingSystem::IOS(_) => {
            // iOS: App.app/exe -> Frameworks/ (flat bundle structure)
            cargo_args.push("-Clink-arg=-Wl,-rpath,@executable_path/Frameworks".to_string());
            cargo_args.push("-Clink-arg=-Wl,-rpath,@executable_path".to_string());
        }
        OperatingSystem::Linux => {
            cargo_args.push("-Clink-arg=-Wl,-rpath,$ORIGIN/../lib".to_string());
            cargo_args.push("-Clink-arg=-Wl,-rpath,$ORIGIN".to_string());
        }
        _ => {}
    }

    // Our fancy hot-patching engine needs a lot of customization to work properly.
    //
    // These args are mostly intended to be passed when *fat* linking but are generally fine to
    // pass for both fat and thin linking.
    //
    // We need save-temps and no-dead-strip in both cases though. When we run `cargo rustc` with
    // these args, they will be captured and re-ran for the fast compiles in the future, so whatever
    // we set here will be set for all future hot patches too.
    if matches!(build_mode, BuildMode::Thin { .. } | BuildMode::Fat) {
        // rustc gives us some portable flags required:
        // - link-dead-code: prevents rust from passing -dead_strip to the linker since that's the default.
        // - save-temps=true: keeps the incremental object files around, which we need for manually linking.
        cargo_args.extend_from_slice(&[
            "-Csave-temps=true".to_string(),
            "-Clink-dead-code".to_string(),
        ]);

        // We need to set some extra args that ensure all symbols make it into the final output
        // and that the linker doesn't strip them out.
        //
        // This basically amounts of -all_load or --whole-archive, depending on the linker.
        // We just assume an ld-like interface on macos and a gnu-ld interface elsewhere.
        //
        // macOS/iOS use ld64 but through the `cc` interface.
        // cargo_args.push("-Clink-args=-Wl,-all_load".to_string());
        //
        // Linux and Android fit under this umbrella, both with the same clang-like entrypoint
        // and the gnu-ld interface.
        //
        // cargo_args.push("-Clink-args=-Wl,--whole-archive".to_string());
        //
        // If windows -Wl,--whole-archive is required since it follows gnu-ld convention.
        // There might be other flags on windows - we haven't tested windows thoroughly.
        //
        // cargo_args.push("-Clink-args=-Wl,--whole-archive".to_string());
        // https://learn.microsoft.com/en-us/cpp/build/reference/wholearchive-include-all-library-object-files?view=msvc-170
        //
        // ------------------------------------------------------------
        //
        // if web, -Wl,--whole-archive is required since it follows gnu-ld convention.
        //
        // We also use --no-gc-sections and --export-table and --export-memory to push
        // said symbols into the export table.
        //
        // We use --emit-relocs to build up a solid call graph.
        //
        // rust uses its own wasm-ld linker which can be found here (it's just gcc-ld with a `-target wasm` flag):
        // - ~/.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/aarch64-apple-darwin/bin/gcc-ld
        // - ~/.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/aarch64-apple-darwin/bin/gcc-ld/wasm-ld
        //
        // Note that we can't use --export-all, unfortunately, since some symbols are internal
        // to wasm-bindgen and exporting them causes the JS generation to fail.
        //
        // We are basically replicating what emscripten does here with its dynamic linking
        // approach where the MAIN_MODULE is very "fat" and exports the necessary arguments
        // for the side modules to be linked in. This guide is really helpful:
        //
        // https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md
        //
        // The tricky one is -Ctarget-cpu=mvp, which prevents rustc from generating externref
        // entries.
        //
        // https://blog.rust-lang.org/2024/09/24/webassembly-targets-change-in-default-target-features/#disabling-on-by-default-webassembly-proposals
        //
        // It's fine that these exist in the base module but not in the patch.
        if matches!(
            self.triple.architecture,
            target_lexicon::Architecture::Wasm32 | target_lexicon::Architecture::Wasm64
        ) || self.triple.operating_system == OperatingSystem::Wasi
        {
            // cargo_args.push("-Ctarget-cpu=mvp".into()); // disabled due to changes in wasm-bindgne
            cargo_args.push("-Clink-arg=--no-gc-sections".into());
            cargo_args.push("-Clink-arg=--growable-table".into());
            cargo_args.push("-Clink-arg=--export-table".into());
            cargo_args.push("-Clink-arg=--export-memory".into());
            cargo_args.push("-Clink-arg=--emit-relocs".into());
            cargo_args.push("-Clink-arg=--export=__stack_pointer".into());
            cargo_args.push("-Clink-arg=--export=__heap_base".into());
            cargo_args.push("-Clink-arg=--export=__data_end".into());
        }
    }

    cargo_args
}

/// Assemble the environment variables to set for the cargo/rustc build.
pub(crate) fn cargo_build_env_vars(
    &self,
    build_mode: &BuildMode,
) -> Result, OsString)>> {
    let mut env_vars = vec![];

    // Make sure to set all the crazy android flags. Cross-compiling is hard, man.
    if self.bundle == BundleFormat::Android {
        env_vars.extend(self.android_env_vars()?);
    };

    // If this is a release build, bake the base path and title into the binary with env vars.
    // todo: should we even be doing this? might be better being a build.rs or something else.
    if self.release {
        if let Some(base_path) = self.trimmed_base_path() {
            env_vars.push((ASSET_ROOT_ENV.into(), base_path.to_string().into()));
        }
        env_vars.push((
            APP_TITLE_ENV.into(),
            self.config.web.app.title.clone().into(),
        ));
        env_vars.push((PRODUCT_NAME_ENV.into(), self.bundled_app_name().into()));
    }

    // Assemble the rustflags by peering into the `.cargo/config.toml` file
    // (currently passed through unmodified — the mvp tweak below is commented out)
    let rust_flags = self.rustflags.clone();

    // seems like this is fixed?
    //
    // Disable reference types on wasm when using hotpatching
    //
    // https://blog.rust-lang.org/2024/09/24/webassembly-targets-change-in-default-target-features/#disabling-on-by-default-webassembly-proposals
    // if self.is_wasm_or_wasi() && matches!(build_mode, BuildMode::Thin { ..
// } | BuildMode::Fat) {
    // rust_flags.flags.push("-Ctarget-cpu=mvp".to_string());
    // }

    // Set the rust flags for the build if they're not empty.
    if !rust_flags.flags.is_empty() {
        env_vars.push((
            "RUSTFLAGS".into(),
            rust_flags
                .encode_space_separated()
                .context("Failed to encode RUSTFLAGS")?
                .into(),
        ));
    }

    // If we're either zero-linking or using a custom linker, make `dx` itself do the linking.
    // Note: We don't intercept Darwin Base builds since Swift plugins are compiled as dynamic
    // frameworks that load at runtime, not linked statically into the binary.
    let use_dx_linker = self.custom_linker.is_some()
        || matches!(build_mode, BuildMode::Thin { .. } | BuildMode::Fat);
    if use_dx_linker {
        // For Android, we pass the actual linker so cargo can still link normally.
        // For Fat/Thin builds, we use no-link mode (linker = None).
        LinkAction {
            triple: self.triple.clone(),
            linker: self.custom_linker.clone(),
            link_err_file: dunce::canonicalize(self.link_err_file())?,
            link_args_file: dunce::canonicalize(self.link_args_file())?,
        }
        .write_env_vars(&mut env_vars)?;
    }

    Ok(env_vars)
}

/// Set the environment variables required for building on Android.
///
/// This involves setting sysroots, CC, CXX, AR, and other environment variables along with
/// vars that cc-rs uses for its C/C++ compilation.
///
/// We pulled the environment setup from `cargo ndk` and attempt to mimic its behavior to retain
/// compatibility with existing crates that work with `cargo ndk`.
///
///
///
/// cargo-ndk is MIT licensed.
///
///
fn android_env_vars(&self) -> Result, OsString)>> {
    // Derived from getenv_with_target_prefixes in `cc` crate.
    // Resolve a cc-style env var, most specific key first:
    // `{VAR}_{triple}`, `{VAR}_{triple_with_underscores}`, `TARGET_{VAR}`, then `{VAR}`.
    // Returns the most-specific key name plus the value if any key was set.
    fn cc_env(var_base: &str, triple: &str) -> (String, Option) {
        #[inline]
        fn env_var_with_key(key: String) -> Option<(String, String)> {
            std::env::var(&key).map(|value| (key, value)).ok()
        }

        let triple_u = triple.replace('-', "_");
        let most_specific_key = format!("{}_{}", var_base, triple);
        env_var_with_key(most_specific_key.to_string())
            .or_else(|| env_var_with_key(format!("{}_{}", var_base, triple_u)))
            .or_else(|| env_var_with_key(format!("TARGET_{}", var_base)))
            .or_else(|| env_var_with_key(var_base.to_string()))
            .map(|(key, value)| (key, Some(value)))
            .unwrap_or_else(|| (most_specific_key, None))
    }

    // Build a `CARGO_TARGET_<TRIPLE>_<KEY>` style cargo config key.
    fn cargo_env_target_cfg(triple: &str, key: &str) -> String {
        format!("CARGO_TARGET_{}_{}", &triple.replace('-', "_"), key).to_uppercase()
    }

    // Clang spells the armv7 Android target differently than rustc; also appends the API level.
    fn clang_target(rust_target: &str, api_level: u8) -> String {
        let target = match rust_target {
            "arm-linux-androideabi" => "armv7a-linux-androideabi",
            "armv7-linux-androideabi" => "armv7a-linux-androideabi",
            _ => rust_target,
        };
        format!("--target={target}{api_level}")
    }

    // NDK sysroot directories use the `arm-linux-androideabi` spelling for armv7.
    fn sysroot_target(rust_target: &str) -> &str {
        (match rust_target {
            "armv7-linux-androideabi" => "arm-linux-androideabi",
            _ => rust_target,
        }) as _
    }

    // Arch suffix used in clang's compiler-rt builtins library names
    // (e.g. `libclang_rt.builtins-aarch64-android`).
    fn rt_builtins(rust_target: &str) -> &str {
        (match rust_target {
            "armv7-linux-androideabi" => "arm",
            "aarch64-linux-android" => "aarch64",
            "i686-linux-android" => "i686",
            "x86_64-linux-android" => "x86_64",
            _ => rust_target,
        }) as _
    }

    let mut env_vars: Vec<(Cow<'static, str>, OsString)> = vec![];

    let min_sdk_version = self.min_sdk_version_or_default();
    let tools = self.workspace.android_tools()?;
    let linker = tools.android_cc(&self.triple, min_sdk_version);
    let ar_path = tools.ar_path();
    let target_cc = tools.target_cc();
    let target_cxx = tools.target_cxx();
    let java_home = tools.java_home();
    let ndk_home = tools.ndk.clone();
    let sdk_root = tools.sdk();
    let artifact_dir = self.android_artifact_dir()?;

    tracing::debug!(
        r#"Using android:
min_sdk_version: {min_sdk_version}
linker: {linker:?}
ar_path: {ar_path:?}
target_cc: {target_cc:?}
target_cxx: {target_cxx:?}
java_home: {java_home:?}
sdk_root: {sdk_root:?}
artifact_dir: {artifact_dir:?}
"#
    );

    if let Some(java_home) = &java_home {
        tracing::debug!("Setting JAVA_HOME to {java_home:?}");
        env_vars.push(("JAVA_HOME".into(), java_home.clone().into_os_string()));
        env_vars.push((
            "DX_ANDROID_JAVA_HOME".into(),
            java_home.clone().into_os_string(),
        ));
    }

    // DX_* variants expose the same paths to child processes that `dx` itself spawns.
    env_vars.push((
        "DX_ANDROID_ARTIFACT_DIR".into(),
        artifact_dir.into_os_string(),
    ));
    env_vars.push((
        "DX_ANDROID_NDK_HOME".into(),
        ndk_home.clone().into_os_string(),
    ));
    env_vars.push((
        "DX_ANDROID_SDK_ROOT".into(),
        sdk_root.clone().into_os_string(),
    ));
    env_vars.push(("ANDROID_NDK_HOME".into(), ndk_home.clone().into_os_string()));
    env_vars.push(("ANDROID_SDK_ROOT".into(), sdk_root.clone().into_os_string()));
    env_vars.push(("ANDROID_HOME".into(), sdk_root.into_os_string()));
    env_vars.push(("NDK_HOME".into(), ndk_home.clone().into_os_string()));

    let triple = self.triple.to_string();

    // Environment variables for the `cc` crate
    let (cc_key, _cc_value) = cc_env("CC", &triple);
    let (cflags_key, cflags_value) = cc_env("CFLAGS", &triple);
    let (cxx_key, _cxx_value) = cc_env("CXX", &triple);
    let (cxxflags_key, cxxflags_value) = cc_env("CXXFLAGS", &triple);
    let (ar_key, _ar_value) = cc_env("AR", &triple);
    let (ranlib_key, _ranlib_value) = cc_env("RANLIB", &triple);

    // Environment variables for cargo
    let cargo_ar_key = cargo_env_target_cfg(&triple, "ar");
    let cargo_rust_flags_key = cargo_env_target_cfg(&triple, "rustflags");
    let bindgen_clang_args_key =
        format!("BINDGEN_EXTRA_CLANG_ARGS_{}", &triple.replace('-', "_"));

    let clang_target = clang_target(&self.triple.to_string(), min_sdk_version as _);
    let target_cc = tools.target_cc();
    // User-provided CFLAGS/CXXFLAGS are appended after our `--target=` flag.
    let target_cflags = match cflags_value {
        Some(v) => format!("{clang_target} {v}"),
        None => clang_target.to_string(),
    };
    let target_cxx = tools.target_cxx();
    let target_cxxflags = match cxxflags_value {
        Some(v) => format!("{clang_target} {v}"),
        None => clang_target.to_string(),
    };
    let cargo_ndk_sysroot_path_key = "CARGO_NDK_SYSROOT_PATH";
    let cargo_ndk_sysroot_path = tools.sysroot();
    let cargo_ndk_sysroot_target_key = "CARGO_NDK_SYSROOT_TARGET";
    let cargo_ndk_sysroot_target = sysroot_target(&triple);
    let cargo_ndk_sysroot_libs_path_key = "CARGO_NDK_SYSROOT_LIBS_PATH";
    let cargo_ndk_sysroot_libs_path = cargo_ndk_sysroot_path
        .join("usr")
        .join("lib")
        .join(cargo_ndk_sysroot_target);
    let target_ar = tools.ar_path();
    let target_ranlib = tools.ranlib();
    let clang_folder = tools.clang_folder();

    // choose the clang target with the highest version
    // Should we filter for only numbers?
    let clang_rt = std::fs::read_dir(&clang_folder)
        .map(|dir| {
            let clang_builtins_target = dir
                .filter_map(|a| a.ok())
                .max_by(|a, b| a.file_name().cmp(&b.file_name()))
                .map(|s| s.path())
                .unwrap_or_else(|| clang_folder.join("clang"));
            format!(
                "-L{} -lstatic=clang_rt.builtins-{}-android",
                clang_builtins_target.join("lib").join("linux").display(),
                rt_builtins(&triple)
            )
        })
        .unwrap_or_default();

    let extra_include: String = format!(
        "{}/usr/include/{}",
        &cargo_ndk_sysroot_path.display(),
        &cargo_ndk_sysroot_target
    );
    let bindgen_args = format!(
        "--sysroot={} -I{}",
        &cargo_ndk_sysroot_path.display(),
        extra_include
    );

    // Load up the OpenSSL environment variables, using our defaults if not set.
    // if the user specifies `/vendor`, then they get vendored, unless OPENSSL_NO_VENDOR is passed (implicitly...)
    let openssl_lib_dir = std::env::var("OPENSSL_LIB_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| AndroidTools::openssl_lib_dir(&self.triple));
    let openssl_include_dir = std::env::var("OPENSSL_INCLUDE_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| AndroidTools::openssl_include_dir());
    let openssl_libs =
        std::env::var("OPENSSL_LIBS").unwrap_or_else(|_| "ssl:crypto".to_string());

    for env in [
        (cc_key, target_cc.clone().into_os_string()),
        (cflags_key, target_cflags.into()),
        (cxx_key, target_cxx.into_os_string()),
        (cxxflags_key, target_cxxflags.into()),
        (ar_key, target_ar.clone().into()),
        (ranlib_key, target_ranlib.into_os_string()),
        (cargo_ar_key, target_ar.into_os_string()),
        (
            cargo_ndk_sysroot_path_key.to_string(),
            cargo_ndk_sysroot_path.clone().into_os_string(),
        ),
        (
            cargo_ndk_sysroot_libs_path_key.to_string(),
            cargo_ndk_sysroot_libs_path.into_os_string(),
        ),
        (
            cargo_ndk_sysroot_target_key.to_string(),
            cargo_ndk_sysroot_target.into(),
        ),
        (cargo_rust_flags_key, clang_rt.into()),
        (bindgen_clang_args_key, bindgen_args.into()),
        (
            "ANDROID_NATIVE_API_LEVEL".to_string(),
            min_sdk_version.to_string().into(),
        ),
        (
            format!(
                "CARGO_TARGET_{}_LINKER",
                self.triple
                    .to_string()
                    .to_ascii_uppercase()
                    .replace("-", "_")
            ),
            linker.into_os_string(),
        ),
        (
            "ANDROID_NDK_ROOT".to_string(),
            ndk_home.clone().into_os_string(),
        ),
        (
            "OPENSSL_LIB_DIR".to_string(),
            openssl_lib_dir.into_os_string(),
        ),
        (
            "OPENSSL_INCLUDE_DIR".to_string(),
            openssl_include_dir.into_os_string(),
        ),
        ("OPENSSL_LIBS".to_string(), openssl_libs.into()),
        // Set the wry env vars - this is where wry will dump its kotlin files.
        // Their setup is really annoying and requires us to hardcode `dx` to specific versions of tao/wry.
        (
            "WRY_ANDROID_PACKAGE".to_string(),
            "dev.dioxus.main".to_string().into(),
        ),
        (
            "WRY_ANDROID_LIBRARY".to_string(),
            "dioxusmain".to_string().into(),
        ),
        ("WRY_ANDROID_KOTLIN_FILES_OUT_DIR".to_string(), {
            let kotlin_dir = self.wry_android_kotlin_files_out_dir();
            // Ensure the directory exists for WRY's canonicalize check
            if let Err(e) = std::fs::create_dir_all(&kotlin_dir) {
                tracing::error!("Failed to create kotlin directory {:?}: {}", kotlin_dir, e);
                return Err(anyhow::anyhow!("Failed to create kotlin directory: {}", e));
            }
            tracing::debug!("Created kotlin directory: {:?}", kotlin_dir);
            kotlin_dir.into_os_string()
        }),
        // Found this through a comment related to bindgen using the wrong clang for cross compiles
        // https://github.com/rust-lang/rust-bindgen/issues/2962#issuecomment-2438297124
        // https://github.com/KyleMayes/clang-sys?tab=readme-ov-file#environment-variables
        ("CLANG_PATH".into(), target_cc.with_extension("exe").into()),
    ] {
        env_vars.push((env.0.into(), env.1));
    }

    if std::env::var("MSYSTEM").is_ok() || std::env::var("CYGWIN").is_ok() {
        for var in env_vars.iter_mut() {
            // Convert windows paths to unix-style paths
            // This is a workaround for the fact that the `cc` crate expects unix-style paths
            // and will fail if it encounters windows-style paths.
            var.1 = var.1.to_string_lossy().replace('\\', "/").into();
        }
    }

    Ok(env_vars)
}

/// Compute (and create) the directory where Android build artifacts for this triple live:
/// `target/dx/<main_target>/{release|debug}/android-artifacts/<triple>`.
fn android_artifact_dir(&self) -> Result {
    let dir = self
        .internal_out_dir()
        .join(&self.main_target)
        .join(if self.release { "release" } else { "debug" })
        .join("android-artifacts")
        .join(self.triple.to_string());
    std::fs::create_dir_all(&dir)?;
    Ok(dir)
}

/// Get an estimate of the number of units in the crate. If nightly rustc is not available, this
/// will return an estimate of the number of units in the crate based on cargo metadata.
/// /// TODO: always use once it is stable async fn get_unit_count_estimate(&self, build_mode: &BuildMode) -> usize { // Try to get it from nightly if let Ok(count) = self.get_unit_count(build_mode).await { return count; } // Otherwise, use cargo metadata let units = self .workspace .krates .krates_filtered(krates::DepKind::Dev) .iter() .map(|k| k.targets.len()) .sum::(); (units as f64 / 3.5) as usize } /// Try to get the unit graph for the crate. This is a nightly only feature which may not be /// available with the current version of rustc the user has installed. /// /// It also might not be super reliable - I think in practice it occasionally returns 2x the units. async fn get_unit_count(&self, build_mode: &BuildMode) -> crate::Result { #[derive(Debug, Deserialize)] struct UnitGraph { units: Vec, } let output = tokio::process::Command::new("cargo") .arg("+nightly") .arg("rustc") .arg("--unit-graph") .arg("-Z") .arg("unstable-options") .args(self.cargo_build_arguments(build_mode)) .envs( self.cargo_build_env_vars(build_mode)? .iter() .map(|(k, v)| (k.as_ref(), v)), ) .output() .await?; if !output.status.success() { tracing::trace!( "Failed to get unit count: {}", String::from_utf8_lossy(&output.stderr) ); bail!("Failed to get unit count"); } let output_text = String::from_utf8(output.stdout).context("Failed to get unit count")?; let graph: UnitGraph = serde_json::from_str(&output_text).context("Failed to get unit count")?; Ok(graph.units.len()) } pub(crate) fn all_target_features(&self) -> Vec { let mut features = self.features.clone(); if !self.no_default_features { features.extend( self.package() .features .get("default") .cloned() .unwrap_or_default(), ); } features.dedup(); features } /// returns the path to root build folder. This will be our working directory for the build. /// /// we only add an extension to the folders where it sorta matters that it's named with the extension. 
/// for example, on mac, the `.app` indicates we can `open` it and it pulls in icons, dylibs, etc. /// /// for our simulator-based platforms, this is less important since they need to be zipped up anyways /// to run in the simulator. /// /// For windows/linux, it's also not important since we're just running the exe directly out of the folder /// /// The idea of this folder is that we can run our top-level build command against it and we'll get /// a final build output somewhere. Some platforms have basically no build command, and can simply /// be ran by executing the exe directly. pub(crate) fn root_dir(&self) -> PathBuf { let platform_dir = self.platform_dir(); match self.bundle { BundleFormat::Web => platform_dir.join("public"), BundleFormat::Server => platform_dir.clone(), // ends up *next* to the public folder // These might not actually need to be called `.app` but it does let us run these with `open` BundleFormat::MacOS => platform_dir.join(format!("{}.app", self.bundled_app_name())), BundleFormat::Ios => platform_dir.join(format!("{}.app", self.bundled_app_name())), // in theory, these all could end up directly in the root dir BundleFormat::Android => platform_dir.join("app"), // .apk (after bundling) BundleFormat::Linux => platform_dir.join("app"), // .appimage (after bundling) BundleFormat::Windows => platform_dir.join("app"), // .exe (after bundling) } } /// Create a workdir for the given platform /// This can be used as a temporary directory for the build, but in an observable way such that /// you can see the files in the directory via `target` /// /// target/dx/build/app/web/ /// target/dx/build/app/web/public/ /// target/dx/build/app/web/server.exe fn platform_dir(&self) -> PathBuf { self.internal_out_dir() .join(&self.main_target) .join(if self.release { "release" } else { "debug" }) .join(self.bundle.build_folder_name()) } fn platform_exe_name(&self) -> String { match self.bundle { // mac/ios are unixy and dont have an exe extension 
BundleFormat::MacOS | BundleFormat::Ios => self.executable_name().to_string(), // "server" and windows can be the same BundleFormat::Server | BundleFormat::Windows => match self.triple.operating_system { OperatingSystem::Windows => format!("{}.exe", self.executable_name()), _ => self.executable_name().to_string(), }, // from the apk spec, the root exe is a shared library // we include the user's rust code as a shared library with a fixed namespace BundleFormat::Android => "libdioxusmain.so".to_string(), // this will be wrong, I think, but not important? BundleFormat::Web => format!("{}_bg.wasm", self.executable_name()), // todo: maybe this should be called AppRun? BundleFormat::Linux => self.executable_name().to_string(), } } /// Assemble the android app dir. /// /// This is a bit of a mess since we need to create a lot of directories and files. Other approaches /// would be to unpack some zip folder or something stored via `include_dir!()`. However, we do /// need to customize the whole setup a bit, so it's just simpler (though messier) to do it this way. 
// Generates the full gradle project: top-level gradle files, the `app/` module (build.gradle,
// manifest, MainActivity, proguard rules), and the `res/` tree with default icons and styles.
// Templated files are rendered with handlebars from the `AndroidHandlebarsObjects` context.
fn build_android_app_dir(&self) -> Result<()> {
    use std::fs::{create_dir_all, write};

    let root = self.root_dir();

    // gradle
    let wrapper = root.join("gradle").join("wrapper");
    create_dir_all(&wrapper)?;

    // app
    let app = root.join("app");
    let app_main = app.join("src").join("main");
    let app_kotlin = app_main.join("kotlin");
    let app_java = app_main.join("java");
    let app_jnilibs = app_main.join("jniLibs");
    let app_assets = app_main.join("assets");
    let app_kotlin_out = self.wry_android_kotlin_files_out_dir();

    create_dir_all(&app)?;
    create_dir_all(&app_main)?;
    create_dir_all(&app_kotlin)?;
    create_dir_all(&app_java)?;
    create_dir_all(&app_jnilibs)?;
    create_dir_all(&app_assets)?;
    create_dir_all(&app_kotlin_out)?;

    tracing::debug!(
        r#"Initialized android dirs:
- gradle: {wrapper:?}
- app/ {app:?}
- app/src: {app_main:?}
- app/src/kotlin: {app_kotlin:?}
- app/src/jniLibs: {app_jnilibs:?}
- app/src/assets: {app_assets:?}
- app/src/kotlin/main: {app_kotlin_out:?}
"#
    );

    // handlebars
    // Template context for all `.hbs` files rendered below.
    #[derive(Serialize)]
    struct AndroidHandlebarsObjects {
        application_id: String,
        app_name: String,
        version: String,
        android_bundle: Option,
        /// Android SDK version settings
        min_sdk: u32,
        target_sdk: u32,
        compile_sdk: u32,
        /// Android permission strings (e.g., "android.permission.CAMERA")
        permissions: Vec,
        /// Android hardware features (e.g., "android.hardware.location.gps")
        features: Vec,
        /// Raw manifest XML to inject
        raw_manifest: String,
        /// URL schemes for deep linking
        url_schemes: Vec,
        /// App link hosts for auto-verified deep links
        app_link_hosts: Vec,
        /// Pipe-joined foreground service type string (e.g., "location|mediaPlayback")
        foreground_service_type: String,
        /// Extra Gradle dependencies from [android] config
        gradle_dependencies: Vec,
        /// Extra Gradle plugins from [android] config
        gradle_plugins: Vec,
        /// Application-level manifest attributes from [android.application]
        uses_cleartext_traffic: Option,
        app_theme: Option,
        supports_rtl: Option,
        large_heap: Option,
    }

    // Get permission mapper from config
    let mapper = super::manifest_mapper::ManifestMapper::from_config(
        &self.config.permissions,
        &self.config.deep_links,
        &self.config.background,
        &self.config.android,
        &self.config.ios,
        &self.config.macos,
    );

    // Collect Android permissions
    let permissions: Vec = mapper
        .android_permissions
        .iter()
        .map(|p| p.permission.clone())
        .collect();

    // Collect Android features from config
    let features = self.config.android.features.clone();

    // Get raw manifest XML
    let raw_manifest = self.config.android.raw.manifest.clone().unwrap_or_default();

    // Foreground service types as pipe-separated string
    let foreground_service_type = mapper.android_foreground_service_types.join("|");

    let hbs_data = AndroidHandlebarsObjects {
        application_id: self.bundle_identifier(),
        app_name: self.bundled_app_name(),
        version: self.crate_version(),
        android_bundle: self.config.bundle.android.clone(),
        // SDK defaults: min 24 (Android 7.0), target/compile 34 (Android 14)
        min_sdk: self.config.android.min_sdk.unwrap_or(24),
        target_sdk: self.config.android.target_sdk.unwrap_or(34),
        compile_sdk: self.config.android.compile_sdk.unwrap_or(34),
        permissions,
        features,
        raw_manifest,
        url_schemes: mapper.android_url_schemes,
        app_link_hosts: mapper.android_app_link_hosts,
        foreground_service_type,
        gradle_dependencies: self.config.android.gradle_dependencies.clone(),
        gradle_plugins: self.config.android.gradle_plugins.clone(),
        uses_cleartext_traffic: self.config.android.application.uses_cleartext_traffic,
        app_theme: self.config.android.application.theme.clone(),
        supports_rtl: self.config.android.application.supports_rtl,
        large_heap: self.config.android.application.large_heap,
    };

    let hbs = handlebars::Handlebars::new();

    // Top-level gradle config
    write(
        root.join("build.gradle.kts"),
        include_bytes!("../../assets/android/gen/build.gradle.kts"),
    )?;
    write(
        root.join("gradle.properties"),
        include_bytes!("../../assets/android/gen/gradle.properties"),
    )?;
    write(
        root.join("gradlew"),
        include_bytes!("../../assets/android/gen/gradlew"),
    )?;
    write(
        root.join("gradlew.bat"),
        include_bytes!("../../assets/android/gen/gradlew.bat"),
    )?;
    write(
        root.join("settings.gradle"),
        include_bytes!("../../assets/android/gen/settings.gradle"),
    )?;

    // Then the wrapper and its properties
    write(
        wrapper.join("gradle-wrapper.properties"),
        include_bytes!("../../assets/android/gen/gradle/wrapper/gradle-wrapper.properties"),
    )?;
    write(
        wrapper.join("gradle-wrapper.jar"),
        include_bytes!("../../assets/android/gen/gradle/wrapper/gradle-wrapper.jar"),
    )?;

    // Now the app directory
    write(
        app.join("build.gradle.kts"),
        hbs.render_template(
            include_str!("../../assets/android/gen/app/build.gradle.kts.hbs"),
            &hbs_data,
        )?,
    )?;
    write(
        app.join("proguard-rules.pro"),
        include_bytes!("../../assets/android/gen/app/proguard-rules.pro"),
    )?;

    // Copy additional ProGuard rule files from Dioxus.toml [android] config
    for rule_file in &self.config.android.proguard_rules {
        let src = self.package_manifest_dir().join(rule_file);
        if src.exists() {
            let dest_name = src
                .file_name()
                .unwrap_or_default()
                .to_string_lossy()
                .to_string();
            std::fs::copy(&src, app.join(&dest_name))?;
            tracing::debug!("Copied ProGuard rules: {}", dest_name);
        } else {
            tracing::warn!("ProGuard rules file not found: {}", src.display());
        }
    }

    // A user-supplied manifest path takes precedence over the rendered template.
    let manifest_xml = match self.config.application.android_manifest.as_deref() {
        Some(manifest) => std::fs::read_to_string(self.package_manifest_dir().join(manifest))
            .context("Failed to locate custom AndroidManifest.xml")?,
        _ => hbs.render_template(
            include_str!("../../assets/android/gen/app/src/main/AndroidManifest.xml.hbs"),
            &hbs_data,
        )?,
    };

    write(
        app.join("src").join("main").join("AndroidManifest.xml"),
        manifest_xml,
    )?;

    // Write the main activity manually since tao dropped support for it
    let main_activity = match self.config.application.android_main_activity.as_deref() {
        Some(activity) => std::fs::read_to_string(self.package_manifest_dir().join(activity))
            .context("Failed to locate custom MainActivity.kt")?,
        _ => hbs.render_template(
            include_str!("../../assets/android/MainActivity.kt.hbs"),
            &hbs_data,
        )?,
    };
    write(
        self.wry_android_kotlin_files_out_dir()
            .join("MainActivity.kt"),
        main_activity,
    )?;

    // Write the res folder, containing stuff like default icons, colors, and menubars.
    let res = app_main.join("res");
    create_dir_all(&res)?;
    create_dir_all(res.join("values"))?;
    write(
        res.join("values").join("strings.xml"),
        hbs.render_template(
            include_str!("../../assets/android/gen/app/src/main/res/values/strings.xml.hbs"),
            &hbs_data,
        )?,
    )?;
    write(
        res.join("values").join("colors.xml"),
        include_bytes!("../../assets/android/gen/app/src/main/res/values/colors.xml"),
    )?;
    write(
        res.join("values").join("styles.xml"),
        include_bytes!("../../assets/android/gen/app/src/main/res/values/styles.xml"),
    )?;
    create_dir_all(res.join("xml"))?;
    write(
        res.join("xml").join("network_security_config.xml"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/xml/network_security_config.xml"
        ),
    )?;
    create_dir_all(res.join("drawable"))?;
    write(
        res.join("drawable").join("ic_launcher_background.xml"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/drawable/ic_launcher_background.xml"
        ),
    )?;
    create_dir_all(res.join("drawable-v24"))?;
    write(
        res.join("drawable-v24").join("ic_launcher_foreground.xml"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/drawable-v24/ic_launcher_foreground.xml"
        ),
    )?;
    create_dir_all(res.join("mipmap-anydpi-v26"))?;
    write(
        res.join("mipmap-anydpi-v26").join("ic_launcher.xml"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml"
        ),
    )?;
    // Default launcher icons for every density bucket.
    create_dir_all(res.join("mipmap-hdpi"))?;
    write(
        res.join("mipmap-hdpi").join("ic_launcher.webp"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-hdpi/ic_launcher.webp"
        ),
    )?;
    create_dir_all(res.join("mipmap-mdpi"))?;
    write(
        res.join("mipmap-mdpi").join("ic_launcher.webp"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-mdpi/ic_launcher.webp"
        ),
    )?;
    create_dir_all(res.join("mipmap-xhdpi"))?;
    write(
        res.join("mipmap-xhdpi").join("ic_launcher.webp"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-xhdpi/ic_launcher.webp"
        ),
    )?;
    create_dir_all(res.join("mipmap-xxhdpi"))?;
    write(
        res.join("mipmap-xxhdpi").join("ic_launcher.webp"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp"
        ),
    )?;
    create_dir_all(res.join("mipmap-xxxhdpi"))?;
    write(
        res.join("mipmap-xxxhdpi").join("ic_launcher.webp"),
        include_bytes!(
            "../../assets/android/gen/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp"
        ),
    )?;

    Ok(())
}

/// The directory where wry/tao expect the generated Kotlin sources for the
/// hardcoded `dev.dioxus.main` package: `<root>/app/src/main/kotlin/dev/dioxus/main`.
fn wry_android_kotlin_files_out_dir(&self) -> PathBuf {
    let mut kotlin_dir = self
        .root_dir()
        .join("app")
        .join("src")
        .join("main")
        .join("kotlin");

    for segment in "dev.dioxus.main".split('.') {
        kotlin_dir = kotlin_dir.join(segment);
    }

    kotlin_dir
}

/// Idempotently add `dependency_line` to the `dependencies { ... }` block of a gradle file,
/// creating the block if it doesn't exist.
fn ensure_gradle_dependency(&self, build_gradle: &Path, dependency_line: &str) -> Result<()> {
    use std::fs;
    let mut contents = fs::read_to_string(build_gradle)?;
    if contents.contains(dependency_line) {
        return Ok(());
    }
    if let Some(idx) = contents.find("dependencies {") {
        let insert_pos = idx + "dependencies {".len();
        contents.insert_str(insert_pos, &format!("\n    {dependency_line}"));
    } else {
        contents.push_str(&format!("\ndependencies {{\n    {dependency_line}\n}}\n"));
    }
    fs::write(build_gradle, contents)?;
    Ok(())
}

/// Get the directory where this app can write to for this session that's guaranteed to be stable
/// for the same app. This is useful for emitting state like window position and size.
///
/// The directory is specific for this app and bundle format.
pub(crate) fn session_cache_dir(&self) -> PathBuf {
    self.session_cache_dir.join(self.bundle.to_string())
}

/// Where the rustc-wrapper writes its per-invocation argument files for this session.
pub(crate) fn rustc_wrapper_args_dir(&self) -> PathBuf {
    self.session_cache_dir().join("rustc_wrapper_args")
}

/// The crate name that rustc uses for the tip crate (hyphens replaced with underscores).
fn tip_crate_name(&self) -> String {
    self.main_target.replace('-', "_")
}

// File the `dx` linker shim writes link errors to for this session.
fn link_err_file(&self) -> PathBuf {
    self.session_cache_dir().join("link_err.txt")
}

// File the `dx` linker shim writes the captured linker arguments to.
fn link_args_file(&self) -> PathBuf {
    self.session_cache_dir().join("link_args.json")
}

// Scratch file used to relay the windows link command for this session.
fn windows_command_file(&self) -> PathBuf {
    self.session_cache_dir().join("windows_command.txt")
}

/// Get the outdir specified by the Dioxus.toml, relative to the crate directory.
/// We don't support workspaces yet since that would cause a collision of bundles per project.
pub(crate) fn crate_out_dir(&self) -> Option {
    self.config
        .application
        .out_dir
        .as_ref()
        .map(|out_dir| self.crate_dir().join(out_dir))
}

/// Compose an out directory. Represents the typical "dist" directory that
/// is "distributed" after building an application (configurable in the
/// `Dioxus.toml`).
fn internal_out_dir(&self) -> PathBuf {
    let dir = self.target_dir.join("dx");
    std::fs::create_dir_all(&dir).unwrap();
    dir
}

/// target/dx/bundle/app/
/// target/dx/bundle/app/blah.app
/// target/dx/bundle/app/blah.exe
/// target/dx/bundle/app/public/
pub(crate) fn bundle_dir(&self, bundle: BundleFormat) -> PathBuf {
    self.internal_out_dir()
        .join(&self.main_target)
        .join("bundle")
        .join(bundle.build_folder_name())
}

/// Get the workspace directory for the crate
pub(crate) fn workspace_dir(&self) -> PathBuf {
    self.workspace
        .krates
        .workspace_root()
        .as_std_path()
        .to_path_buf()
}

/// Get the directory of the crate
pub(crate) fn crate_dir(&self) -> PathBuf {
    self.package()
        .manifest_path
        .parent()
        .unwrap()
        .as_std_path()
        .to_path_buf()
}

/// Get the package we are currently in
pub(crate) fn package(&self) -> &krates::cm::Package {
    &self.workspace.krates[self.crate_package]
}

/// Get the name of the package we are compiling
pub(crate) fn executable_name(&self) -> &str {
    &self.crate_target.name
}

/// Get the type of executable we are compiling
pub(crate) fn executable_type(&self) -> TargetKind {
    self.crate_target.kind[0].clone()
}

/// Get the features required to build for the given platform
fn feature_for_platform_and_renderer(
    package: &krates::cm::Package,
    triple: &Triple,
    renderer: Renderer,
) -> Option {
    // Try to find the feature that activates the dioxus feature for the given platform
    let dioxus_feature = renderer.feature_name(triple);

    let res = package.features.iter().find_map(|(key, features)| {
        // if the feature is just the name of the platform, we use that
        if key == dioxus_feature {
            tracing::debug!("Found feature {key} for renderer {renderer}");
            return Some(key.clone());
        }

        // Otherwise look for the feature that starts with dioxus/ or dioxus?/ and matches just the single platform
        // we are looking for.
        let mut dioxus_renderers_enabled = Vec::new();
        for feature in features {
            if let Some((_, after_dioxus)) = feature.split_once("dioxus") {
                // `trim_start_matches('?')` also accepts optional-dependency syntax `dioxus?/web`
                if let Some(dioxus_feature_enabled) =
                    after_dioxus.trim_start_matches('?').strip_prefix('/')
                {
                    // If that enables the renderer we are looking for, we can use that feature
                    if Renderer::autodetect_from_cargo_feature(dioxus_feature_enabled).is_some()
                    {
                        dioxus_renderers_enabled.push(dioxus_feature_enabled.to_string());
                    }
                }
            }
        }

        // If there is exactly one renderer enabled by this feature, we can use it
        if let [feature_name] = dioxus_renderers_enabled.as_slice() {
            if feature_name == dioxus_feature {
                tracing::debug!(
                    "Found feature {key} for renderer {renderer} which enables dioxus/{renderer}"
                );
                return Some(key.clone());
            }
        }

        None
    });

    res.or_else(|| {
        // No explicit feature found - fall back to passing `dioxus/<renderer>` directly,
        // which only makes sense when the package actually depends on dioxus.
        let depends_on_dioxus = package.dependencies.iter().any(|dep| dep.name == "dioxus");
        if depends_on_dioxus {
            let fallback = format!("dioxus/{dioxus_feature}");
            tracing::debug!(
                "Could not find explicit feature for renderer {renderer}, passing `fallback` instead"
            );
            Some(fallback)
        } else {
            None
        }
    })
}

/// Checks the strip setting for the package, resolving profiles recursively
pub(crate) fn get_strip_setting(&self) -> StripSetting {
    let cargo_toml = &self.workspace.cargo_toml;
    let profile = &self.profile;
    let release = self.release;

    // A custom profile with this name wins; otherwise fall back to release/dev.
    let profile = match (cargo_toml.profile.custom.get(profile), release) {
        (Some(custom_profile), _) => Some(custom_profile),
        (_, true) => cargo_toml.profile.release.as_ref(),
        (_, false) => cargo_toml.profile.dev.as_ref(),
    };

    let Some(profile) = profile else {
        return StripSetting::None;
    };

    // Get the strip setting from the profile or the profile it inherits from
    fn get_strip(profile: &Profile, profiles: &Profiles) -> Option {
        profile.strip.or_else(|| {
            // If we can't find the strip setting, check if we inherit from another profile
            profile.inherits.as_ref().and_then(|inherits| {
                let profile = match inherits.as_str() {
                    "dev" => profiles.dev.as_ref(),
                    "release" => profiles.release.as_ref(),
                    "test" => profiles.test.as_ref(),
                    "bench" => profiles.bench.as_ref(),
                    other => profiles.custom.get(other),
                };
                profile.and_then(|p| get_strip(p, profiles))
            })
        })
    }

    let Some(strip) = get_strip(profile, &cargo_toml.profile) else {
        // If the profile doesn't have a strip option, return None
        return StripSetting::None;
    };

    strip
}

/// Detect the renderer enabled directly on the `dioxus` dependency's feature list.
/// Returns `Some` only when exactly one renderer is enabled there.
pub(crate) fn renderer_enabled_by_dioxus_dependency(
    package: &krates::cm::Package,
) -> Option<(Renderer, String)> {
    let mut renderers = vec![];

    // Attempt to discover the platform directly from the dioxus dependency
    //
    // [dependencies]
    // dioxus = { features = ["web"] }
    //
    if let Some(dxs) = package.dependencies.iter().find(|dep| dep.name == "dioxus") {
        for feature in dxs.features.iter() {
            if let Some(renderer) = Renderer::autodetect_from_cargo_feature(feature) {
                renderers.push((renderer, format!("dioxus/{}", feature)));
            }
        }
    }

    // Ambiguous (zero or multiple renderers) - let the caller decide.
    if renderers.len() != 1 {
        return None;
    }

    Some(renderers[0].clone())
}

/// All of the package's own features whose names match a known renderer.
pub(crate) fn features_that_enable_renderers(
    package: &krates::cm::Package,
) -> Vec<(Renderer, String)> {
    package
        .features
        .keys()
        .filter_map(|key| {
            Renderer::autodetect_from_cargo_feature(key).map(|v| (v, key.to_string()))
        })
        .collect()
}

/// Return the platforms that are enabled for the package only from the default features
///
/// Ideally only one platform is enabled but we need to be able to report multiple matches.
pub(crate) fn
enabled_cargo_toml_default_features_renderers(
    package: &krates::cm::Package,
) -> Vec<(Renderer, String)> {
    let mut renderers = vec![];

    // Start searching through the default features
    //
    // [features]
    // default = ["dioxus/web"]
    //
    // or
    //
    // [features]
    // default = ["web"]
    // web = ["dioxus/web"]
    let Some(default) = package.features.get("default") else {
        return renderers;
    };

    // we only trace features 1 level deep..
    // TODO: trace all enabled features, not just default features
    for feature in default.iter() {
        // If the user directly specified a platform we can just use that.
        if feature.starts_with("dioxus/") {
            let dx_feature = feature.trim_start_matches("dioxus/");
            let auto = Renderer::autodetect_from_cargo_feature(dx_feature);
            if let Some(auto) = auto {
                renderers.push((auto, dx_feature.to_string()));
            }
        }

        // If the user is specifying an internal feature that points to a platform, we can use that
        let internal_feature = package.features.get(feature);
        if let Some(internal_feature) = internal_feature {
            for feature in internal_feature {
                if feature.starts_with("dioxus/") {
                    let dx_feature = feature.trim_start_matches("dioxus/");
                    let auto = Renderer::autodetect_from_cargo_feature(dx_feature);
                    if let Some(auto) = auto {
                        renderers.push((auto, dx_feature.to_string()));
                    }
                }
            }
        }
    }

    // Sort before dedup so duplicate renderers found via different paths collapse.
    renderers.sort();
    renderers.dedup();

    renderers
}

/// Gather the features that are enabled for the package
/// (the default features minus any that directly or transitively select a renderer).
fn rendererless_features(package: &krates::cm::Package) -> Vec {
    let Some(default) = package.features.get("default") else {
        return Vec::new();
    };

    let mut kept_features = vec![];

    // Only keep the top-level features in the default list that don't point to a platform directly
    // IE we want to drop `web` if default = ["web"]
    'top: for feature in default {
        // Don't keep features that point to a platform via dioxus/blah
        if feature.starts_with("dioxus/") {
            let dx_feature = feature.trim_start_matches("dioxus/");
            if Renderer::autodetect_from_cargo_feature(dx_feature).is_some() {
                tracing::debug!(
                    "Dropping feature {feature} since it points to a platform renderer"
                );
                continue 'top;
            }
        }

        // Don't keep features that point to a platform via an internal feature
        if let Some(internal_feature) = package.features.get(feature) {
            for feature in internal_feature {
                if feature.starts_with("dioxus/") {
                    let dx_feature = feature.trim_start_matches("dioxus/");
                    if Renderer::autodetect_from_cargo_feature(dx_feature).is_some() {
                        tracing::debug!(
                            "Dropping feature {feature} since it points to a platform renderer transitively"
                        );
                        continue 'top;
                    }
                }
            }
        }

        // Otherwise we can keep it
        kept_features.push(feature.to_string());
    }

    kept_features
}

/// The PascalCase app name used for bundles (e.g. `my-app` -> `MyApp`).
pub(crate) fn bundled_app_name(&self) -> String {
    use convert_case::{Case, Casing};
    self.executable_name().to_case(Case::Pascal)
}

/// Get the crate version from Cargo.toml (e.g., "0.1.0")
fn crate_version(&self) -> String {
    self.workspace.krates[self.crate_package]
        .version
        .to_string()
}

/// Resolve the reverse-DNS bundle identifier, validating it and falling back to
/// `com.example.<AppName>` when the configured identifier is missing or invalid.
pub(crate) fn bundle_identifier(&self) -> String {
    use crate::config::BundlePlatform;

    // Check platform-specific identifier override first, then fall back to base bundle
    let platform: BundlePlatform = self.bundle.into();
    if let Some(identifier) = self.config.resolved_identifier(platform) {
        let identifier = identifier.to_string();
        // Must contain a '.', with no leading/trailing/double dots.
        if identifier.contains('.')
            && !identifier.starts_with('.')
            && !identifier.ends_with('.')
            && !identifier.contains("..")
        {
            return identifier;
        } else {
            tracing::error!(
                "Invalid bundle identifier: {identifier:?}. Must contain at least one '.' and not start/end with '.'. E.g. `com.example.app`"
            );
        }
    }

    format!("com.example.{}", self.bundled_app_name())
}

/// The item that we'll try to run directly if we need to.
///
/// todo(jon): we should name the app properly instead of making up the exe name.
It's kinda okay for dev mode, but def not okay for prod pub(crate) fn main_exe(&self) -> PathBuf { self.exe_dir().join(self.platform_exe_name()) } fn is_wasm_or_wasi(&self) -> bool { matches!( self.triple.architecture, target_lexicon::Architecture::Wasm32 | target_lexicon::Architecture::Wasm64 ) || self.triple.operating_system == target_lexicon::OperatingSystem::Wasi } /// Does the app specify: /// /// - Dioxus with "fullstack" enabled? (access to serverfns, etc) /// - An explicit "fullstack" feature that enables said feature? /// /// Note that we don't detect if dependencies enable it transitively since we want to be explicit about it. /// /// The intention here is to detect if "fullstack" is enabled in the target's features list: /// ```toml /// [dependencies] /// dioxus = { version = "0.4", features = ["fullstack"] } /// ``` /// /// or as an explicit feature in default: /// ```toml /// [features] /// default = ["dioxus/fullstack"] /// ``` /// /// or as a default feature that enables the dioxus feature: /// ```toml /// [features] /// default = ["fullstack"] /// fullstack = ["dioxus/fullstack"] /// ``` /// /// or as an explicit feature (that enables the dioxus feature): /// ``` /// dx serve app --features "fullstack" /// ``` pub(crate) fn fullstack_feature_enabled(&self) -> bool { let dioxus_dep = self .package() .dependencies .iter() .find(|dep| dep.name == "dioxus"); // If we don't have a dioxus dependency, we can't be fullstack. 
This shouldn't impact non-dioxus projects let Some(dioxus_dep) = dioxus_dep else { return false; }; // Check if the dioxus dependency has the "fullstack" feature enabled if dioxus_dep.features.iter().any(|f| f == "fullstack") { return true; } // Check if any of the features in our feature list enables a feature that enables "fullstack" let transitive = self .package() .features .iter() .filter(|(_name, list)| list.iter().any(|f| f == "dioxus/fullstack")); for (name, _list) in transitive { if self.features.contains(name) { return true; } } false } /// todo(jon): use handlebars templates instead of these prebaked templates async fn write_metadata(&self) -> Result<()> { // write the Info.plist file match self.bundle { BundleFormat::MacOS => { let dest = self.root_dir().join("Contents").join("Info.plist"); let plist = self.info_plist_contents(self.bundle)?; std::fs::write(dest, plist)?; } BundleFormat::Ios => { let dest = self.root_dir().join("Info.plist"); let plist = self.info_plist_contents(self.bundle)?; std::fs::write(dest, plist)?; } // AndroidManifest.xml // er.... maybe even all the kotlin/java/gradle stuff? BundleFormat::Android => {} // Probably some custom format or a plist file (haha) // When we do the proper bundle, we'll need to do something with wix templates, I think? BundleFormat::Windows => {} // eventually we'll create the .appimage file, I guess? BundleFormat::Linux => {} // These are served as folders, not appimages, so we don't need to do anything special (I think?) // Eventually maybe write some secrets/.env files for the server? 
// We could also distribute them as a deb/rpm for linux and msi for windows BundleFormat::Web => {} BundleFormat::Server => {} } Ok(()) } /// Run the optimizers, obfuscators, minimizers, signers, etc async fn optimize(&self, ctx: &BuildContext) -> Result<()> { match self.bundle { BundleFormat::Web => { // Compress the asset dir // If pre-compressing is enabled, we can pre_compress the wasm-bindgen output let pre_compress = self.should_pre_compress_web_assets(self.release); if pre_compress { ctx.status_compressing_assets(); let asset_dir = self.asset_dir(); tokio::task::spawn_blocking(move || { crate::fastfs::pre_compress_folder(&asset_dir, pre_compress) }) .await .unwrap()?; } } BundleFormat::MacOS | BundleFormat::Windows | BundleFormat::Linux | BundleFormat::Ios | BundleFormat::Android | BundleFormat::Server => {} } Ok(()) } /// Strip the final binary after extracting all assets with rustc-objcopy async fn strip_binary(&self, artifacts: &BuildArtifacts) -> Result<()> { // Never strip the binary if we are going to bundle split it if self.wasm_split { return Ok(()); } let exe = &artifacts.exe; // https://github.com/rust-lang/rust/blob/cb80ff132a0e9aa71529b701427e4e6c243b58df/compiler/rustc_codegen_ssa/src/back/linker.rs#L1433-L1443 let strip_arg = match self.get_strip_setting() { StripSetting::Debuginfo => Some("--strip-debug"), StripSetting::Symbols => Some("--strip-all"), StripSetting::None => None, }; if let Some(strip_arg) = strip_arg { let rustc_objcopy = self.workspace.rustc_objcopy(); let dylib_path = self.workspace.rustc_objcopy_dylib_path(); let mut command = Command::new(rustc_objcopy); command.env("LD_LIBRARY_PATH", &dylib_path); command.arg(strip_arg).arg(exe).arg(exe); let output = command.output().await?; if !output.status.success() { if let Ok(stdout) = std::str::from_utf8(&output.stdout) { tracing::error!("{}", stdout); } if let Ok(stderr) = std::str::from_utf8(&output.stderr) { tracing::error!("{}", stderr); } return Err(anyhow::anyhow!("Failed to 
strip binary")); } } Ok(()) } /// Check if assets should be pre_compressed. This will only be true in release mode if the user /// has enabled pre_compress in the web config. fn should_pre_compress_web_assets(&self, release: bool) -> bool { self.config.web.pre_compress & release } /// Check if the wasm output should be bundled to an asset type app. fn should_bundle_to_asset(&self) -> bool { self.release && !self.wasm_split && self.bundle == BundleFormat::Web } /// Bundle the web app /// - Run wasm-bindgen /// - Bundle split /// - Run wasm-opt /// - Register the .wasm and .js files with the asset system async fn bundle_web( &self, ctx: &BuildContext, exe: &Path, assets: &mut AssetManifest, ) -> Result<()> { use crate::{wasm_bindgen::WasmBindgen, wasm_opt}; use std::fmt::Write; // Locate the output of the build files and the bindgen output // We'll fill these in a second if they don't already exist let bindgen_outdir = self.wasm_bindgen_out_dir(); let post_bindgen_wasm = self.wasm_bindgen_wasm_output_file(); let should_bundle_split: bool = self.wasm_split; let bindgen_version = self .workspace .wasm_bindgen_version() .expect("this should have been checked by tool verification"); // Prepare any work dirs _ = std::fs::remove_dir_all(&bindgen_outdir); std::fs::create_dir_all(&bindgen_outdir)?; // Lift the internal functions to exports if ctx.mode == BuildMode::Fat { let unprocessed = std::fs::read(exe)?; let all_exported_bytes = crate::build::prepare_wasm_base_module(&unprocessed)?; std::fs::write(exe, all_exported_bytes)?; } // Prepare our configuration // // we turn on debug symbols in dev mode // // We leave demangling to false since it's faster and these tools seem to prefer the raw symbols. // todo(jon): investigate if the chrome extension needs them demangled or demangles them automatically. 
let keep_debug = self.config.web.wasm_opt.debug || self.debug_symbols || self.wasm_split || !self.release || ctx.mode == BuildMode::Fat; let keep_names = self.config.web.wasm_opt.keep_names || self.keep_names || self.wasm_split || ctx.mode == BuildMode::Fat; let demangle = false; let wasm_opt_options = WasmOptConfig { memory_packing: self.wasm_split, debug: self.debug_symbols, ..self.config.web.wasm_opt.clone() }; // Run wasm-bindgen. Some of the options are not "optimal" but will be fixed up by wasm-opt // // There's performance implications here. Running with --debug is slower than without // We're keeping around lld sections and names but wasm-opt will fix them // todo(jon): investigate a good balance of wiping debug symbols during dev (or doing a double build?) ctx.status_wasm_bindgen_start(); tracing::debug!(dx_src = ?TraceSrc::Bundle, "Running wasm-bindgen"); let start = std::time::Instant::now(); WasmBindgen::new(&bindgen_version) .input_path(exe) .target("web") .debug(keep_debug) .demangle(demangle) .keep_debug(keep_debug) .keep_lld_sections(true) .out_name(self.executable_name()) .out_dir(&bindgen_outdir) .remove_name_section(!keep_names) .remove_producers_section(!keep_names) .run() .await .context("Failed to generate wasm-bindgen bindings")?; tracing::debug!(dx_src = ?TraceSrc::Bundle, "wasm-bindgen complete in {:?}", start.elapsed()); // Run bundle splitting if the user has requested it // It's pretty expensive but because of rayon should be running separate threads, hopefully // not blocking this thread. 
Dunno if that's true if should_bundle_split { ctx.status_splitting_bundle(); // Load the contents of these binaries since we need both of them // We're going to use the default makeLoad glue from wasm-split let original = std::fs::read(exe)?; let bindgened = std::fs::read(&post_bindgen_wasm)?; let mut glue = wasm_split_cli::MAKE_LOAD_JS.to_string(); // Run the emitter let splitter = wasm_split_cli::Splitter::new(&original, &bindgened); let modules = splitter .context("Failed to parse wasm for splitter")? .emit() .context("Failed to emit wasm split modules")?; // Write the chunks that contain shared imports // These will be in the format of chunk_0_modulename.wasm - this is hardcoded in wasm-split tracing::debug!("Writing split chunks to disk"); for (idx, chunk) in modules.chunks.iter().enumerate() { let path = bindgen_outdir.join(format!("chunk_{}_{}.wasm", idx, chunk.module_name)); wasm_opt::write_wasm(&chunk.bytes, &path, &wasm_opt_options).await?; writeln!( glue, "export const __wasm_split_load_chunk_{idx} = makeLoad(\"/{base_path}/assets/{url}\", [], fusedImports);", base_path = self.base_path_or_default(), url = assets .register_asset(&path, AssetOptions::builder().into_asset_options())?.bundled_path(), )?; } // Write the modules that contain the entrypoints tracing::debug!("Writing split modules to disk"); for (idx, module) in modules.modules.iter().enumerate() { let comp_name = module .component_name .as_ref() .context("generated bindgen module has no name?")?; let path = bindgen_outdir.join(format!("module_{idx}_{comp_name}.wasm")); wasm_opt::write_wasm(&module.bytes, &path, &wasm_opt_options).await?; let hash_id = module .hash_id .as_ref() .context("generated wasm-split bindgen module has no hash id?")?; writeln!( glue, "export const __wasm_split_load_{module}_{hash_id}_{comp_name} = makeLoad(\"/{base_path}/assets/{url}\", [{deps}], fusedImports);", module = module.module_name, base_path = self.base_path_or_default(), // Again, register this wasm with the 
asset system url = assets .register_asset(&path, AssetOptions::builder().into_asset_options())? .bundled_path(), // This time, make sure to write the dependencies of this chunk // The names here are again, hardcoded in wasm-split - fix this eventually. deps = module .relies_on_chunks .iter() .map(|idx| format!("__wasm_split_load_chunk_{idx}")) .collect::>() .join(", ") )?; } // Write the js binding // It's not registered as an asset since it will get included in the main.js file let js_output_path = bindgen_outdir.join("__wasm_split.js"); std::fs::write(&js_output_path, &glue)?; // Make sure to write some entropy to the main.js file so it gets a new hash // If we don't do this, the main.js file will be cached and never pick up the chunk names let uuid = Uuid::new_v5(&Uuid::NAMESPACE_URL, glue.as_bytes()); std::fs::OpenOptions::new() .append(true) .open(self.wasm_bindgen_js_output_file()) .context("Failed to open main.js file")? .write_all(format!("/*{uuid}*/").as_bytes())?; // Write the main wasm_bindgen file and register it with the asset system // This will overwrite the file in place // We will wasm-opt it in just a second... 
std::fs::write(&post_bindgen_wasm, modules.main.bytes).unwrap(); } if matches!(ctx.mode, BuildMode::Fat) { // add `export { __wbg_get_imports };` to the end of the wasmbindgen js file let mut js = std::fs::read(self.wasm_bindgen_js_output_file())?; writeln!(js, "\nexport {{ __wbg_get_imports }};")?; std::fs::write(self.wasm_bindgen_js_output_file(), js)?; } // Make sure to optimize the main wasm file if requested or if bundle splitting if should_bundle_split || self.release { ctx.status_optimizing_wasm(); wasm_opt::optimize(&post_bindgen_wasm, &post_bindgen_wasm, &wasm_opt_options).await?; } if self.should_bundle_to_asset() { // Make sure to register the main wasm file with the asset system assets.register_asset( &post_bindgen_wasm, AssetOptions::builder().into_asset_options(), )?; } // Now that the wasm is registered as an asset, we can write the js glue shim self.write_js_glue_shim(assets)?; if self.should_bundle_to_asset() { // Register the main.js with the asset system so it bundles in the snippets and optimizes assets.register_asset( &self.wasm_bindgen_js_output_file(), AssetOptions::js() .with_minify(true) .with_preload(true) .into_asset_options(), )?; } // Write the index.html file with the pre-configured contents we got from pre-rendering self.write_index_html(assets)?; Ok(()) } fn write_js_glue_shim(&self, assets: &AssetManifest) -> Result<()> { let wasm_path = self.bundled_wasm_path(assets); // Load and initialize wasm without requiring a separate javascript file. // This also allows using a strict Content-Security-Policy. 
let mut js = std::fs::OpenOptions::new() .append(true) .open(self.wasm_bindgen_js_output_file())?; let mut buf_writer = std::io::BufWriter::new(&mut js); writeln!( buf_writer, r#" globalThis.__wasm_split_main_initSync = initSync; // Actually perform the load __wbg_init({{module_or_path: "/{}/{wasm_path}"}}).then((wasm) => {{ // assign this module to be accessible globally globalThis.__dx_mainWasm = wasm; globalThis.__dx_mainInit = __wbg_init; globalThis.__dx_mainInitSync = initSync; globalThis.__dx___wbg_get_imports = __wbg_get_imports; if (wasm.__wbindgen_start == undefined) {{ wasm.main(); }} }}); "#, self.base_path_or_default(), )?; Ok(()) } /// Write the index.html file to the output directory. This must be called after the wasm and js /// assets are registered with the asset system if this is a release build. pub(crate) fn write_index_html(&self, assets: &AssetManifest) -> Result<()> { let wasm_path = self.bundled_wasm_path(assets); let js_path = self.bundled_js_path(assets); // Write the index.html file with the pre-configured contents we got from pre-rendering std::fs::write( self.root_dir().join("index.html"), self.prepare_html(assets, &wasm_path, &js_path).unwrap(), )?; Ok(()) } fn bundled_js_path(&self, assets: &AssetManifest) -> String { let wasm_bindgen_js_out = self.wasm_bindgen_js_output_file(); if self.should_bundle_to_asset() { let name = assets .get_first_asset_for_source(&wasm_bindgen_js_out) .expect("The js source must exist before creating index.html"); format!("assets/{}", name.bundled_path()) } else { format!( "wasm/{}", wasm_bindgen_js_out.file_name().unwrap().to_str().unwrap() ) } } /// Get the path to the wasm-bindgen output files. 
Either the direct file or the optimized one depending on the build mode fn bundled_wasm_path(&self, assets: &AssetManifest) -> String { let wasm_bindgen_wasm_out = self.wasm_bindgen_wasm_output_file(); if self.should_bundle_to_asset() { let name = assets .get_first_asset_for_source(&wasm_bindgen_wasm_out) .expect("The wasm source must exist before creating index.html"); format!("assets/{}", name.bundled_path()) } else { format!( "wasm/{}", wasm_bindgen_wasm_out.file_name().unwrap().to_str().unwrap() ) } } fn info_plist_contents(&self, bundle: BundleFormat) -> Result { /// A permission entry for plist (key + description) #[derive(Serialize)] struct PlistPermission { key: String, description: String, } #[derive(Serialize)] pub struct InfoPlistData { pub display_name: String, pub bundle_name: String, pub bundle_identifier: String, pub executable_name: String, /// App version string (from Cargo.toml) pub version: String, /// Permission usage descriptions pub permissions: Vec, /// Additional plist entries as raw XML pub plist_entries: String, /// Raw plist XML to inject pub raw_plist: String, /// Minimum system version (macOS only) pub minimum_system_version: String, /// URL schemes for deep linking pub url_schemes: Vec, /// iOS UIBackgroundModes pub background_modes: Vec, } // Attempt to use the user's manually specified let _app = &self.config.application; match bundle { BundleFormat::MacOS => { if let Some(macos_info_plist) = _app.macos_info_plist.as_deref() { return Ok(std::fs::read_to_string(macos_info_plist)?); } } BundleFormat::Ios => { if let Some(macos_info_plist) = _app.ios_info_plist.as_deref() { return Ok(std::fs::read_to_string(macos_info_plist)?); } } _ => {} } // Get permission mapper from config let mapper = super::manifest_mapper::ManifestMapper::from_config( &self.config.permissions, &self.config.deep_links, &self.config.background, &self.config.android, &self.config.ios, &self.config.macos, ); match bundle { BundleFormat::MacOS => { // Convert macOS 
plist entries to permission structs let permissions: Vec = mapper .macos_plist_entries .iter() .map(|p| PlistPermission { key: p.key.clone(), description: p.value.clone(), }) .collect(); // Generate plist entries from config let plist_entries = generate_plist_entries(&self.config.macos.plist); let raw_plist = self.config.macos.raw.info_plist.clone().unwrap_or_default(); let minimum_system_version = self .config .macos .minimum_system_version .clone() .unwrap_or_else(|| "10.15".to_string()); handlebars::Handlebars::new() .render_template( include_str!("../../assets/macos/mac.plist.hbs"), &InfoPlistData { display_name: self.bundled_app_name(), bundle_name: self.bundled_app_name(), executable_name: self.platform_exe_name(), bundle_identifier: self.bundle_identifier(), version: self.crate_version(), permissions, plist_entries, raw_plist, minimum_system_version, url_schemes: mapper.macos_url_schemes.clone(), background_modes: Vec::new(), // macOS doesn't use UIBackgroundModes }, ) .map_err(|e| e.into()) } BundleFormat::Ios => { // Convert iOS plist entries to permission structs let permissions: Vec = mapper .ios_plist_entries .iter() .map(|p| PlistPermission { key: p.key.clone(), description: p.value.clone(), }) .collect(); // Generate plist entries from config let plist_entries = generate_plist_entries(&self.config.ios.plist); let raw_plist = self.config.ios.raw.info_plist.clone().unwrap_or_default(); handlebars::Handlebars::new() .render_template( include_str!("../../assets/ios/ios.plist.hbs"), &InfoPlistData { display_name: self.bundled_app_name(), bundle_name: self.bundled_app_name(), executable_name: self.platform_exe_name(), bundle_identifier: self.bundle_identifier(), version: self.crate_version(), permissions, plist_entries, raw_plist, minimum_system_version: String::new(), // Not used for iOS url_schemes: mapper.ios_url_schemes.clone(), background_modes: mapper.ios_background_modes.clone(), }, ) .map_err(|e| e.into()) } _ => Err(anyhow::anyhow!("Unsupported 
platform for Info.plist")), } } /// Run any final tools to produce apks or other artifacts we might need. /// /// This might include codesigning, zipping, creating an appimage, etc async fn assemble(&self, ctx: &BuildContext) -> Result<()> { if let BundleFormat::Android = self.bundle { ctx.status_running_gradle(); // When the build mode is set to release and there is an Android signature configuration, use assembleRelease let build_type = if self.release && self.config.bundle.android.is_some() { "assembleRelease" } else { "assembleDebug" }; let output = Command::new(self.gradle_exe()?) .arg(build_type) .current_dir(self.root_dir()) .output() .await .context("Failed to run gradle")?; if !output.status.success() { bail!( "Failed to assemble apk: {}", String::from_utf8_lossy(&output.stderr) ); } } // if the triple is a ios or macos target, we need to codesign the binary if matches!( self.triple.operating_system, OperatingSystem::Darwin(_) | OperatingSystem::IOS(_) ) && self.should_codesign { self.codesign_apple(ctx).await?; } Ok(()) } /// Run bundleRelease and return the path to the `.aab` file /// /// pub(crate) async fn android_gradle_bundle(&self) -> Result { let output = Command::new(self.gradle_exe()?) 
.arg("bundleRelease") .current_dir(self.root_dir()) .output() .await .context("Failed to run gradle bundleRelease")?; if !output.status.success() { bail!( "Failed to bundleRelease: {}", String::from_utf8_lossy(&output.stderr) ); } let app_release = self .root_dir() .join("app") .join("build") .join("outputs") .join("bundle") .join("release"); // Rename it to Name-arch.aab let from = app_release.join("app-release.aab"); let to = app_release.join(format!("{}-{}.aab", self.bundled_app_name(), self.triple)); std::fs::rename(from, &to).context("Failed to rename aab")?; Ok(to) } fn gradle_exe(&self) -> Result { // make sure we can execute the gradlew script #[cfg(unix)] { use std::os::unix::prelude::PermissionsExt; std::fs::set_permissions( self.root_dir().join("gradlew"), std::fs::Permissions::from_mode(0o755), ) .context("Failed to make gradlew executable")?; } let gradle_exec_name = match cfg!(windows) { true => "gradlew.bat", false => "gradlew", }; Ok(self.root_dir().join(gradle_exec_name)) } pub(crate) fn debug_apk_path(&self) -> PathBuf { self.root_dir() .join("app") .join("build") .join("outputs") .join("apk") .join("debug") .join("app-debug.apk") } /// We only really currently care about: /// /// - app dir (.app, .exe, .apk, etc) /// - assetas dir /// - exe dir (.exe, .app, .apk, etc) /// - extra scaffolding /// /// It's not guaranteed that they're different from any other folder pub(crate) fn prepare_build_dir(&self, ctx: &BuildContext) -> Result<()> { use std::fs::{create_dir_all, remove_dir_all}; use std::sync::OnceLock; static PRIMARY_INITIALIZED: OnceLock> = OnceLock::new(); static SECONDARY_INITIALIZED: OnceLock> = OnceLock::new(); let initializer = if ctx.is_primary_build() { &PRIMARY_INITIALIZED } else { &SECONDARY_INITIALIZED }; let success = initializer.get_or_init(|| { if ctx.is_primary_build() { _ = remove_dir_all(self.exe_dir()); } create_dir_all(self.root_dir())?; create_dir_all(self.exe_dir())?; create_dir_all(self.asset_dir())?; tracing::debug!( 
r#"Initialized build dirs: • root dir: {:?} • exe dir: {:?} • asset dir: {:?}"#, self.root_dir(), self.exe_dir(), self.asset_dir(), ); // we could download the templates from somewhere (github?) but after having banged my head against // cargo-mobile2 for ages, I give up with that. We're literally just going to hardcode the templates // by writing them here. if self.bundle == BundleFormat::Android { self.build_android_app_dir()?; } Ok(()) }); if let Err(e) = success.as_ref() { bail!("Failed to initialize build directory: {e}"); } Ok(()) } pub(crate) fn asset_dir(&self) -> PathBuf { match self.bundle { BundleFormat::MacOS => self .root_dir() .join("Contents") .join("Resources") .join("assets"), BundleFormat::Android => self .root_dir() .join("app") .join("src") .join("main") .join("assets"), // We put assets in public/assets for server apps BundleFormat::Server => self.root_dir().join("public").join("assets"), // everyone else is soooo normal, just app/assets :) BundleFormat::Web | BundleFormat::Ios | BundleFormat::Windows | BundleFormat::Linux => { self.root_dir().join("assets") } } } /// The directory in which we'll put the main exe /// /// Mac, Android, Web are a little weird /// - mac wants to be in Contents/MacOS /// - android wants to be in jniLibs/arm64-v8a (or others, depending on the platform / architecture) /// - web wants to be in wasm (which... we don't really need to, we could just drop the wasm into public and it would work) /// /// I think all others are just in the root folder /// /// todo(jon): investigate if we need to put .wasm in `wasm`. It kinda leaks implementation details, which ideally we don't want to do. 
fn exe_dir(&self) -> PathBuf { match self.bundle { BundleFormat::MacOS => self.root_dir().join("Contents").join("MacOS"), BundleFormat::Web => self.root_dir().join("wasm"), // Android has a whole build structure to it BundleFormat::Android => self .root_dir() .join("app") .join("src") .join("main") .join("jniLibs") .join(AndroidTools::android_jnilib(&self.triple)), // these are all the same, I think? BundleFormat::Windows | BundleFormat::Linux | BundleFormat::Ios | BundleFormat::Server => self.root_dir(), } } /// Get the path to the wasm bindgen temporary output folder fn wasm_bindgen_out_dir(&self) -> PathBuf { self.root_dir().join("wasm") } /// Get the path to the wasm bindgen javascript output file pub(crate) fn wasm_bindgen_js_output_file(&self) -> PathBuf { self.wasm_bindgen_out_dir() .join(self.executable_name()) .with_extension("js") } /// Get the path to the wasm bindgen wasm output file pub(crate) fn wasm_bindgen_wasm_output_file(&self) -> PathBuf { self.wasm_bindgen_out_dir() .join(format!("{}_bg", self.executable_name())) .with_extension("wasm") } /// Get the path to the app manifest file /// /// This includes metadata about the build such as the bundle format, target triple, features, etc. /// Manifests are only written by the `PRIMARY` build. pub(crate) fn app_manifest(&self) -> PathBuf { self.platform_dir().join(".manifest.json") } pub(crate) fn load_manifest(&self) -> Result { let manifest_path = self.app_manifest(); let manifest_data = std::fs::read_to_string(&manifest_path) .with_context(|| format!("Failed to read manifest at {:?}", &manifest_path))?; let manifest: AppManifest = serde_json::from_str(&manifest_data) .with_context(|| format!("Failed to parse manifest at {:?}", &manifest_path))?; Ok(manifest) } /// Check for tooling that might be required for this build. /// /// This should generally be only called on the first build since it takes time to verify the tooling /// is in place, and we don't want to slow down subsequent builds. 
pub(crate) async fn verify_tooling(&self, ctx: &BuildContext) -> Result<()> { ctx.status_installing_tooling(); self.verify_toolchain_installed().await?; match self.bundle { BundleFormat::Web => self.verify_web_tooling().await?, BundleFormat::Ios => self.verify_ios_tooling().await?, BundleFormat::Android => self.verify_android_tooling().await?, BundleFormat::Linux => self.verify_linux_tooling().await?, BundleFormat::MacOS | BundleFormat::Windows | BundleFormat::Server => {} } Ok(()) } async fn verify_toolchain_installed(&self) -> Result<()> { let toolchain_dir = self.workspace.sysroot.join("lib/rustlib"); let triple = self.triple.to_string(); // Install target using rustup. if !toolchain_dir.join(&triple).exists() { tracing::info!( "{} platform requires {} to be installed. Installing...", self.bundle, triple ); let mut child = tokio::process::Command::new("rustup") .args(["target", "add"]) .arg(&triple) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .kill_on_drop(true) .spawn()?; let stdout = tokio::io::BufReader::new(child.stdout.take().unwrap()); let stderr = tokio::io::BufReader::new(child.stderr.take().unwrap()); let mut stdout_lines = stdout.lines(); let mut stderr_lines = stderr.lines(); loop { tokio::select! { line = stdout_lines.next_line() => { match line { Ok(Some(line)) => tracing::info!("{}", line), Err(err) => tracing::error!("{}", err), Ok(_) => break, } } line = stderr_lines.next_line() => { match line { Ok(Some(line)) => tracing::info!("{}", line), Err(err) => tracing::error!("{}", err), Ok(_) => break, } } } } } // Ensure target is installed. 
if !toolchain_dir.join(&triple).exists() { bail!("Missing rust target {}", triple); } Ok(()) } async fn verify_web_tooling(&self) -> Result<()> { // Wasm bindgen let krate_bindgen_version = self.workspace .wasm_bindgen_version() .ok_or(anyhow::anyhow!( "failed to detect wasm-bindgen version, unable to proceed" ))?; WasmBindgen::verify_install(&krate_bindgen_version).await?; Ok(()) } /// Currently does nothing, but eventually we need to check that the mobile tooling is installed. /// /// For ios, this would be just aarch64-apple-ios + aarch64-apple-ios-sim, as well as xcrun and xcode-select /// /// We don't auto-install these yet since we're not doing an architecture check. We assume most users /// are running on an Apple Silicon Mac, but it would be confusing if we installed these when we actually /// should be installing the x86 versions. async fn verify_ios_tooling(&self) -> Result<()> { // open the simulator // _ = tokio::process::Command::new("open") // .arg("/Applications/Xcode.app/Contents/Developer/Applications/Simulator.app") // .output() // .await; // Now xcrun to open the device // todo: we should try and query the device list and/or parse it rather than hardcode this simulator // _ = tokio::process::Command::new("xcrun") // .args(["simctl", "boot", "83AE3067-987F-4F85-AE3D-7079EF48C967"]) // .output() // .await; // if !rustup // .installed_toolchains // .contains(&"aarch64-apple-ios".to_string()) // { // tracing::error!("You need to install aarch64-apple-ios to build for ios. Run `rustup target add aarch64-apple-ios` to install it."); // } // if !rustup // .installed_toolchains // .contains(&"aarch64-apple-ios-sim".to_string()) // { // tracing::error!("You need to install aarch64-apple-ios to build for ios. 
Run `rustup target add aarch64-apple-ios` to install it."); // } Ok(()) } /// Check if the android tooling is installed /// /// looks for the android sdk + ndk /// /// will do its best to fill in the missing bits by exploring the sdk structure /// IE will attempt to use the Java installed from android studio if possible. async fn verify_android_tooling(&self) -> Result<()> { let linker = self .workspace .android_tools()? .android_cc(&self.triple, self.min_sdk_version_or_default()); tracing::debug!("Verifying android linker: {linker:?}"); if linker.exists() { return Ok(()); } bail!( "Android linker not found at {linker:?}. Please set the `ANDROID_NDK_HOME` environment variable to the root of your NDK installation." ); } /// Ensure the right dependencies are installed for linux apps. /// This varies by distro, so we just do nothing for now. /// /// Eventually, we want to check for the prereqs for wry/tao as outlined by tauri: /// async fn verify_linux_tooling(&self) -> Result<()> { Ok(()) } /// Blow away the fingerprint for this package, forcing rustc to recompile it. /// /// This prevents rustc from using the cached version of the binary, which can cause issues /// Find workspace crates that directly depend on the given crate. /// /// Returns underscore-normalized crate names of workspace members that have `crate_name` /// as a dependency. Used for cascade detection — when a dep's public symbols change, /// its dependents need recompilation too. pub(crate) fn workspace_dependents_of(&self, crate_name: &str) -> Vec { let krates = &self.workspace.krates; // Find the NodeId for the target crate let target_nid = krates.workspace_members().find_map(|member| { if let krates::Node::Krate { id, krate, .. 
} = member { if krate.name.replace('-', "_") == crate_name { return krates.nid_for_kid(id); } } None }); let Some(target_nid) = target_nid else { return Vec::new(); }; // Use krates' direct_dependents to find reverse deps, filter to workspace members let workspace_names: HashSet = krates .workspace_members() .filter_map(|m| { if let krates::Node::Krate { krate, .. } = m { Some(krate.name.replace('-', "_")) } else { None } }) .collect(); krates .direct_dependents(target_nid) .into_iter() .filter_map(|dep| { let name = dep.krate.name.replace('-', "_"); if workspace_names.contains(&name) { Some(name) } else { None } }) .collect() } /// Compile a workspace dependency crate directly with `rustc` using its captured args. /// /// This produces an updated rlib at the same path cargo originally wrote to. /// Used during thin builds to recompile changed workspace deps before the tip crate. async fn compile_dep_crate(&self, crate_name: &str, rustc_args: &RustcArgs) -> Result<()> { let mut cmd = Command::new("rustc"); cmd.current_dir(self.workspace_dir()); cmd.env_clear(); // Skip args[0] which is the rustc binary path captured by the wrapper cmd.args(rustc_args.args[1..].iter()); // Restore the captured environment, filtering out wrapper env vars and // stale cargo jobserver vars to prevent recursive invocation and warnings. let filtered_env_keys = [ "RUSTC_WORKSPACE_WRAPPER", "RUSTC_WRAPPER", DX_RUSTC_WRAPPER_ENV_VAR, "CARGO_MAKEFLAGS", "MAKEFLAGS", ]; cmd.envs( rustc_args .envs .iter() .filter(|(k, _)| !filtered_env_keys.contains(&k.as_str())) .cloned(), ); let output = cmd.output().await?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); bail!("Failed to compile workspace dep crate '{crate_name}':\n{stderr}"); } Ok(()) } /// Find the rlib path for a workspace crate from its captured rustc args. /// /// Extracts `--out-dir` and `-C extra-filename` from the args to construct the exact /// rlib filename. 
This is important because multiple rlibs for the same crate can coexist /// in the deps directory (e.g., from different dx builds that produce different `-C metadata`), /// and globbing would return an arbitrary one. fn find_rlib_for_crate(&self, crate_name: &str, rustc_args: &RustcArgs) -> Option { // Extract --out-dir from the captured args let out_dir = rustc_args .args .iter() .zip(rustc_args.args.iter().skip(1)) .find(|(flag, _)| *flag == "--out-dir") .map(|(_, dir)| PathBuf::from(dir))?; // Extract -C extra-filename from captured args. // Cargo passes this to rustc to disambiguate output filenames via metadata hash. // Handle all forms: `-Cextra-filename=X`, `-C extra-filename=X`, and `-C` `extra-filename=X`. let extra_filename = rustc_args.args.iter().enumerate().find_map(|(i, arg)| { arg.strip_prefix("-Cextra-filename=") .map(|s| s.to_string()) .or_else(|| { // Handle `-C` followed by `extra-filename=X` as separate args if arg == "-C" { rustc_args.args.get(i + 1).and_then(|next| { next.strip_prefix("extra-filename=").map(|s| s.to_string()) }) } else { None } }) }); // If we have an exact extra-filename, construct the precise rlib path. if let Some(extra) = &extra_filename { let exact = out_dir.join(format!("lib{crate_name}{extra}.rlib")); if exact.exists() { return Some(exact); } } // Fallback: glob for lib-.rlib in the output directory. // This handles cases where -C extra-filename isn't in the captured args. // Prefer the most recently modified rlib to avoid picking up stale artifacts. 
let prefix = format!("lib{crate_name}-"); let entries = std::fs::read_dir(&out_dir).ok()?; let mut best: Option<(PathBuf, std::time::SystemTime)> = None; for entry in entries.flatten() { if let Some(name) = entry.file_name().to_str() { if name.starts_with(&prefix) && name.ends_with(".rlib") { let mtime = entry.metadata().ok()?.modified().ok()?; if best.as_ref().map_or(true, |(_, t)| mtime > *t) { best = Some((entry.path(), mtime)); } } } } if let Some((path, _)) = best { return Some(path); } None } /// with our hotpatching setup since it uses linker interception. /// /// This is sadly a hack. I think there might be other ways of busting the fingerprint (rustc wrapper?) /// but that would require relying on cargo internals. /// /// This might stop working if/when cargo stabilizes contents-based fingerprinting. /// /// `dx` compiles everything with `--target` which ends up with a structure like: /// `target///.fingerprint/-` /// /// Normally you can't rely on this structure (ie with `cargo build`) but the explicit /// target arg guarantees this will work. fn bust_fingerprint(&self, ctx: &BuildContext) -> Result<()> { if matches!(ctx.mode, BuildMode::Fat) { let fingerprint_dir = self .target_dir .join(self.triple.to_string()) .join(&self.profile) .join(".fingerprint"); // Bust fingerprints for ALL workspace member crates during Fat builds. // This ensures cargo recompiles them through RUSTC_WORKSPACE_WRAPPER // so we capture their rustc args for later thin builds. let mut busted = HashSet::new(); for member in self.workspace.krates.workspace_members() { if let krates::Node::Krate { krate, .. 
} = member { busted.insert(krate.name.as_str()); } } // split at the last `-` used to separate the hash from the name // This causes to more aggressively bust hashes for all combinations of features // and fingerprints for this package since we're just ignoring the hash if let Ok(entries) = std::fs::read_dir(&fingerprint_dir) { for entry in entries.flatten() { if let Some(fname) = entry.file_name().to_str() { if let Some((name, _)) = fname.rsplit_once('-') { if busted.contains(name) { _ = std::fs::remove_dir_all(entry.path()); } } } } } } Ok(()) } pub(crate) fn patch_cache_exe(&self, exe: &Path) -> PathBuf { match self.bundle { BundleFormat::Web => self.wasm_bindgen_wasm_output_file(), _ => exe.to_path_buf(), } } pub(crate) fn create_patch_cache(&self, exe: &Path) -> Result { Ok(HotpatchModuleCache::new( &self.patch_cache_exe(exe), &self.triple, )?) } /// Users create an index.html for their SPA if they want it /// /// We always write our wasm as main.js and main_bg.wasm /// /// In prod we run the optimizer which bundles everything together properly /// /// So their index.html needs to include main.js in the scripts otherwise nothing happens? /// /// Seems like every platform has a weird file that declares a bunch of stuff /// - web: index.html /// - ios: info.plist /// - macos: info.plist /// - linux: appimage root thing? 
/// - android: androidmanifest.xml /// /// You also might different variants of these files (staging / prod) and different flavors (eu/us) /// /// web's index.html is weird since it's not just a bundle format but also a *content* format pub(crate) fn prepare_html( &self, assets: &AssetManifest, wasm_path: &str, js_path: &str, ) -> Result { let mut html = { const DEV_DEFAULT_HTML: &str = include_str!("../../assets/web/dev.index.html"); const PROD_DEFAULT_HTML: &str = include_str!("../../assets/web/prod.index.html"); let crate_root: &Path = &self.crate_dir(); let custom_html_file = crate_root.join("index.html"); let default_html = match self.release { true => PROD_DEFAULT_HTML, false => DEV_DEFAULT_HTML, }; std::fs::read_to_string(custom_html_file).unwrap_or_else(|_| String::from(default_html)) }; // Inject any resources from the config into the html self.inject_resources(assets, &mut html)?; // Inject loading scripts if they are not already present self.inject_loading_scripts(assets, &mut html); // Replace any special placeholders in the HTML with resolved values self.replace_template_placeholders(&mut html, wasm_path, js_path); let title = self.config.web.app.title.clone(); Self::replace_or_insert_before("{app_title}", " bool { !self.release } // Inject any resources from the config into the html fn inject_resources(&self, assets: &AssetManifest, html: &mut String) -> Result<()> { use std::fmt::Write; // Collect all resources into a list of styles and scripts let resources = &self.config.web.resource; let mut style_list = resources.style.clone().unwrap_or_default(); let mut script_list = resources.script.clone().unwrap_or_default(); if self.is_dev_build() { style_list.extend(resources.dev.style.iter().cloned()); script_list.extend(resources.dev.script.iter().cloned()); } let mut head_resources = String::new(); // Add all styles to the head for style in &style_list { writeln!( &mut head_resources, "", &style.to_str().unwrap(), )?; } // Add all scripts to the head 
for script in &script_list { writeln!( &mut head_resources, "", &script.to_str().unwrap(), )?; } // Add the base path to the head if this is a debug build if self.is_dev_build() { if let Some(base_path) = &self.trimmed_base_path() { head_resources.push_str(&format_base_path_meta_element(base_path)); } } // Inject any resources from manganis into the head for asset in assets.unique_assets() { let asset_path = asset.bundled_path(); match asset.options().variant() { AssetVariant::Css(css_options) => { if css_options.preloaded() { _ = write!( head_resources, r#""# ); } if css_options.static_head() { _ = write!( head_resources, r#""# ); } } AssetVariant::Image(image_options) => { if image_options.preloaded() { _ = write!( head_resources, r#""# ); } } AssetVariant::Js(js_options) => { if js_options.preloaded() { _ = write!( head_resources, r#""# ); } if js_options.static_head() { _ = write!( head_resources, r#""# ); } } _ => {} } } // Do not preload the wasm file, because in Safari, preload as=fetch requires additional fetch() options to exactly match the network request // And if they do not match then Safari downloads the wasm file twice. 
// See https://github.com/wasm-bindgen/wasm-bindgen/blob/ac51055a4c39fa0affe02f7b63fb1d4c9b3ddfaf/crates/cli-support/src/js/mod.rs#L967 Self::replace_or_insert_before("{style_include}", " Option { let path = self.config.application.public_dir.as_ref()?; if path.as_os_str().is_empty() { return None; } Some(if path.is_absolute() { path.clone() } else { self.crate_dir().join(path) }) } pub(crate) fn path_is_in_public_dir(&self, path: &Path) -> bool { let Some(static_dir) = self.user_public_dir() else { return false; }; // Canonicalize when possible so we work with editors that use tmp files let canonical_static = dunce::canonicalize(&static_dir).unwrap_or_else(|_| static_dir.clone()); let canonical_path = dunce::canonicalize(path).unwrap_or_else(|_| path.to_path_buf()); canonical_path.starts_with(&canonical_static) } /// Get the base path from the config or None if this is not a web or server build pub(crate) fn base_path(&self) -> Option<&str> { self.base_path .as_deref() .or(self.config.web.app.base_path.as_deref()) .filter(|_| matches!(self.bundle, BundleFormat::Web | BundleFormat::Server)) } /// Get the normalized base path for the application with `/` trimmed from both ends. pub(crate) fn trimmed_base_path(&self) -> Option<&str> { self.base_path() .map(|p| p.trim_matches('/')) .filter(|p| !p.is_empty()) } /// Get the trimmed base path or `.` if no base path is set pub(crate) fn base_path_or_default(&self) -> &str { self.trimmed_base_path().unwrap_or(".") } /// Get the path to the package manifest directory pub(crate) fn package_manifest_dir(&self) -> PathBuf { self.workspace.krates[self.crate_package] .manifest_path .parent() .unwrap() .to_path_buf() .into() } /// Returns the min sdk version set in config. If not set 24 is returned as a default. 
pub(crate) fn min_sdk_version_or_default(&self) -> u32 { self.config .application .android_min_sdk_version .unwrap_or(28) } pub(crate) async fn start_simulators(&self) -> Result<()> { if self.device_name.is_some() { return Ok(()); } match self.bundle { // Boot an iOS simulator if one is not already running. // // We always choose the most recently opened simulator based on the xcrun list. // Note that simulators can be running but the simulator app itself is not open. // Calling `open::that` is always fine, even on running apps, since apps are singletons. BundleFormat::Ios => { #[derive(Deserialize, Debug)] struct XcrunListJson { // "com.apple.CoreSimulator.SimRuntime.iOS-18-4": [{}, {}, {}] devices: BTreeMap>, } #[derive(Deserialize, Debug)] struct XcrunDevice { #[serde(rename = "lastBootedAt")] last_booted_at: Option, udid: String, name: String, state: String, } let xcrun_list = Command::new("xcrun") .arg("simctl") .arg("list") .arg("-j") .output() .await?; let as_str = String::from_utf8_lossy(&xcrun_list.stdout); let xcrun_list_json = serde_json::from_str::(as_str.trim()); if let Ok(xcrun_list_json) = xcrun_list_json { if xcrun_list_json.devices.is_empty() { tracing::warn!( "No iOS sdks installed found. Please install the iOS SDK in Xcode." 
); } if let Some((_rt, devices)) = xcrun_list_json.devices.iter().next() { if devices.iter().all(|device| device.state != "Booted") { let last_booted = devices .iter() .max_by_key(|device| match device.last_booted_at { Some(ref last_booted) => last_booted, None => "2000-01-01T01:01:01Z", }); if let Some(device) = last_booted { tracing::info!("Booting iOS simulator: \"{}\"", device.name); Command::new("xcrun") .arg("simctl") .arg("boot") .arg(&device.udid) .output() .await?; } } } } let path_to_xcode = Command::new("xcode-select") .arg("--print-path") .output() .await?; let path_to_xcode: PathBuf = String::from_utf8_lossy(&path_to_xcode.stdout) .as_ref() .trim() .into(); let path_to_sim = path_to_xcode.join("Applications").join("Simulator.app"); open::that_detached(path_to_sim)?; } BundleFormat::Android => { let tools = self.workspace.android_tools()?; tokio::spawn(async move { let emulator = tools.emulator(); let avds = Command::new(&emulator) .arg("-list-avds") .output() .await .unwrap(); let avds = String::from_utf8_lossy(&avds.stdout); let avd = avds.trim().lines().next().map(|s| s.trim().to_string()); if let Some(avd) = avd { tracing::info!("Booting Android emulator: \"{avd}\""); Command::new(&emulator) .arg("-avd") .arg(avd) .args(["-netdelay", "none", "-netspeed", "full"]) .stdout(std::process::Stdio::null()) // prevent accumulating huge amounts of mem usage .stderr(std::process::Stdio::null()) // prevent accumulating huge amounts of mem usage .output() .await .unwrap(); } else { tracing::warn!("No Android emulators found. Please create one using `emulator -avd `"); } }); } _ => { // nothing - maybe on the web we should open the browser? } }; Ok(()) } /// Assemble a series of `--config key=value` arguments for the build command. /// /// This adds adhoc profiles that dx uses to isolate builds from each other. 
Normally if you ran /// `cargo build --feature desktop` and `cargo build --feature server`, then both binaries get /// the same name and overwrite each other, causing thrashing and locking issues. /// /// By creating adhoc profiles, we can ensure that each build is isolated and doesn't interfere with each other. /// /// The user can also define custom profiles in their `Cargo.toml` file, which will be used instead /// of the adhoc profiles. /// /// The names of the profiles are: /// - web-dev /// - web-release /// - desktop-dev /// - desktop-release /// - server-dev /// - server-release /// - ios-dev /// - ios-release /// - android-dev /// - android-release /// - liveview-dev /// - liveview-release /// /// Note how every platform gets its own profile, and each platform has a dev and release profile. fn profile_args(&self) -> Vec { // Always disable stripping so symbols still exist for the asset system. We will apply strip manually // after assets are built let profile = self.profile.as_str(); let mut args = Vec::new(); args.push(format!(r#"profile.{profile}.strip=false"#)); // If the user defined the profile in the Cargo.toml, we don't need to add it to our adhoc list if !self .workspace .cargo_toml .profile .custom .contains_key(&self.profile) { // Otherwise, we need to add the profile arguments to make it adhoc let inherits = if self.release { "release" } else { "dev" }; // Add the profile definition first. args.push(format!(r#"profile.{profile}.inherits="{inherits}""#)); // The default dioxus experience is to lightly optimize the web build, both in debug and release // Note that typically in release builds, you would strip debuginfo, but we actually choose to do // that with wasm-opt tooling instead. 
if matches!(self.bundle, BundleFormat::Web) { if self.release { args.push(format!(r#"profile.{profile}.opt-level="s""#)); } if self.wasm_split { args.push(format!(r#"profile.{profile}.lto=true"#)); args.push(format!(r#"profile.{profile}.debug=true"#)); } } } // Prepend --config to each argument args.into_iter() .flat_map(|arg| ["--config".to_string(), arg]) .collect() } pub async fn codesign_apple(&self, ctx: &BuildContext) -> Result<()> { ctx.status_codesigning(); // We don't want to drop the entitlements file, until the end of the block, so we hoist it to this temporary. let mut _saved_entitlements = None; let mut app_dev_name = self.apple_team_id.clone(); if app_dev_name.is_none() { app_dev_name = Some(Self::auto_provision_signing_name().await.context( "Failed to automatically provision signing name for Apple codesigning.", )?); } let mut entitlements_file = self.apple_entitlements.clone(); let mut provisioning_profile_path = None; if entitlements_file.is_none() { let bundle_id = self.bundle_identifier(); let (entitlements_xml, profile_path) = Self::auto_provision_entitlements(&bundle_id) .await .context("Failed to auto-provision entitlements for Apple codesigning.")?; // Enrich with entitlements from Dioxus.toml config let entitlements_xml = self.enrich_entitlements_from_config(entitlements_xml)?; let entitlements_temp_file = tempfile::NamedTempFile::new()?; std::fs::write(entitlements_temp_file.path(), entitlements_xml)?; entitlements_file = Some(entitlements_temp_file.path().to_path_buf()); provisioning_profile_path = Some(profile_path); _saved_entitlements = Some(entitlements_temp_file); } let entitlements_file = entitlements_file.as_ref().context( "No entitlements file provided and could not provision entitlements to sign app.", )?; let app_dev_name = app_dev_name.as_ref().context( "No Apple Development signing name provided and could not auto-provision one.", )?; tracing::debug!( "Codesigning Apple app with entitlements: {} and dev name: {}", 
entitlements_file.display(), app_dev_name ); // determine the target exe - the server and macos bundles are different let target_exe = match self.bundle { BundleFormat::MacOS => self.root_dir(), BundleFormat::Ios => self.root_dir(), BundleFormat::Server => self.main_exe(), _ => bail!("Codesigning is only supported for MacOS and iOS bundles"), }; // iOS devices require the provisioning profile to be embedded in the .app bundle if self.bundle == BundleFormat::Ios { if let Some(profile_path) = &provisioning_profile_path { let dest = target_exe.join("embedded.mobileprovision"); std::fs::copy(profile_path, &dest) .context("Failed to embed provisioning profile into .app bundle")?; } } // codesign the app let output = Command::new("codesign") .args([ "--force", "--entitlements", entitlements_file.to_str().unwrap(), "--sign", app_dev_name, ]) .arg(target_exe) .output() .await .context("Failed to codesign the app - is `codesign` in your path?")?; if !output.status.success() { bail!( "Failed to codesign the app: {}", String::from_utf8(output.stderr).unwrap_or_default() ); } Ok(()) } async fn auto_provision_signing_name() -> Result { let identities = Command::new("security") .args(["find-identity", "-v", "-p", "codesigning"]) .output() .await .context("Failed to run `security find-identity -v -p codesigning` - is `security` in your path?") .map(|e| { String::from_utf8(e.stdout) .context("Failed to parse `security find-identity -v -p codesigning`") })??; // Parsing this: // 1231231231231asdasdads123123 "Apple Development: foo@gmail.com (XYZYZY)" let app_dev_name = regex::Regex::new(r#""Apple Development: (.+)""#) .unwrap() .captures(&identities) .and_then(|caps| caps.get(1)) .map(|m| m.as_str()) .context( "Failed to find Apple Development in `security find-identity -v -p codesigning`", )?; Ok(app_dev_name.to_string()) } /// Enrich auto-provisioned entitlements XML with config from Dioxus.toml. 
/// /// Injects entitlements from `[ios.entitlements]` or `[macos.entitlements]` sections /// and associated domains from `[deep_links]` into the base entitlements XML. fn enrich_entitlements_from_config(&self, base_xml: String) -> Result { let mut extra_entries = String::new(); match self.bundle { BundleFormat::Ios => { let ent = &self.config.ios.entitlements; // Associated domains (from deep_links.hosts + ios.entitlements.associated-domains) let mapper = super::manifest_mapper::ManifestMapper::from_config( &self.config.permissions, &self.config.deep_links, &self.config.background, &self.config.android, &self.config.ios, &self.config.macos, ); let mut domains: Vec = mapper.ios_associated_domains; domains.extend(ent.associated_domains.clone()); domains.dedup(); if !domains.is_empty() { extra_entries.push_str( " com.apple.developer.associated-domains\n \n", ); for domain in &domains { extra_entries.push_str(&format!(" {domain}\n")); } extra_entries.push_str(" \n"); } // App groups if !ent.app_groups.is_empty() { extra_entries.push_str( " com.apple.security.application-groups\n \n", ); for group in &ent.app_groups { extra_entries.push_str(&format!(" {group}\n")); } extra_entries.push_str(" \n"); } // APS environment (push notifications) if let Some(env) = &ent.aps_environment { extra_entries.push_str(&format!( " aps-environment\n {env}\n" )); } // iCloud if ent.icloud { extra_entries.push_str( " com.apple.developer.icloud-container-identifiers\n \n\ com.apple.developer.icloud-services\n \n CloudDocuments\n \n" ); } // Keychain access groups // (base entitlements already include one from provisioning profile, only add extras) if !ent.keychain_access_groups.is_empty() { extra_entries.push_str(" keychain-access-groups\n \n"); for group in &ent.keychain_access_groups { extra_entries.push_str(&format!(" {group}\n")); } extra_entries.push_str(" \n"); } // Apple Pay if ent.apple_pay { extra_entries.push_str( " com.apple.developer.in-app-payments\n \n merchant.*\n \n" ); } 
// HealthKit if ent.healthkit { extra_entries .push_str(" com.apple.developer.healthkit\n \n"); } // HomeKit if ent.homekit { extra_entries .push_str(" com.apple.developer.homekit\n \n"); } // Additional entitlements from the flat map for (key, value) in &ent.additional { extra_entries.push_str(&format!( " {key}\n {}\n", value_to_plist_xml(value, 1) )); } // Raw entitlements XML if let Some(raw) = &self.config.ios.raw.entitlements { extra_entries.push_str(raw); extra_entries.push('\n'); } } BundleFormat::MacOS => { let ent = &self.config.macos.entitlements; // App Sandbox if let Some(v) = ent.app_sandbox { extra_entries.push_str(&format!( " com.apple.security.app-sandbox\n <{v}/>\n" )); } // File access if let Some(true) = ent.files_user_selected { extra_entries.push_str( " com.apple.security.files.user-selected.read-write\n \n" ); } if let Some(true) = ent.files_user_selected_readonly { extra_entries.push_str( " com.apple.security.files.user-selected.read-only\n \n" ); } // Network if let Some(true) = ent.network_client { extra_entries.push_str( " com.apple.security.network.client\n \n", ); } if let Some(true) = ent.network_server { extra_entries.push_str( " com.apple.security.network.server\n \n", ); } // Device access if let Some(true) = ent.camera { extra_entries .push_str(" com.apple.security.device.camera\n \n"); } if let Some(true) = ent.microphone { extra_entries.push_str( " com.apple.security.device.microphone\n \n", ); } if let Some(true) = ent.usb { extra_entries .push_str(" com.apple.security.device.usb\n \n"); } if let Some(true) = ent.bluetooth { extra_entries.push_str( " com.apple.security.device.bluetooth\n \n", ); } if let Some(true) = ent.print { extra_entries .push_str(" com.apple.security.print\n \n"); } // Personal information if let Some(true) = ent.location { extra_entries.push_str( " com.apple.security.personal-information.location\n \n" ); } if let Some(true) = ent.addressbook { extra_entries.push_str( " 
com.apple.security.personal-information.addressbook\n \n" ); } if let Some(true) = ent.calendars { extra_entries.push_str( " com.apple.security.personal-information.calendars\n \n" ); } // Runtime exceptions if let Some(true) = ent.disable_library_validation { extra_entries.push_str( " com.apple.security.cs.disable-library-validation\n \n" ); } if let Some(true) = ent.allow_jit { extra_entries .push_str(" com.apple.security.cs.allow-jit\n \n"); } if let Some(true) = ent.allow_unsigned_executable_memory { extra_entries.push_str( " com.apple.security.cs.allow-unsigned-executable-memory\n \n" ); } // Additional entitlements from the flat map for (key, value) in &ent.additional { extra_entries.push_str(&format!( " {key}\n {}\n", value_to_plist_xml(value, 1) )); } // Raw entitlements XML if let Some(raw) = &self.config.macos.raw.entitlements { extra_entries.push_str(raw); extra_entries.push('\n'); } } _ => {} } if extra_entries.is_empty() { return Ok(base_xml); } // Insert before closing if let Some(pos) = base_xml.rfind("") { let mut enriched = base_xml[..pos].to_string(); enriched.push_str(&extra_entries); enriched.push_str(&base_xml[pos..]); Ok(enriched) } else { tracing::warn!("Could not find in entitlements XML to inject config entries"); Ok(base_xml) } } async fn auto_provision_entitlements(bundle_id: &str) -> Result<(String, PathBuf)> { const CODESIGN_ERROR: &str = r#"This is likely because you haven't - Created a provisioning profile before - Accepted the Apple Developer Program License Agreement The agreement changes frequently and might need to be accepted again. To accept the agreement, go to https://developer.apple.com/account To create a provisioning profile, follow the instructions here: https://developer.apple.com/documentation/xcode/sharing-your-teams-signing-certificates"#; // Check the xcode 16 location first let mut profiles_folder = dirs::home_dir() .context("Your machine has no home-dir")? 
.join("Library/Developer/Xcode/UserData/Provisioning Profiles"); // If it doesn't exist, check the old location if !profiles_folder.exists() { profiles_folder = dirs::home_dir() .context("Your machine has no home-dir")? .join("Library/MobileDevice/Provisioning Profiles"); } if !profiles_folder.exists() || profiles_folder.read_dir()?.next().is_none() { tracing::error!( r#"No provisioning profiles found when trying to codesign the app. We checked the folders: - XCode16: ~/Library/Developer/Xcode/UserData/Provisioning Profiles - XCode15: ~/Library/MobileDevice/Provisioning Profiles {CODESIGN_ERROR} "# ) } #[derive(serde::Deserialize, Debug)] struct ProvisioningProfile { #[serde(rename = "TeamIdentifier")] team_identifier: Vec, #[serde(rename = "Entitlements")] entitlements: ProfileEntitlements, #[allow(dead_code)] #[serde(rename = "ApplicationIdentifierPrefix")] application_identifier_prefix: Vec, #[serde(rename = "ProvisionedDevices", default)] provisioned_devices: Vec, } #[derive(serde::Deserialize, Debug)] struct ProfileEntitlements { #[serde(rename = "application-identifier")] application_identifier: String, #[serde(rename = "keychain-access-groups")] keychain_access_groups: Vec, } // The .mobileprovision file has some random binary thrown into it, but it's still basically a plist // Let's use the plist markers to find the start and end of the plist fn cut_plist(bytes: &[u8], byte_match: &[u8]) -> Option { bytes .windows(byte_match.len()) .enumerate() .rev() .find(|(_, slice)| *slice == byte_match) .map(|(i, _)| i + byte_match.len()) } fn parse_profile(path: &Path) -> Result { let bytes = std::fs::read(path)?; let cut1 = cut_plist(&bytes, b""#.as_bytes()) .context("Failed to parse .mobileprovision file")?; let sub_bytes = &bytes[(cut1 - 6)..cut2]; plist::from_bytes(sub_bytes).context("Failed to parse .mobileprovision file") } /// Check if a provisioning profile's application-identifier matches the given bundle ID. 
/// The app ID is in the format "TEAMID.com.example.app" or "TEAMID.*" for wildcard profiles. fn profile_matches_bundle_id(app_identifier: &str, bundle_id: &str) -> bool { // Strip the team ID prefix (everything before and including the first dot) let app_id_suffix = match app_identifier.split_once('.') { Some((_, suffix)) => suffix, None => return false, }; // Wildcard profile matches everything if app_id_suffix == "*" { return true; } // Check exact match if app_id_suffix == bundle_id { return true; } // Check wildcard prefix match (e.g. "com.example.*" matches "com.example.app") if let Some(prefix) = app_id_suffix.strip_suffix(".*") { return bundle_id.starts_with(prefix); } false } // Collect all provisioning profiles and find the best match for the bundle ID. // Priority: exact app ID match > more provisioned devices > newer file. let mut best_match: Option<(PathBuf, ProvisioningProfile, bool, usize)> = None; for entry in profiles_folder.read_dir()?.flatten() { let path = entry.path(); let is_mobileprovision = path .extension() .map(|e| e == "mobileprovision") .unwrap_or(false); if !is_mobileprovision { continue; } let profile = match parse_profile(&path) { Ok(p) => p, Err(e) => { tracing::debug!("Skipping profile {}: {e}", path.display()); continue; } }; let app_id = &profile.entitlements.application_identifier; if !profile_matches_bundle_id(app_id, bundle_id) { tracing::debug!( "Skipping profile {} (app ID {app_id} does not match bundle ID {bundle_id})", path.display() ); continue; } let is_exact = !app_id.ends_with(".*") && !app_id.ends_with("*"); let num_devices = profile.provisioned_devices.len(); tracing::debug!( "Found matching profile {} (app ID: {app_id}, exact: {is_exact}, devices: {num_devices})", path.display() ); // Prefer: exact match > more provisioned devices (newer profiles have more devices) let dominated = match &best_match { Some((_, _, prev_exact, prev_devices)) => { if *prev_exact && !is_exact { true // existing exact match beats wildcard 
} else if is_exact && !*prev_exact { false // new exact match beats existing wildcard } else { // same specificity — prefer more provisioned devices num_devices <= *prev_devices } } None => false, }; if !dominated { best_match = Some((path, profile, is_exact, num_devices)); } } let (profile_path, mbfile) = match best_match { Some((path, profile, _, _)) => { tracing::info!( "Using provisioning profile: {} (app ID: {})", path.display(), profile.entitlements.application_identifier ); (path, profile) } None => { bail!( "No provisioning profile found matching bundle identifier \"{bundle_id}\".\n\ \n\ Your provisioning profiles are in: {}\n\ \n\ To fix this, either:\n \ 1. Set `bundle.identifier` in Dioxus.toml to match an existing profile\n \ 2. Create a wildcard provisioning profile in your Apple Developer account\n \ 3. Open the project in Xcode and let it auto-provision\n\ \n\ {CODESIGN_ERROR}", profiles_folder.display() ); } }; let entitlements_xml = format!( r#" application-identifier {APPLICATION_IDENTIFIER} keychain-access-groups {APP_ID_ACCESS_GROUP}.* get-task-allow com.apple.developer.team-identifier {TEAM_IDENTIFIER} "#, APPLICATION_IDENTIFIER = mbfile.entitlements.application_identifier, APP_ID_ACCESS_GROUP = mbfile.entitlements.keychain_access_groups[0], TEAM_IDENTIFIER = mbfile.team_identifier[0], ); Ok((entitlements_xml, profile_path)) } async fn write_app_manifest(&self, assets: &AssetManifest) -> Result<()> { let manifest = AppManifest { assets: assets.clone(), cli_version: crate::VERSION.to_string(), rust_version: self.workspace.rustc_version.clone(), }; let manifest_path = self.app_manifest(); std::fs::write(&manifest_path, serde_json::to_string_pretty(&manifest)?)?; Ok(()) } /// Log the build duration and some metadata about the build, saving a telemetry event. 
fn record_build_duration(&self, time_start: SystemTime, ctx: &BuildContext) {
    // Calculate some final metadata for logging
    let time_taken = SystemTime::now()
        .duration_since(time_start)
        .map(|d| d.as_millis())
        .unwrap_or_default();
    tracing::debug!(
        telemetry = %serde_json::json!({
            "event": "build_and_bundle_complete",
            "time_taken": time_taken,
            "mode": match ctx.mode {
                BuildMode::Base { .. } => "base",
                BuildMode::Fat => "fat",
                BuildMode::Thin { .. } => "thin",
            },
            // NOTE(review): "blah" looks like a leftover debug field — confirm whether it should ship.
            "blah": 123,
            "triple": self.triple.to_string(),
            "format": self.bundle.to_string(),
            "num_dependencies": self.workspace.krates.len(),
        }),
        "Build completed in {time_taken}ms",
    );
}
}

/// Generate plist XML entries from a HashMap of key-value pairs
///
/// Converts a HashMap like `{ "UIBackgroundModes" = ["location", "fetch"] }` to plist XML:
/// ```xml
/// UIBackgroundModes
///
/// location
/// fetch
///
/// ```
fn generate_plist_entries(plist: &std::collections::HashMap) -> String {
    let mut output = String::new();
    for (key, value) in plist {
        // NOTE(review): the plist tags in these format strings appear stripped by extraction.
        output.push_str(&format!("\t{}\n", key));
        output.push_str(&value_to_plist_xml(value, 1));
    }
    output
}

/// Convert a serde_json::Value to plist XML format
fn value_to_plist_xml(value: &serde_json::Value, indent: usize) -> String {
    // One tab per nesting level to keep the emitted plist readable.
    let tabs = "\t".repeat(indent);
    match value {
        serde_json::Value::String(s) => format!("{}{}\n", tabs, s),
        serde_json::Value::Bool(b) => {
            if *b {
                format!("{}\n", tabs)
            } else {
                format!("{}\n", tabs)
            }
        }
        serde_json::Value::Number(n) => {
            // Integers and floats map to different plist element types.
            if n.is_i64() {
                format!("{}{}\n", tabs, n)
            } else {
                format!("{}{}\n", tabs, n)
            }
        }
        serde_json::Value::Array(arr) => {
            let mut output = format!("{}\n", tabs);
            for item in arr {
                output.push_str(&value_to_plist_xml(item, indent + 1));
            }
            output.push_str(&format!("{}\n", tabs));
            output
        }
        serde_json::Value::Object(obj) => {
            let mut output = format!("{}\n", tabs);
            for (k, v) in obj {
                output.push_str(&format!("{}\t{}\n", tabs, k));
                output.push_str(&value_to_plist_xml(v, indent + 1));
            }
            output.push_str(&format!("{}\n", tabs));
            output
        }
        // Null has no plist representation here; emit nothing.
        serde_json::Value::Null => String::new(),
    }
}

================================================
FILE: packages/cli/src/build/tools.rs
================================================

use crate::Result;
use anyhow::Context;
use itertools::Itertools;
use std::{path::PathBuf, sync::Arc};
use target_lexicon::{
    Aarch64Architecture, Architecture, ArmArchitecture, Triple, X86_32Architecture,
};
use tokio::process::Command;

/// The tools for Android (ndk, sdk, etc)
///
///
#[derive(Debug, Clone)]
pub(crate) struct AndroidTools {
    // NOTE(review): generic parameters here (e.g. `Option<PathBuf>`) appear stripped by extraction.
    pub(crate) sdk: Option,
    pub(crate) ndk: PathBuf,
    pub(crate) adb: PathBuf,
    pub(crate) java_home: Option,
}

pub fn get_android_tools() -> Option> {
    // We check for SDK first since users might install Android Studio and then install the SDK
    // After that they might install the NDK, so the SDK drives the source of truth.
    let sdk = var_or_debug("ANDROID_SDK_ROOT")
        .or_else(|| var_or_debug("ANDROID_SDK"))
        .or_else(|| var_or_debug("ANDROID_HOME"));

    // Check the ndk. We look for users's overrides first and then look into the SDK.
    // Sometimes users set only the NDK (especially if they're somewhat advanced) so we need to look for it manually
    //
    // Might look like this, typically under "sdk":
    // "/Users/jonkelley/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android24-clang"
    let ndk = var_or_debug("NDK_HOME")
        .or_else(|| var_or_debug("ANDROID_NDK_HOME"))
        .or_else(|| {
            // Look for the most recent NDK in the event the user has installed multiple NDK
            // Eventually we might need to drive this from Dioxus.toml
            let sdk = sdk.as_ref()?;
            let ndk_dir = sdk.join("ndk").read_dir().ok()?;
            ndk_dir
                .flatten()
                .map(|dir| (dir.file_name(), dir.path()))
                .sorted()
                .next_back()
                .map(|(_, path)| path.to_path_buf())
        })?;

    // Look for ADB in the SDK.
// If it's not there we'll use `adb` from the PATH
let adb = sdk
    .as_ref()
    .and_then(|sdk| {
        let tools = sdk.join("platform-tools");
        if tools.join("adb").exists() {
            return Some(tools.join("adb"));
        }
        // Windows ships the executable with an .exe suffix.
        if tools.join("adb.exe").exists() {
            return Some(tools.join("adb.exe"));
        }
        None
    })
    .unwrap_or_else(|| PathBuf::from("adb"));

// https://stackoverflow.com/questions/71381050/java-home-is-set-to-an-invalid-directory-android-studio-flutter
// always respect the user's JAVA_HOME env var above all other options
//
// we only attempt autodetection if java_home is not set
//
// this is a better fallback than falling onto the users' system java home since many users might
// not even know which java that is - they just know they have android studio installed
let java_home = std::env::var_os("JAVA_HOME")
    .map(PathBuf::from)
    .or_else(|| {
        // Attempt to autodetect java home from the android studio path or jdk path on macos
        #[cfg(target_os = "macos")]
        {
            let jbr_home =
                PathBuf::from("/Applications/Android Studio.app/Contents/jbr/Contents/Home/");
            if jbr_home.exists() {
                return Some(jbr_home);
            }
            let jre_home =
                PathBuf::from("/Applications/Android Studio.app/Contents/jre/Contents/Home");
            if jre_home.exists() {
                return Some(jre_home);
            }
            let jdk_home =
                PathBuf::from("/Library/Java/JavaVirtualMachines/openjdk.jdk/Contents/Home/");
            if jdk_home.exists() {
                return Some(jdk_home);
            }
        }
        #[cfg(target_os = "windows")]
        {
            let jbr_home = PathBuf::from("C:\\Program Files\\Android\\Android Studio\\jbr");
            if jbr_home.exists() {
                return Some(jbr_home);
            }
        }
        // todo(jon): how do we detect java home on linux?
        #[cfg(target_os = "linux")]
        {
            let jbr_home = PathBuf::from("/usr/lib/jvm/java-11-openjdk-amd64");
            if jbr_home.exists() {
                return Some(jbr_home);
            }
        }
        None
    });

Some(Arc::new(AndroidTools {
    ndk,
    adb,
    java_home,
    sdk,
}))
}

impl AndroidTools {
    /// Directory containing the NDK's prebuilt LLVM toolchain binaries for the host OS.
    pub(crate) fn android_tools_dir(&self) -> PathBuf {
        let prebuilt = self.ndk.join("toolchains").join("llvm").join("prebuilt");
        if cfg!(target_os = "macos") {
            // for whatever reason, even on aarch64 macos, the linker is under darwin-x86_64
            return prebuilt.join("darwin-x86_64").join("bin");
        }
        if cfg!(target_os = "linux") {
            return prebuilt.join("linux-x86_64").join("bin");
        }
        if cfg!(target_os = "windows") {
            return prebuilt.join("windows-x86_64").join("bin");
        }
        // Otherwise return the first entry in the prebuilt directory
        prebuilt
            .read_dir()
            .expect("Failed to read android toolchains directory")
            .next()
            .expect("Failed to find android toolchains directory")
            .expect("Failed to read android toolchain file")
            .path()
    }

    /// Return the location of the clang toolchain for the given target triple.
    ///
    /// Note that we use clang:
    /// "~/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android24-clang"
    ///
    /// But if we needed the linker, we would use:
    /// "~/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/bin/ld"
    ///
    /// However, for our purposes, we only go through the cc driver and not the linker directly.
pub(crate) fn android_cc(&self, triple: &Triple, sdk_version: u32) -> PathBuf {
    // Windows NDK clang wrappers are .cmd batch files rather than bare executables.
    let suffix = if cfg!(target_os = "windows") {
        ".cmd"
    } else {
        ""
    };
    // 32-bit ARM uses a different clang target name than the Rust triple spelling.
    let target = match triple.architecture {
        Architecture::Arm(_) => "armv7a-linux-androideabi",
        _ => &triple.to_string(),
    };
    self.android_tools_dir()
        .join(format!("{}{}-clang{}", target, sdk_version, suffix))
}

pub(crate) fn sysroot(&self) -> PathBuf {
    // The sysroot is usually located in the NDK under:
    // "~/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/sysroot"
    // or similar, depending on the platform.
    self.android_tools_dir().parent().unwrap().join("sysroot")
}

pub(crate) fn sdk(&self) -> PathBuf {
    // /Users/jonathankelley/Library/Android/sdk/ndk/25.2/... (25.2 is the ndk here)
    // /Users/jonathankelley/Library/Android/sdk/
    // Fall back to deriving the SDK root from the NDK path (two levels up).
    self.sdk
        .clone()
        .unwrap_or_else(|| self.ndk.parent().unwrap().parent().unwrap().to_path_buf())
}

pub(crate) fn emulator(&self) -> PathBuf {
    self.sdk().join("emulator").join("emulator")
}

pub(crate) fn clang_folder(&self) -> PathBuf {
    // The clang folder is usually located in the NDK under:
    // "~/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/lib/clang/"
    // or similar, depending on the platform.
    self.android_tools_dir()
        .parent()
        .unwrap()
        .join("lib")
        .join("clang")
}

pub(crate) fn ranlib(&self) -> PathBuf {
    self.android_tools_dir().join("llvm-ranlib")
}

pub(crate) fn ar_path(&self) -> PathBuf {
    self.android_tools_dir().join("llvm-ar")
}

pub(crate) fn target_cc(&self) -> PathBuf {
    self.android_tools_dir().join("clang")
}

pub(crate) fn target_cxx(&self) -> PathBuf {
    self.android_tools_dir().join("clang++")
}

pub(crate) fn java_home(&self) -> Option {
    self.java_home.clone()
}

/// Map a target triple to the Android jniLibs ABI directory name.
pub(crate) fn android_jnilib(triple: &Triple) -> &'static str {
    use target_lexicon::Architecture;
    match triple.architecture {
        Architecture::Arm(_) => "armeabi-v7a",
        Architecture::Aarch64(_) => "arm64-v8a",
        Architecture::X86_32(_) => "x86",
        Architecture::X86_64 => "x86_64",
        _ => unimplemented!("Unsupported architecture"),
    }
}

pub(crate) async fn autodetect_android_device_triple(&self) -> Triple {
    // Use the host's triple and then convert field by field
    // ie, the "best" emulator for an m1 mac would be: "aarch64-linux-android"
    // - We assume android is always "linux"
    // - We try to match the architecture unless otherwise specified. This is because
    //   emulators that match the host arch are usually faster.
    let mut triple = "aarch64-linux-android".parse::().unwrap();

    // TODO: Wire this up with --device flag.
// (add `-s serial`` flag before `shell` arg)
// Ask the running device/emulator for its machine architecture via adb.
let output = Command::new(&self.adb)
    .arg("shell")
    .arg("uname")
    .arg("-m")
    .output()
    .await
    .map(|out| String::from_utf8(out.stdout));

match output {
    Ok(Ok(out)) => match out.trim() {
        "armv7l" | "armv8l" => {
            triple.architecture = Architecture::Arm(ArmArchitecture::Arm)
        }
        "aarch64" => {
            triple.architecture = Architecture::Aarch64(Aarch64Architecture::Aarch64)
        }
        "i386" => triple.architecture = Architecture::X86_32(X86_32Architecture::I386),
        "x86_64" => {
            triple.architecture = Architecture::X86_64;
        }
        "" => {
            tracing::debug!("No device running - probably waiting for emulator");
        }
        other => {
            tracing::debug!("Unknown architecture from adb: {other}");
        }
    },
    Ok(Err(err)) => {
        tracing::debug!("Failed to parse adb output: {err}");
    }
    Err(err) => {
        tracing::debug!("ADB command failed: {:?}", err);
    }
};

// On any failure we keep the aarch64 default rather than erroring out.
triple
}

pub(crate) fn libcpp_shared(&self, triple: &Triple) -> PathBuf {
    // The libc++_shared.so is usually located in the sysroot under:
    // "~/Library/Android/sdk/ndk/25.2.9519653/toolchains/llvm/prebuilt/darwin-x86_64/sysroot/usr/lib//libc++_shared.so"
    // or similar, depending on the platform.
    self.sysroot()
        .join("usr")
        .join("lib")
        .join(Self::sysroot_target(&triple.to_string()))
        .join("libc++_shared.so")
}

/// Translate a Rust target name into the NDK sysroot directory name.
pub(crate) fn sysroot_target(rust_target: &str) -> &str {
    (match rust_target {
        "armv7-linux-androideabi" => "arm-linux-androideabi",
        _ => rust_target,
    }) as _
}

/// The prebuilt OpenSSL archive embedded into the CLI binary at compile time.
pub(crate) fn openssl_prebuilt_aar() -> &'static [u8] {
    include_bytes!("../../assets/android/prebuilt/openssl-1.1.1q-beta-1.tar.gz")
}

pub(crate) fn openssl_prebuilt_dest() -> PathBuf {
    crate::Workspace::dioxus_data_dir()
        .join("prebuilt")
        .join("openssl-1.1.1q-beta-1")
}

pub(crate) fn openssl_lib_dir(arch: &Triple) -> PathBuf {
    let libs_dir = Self::openssl_prebuilt_dest().join("ssl").join("libs");
    match arch.architecture {
        Architecture::Arm(_) => libs_dir.join("android.armeabi-v7a"),
        Architecture::Aarch64(_) => libs_dir.join("android.arm64-v8a"),
        Architecture::X86_32(_) => libs_dir.join("android.x86"),
        Architecture::X86_64 => libs_dir.join("android.x86_64"),
        _ => libs_dir.join("android.arm64-v8a"), // Default to arm64-v8a
    }
}

pub(crate) fn openssl_include_dir() -> PathBuf {
    Self::openssl_prebuilt_dest().join("ssl").join("include")
}

/// Unzip the prebuilt OpenSSL AAR file into the `.dx/prebuilt/openssl-` directory
pub(crate) fn unpack_prebuilt_openssl() -> Result<()> {
    let raw_aar = AndroidTools::openssl_prebuilt_aar();
    let aar_dest = AndroidTools::openssl_prebuilt_dest();
    // Idempotent: skip extraction if the destination already exists.
    if aar_dest.exists() {
        tracing::trace!("Prebuilt OpenSSL already exists at {:?}", aar_dest);
        return Ok(());
    }
    std::fs::create_dir_all(aar_dest.parent().context("no parent for aar")?)
        .context("failed to create prebuilt OpenSSL directory")?;
    // Unpack the entire tar.gz file into the destination directory
    let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(raw_aar as &[u8]));
    archive
        .unpack(aar_dest.parent().context("no parent for aar dest")?)
        .context("failed to unpack prebuilt OpenSSL archive")?;
    tracing::debug!("Unpacked prebuilt OpenSSL to {:?}", aar_dest);
    Ok(())
}
}

/// Read an env var as a PathBuf, tracing when it is unset.
fn var_or_debug(name: &str) -> Option {
    use std::env::var;
    var(name)
        .inspect_err(|_| tracing::trace!("{name} not set"))
        .ok()
        .map(PathBuf::from)
}

================================================
FILE: packages/cli/src/bundle_utils.rs
================================================

use crate::{
    config::BundleConfig, CustomSignCommandSettings, DebianSettings, MacOsSettings,
    NSISInstallerMode, NsisSettings, PackageType, WebviewInstallMode, WindowsSettings,
    WixSettings,
};

// NOTE(review): the `From` source types in this file (e.g. `impl From<NsisSettings> for ...`)
// appear to have had their generic parameters stripped by extraction.
impl From for tauri_bundler::NsisSettings {
    fn from(val: NsisSettings) -> Self {
        tauri_bundler::NsisSettings {
            header_image: val.header_image,
            sidebar_image: val.sidebar_image,
            installer_icon: val.installer_icon,
            install_mode: val.install_mode.into(),
            languages: val.languages,
            display_language_selector: val.display_language_selector,
            custom_language_files: None,
            template: None,
            compression: tauri_utils::config::NsisCompression::None,
            start_menu_folder: val.start_menu_folder,
            installer_hooks: val.installer_hooks,
            minimum_webview2_version: val.minimum_webview2_version,
        }
    }
}

impl From for tauri_bundler::BundleSettings {
    fn from(val: BundleConfig) -> Self {
        tauri_bundler::BundleSettings {
            identifier: val.identifier,
            publisher: val.publisher,
            icon: val.icon,
            resources: val.resources,
            copyright: val.copyright,
            // Category strings that fail to parse are silently dropped.
            category: val.category.and_then(|c| c.parse().ok()),
            short_description: val.short_description,
            long_description: val.long_description,
            external_bin: val.external_bin,
            deb: val.deb.map(Into::into).unwrap_or_default(),
            macos: val.macos.map(Into::into).unwrap_or_default(),
            windows: val.windows.map(Into::into).unwrap_or_default(),
            ..Default::default()
        }
    }
}

impl From for tauri_bundler::DebianSettings {
    fn from(val: DebianSettings) -> Self {
        tauri_bundler::DebianSettings {
            depends: val.depends,
            files: val.files,
            desktop_template: val.desktop_template,
            provides:
            val.provides,
            conflicts: val.conflicts,
            replaces: val.replaces,
            section: val.section,
            priority: val.priority,
            changelog: val.changelog,
            pre_install_script: val.pre_install_script,
            post_install_script: val.post_install_script,
            pre_remove_script: val.pre_remove_script,
            post_remove_script: val.post_remove_script,
            recommends: val.recommends,
        }
    }
}

impl From for tauri_bundler::WixSettings {
    fn from(val: WixSettings) -> Self {
        tauri_bundler::WixSettings {
            language: tauri_bundler::bundle::WixLanguage({
                let mut languages: Vec<_> = val
                    .language
                    .iter()
                    .map(|l| {
                        (
                            l.0.clone(),
                            tauri_bundler::bundle::WixLanguageConfig {
                                locale_path: l.1.clone(),
                            },
                        )
                    })
                    .collect();
                // Default to en-US if no languages were configured.
                if languages.is_empty() {
                    languages.push(("en-US".into(), Default::default()));
                }
                languages
            }),
            template: val.template,
            fragment_paths: val.fragment_paths,
            component_group_refs: val.component_group_refs,
            component_refs: val.component_refs,
            feature_group_refs: val.feature_group_refs,
            feature_refs: val.feature_refs,
            merge_refs: val.merge_refs,
            enable_elevated_update_task: val.enable_elevated_update_task,
            banner_path: val.banner_path,
            dialog_image_path: val.dialog_image_path,
            fips_compliant: val.fips_compliant,
            version: val.version,
            upgrade_code: val.upgrade_code,
        }
    }
}

impl From for tauri_bundler::MacOsSettings {
    fn from(val: MacOsSettings) -> Self {
        tauri_bundler::MacOsSettings {
            frameworks: val.frameworks,
            minimum_system_version: val.minimum_system_version,
            exception_domain: val.exception_domain,
            signing_identity: val.signing_identity,
            provider_short_name: val.provider_short_name,
            entitlements: val.entitlements,
            info_plist_path: val.info_plist_path,
            files: val.files,
            hardened_runtime: val.hardened_runtime,
            bundle_version: val.bundle_version,
            bundle_name: val.bundle_name,
        }
    }
}

#[allow(deprecated)]
impl From for tauri_bundler::WindowsSettings {
    fn from(val: WindowsSettings) -> Self {
        tauri_bundler::WindowsSettings {
            digest_algorithm: val.digest_algorithm,
            certificate_thumbprint: val.certificate_thumbprint,
            timestamp_url: val.timestamp_url,
            tsp: val.tsp,
            wix: val.wix.map(Into::into),
            webview_install_mode: val.webview_install_mode.into(),
            allow_downgrades: val.allow_downgrades,
            nsis: val.nsis.map(Into::into),
            sign_command: val.sign_command.map(Into::into),
            icon_path: val.icon_path.unwrap_or("./icons/icon.ico".into()),
        }
    }
}

impl From for tauri_utils::config::NSISInstallerMode {
    fn from(val: NSISInstallerMode) -> Self {
        match val {
            NSISInstallerMode::CurrentUser => tauri_utils::config::NSISInstallerMode::CurrentUser,
            NSISInstallerMode::PerMachine => tauri_utils::config::NSISInstallerMode::PerMachine,
            NSISInstallerMode::Both => tauri_utils::config::NSISInstallerMode::Both,
        }
    }
}

impl From for tauri_bundler::PackageType {
    fn from(value: PackageType) -> Self {
        match value {
            PackageType::MacOsBundle => Self::MacOsBundle,
            PackageType::IosBundle => Self::IosBundle,
            PackageType::WindowsMsi => Self::WindowsMsi,
            PackageType::Deb => Self::Deb,
            PackageType::Rpm => Self::Rpm,
            PackageType::AppImage => Self::AppImage,
            PackageType::Dmg => Self::Dmg,
            PackageType::Updater => Self::Updater,
            PackageType::Nsis => Self::Nsis,
        }
    }
}

// NOTE(review): inherent `into` shadows the std `Into` trait method for this receiver —
// consider implementing `From` on the target type instead.
impl WebviewInstallMode {
    fn into(self) -> tauri_utils::config::WebviewInstallMode {
        match self {
            Self::Skip => tauri_utils::config::WebviewInstallMode::Skip,
            Self::DownloadBootstrapper { silent } => {
                tauri_utils::config::WebviewInstallMode::DownloadBootstrapper { silent }
            }
            Self::EmbedBootstrapper { silent } => {
                tauri_utils::config::WebviewInstallMode::EmbedBootstrapper { silent }
            }
            Self::OfflineInstaller { silent } => {
                tauri_utils::config::WebviewInstallMode::OfflineInstaller { silent }
            }
            Self::FixedRuntime { path } => {
                tauri_utils::config::WebviewInstallMode::FixedRuntime { path }
            }
        }
    }
}

impl From for tauri_bundler::CustomSignCommandSettings {
    fn from(val: CustomSignCommandSettings) -> Self {
        tauri_bundler::CustomSignCommandSettings {
            cmd: val.cmd,
            args: val.args,
        }
    }
}

================================================
FILE: packages/cli/src/cargo_toml.rs
================================================

//! The cargo_toml crate contains some logic for resolving Cargo.toml files with workspace inheritance, but it
//! doesn't handle global configs like ~/.cargo/config.toml. This module handles extending the manifest with those
//! settings if they exist.

use std::path::{Path, PathBuf};

use cargo_toml::{Manifest, Profile, Profiles};

/// Load the manifest from a path inheriting from the global config where needed
pub fn load_manifest_from_path(path: &Path) -> Result {
    let mut original = Manifest::from_path(path)?;
    // Merge the .cargo/config.toml if it exists
    extend_manifest_config_toml(&mut original, &path.join(".cargo").join("config.toml"));
    // Merge the global cargo config if it exists
    if let Some(global_config) = global_cargo_config_path() {
        extend_manifest_config_toml(&mut original, &global_config);
    }
    Ok(original)
}

/// Get the path to cargo home
fn cargo_home() -> Option {
    // If the cargo home env var is set, use that
    if let Some(cargo_home) = std::env::var_os("CARGO_HOME") {
        return Some(PathBuf::from(cargo_home));
    }
    // Otherwise, use the default location
    if cfg!(windows) {
        std::env::var_os("USERPROFILE")
            .map(|user_profile| PathBuf::from(user_profile).join(".cargo"))
    } else if cfg!(unix) {
        dirs::home_dir().map(|home_dir| home_dir.join(".cargo"))
    } else {
        None
    }
}

/// Get the global cargo config path if it exists
fn global_cargo_config_path() -> Option {
    cargo_home().map(|cargo_home| cargo_home.join("config.toml"))
}

// Extend a manifest with a config.toml if it exists
fn extend_manifest_config_toml(manifest: &mut Manifest, path: &Path) {
    // Read the config.toml if it exists
    let Ok(config) = std::fs::read_to_string(path) else {
        return;
    };
    // Silently ignore unparseable configs — this merge is best-effort.
    let Ok(config) = config.parse::() else {
        return;
    };
    // Try to parse profiles
    if let Some(profiles) = config.get("profile").and_then(|p| p.as_table()) {
        merge_profiles(
            &mut manifest.profile,
            toml::from_str::(&profiles.to_string()).unwrap_or_default(),
        );
    }
}

/// Merge the new profiles into the target profiles. Keep the existing values if they exist.
fn merge_profiles(target: &mut Profiles, new: Profiles) {
    // Each built-in profile follows the same pattern: adopt the new profile wholesale
    // if the target has none, otherwise merge field-by-field keeping target values.
    if let Some(new_release) = new.release {
        if target.release.is_none() {
            target.release = Some(new_release);
        } else {
            merge_profile(target.release.as_mut().unwrap(), new_release);
        }
    }
    if let Some(new_dev) = new.dev {
        if target.dev.is_none() {
            target.dev = Some(new_dev);
        } else {
            merge_profile(target.dev.as_mut().unwrap(), new_dev);
        }
    }
    if let Some(new_test) = new.test {
        if target.test.is_none() {
            target.test = Some(new_test);
        } else {
            merge_profile(target.test.as_mut().unwrap(), new_test);
        }
    }
    if let Some(new_bench) = new.bench {
        if target.bench.is_none() {
            target.bench = Some(new_bench);
        } else {
            merge_profile(target.bench.as_mut().unwrap(), new_bench);
        }
    }
    #[allow(deprecated)]
    if let Some(new_doc) = new.doc {
        if target.doc.is_none() {
            target.doc = Some(new_doc);
        } else {
            merge_profile(target.doc.as_mut().unwrap(), new_doc);
        }
    }
    // Custom profiles merge by name.
    for (profile_name, profile) in new.custom {
        if let Some(target_profile) = target.custom.get_mut(&profile_name) {
            merge_profile(target_profile, profile);
        } else {
            target.custom.insert(profile_name, profile);
        }
    }
}

/// Merge the new profile into the target profile. Keep the existing values if they exist.
fn merge_profile(target: &mut Profile, new: Profile) {
    if target.opt_level.is_none() {
        target.opt_level = new.opt_level;
    }
    if target.debug.is_none() {
        target.debug = new.debug;
    }
    if target.split_debuginfo.is_none() {
        target.split_debuginfo = new.split_debuginfo;
    }
    if target.rpath.is_none() {
        target.rpath = new.rpath;
    }
    if target.lto.is_none() {
        target.lto = new.lto;
    }
    if target.debug_assertions.is_none() {
        target.debug_assertions = new.debug_assertions;
    }
    if target.codegen_units.is_none() {
        target.codegen_units = new.codegen_units;
    }
    if target.panic.is_none() {
        target.panic = new.panic;
    }
    if target.incremental.is_none() {
        target.incremental = new.incremental;
    }
    if target.overflow_checks.is_none() {
        target.overflow_checks = new.overflow_checks;
    }
    if target.strip.is_none() {
        target.strip = new.strip;
    }
    if target.build_override.is_none() {
        target.build_override = new.build_override;
    }
    if target.inherits.is_none() {
        target.inherits = new.inherits;
    }
    // Per-package overrides are additive rather than keep-existing.
    target.package.extend(new.package);
}

================================================
FILE: packages/cli/src/cli/autoformat.rs
================================================

use super::{check::collect_rs_files, *};
use crate::Workspace;
use anyhow::{bail, Context};
use dioxus_autofmt::{IndentOptions, IndentType};
use rayon::prelude::*;
use std::{borrow::Cow, fs, path::Path};

// For reference, the rustfmt main.rs file
// https://github.com/rust-lang/rustfmt/blob/master/src/bin/main.rs

/// Format some rsx
#[derive(Clone, Debug, Parser)]
pub(crate) struct Autoformat {
    /// Format rust code before the formatting the rsx macros
    #[clap(long)]
    pub(crate) all_code: bool,

    /// Run in 'check' mode. Exits with 0 if input is formatted correctly. Exits
    /// with 1 and prints a diff if formatting is required.
    #[clap(short, long)]
    pub(crate) check: bool,

    /// Input rsx (selection)
    #[clap(short, long)]
    pub(crate) raw: Option,

    /// Input file at path (set to "-" to read file from stdin, and output formatted file to stdout)
    #[clap(short, long)]
    pub(crate) file: Option,

    /// Split attributes in lines or not
    #[clap(short, long, default_value = "false")]
    pub(crate) split_line_attributes: bool,

    /// The package to build
    #[clap(short, long)]
    pub(crate) package: Option,
}

impl Autoformat {
    /// Dispatch to single-file, raw-text, or whole-project formatting based on the flags given.
    pub(crate) async fn autoformat(self) -> Result {
        let Autoformat {
            check,
            raw,
            file,
            split_line_attributes,
            all_code: format_rust_code,
            ..
        } = self;

        if let Some(file) = file {
            // Format a single file
            refactor_file(file, split_line_attributes, format_rust_code)?;
        } else if let Some(raw) = raw {
            // Format raw text.
            let indent = indentation_for(".", self.split_line_attributes)?;
            let formatted =
                dioxus_autofmt::fmt_block(&raw, 0, indent).context("error formatting codeblock")?;
            println!("{}", formatted);
        } else {
            // Default to formatting the project.
            let crate_dir = if let Some(package) = self.package {
                let workspace = Workspace::current().await?;
                let dx_crate = workspace
                    .find_main_package(Some(package))
                    .context("Failed to find package")?;
                workspace.krates[dx_crate]
                    .manifest_path
                    .parent()
                    .unwrap()
                    .to_path_buf()
                    .into()
            } else {
                Cow::Borrowed(Path::new("."))
            };
            autoformat_project(check, split_line_attributes, format_rust_code, crate_dir)
                .context("error autoformatting project")?;
        }

        Ok(StructuredOutput::Success)
    }
}

/// Format one file (or stdin when `file == "-"`), writing the result back in place
/// (or to stdout for stdin input).
fn refactor_file(
    file: String,
    split_line_attributes: bool,
    format_rust_code: bool,
) -> Result<(), Error> {
    let indent = indentation_for(".", split_line_attributes)?;
    let file_content = if file == "-" {
        let mut contents = String::new();
        std::io::stdin().read_to_string(&mut contents)?;
        Ok(contents)
    } else {
        fs::read_to_string(&file)
    };
    let mut s = file_content.context("failed to open file")?;
    if format_rust_code {
        s = format_rust(&s)?;
    }
    let parsed = syn::parse_file(&s).context("failed to parse file")?;
    let edits =
        dioxus_autofmt::try_fmt_file(&s, &parsed, indent).context("failed to format file")?;
    let out = dioxus_autofmt::apply_formats(&s, edits);
    if file == "-" {
        print!("{out}");
    } else if let Err(e) = fs::write(&file, out) {
        // A failed write is logged but not fatal.
        tracing::error!("failed to write formatted content to file: {e}",);
    } else {
        println!("formatted {file}");
    }
    Ok(())
}

/// Format a single file on disk, returning the number of rsx edits applied.
fn format_file(
    path: impl AsRef,
    indent: IndentOptions,
    format_rust_code: bool,
) -> Result {
    let mut contents = fs::read_to_string(&path)?;
    // Track whether anything changed so we only write when necessary.
    let mut if_write = false;
    if format_rust_code {
        let formatted = format_rust(&contents).context("Syntax Error")?;
        if contents != formatted {
            if_write = true;
            contents = formatted;
        }
    }
    let parsed = syn::parse_file(&contents).context("Failed to parse file")?;
    let edits = dioxus_autofmt::try_fmt_file(&contents, &parsed, indent)
        .context("Failed to format file")?;
    let len = edits.len();
    if !edits.is_empty() {
        if_write = true;
    }
    if if_write {
        let out = dioxus_autofmt::apply_formats(&contents, edits);
        fs::write(path, out)?;
    }
    Ok(len)
}

/// Read every .rs file accessible when considering the .gitignore and try to format it
///
/// Runs using rayon for multithreading, so it should be really really fast
///
/// Doesn't do mod-descending, so it will still try to format unreachable files. TODO.
fn autoformat_project(
    check: bool,
    split_line_attributes: bool,
    format_rust_code: bool,
    dir: impl AsRef,
) -> Result<()> {
    let mut files_to_format = vec![];
    collect_rs_files(dir.as_ref(), &mut files_to_format);
    if files_to_format.is_empty() {
        return Ok(());
    }
    // NOTE(review): duplicated empty-check — this second guard is dead code.
    if files_to_format.is_empty() {
        return Ok(());
    }
    let indent = indentation_for(&files_to_format[0], split_line_attributes)?;
    let counts = files_to_format
        .into_par_iter()
        .map(|path| {
            let res = format_file(&path, indent.clone(), format_rust_code);
            match res {
                Ok(cnt) => Some(cnt),
                Err(err) => {
                    // Per-file failures are logged and skipped, not fatal.
                    tracing::error!("error formatting file : {}\n{:#?}", path.display(), err);
                    None
                }
            }
        })
        .collect::>();
    let files_formatted: usize = counts.into_iter().flatten().sum();
    // In check mode, any needed formatting is an error (non-zero exit).
    if files_formatted > 0 && check {
        bail!("{files_formatted} files needed formatting");
    }
    Ok(())
}

/// Query `cargo fmt --print-config` to discover the project's indentation settings.
fn indentation_for(
    file_or_dir: impl AsRef,
    split_line_attributes: bool,
) -> Result {
    let out = std::process::Command::new("cargo")
        .args(["fmt", "--", "--print-config", "current"])
        .arg(file_or_dir.as_ref())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::inherit())
        .output()?;
    if !out.status.success() {
        bail!("cargo fmt failed with status: {out:?}");
    }
    let config = String::from_utf8_lossy(&out.stdout);
    let hard_tabs = config
        .lines()
        .find(|line| line.starts_with("hard_tabs "))
        .and_then(|line| line.split_once('='))
        .map(|(_, value)| value.trim() == "true")
        .context("Could not find hard_tabs option in rustfmt config")?;
    let tab_spaces = config
        .lines()
        .find(|line| line.starts_with("tab_spaces "))
        .and_then(|line| line.split_once('='))
        .map(|(_, value)| value.trim().parse::())
        .context("Could not find tab_spaces option in rustfmt config")?
        .context("Could not parse tab_spaces option in rustfmt config")?;
    Ok(IndentOptions::new(
        if hard_tabs {
            IndentType::Tabs
        } else {
            IndentType::Spaces
        },
        tab_spaces,
        split_line_attributes,
    ))
}

/// Format rust code using prettyplease
fn format_rust(input: &str) -> Result {
    let syntax_tree = syn::parse_file(input)
        .map_err(format_syn_error)
        .context("Failed to parse Rust syntax")?;
    let output = prettyplease::unparse(&syntax_tree);
    Ok(output)
}

/// Convert a syn parse error into an anyhow error carrying the line/column location.
fn format_syn_error(err: syn::Error) -> Error {
    let start = err.span().start();
    let line = start.line;
    let column = start.column;
    anyhow::anyhow!("Syntax Error in line {line} column {column}:\n{err}")
}

#[tokio::test]
async fn test_auto_fmt() {
    let test_rsx = r#" // div {} // // // "#
        .to_string();
    let fmt = Autoformat {
        all_code: false,
        check: false,
        raw: Some(test_rsx),
        file: None,
        split_line_attributes: false,
        package: None,
    };
    fmt.autoformat().await.unwrap();
}

================================================
FILE: packages/cli/src/cli/build.rs
================================================

use dioxus_dx_wire_format::StructuredBuildArtifacts;

use crate::{
    cli::*, Anonymized, AppBuilder, BuildArtifacts, BuildId, BuildMode, BuildRequest,
    BundleFormat, Platform, TargetArgs, Workspace,
};

/// Build the Rust Dioxus app and all of its assets.
///
/// Produces a final output build. If a "server" feature is present in the package's Cargo.toml, it will
/// be considered a fullstack app and the server will be built as well.
#[derive(Clone, Debug, Default, Parser)]
pub struct BuildArgs {
    /// Enable fullstack mode [default: false]
    ///
    /// This is automatically detected from `dx serve` if the "fullstack" feature is enabled by default.
    #[arg(
        long,
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub(crate) fullstack: Option,

    /// Pre-render all routes returned from the app's `/static_routes` endpoint [default: false]
    #[clap(long)]
    pub(crate) ssg: bool,

    /// Force a "fat" binary, required to use `dx build-tools hotpatch`
    #[clap(long)]
    pub(crate) fat_binary: bool,

    /// This flag only applies to fullstack builds. By default fullstack builds will run the server
    /// and client builds in parallel. This flag will force the build to run the server build first, then the client build. [default: false]
    ///
    /// If CI is enabled, this will be set to true by default.
    ///
    #[clap(
        long,
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub(crate) force_sequential: Option,

    /// Arguments for the build itself
    #[clap(flatten)]
    pub(crate) build_arguments: TargetArgs,
}

impl BuildArgs {
    /// Sequential builds default to on under CI (detected via the `CI` env var).
    pub(crate) fn force_sequential_build(&self) -> bool {
        self.force_sequential
            .unwrap_or_else(|| std::env::var("CI").is_ok())
    }
}

impl Anonymized for BuildArgs {
    fn anonymized(&self) -> Value {
        json! {{
            "fullstack": self.fullstack,
            "ssg": self.ssg,
            "build_arguments": self.build_arguments.anonymized(),
        }}
    }
}

pub struct BuildTargets {
    pub client: BuildRequest,
    pub server: Option,
}

impl CommandWithPlatformOverrides {
    /// We need to decompose the combined `BuildArgs` into the individual targets that we need to build.
    ///
    /// Only in a few cases do we spin out an additional server binary:
    /// - the fullstack feature is passed
    /// - the fullstack flag is enabled
    /// - the server flag is enabled
    ///
    /// The buildtargets configuration comes in two flavors:
    /// - implied via the `fullstack` feature
    /// - explicit when using `@server and @client`
    ///
    /// We use the client arguments to build the client target, and then make a few changes to make
    /// the server target.
    ///
    /// The `--fullstack` feature is basically the same as passing `--features fullstack`
    ///
    /// Some examples:
    /// ```shell, ignore
    /// dx serve --target wasm32-unknown-unknown --fullstack # serves both client and server
    /// dx serve --target wasm32-unknown-unknown --features fullstack # serves both client and server
    /// dx serve --target wasm32-unknown-unknown # only serves the client
    /// dx serve --target wasm32-unknown-unknown # servers both if `fullstack` is enabled on dioxus
    /// dx serve @client --target wasm32-unknown-unknown # only serves the client
    /// dx serve @client --target wasm32-unknown-unknown --fullstack # serves both client and server
    /// ```
    ///
    /// Currently it is not possible to serve the server without the client, but this could be added in the future.
    pub async fn into_targets(mut self) -> Result {
        let workspace = Workspace::current().await?;

        // do some logging to ensure dx matches the dioxus version since we're not always API compatible
        workspace.check_dioxus_version_against_cli();

        // The client args are the `@client` arguments, or the shared build arguments if @client is not specified.
        let client_args = &self.client.as_ref().unwrap_or(&self.shared).build_arguments;

        // Create the client build request
        let client = BuildRequest::new(client_args, workspace.clone()).await?;

        // Create the server build request if needed
        let mut server = None;
        if matches!(self.shared.fullstack, Some(true))
            || client.fullstack_feature_enabled()
            || self.server.is_some()
        {
            match self.server.as_mut() {
                Some(server_args) => {
                    // Make sure we set the client target here so @server knows to place its output into the @client target directory.
                    server_args.build_arguments.client_target = Some(client.main_target.clone());
                    // We don't override anything except the bundle format since @server usually implies a server output
                    server_args.build_arguments.bundle = server_args
                        .build_arguments
                        .bundle
                        .or(Some(BundleFormat::Server));
                    server = Some(
                        BuildRequest::new(&server_args.build_arguments, workspace.clone()).await?,
                    );
                }
                None if client_args.platform == Platform::Server => {
                    // If the user requests a server build with `--server`, then we don't need to build a separate server binary.
                    // There's no client to use, so even though fullstack is true, we only build the server.
                }
                None => {
                    // Derive an implicit server target from the shared args, forcing
                    // server platform/renderer/bundle and the host triple.
                    let mut args = self.shared.build_arguments.clone();
                    args.platform = crate::Platform::Server;
                    args.renderer = Some(crate::Renderer::Server);
                    args.bundle = Some(crate::BundleFormat::Server);
                    args.target = Some(target_lexicon::Triple::host());
                    server = Some(BuildRequest::new(&args, workspace.clone()).await?);
                }
            }
        }

        Ok(BuildTargets { client, server })
    }

    /// Run the client (and optional server) builds, in parallel unless sequential mode is forced.
    pub async fn build(self) -> Result {
        tracing::info!("Building project...");
        let force_sequential = self.shared.force_sequential_build();
        let ssg = self.shared.ssg;
        let mode = match self.shared.fat_binary {
            true => BuildMode::Fat,
            false => BuildMode::Base { run: false },
        };
        let targets = self.into_targets().await?;
        let build_client = Self::build_client_inner(&targets.client, mode.clone());
        let build_server = Self::build_server_inner(&targets.server, mode.clone(), ssg);
        let (client, server) = match force_sequential {
            true => (build_client.await, build_server.await),
            false => tokio::join!(build_client, build_server),
        };
        Ok(StructuredOutput::BuildsFinished {
            client: client?.into_structured_output(),
            server: server?.map(|s| s.into_structured_output()),
        })
    }

    pub(crate) async fn build_client_inner(
        request: &BuildRequest,
        mode: BuildMode,
    ) -> Result {
        AppBuilder::started(request, mode, BuildId::PRIMARY)?
.finish_build()
            .await
            .inspect(|_| {
                tracing::info!(path = ?request.root_dir(), "Client build completed successfully! 🚀");
            })
    }

    pub(crate) async fn build_server_inner(
        request: &Option<BuildRequest>,
        mode: BuildMode,
        ssg: bool,
    ) -> Result<Option<BuildArtifacts>> {
        let Some(server) = request.as_ref() else {
            return Ok(None);
        };

        // If the server is present, we need to build it as well
        let mut server_build = AppBuilder::started(server, mode, BuildId::SECONDARY)?;
        let server_artifacts = server_build.finish_build().await?;

        // Run SSG and cache static routes
        if ssg {
            crate::pre_render_static_routes(None, &mut server_build, None).await?;
        }

        tracing::info!(path = ?server.root_dir(), "Server build completed successfully! 🚀");

        Ok(Some(server_artifacts))
    }
}

impl BuildArtifacts {
    pub(crate) fn into_structured_output(self) -> StructuredBuildArtifacts {
        // Extract the tip crate's args for the structured output.
        // The tip crate is identified by replacing hyphens with underscores in the target name,
        // but since we don't have the BuildRequest here, we look for the entry with link_args
        // (only the tip crate has link_args attached) or fall back to any entry.
        let tip_args = self
            .workspace_rustc_args
            .values()
            .find(|a| !a.link_args.is_empty())
            .or_else(|| self.workspace_rustc_args.values().next())
            .cloned()
            .unwrap_or_default();

        StructuredBuildArtifacts {
            path: self.root_dir,
            exe: self.exe,
            rustc_args: tip_args.args,
            rustc_envs: tip_args.envs,
            link_args: tip_args.link_args,
            assets: self.assets.unique_assets().cloned().collect(),
        }
    }
}

================================================ FILE: packages/cli/src/cli/build_assets.rs ================================================

use std::{fs::create_dir_all, path::PathBuf};

use crate::{extract_assets_from_file, Result, StructuredOutput};
use clap::Parser;
use dioxus_cli_opt::process_file_to;
use tracing::debug;

#[derive(Clone, Debug, Parser)]
pub struct BuildAssets {
    /// The source executable to build assets for.
    pub(crate) executable: PathBuf,

    /// The destination directory for the assets.
    pub(crate) destination: PathBuf,
}

impl BuildAssets {
    /// Extract the asset manifest from the executable and process each unique
    /// asset into the destination directory.
    pub async fn run(self) -> Result<StructuredOutput> {
        let manifest = extract_assets_from_file(&self.executable).await?;

        create_dir_all(&self.destination)?;

        for asset in manifest.unique_assets() {
            let source_path = PathBuf::from(asset.absolute_source_path());
            let destination_path = self.destination.join(asset.bundled_path());
            debug!(
                "Processing asset {} --> {} {:#?}",
                source_path.display(),
                destination_path.display(),
                asset
            );
            process_file_to(asset.options(), &source_path, &destination_path)?;
        }

        Ok(StructuredOutput::Success)
    }
}

================================================ FILE: packages/cli/src/cli/bundle.rs ================================================

use crate::{AppBuilder, BuildArgs, BuildId, BuildMode, BuildRequest, BundleFormat};
use anyhow::{bail, Context};
use path_absolutize::Absolutize;
use std::collections::HashMap;
use tauri_bundler::{BundleBinary, BundleSettings, PackageSettings, SettingsBuilder};
use walkdir::WalkDir;

use super::*;

/// Bundle an app and its assets.
///
/// This will produce a client `public` folder and the associated server executable in the output folder.
#[derive(Clone, Debug, Parser)]
pub struct Bundle {
    /// The package types to bundle
    #[clap(long)]
    pub package_types: Option<Vec<crate::PackageType>>,

    /// The directory in which the final bundle will be placed.
    ///
    /// Relative paths will be placed relative to the current working directory if specified.
    /// Otherwise, the out_dir path specified in Dioxus.toml will be used (relative to the crate root).
    ///
    /// We will flatten the artifacts into this directory - there will be no differentiation between
    /// artifacts produced by different platforms.
    #[clap(long)]
    pub out_dir: Option<PathBuf>,

    /// The arguments for the dioxus build
    #[clap(flatten)]
    pub(crate) args: CommandWithPlatformOverrides<BuildArgs>,
}

impl Bundle {
    // todo: make sure to run pre-render static routes!
we removed this from the other bundling step pub(crate) async fn bundle(mut self) -> Result { tracing::info!("Bundling project..."); let BuildTargets { client, server } = self.args.into_targets().await?; let mut server_artifacts = None; let client_artifacts = AppBuilder::started(&client, BuildMode::Base { run: false }, BuildId::PRIMARY)? .finish_build() .await?; tracing::info!(path = ?client.root_dir(), "Client build completed successfully! 🚀"); if let Some(server) = server.as_ref() { // If the server is present, we need to build it as well server_artifacts = Some( AppBuilder::started(server, BuildMode::Base { run: false }, BuildId::SECONDARY)? .finish_build() .await?, ); tracing::info!(path = ?client.root_dir(), "Server build completed successfully! 🚀"); } // If we're building for iOS, we need to bundle the iOS bundle if client.bundle == BundleFormat::Ios && self.package_types.is_none() { self.package_types = Some(vec![crate::PackageType::IosBundle]); } let mut bundles = vec![]; // Copy the server over if it exists if let Some(server) = server.as_ref() { bundles.push(server.main_exe()); } // Create a list of bundles that we might need to copy match client.bundle { // By default, mac/win/linux work with tauri bundle BundleFormat::MacOS | BundleFormat::Linux | BundleFormat::Windows => { tracing::info!("Running desktop bundler..."); for bundle in Self::bundle_desktop(&client, &self.package_types)? { bundles.extend(bundle.bundle_paths); } } // Web/ios can just use their root_dir BundleFormat::Web => bundles.push(client.root_dir()), BundleFormat::Ios => { tracing::warn!("iOS bundles are not currently codesigned! 
You will need to codesign the app before distributing."); bundles.push(client.root_dir()) } BundleFormat::Server => bundles.push(client.root_dir()), BundleFormat::Android => { let aab = client .android_gradle_bundle() .await .context("Failed to run gradle bundleRelease")?; bundles.push(aab); } }; // Copy the bundles to the output directory if one was specified let crate_outdir = client.crate_out_dir(); if let Some(outdir) = self.out_dir.clone().or(crate_outdir) { let outdir = outdir .absolutize() .context("Failed to absolutize output directory")?; tracing::info!("Copying bundles to output directory: {}", outdir.display()); std::fs::create_dir_all(&outdir)?; for bundle_path in bundles.iter_mut() { let destination = outdir.join(bundle_path.file_name().unwrap()); tracing::debug!( "Copying from {} to {}", bundle_path.display(), destination.display() ); if bundle_path.is_dir() { dircpy::CopyBuilder::new(&bundle_path, &destination) .overwrite(true) .run_par() .context("Failed to copy the app to output directory")?; } else { std::fs::copy(&bundle_path, &destination) .context("Failed to copy the app to output directory")?; } *bundle_path = destination; } } for bundle_path in bundles.iter() { tracing::info!( "Bundled app at: {}", bundle_path.absolutize().unwrap().display() ); } let client = client_artifacts.into_structured_output(); let server = server_artifacts.map(|s| s.into_structured_output()); Ok(StructuredOutput::BundleOutput { bundles, client, server, }) } fn bundle_desktop( build: &BuildRequest, package_types: &Option>, ) -> Result, Error> { let krate = &build; let exe = build.main_exe(); _ = std::fs::remove_dir_all(krate.bundle_dir(build.bundle)); let package = krate.package(); let mut name: PathBuf = krate.executable_name().into(); if cfg!(windows) { name.set_extension("exe"); } std::fs::create_dir_all(krate.bundle_dir(build.bundle)) .context("Failed to create bundle directory")?; std::fs::copy(&exe, krate.bundle_dir(build.bundle).join(&name)) .with_context(|| 
"Failed to copy the output executable into the bundle directory")?; let binaries = vec![ // We use the name of the exe but it has to be in the same directory BundleBinary::new(krate.executable_name().to_string(), true) .set_src_path(Some(exe.display().to_string())), ]; let mut bundle_settings: BundleSettings = krate.config.bundle.clone().into(); // Check if required fields are provided instead of failing silently. if bundle_settings.identifier.is_none() { bail!("\n\nBundle identifier was not provided in `Dioxus.toml`. Add it as:\n\n[bundle]\nidentifier = \"com.mycompany\"\n\n"); } if bundle_settings.publisher.is_none() { bail!("\n\nBundle publisher was not provided in `Dioxus.toml`. Add it as:\n\n[bundle]\npublisher = \"MyCompany\"\n\n"); } /// Resolve an icon path relative to the crate dir fn canonicalize_icon_path(build: &BuildRequest, icon: &mut String) -> Result<(), Error> { let icon_path = build .crate_dir() .join(&icon) .canonicalize() .with_context(|| format!("Failed to canonicalize path to icon {icon:?}"))?; *icon = icon_path.to_string_lossy().to_string(); Ok(()) } // Resolve bundle.icon relative to the crate dir if let Some(icons) = bundle_settings.icon.as_mut() { for icon in icons.iter_mut() { canonicalize_icon_path(build, icon)?; } } #[allow(deprecated)] if cfg!(windows) { // Resolve bundle.windows.icon_path relative to the crate dir let mut windows_icon_path = bundle_settings .windows .icon_path .to_string_lossy() .to_string(); canonicalize_icon_path(build, &mut windows_icon_path)?; bundle_settings.windows.icon_path = PathBuf::from(&windows_icon_path); let windows_icon_override = krate.config.bundle.windows.as_ref().map(|w| &w.icon_path); if windows_icon_override.is_none() { let icon_path = bundle_settings .icon .as_ref() .and_then(|icons| icons.first()); if let Some(icon_path) = icon_path { bundle_settings.icon = Some(vec![icon_path.into()]); }; } } if bundle_settings.resources_map.is_none() { bundle_settings.resources_map = Some(HashMap::new()); } let 
asset_dir = build.asset_dir(); if asset_dir.exists() { for entry in WalkDir::new(&asset_dir) { let entry = entry.unwrap(); let path = entry.path(); if path.is_file() { let old = path .canonicalize() .with_context(|| format!("Failed to canonicalize {entry:?}"))?; let new = PathBuf::from("assets").join(path.strip_prefix(&asset_dir).unwrap_or(path)); tracing::debug!("Bundled asset: {old:?} -> {new:?}"); bundle_settings .resources_map .as_mut() .expect("to be set") .insert(old.display().to_string(), new.display().to_string()); } } } for resource_path in bundle_settings.resources.take().into_iter().flatten() { bundle_settings .resources_map .as_mut() .expect("to be set") .insert(resource_path, "".to_string()); } let mut settings = SettingsBuilder::new() .project_out_directory(krate.bundle_dir(build.bundle)) .package_settings(PackageSettings { product_name: krate.bundled_app_name(), version: package.version.to_string(), description: package.description.clone().unwrap_or_default(), homepage: Some(package.homepage.clone().unwrap_or_default()), authors: Some(package.authors.clone()), default_run: Some(name.display().to_string()), }) .log_level(log::Level::Debug) .binaries(binaries) .bundle_settings(bundle_settings); if let Some(packages) = &package_types { settings = settings.package_types(packages.iter().map(|p| (*p).into()).collect()); } settings = settings.target(build.triple.to_string()); let settings = settings .build() .context("failed to bundle tauri bundle settings")?; tracing::debug!("Bundling project with settings: {:#?}", settings); if cfg!(target_os = "macos") { std::env::set_var("CI", "true"); } let bundles = tauri_bundler::bundle::bundle_project(&settings).inspect_err(|err| { tracing::error!("Failed to bundle project: {:#?}", err); if cfg!(target_os = "macos") { tracing::error!("Make sure you have automation enabled in your terminal (https://github.com/tauri-apps/tauri/issues/3055#issuecomment-1624389208) and full disk access enabled for your terminal 
(https://github.com/tauri-apps/tauri/issues/3055#issuecomment-1624389208)"); } })?; Ok(bundles) } } ================================================ FILE: packages/cli/src/cli/check.rs ================================================ //! Run linting against the user's codebase. //! //! For reference, the rustfmt main.rs file //! use super::*; use crate::BuildRequest; use anyhow::{anyhow, Context}; use futures_util::{stream::FuturesUnordered, StreamExt}; use std::path::Path; /// Check the Rust files in the project for issues. #[derive(Clone, Debug, Parser)] pub(crate) struct Check { /// Input file #[clap(short, long)] pub(crate) file: Option, /// Information about the target to check #[clap(flatten)] pub(crate) build_args: CommandWithPlatformOverrides, } impl Check { // Todo: check the entire crate pub(crate) async fn check(self) -> Result { let BuildTargets { client, server } = self.build_args.into_targets().await?; match self.file { // Default to checking the project None => { check_project_and_report(&client) .await .context("error checking project")?; if let Some(server) = server { if server.package != client.package { check_project_and_report(&server) .await .context("error checking project")?; } } } Some(file) => { check_file_and_report(file) .await .context("error checking file")?; } } Ok(StructuredOutput::Success) } } async fn check_file_and_report(path: PathBuf) -> Result<()> { check_files_and_report(vec![path]).await } /// Read every .rs file accessible when considering the .gitignore and check it /// /// Runs using Tokio for multithreading, so it should be really really fast /// /// Doesn't do mod-descending, so it will still try to check unreachable files. TODO. 
async fn check_project_and_report(build: &BuildRequest) -> Result<()> { let dioxus_crate = build .workspace .find_main_package(Some(build.package.clone()))?; let dioxus_crate = &build.workspace.krates[dioxus_crate]; let mut files_to_check = vec![]; collect_rs_files( dioxus_crate.manifest_path.parent().unwrap().as_std_path(), &mut files_to_check, ); check_files_and_report(files_to_check).await } /// Check a list of files and report the issues. async fn check_files_and_report(files_to_check: Vec) -> Result<()> { let issue_reports = files_to_check .into_iter() .filter(|file| file.components().all(|f| f.as_os_str() != "target")) .map(|path| async move { let _path = path.clone(); let res = tokio::spawn(async move { tokio::fs::read_to_string(&_path) .await .map(|contents| dioxus_check::check_file(_path, &contents)) }) .await; if res.is_err() { tracing::error!("error checking file: {}", path.display()); } res }) .collect::>() .collect::>() .await; // remove error results which we've already printed let issue_reports = issue_reports .into_iter() .flatten() .flatten() .collect::>(); let total_issues = issue_reports.iter().map(|r| r.issues.len()).sum::(); for report in issue_reports.into_iter() { if !report.issues.is_empty() { tracing::info!("{}", report); } } match total_issues { 0 => { tracing::info!("No issues found."); Ok(()) } 1 => Err(anyhow!("1 issue found.")), _ => Err(anyhow!("{total_issues} issues found.")), } } pub(crate) fn collect_rs_files(folder: &Path, files: &mut Vec) { for entry in ignore::Walk::new(folder).flatten() { if entry.path().extension() == Some("rs".as_ref()) { files.push(entry.path().to_path_buf()); } } } ================================================ FILE: packages/cli/src/cli/component.rs ================================================ use std::{ collections::{HashMap, HashSet}, ops::Deref, path::{Path, PathBuf}, }; use crate::{verbosity_or_default, DioxusConfig, Result, StructuredOutput, Workspace}; use anyhow::{bail, Context}; use 
clap::Parser; use dioxus_component_manifest::{ component_manifest_schema, CargoDependency, Component, ComponentDependency, }; use git2::Repository; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use tokio::{process::Command, task::JoinSet}; use tracing::debug; #[derive(Clone, Debug, Parser)] pub enum ComponentCommand { /// Add a component from a registry Add { #[clap(flatten)] component: ComponentArgs, /// The registry to use #[clap(flatten)] registry: ComponentRegistry, /// Overwrite the component if it already exists #[clap(long)] force: bool, }, /// Remove a component Remove { #[clap(flatten)] component: ComponentArgs, /// The registry to use #[clap(flatten)] registry: ComponentRegistry, }, /// Update a component registry Update { /// The registry to update #[clap(flatten)] registry: Option, }, /// List available components in a registry List { /// The registry to list components in #[clap(flatten)] registry: ComponentRegistry, }, /// Clear the component registry cache Clean, /// Print the schema for component manifests Schema, } /// Arguments for a component and component module location #[derive(Clone, Debug, Parser, Serialize)] pub struct ComponentArgs { /// The components to add or remove #[clap(required_unless_present = "all", value_delimiter = ',')] components: Vec, /// The location of the component module in your project (default: src/components) #[clap(long)] module_path: Option, /// The location of the global assets in your project (default: assets) #[clap(long)] global_assets_path: Option, /// Include all components in the registry #[clap(long)] all: bool, } impl ComponentCommand { /// Run the component command pub async fn run(self) -> Result { match self { // List all components in the registry Self::List { registry } => { let config = Self::resolve_config().await?; let registry = Self::resolve_registry(registry, &config)?; let mut components = registry.read_components().await?; components.sort_by_key(|c| c.name.clone()); for 
component in components { println!("- {}: {}", component.name, component.description); } } // Add a component to the managed component module Self::Add { component: component_args, registry, force, } => { // Resolve the config let config = Self::resolve_config().await?; // Resolve the registry let registry = Self::resolve_registry(registry, &config)?; // Get the registry root. Components can't copy files outside of this path let registry_root = registry.resolve().await?; // Read all components from the registry let components = registry.read_components().await?; let mode = if force { ComponentExistsBehavior::Overwrite } else { ComponentExistsBehavior::Error }; // Find the requested components let components = if component_args.all { components } else { component_args .components .iter() .map(|component| find_component(&components, component)) .collect::>>()? }; // Find and initialize the components module if it doesn't exist let components_root = components_root(component_args.module_path.as_deref(), &config)?; let new_components_module = ensure_components_module_exists(&components_root).await?; // Recursively add dependencies // A map of the components that have been added or are queued to be added let mut required_components = HashMap::new(); required_components.extend(components.iter().cloned().map(|c| (c, mode))); // A stack of components to process let mut queued_components = components; while let Some(queued_component) = queued_components.pop() { for dependency in &queued_component.component_dependencies { let (registry, name) = match dependency { ComponentDependency::Builtin(name) => { (ComponentRegistry::default(), name) } ComponentDependency::ThirdParty { name, git, rev } => ( ComponentRegistry { remote: RemoteComponentRegistry { git: Some(git.clone()), rev: rev.clone(), }, path: None, }, name, ), }; let registry_components = registry.read_components().await?; let dependency_component = find_component(®istry_components, name)?; if required_components 
.insert( dependency_component.clone(), ComponentExistsBehavior::Return, ) .is_none() { queued_components.push(dependency_component); } } } // Then collect all required rust dependencies let mut rust_dependencies = HashSet::new(); for component in required_components.keys() { rust_dependencies.extend(component.cargo_dependencies.iter().cloned()); } // And add them to Cargo.toml Self::add_rust_dependencies(&rust_dependencies).await?; // Once we have all required components, add them for (component, mode) in required_components { add_component( ®istry_root, component_args.global_assets_path.as_deref(), component_args.module_path.as_deref(), &component, mode, &config, ) .await?; } // If we created a new components module, print instructions about the final setup steps required if new_components_module { println!( "Created new components module at {}.", components_root.display() ); println!("To finish setting up components, you will need to:"); println!("- manually reference the module by adding `mod components;` to your `main.rs` file"); if registry.is_default() { println!("- add a reference to `asset!(\"/assets/dx-components-theme.css\")` as a stylesheet in your app"); } } } // Update the remote component registry Self::Update { registry } => { let config = Self::resolve_config().await?; registry .unwrap_or(config.components.registry.remote) .update() .await?; } // Remove a component from the managed component module Self::Remove { component, registry, } => { Self::remove_component(&component, registry).await?; } // Clear the component registry cache Self::Clean => { _ = tokio::fs::remove_dir_all(&Workspace::component_cache_dir()).await; } // Print the schema for component manifests Self::Schema => { let schema = component_manifest_schema(); println!( "{}", serde_json::to_string_pretty(&schema).unwrap_or_default() ); } } Ok(StructuredOutput::Success) } /// Remove a component from the managed component module async fn remove_component( component_args: &ComponentArgs, 
registry: ComponentRegistry, ) -> Result<()> { let config = Self::resolve_config().await?; let registry = Self::resolve_registry(registry, &config)?; let components_root = components_root(component_args.module_path.as_deref(), &config)?; // Find the requested components let components = if component_args.all { registry .read_components() .await? .into_iter() .map(|c| c.component.name) .collect() } else { component_args.components.clone() }; for component_name in components { // Remove the component module _ = tokio::fs::remove_dir_all(&components_root.join(&component_name)).await; // Remove the module from the components mod.rs let mod_rs_path = components_root.join("mod.rs"); let mod_rs_content = tokio::fs::read_to_string(&mod_rs_path) .await .with_context(|| format!("Failed to read {}", mod_rs_path.display()))?; let mod_line = format!("pub mod {};\n", component_name); let new_mod_rs_content = mod_rs_content.replace(&mod_line, ""); tokio::fs::write(&mod_rs_path, new_mod_rs_content) .await .with_context(|| format!("Failed to write to {}", mod_rs_path.display()))?; } Ok(()) } /// Load the config async fn resolve_config() -> Result { let workspace = Workspace::current().await?; let crate_package = workspace.find_main_package(None)?; Ok(workspace .load_dioxus_config(crate_package, None)? 
.unwrap_or_default())
    }

    /// Resolve a registry from the config if none is provided
    fn resolve_registry(
        registry: ComponentRegistry,
        config: &DioxusConfig,
    ) -> Result<ComponentRegistry> {
        if !registry.is_default() {
            return Ok(registry);
        }
        Ok(config.components.registry.clone())
    }

    /// Add any rust dependencies required for a component
    async fn add_rust_dependencies(dependencies: &HashSet<CargoDependency>) -> Result<()> {
        for dep in dependencies {
            let status = Command::from(dep.add_command())
                .status()
                .await
                .with_context(|| {
                    format!(
                        "Failed to run command to add dependency {} to Cargo.toml",
                        dep.name()
                    )
                })?;
            if !status.success() {
                bail!("Failed to add dependency {} to Cargo.toml", dep.name());
            }
        }
        Ok(())
    }
}

/// Arguments for the default or custom remote registry
/// If both values are None, the default registry will be used
#[derive(Clone, Debug, Parser, Default, Serialize, Deserialize, JsonSchema)]
pub struct RemoteComponentRegistry {
    /// The url of the component registry
    #[arg(long)]
    git: Option<String>,

    /// The revision of the component registry
    #[arg(long)]
    rev: Option<String>,
}

impl RemoteComponentRegistry {
    /// Resolve the path to the component registry, downloading the remote registry if needed
    async fn resolve(&self) -> Result<PathBuf> {
        // If a git url is provided use that (plus optional rev)
        // Otherwise use the built-in registry
        let (git, rev) = self.resolve_or_default();
        let repo_dir = Workspace::component_cache_path(&git, rev.as_deref());

        // If the repo already exists, use it otherwise clone it
        if !repo_dir.exists() {
            // If offline, we cannot download the registry
            if verbosity_or_default().offline {
                bail!("Cannot download component registry '{}' while offline", git);
            }

            // Make sure the parent directory exists
            tokio::fs::create_dir_all(&repo_dir).await?;

            // git2 is blocking, so run the clone off the async runtime.
            tokio::task::spawn_blocking({
                let git = git.clone();
                let repo_dir = repo_dir.clone();
                move || {
                    println!("Downloading {git}...");
                    // Clone the repo
                    let repo = Repository::clone(&git, repo_dir)?;
                    // If a rev is provided, checkout that rev
                    if let Some(rev) = &rev {
                        Self::checkout_rev(&repo, &git, rev)?;
                    }
                    anyhow::Ok(())
                }
            })
            .await??;
        }

        Ok(repo_dir)
    }

    /// Update the component registry by fetching the latest changes from the remote
    async fn update(&self) -> Result<()> {
        let (git, rev) = self.resolve_or_default();

        // Make sure the repo is cloned
        let path = self.resolve().await?;

        // Open the repo and update it
        tokio::task::spawn_blocking({
            let path = path.clone();
            move || {
                let repo = Repository::open(path)?;
                let mut remote = repo.find_remote("origin")?;
                // Fetch all remote branches with the same name as local branches
                remote.fetch(&["refs/heads/*:refs/heads/*"], None, None)?;
                // If a rev is provided, checkout that rev
                if let Some(rev) = &rev {
                    Self::checkout_rev(&repo, &git, rev)?;
                }
                // Otherwise, just checkout the latest commit on the default branch
                else {
                    let head = repo.head()?;
                    let branch = head.shorthand().unwrap_or("main");
                    let oid = repo.refname_to_id(&format!("refs/remotes/origin/{branch}"))?;
                    // Propagate a failed object lookup instead of panicking — the closure
                    // already returns an anyhow::Result, so `?` surfaces the git2 error to
                    // the caller (previously this was `.unwrap()`).
                    let object = repo.find_object(oid, None)?;
                    repo.reset(&object, git2::ResetType::Hard, None)?;
                }
                anyhow::Ok(())
            }
        })
        .await??;

        Ok(())
    }

    /// If a git url is provided use that (plus optional rev)
    /// Otherwise use the built-in registry
    fn resolve_or_default(&self) -> (String, Option<String>) {
        if let Some(git) = &self.git {
            (git.clone(), self.rev.clone())
        } else {
            ("https://github.com/dioxuslabs/components".into(), None)
        }
    }

    /// Checkout the given rev in the given repo
    fn checkout_rev(repo: &Repository, git: &str, rev: &str) -> Result<()> {
        let (object, reference) = repo
            .revparse_ext(rev)
            .with_context(|| format!("Failed to find revision '{}' in '{}'", rev, git))?;
        repo.checkout_tree(&object, None)?;
        // Point HEAD at the resolved reference, or detach it for bare commits/tags.
        if let Some(gref) = reference {
            if let Some(name) = gref.name() {
                repo.set_head(name)?;
            }
        } else {
            repo.set_head_detached(object.id())?;
        }
        Ok(())
    }
}

/// Arguments for a component registry
/// Either a path to a local directory or a remote git repo (with optional rev)
#[derive(Clone, Debug, Parser, Default, Serialize, 
Deserialize, JsonSchema)] pub struct ComponentRegistry { /// The remote repo args #[clap(flatten)] #[serde(flatten)] remote: RemoteComponentRegistry, /// The path to the components directory #[arg(long)] path: Option, } impl ComponentRegistry { /// Resolve the path to the component registry, downloading the remote registry if needed async fn resolve(&self) -> Result { // If a path is provided, use that if let Some(path) = &self.path { return Ok(PathBuf::from(path)); } // Otherwise use the remote/default registry self.remote.resolve().await } /// Read all components that are part of this registry async fn read_components(&self) -> Result> { let path = self.resolve().await?; let root = read_component(&path).await?; let mut components = discover_components(root).await?; // Filter out any virtual components with members components.retain(|c| c.members.is_empty()); Ok(components) } /// Check if this is the default registry fn is_default(&self) -> bool { self.path.is_none() && self.remote.git.is_none() && self.remote.rev.is_none() } } /// A component that has been downloaded and resolved at a specific path #[derive(Clone, Debug, PartialEq, Eq, Hash)] struct ResolvedComponent { path: PathBuf, component: Component, } impl ResolvedComponent { /// Get the absolute paths to members of this component fn member_paths(&self) -> Vec { self.component .members .iter() .map(|m| self.path.join(m)) .collect() } } impl Deref for ResolvedComponent { type Target = Component; fn deref(&self) -> &Self::Target { &self.component } } // Find a component by name in a list of components fn find_component(components: &[ResolvedComponent], component: &str) -> Result { components .iter() .find(|c| c.name == component) .cloned() .ok_or_else(|| anyhow::anyhow!("Component '{}' not found in registry", component)) } /// Get the path to the components module, defaulting to src/components fn components_root(module_path: Option<&Path>, config: &DioxusConfig) -> Result { if let Some(module_path) = 
module_path { return Ok(PathBuf::from(module_path)); } let root = Workspace::crate_root_from_path()?; if let Some(component_path) = &config.components.components_dir { return Ok(root.join(component_path)); } Ok(root.join("src").join("components")) } /// Get the path to the global assets directory, defaulting to assets async fn global_assets_root(assets_path: Option<&Path>, config: &DioxusConfig) -> Result { if let Some(assets_path) = assets_path { return Ok(PathBuf::from(assets_path)); } if let Some(asset_dir) = &config.application.asset_dir { return Ok(asset_dir.clone()); } let root = Workspace::crate_root_from_path()?; Ok(root.join("assets")) } /// How should we handle the component if it already exists #[derive(Clone, Copy, Debug)] enum ComponentExistsBehavior { /// Return an error (default) Error, /// Return early for component dependencies Return, /// Overwrite the existing component Overwrite, } /// Add a component to the managed component module async fn add_component( registry_root: &Path, assets_path: Option<&Path>, component_path: Option<&Path>, component: &ResolvedComponent, behavior: ComponentExistsBehavior, config: &DioxusConfig, ) -> Result<()> { // Copy the folder content to the components directory let components_root = components_root(component_path, config)?; let copied = copy_component_files( &component.path, &components_root.join(&component.name), &component.exclude, behavior, ) .await?; if !copied { debug!( "Component '{}' already exists, skipping copy", component.name ); return Ok(()); } // Copy any global assets let assets_root = global_assets_root(assets_path, config).await?; copy_global_assets(registry_root, &assets_root, component).await?; // Add the module to the components mod.rs let mod_rs_path = components_root.join("mod.rs"); let mut mod_rs = tokio::fs::OpenOptions::new() .append(true) .read(true) .open(&mod_rs_path) .await .with_context(|| format!("Failed to open {}", mod_rs_path.display()))?; // Check if the module already exists 
let mod_rs_content = tokio::fs::read_to_string(&mod_rs_path)
        .await
        .with_context(|| format!("Failed to read {}", mod_rs_path.display()))?;
    // Substring check: `mod name;` also matches an existing `pub mod name;` line
    if !mod_rs_content.contains(&format!("mod {};", component.name)) {
        let mod_line = format!("pub mod {};\n", component.name);
        tokio::io::AsyncWriteExt::write_all(&mut mod_rs, mod_line.as_bytes())
            .await
            .with_context(|| format!("Failed to write to {}", mod_rs_path.display()))?;
    }

    Ok(())
}

/// Copy the component files. Returns true if the component was copied, false if it was skipped.
async fn copy_component_files(
    src: &Path,
    dest: &Path,
    exclude: &[String],
    behavior: ComponentExistsBehavior,
) -> Result {
    // Collect the immediate children of a directory into a Vec
    async fn read_dir_paths(src: &Path) -> Result> {
        let mut entries = tokio::fs::read_dir(src).await?;
        let mut paths = vec![];
        while let Some(entry) = entries.next_entry().await? {
            paths.push(entry.path());
        }
        Ok(paths)
    }

    // If the directory already exists, return an error, return silently or overwrite it depending on the behavior
    if dest.exists() {
        match behavior {
            // The default behavior is to return an error
            ComponentExistsBehavior::Error => {
                bail!("Destination directory '{}' already exists", dest.display());
            }
            // For dependencies, we return early
            ComponentExistsBehavior::Return => {
                debug!(
                    "Destination directory '{}' already exists, returning early",
                    dest.display()
                );
                return Ok(false);
            }
            // If the force flag is set, we overwrite the existing component
            ComponentExistsBehavior::Overwrite => {
                debug!(
                    "Destination directory '{}' already exists, overwriting",
                    dest.display()
                );
                tokio::fs::remove_dir_all(dest).await?;
            }
        }
    }

    tokio::fs::create_dir_all(dest).await?;

    // Canonicalize the exclude entries up front so they can be compared
    // against canonicalized source paths during the walk
    let exclude = exclude
        .iter()
        .map(|exclude| dunce::canonicalize(src.join(exclude)))
        .collect::, _>>()?;

    // The set of tasks to read directories
    let mut read_folder_tasks = JoinSet::new();
    // The set of tasks to copy files
    let mut copy_tasks = JoinSet::new();

    // Start by reading the source directory
    let src = src.to_path_buf();
    read_folder_tasks.spawn({
        let src =
src.clone();
        async move { read_dir_paths(&src).await }
    });

    // Continue while there are read tasks
    while let Some(res) = read_folder_tasks.join_next().await {
        let paths = res??;
        for path in paths {
            let path = dunce::canonicalize(path)?;

            // Skip excluded paths (exact matches and anything nested under one)
            if exclude.iter().any(|e| *e == path || path.starts_with(e)) {
                debug!("Excluding path {}", path.display());
                continue;
            }

            // Find the path in the destination directory
            let Ok(path_relative_to_src) = path.strip_prefix(&src) else {
                continue;
            };
            let dest = dest.join(path_relative_to_src);

            // If it's a directory, read it, otherwise copy the file
            if path.is_dir() {
                read_folder_tasks.spawn(async move { read_dir_paths(&path).await });
            } else {
                copy_tasks.spawn(async move {
                    // Create the parent directory lazily before the copy
                    if let Some(parent) = dest.parent() {
                        if !parent.exists() {
                            tokio::fs::create_dir_all(parent).await?;
                        }
                    }
                    tokio::fs::copy(&path, &dest).await
                });
            }
        }
    }

    // Wait for all copy tasks to finish
    while let Some(res) = copy_tasks.join_next().await {
        res??;
    }

    Ok(true)
}

/// Make sure the components directory and a mod.rs file exists. Returns true if the directory was created, false if it already existed.
async fn ensure_components_module_exists(components_dir: &Path) -> Result {
    // An existing directory is treated as already set up
    if components_dir.exists() {
        return Ok(false);
    }
    tokio::fs::create_dir_all(&components_dir).await?;
    let mod_rs_path = components_dir.join("mod.rs");
    if mod_rs_path.exists() {
        return Ok(false);
    }
    tokio::fs::write(&mod_rs_path, "// AUTOGENERATED Components module\n").await?;
    Ok(true)
}

/// Read a component from the given path
///
/// Parses `<path>/component.json` and records the canonicalized component
/// directory alongside the parsed manifest.
async fn read_component(path: &Path) -> Result {
    let json_path = path.join("component.json");
    let bytes = tokio::fs::read(&json_path).await.with_context(|| {
        format!(
            "Failed to open component manifest at {}",
            json_path.display()
        )
    })?;
    let component = serde_json::from_slice(&bytes)?;
    let absolute_path = dunce::canonicalize(path)?;
    Ok(ResolvedComponent {
        path: absolute_path,
        component,
    })
}

/// Recursively discover all components starting from the root component
///
/// Breadth-style walk: member paths are queued, read concurrently via a
/// `JoinSet`, and each result's own members are queued in turn until the
/// set drains.
async fn discover_components(root: ResolvedComponent) -> Result> {
    // Create a queue of members to read
    let mut queue = root.member_paths();
    // The list of discovered components
    let mut components = vec![root];
    // The set of pending read tasks
    let mut pending = JoinSet::new();
    loop {
        // First, spawn tasks for all queued paths
        while let Some(root_path) = queue.pop() {
            pending.spawn(async move { read_component(&root_path).await });
        }
        // Then try to join the next task; an empty set means we're done
        let Some(component) = pending.join_next().await else {
            break;
        };
        let component = component??;
        // And add the result to the queue and list
        queue.extend(component.member_paths());
        components.push(component);
    }
    Ok(components)
}

/// Copy any global assets for the component
///
/// Each entry in the component's `global_assets` list is resolved relative to
/// the component directory, validated to live inside the registry, and copied
/// into the assets root under its original file name.
async fn copy_global_assets(
    registry_root: &Path,
    assets_root: &Path,
    component: &ResolvedComponent,
) -> Result<()> {
    let canonical_registry_root = dunce::canonicalize(registry_root)?;
    for path in &component.global_assets {
        let src = component.path.join(path);
        let absolute_source = dunce::canonicalize(&src).with_context(|| {
            format!(
                "Failed to find global asset '{}' for component '{}'",
                src.display(),
                component.name
            )
        })?;
        // Make sure the source is inside the component registry somewhere
        if !absolute_source.starts_with(&canonical_registry_root) {
            bail!(
                "Cannot copy global asset '{}' for component '{}' because it is outside of the component registry '{}'",
                absolute_source.display(),
                component.name,
                canonical_registry_root.display()
            );
        }
        // Copy the file into the assets directory, preserving the file name and extension
        let dest = assets_root.join(
            absolute_source
                .components()
                .next_back()
                .context("Global assets must have at least one file component")?,
        );
        // Make sure the asset dir exists
        if let Some(parent) = dest.parent() {
            if !parent.exists() {
                tokio::fs::create_dir_all(parent).await?;
            }
        }
        tokio::fs::copy(&src, &dest).await.with_context(|| {
            format!(
                "Failed to copy global asset from {} to {}",
                src.display(),
                dest.display()
            )
        })?;
    }
    Ok(())
}

================================================
FILE: packages/cli/src/cli/config.rs
================================================
use super::*;
use crate::{CliSettings, TraceSrc, Workspace};

/// Dioxus config file controls
#[derive(Clone, Debug, Deserialize, Subcommand)]
pub(crate) enum Config {
    /// Init `Dioxus.toml` for project/folder.
    Init {
        /// Init project name
        name: String,
        /// Cover old config
        #[clap(long)]
        #[serde(default)]
        force: bool,
    },
    /// Format print Dioxus config.
    FormatPrint {},
    /// Create a custom html file.
    CustomHtml {},
    /// Set CLI settings.
    #[command(subcommand)]
    Set(Setting),
    /// Generate JSON schema for Dioxus.toml configuration.
    /// Useful for IDE autocomplete and validation.
    Schema {
        /// Output file path. If not provided, prints to stdout.
        #[clap(long, short)]
        out: Option,
    },
}

#[derive(Debug, Clone, Copy, Deserialize, Subcommand)]
pub(crate) enum Setting {
    /// Set the value of the always-hot-reload setting.
    AlwaysHotReload { value: BoolValue },
    /// Set the value of the always-open-browser setting.
AlwaysOpenBrowser { value: BoolValue },
    /// Set the value of the always-on-top desktop setting.
    AlwaysOnTop { value: BoolValue },
    /// Set the interval that file changes are polled on WSL for hot reloading.
    WSLFilePollInterval { value: u16 },
    /// Disable the built-in telemetry for the CLI
    DisableTelemetry { value: BoolValue },
}

// Render each setting as its kebab-case CLI name; used in the log line
// emitted after a setting is modified.
impl Display for Setting {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::AlwaysHotReload { value: _ } => write!(f, "always-hot-reload"),
            Self::AlwaysOpenBrowser { value: _ } => write!(f, "always-open-browser"),
            Self::AlwaysOnTop { value: _ } => write!(f, "always-on-top"),
            Self::WSLFilePollInterval { value: _ } => write!(f, "wsl-file-poll-interval"),
            Self::DisableTelemetry { value: _ } => write!(f, "disable-telemetry"),
        }
    }
}

// Clap complains if we use a bool directly and I can't find much info about it.
// "Argument 'value` is positional and it must take a value but action is SetTrue"
#[derive(Debug, Clone, Copy, serde::Serialize, Deserialize, clap::ValueEnum)]
pub(crate) enum BoolValue {
    True,
    False,
}

// NOTE(review): extraction stripped the generic here; presumably
// `impl From<BoolValue> for bool` — confirm against the original source.
impl From for bool {
    fn from(value: BoolValue) -> Self {
        match value {
            BoolValue::True => true,
            BoolValue::False => false,
        }
    }
}

impl Config {
    /// Execute the selected `dx config` subcommand.
    pub(crate) async fn config(self) -> Result {
        let crate_root = Workspace::crate_root_from_path()?;
        match self {
            Config::Init { name, force } => {
                let conf_path = crate_root.join("Dioxus.toml");
                // Refuse to clobber an existing config unless --force was passed
                if conf_path.is_file() && !force {
                    tracing::warn!(
                        "config file `Dioxus.toml` already exist, use `--force` to overwrite it."
);
                    return Ok(StructuredOutput::Success);
                }
                // Render the bundled template with the project name substituted
                let mut file = File::create(conf_path)?;
                let content = String::from(include_str!("../../assets/dioxus.toml"))
                    .replace("{{project-name}}", &name);
                file.write_all(content.as_bytes())?;
                tracing::info!(dx_src = ?TraceSrc::Dev, "🚩 Init config file completed.");
            }
            Config::FormatPrint {} => {
                let workspace = Workspace::current().await?;
                tracing::info!("{:#?}", workspace.settings);
            }
            Config::CustomHtml {} => {
                // Write the bundled dev index.html into the crate root
                let html_path = crate_root.join("index.html");
                let mut file = File::create(html_path)?;
                let content = include_str!("../../assets/web/dev.index.html");
                file.write_all(content.as_bytes())?;
                tracing::info!(dx_src = ?TraceSrc::Dev, "🚩 Create custom html file done.");
            }
            // Handle CLI settings.
            Config::Set(setting) => {
                CliSettings::modify_settings(|settings| match setting {
                    Setting::AlwaysOnTop { value } => settings.always_on_top = Some(value.into()),
                    Setting::AlwaysHotReload { value } => {
                        settings.always_hot_reload = Some(value.into())
                    }
                    Setting::AlwaysOpenBrowser { value } => {
                        settings.always_open_browser = Some(value.into())
                    }
                    Setting::WSLFilePollInterval { value } => {
                        settings.wsl_file_poll_interval = Some(value)
                    }
                    Setting::DisableTelemetry { value } => {
                        settings.disable_telemetry = Some(value.into());
                    }
                })?;
                tracing::info!(dx_src = ?TraceSrc::Dev, "🚩 CLI setting `{setting}` has been set.");
            }
            Config::Schema { out } => {
                let schema = crate::config::generate_manifest_schema();
                let json = serde_json::to_string_pretty(&schema)?;
                match out {
                    // Write to the given file (with trailing newline), or print to stdout
                    Some(path) => {
                        std::fs::write(&path, format!("{json}\n"))?;
                        tracing::info!(dx_src = ?TraceSrc::Dev, "Schema written to {}", path.display());
                    }
                    None => println!("{json}"),
                }
            }
        }
        Ok(StructuredOutput::Success)
    }
}

================================================
FILE: packages/cli/src/cli/create.rs
================================================
use super::*;
use crate::TraceSrc;
use anyhow::{bail, Context};
use cargo_generate::{GenerateArgs, TemplatePath, Vcs};
use std::{fs, path::Path};
// The template used by `dx new` when no --template is supplied
pub(crate) static DEFAULT_TEMPLATE: &str = "gh:dioxuslabs/dioxus-template";

// NOTE(review): this struct is cut off at the end of this chunk; the remaining
// fields are not visible here.
#[derive(Clone, Debug, Default, Deserialize, Parser)]
#[clap(name = "new")]
pub struct Create {
    /// Create a new Dioxus project at PATH
    pub path: PathBuf,

    /// Project name. Defaults to directory name
    #[arg(short, long)]
    pub name: Option,

    /// Template path
    #[clap(short, long)]
    pub template: Option,

    /// Branch to select when using `template` from a git repository.
    /// Mutually exclusive with: `--revision`, `--tag`.
    #[clap(long, conflicts_with_all(["revision", "tag"]))]
    pub branch: Option,

    /// A commit hash to select when using `template` from a git repository.
    /// Mutually exclusive with: `--branch`, `--tag`.
    #[clap(long, conflicts_with_all(["branch", "tag"]))]
    pub revision: Option,

    /// Tag to select when using `template` from a git repository.
    /// Mutually exclusive with: `--branch`, `--revision`.
    #[clap(long, conflicts_with_all(["branch", "revision"]))]
    pub tag: Option,

    /// Specify a sub-template within the template repository to be used as the actual template
    #[clap(long)]
    pub subtemplate: Option,

    /// Pass `