Full Code of dani-garcia/vaultwarden for AI

main 2b3736802d5c cached
513 files
2.7 MB
729.1k tokens
2375 symbols
1 request
Download .txt
Showing preview only (2,901K chars total). Download the full file or copy to clipboard to get everything.
Repository: dani-garcia/vaultwarden
Branch: main
Commit: 2b3736802d5c
Files: 513
Total size: 2.7 MB

Directory structure:
gitextract_kb30epk3/

├── .dockerignore
├── .editorconfig
├── .gitattributes
├── .github/
│   ├── CODEOWNERS
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   └── config.yml
│   └── workflows/
│       ├── build.yml
│       ├── check-templates.yml
│       ├── hadolint.yml
│       ├── release.yml
│       ├── releasecache-cleanup.yml
│       ├── trivy.yml
│       ├── typos.yml
│       └── zizmor.yml
├── .gitignore
├── .hadolint.yaml
├── .pre-commit-config.yaml
├── .typos.toml
├── Cargo.toml
├── LICENSE.txt
├── README.md
├── SECURITY.md
├── build.rs
├── diesel.toml
├── docker/
│   ├── DockerSettings.yaml
│   ├── Dockerfile.alpine
│   ├── Dockerfile.debian
│   ├── Dockerfile.j2
│   ├── Makefile
│   ├── README.md
│   ├── bake.sh
│   ├── bake_env.sh
│   ├── docker-bake.hcl
│   ├── healthcheck.sh
│   ├── podman-bake.sh
│   ├── render_template
│   └── start.sh
├── macros/
│   ├── Cargo.toml
│   └── src/
│       └── lib.rs
├── migrations/
│   ├── mysql/
│   │   ├── 2018-01-14-171611_create_tables/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-02-17-205753_create_collections_and_orgs/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-04-27-155151_create_users_ciphers/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-05-08-161616_create_collection_cipher_map/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-05-25-232323_update_attachments_reference/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-06-01-112529_update_devices_twofactor_remember/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-07-11-181453_create_u2f_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-08-27-172114_update_ciphers/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-09-10-111213_add_invites/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-09-19-144557_add_kdf_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-11-27-152651_add_att_key_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-05-26-216651_rename_key_and_type_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-10-10-083032_add_column_to_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-11-17-011009_add_email_verification/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-03-13-205045_add_policy_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-04-09-235005_add_cipher_delete_date/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-07-01-214531_add_hide_passwords/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-08-02-025025_add_favorites_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-11-30-224000_add_user_enabled/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-12-09-173101_add_stamp_exception/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-11-190243_add_sends/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-04-30-233251_add_reprompt/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-05-11-205202_add_hide_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-07-01-203140_add_password_reset_keys/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-08-30-193501_create_emergency_access/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-10-24-164321_add_2fa_incomplete/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-01-17-234911_add_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-03-02-210038_update_devices_primary_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-07-27-110000_add_group_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-10-18-170602_add_events/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-06-151600_add_reset_password_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-11-205851_add_avatar_color/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-31-222222_add_argon2/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-02-18-125735_push_uuid_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-02-200424_create_organization_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-17-200424_create_auth_requests_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-28-133700_add_collection_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-01-170620_update_auth_request_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-02-212336_move_user_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-10-133000_add_sso/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-10-21-221242_add_cipher_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-01-12-210182_change_attachment_size/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-135828_change_time_stamp_data_type/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-170000_add_state_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-06-170000_add_sso_users/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-13-170000_sso_users_cascade/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-06-05-131359_add_2fa_duo_store/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-09-04-091351_use_device_type_for_mails/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2025-01-09-172300_add_manage/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   └── 2025-08-20-120000_sso_nonce_to_auth/
│   │       ├── down.sql
│   │       └── up.sql
│   ├── postgresql/
│   │   ├── 2019-09-12-100000_create_tables/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-09-16-150000_fix_attachments/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-10-10-083032_add_column_to_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-11-17-011009_add_email_verification/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-03-13-205045_add_policy_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-04-09-235005_add_cipher_delete_date/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-07-01-214531_add_hide_passwords/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-08-02-025025_add_favorites_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-11-30-224000_add_user_enabled/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-12-09-173101_add_stamp_exception/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-11-190243_add_sends/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-15-163412_rename_send_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-04-30-233251_add_reprompt/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-05-11-205202_add_hide_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-07-01-203140_add_password_reset_keys/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-08-30-193501_create_emergency_access/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-10-24-164321_add_2fa_incomplete/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-01-17-234911_add_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-03-02-210038_update_devices_primary_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-07-27-110000_add_group_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-10-18-170602_add_events/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-06-151600_add_reset_password_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-11-205851_add_avatar_color/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-31-222222_add_argon2/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-02-18-125735_push_uuid_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-02-200424_create_organization_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-17-200424_create_auth_requests_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-28-133700_add_collection_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-01-170620_update_auth_request_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-02-212336_move_user_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-10-133000_add_sso/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-10-21-221242_add_cipher_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-01-12-210182_change_attachment_size/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-135953_change_time_stamp_data_type/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-170000_add_state_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-06-170000_add_sso_users/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-13-170000_sso_users_cascade/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-06-05-131359_add_2fa_duo_store/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-09-04-091351_use_device_type_for_mails/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2025-01-09-172300_add_manage/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   └── 2025-08-20-120000_sso_nonce_to_auth/
│   │       ├── down.sql
│   │       └── up.sql
│   └── sqlite/
│       ├── 2018-01-14-171611_create_tables/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-02-17-205753_create_collections_and_orgs/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-04-27-155151_create_users_ciphers/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-05-08-161616_create_collection_cipher_map/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-05-25-232323_update_attachments_reference/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-06-01-112529_update_devices_twofactor_remember/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-07-11-181453_create_u2f_twofactor/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-08-27-172114_update_ciphers/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-09-10-111213_add_invites/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-09-19-144557_add_kdf_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-11-27-152651_add_att_key_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-05-26-216651_rename_key_and_type_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-10-10-083032_add_column_to_twofactor/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-11-17-011009_add_email_verification/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-03-13-205045_add_policy_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-04-09-235005_add_cipher_delete_date/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-07-01-214531_add_hide_passwords/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-08-02-025025_add_favorites_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-11-30-224000_add_user_enabled/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-12-09-173101_add_stamp_exception/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-03-11-190243_add_sends/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-03-15-163412_rename_send_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-04-30-233251_add_reprompt/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-05-11-205202_add_hide_email/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-07-01-203140_add_password_reset_keys/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-08-30-193501_create_emergency_access/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-10-24-164321_add_2fa_incomplete/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-01-17-234911_add_api_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-03-02-210038_update_devices_primary_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-07-27-110000_add_group_support/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-10-18-170602_add_events/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-06-151600_add_reset_password_support/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-11-205851_add_avatar_color/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-31-222222_add_argon2/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-02-18-125735_push_uuid_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-02-200424_create_organization_api_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-17-200424_create_auth_requests_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-28-133700_add_collection_external_id/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-01-170620_update_auth_request_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-02-212336_move_user_external_id/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-10-133000_add_sso/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-10-21-221242_add_cipher_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-01-12-210182_change_attachment_size/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-14-140000_change_time_stamp_data_type/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-14-170000_add_state_to_sso_nonce/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-03-06-170000_add_sso_users/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-03-13-170000_sso_users_cascade/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-06-05-131359_add_2fa_duo_store/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-09-04-091351_use_device_type_for_mails/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2025-01-09-172300_add_manage/
│       │   ├── down.sql
│       │   └── up.sql
│       └── 2025-08-20-120000_sso_nonce_to_auth/
│           ├── down.sql
│           └── up.sql
├── playwright/
│   ├── .gitignore
│   ├── README.md
│   ├── compose/
│   │   ├── keycloak/
│   │   │   ├── Dockerfile
│   │   │   └── setup.sh
│   │   ├── playwright/
│   │   │   └── Dockerfile
│   │   └── warden/
│   │       ├── Dockerfile
│   │       └── build.sh
│   ├── docker-compose.yml
│   ├── global-setup.ts
│   ├── global-utils.ts
│   ├── package.json
│   ├── playwright.config.ts
│   ├── test.env
│   └── tests/
│       ├── collection.spec.ts
│       ├── login.smtp.spec.ts
│       ├── login.spec.ts
│       ├── organization.smtp.spec.ts
│       ├── organization.spec.ts
│       ├── setups/
│       │   ├── 2fa.ts
│       │   ├── db-setup.ts
│       │   ├── db-teardown.ts
│       │   ├── db-test.ts
│       │   ├── orgs.ts
│       │   ├── sso-setup.ts
│       │   ├── sso-teardown.ts
│       │   ├── sso.ts
│       │   └── user.ts
│       ├── sso_login.smtp.spec.ts
│       ├── sso_login.spec.ts
│       ├── sso_organization.smtp.spec.ts
│       └── sso_organization.spec.ts
├── rust-toolchain.toml
├── rustfmt.toml
├── src/
│   ├── api/
│   │   ├── admin.rs
│   │   ├── core/
│   │   │   ├── accounts.rs
│   │   │   ├── ciphers.rs
│   │   │   ├── emergency_access.rs
│   │   │   ├── events.rs
│   │   │   ├── folders.rs
│   │   │   ├── mod.rs
│   │   │   ├── organizations.rs
│   │   │   ├── public.rs
│   │   │   ├── sends.rs
│   │   │   └── two_factor/
│   │   │       ├── authenticator.rs
│   │   │       ├── duo.rs
│   │   │       ├── duo_oidc.rs
│   │   │       ├── email.rs
│   │   │       ├── mod.rs
│   │   │       ├── protected_actions.rs
│   │   │       ├── webauthn.rs
│   │   │       └── yubikey.rs
│   │   ├── icons.rs
│   │   ├── identity.rs
│   │   ├── mod.rs
│   │   ├── notifications.rs
│   │   ├── push.rs
│   │   └── web.rs
│   ├── auth.rs
│   ├── config.rs
│   ├── crypto.rs
│   ├── db/
│   │   ├── mod.rs
│   │   ├── models/
│   │   │   ├── attachment.rs
│   │   │   ├── auth_request.rs
│   │   │   ├── cipher.rs
│   │   │   ├── collection.rs
│   │   │   ├── device.rs
│   │   │   ├── emergency_access.rs
│   │   │   ├── event.rs
│   │   │   ├── favorite.rs
│   │   │   ├── folder.rs
│   │   │   ├── group.rs
│   │   │   ├── mod.rs
│   │   │   ├── org_policy.rs
│   │   │   ├── organization.rs
│   │   │   ├── send.rs
│   │   │   ├── sso_auth.rs
│   │   │   ├── two_factor.rs
│   │   │   ├── two_factor_duo_context.rs
│   │   │   ├── two_factor_incomplete.rs
│   │   │   └── user.rs
│   │   ├── query_logger.rs
│   │   └── schema.rs
│   ├── error.rs
│   ├── http_client.rs
│   ├── mail.rs
│   ├── main.rs
│   ├── ratelimit.rs
│   ├── sso.rs
│   ├── sso_client.rs
│   ├── static/
│   │   ├── global_domains.json
│   │   ├── scripts/
│   │   │   ├── 404.css
│   │   │   ├── admin.css
│   │   │   ├── admin.js
│   │   │   ├── admin_diagnostics.js
│   │   │   ├── admin_organizations.js
│   │   │   ├── admin_settings.js
│   │   │   ├── admin_users.js
│   │   │   ├── bootstrap.bundle.js
│   │   │   ├── bootstrap.css
│   │   │   ├── datatables.css
│   │   │   ├── datatables.js
│   │   │   ├── jdenticon-3.3.0.js
│   │   │   └── jquery-4.0.0.slim.js
│   │   └── templates/
│   │       ├── 404.hbs
│   │       ├── admin/
│   │       │   ├── base.hbs
│   │       │   ├── diagnostics.hbs
│   │       │   ├── login.hbs
│   │       │   ├── organizations.hbs
│   │       │   ├── settings.hbs
│   │       │   └── users.hbs
│   │       ├── email/
│   │       │   ├── admin_reset_password.hbs
│   │       │   ├── admin_reset_password.html.hbs
│   │       │   ├── change_email.hbs
│   │       │   ├── change_email.html.hbs
│   │       │   ├── change_email_existing.hbs
│   │       │   ├── change_email_existing.html.hbs
│   │       │   ├── change_email_invited.hbs
│   │       │   ├── change_email_invited.html.hbs
│   │       │   ├── delete_account.hbs
│   │       │   ├── delete_account.html.hbs
│   │       │   ├── email_footer.hbs
│   │       │   ├── email_footer_text.hbs
│   │       │   ├── email_header.hbs
│   │       │   ├── emergency_access_invite_accepted.hbs
│   │       │   ├── emergency_access_invite_accepted.html.hbs
│   │       │   ├── emergency_access_invite_confirmed.hbs
│   │       │   ├── emergency_access_invite_confirmed.html.hbs
│   │       │   ├── emergency_access_recovery_approved.hbs
│   │       │   ├── emergency_access_recovery_approved.html.hbs
│   │       │   ├── emergency_access_recovery_initiated.hbs
│   │       │   ├── emergency_access_recovery_initiated.html.hbs
│   │       │   ├── emergency_access_recovery_rejected.hbs
│   │       │   ├── emergency_access_recovery_rejected.html.hbs
│   │       │   ├── emergency_access_recovery_reminder.hbs
│   │       │   ├── emergency_access_recovery_reminder.html.hbs
│   │       │   ├── emergency_access_recovery_timed_out.hbs
│   │       │   ├── emergency_access_recovery_timed_out.html.hbs
│   │       │   ├── incomplete_2fa_login.hbs
│   │       │   ├── incomplete_2fa_login.html.hbs
│   │       │   ├── invite_accepted.hbs
│   │       │   ├── invite_accepted.html.hbs
│   │       │   ├── invite_confirmed.hbs
│   │       │   ├── invite_confirmed.html.hbs
│   │       │   ├── new_device_logged_in.hbs
│   │       │   ├── new_device_logged_in.html.hbs
│   │       │   ├── protected_action.hbs
│   │       │   ├── protected_action.html.hbs
│   │       │   ├── pw_hint_none.hbs
│   │       │   ├── pw_hint_none.html.hbs
│   │       │   ├── pw_hint_some.hbs
│   │       │   ├── pw_hint_some.html.hbs
│   │       │   ├── register_verify_email.hbs
│   │       │   ├── register_verify_email.html.hbs
│   │       │   ├── send_2fa_removed_from_org.hbs
│   │       │   ├── send_2fa_removed_from_org.html.hbs
│   │       │   ├── send_emergency_access_invite.hbs
│   │       │   ├── send_emergency_access_invite.html.hbs
│   │       │   ├── send_org_invite.hbs
│   │       │   ├── send_org_invite.html.hbs
│   │       │   ├── send_single_org_removed_from_org.hbs
│   │       │   ├── send_single_org_removed_from_org.html.hbs
│   │       │   ├── smtp_test.hbs
│   │       │   ├── smtp_test.html.hbs
│   │       │   ├── sso_change_email.hbs
│   │       │   ├── sso_change_email.html.hbs
│   │       │   ├── twofactor_email.hbs
│   │       │   ├── twofactor_email.html.hbs
│   │       │   ├── verify_email.hbs
│   │       │   ├── verify_email.html.hbs
│   │       │   ├── welcome.hbs
│   │       │   ├── welcome.html.hbs
│   │       │   ├── welcome_must_verify.hbs
│   │       │   └── welcome_must_verify.html.hbs
│   │       └── scss/
│   │           ├── user.vaultwarden.scss.hbs
│   │           └── vaultwarden.scss.hbs
│   └── util.rs
└── tools/
    └── global_domains.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .dockerignore
================================================
# .dockerignore comments must start with '#'; '//' lines are parsed as
# (harmless but unintended) ignore patterns, not comments.

# Ignore everything
*

# Allow what is needed
!.git
!docker/healthcheck.sh
!docker/start.sh
!macros
!migrations
!src

!build.rs
!Cargo.lock
!Cargo.toml
!rustfmt.toml
!rust-toolchain.toml


================================================
FILE: .editorconfig
================================================
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

# Defaults for all files: Unix line endings, UTF-8 encoding
[*]
end_of_line = lf
charset = utf-8

# Rust and Python sources: 4-space indentation, trimmed whitespace, final newline
[*.{rs,py}]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true

# YAML files (CI workflows, templates): 2-space indentation
[*.{yml,yaml}]
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
insert_final_newline = true

# Makefile recipe lines require tab indentation
[Makefile]
indent_style = tab


================================================
FILE: .gitattributes
================================================
# Ignore vendored scripts in GitHub stats
src/static/scripts/* linguist-vendored



================================================
FILE: .github/CODEOWNERS
================================================
# Changes to repository meta files (GitHub config, CI workflows,
# issue templates, security policy) require review from the core
# maintainers listed below.
/.github @dani-garcia @BlackDex
/.github/** @dani-garcia @BlackDex
/.github/CODEOWNERS @dani-garcia @BlackDex
/.github/ISSUE_TEMPLATE/** @dani-garcia @BlackDex
/.github/workflows/** @dani-garcia @BlackDex
/SECURITY.md @dani-garcia @BlackDex


================================================
FILE: .github/FUNDING.yml
================================================
github: dani-garcia
liberapay: dani-garcia
custom: ["https://paypal.me/DaniGG"]


================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.yml
================================================
name: Bug Report
description: File a bug report
labels: ["bug"]
body:
  #
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!

        Please **do not** submit feature requests or ask for help on how to configure Vaultwarden here!

        The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas.

        Our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki/) has topics on how to configure Vaultwarden.

        Also, make sure you are running [![GitHub Release](https://img.shields.io/github/release/dani-garcia/vaultwarden.svg)](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden!

        Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors!
        See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page).

        > [!IMPORTANT]
        > ## :bangbang: Search for existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) regarding your topic before posting! :bangbang:
  #
  - type: checkboxes
    id: checklist
    attributes:
      label: Prerequisites
      description: Please confirm you have completed the following before submitting an issue!
      options:
        - label: I have searched the existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=)
          required: true
        - label: I have searched and read the [documentation](https://github.com/dani-garcia/vaultwarden/wiki/)
          required: true
  #
  - id: support-string
    type: textarea
    attributes:
      label: Vaultwarden Support String
      description: Output of the **Generate Support String** from the `/admin/diagnostics` page.
      placeholder: |
        1. Go to the Vaultwarden Admin of your instance https://example.domain.tld/admin/diagnostics
        2. Click on `Generate Support String`
        3. Click on `Copy To Clipboard`
        4. Replace this text by pasting it into this textarea without any modifications
    validations:
      required: true
  #
  - id: version
    type: input
    attributes:
      label: Vaultwarden Build Version
      description: What version of Vaultwarden are you running?
      placeholder: ex. v1.34.0 or v1.34.1-53f58b14
    validations:
      required: true
  #
  - id: deployment
    type: dropdown
    attributes:
      label: Deployment method
      description: How did you deploy Vaultwarden?
      multiple: false
      options:
        - Official Container Image
        - Build from source
        - OS Package (apt, yum/dnf, pacman, apk, nix, ...)
        - Manually Extracted from Container Image
        - Downloaded from GitHub Actions Release Workflow
        - Other method
    validations:
      required: true
  #
  - id: deployment-other
    type: textarea
    attributes:
      label: Custom deployment method
      description: If you deployed Vaultwarden via any other method, please describe how.
  #
  - id: reverse-proxy
    type: input
    attributes:
      label: Reverse Proxy
      description: Are you using a reverse proxy, if so which and what version?
      placeholder: ex. nginx 1.29.0, caddy 2.10.0, traefik 3.4.4, haproxy 3.2
    validations:
      required: true
  #
  - id: os
    type: dropdown
    attributes:
      label: Host/Server Operating System
      description: On what operating system are you running the Vaultwarden server?
      multiple: false
      options:
        - Linux
        - NAS/SAN
        - Cloud
        - Windows
        - macOS
        - Other
    validations:
      required: true
  #
  - id: os-version
    type: input
    attributes:
      label: Operating System Version
      description: What version of the operating system(s) are you seeing the problem on?
      placeholder: ex. Arch Linux, Ubuntu 24.04, Kubernetes, Synology DSM 7.x, Windows 11
  #
  - id: clients
    type: dropdown
    attributes:
      label: Clients
      description: What client(s) are you seeing the problem on?
      multiple: true
      options:
        - Web Vault
        - Browser Extension
        - CLI
        - Desktop
        - Android
        - iOS
    validations:
      required: true
  #
  - id: client-version
    type: input
    attributes:
      label: Client Version
      description: What version(s) of the client(s) are you seeing the problem on?
      placeholder: ex. CLI v2025.7.0, Firefox 140 - v2025.6.1
  #
  - id: reproduce
    type: textarea
    attributes:
      label: Steps To Reproduce
      description: How can we reproduce the behavior?
      value: |
        1. Go to '...'
        2. Click on '....'
        3. Scroll down to '....'
        4. Click on '...'
        5. Etc '...'
    validations:
      required: true
  #
  - id: expected
    type: textarea
    attributes:
      label: Expected Result
      description: A clear and concise description of what you expected to happen.
    validations:
      required: true
  #
  - id: actual
    type: textarea
    attributes:
      label: Actual Result
      description: A clear and concise description of what is happening.
    validations:
      required: true
  #
  - id: logs
    type: textarea
    attributes:
      label: Logs
      description: Provide the logs generated by Vaultwarden during the time this issue occurs.
      render: text
  #
  - id: screenshots
    type: textarea
    attributes:
      label: Screenshots or Videos
      description: If applicable, add screenshots and/or a short video to help explain your problem.
  #
  - id: additional-context
    type: textarea
    attributes:
      label: Additional Context
      description: Add any other context about the problem here.


================================================
FILE: .github/ISSUE_TEMPLATE/config.yml
================================================
blank_issues_enabled: false
contact_links:
  - name: GitHub Discussions for Vaultwarden
    url: https://github.com/dani-garcia/vaultwarden/discussions
    about: Use the discussions to request features or get help with usage/configuration.
  - name: Discourse forum for Vaultwarden
    url: https://vaultwarden.discourse.group/
    about: An alternative to the GitHub Discussions, if this is easier for you.


================================================
FILE: .github/workflows/build.yml
================================================
name: Build
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  push:
    paths:
      - ".github/workflows/build.yml"
      - "src/**"
      - "migrations/**"
      - "Cargo.*"
      - "build.rs"
      - "rust-toolchain.toml"
      - "rustfmt.toml"
      - "diesel.toml"
      - "docker/Dockerfile.j2"
      - "docker/DockerSettings.yaml"
      - "macros/**"

  pull_request:
    paths:
      - ".github/workflows/build.yml"
      - "src/**"
      - "migrations/**"
      - "Cargo.*"
      - "build.rs"
      - "rust-toolchain.toml"
      - "rustfmt.toml"
      - "diesel.toml"
      - "docker/Dockerfile.j2"
      - "docker/DockerSettings.yaml"
      - "macros/**"

defaults:
  run:
    shell: bash

jobs:
  build:
    name: Build and Test ${{ matrix.channel }}
    runs-on: ubuntu-24.04
    timeout-minutes: 120
    # Make warnings errors, this is to prevent warnings slipping through.
    # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
    env:
      RUSTFLAGS: "-Dwarnings"
    strategy:
      fail-fast: false
      matrix:
        channel:
          - "rust-toolchain" # The version defined in rust-toolchain
          - "msrv" # The supported MSRV

    steps:
      # Install dependencies
      - name: "Install dependencies Ubuntu"
        run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
      # End Install dependencies

      # Checkout the repo
      - name: "Checkout"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
          fetch-depth: 0
      # End Checkout the repo

      # Determine rust-toolchain version
      - name: Init Variables
        id: toolchain
        env:
          CHANNEL: ${{ matrix.channel }}
        run: |
          # Resolve the concrete Rust version for this matrix channel:
          # - 'rust-toolchain': parse the channel pin out of rust-toolchain.toml
          # - 'msrv': parse the rust-version (MSRV) field out of Cargo.toml
          # - anything else is taken verbatim as a toolchain name
          # (grep -oP with \K plus a lookahead extracts just the quoted version string)
          if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then
            RUST_TOOLCHAIN="$(grep -m1 -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
          elif [[ "${CHANNEL}" == 'msrv' ]]; then
            RUST_TOOLCHAIN="$(grep -m1 -oP 'rust-version\s.*"(\K.*?)(?=")' Cargo.toml)"
          else
            RUST_TOOLCHAIN="${CHANNEL}"
          fi
          # Expose the resolved version to later steps as a step output
          echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
      # End Determine rust-toolchain version


      # Only install the clippy and rustfmt components on the default rust-toolchain
      - name: "Install rust-toolchain version"
        uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # master @ Feb 13, 2026, 3:46 AM GMT+1
        if: ${{ matrix.channel == 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
          components: clippy, rustfmt
      # End Uses the rust-toolchain file to determine version


      # Install any other channel to be used, for which we do not execute clippy and rustfmt
      - name: "Install MSRV version"
        uses: dtolnay/rust-toolchain@efa25f7f19611383d5b0ccf2d1c8914531636bf9 # master @ Feb 13, 2026, 3:46 AM GMT+1
        if: ${{ matrix.channel != 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
      # End Install the MSRV channel to be used

      # Set the current matrix toolchain version as default
      - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
        env:
          RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
        run: |
          # Remove the rust-toolchain.toml
          rm rust-toolchain.toml
          # Set the default
          rustup default "${RUST_TOOLCHAIN}"

      # Show environment
      - name: "Show environment"
        run: |
          rustc -vV
          cargo -vV
      # End Show environment

      # Enable Rust Caching
      - name: Rust Caching
        uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
        with:
          # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
          # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
          # Only update when really needed! Use a <year>.<month>[.<inc>] format.
          prefix-key: "v2025.09-rust"
      # End Enable Rust Caching

      # Run cargo tests
      # First test all features together, afterwards test them separately.
      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
        id: test_sqlite_mysql_postgresql_mimalloc_s3
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3

      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
        id: test_sqlite_mysql_postgresql_mimalloc
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc

      - name: "test features: sqlite,mysql,postgresql"
        id: test_sqlite_mysql_postgresql
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features sqlite,mysql,postgresql

      - name: "test features: sqlite"
        id: test_sqlite
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features sqlite

      - name: "test features: mysql"
        id: test_mysql
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features mysql

      - name: "test features: postgresql"
        id: test_postgresql
        if: ${{ !cancelled() }}
        run: |
          cargo test --profile ci --features postgresql
      # End Run cargo tests


      # Run cargo clippy, and fail on warnings
      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
        id: clippy
        if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
        run: |
          cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
      # End Run cargo clippy


      # Run cargo fmt (Only run on rust-toolchain defined version)
      - name: "check formatting"
        id: formatting
        if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
        run: |
          cargo fmt --all -- --check
      # End Run cargo fmt


      # Check for any previous failures, if there are stop, else continue.
      # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
      - name: "Some checks failed"
        if: ${{ failure() }}
        env:
          TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
          TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
          TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
          TEST_MYSQL: ${{ steps.test_mysql.outcome }}
          TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
          CLIPPY: ${{ steps.clippy.outcome }}
          FMT: ${{ steps.formatting.outcome }}
        run: |
          echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          exit 1


      # Check for any previous failures, if there are stop, else continue.
      # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
      - name: "All checks passed"
        if: ${{ success() }}
        run: |
          echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"


================================================
FILE: .github/workflows/check-templates.yml
================================================
name: Check templates
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on: [ push, pull_request ]

defaults:
  run:
    shell: bash

jobs:
  docker-templates:
    name: Validate docker templates
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      # Checkout the repo
      - name: "Checkout"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      # End Checkout the repo

      - name: Run make to rebuild templates
        working-directory: docker
        run: make

      - name: Check for unstaged changes
        working-directory: docker
        run: git diff --exit-code
        continue-on-error: false


================================================
FILE: .github/workflows/hadolint.yml
================================================
name: Hadolint
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on: [ push, pull_request ]

defaults:
  run:
    shell: bash

jobs:
  hadolint:
    name: Validate Dockerfile syntax
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      # Start Docker Buildx
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
        # https://github.com/moby/buildkit/issues/3969
        # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
        with:
          buildkitd-config-inline: |
            [worker.oci]
              max-parallelism = 2
          driver-opts: |
            network=host

      # Download hadolint - https://github.com/hadolint/hadolint/releases
      - name: Download hadolint
        run: |
          sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
          sudo chmod +x /usr/local/bin/hadolint
        env:
          HADOLINT_VERSION: 2.14.0
      # End Download hadolint
      # Checkout the repo
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      # End Checkout the repo

      # Test Dockerfiles with hadolint
      - name: Run hadolint
        run: hadolint docker/Dockerfile.{debian,alpine}
      # End Test Dockerfiles with hadolint

      # Test Dockerfiles with docker build checks
      - name: Run docker build check
        run: |
          echo "Checking docker/Dockerfile.debian"
          docker build --check . -f docker/Dockerfile.debian
          echo "Checking docker/Dockerfile.alpine"
          docker build --check . -f docker/Dockerfile.alpine
      # End Test Dockerfiles with docker build checks


================================================
FILE: .github/workflows/release.yml
================================================
name: Release
permissions: {}

concurrency:
  # Apply concurrency control only on the upstream repo
  group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }}
  # Don't cancel other runs when creating a tag
  cancel-in-progress: ${{ github.ref_type == 'branch' }}

on:
  push:
    branches:
      - main

    tags:
      # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
      - '[1-2].[0-9]+.[0-9]+'

defaults:
  run:
    shell: bash

env:
  # The *_REPO variables need to be configured as repository variables
  # Append `/settings/variables/actions` to your repo url
  # DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
  # Check for Docker hub credentials in secrets
  HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
  # GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
  # Check for Github credentials in secrets
  HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
  # QUAY_REPO needs to be 'quay.io/<user>/<repo>'
  # Check for Quay.io credentials in secrets
  HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}

jobs:
  docker-build:
    name: Build Vaultwarden containers
    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
    permissions:
      packages: write # Needed to upload packages and artifacts
      contents: read
      attestations: write # Needed to generate an artifact attestation for a build
      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
    runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
    timeout-minutes: 120
    env:
      SOURCE_COMMIT: ${{ github.sha }}
      SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
    strategy:
      matrix:
        arch: ["amd64", "arm64", "arm/v7", "arm/v6"]
        base_image: ["debian","alpine"]

    steps:
      - name: Initialize QEMU binfmt support
        uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
        with:
          platforms: "arm64,arm"

      # Start Docker Buildx
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
        # https://github.com/moby/buildkit/issues/3969
        # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
        with:
          cache-binary: false
          buildkitd-config-inline: |
            [worker.oci]
              max-parallelism = 2
          driver-opts: |
            network=host

      # Checkout the repo
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        # We need fetch-depth of 0 so we also get all the tag metadata
        with:
          persist-credentials: false
          fetch-depth: 0

      # Normalize the architecture string for use in paths and cache keys
      - name: Normalize architecture string
        env:
          MATRIX_ARCH: ${{ matrix.arch }}
        run: |
          # Replace slashes with nothing to create a safe string for paths/cache keys
          NORMALIZED_ARCH="${MATRIX_ARCH//\/}"
          echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"

      # Determine Source Version
      - name: Determine Source Version
        run: |
          # Get the Source Version for this release
          # If HEAD is exactly at a tag, use that tag as-is; otherwise build a
          # '<last-tag>-<short-sha>' pseudo-version from the nearest tag plus the
          # first 8 characters of the commit SHA (relies on the checkout step
          # using fetch-depth: 0 so tag metadata is available).
          GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || true)"
          if [[ -n "${GIT_EXACT_TAG}" ]]; then
              echo "SOURCE_VERSION=${GIT_EXACT_TAG}" | tee -a "${GITHUB_ENV}"
          else
              GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
              echo "SOURCE_VERSION=${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}" | tee -a "${GITHUB_ENV}"
          fi

      # Login to Docker Hub
      - name: Login to Docker Hub
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}

      - name: Add registry for DockerHub
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
        env:
          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to GitHub Container Registry
      - name: Login to GitHub Container Registry
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}

      - name: Add registry for ghcr.io
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
        env:
          GHCR_REPO: ${{ vars.GHCR_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to Quay.io
      - name: Login to Quay.io
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_TOKEN }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}

      - name: Add registry for Quay.io
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
        env:
          QUAY_REPO: ${{ vars.QUAY_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"

      - name: Configure build cache from/to
        env:
          GHCR_REPO: ${{ vars.GHCR_REPO }}
          BASE_IMAGE: ${{ matrix.base_image }}
          NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
        run: |
          #
          # Use the GitHub Container Registry for layer caching, but only when we can
          # actually push to it. HAVE_GHCR_LOGIN is the literal string 'true' or 'false'
          # (a workflow expression result), so it must be compared against 'true';
          # a bare non-empty test ([[ -n ... ]]) would also match 'false' and produce a
          # broken 'ref=-buildcache:...' cache reference on forks without GHCR_REPO.
          if [[ "${HAVE_GHCR_LOGIN}" == "true" ]]; then
            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
          else
            # Explicitly export empty values so the bake step's cache options resolve to
            # nothing (previously these were echoed to stdout only, which set nothing)
            echo "BAKE_CACHE_FROM=" | tee -a "${GITHUB_ENV}"
            echo "BAKE_CACHE_TO=" | tee -a "${GITHUB_ENV}"
          fi
          #

      - name: Generate tags
        id: tags
        env:
          CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
        run: |
          # Convert comma-separated list to newline-separated set commands
          TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|")

          # Output for use in next step
          {
            echo "TAGS<<EOF"
            echo "$TAGS"
            echo "EOF"
          } >> "$GITHUB_ENV"

      - name: Bake ${{ matrix.base_image }} containers
        id: bake_vw
        uses: docker/bake-action@82490499d2e5613fcead7e128237ef0b0ea210f7 # v7.0.0
        env:
          # NOTE(review): there is no step with id 'determine-version' in this job, so
          # this output reference resolves to an empty string — confirm whether this is
          # a leftover or should reference the 'Determine Source Version' step instead.
          BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}"
          SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
          SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
          SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
        with:
          pull: true
          source: .
          files: docker/docker-bake.hcl
          targets: "${{ matrix.base_image }}-multi"
          # Two outputs are configured: a local export (used later to pick the compiled
          # binary out of ./output) and a push-by-digest image (consumed by the
          # merge-manifests job via the uploaded digest artifacts).
          set: |
            *.cache-from=${{ env.BAKE_CACHE_FROM }}
            *.cache-to=${{ env.BAKE_CACHE_TO }}
            *.platform=linux/${{ matrix.arch }}
            ${{ env.TAGS }}
            *.output=type=local,dest=./output
            *.output=type=image,push-by-digest=true,name-canonical=true,push=true

      - name: Extract digest SHA
        env:
          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
          BASE_IMAGE: ${{ matrix.base_image }}
        run: |
          GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
          echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

      - name: Export digest
        env:
          DIGEST_SHA: ${{ env.DIGEST_SHA }}
          RUNNER_TEMP: ${{ runner.temp }}
        run: |
          mkdir -p "${RUNNER_TEMP}"/digests
          digest="${DIGEST_SHA}"
          touch "${RUNNER_TEMP}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

      - name: Rename binaries to match target platform
        env:
          NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
        run: |
          mv ./output/vaultwarden vaultwarden-"${NORMALIZED_ARCH}"

      # Upload artifacts to Github Actions and Attest the binaries
      - name: Attest binaries
        uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
        with:
          subject-path: vaultwarden-${{ env.NORMALIZED_ARCH }}

      - name: Upload binaries as artifacts
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
          path: vaultwarden-${{ env.NORMALIZED_ARCH }}

  merge-manifests:
    name: Merge manifests
    runs-on: ubuntu-latest
    needs: docker-build
    permissions:
      packages: write # Needed to upload packages and artifacts
      attestations: write # Needed to generate an artifact attestation for a build
      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
    strategy:
      matrix:
        base_image: ["debian","alpine"]

    steps:
      - name: Download digests
        uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*-${{ matrix.base_image }}
          merge-multiple: true

      # Login to Docker Hub
      - name: Login to Docker Hub
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}

      - name: Add registry for DockerHub
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
        env:
          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to GitHub Container Registry
      - name: Login to GitHub Container Registry
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}

      - name: Add registry for ghcr.io
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
        env:
          GHCR_REPO: ${{ vars.GHCR_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"

      # Login to Quay.io
      - name: Login to Quay.io
        uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_TOKEN }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}

      - name: Add registry for Quay.io
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
        env:
          QUAY_REPO: ${{ vars.QUAY_REPO }}
        run: |
          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"

      # Determine Base Tags
      - name: Determine Base Tags
        env:
          BASE_IMAGE_TAG: "${{ matrix.base_image != 'debian' && format('-{0}', matrix.base_image) || '' }}"
          REF_TYPE: ${{ github.ref_type }}
        run: |
          # Check which main tag we are going to build determined by ref_type
          # For a tag build the '//-/,' substitution turns a '-alpine' suffix into
          # ',alpine', appending a bare base-image tag — e.g. alpine yields
          # 'latest-alpine,<version>-alpine,alpine', while debian's empty suffix
          # yields just 'latest,<version>'.
          if [[ "${REF_TYPE}" == "tag" ]]; then
            echo "BASE_TAGS=latest${BASE_IMAGE_TAG},${GITHUB_REF#refs/*/}${BASE_IMAGE_TAG}${BASE_IMAGE_TAG//-/,}" | tee -a "${GITHUB_ENV}"
          elif [[ "${REF_TYPE}" == "branch" ]]; then
            echo "BASE_TAGS=testing${BASE_IMAGE_TAG}" | tee -a "${GITHUB_ENV}"
          fi

      - name: Create manifest list, push it and extract digest SHA
        working-directory: ${{ runner.temp }}/digests
        env:
          BASE_TAGS: "${{ env.BASE_TAGS }}"
          CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
        run: |
          # Split the comma-separated registry and tag lists into bash arrays
          IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}"
          IFS=',' read -ra TAGS <<< "${BASE_TAGS}"

          # Build one '-t <registry>:<tag>' pair for every registry/tag combination
          TAG_ARGS=()
          for img in "${IMAGES[@]}"; do
            for tag in "${TAGS[@]}"; do
              TAG_ARGS+=("-t" "${img}:${tag}")
            done
          done

          echo "Creating manifest"
          # The unquoted printf command substitution expands every digest file name in
          # this directory (one file per built arch, named after the bare digest) into
          # an '<image>@sha256:<digest>' source reference for imagetools create.
          if ! OUTPUT=$(docker buildx imagetools create \
                          "${TAG_ARGS[@]}" \
                          $(printf "${IMAGES[0]}@sha256:%s " *) 2>&1); then
            echo "Manifest creation failed"
            echo "${OUTPUT}"
            exit 1
          fi

          echo "Manifest created successfully"
          echo "${OUTPUT}"

          # Extract digest SHA for subsequent steps
          # (the last sha256 in the output is the digest of the created manifest list)
          GET_DIGEST_SHA="$(echo "${OUTPUT}" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)"
          echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

      # Attest container images
      - name: Attest - docker.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}}
        uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
        with:
          subject-name: ${{ vars.DOCKERHUB_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - ghcr.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}}
        uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
        with:
          subject-name: ${{ vars.GHCR_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - quay.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}}
        uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
        with:
          subject-name: ${{ vars.QUAY_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true


================================================
FILE: .github/workflows/releasecache-cleanup.yml
================================================
name: Cleanup
permissions: {}

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

on:
  workflow_dispatch:
    inputs:
      manual_trigger:
        description: "Manual trigger buildcache cleanup"
        required: false
        default: ""

  schedule:
    - cron: '0 1 * * FRI'

jobs:
  releasecache-cleanup:
    name: Releasecache Cleanup
    permissions:
      packages: write # To be able to cleanup old caches
    runs-on: ubuntu-24.04
    continue-on-error: true
    timeout-minutes: 30
    steps:
      - name: Delete vaultwarden-buildcache containers
        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0
        with:
          package-name: 'vaultwarden-buildcache'
          package-type: 'container'
          min-versions-to-keep: 0
          delete-only-untagged-versions: 'false'


================================================
FILE: .github/workflows/trivy.yml
================================================
name: Trivy
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  push:
    branches:
      - main

    tags:
      - '*'

  pull_request:
    branches:
      - main

  schedule:
    - cron: '08 11 * * *'

jobs:
  trivy-scan:
    # Only run this in the upstream repo and not on forks
    # When all forks run this at the same time, it is causing `Too Many Requests` issues
    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
    name: Trivy Scan
    permissions:
      security-events: write # To write the security report
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
        env:
          TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
          TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
        with:
          scan-type: repo
          ignore-unfixed: true
          format: sarif
          output: trivy-results.sarif
          severity: CRITICAL,HIGH

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
        with:
          sarif_file: 'trivy-results.sarif'


================================================
FILE: .github/workflows/typos.yml
================================================
name: Code Spell Checking
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on: [ push, pull_request ]

jobs:
  typos:
    name: Run typos spell checking
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      # Checkout the repo
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      # End Checkout the repo

      # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
      - name: Spell Check Repo
        uses: crate-ci/typos@631208b7aac2daa8b707f55e7331f9112b0e062d # v1.44.0


================================================
FILE: .github/workflows/zizmor.yml
================================================
name: Security Analysis with zizmor
permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["**"]

jobs:
  zizmor:
    name: Run zizmor
    runs-on: ubuntu-latest
    permissions:
      security-events: write # To write the security report
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Run zizmor
        uses: zizmorcore/zizmor-action@0dce2577a4760a2749d8cfb7a84b7d5585ebcb7d # v0.5.0
        with:
          # intentionally not scanning the entire repository,
          # since it contains integration tests.
          inputs: ./.github/


================================================
FILE: .gitignore
================================================
# Local build artifacts
target

# Data folder
data

# IDE files
.vscode
.idea
*.iml

# Environment file
.env

# Web vault
web-vault


================================================
FILE: .hadolint.yaml
================================================
ignored:
  # To prevent issues and make clear some images only work on linux/amd64, we ignore this
  - DL3029
  # disable explicit version for apt install
  - DL3008
  # disable explicit version for apk install
  - DL3018
  # Ignore shellcheck info message
  - SC1091
trustedRegistries:
  - docker.io
  - ghcr.io
  - quay.io


================================================
FILE: .pre-commit-config.yaml
================================================
---
repos:
-   repo: https://github.com/pre-commit/pre-commit-hooks
    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
    hooks:
    - id: check-yaml
    - id: check-json
    - id: check-toml
    - id: mixed-line-ending
      args: ["--fix=no"]
    - id: end-of-file-fixer
      exclude: "(.*js$|.*css$)"
    - id: check-case-conflict
    - id: check-merge-conflict
    - id: detect-private-key
    - id: check-symlinks
    - id: forbid-submodules
-   repo: local
    hooks:
    - id: fmt
      name: fmt
      description: Format files with cargo fmt.
      entry: cargo fmt
      language: system
      always_run: true
      pass_filenames: false
      args: ["--", "--check"]
    - id: cargo-test
      name: cargo test
      description: Test the package for errors.
      entry: cargo test
      language: system
      args: ["--features", "sqlite,mysql,postgresql", "--"]
      types_or: [rust, file]
      files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
      pass_filenames: false
    - id: cargo-clippy
      name: cargo clippy
      description: Lint Rust sources
      entry: cargo clippy
      language: system
      args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
      types_or: [rust, file]
      files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
      pass_filenames: false
    - id: check-docker-templates
      name: check docker templates
      description: Check if the Docker templates are updated
      language: system
      entry: sh
      args:
        - "-c"
        - "cd docker && make"
# When this version is updated, do not forget to update this in `.github/workflows/typos.yml` too
- repo: https://github.com/crate-ci/typos
  rev: 631208b7aac2daa8b707f55e7331f9112b0e062d # v1.44.0
  hooks:
    - id: typos


================================================
FILE: .typos.toml
================================================
[files]
extend-exclude = [
    ".git/",
    "playwright/",
    "*.js", # Ignore all JavaScript files
    "!admin*.js", # Except our own JavaScript files
]
ignore-hidden = false

[default]
extend-ignore-re = [
    # We use this in place of the reserved type identifier at some places
    "typ",
    # In SMTP it's called HELO, so ignore it
    "(?i)helo_name",
    "Server name sent during.+HELO",
    # COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
    "COSEKey",
    "COSEAlgorithm",
    # Ignore this specific string as it's valid
    "Ensure they are valid OTPs",
    # This word is misspelled upstream
    # https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
    # https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
    "AuthRequestResponseRecieved",
]


================================================
FILE: Cargo.toml
================================================
[workspace.package]
edition = "2021"
rust-version = "1.92.0"
license = "AGPL-3.0-only"
repository = "https://github.com/dani-garcia/vaultwarden"
publish = false

[workspace]
members = ["macros"]

[package]
name = "vaultwarden"
version = "1.0.0"
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
readme = "README.md"
build = "build.rs"
resolver = "2"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true

[features]
default = [
    # "sqlite",
    # "mysql",
    # "postgresql",
]
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
enable_syslog = []
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "dep:libsqlite3-sys"]
# Enable to use a vendored and statically linked openssl
vendored_openssl = ["openssl/vendored"]
# Enable MiMalloc memory allocator to replace the default malloc
# This can improve performance for Alpine builds
enable_mimalloc = ["dep:mimalloc"]
s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]

# OIDC specific features
oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"]
oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"]

# Enable unstable features, requires nightly
# Currently only used to enable Rust's official IP support
unstable = []

[target."cfg(unix)".dependencies]
# Logging
syslog = "7.0.0"

[dependencies]
macros = { path = "./macros" }

# Logging
log = "0.4.29"
fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
tracing = { version = "0.1.44", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work

# A `dotenv` implementation for Rust
dotenvy = { version = "0.15.7", default-features = false }

# Numerical libraries
num-traits = "0.2.19"
num-derive = "0.4.2"
bigdecimal = "0.4.10"

# Web framework
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
rocket_ws = { version ="0.1.1" }

# WebSockets libraries
rmpv = "1.3.1" # MessagePack library

# Concurrent HashMap used for WebSocket messaging and favicons
dashmap = "6.1.0"

# Async futures
futures = "0.3.32"
tokio = { version = "1.50.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
tokio-util = { version = "0.7.18", features = ["compat"]}

# A generic serialization/deserialization framework
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"

# A safe, extensible ORM and Query builder
# Diesel is currently pinned because newer versions break MySQL/MariaDB compatibility
diesel = { version = "2.3.6", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.3.1"

derive_more = { version = "2.1.1", features = ["from", "into", "as_ref", "deref", "display"] }
diesel-derive-newtype = "2.1.2"

# Bundled/Static SQLite
libsqlite3-sys = { version = "0.35.0", features = ["bundled"], optional = true }

# Crypto-related libraries
rand = "0.10.0"
ring = "0.17.14"
subtle = "2.6.1"

# UUID generation
uuid = { version = "1.22.0", features = ["v4"] }

# Date and time libraries
chrono = { version = "0.4.44", features = ["clock", "serde"], default-features = false }
chrono-tz = "0.10.4"
time = "0.3.47"

# Job scheduler
job_scheduler_ng = "2.4.0"

# Data encoding library Hex/Base32/Base64
data-encoding = "2.10.0"

# JWT library
jsonwebtoken = { version = "10.3.0", features = ["use_pem", "rust_crypto"], default-features = false }

# TOTP library
totp-lite = "2.0.1"

# Yubico Library
yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false }

# WebAuthn libraries
# danger-allow-state-serialisation is needed to save the state in the db
# danger-credential-internals is needed to support U2F to Webauthn migration
webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-credential-internals"] }
webauthn-rs-proto = "0.5.4"
webauthn-rs-core = "0.5.4"

# Handling of URLs for WebAuthn and favicons
url = "2.5.8"

# Email libraries
lettre = { version = "0.11.19", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
percent-encoding = "2.3.2" # URL encoding library used for URLs in the emails
email_address = "0.2.9"

# HTML Template library
handlebars = { version = "6.4.0", features = ["dir_source"] }

# HTTP client (Used for favicons, version check, DUO and HIBP API)
reqwest = { version = "0.12.28", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
hickory-resolver = "0.25.2"

# Favicon extraction libraries
html5gum = "0.8.3"
regex = { version = "1.12.3", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.2"
bytes = "1.11.1"
svg-hush = "0.9.6"

# Cache function results (Used for version check and favicon fetching)
cached = { version = "0.56.0", features = ["async"] }

# Used for custom short lived cookie jar during favicon extraction
cookie = "0.18.1"
cookie_store = "0.22.1"

# Used by U2F, JWT and PostgreSQL
openssl = "0.10.75"

# CLI argument parsing
pico-args = "0.5.0"

# Macro ident concatenation
pastey = "0.2.1"
governor = "0.10.4"

# OIDC for SSO
openidconnect = { version = "4.0.1", features = ["reqwest", "rustls-tls"] }
moka = { version = "0.12.13", features = ["future"] }

# Check client versions for specific features.
semver = "1.0.27"

# Allow overriding the default memory allocator
# Mainly used for the musl builds, since the default musl malloc is very slow
mimalloc = { version = "0.1.48", features = ["secure"], default-features = false, optional = true }

which = "8.0.1"

# Argon2 library with support for the PHC format
argon2 = "0.5.3"

# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
rpassword = "7.4.0"

# Loading a dynamic CSS Stylesheet
grass_compiler = { version = "0.13.4", default-features = false }

# Files are accessed through Apache OpenDAL
opendal = { version = "0.55.0", features = ["services-fs"], default-features = false }

# For retrieving AWS credentials, including temporary SSO credentials
anyhow = { version = "1.0.102", optional = true }
aws-config = { version = "1.8.15", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.14", optional = true }
aws-smithy-runtime-api = { version = "1.11.6", optional = true }
http = { version = "1.4.0", optional = true }
reqsign = { version = "0.16.5", optional = true }

# Strip debuginfo from the release builds
# The debug symbols are to provide better panic traces
# Also enable fat LTO and use 1 codegen unit for optimizations
[profile.release]
strip = "debuginfo"
lto = "fat"
codegen-units = 1
debug = false

# Optimize for size
[profile.release-micro]
inherits = "release"
strip = "symbols"
opt-level = "z"
panic = "abort"

# Profile for systems with low resources
# It will use less resources during build
[profile.release-low]
inherits = "release"
strip = "symbols"
lto = "thin"
codegen-units = 16

# Used for profiling and debugging like valgrind or heaptrack
# Inherits release to be sure all optimizations have been done
[profile.dbg]
inherits = "release"
strip = "none"
split-debuginfo = "off"
debug = "full"

# A little bit of a speedup for generic building
[profile.dev]
split-debuginfo = "unpacked"
debug = "line-tables-only"

# Used for CI builds to improve compile time
[profile.ci]
inherits = "dev"
debug = false
debug-assertions = false
strip = "symbols"
panic = "abort"

# Always build argon2 using opt-level 3
# This is a huge speed improvement during testing
[profile.dev.package.argon2]
opt-level = 3

# Linting config
# https://doc.rust-lang.org/rustc/lints/groups.html
[workspace.lints.rust]
# Forbid
unsafe_code = "forbid"
non_ascii_idents = "forbid"

# Deny
deprecated_in_future = "deny"
deprecated_safe = { level = "deny", priority = -1 }
future_incompatible = { level = "deny", priority = -1 }
keyword_idents = { level = "deny", priority = -1 }
let_underscore = { level = "deny", priority = -1 }
nonstandard_style = { level = "deny", priority = -1 }
noop_method_call = "deny"
refining_impl_trait = { level = "deny", priority = -1 }
rust_2018_idioms = { level = "deny", priority = -1 }
rust_2021_compatibility = { level = "deny", priority = -1 }
rust_2024_compatibility = { level = "deny", priority = -1 }
single_use_lifetimes = "deny"
trivial_casts = "deny"
trivial_numeric_casts = "deny"
unused = { level = "deny", priority = -1 }
unused_import_braces = "deny"
unused_lifetimes = "deny"
unused_qualifications = "deny"
variant_size_differences = "deny"
# Allow the following lints since these cause issues with Rust v1.84.0 or newer
# Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again
if_let_rescope = "allow"
tail_expr_drop_order = "allow"

# https://rust-lang.github.io/rust-clippy/stable/index.html
[workspace.lints.clippy]
# Warn
dbg_macro = "warn"
todo = "warn"

# Ignore/Allow
result_large_err = "allow"

# Deny
branches_sharing_code = "deny"
case_sensitive_file_extension_comparisons = "deny"
cast_lossless = "deny"
clone_on_ref_ptr = "deny"
equatable_if_let = "deny"
excessive_precision = "deny"
filter_map_next = "deny"
float_cmp_const = "deny"
implicit_clone = "deny"
inefficient_to_string = "deny"
iter_on_empty_collections = "deny"
iter_on_single_items = "deny"
linkedlist = "deny"
macro_use_imports = "deny"
manual_assert = "deny"
manual_instant_elapsed = "deny"
manual_string_new = "deny"
match_wildcard_for_single_variants = "deny"
mem_forget = "deny"
needless_borrow = "deny"
needless_collect = "deny"
needless_continue = "deny"
needless_lifetimes = "deny"
option_option = "deny"
redundant_clone = "deny"
string_add_assign = "deny"
unnecessary_join = "deny"
unnecessary_self_imports = "deny"
unnested_or_patterns = "deny"
unused_async = "deny"
unused_self = "deny"
useless_let_if_seq = "deny"
verbose_file_reads = "deny"
zero_sized_map_values = "deny"

[lints]
workspace = true


================================================
FILE: LICENSE.txt
================================================
                    GNU AFFERO GENERAL PUBLIC LICENSE
                       Version 3, 19 November 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.

  The licenses for most software and other practical works are designed
to take away your freedom to share and change the works.  By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.

  When we speak of free software, we are referring to freedom, not
price.  Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

  Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.

  A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate.  Many developers of free software are heartened and
encouraged by the resulting cooperation.  However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.

  The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community.  It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server.  Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.

  An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals.  This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.

  The precise terms and conditions for copying, distribution and
modification follow.

                       TERMS AND CONDITIONS

  0. Definitions.

  "This License" refers to version 3 of the GNU Affero General Public License.

  "Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

  "The Program" refers to any copyrightable work licensed under this
License.  Each licensee is addressed as "you".  "Licensees" and
"recipients" may be individuals or organizations.

  To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy.  The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

  A "covered work" means either the unmodified Program or a work based
on the Program.

  To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy.  Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

  To "convey" a work means any kind of propagation that enables other
parties to make or receive copies.  Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

  An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License.  If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

  1. Source Code.

  The "source code" for a work means the preferred form of the work
for making modifications to it.  "Object code" means any non-source
form of a work.

  A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

  The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form.  A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

  The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities.  However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work.  For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

  The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

  The Corresponding Source for a work in source code form is that
same work.

  2. Basic Permissions.

  All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met.  This License explicitly affirms your unlimited
permission to run the unmodified Program.  The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work.  This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

  You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force.  You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright.  Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

  Conveying under any other circumstances is permitted solely under
the conditions stated below.  Sublicensing is not allowed; section 10
makes it unnecessary.

  3. Protecting Users' Legal Rights From Anti-Circumvention Law.

  No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

  When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

  4. Conveying Verbatim Copies.

  You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

  You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

  5. Conveying Modified Source Versions.

  You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

    a) The work must carry prominent notices stating that you modified
    it, and giving a relevant date.

    b) The work must carry prominent notices stating that it is
    released under this License and any conditions added under section
    7.  This requirement modifies the requirement in section 4 to
    "keep intact all notices".

    c) You must license the entire work, as a whole, under this
    License to anyone who comes into possession of a copy.  This
    License will therefore apply, along with any applicable section 7
    additional terms, to the whole of the work, and all its parts,
    regardless of how they are packaged.  This License gives no
    permission to license the work in any other way, but it does not
    invalidate such permission if you have separately received it.

    d) If the work has interactive user interfaces, each must display
    Appropriate Legal Notices; however, if the Program has interactive
    interfaces that do not display Appropriate Legal Notices, your
    work need not make them do so.

  A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit.  Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

  6. Conveying Non-Source Forms.

  You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

    a) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by the
    Corresponding Source fixed on a durable physical medium
    customarily used for software interchange.

    b) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by a
    written offer, valid for at least three years and valid for as
    long as you offer spare parts or customer support for that product
    model, to give anyone who possesses the object code either (1) a
    copy of the Corresponding Source for all the software in the
    product that is covered by this License, on a durable physical
    medium customarily used for software interchange, for a price no
    more than your reasonable cost of physically performing this
    conveying of source, or (2) access to copy the
    Corresponding Source from a network server at no charge.

    c) Convey individual copies of the object code with a copy of the
    written offer to provide the Corresponding Source.  This
    alternative is allowed only occasionally and noncommercially, and
    only if you received the object code with such an offer, in accord
    with subsection 6b.

    d) Convey the object code by offering access from a designated
    place (gratis or for a charge), and offer equivalent access to the
    Corresponding Source in the same way through the same place at no
    further charge.  You need not require recipients to copy the
    Corresponding Source along with the object code.  If the place to
    copy the object code is a network server, the Corresponding Source
    may be on a different server (operated by you or a third party)
    that supports equivalent copying facilities, provided you maintain
    clear directions next to the object code saying where to find the
    Corresponding Source.  Regardless of what server hosts the
    Corresponding Source, you remain obligated to ensure that it is
    available for as long as needed to satisfy these requirements.

    e) Convey the object code using peer-to-peer transmission, provided
    you inform other peers where the object code and Corresponding
    Source of the work are being offered to the general public at no
    charge under subsection 6d.

  A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

  A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling.  In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage.  For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product.  A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

  "Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source.  The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

  If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information.  But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

  The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed.  Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

  Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

  7. Additional Terms.

  "Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law.  If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

  When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it.  (Additional permissions may be written to require their own
removal in certain cases when you modify the work.)  You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

  Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

    a) Disclaiming warranty or limiting liability differently from the
    terms of sections 15 and 16 of this License; or

    b) Requiring preservation of specified reasonable legal notices or
    author attributions in that material or in the Appropriate Legal
    Notices displayed by works containing it; or

    c) Prohibiting misrepresentation of the origin of that material, or
    requiring that modified versions of such material be marked in
    reasonable ways as different from the original version; or

    d) Limiting the use for publicity purposes of names of licensors or
    authors of the material; or

    e) Declining to grant rights under trademark law for use of some
    trade names, trademarks, or service marks; or

    f) Requiring indemnification of licensors and authors of that
    material by anyone who conveys the material (or modified versions of
    it) with contractual assumptions of liability to the recipient, for
    any liability that these contractual assumptions directly impose on
    those licensors and authors.

  All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10.  If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term.  If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

  If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

  Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

  8. Termination.

  You may not propagate or modify a covered work except as expressly
provided under this License.  Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

  However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

  Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

  Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License.  If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

  9. Acceptance Not Required for Having Copies.

  You are not required to accept this License in order to receive or
run a copy of the Program.  Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance.  However,
nothing other than this License grants you permission to propagate or
modify any covered work.  These actions infringe copyright if you do
not accept this License.  Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

  10. Automatic Licensing of Downstream Recipients.

  Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License.  You are not responsible
for enforcing compliance by third parties with this License.

  An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations.  If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

  You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License.  For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

  11. Patents.

  A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based.  The
work thus licensed is called the contributor's "contributor version".

  A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version.  For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

  Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

  In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement).  To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

  If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients.  "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

  If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

  A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License.  You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

  Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

  12. No Surrender of Others' Freedom.

  If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License.  If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all.  For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

  13. Remote Network Interaction; Use with the GNU General Public License.

  Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software.  This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.

  Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work.  The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.

  14. Revised Versions of this License.

  The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time.  Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

  Each version is given a distinguishing version number.  If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation.  If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.

  If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

  Later license versions may give you additional or different
permissions.  However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

  15. Disclaimer of Warranty.

  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. Limitation of Liability.

  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

  17. Interpretation of Sections 15 and 16.

  If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published
    by the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program.  If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

  If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source.  For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code.  There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.

  You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.


================================================
FILE: README.md
================================================
![Vaultwarden Logo](./resources/vaultwarden-logo-auto.svg)

An alternative server implementation of the Bitwarden Client API, written in Rust and compatible with [official Bitwarden clients](https://bitwarden.com/download/) [[disclaimer](#disclaimer)], perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.

---

[![GitHub Release](https://img.shields.io/github/release/dani-garcia/vaultwarden.svg?style=for-the-badge&logo=vaultwarden&color=005AA4)](https://github.com/dani-garcia/vaultwarden/releases/latest)
[![ghcr.io Pulls](https://img.shields.io/badge/dynamic/json?style=for-the-badge&logo=github&logoColor=fff&color=005AA4&url=https%3A%2F%2Fipitio.github.io%2Fbackage%2Fdani-garcia%2Fvaultwarden%2Fvaultwarden.json&query=%24.downloads&label=ghcr.io%20pulls&cacheSeconds=14400)](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden)
[![Docker Pulls](https://img.shields.io/docker/pulls/vaultwarden/server.svg?style=for-the-badge&logo=docker&logoColor=fff&color=005AA4&label=docker.io%20pulls)](https://hub.docker.com/r/vaultwarden/server)
[![Quay.io](https://img.shields.io/badge/quay.io-download-005AA4?style=for-the-badge&logo=redhat&cacheSeconds=14400)](https://quay.io/repository/vaultwarden/server) <br>
[![Contributors](https://img.shields.io/github/contributors-anon/dani-garcia/vaultwarden.svg?style=flat-square&logo=vaultwarden&color=005AA4)](https://github.com/dani-garcia/vaultwarden/graphs/contributors)
[![Forks](https://img.shields.io/github/forks/dani-garcia/vaultwarden.svg?style=flat-square&logo=github&logoColor=fff&color=005AA4)](https://github.com/dani-garcia/vaultwarden/network/members)
[![Stars](https://img.shields.io/github/stars/dani-garcia/vaultwarden.svg?style=flat-square&logo=github&logoColor=fff&color=005AA4)](https://github.com/dani-garcia/vaultwarden/stargazers)
[![Issues Open](https://img.shields.io/github/issues/dani-garcia/vaultwarden.svg?style=flat-square&logo=github&logoColor=fff&color=005AA4&cacheSeconds=300)](https://github.com/dani-garcia/vaultwarden/issues)
[![Issues Closed](https://img.shields.io/github/issues-closed/dani-garcia/vaultwarden.svg?style=flat-square&logo=github&logoColor=fff&color=005AA4&cacheSeconds=300)](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue+is%3Aclosed)
[![AGPL-3.0 Licensed](https://img.shields.io/github/license/dani-garcia/vaultwarden.svg?style=flat-square&logo=vaultwarden&color=944000&cacheSeconds=14400)](https://github.com/dani-garcia/vaultwarden/blob/main/LICENSE.txt) <br>
[![Dependency Status](https://img.shields.io/badge/dynamic/xml?url=https%3A%2F%2Fdeps.rs%2Frepo%2Fgithub%2Fdani-garcia%2Fvaultwarden%2Fstatus.svg&query=%2F*%5Blocal-name()%3D'svg'%5D%2F*%5Blocal-name()%3D'g'%5D%5B2%5D%2F*%5Blocal-name()%3D'text'%5D%5B4%5D&style=flat-square&logo=rust&label=dependencies&color=005AA4)](https://deps.rs/repo/github/dani-garcia/vaultwarden)
[![GHA Release](https://img.shields.io/github/actions/workflow/status/dani-garcia/vaultwarden/release.yml?style=flat-square&logo=github&logoColor=fff&label=Release%20Workflow)](https://github.com/dani-garcia/vaultwarden/actions/workflows/release.yml)
[![GHA Build](https://img.shields.io/github/actions/workflow/status/dani-garcia/vaultwarden/build.yml?style=flat-square&logo=github&logoColor=fff&label=Build%20Workflow)](https://github.com/dani-garcia/vaultwarden/actions/workflows/build.yml) <br>
[![Matrix Chat](https://img.shields.io/matrix/vaultwarden:matrix.org.svg?style=flat-square&logo=matrix&logoColor=fff&color=953B00&cacheSeconds=14400)](https://matrix.to/#/#vaultwarden:matrix.org)
[![GitHub Discussions](https://img.shields.io/github/discussions/dani-garcia/vaultwarden?style=flat-square&logo=github&logoColor=fff&color=953B00&cacheSeconds=300)](https://github.com/dani-garcia/vaultwarden/discussions)
[![Discourse Discussions](https://img.shields.io/discourse/topics?server=https%3A%2F%2Fvaultwarden.discourse.group%2F&style=flat-square&logo=discourse&color=953B00)](https://vaultwarden.discourse.group/)

> [!IMPORTANT]
> **When using this server, please report any bugs or suggestions directly to us (see [Get in touch](#get-in-touch)), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official Bitwarden support channels.**

<br>

## Features

A nearly complete implementation of the Bitwarden Client API is provided, including:

 * [Personal Vault](https://bitwarden.com/help/managing-items/)
 * [Send](https://bitwarden.com/help/about-send/)
 * [Attachments](https://bitwarden.com/help/attachments/)
 * [Website icons](https://bitwarden.com/help/website-icons/)
 * [Personal API Key](https://bitwarden.com/help/personal-api-key/)
 * [Organizations](https://bitwarden.com/help/getting-started-organizations/)
   - [Collections](https://bitwarden.com/help/about-collections/),
     [Password Sharing](https://bitwarden.com/help/sharing/),
     [Member Roles](https://bitwarden.com/help/user-types-access-control/),
     [Groups](https://bitwarden.com/help/about-groups/),
     [Event Logs](https://bitwarden.com/help/event-logs/),
     [Admin Password Reset](https://bitwarden.com/help/admin-reset/),
     [Directory Connector](https://bitwarden.com/help/directory-sync/),
     [Policies](https://bitwarden.com/help/policies/)
 * [Multi/Two Factor Authentication](https://bitwarden.com/help/bitwarden-field-guide-two-step-login/)
   - [Authenticator](https://bitwarden.com/help/setup-two-step-login-authenticator/),
     [Email](https://bitwarden.com/help/setup-two-step-login-email/),
     [FIDO2 WebAuthn](https://bitwarden.com/help/setup-two-step-login-fido/),
     [YubiKey](https://bitwarden.com/help/setup-two-step-login-yubikey/),
     [Duo](https://bitwarden.com/help/setup-two-step-login-duo/)
 * [Emergency Access](https://bitwarden.com/help/emergency-access/)
 * [Vaultwarden Admin Backend](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page)
 * [Modified Web Vault client](https://github.com/dani-garcia/bw_web_builds) (Bundled within our containers)

<br>

## Usage

> [!IMPORTANT]
> The web-vault requires the use of a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API).
> That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS).

The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).
See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags.

There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).

Alternatively, you can also [build Vaultwarden](https://github.com/dani-garcia/vaultwarden/wiki/Building-binary) yourself.

While Vaultwarden is based upon the [Rocket web framework](https://rocket.rs), which has built-in support for TLS, our recommendation would be that you set up a reverse proxy (see [proxy examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).

> [!TIP]
> **For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).**

### Docker/Podman CLI

Pull the container image and mount a volume from the host for persistent storage.<br>
You can replace `docker` with `podman` if you prefer to use podman.

```shell
docker pull vaultwarden/server:latest
docker run --detach --name vaultwarden \
  --env DOMAIN="https://vw.domain.tld" \
  --volume /vw-data/:/data/ \
  --restart unless-stopped \
  --publish 127.0.0.1:8000:80 \
  vaultwarden/server:latest
```

This will preserve any persistent data under `/vw-data/`, you can adapt the path to whatever suits you.

### Docker Compose

To use Docker compose you need to create a `compose.yaml` which will hold the configuration to run the Vaultwarden container.

```yaml
services:
  vaultwarden:
    image: vaultwarden/server:latest
    container_name: vaultwarden
    restart: unless-stopped
    environment:
      DOMAIN: "https://vw.domain.tld"
    volumes:
      - ./vw-data/:/data/
    ports:
      - 127.0.0.1:8000:80
```

<br>

## Get in touch

Have a question, suggestion or need help? Join our community on [Matrix](https://matrix.to/#/#vaultwarden:matrix.org), [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions) or [Discourse Forums](https://vaultwarden.discourse.group/).

Encountered a bug or crash? Please search our issue tracker and discussions to see if it's already been reported. If not, please [start a new discussion](https://github.com/dani-garcia/vaultwarden/discussions) or [create a new issue](https://github.com/dani-garcia/vaultwarden/issues/). Ensure you're using the latest version of Vaultwarden and there aren't any similar issues open or closed!

<br>

## Contributors

Thanks for your contribution to the project!

[![Contributors Count](https://img.shields.io/github/contributors-anon/dani-garcia/vaultwarden?style=for-the-badge&logo=vaultwarden&color=005AA4)](https://github.com/dani-garcia/vaultwarden/graphs/contributors)<br>
[![Contributors Avatars](https://contributors-img.web.app/image?repo=dani-garcia/vaultwarden)](https://github.com/dani-garcia/vaultwarden/graphs/contributors)

<br>

## Disclaimer

**This project is not associated with [Bitwarden](https://bitwarden.com/) or Bitwarden, Inc.**

However, one of the active maintainers for Vaultwarden is employed by Bitwarden and is allowed to contribute to the project on their own time. These contributions are independent of Bitwarden and are reviewed by other maintainers.

The maintainers work together to set the direction for the project, focusing on serving the self-hosting community, including individuals, families, and small organizations, while ensuring the project's sustainability.

**Please note:** We cannot be held liable for any data loss that may occur while using Vaultwarden. This includes passwords, attachments, and other information handled by the application. We highly recommend performing regular backups of your files and database. However, should you experience data loss, we encourage you to contact us immediately.

<br>

## Bitwarden_RS

This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues.<br>
Please see [#1642 - v1.21.0 release and project rename to Vaultwarden](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.


================================================
FILE: SECURITY.md
================================================
Vaultwarden tries to prevent security issues but there could always slip something through.
If you believe you've found a security issue in our application, we encourage you to
notify us. We welcome working with you to resolve the issue promptly. Thanks in advance!

# Disclosure Policy

- Let us know as soon as possible upon discovery of a potential security issue, and we'll make every
  effort to quickly resolve the issue.
- Provide us a reasonable amount of time to resolve the issue before any disclosure to the public or a
  third-party. We may publicly disclose the issue before resolving it, if appropriate.
- Make a good faith effort to avoid privacy violations, destruction of data, and interruption or
  degradation of our service. Only interact with accounts you own or with explicit permission of the
  account holder.

# In-scope

- Security issues in any current release of Vaultwarden. Source code is available at https://github.com/dani-garcia/vaultwarden. This includes the current `latest` release and `main / testing` release.

# Exclusions

The following bug classes are out-of-scope:

- Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
- Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden, with which we are not associated
- Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
- Attacks requiring physical access to a user's device
- Issues related to software or protocols not under Vaultwarden's control
- Vulnerabilities in outdated versions of Vaultwarden
- Missing security best practices that do not directly lead to a vulnerability (You may still report them as a normal issue)
- Issues that do not have any impact on the general public

While researching, we'd like to ask you to refrain from:

- Denial of service
- Spamming
- Social engineering (including phishing) of Vaultwarden developers, contributors or users

Thank you for helping keep Vaultwarden and our users safe!

# How to contact us

- You can contact us on Matrix https://matrix.to/#/#vaultwarden:matrix.org (users: `@danig:matrix.org` and/or `@blackdex:matrix.org`)
- You can send an ![security-contact](/.github/security-contact.gif) to report a security issue.<br>
  If you want to send an encrypted email you can use the following GPG key: 13BB3A34C9E380258CE43D595CB150B31F6426BC<br>
  It can be found on several public GPG key servers.<br>
    * https://keys.openpgp.org/search?q=security%40vaultwarden.org
    * https://keys.mailvelope.com/pks/lookup?op=get&search=security%40vaultwarden.org
    * https://pgpkeys.eu/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index
    * https://keyserver.ubuntu.com/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index


================================================
FILE: build.rs
================================================
use std::env;
use std::process::Command;

fn main() {
    // Define cfg aliases so the code can use `#[cfg(sqlite)]` instead of
    // `#[cfg(feature = "sqlite")]`, which helps when adding them through macros.
    #[cfg(feature = "sqlite")]
    println!("cargo:rustc-cfg=sqlite");
    #[cfg(feature = "mysql")]
    println!("cargo:rustc-cfg=mysql");
    #[cfg(feature = "postgresql")]
    println!("cargo:rustc-cfg=postgresql");
    #[cfg(feature = "s3")]
    println!("cargo:rustc-cfg=s3");

    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!(
        "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
    );

    // Declare the custom cfg flags via check-cfg so cargo does not warn
    // when they are used in the code.
    for cfg in ["sqlite", "mysql", "postgresql", "s3"] {
        println!("cargo::rustc-check-cfg=cfg({cfg})");
    }

    // Re-run the build script when the git state changes.
    // Someone could have checked-out a tag or specific commit while no other files changed.
    for path in [".git", ".git/HEAD", ".git/index", ".git/refs/tags"] {
        println!("cargo:rerun-if-changed={path}");
    }

    // Prefer $VW_VERSION, fall back to the legacy $BWRS_VERSION for
    // compatibility, and finally derive the version from git metadata.
    let maybe_vaultwarden_version =
        env::var("VW_VERSION").or_else(|_| env::var("BWRS_VERSION")).or_else(|_| version_from_git_info());

    // If no version could be determined at all, simply emit nothing and
    // let the crate fall back to its defaults.
    if let Ok(version) = maybe_vaultwarden_version {
        println!("cargo:rustc-env=VW_VERSION={version}");
        println!("cargo:rustc-env=CARGO_PKG_VERSION={version}");
    }
}

/// Runs the command `args[0]` with the remaining entries as its arguments and
/// returns the command's trimmed stdout.
///
/// Returns an `io::Error` when:
/// - `args` is empty (previously a panic on `args[0]`),
/// - the command cannot be spawned,
/// - the command exits with a non-zero status, or
/// - the command's output is not valid UTF-8 (previously a panic via `unwrap()`).
fn run(args: &[&str]) -> Result<String, std::io::Error> {
    use std::io::Error;

    // Guard against an empty slice instead of panicking on `args[0]`.
    let Some((cmd, cmd_args)) = args.split_first() else {
        return Err(Error::other("No command given"));
    };

    let out = Command::new(cmd).args(cmd_args).output()?;
    if !out.status.success() {
        return Err(Error::other("Command not successful"));
    }

    // Propagate invalid UTF-8 output as an error instead of panicking.
    let stdout = String::from_utf8(out.stdout).map_err(Error::other)?;
    Ok(stdout.trim().to_string())
}

/// Reads version info from Git, namely tags, branch, and revision,
/// and exposes it to the crate via environment variables.
/// To access these values, use:
///    - `env!("GIT_EXACT_TAG")`
///    - `env!("GIT_LAST_TAG")`
///    - `env!("GIT_BRANCH")`
///    - `env!("GIT_REV")`
///    - `env!("VW_VERSION")`
fn version_from_git_info() -> Result<String, std::io::Error> {
    // A tag pointing exactly at the current commit, if any; commits
    // without an associated tag leave this as `None`.
    let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok();
    if let Some(exact) = exact_tag.as_deref() {
        println!("cargo:rustc-env=GIT_EXACT_TAG={exact}");
    }

    // The most recent reachable tag; identical to `exact_tag` when the
    // current commit itself is tagged.
    let last_tag = run(&["git", "describe", "--abbrev=0", "--tags"])?;
    println!("cargo:rustc-env=GIT_LAST_TAG={last_tag}");

    // The name of the currently checked-out branch.
    let branch = run(&["git", "rev-parse", "--abbrev-ref", "HEAD"])?;
    println!("cargo:rustc-env=GIT_BRANCH={branch}");

    // The current commit hash, shortened to its first 8 characters
    // (empty if the hash is unexpectedly short).
    let rev = run(&["git", "rev-parse", "HEAD"])?;
    let rev_short = rev.get(..8).unwrap_or_default();
    println!("cargo:rustc-env=GIT_REV={rev_short}");

    // Combined version string: an exact tag wins outright; otherwise use
    // tag+revision, appending the branch name for non-default branches.
    match exact_tag {
        Some(exact) => Ok(exact),
        None if matches!(branch.as_str(), "main" | "master" | "HEAD") => Ok(format!("{last_tag}-{rev_short}")),
        None => Ok(format!("{last_tag}-{rev_short} ({branch})")),
    }
}


================================================
FILE: diesel.toml
================================================
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli

[print_schema]
# Path the generated database schema is written to by `diesel print-schema`
file = "src/db/schema.rs"

================================================
FILE: docker/DockerSettings.yaml
================================================
---
# Settings consumed by `Dockerfile.j2` to render `Dockerfile.debian` and `Dockerfile.alpine`.
# After changing anything here, run `make` inside the `docker/` folder to re-generate both files.
vault_version: "v2026.2.0"
vault_image_digest: "sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447"
# Cross Compile Docker Helper Scripts v1.9.0
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707"
rust_version: 1.94.0 # Rust version to be used
debian_version: trixie # Debian release name to be used
alpine_version: "3.23" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
build_stage_image:
  debian:
    image: "docker.io/library/rust:{{rust_version}}-slim-{{debian_version}}"
    platform: "$BUILDPLATFORM"
  alpine:
    image: "build_${TARGETARCH}${TARGETVARIANT}"
    arch_image:
      amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}"
      arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}"
      armv7: "ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-{{rust_version}}"
      armv6: "ghcr.io/blackdex/rust-musl:arm-musleabi-stable-{{rust_version}}"
# The final image which will be used to distribute the container images
runtime_stage_image:
  debian: "docker.io/library/debian:{{debian_version}}-slim"
  alpine: "docker.io/library/alpine:{{alpine_version}}"


================================================
FILE: docker/Dockerfile.alpine
================================================
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate two Dockerfile's `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# 	https://docs.docker.com/develop/develop-images/multistage-build/
# 	https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:v2026.2.0
#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.2.0
#     [docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447
#     [docker.io/vaultwarden/web-vault:v2026.2.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447 AS vault

########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.94.0 AS build_amd64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.94.0 AS build_arm64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.94.0 AS build_armv7
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.94.0 AS build_armv6

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root" \
    # Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
    # Debian Trixie uses libpq v17
    PQ_LIB_DIR="/usr/local/musl/pq17/lib"


# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Alpine based builds
RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \
    # Output the current contents of the file
    cat /env-cargo

RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
# Enable MiMalloc to improve performance on Alpine builds
ARG DB=sqlite,mysql,postgresql,enable_mimalloc

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE  ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.23

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80 \
    SSL_CERT_DIR=/etc/ssl/certs

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apk --no-cache add \
        ca-certificates \
        curl \
        openssl \
        tzdata

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]


================================================
FILE: docker/Dockerfile.debian
================================================
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate two Dockerfile's `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# 	https://docs.docker.com/develop/develop-images/multistage-build/
# 	https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:v2026.2.0
#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2026.2.0
#     [docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447
#     [docker.io/vaultwarden/web-vault:v2026.2.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3447 AS vault

########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707 AS xx

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.94.0-slim-trixie AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
# Install the libc cross packages based upon the debian-arch
RUN apt-get update && \
    apt-get install -y \
        --no-install-recommends \
        clang \
        pkg-config \
        git \
        "libc6-$(xx-info debian-arch)-cross" \
        "libc6-dev-$(xx-info debian-arch)-cross" \
        "linux-libc-dev-$(xx-info debian-arch)-cross" && \
    xx-apt-get install -y \
        --no-install-recommends \
        gcc \
        libpq-dev \
        libpq5 \
        libssl-dev \
        libmariadb-dev \
        zlib1g-dev && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo

RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE  ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80 \
    DEBIAN_FRONTEND=noninteractive

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb3 \
        libpq5 \
        openssl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]


================================================
FILE: docker/Dockerfile.j2
================================================
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate two Dockerfile's `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# 	https://docs.docker.com/develop/develop-images/multistage-build/
# 	https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}
#     $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}
#     [docker.io/vaultwarden/web-vault@{{ vault_image_digest }}]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{ '{{' }}.RepoTags}}" docker.io/vaultwarden/web-vault@{{ vault_image_digest }}
#     [docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_digest }} AS vault

{% if base == "debian" %}
########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx
{% elif base == "alpine" %}
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
{% for arch in build_stage_image[base].arch_image %}
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
{% endfor %}
{% endif %}

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build
{% if base == "debian" %}
COPY --from=xx / /
{% endif %}
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"
{%- if base == "alpine" %} \
    # Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
    # Debian Trixie uses libpq v17
    PQ_LIB_DIR="/usr/local/musl/pq17/lib"
{% endif %}

{% if base == "debian" %}
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
# Install the libc cross packages based upon the debian-arch
RUN apt-get update && \
    apt-get install -y \
        --no-install-recommends \
        clang \
        pkg-config \
        git \
        "libc6-$(xx-info debian-arch)-cross" \
        "libc6-dev-$(xx-info debian-arch)-cross" \
        "linux-libc-dev-$(xx-info debian-arch)-cross" && \
    xx-apt-get install -y \
        --no-install-recommends \
        gcc \
        libpq-dev \
        libpq5 \
        libssl-dev \
        libmariadb-dev \
        zlib1g-dev && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
{% endif %}

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

{% if base == "debian" %}
# Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo

{% elif base == "alpine" %}
# Environment variables for Cargo on Alpine based builds
RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \
    # Output the current contents of the file
    cat /env-cargo

{% endif %}
RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
{% if base == "debian" %}
ARG DB=sqlite,mysql,postgresql
{% elif base == "alpine" %}
# Enable MiMalloc to improve performance on Alpine builds
ARG DB=sqlite,mysql,postgresql,enable_mimalloc
{% endif %}

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE  ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM {{ runtime_stage_image[base] }}

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80
{%- if base == "debian" %} \
    DEBIAN_FRONTEND=noninteractive
{% elif base == "alpine" %} \
    SSL_CERT_DIR=/etc/ssl/certs
{% endif %}

# Create data folder and Install needed libraries
RUN mkdir /data && \
{% if base == "debian" %}
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb3 \
        libpq5 \
        openssl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
{% elif base == "alpine" %}
    apk --no-cache add \
        ca-certificates \
        curl \
        openssl \
        tzdata
{% endif %}

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]


================================================
FILE: docker/Makefile
================================================
# Render both Dockerfiles from the Jinja2 template `Dockerfile.j2`,
# using the settings defined in `DockerSettings.yaml`.
all:
	./render_template Dockerfile.j2 '{"base": "debian"}' > Dockerfile.debian
	./render_template Dockerfile.j2 '{"base": "alpine"}' > Dockerfile.alpine
.PHONY: all


================================================
FILE: docker/README.md
================================================
# Vaultwarden Container Building

To build and release new testing and stable releases of Vaultwarden we use `docker buildx bake`.<br>
This can be used locally by running the command yourself, but it is also used by GitHub Actions.

This makes it easier for us to test and maintain the different architectures we provide.<br>
We also just have two Dockerfile's one for Debian and one for Alpine based images.<br>
With just these two files we can build both Debian and Alpine images for the following platforms:
 - amd64 (linux/amd64)
 - arm64 (linux/arm64)
 - armv7 (linux/arm/v7)
 - armv6 (linux/arm/v6)

There are also some unsupported platforms for Debian-based images. These are not built and tested by default and are only provided to make it easier for users to build for these architectures.
- 386     (linux/386)
- ppc64le (linux/ppc64le)
- s390x   (linux/s390x)

To build these containers you need to enable QEMU binfmt support to be able to run/emulate architectures which are different than your host.<br>
This ensures the container build process can run binaries from other architectures.<br>

**NOTE**: Run all the examples below from the root of the repo.<br>


## How to install QEMU binfmt support

This is different per host OS, but most support this in some way.<br>

### Ubuntu/Debian
```bash
apt install binfmt-support qemu-user-static
```

### Arch Linux (others based upon it)
```bash
pacman -S qemu-user-static qemu-user-static-binfmt
```

### Fedora
```bash
dnf install qemu-user-static
```

### Others
There is also an option to use another Docker container to provide support for this.
```bash
# To install and activate
docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall
docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
```


## Single architecture container building

You can build a container per supported architecture as long as you have QEMU binfmt support installed on your system.<br>

```bash
# Default bake triggers a Debian build using the hosts architecture
docker buildx bake --file docker/docker-bake.hcl

# Bake Debian ARM64 using a debug build
CARGO_PROFILE=dev \
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl debian-arm64

# Bake Alpine ARMv6 as a release build
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl alpine-armv6
```


## Local Multi Architecture container building

Start the initialization, this only needs to be done once.

```bash
# Create and use a new buildx builder instance which connects to the host network
docker buildx create --name vaultwarden --use --driver-opt network=host

# Validate it runs
docker buildx inspect --bootstrap

# Create a local container registry directly reachable on the localhost
docker run -d --name registry --network host registry:2
```

After that is done, you should be able to build and push to the local registry.<br>
Use the following command with the modified variables to bake the Alpine images.<br>
Replace `alpine` with `debian` if you want to build the debian multi arch images.

```bash
# Start a buildx bake using a debug build
CARGO_PROFILE=dev \
SOURCE_COMMIT="$(git rev-parse HEAD)" \
CONTAINER_REGISTRIES="localhost:5000/vaultwarden/server" \
docker buildx bake --file docker/docker-bake.hcl alpine-multi
```


## Using the `bake.sh` script

To make it a bit easier to trigger a build, there is also a `bake.sh` script.<br>
This script calls `docker buildx bake` with all the right parameters and also generates the `SOURCE_COMMIT` and `SOURCE_VERSION` variables.<br>
This script can be called from both the repo root or within the docker directory.

So, if you want to build a Multi Arch Alpine container pushing to your localhost registry you can run this from within the docker directory. (Just make sure you executed the initialization steps above first)
```bash
CONTAINER_REGISTRIES="localhost:5000/vaultwarden/server" \
./bake.sh alpine-multi
```

Or if you want to just build a Debian container from the repo root, you can run this.
```bash
docker/bake.sh
```

You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
This will also append those values to the tag so you can see the built container when running `docker images`.

You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use.
```bash
docker/bake.sh alpine-all --print
```

### Testing baked images

To test these images you can run these images by using the correct tag and provide the platform.<br>
For example, after you have built an arm64 image via `./bake.sh debian-arm64` you can run:
```bash
docker run --rm -it \
  -e DISABLE_ADMIN_TOKEN=true \
  -e I_REALLY_WANT_VOLATILE_STORAGE=true \
  -p8080:80 --platform=linux/arm64 \
  vaultwarden/server:testing-arm64
```


## Using the `podman-bake.sh` script

To also make building easier using podman, there is a `podman-bake.sh` script.<br>
This script calls `podman buildx build` with the needed parameters and the same as `bake.sh`, it will generate some variables automatically.<br>
This script can be called from both the repo root or within the docker directory.

**NOTE:** Unlike the `bake.sh` script, this only supports a single `CONTAINER_REGISTRIES`, and a single `BASE_TAGS` value, no comma separated values. It also only supports building separate architectures, no Multi Arch containers.

To build an Alpine arm64 image with only sqlite support and mimalloc, run this:
```bash
DB="sqlite,enable_mimalloc" \
./podman-bake.sh alpine-arm64
```

Or if you want to just build a Debian container from the repo root, you can run this.
```bash
docker/podman-bake.sh
```

You can append extra arguments after the target if you want. This can be useful for example to disable cache like this.
```bash
./podman-bake.sh alpine-arm64 --no-cache
```

For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br>

### Testing podman built images

The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.

```bash
podman run --rm -it \
  -e DISABLE_ADMIN_TOKEN=true \
  -e I_REALLY_WANT_VOLATILE_STORAGE=true \
  -p8080:80 --platform=linux/arm64 \
  localhost/vaultwarden/server:testing-arm64
```


## Variables supported
| Variable              | default | description |
| --------------------- | ------------------ | ----------- |
| CARGO_PROFILE         | null               | Which cargo profile to use. `null` means what is defined in the Dockerfile                                         |
| DB                    | null               | Which `features` to build. `null` means what is defined in the Dockerfile                                          |
| SOURCE_REPOSITORY_URL | null               | The source repository form where this build is triggered                                                           |
| SOURCE_COMMIT         | null               | The commit hash of the current commit for this build                                                               |
| SOURCE_VERSION        | null               | The current exact tag of this commit, else the last tag and the first 8 chars of the source commit                 |
| BASE_TAGS             | testing            | Tags to be used. Can be a comma separated value like "latest,1.29.2"                                               |
| CONTAINER_REGISTRIES  | vaultwarden/server | Comma separated value of container registries. Like `ghcr.io/dani-garcia/vaultwarden,docker.io/vaultwarden/server` |
| VW_VERSION            | null               | To override the `SOURCE_VERSION` value. This is also used by the `build.rs` code for example                       |


================================================
FILE: docker/bake.sh
================================================
#!/usr/bin/env bash

# Determine the basedir of this script.
# It should be located in the same directory as the docker-bake.hcl
# This ensures you can run this script from both inside and outside of the docker directory
# (readlink resolves a symlinked invocation; when $0 is not a symlink, $0 itself is used)
BASEDIR=$(RL=$(readlink -n "$0"); SP="${RL:-$0}"; dirname "$(cd "$(dirname "${SP}")" || exit; pwd)/$(basename "${SP}")")

# Load build env's (exports SOURCE_COMMIT and SOURCE_VERSION for bake to consume)
source "${BASEDIR}/bake_env.sh"

# Be verbose on what is being executed
set -x

# Make sure we set the context to `..` so it will go up one directory
docker buildx bake --progress plain --set "*.context=${BASEDIR}/.." -f "${BASEDIR}/docker-bake.hcl" "$@"


================================================
FILE: docker/bake_env.sh
================================================
#!/usr/bin/env bash

# Derive SOURCE_COMMIT and SOURCE_VERSION from git (unless overridden via the
# environment) and export them for `docker buildx bake` / `podman buildx build`.

# If SOURCE_COMMIT is provided via env skip this
# (`${SOURCE_COMMIT+x}` expands to empty only when the variable is fully unset)
if [ -z "${SOURCE_COMMIT+x}" ]; then
    SOURCE_COMMIT="$(git rev-parse HEAD)"
fi

# If VW_VERSION is provided via env use it as SOURCE_VERSION
# Else define it using git
if [[ -n "${VW_VERSION}" ]]; then
    SOURCE_VERSION="${VW_VERSION}"
else
    # Prefer the exact tag when HEAD is tagged ...
    GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null)"
    if [[ -n "${GIT_EXACT_TAG}" ]]; then
        SOURCE_VERSION="${GIT_EXACT_TAG}"
    else
        # ... otherwise fall back to `<last-tag>-<first 8 chars of commit>`
        GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
        SOURCE_VERSION="${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}"
        GIT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
        case "${GIT_BRANCH}" in
            main|master|HEAD)
                # Do not add the branch name for these branches
                ;;
            *)
                # Feature branches get the branch name appended for traceability
                SOURCE_VERSION="${SOURCE_VERSION} (${GIT_BRANCH})"
                ;;
        esac
    fi
fi

# Export the rendered variables above so bake will use them
export SOURCE_COMMIT
export SOURCE_VERSION


================================================
FILE: docker/docker-bake.hcl
================================================
// ==== Baking Variables ====

// Set which cargo profile to use, dev or release for example
// Use the value provided in the Dockerfile as default
variable "CARGO_PROFILE" {
  default = null
}

// Set which DB's (features) to enable
// Use the value provided in the Dockerfile as default
variable "DB" {
  default = null
}

// The repository this build was triggered from
variable "SOURCE_REPOSITORY_URL" {
  default = null
}

// The commit hash of the current commit this build was triggered on
variable "SOURCE_COMMIT" {
  default = null
}

// The version of this build
// Typically the current exact tag of this commit,
// else the last tag and the first 8 characters of the source commit
variable "SOURCE_VERSION" {
  default = null
}

// This can be used to overwrite SOURCE_VERSION
// It will be used during the build.rs building stage
variable "VW_VERSION" {
  default = null
}

// The base tag(s) to use
// This can be a comma separated value like "testing,1.29.2"
variable "BASE_TAGS" {
  default = "testing"
}

// Which container registries should be used for the tagging
// This can be a comma separated value
// Use a full URI like `ghcr.io/dani-garcia/vaultwarden,docker.io/vaultwarden/server`
variable "CONTAINER_REGISTRIES" {
  default = "vaultwarden/server"
}


// ==== Baking Groups ====

group "default" {
  targets = ["debian"]
}


// ==== Shared Baking ====
// OCI image labels applied to all targets; also consumed by
// image_index_annotations() so multi-arch manifests carry the same metadata.
function "labels" {
  params = []
  result = {
    "org.opencontainers.image.description" = "Unofficial Bitwarden compatible server written in Rust - ${SOURCE_VERSION}"
    "org.opencontainers.image.licenses" = "AGPL-3.0-only"
    "org.opencontainers.image.documentation" = "https://github.com/dani-garcia/vaultwarden/wiki"
    "org.opencontainers.image.url" = "https://github.com/dani-garcia/vaultwarden"
    "org.opencontainers.image.created" =  "${formatdate("YYYY-MM-DD'T'hh:mm:ssZZZZZ", timestamp())}"
    "org.opencontainers.image.source" = "${SOURCE_REPOSITORY_URL}"
    "org.opencontainers.image.revision" = "${SOURCE_COMMIT}"
    "org.opencontainers.image.version" = "${SOURCE_VERSION}"
  }
}

// Shared attributes inherited by every Debian and Alpine target below
target "_default_attributes" {
  labels = labels()
  args = {
    DB = "${DB}"
    CARGO_PROFILE = "${CARGO_PROFILE}"
    VW_VERSION = "${VW_VERSION}"
  }
}


// ==== Debian Baking ====

// Default Debian target, will build a container using the hosts platform architecture
target "debian" {
  inherits = ["_default_attributes"]
  dockerfile = "docker/Dockerfile.debian"
  tags = generate_tags("", platform_tag())
  output = ["type=docker"]
}

// Multi Platform target, will build one tagged manifest with all supported architectures
// This is mainly used by GitHub Actions to build and push new containers
target "debian-multi" {
  inherits = ["debian"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
  tags = generate_tags("", "")
  output = [join(",", flatten([["type=registry"], image_index_annotations()]))]
}

// Per platform targets, to individually test building per platform locally
target "debian-amd64" {
  inherits = ["debian"]
  platforms = ["linux/amd64"]
  tags = generate_tags("", "-amd64")
}

target "debian-arm64" {
  inherits = ["debian"]
  platforms = ["linux/arm64"]
  tags = generate_tags("", "-arm64")
}

target "debian-armv7" {
  inherits = ["debian"]
  platforms = ["linux/arm/v7"]
  tags = generate_tags("", "-armv7")
}

target "debian-armv6" {
  inherits = ["debian"]
  platforms = ["linux/arm/v6"]
  tags = generate_tags("", "-armv6")
}

// ==== Start of unsupported Debian architecture targets ===
// These are provided just to help users build for these rare platforms
// They will not be built by default
target "debian-386" {
  inherits = ["debian"]
  platforms = ["linux/386"]
  tags = generate_tags("", "-386")
  args = {
    TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig"
  }
}

target "debian-ppc64le" {
  inherits = ["debian"]
  platforms = ["linux/ppc64le"]
  tags = generate_tags("", "-ppc64le")
}

target "debian-s390x" {
  inherits = ["debian"]
  platforms = ["linux/s390x"]
  tags = generate_tags("", "-s390x")
}
// ==== End of unsupported Debian architecture targets ===

// A Group to build all platforms individually for local testing
group "debian-all" {
  targets = ["debian-amd64", "debian-arm64", "debian-armv7", "debian-armv6"]
}


// ==== Alpine Baking ====

// Default Alpine target, will build a container using the hosts platform architecture
target "alpine" {
  inherits = ["_default_attributes"]
  dockerfile = "docker/Dockerfile.alpine"
  tags = generate_tags("-alpine", platform_tag())
  output = ["type=docker"]
}

// Multi Platform target, will build one tagged manifest with all supported architectures
// This is mainly used by GitHub Actions to build and push new containers
target "alpine-multi" {
  inherits = ["alpine"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
  tags = generate_tags("-alpine", "")
  output = [join(",", flatten([["type=registry"], image_index_annotations()]))]
}

// Per platform targets, to individually test building per platform locally
target "alpine-amd64" {
  inherits = ["alpine"]
  platforms = ["linux/amd64"]
  tags = generate_tags("-alpine", "-amd64")
}

target "alpine-arm64" {
  inherits = ["alpine"]
  platforms = ["linux/arm64"]
  tags = generate_tags("-alpine", "-arm64")
}

target "alpine-armv7" {
  inherits = ["alpine"]
  platforms = ["linux/arm/v7"]
  tags = generate_tags("-alpine", "-armv7")
}

target "alpine-armv6" {
  inherits = ["alpine"]
  platforms = ["linux/arm/v6"]
  tags = generate_tags("-alpine", "-armv6")
}

// A Group to build all platforms individually for local testing
group "alpine-all" {
  targets = ["alpine-amd64", "alpine-arm64", "alpine-armv7", "alpine-armv6"]
}


// ==== Bake everything locally ====

group "all" {
  targets = ["debian-all", "alpine-all"]
}


// ==== Baking functions ====

// This will return the local platform as amd64, arm64 or armv7 for example
// It can be used for creating a local image tag
// (BAKE_LOCAL_PLATFORM is provided by buildx, e.g. `linux/arm/v7` -> `-armv7`)
function "platform_tag" {
  params = []
  result = "-${replace(replace(BAKE_LOCAL_PLATFORM, "linux/", ""), "/", "")}"
}


function "get_container_registries" {
  params = []
  result = flatten(split(",", CONTAINER_REGISTRIES))
}

function "get_base_tags" {
  params = []
  result = flatten(split(",", BASE_TAGS))
}

// Produce the cross product of registries and base tags,
// e.g. `vaultwarden/server:testing-alpine-arm64`
function "generate_tags" {
  params = [
    suffix,   // What to append to the BASE_TAG when needed, like `-alpine` for example
    platform  // the platform we are building for if needed
  ]
  result = flatten([
    for registry in get_container_registries() :
      [for base_tag in get_base_tags() :
        concat(
          # If the base_tag is exactly `latest` and the suffix is exactly `-alpine`,
          # also add a shorthand `:alpine` tag
          base_tag == "latest" ? suffix == "-alpine" ? ["${registry}:alpine${platform}"] : [] : [],
          # The default tagging strategy: `<registry>:<base_tag><suffix><platform>`
          ["${registry}:${base_tag}${suffix}${platform}"]
        )
      ]
  ])
}

// Convert the labels() map into `annotation-index.*` output options so the
// OCI image index (multi-arch manifest) carries the same metadata as the
// per-image labels; entries with a null value are skipped.
function "image_index_annotations" {
  params = []
  result = flatten([
    for key, value in labels() :
      value != null ? formatlist("annotation-index.%s=%s", "${key}", "${value}") : []
  ])
}


================================================
FILE: docker/healthcheck.sh
================================================
#!/usr/bin/env sh

# Use the value of the corresponding env var (if present),
# or a default value otherwise.
: "${DATA_FOLDER:="/data"}"
: "${ROCKET_PORT:="80"}"
: "${ENV_FILE:="/.env"}"

CONFIG_FILE="${DATA_FOLDER}"/config.json

# Check if the $ENV_FILE file exist and is readable
# If that is the case, load it into the environment before running any check
if [ -r "${ENV_FILE}" ]; then
    # shellcheck disable=SC1090
    . "${ENV_FILE}"
fi

# Given a config key, return the corresponding config value from the
# config file. If the key doesn't exist, return an empty string.
# NOTE(review): this is a naive line-based extraction; it matches any line
# containing `"key":`, so a nested key with the same name could also match.
get_config_val() {
    key="$1"
    # Extract a line of the form:
    #   "domain": "https://bw.example.com/path",
    grep "\"${key}\":" "${CONFIG_FILE}" |
    # To extract just the value (https://bw.example.com/path), delete:
    # (1) everything up to and including the first ':',
    # (2) whitespace and '"' from the front,
    # (3) ',' and '"' from the back.
    sed -e 's/[^:]\+://' -e 's/^[ "]\+//' -e 's/[,"]\+$//'
}

# Extract the base path from a domain URL. For example:
# - `` -> ``
# - `https://bw.example.com` -> ``
# - `https://bw.example.com/` -> ``
# - `https://bw.example.com/path` -> `/path`
# - `https://bw.example.com/multi/path` -> `/multi/path`
# Trailing slashes are stripped so the result can be concatenated
# with `/alive` when building the probe URL below.
get_base_path() {
    echo "$1" |
    # Delete:
    # (1) everything up to and including '://',
    # (2) everything up to '/',
    # (3) trailing '/' from the back.
    sed -e 's|.*://||' -e 's|[^/]\+||' -e 's|/*$||'
}

# Read domain URL from config.json, if present.
if [ -r "${CONFIG_FILE}" ]; then
    domain="$(get_config_val 'domain')"
    if [ -n "${domain}" ]; then
        # config.json 'domain' overrides the DOMAIN env var.
        DOMAIN="${domain}"
    fi
fi

# When Rocket binds a wildcard address (IPv4 0.0.0.0 or IPv6 ::),
# probe via localhost instead, since the wildcard itself is not connectable.
addr="${ROCKET_ADDRESS}"
if [ -z "${addr}" ] || [ "${addr}" = '0.0.0.0' ] || [ "${addr}" = '::' ]; then
    addr='localhost'
fi
base_path="$(get_base_path "${DOMAIN}")"
# Use https when TLS is enabled; `s` stays empty (plain http) otherwise.
if [ -n "${ROCKET_TLS}" ]; then
    s='s'
fi
# --insecure: the certificate may be self-signed; validating it is not
# the purpose of this liveness check. Non-2xx/3xx or connection failure
# makes curl (and thus this script) exit non-zero -> container unhealthy.
curl --insecure --fail --silent --show-error \
     "http${s}://${addr}:${ROCKET_PORT}${base_path}/alive" || exit 1


================================================
FILE: docker/podman-bake.sh
================================================
#!/usr/bin/env bash

# Determine the basedir of this script.
# It should be located in the same directory as the docker-bake.hcl
# This ensures you can run this script from both inside and outside of the docker directory
BASEDIR=$(RL=$(readlink -n "$0"); SP="${RL:-$0}"; dirname "$(cd "$(dirname "${SP}")" || exit; pwd)/$(basename "${SP}")")

# Load build env's
source "${BASEDIR}/bake_env.sh"

# Check if a target is given as first argument
# If not we assume the defaults and pass the given arguments to the podman command
case "${1}" in
    alpine*|debian*)
        TARGET="${1}"
        # Now shift the $@ array so we only have the rest of the arguments
        # This allows us to append these as extra arguments to the podman buildx build command
        shift
    ;;
esac

LABEL_ARGS=(
    --label org.opencontainers.image.description="Unofficial Bitwarden compatible server written in Rust"
    --label org.opencontainers.image.licenses="AGPL-3.0-only"
    --label org.opencontainers.image.documentation="https://github.com/dani-garcia/vaultwarden/wiki"
    --label org.opencontainers.image.url="https://github.com/dani-garcia/vaultwarden"
    --label org.opencontainers.image.created="$(date --utc --iso-8601=seconds)"
)
if [[ -n "${SOURCE_REPOSITORY_URL}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.source="${SOURCE_REPOSITORY_URL}")
fi
if [[ -n "${SOURCE_COMMIT}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.revision="${SOURCE_COMMIT}")
fi
if [[ -n "${SOURCE_VERSION}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.version="${SOURCE_VERSION}")
fi

# Check if and which --build-arg arguments we need to configure
BUILD_ARGS=()
if [[ -n "${DB}" ]]; then
    BUILD_ARGS+=(--build-arg DB="${DB}")
fi
if [[ -n "${CARGO_PROFILE}" ]]; then
    BUILD_ARGS+=(--build-arg CARGO_PROFILE="${CARGO_PROFILE}")
fi
if [[ -n "${VW_VERSION}" ]]; then
    BUILD_ARGS+=(--build-arg VW_VERSION="${VW_VERSION}")
fi

# Set the default BASE_TAGS if none are provided
if [[ -z "${BASE_TAGS}" ]]; then
    BASE_TAGS="testing"
fi

# Set the default CONTAINER_REGISTRIES if none are provided
if [[ -z "${CONTAINER_REGISTRIES}" ]]; then
    CONTAINER_REGISTRIES="vaultwarden/server"
fi

# Check which Dockerfile we need to use, default is debian
case "${TARGET}" in
    alpine*)
        BASE_TAGS="${BASE_TAGS}-alpine"
        DOCKERFILE="Dockerfile.alpine"
        ;;
    *)
        DOCKERFILE="Dockerfile.debian"
        ;;
esac

# Check which platform we need to build and append the BASE_TAGS with the architecture
case "${TARGET}" in
    *-arm64)
        BASE_TAGS="${BASE_TAGS}-arm64"
        PLATFORM="linux/arm64"
        ;;
    *-armv7)
        BASE_TAGS="${BASE_TAGS}-armv7"
        PLATFORM="linux/arm/v7"
        ;;
    *-armv6)
        BASE_TAGS="${BASE_TAGS}-armv6"
        PLATFORM="linux/arm/v6"
        ;;
    *)
        BASE_TAGS="${BASE_TAGS}-amd64"
        PLATFORM="linux/amd64"
        ;;
esac

# Be verbose on what is being executed
set -x

# Build the image with podman
# We use the docker format here since we are using `SHELL`, which is not supported by OCI
# shellcheck disable=SC2086
podman buildx build \
  --platform="${PLATFORM}" \
  --tag="${CONTAINER_REGISTRIES}:${BASE_TAGS}" \
  --format=docker \
  "${LABEL_ARGS[@]}" \
  "${BUILD_ARGS[@]}" \
  --file="${BASEDIR}/${DOCKERFILE}" "$@" \
  "${BASEDIR}/.."


================================================
FILE: docker/render_template
================================================
#!/usr/bin/env python3
"""Render a Jinja2 template using DockerSettings.yaml for default values.

Usage: render_template <template_file> <json_render_vars>

The JSON argument overrides/extends the settings loaded from the YAML file.
Both files are resolved relative to the current working directory, so run
this from the directory containing DockerSettings.yaml (see the Makefile).
"""

import os
import argparse
import json
import yaml
import jinja2

# Load settings file
with open("DockerSettings.yaml", 'r') as yaml_file:
	yaml_data = yaml.safe_load(yaml_file)

# Render DockerSettings.yaml through Jinja2 with its own data as context,
# so values inside the file can reference other values from the same file.
settings_env = jinja2.Environment(
	loader=jinja2.FileSystemLoader(os.getcwd()),
)
settings_yaml = yaml.safe_load(settings_env.get_template("DockerSettings.yaml").render(yaml_data))

args_parser = argparse.ArgumentParser()
args_parser.add_argument('template_file', help='Jinja2 template file to render.')
args_parser.add_argument('render_vars', help='JSON-encoded data to pass to the templating engine.')
cli_args = args_parser.parse_args()

# Merge the default config yaml with the json arguments given.
# JSON keys take precedence over the YAML defaults.
render_vars = json.loads(cli_args.render_vars)
settings_yaml.update(render_vars)

# trim_blocks avoids stray blank lines after Jinja2 block tags in the output.
environment = jinja2.Environment(
	loader=jinja2.FileSystemLoader(os.getcwd()),
	trim_blocks=True,
)
print(environment.get_template(cli_args.template_file).render(settings_yaml))


================================================
FILE: docker/start.sh
================================================
#!/bin/sh

# Container entrypoint: apply an optional UMASK, source optional pre-start
# hook scripts, then exec the vaultwarden binary.

if [ -n "${UMASK}" ]; then
    umask "${UMASK}"
fi

# Single pre-start hook script (legacy bitwarden_rs name still honored)
if [ -r /etc/vaultwarden.sh ]; then
    . /etc/vaultwarden.sh
elif [ -r /etc/bitwarden_rs.sh ]; then
    echo "### You are using the old /etc/bitwarden_rs.sh script, please migrate to /etc/vaultwarden.sh ###"
    . /etc/bitwarden_rs.sh
fi

# Directory of pre-start hook scripts (legacy directory name still honored)
if [ -d /etc/vaultwarden.d ]; then
    for f in /etc/vaultwarden.d/*.sh; do
        if [ -r "${f}" ]; then
            . "${f}"
        fi
    done
elif [ -d /etc/bitwarden_rs.d ]; then
    echo "### You are using the old /etc/bitwarden_rs.d script directory, please migrate to /etc/vaultwarden.d ###"
    for f in /etc/bitwarden_rs.d/*.sh; do
        if [ -r "${f}" ]; then
            . "${f}"
        fi
    done
fi

# Replace this shell with the server binary, forwarding all arguments
exec /vaultwarden "${@}"


================================================
FILE: macros/Cargo.toml
================================================
# Proc-macro helper crate providing the UuidFromParam and IdFromParam
# derive macros (see src/lib.rs).
[package]
name = "macros"
version = "0.1.0"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true

[lib]
name = "macros"
path = "src/lib.rs"
proc-macro = true

[dependencies]
quote = "1.0.45"
syn = "2.0.117"

[lints]
workspace = true


================================================
FILE: macros/src/lib.rs
================================================
use proc_macro::TokenStream;
use quote::quote;

/// Derives `rocket::request::FromParam` for a `String`-wrapping newtype,
/// accepting a route parameter only when it parses as a valid UUID.
///
/// Panics at compile time (via `unwrap`) if the input is not a valid
/// derive target.
#[proc_macro_derive(UuidFromParam)]
pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream {
    let parsed: syn::DeriveInput = syn::parse(input).unwrap();
    impl_derive_uuid_macro(&parsed)
}

/// Builds the `FromParam` impl token stream emitted by `derive_uuid_from_param`.
/// Assumes the target is a one-field tuple struct wrapping a `String`
/// (`Self(param.to_string())` in the generated code).
fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();

            #[inline(always)]
            fn from_param(param: &'r str) -> Result<Self, Self::Error> {
                // Validate only; the original string (not a normalized
                // UUID form) is what gets stored.
                if uuid::Uuid::parse_str(param).is_ok() {
                    Ok(Self(param.to_string()))
                } else {
                    Err(())
                }
            }
        }
    };
    gen_derive.into()
}

/// Derives `rocket::request::FromParam` for a `String`-wrapping newtype,
/// accepting a route parameter only when every character is in `[a-zA-Z0-9-]`.
///
/// Panics at compile time (via `unwrap`) if the input is not a valid
/// derive target.
#[proc_macro_derive(IdFromParam)]
pub fn derive_id_from_param(input: TokenStream) -> TokenStream {
    let parsed: syn::DeriveInput = syn::parse(input).unwrap();
    impl_derive_safestring_macro(&parsed)
}

/// Builds the `FromParam` impl token stream emitted by `derive_id_from_param`.
/// Assumes the target is a one-field tuple struct wrapping a `String`.
fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();

            #[inline(always)]
            fn from_param(param: &'r str) -> Result<Self, Self::Error> {
                // Reject anything outside ASCII letters, digits and '-';
                // an empty string trivially passes `all` and is accepted.
                if param.chars().all(|c| matches!(c, 'a'..='z' | 'A'..='Z' |'0'..='9' | '-')) {
                    Ok(Self(param.to_string()))
                } else {
                    Err(())
                }
            }
        }
    };
    gen_derive.into()
}


================================================
FILE: migrations/mysql/2018-01-14-171611_create_tables/down.sql
================================================
DROP TABLE users;

DROP TABLE devices;

DROP TABLE ciphers;

DROP TABLE attachments;

DROP TABLE folders;

================================================
FILE: migrations/mysql/2018-01-14-171611_create_tables/up.sql
================================================
-- Initial schema: user accounts, their devices, vault items (ciphers),
-- attachments and folders.

CREATE TABLE users (
  uuid                CHAR(36) NOT NULL PRIMARY KEY,
  created_at          DATETIME NOT NULL,
  updated_at          DATETIME NOT NULL,
  email               VARCHAR(255) NOT NULL UNIQUE,
  name                TEXT     NOT NULL,
  password_hash       BLOB     NOT NULL,
  salt                BLOB     NOT NULL,
  password_iterations INTEGER  NOT NULL,
  password_hint       TEXT,
  `key`               TEXT     NOT NULL, -- backticks: `key` is a reserved word in MySQL
  private_key         TEXT,
  public_key          TEXT,
  totp_secret         TEXT,
  totp_recover        TEXT,
  security_stamp      TEXT     NOT NULL,
  equivalent_domains  TEXT     NOT NULL,
  excluded_globals    TEXT     NOT NULL
);

-- Client devices registered per user
CREATE TABLE devices (
  uuid          CHAR(36) NOT NULL PRIMARY KEY,
  created_at    DATETIME NOT NULL,
  updated_at    DATETIME NOT NULL,
  user_uuid     CHAR(36) NOT NULL REFERENCES users (uuid),
  name          TEXT     NOT NULL,
  type          INTEGER  NOT NULL,
  push_token    TEXT,
  refresh_token TEXT     NOT NULL
);

-- Vault item records; `data` holds the item payload
CREATE TABLE ciphers (
  uuid              CHAR(36) NOT NULL PRIMARY KEY,
  created_at        DATETIME NOT NULL,
  updated_at        DATETIME NOT NULL,
  user_uuid         CHAR(36) NOT NULL REFERENCES users (uuid),
  folder_uuid       CHAR(36) REFERENCES folders (uuid),
  organization_uuid CHAR(36),
  type              INTEGER  NOT NULL,
  name              TEXT     NOT NULL,
  notes             TEXT,
  fields            TEXT,
  data              TEXT     NOT NULL,
  favorite          BOOLEAN  NOT NULL
);

-- File attachments belonging to a cipher
CREATE TABLE attachments (
  id          CHAR(36) NOT NULL PRIMARY KEY,
  cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
  file_name   TEXT    NOT NULL,
  file_size   INTEGER NOT NULL

);

-- User-defined folders for organizing ciphers
CREATE TABLE folders (
  uuid       CHAR(36) NOT NULL PRIMARY KEY,
  created_at DATETIME NOT NULL,
  updated_at DATETIME NOT NULL,
  user_uuid  CHAR(36) NOT NULL REFERENCES users (uuid),
  name       TEXT     NOT NULL
);
  


================================================
FILE: migrations/mysql/2018-02-17-205753_create_collections_and_orgs/down.sql
================================================
DROP TABLE collections;

DROP TABLE organizations;


DROP TABLE users_collections;

DROP TABLE users_organizations;


================================================
FILE: migrations/mysql/2018-02-17-205753_create_collections_and_orgs/up.sql
================================================
-- Named collections belonging to an organization
CREATE TABLE collections (
  uuid     VARCHAR(40) NOT NULL PRIMARY KEY,
  org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
  name     TEXT NOT NULL
);

CREATE TABLE organizations (
  uuid          VARCHAR(40) NOT NULL PRIMARY KEY,
  name          TEXT NOT NULL,
  billing_email TEXT NOT NULL
);

-- Per-user access to individual collections
CREATE TABLE users_collections (
  user_uuid       CHAR(36) NOT NULL REFERENCES users (uuid),
  collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
  PRIMARY KEY (user_uuid, collection_uuid)
);

-- Organization membership (the meaning of `status`/`type` values is
-- defined in application code)
CREATE TABLE users_organizations (
  uuid       CHAR(36) NOT NULL PRIMARY KEY,
  user_uuid  CHAR(36) NOT NULL REFERENCES users (uuid),
  org_uuid   CHAR(36) NOT NULL REFERENCES organizations (uuid),

  access_all BOOLEAN NOT NULL,
  `key`      TEXT    NOT NULL,
  status     INTEGER NOT NULL,
  type       INTEGER NOT NULL,

  UNIQUE (user_uuid, org_uuid)
);


================================================
FILE: migrations/mysql/2018-04-27-155151_create_users_ciphers/down.sql
================================================


================================================
FILE: migrations/mysql/2018-04-27-155151_create_users_ciphers/up.sql
================================================
-- Rebuild the ciphers table via rename-copy-drop:
--  * user_uuid becomes optional
--  * organization_uuid gains a REFERENCES clause
--  * folder_uuid moves into the new folders_ciphers join table
ALTER TABLE ciphers RENAME TO oldCiphers;

CREATE TABLE ciphers (
  uuid              CHAR(36) NOT NULL PRIMARY KEY,
  created_at        DATETIME NOT NULL,
  updated_at        DATETIME NOT NULL,
  user_uuid         CHAR(36) REFERENCES users (uuid), -- Make this optional
  organization_uuid CHAR(36) REFERENCES organizations (uuid), -- Add reference to orgs table
  -- Remove folder_uuid
  type              INTEGER  NOT NULL,
  name              TEXT     NOT NULL,
  notes             TEXT,
  fields            TEXT,
  data              TEXT     NOT NULL,
  favorite          BOOLEAN  NOT NULL
);

-- Many-to-many mapping of ciphers to folders (replaces ciphers.folder_uuid)
CREATE TABLE folders_ciphers (
  cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
  folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),

  PRIMARY KEY (cipher_uuid, folder_uuid)
);

-- Copy all existing rows into the rebuilt table
INSERT INTO ciphers (uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite) 
SELECT uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite FROM oldCiphers;

-- Preserve existing folder assignments as join-table rows
INSERT INTO folders_ciphers (cipher_uuid, folder_uuid)
SELECT uuid, folder_uuid FROM oldCiphers WHERE folder_uuid IS NOT NULL;


DROP TABLE oldCiphers;

ALTER TABLE users_collections ADD COLUMN read_only BOOLEAN NOT NULL DEFAULT 0; -- False


================================================
FILE: migrations/mysql/2018-05-08-161616_create_collection_cipher_map/down.sql
================================================
DROP TABLE ciphers_collections;

================================================
FILE: migrations/mysql/2018-05-08-161616_create_collection_cipher_map/up.sql
================================================
CREATE TABLE ciphers_collections (
  cipher_uuid       CHAR(36) NOT NULL REFERENCES ciphers (uuid),
  collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
  PRIMARY KEY (cipher_uuid, collection_uuid)
);


================================================
FILE: migrations/mysql/2018-05-25-232323_update_attachments_reference/down.sql
================================================


================================================
FILE: migrations/mysql/2018-05-25-232323_update_attachments_reference/up.sql
================================================
-- Rebuild attachments via the rename/copy/drop pattern.
-- NOTE(review): per the migration name, presumably this fixes the cipher_uuid
-- reference; the previous definition is not visible here — confirm in history.
ALTER TABLE attachments RENAME TO oldAttachments;

CREATE TABLE attachments (
  id          CHAR(36) NOT NULL PRIMARY KEY,
  cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
  file_name   TEXT    NOT NULL,
  file_size   INTEGER NOT NULL

);

-- Carry all existing attachment rows over unchanged.
INSERT INTO attachments (id, cipher_uuid, file_name, file_size) 
SELECT id, cipher_uuid, file_name, file_size FROM oldAttachments;

DROP TABLE oldAttachments;


================================================
FILE: migrations/mysql/2018-06-01-112529_update_devices_twofactor_remember/down.sql
================================================
-- This file should undo anything in `up.sql`

================================================
FILE: migrations/mysql/2018-06-01-112529_update_devices_twofactor_remember/up.sql
================================================
-- Nullable column; presumably holds the per-device 2FA "remember me"
-- token — confirm against the device model code.
ALTER TABLE devices
    ADD COLUMN
    twofactor_remember TEXT;

================================================
FILE: migrations/mysql/2018-07-11-181453_create_u2f_twofactor/down.sql
================================================
-- Undo: move TOTP secrets (twofactor rows of type 0) back into
-- users.totp_secret. Users without a type-0 row get NULL from the subquery.
UPDATE users
SET totp_secret = (
    SELECT twofactor.data FROM twofactor
    WHERE twofactor.type = 0 
    AND twofactor.user_uuid = users.uuid
);

DROP TABLE twofactor;

================================================
FILE: migrations/mysql/2018-07-11-181453_create_u2f_twofactor/up.sql
================================================
-- Generic two-factor provider storage: one row per (user, provider type).
CREATE TABLE twofactor (
  uuid      CHAR(36) NOT NULL PRIMARY KEY,
  user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
  type      INTEGER  NOT NULL,
  enabled   BOOLEAN  NOT NULL,
  data      TEXT     NOT NULL,

  UNIQUE (user_uuid, type)
);


-- Migrate legacy TOTP secrets as type 0, enabled rows.
INSERT INTO twofactor (uuid, user_uuid, type, enabled, data) 
SELECT UUID(), uuid, 0, 1, u.totp_secret FROM users u where u.totp_secret IS NOT NULL;

UPDATE users SET totp_secret = NULL; -- Instead of recreating the table, just leave the columns empty


================================================
FILE: migrations/mysql/2018-08-27-172114_update_ciphers/down.sql
================================================


================================================
FILE: migrations/mysql/2018-08-27-172114_update_ciphers/up.sql
================================================
-- Optional password history for a cipher (nullable for existing rows).
ALTER TABLE ciphers
    ADD COLUMN
    password_history TEXT;

================================================
FILE: migrations/mysql/2018-09-10-111213_add_invites/down.sql
================================================
-- Undo up.sql: remove the invitations table.
DROP TABLE invitations;

================================================
FILE: migrations/mysql/2018-09-10-111213_add_invites/up.sql
================================================
-- Pending invitations, keyed by invitee email (one row per address).
CREATE TABLE invitations (
    email   VARCHAR(255) NOT NULL PRIMARY KEY
);


================================================
FILE: migrations/mysql/2018-09-19-144557_add_kdf_columns/down.sql
================================================


================================================
FILE: migrations/mysql/2018-09-19-144557_add_kdf_columns/up.sql
================================================
-- Client-side KDF settings; defaults match existing accounts (PBKDF2, 100k).
ALTER TABLE users
    ADD COLUMN
    client_kdf_type INTEGER NOT NULL DEFAULT 0; -- PBKDF2

ALTER TABLE users
    ADD COLUMN
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;


================================================
FILE: migrations/mysql/2018-11-27-152651_add_att_key_columns/down.sql
================================================


================================================
FILE: migrations/mysql/2018-11-27-152651_add_att_key_columns/up.sql
================================================
-- Optional per-attachment encryption key; `key` is backquoted because it
-- is a MySQL reserved word.
ALTER TABLE attachments
    ADD COLUMN
    `key` TEXT;


================================================
FILE: migrations/mysql/2019-05-26-216651_rename_key_and_type_columns/down.sql
================================================
-- Undo the akey/atype renames back to the reserved-word names.
-- NOTE(review): CHANGE COLUMN supplies a full new definition; plain `TEXT`
-- is nullable, so any prior NOT NULL on these columns is dropped — confirm
-- whether that matches the original schema.
ALTER TABLE attachments CHANGE COLUMN akey `key` TEXT;
ALTER TABLE ciphers CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN atype type INTEGER NOT NULL;

================================================
FILE: migrations/mysql/2019-05-26-216651_rename_key_and_type_columns/up.sql
================================================
-- Rename reserved-word columns `key`/`type` to akey/atype so queries no
-- longer need backquoting.
-- NOTE(review): the TEXT redefinitions omit NOT NULL; if the original `key`
-- columns were NOT NULL, that constraint is silently dropped here — confirm.
ALTER TABLE attachments CHANGE COLUMN `key` akey TEXT;
ALTER TABLE ciphers CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN type atype INTEGER NOT NULL;

================================================
FILE: migrations/mysql/2019-10-10-083032_add_column_to_twofactor/down.sql
================================================


================================================
FILE: migrations/mysql/2019-10-10-083032_add_column_to_twofactor/up.sql
================================================
-- Track last use of each 2FA provider; 0 = never used.
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;

================================================
FILE: migrations/mysql/2019-11-17-011009_add_email_verification/down.sql
================================================



================================================
FILE: migrations/mysql/2019-11-17-011009_add_email_verification/up.sql
================================================
-- Email-verification / email-change state for users; all nullable so
-- existing rows stay valid.
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;


================================================
FILE: migrations/mysql/2020-03-13-205045_add_policy_table/down.sql
================================================
-- Undo up.sql: remove the organization policies table.
DROP TABLE org_policies;


================================================
FILE: migrations/mysql/2020-03-13-205045_add_policy_table/up.sql
================================================
-- Organization policies: at most one row per (org, policy type).
CREATE TABLE org_policies (
  uuid      CHAR(36) NOT NULL PRIMARY KEY,
  org_uuid  CHAR(36) NOT NULL REFERENCES organizations (uuid),
  atype     INTEGER  NOT NULL,
  enabled   BOOLEAN  NOT NULL,
  data      TEXT     NOT NULL,

  UNIQUE (org_uuid, atype)
);


================================================
FILE: migrations/mysql/2020-04-09-235005_add_cipher_delete_date/down.sql
================================================



================================================
FILE: migrations/mysql/2020-04-09-235005_add_cipher_delete_date/up.sql
================================================
-- Soft-delete timestamp; NULL means the cipher is not deleted.
ALTER TABLE ciphers
    ADD COLUMN
    deleted_at DATETIME;


================================================
FILE: migrations/mysql/2020-07-01-214531_add_hide_passwords/down.sql
================================================


================================================
FILE: migrations/mysql/2020-07-01-214531_add_hide_passwords/up.sql
================================================
-- Per-user collection access flag; existing grants keep passwords visible.
ALTER TABLE users_collections
ADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;


================================================
FILE: migrations/mysql/2020-08-02-025025_add_favorites_table/down.sql
================================================
-- Undo: restore the per-cipher favorite flag column.
ALTER TABLE ciphers
ADD COLUMN favorite BOOLEAN NOT NULL DEFAULT FALSE;

-- Transfer favorite status for user-owned ciphers.
-- (Only rows where the cipher owner matches the favoriting user survive.)
UPDATE ciphers
SET favorite = TRUE
WHERE EXISTS (
  SELECT * FROM favorites
  WHERE favorites.user_uuid = ciphers.user_uuid
    AND favorites.cipher_uuid = ciphers.uuid
);

DROP TABLE favorites;


================================================
FILE: migrations/mysql/2020-08-02-025025_add_favorites_table/up.sql
================================================
-- Per-user favorites, replacing the single favorite flag on ciphers so
-- org-shared ciphers can be favorited per user.
CREATE TABLE favorites (
  user_uuid   CHAR(36) NOT NULL REFERENCES users(uuid),
  cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers(uuid),

  PRIMARY KEY (user_uuid, cipher_uuid)
);

-- Transfer favorite status for user-owned ciphers.
INSERT INTO favorites(user_uuid, cipher_uuid)
SELECT user_uuid, uuid
FROM ciphers
WHERE favorite = TRUE
  AND user_uuid IS NOT NULL;

ALTER TABLE ciphers
DROP COLUMN favorite;


================================================
FILE: migrations/mysql/2020-11-30-224000_add_user_enabled/down.sql
================================================


================================================
FILE: migrations/mysql/2020-11-30-224000_add_user_enabled/up.sql
================================================
-- Account enable/disable switch; 1 = enabled for all existing users.
ALTER TABLE users ADD COLUMN enabled BOOLEAN NOT NULL DEFAULT 1;


================================================
FILE: migrations/mysql/2020-12-09-173101_add_stamp_exception/down.sql
================================================


================================================
FILE: migrations/mysql/2020-12-09-173101_add_stamp_exception/up.sql
================================================
-- Optional security-stamp exception data (nullable).
ALTER TABLE users ADD COLUMN stamp_exception TEXT DEFAULT NULL;

================================================
FILE: migrations/mysql/2021-03-11-190243_add_sends/down.sql
================================================
-- Undo up.sql: remove the sends table.
DROP TABLE sends;


================================================
FILE: migrations/mysql/2021-03-11-190243_add_sends/up.sql
================================================
-- Bitwarden Send items. Owner is a user or an organization (both nullable);
-- password_* columns are only set for password-protected sends.
CREATE TABLE sends (
  uuid              CHAR(36) NOT NULL   PRIMARY KEY,
  user_uuid         CHAR(36)            REFERENCES users (uuid),
  organization_uuid CHAR(36)            REFERENCES organizations (uuid),

  name              TEXT    NOT NULL,
  notes             TEXT,

  atype             INTEGER NOT NULL,
  data              TEXT    NOT NULL,
  akey              TEXT    NOT NULL,
  password_hash     BLOB,
  password_salt     BLOB,
  password_iter     INTEGER,

  -- NULL max_access_count means unlimited accesses — confirm in send code.
  max_access_count  INTEGER,
  access_count      INTEGER NOT NULL,

  creation_date     DATETIME NOT NULL,
  revision_date     DATETIME NOT NULL,
  expiration_date   DATETIME,
  deletion_date     DATETIME NOT NULL,

  disabled          BOOLEAN NOT NULL
);

================================================
FILE: migrations/mysql/2021-04-30-233251_add_reprompt/down.sql
================================================


================================================
FILE: migrations/mysql/2021-04-30-233251_add_reprompt/up.sql
================================================
-- Optional master-password reprompt setting per cipher (nullable).
ALTER TABLE ciphers
ADD COLUMN reprompt INTEGER;


================================================
FILE: migrations/mysql/2021-05-11-205202_add_hide_email/down.sql
================================================


================================================
FILE: migrations/mysql/2021-05-11-205202_add_hide_email/up.sql
================================================
-- Nullable flag: whether the sender's email is hidden on a send.
ALTER TABLE sends
ADD COLUMN hide_email BOOLEAN;


================================================
FILE: migrations/mysql/2021-07-01-203140_add_password_reset_keys/down.sql
================================================


================================================
FILE: migrations/mysql/2021-07-01-203140_add_password_reset_keys/up.sql
================================================
-- Organization key pair used for admin password reset support.
ALTER TABLE organizations
  ADD COLUMN private_key TEXT;

ALTER TABLE organizations
  ADD COLUMN public_key TEXT;


================================================
FILE: migrations/mysql/2021-08-30-193501_create_emergency_access/down.sql
================================================
-- Undo up.sql: remove the emergency access table.
DROP TABLE emergency_access;


================================================
FILE: migrations/mysql/2021-08-30-193501_create_emergency_access/up.sql
================================================
-- Emergency access grants between a grantor and a grantee.
-- grantee_uuid/email are both nullable: presumably email is used while the
-- invited grantee has no account yet — confirm in the emergency-access code.
CREATE TABLE emergency_access (
  uuid                      CHAR(36)     NOT NULL PRIMARY KEY,
  grantor_uuid              CHAR(36)     REFERENCES users (uuid),
  grantee_uuid              CHAR(36)     REFERENCES users (uuid),
  email                     VARCHAR(255),
  key_encrypted             TEXT,
  atype                     INTEGER  NOT NULL,
  status                    INTEGER  NOT NULL,
  wait_time_days            INTEGER  NOT NULL,
  recovery_initiated_at     DATETIME,
  last_notification_at      DATETIME,
  updated_at                DATETIME NOT NULL,
  created_at                DATETIME NOT NULL
);


================================================
FILE: migrations/mysql/2021-10-24-164321_add_2fa_incomplete/down.sql
================================================
-- Undo up.sql: remove the incomplete-2FA login tracking table.
DROP TABLE twofactor_incomplete;


================================================
FILE: migrations/mysql/2021-10-24-164321_add_2fa_incomplete/up.sql
================================================
-- Logins that passed the password step but not yet 2FA; one row per
-- (user, device).
CREATE TABLE twofactor_incomplete (
  user_uuid   CHAR(36) NOT NULL REFERENCES users(uuid),
  device_uuid CHAR(36) NOT NULL,
  device_name TEXT     NOT NULL,
  login_time  DATETIME NOT NULL,
  ip_address  TEXT     NOT NULL,

  PRIMARY KEY (user_uuid, device_uuid)
);


================================================
FILE: migrations/mysql/2022-01-17-234911_add_api_key/down.sql
================================================


================================================
FILE: migrations/mysql/2022-01-17-234911_add_api_key/up.sql
================================================
-- Optional personal API key for a user (nullable).
ALTER TABLE users
ADD COLUMN api_key VARCHAR(255);


================================================
FILE: migrations/mysql/2022-03-02-210038_update_devices_primary_key/down.sql
================================================


================================================
FILE: migrations/mysql/2022-03-02-210038_update_devices_primary_key/up.sql
================================================
-- First remove the previous primary key
ALTER TABLE devices DROP PRIMARY KEY;
-- Add a new combined one, so the same device uuid can exist per user.
ALTER TABLE devices ADD PRIMARY KEY (uuid, user_uuid);


================================================
FILE: migrations/mysql/2022-07-27-110000_add_group_support/down.sql
================================================
-- Undo up.sql; `groups` is backquoted because it is a MySQL reserved word.
DROP TABLE `groups`;
DROP TABLE groups_users;
DROP TABLE collections_groups;

================================================
FILE: migrations/mysql/2022-07-27-110000_add_group_support/up.sql
================================================
-- Organization groups, group membership, and group->collection access.
-- `groups` is backquoted because it is a MySQL reserved word.
CREATE TABLE `groups` (
  uuid                              CHAR(36) NOT NULL PRIMARY KEY,
  organizations_uuid                VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
  name                              VARCHAR(100) NOT NULL,
  access_all                        BOOLEAN NOT NULL,
  external_id                       VARCHAR(300) NULL,
  creation_date                     DATETIME NOT NULL,
  revision_date                     DATETIME NOT NULL
);

-- Membership maps groups to org-memberships (not directly to users).
CREATE TABLE groups_users (
  groups_uuid                       CHAR(36) NOT NULL REFERENCES `groups` (uuid),
  users_organizations_uuid          VARCHAR(36) NOT NULL REFERENCES users_organizations (uuid),
  UNIQUE (groups_uuid, users_organizations_uuid)
);

-- Per-group collection access with read-only / hide-passwords flags.
CREATE TABLE collections_groups (
  collections_uuid                  VARCHAR(40) NOT NULL REFERENCES collections (uuid),
  groups_uuid                       CHAR(36) NOT NULL REFERENCES `groups` (uuid),
  read_only                         BOOLEAN NOT NULL,
  hide_passwords                    BOOLEAN NOT NULL,
  UNIQUE (collections_uuid, groups_uuid)
);

================================================
FILE: migrations/mysql/2022-10-18-170602_add_events/down.sql
================================================
-- Undo up.sql: remove the event log table.
DROP TABLE event;


================================================
FILE: migrations/mysql/2022-10-18-170602_add_events/up.sql
================================================
-- Organization event log; most columns are nullable since each event type
-- fills only the ids relevant to it.
CREATE TABLE event (
  uuid               CHAR(36)    NOT NULL PRIMARY KEY,
  event_type         INTEGER     NOT NULL,
  user_uuid          CHAR(36),
  org_uuid           CHAR(36),
  cipher_uuid        CHAR(36),
  collection_uuid    CHAR(36),
  group_uuid         CHAR(36),
  org_user_uuid      CHAR(36),
  act_user_uuid      CHAR(36),
  device_type        INTEGER,
  ip_address         TEXT,
  event_date         DATETIME    NOT NULL,
  policy_uuid        CHAR(36),
  provider_uuid      CHAR(36),
  provider_user_uuid CHAR(36),
  provider_org_uuid  CHAR(36),
  -- NOTE(review): redundant — uuid is already the PRIMARY KEY.
  UNIQUE (uuid)
);


================================================
FILE: migrations/mysql/2023-01-06-151600_add_reset_password_support/down.sql
================================================


================================================
FILE: migrations/mysql/2023-01-06-151600_add_reset_password_support/up.sql
================================================
-- Per-membership key enabling admin password reset (nullable until enrolled).
ALTER TABLE users_organizations
ADD COLUMN reset_password_key TEXT;


================================================
FILE: migrations/mysql/2023-01-11-205851_add_avatar_color/down.sql
================================================


================================================
FILE: migrations/mysql/2023-01-11-205851_add_avatar_color/up.sql
================================================
-- Avatar color; VARCHAR(7) fits a "#RRGGBB" hex string.
ALTER TABLE users
ADD COLUMN avatar_color VARCHAR(7);


================================================
FILE: migrations/mysql/2023-01-31-222222_add_argon2/down.sql
================================================


================================================
FILE: migrations/mysql/2023-01-31-222222_add_argon2/up.sql
================================================
-- Argon2 KDF parameters; NULL for accounts still on PBKDF2.
ALTER TABLE users
    ADD COLUMN
    client_kdf_memory INTEGER DEFAULT NULL;

ALTER TABLE users
    ADD COLUMN
    client_kdf_parallelism INTEGER DEFAULT NULL;


================================================
FILE: migrations/mysql/2023-02-18-125735_push_uuid_table/down.sql
================================================


================================================
FILE: migrations/mysql/2023-02-18-125735_push_uuid_table/up.sql
================================================
-- Nullable push-notification registration id per device.
ALTER TABLE devices ADD COLUMN push_uuid TEXT;

================================================
FILE: migrations/mysql/2023-06-02-200424_create_organization_api_key/down.sql
================================================


================================================
FILE: migrations/mysql/2023-06-02-200424_create_organization_api_key/up.sql
================================================
-- API keys issued to organizations; keyed by (uuid, org_uuid).
CREATE TABLE organization_api_key (
    uuid          CHAR(36) NOT NULL,
    org_uuid      CHAR(36) NOT NULL REFERENCES organizations(uuid),
    atype         INTEGER NOT NULL,
    api_key       VARCHAR(255) NOT NULL,
    revision_date DATETIME NOT NULL,
    PRIMARY KEY(uuid, org_uuid)
);

-- Optional external identifier for users (nullable).
ALTER TABLE users ADD COLUMN external_id TEXT;


================================================
FILE: migrations/mysql/2023-06-17-200424_create_auth_requests_table/down.sql
================================================


================================================
FILE: migrations/mysql/2023-06-17-200424_create_auth_requests_table/up.sql
================================================
-- Authentication requests (presumably "log in with device" approval flow —
-- confirm in the auth_request model code).
CREATE TABLE auth_requests (
    uuid                      CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid                 CHAR(36) NOT NULL,
    organization_uuid         CHAR(36),
    request_device_identifier CHAR(36) NOT NULL,
    device_type               INTEGER  NOT NULL,
    request_ip                TEXT     NOT NULL,
    response_device_id        CHAR(36),
    access_code               TEXT     NOT NULL,
    public_key                TEXT     NOT NULL,
    enc_key                   TEXT     NOT NULL,
    master_password_hash      TEXT     NOT NULL,
    approved                  BOOLEAN,
    creation_date             DATETIME NOT NULL,
    response_date             DATETIME,
    authentication_date       DATETIME,
    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
);

================================================
FILE: migrations/mysql/2023-06-28-133700_add_collection_external_id/down.sql
================================================


================================================
FILE: migrations/mysql/2023-06-28-133700_add_collection_external_id/up.sql
================================================
-- Optional external identifier for collections (nullable).
ALTER TABLE collections ADD COLUMN external_id TEXT;


================================================
FILE: migrations/mysql/2023-09-01-170620_update_auth_request_table/down.sql
================================================


================================================
FILE: migrations/mysql/2023-09-01-170620_update_auth_request_table/up.sql
================================================
-- Relax both columns to nullable (MODIFY without NOT NULL).
ALTER TABLE auth_requests
MODIFY master_password_hash TEXT;

ALTER TABLE auth_requests
MODIFY enc_key TEXT;


================================================
FILE: migrations/mysql/2023-09-02-212336_move_user_external_id/down.sql
================================================


================================================
FILE: migrations/mysql/2023-09-02-212336_move_user_external_id/up.sql
================================================
-- Move external_id to the org-membership level (nullable).
ALTER TABLE users_organizations
ADD COLUMN external_id TEXT;


================================================
FILE: migrations/mysql/2023-09-10-133000_add_sso/down.sql
================================================
-- Undo up.sql: remove the SSO nonce table.
DROP TABLE sso_nonce;


================================================
FILE: migrations/mysql/2023-09-10-133000_add_sso/up.sql
================================================
-- First SSO nonce table: one row per nonce with its creation time.
CREATE TABLE sso_nonce (
  nonce               CHAR(36) NOT NULL PRIMARY KEY,
  created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);


================================================
FILE: migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql
================================================
-- Undo up.sql: drop the invited_by_email column again.
ALTER TABLE users_organizations DROP COLUMN invited_by_email;


================================================
FILE: migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql
================================================
-- Record which email invited this membership (nullable for old rows).
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;


================================================
FILE: migrations/mysql/2023-10-21-221242_add_cipher_key/down.sql
================================================


================================================
FILE: migrations/mysql/2023-10-21-221242_add_cipher_key/up.sql
================================================
-- Optional per-cipher key; `key` is backquoted (MySQL reserved word).
ALTER TABLE ciphers
ADD COLUMN `key` TEXT;


================================================
FILE: migrations/mysql/2024-01-12-210182_change_attachment_size/down.sql
================================================


================================================
FILE: migrations/mysql/2024-01-12-210182_change_attachment_size/up.sql
================================================
-- Widen file_size to BIGINT so attachments over INT range are representable.
ALTER TABLE attachments MODIFY file_size BIGINT NOT NULL;


================================================
FILE: migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/down.sql
================================================


================================================
FILE: migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/up.sql
================================================
-- Widen last_used to BIGINT (presumably a unix timestamp — confirm in code).
ALTER TABLE twofactor MODIFY last_used BIGINT NOT NULL;


================================================
FILE: migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql
================================================
-- Undo: recreate the original nonce-keyed table. Existing rows are lost.
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
  nonce               CHAR(36) NOT NULL PRIMARY KEY,
  created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);


================================================
FILE: migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql
================================================
-- Rebuild sso_nonce keyed by the OAuth state value; in-flight rows are
-- discarded (table is dropped, not migrated).
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state        VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce        TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at   TIMESTAMP NOT NULL DEFAULT now()
);


================================================
FILE: migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql
================================================
-- Undo: recreate sso_nonce without the PKCE verifier column.
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state               VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce               TEXT NOT NULL,
    redirect_uri        TEXT NOT NULL,
    created_at          TIMESTAMP NOT NULL DEFAULT now()
);


================================================
FILE: migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql
================================================
-- Rebuild sso_nonce with an optional PKCE verifier column; in-flight rows
-- are discarded (table is dropped, not migrated).
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state        VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce        TEXT NOT NULL,
    verifier     TEXT,
    redirect_uri TEXT NOT NULL,
    created_at   TIMESTAMP NOT NULL DEFAULT now()
);


================================================
FILE: migrations/mysql/2024-03-06-170000_add_sso_users/down.sql
================================================
-- Undo up.sql: remove the SSO user-identity mapping table.
DROP TABLE IF EXISTS sso_users;


================================================
FILE: migrations/mysql/2024-03-06-170000_add_sso_users/up.sql
================================================
-- Maps a local user to a unique SSO identifier (one mapping per user,
-- one user per identifier).
CREATE TABLE sso_users (
  user_uuid           CHAR(36) NOT NULL PRIMARY KEY,
  identifier          VARCHAR(768) NOT NULL UNIQUE,
  created_at          TIMESTAMP NOT NULL DEFAULT now(),

  FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);


================================================
FILE: migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql
================================================


================================================
FILE: migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql
================================================
-- Dynamically create DROP FOREIGN KEY
-- Some versions of MySQL or MariaDB might fail if the key doesn't exist.
-- This checks if the key exists, and if so, will drop it.
-- 'sso_users_ibfk_1' is the auto-generated name of the inline FK from the
-- previous migration; 'SELECT 1' is the harmless no-op branch.
SET @drop_sso_fk = IF((SELECT true FROM information_schema.TABLE_CONSTRAINTS WHERE
    CONSTRAINT_SCHEMA = DATABASE() AND
    TABLE_NAME = 'sso_users' AND
    CONSTRAINT_NAME = 'sso_users_ibfk_1' AND
    CONSTRAINT_TYPE = 'FOREIGN KEY') = true,
    'ALTER TABLE sso_users DROP FOREIGN KEY sso_users_ibfk_1',
    'SELECT 1');
PREPARE stmt FROM @drop_sso_fk;
EXECUTE stmt;
DEALLOCATE PREPARE stmt;

-- Re-add the FK with CASCADE so sso_users rows follow their user row.
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;


================================================
FILE: migrations/mysql/2024-06-05-131359_add_2fa_duo_store/down.sql
================================================
-- Undo up.sql: remove the Duo 2FA context table.
DROP TABLE twofactor_duo_ctx;

================================================
FILE: migrations/mysql/2024-06-05-131359_add_2fa_duo_store/up.sql
================================================
-- Transient Duo 2FA context keyed by state; exp is an expiry epoch —
-- TODO confirm units against the Duo handler code.
CREATE TABLE twofactor_duo_ctx (
    state      VARCHAR(64)  NOT NULL,
    user_email VARCHAR(255) NOT NULL,
    nonce      VARCHAR(64)  NOT NULL,
    exp        BIGINT       NOT NULL,

    PRIMARY KEY (state)
);

================================================
FILE: migrations/mysql/2024-09-04-091351_use_device_type_for_mails/down.sql
================================================
-- Undo up.sql: drop the device_type column again.
ALTER TABLE `twofactor_incomplete` DROP COLUMN `device_type`;


================================================
FILE: migrations/mysql/2024-09-04-091351_use_device_type_for_mails/up.sql
================================================
-- Record the device type for incomplete-2FA mails; existing rows default
-- to the "unknown browser" type.
ALTER TABLE twofactor_incomplete ADD COLUMN device_type INTEGER NOT NULL DEFAULT 14; -- 14 = Unknown Browser


================================================
FILE: migrations/mysql/2025-01-09-172300_add_manage/down.sql
================================================


================================================
FILE: migrations/mysql/2025-01-09-172300_add_manage/up.sql
================================================
-- "Manage" permission flag on both collection-access tables; existing
-- grants default to not-manage.
ALTER TABLE users_collections
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;

ALTER TABLE collections_groups
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;


================================================
FILE: migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql
================================================
-- Undo: recreate the previous sso_nonce table. In-flight sso_auth rows
-- are discarded.
DROP TABLE IF EXISTS sso_auth;

CREATE TABLE sso_nonce (
    state               VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce               TEXT NOT NULL,
    verifier            TEXT,
    redirect_uri        TEXT NOT NULL,
    created_at          TIMESTAMP NOT NULL DEFAULT now()
);


================================================
FILE: migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql
================================================
-- Replace sso_nonce with sso_auth, adding client_challenge plus the
-- code/auth response payloads. Rows are not migrated (table is dropped).
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_auth (
    state               VARCHAR(512) NOT NULL PRIMARY KEY,
    client_challenge    TEXT NOT NULL,
    nonce               TEXT NOT NULL,
    redirect_uri        TEXT NOT NULL,
    code_response       TEXT,
    auth_response       TEXT,
    created_at          TIMESTAMP NOT NULL DEFAULT now(),
    updated_at          TIMESTAMP NOT NULL DEFAULT now()
);


================================================
FILE: migrations/postgresql/2019-09-12-100000_create_tables/down.sql
================================================
-- Drop in dependency order: referencing tables first, referenced last.
DROP TABLE devices;
DROP TABLE attachments;
DROP TABLE users_collections;
DROP TABLE users_organizations;
DROP TABLE folders_ciphers;
DROP TABLE ciphers_collections;
DROP TABLE twofactor;
DROP TABLE invitations;
DROP TABLE collections;
DROP TABLE folders;
DROP TABLE ciphers;
DROP TABLE users;
DROP TABLE organizations;


================================================
FILE: migrations/postgresql/2019-09-12-100000_create_tables/up.sql
================================================
CREATE TABLE users (
  uuid                CHAR(36) NOT NULL PRIMARY KEY,
  created_at          TIMESTAMP NOT NULL,
  updated_at          TIMESTAMP NOT NULL,
  email               VARCHAR(255) NOT NULL UNIQUE,
  name                TEXT     NOT NULL,
  password_hash       BYTEA     NOT NULL,
  salt                BYTEA     NOT NULL,
  password_iterations INTEGER  NOT NULL,
  password_hint       TEXT,
  akey                TEXT     NOT NULL,
  private_key         TEXT,
  public_key          TEXT,
  totp_secret         TEXT,
  totp_recover        TEXT,
  security_stamp      TEXT     NOT NULL,
  equivalent_domains  TEXT     NOT NULL,
  excluded_globals    TEXT     NOT NULL,
  client_kdf_type     INTEGER NOT NULL DEFAULT 0,
  client_kdf_iter INTEGER NOT NULL DEFAULT 100000
);

CREATE TABLE devices (
  uuid          CHAR(36) NOT NULL PRIMARY KEY,
  created_at    TIMESTAMP NOT NULL,
  updated_at    TIMESTAMP NOT NULL,
  user_uuid     CHAR(36) NOT NULL REFERENCES users (uuid),
  name          TEXT     NOT NULL,
  atype         INTEGER  NOT NULL,
  push_token    TEXT,
  refresh_token TEXT     NOT NULL,
  twofactor_remember TEXT
);

CREATE TABLE organizations (
  uuid          VARCHAR(40) NOT NULL PRIMARY KEY,
  name          TEXT NOT NULL,
  billing_email TEXT NOT NULL
);

CREATE TABLE ciphers (
  uuid              CHAR(36) NOT NULL PRIMARY KEY,
  created_at        TIMESTAMP NOT NULL,
  updated_at        TIMESTAMP NOT NULL,
  user_uuid         CHAR(36) REFERENCES users (uuid),
  organization_uuid CHAR(36) REFERENCES organizations (uuid),
  atype             INTEGER  NOT NULL,
  name              TEXT     NOT NULL,
  notes             TEXT,
  fields            TEXT,
  data              TEXT     NOT NULL,
  favorite          BOOLEAN  NOT NULL,
  password_history  TEXT
);

CREATE TABLE attachments (
  id          CHAR(36) NOT NULL PRIMARY KEY,
  cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
  file_name   TEXT    NOT NULL,
  file_size   INTEGER NOT NULL,
  akey        TEXT
);

-- Per-user folders for organizing ciphers (linked via folders_ciphers).
CREATE TABLE folders (
  uuid       CHAR(36) NOT NULL PRIMARY KEY,
  created_at TIMESTAMP NOT NULL,
  updated_at TIMESTAMP NOT NULL,
  user_uuid  CHAR(36) NOT NULL REFERENCES users (uuid),
  name       TEXT     NOT NULL
);

-- Named groupings of ciphers belonging to an organization.
-- Uses VARCHAR(40) keys to match the organizations table.
CREATE TABLE collections (
  uuid     VARCHAR(40) NOT NULL PRIMARY KEY,
  org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
  name     TEXT NOT NULL
);

CREATE TABLE users_collections (
Download .txt
gitextract_kb30epk3/

├── .dockerignore
├── .editorconfig
├── .gitattributes
├── .github/
│   ├── CODEOWNERS
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   └── config.yml
│   └── workflows/
│       ├── build.yml
│       ├── check-templates.yml
│       ├── hadolint.yml
│       ├── release.yml
│       ├── releasecache-cleanup.yml
│       ├── trivy.yml
│       ├── typos.yml
│       └── zizmor.yml
├── .gitignore
├── .hadolint.yaml
├── .pre-commit-config.yaml
├── .typos.toml
├── Cargo.toml
├── LICENSE.txt
├── README.md
├── SECURITY.md
├── build.rs
├── diesel.toml
├── docker/
│   ├── DockerSettings.yaml
│   ├── Dockerfile.alpine
│   ├── Dockerfile.debian
│   ├── Dockerfile.j2
│   ├── Makefile
│   ├── README.md
│   ├── bake.sh
│   ├── bake_env.sh
│   ├── docker-bake.hcl
│   ├── healthcheck.sh
│   ├── podman-bake.sh
│   ├── render_template
│   └── start.sh
├── macros/
│   ├── Cargo.toml
│   └── src/
│       └── lib.rs
├── migrations/
│   ├── mysql/
│   │   ├── 2018-01-14-171611_create_tables/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-02-17-205753_create_collections_and_orgs/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-04-27-155151_create_users_ciphers/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-05-08-161616_create_collection_cipher_map/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-05-25-232323_update_attachments_reference/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-06-01-112529_update_devices_twofactor_remember/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-07-11-181453_create_u2f_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-08-27-172114_update_ciphers/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-09-10-111213_add_invites/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-09-19-144557_add_kdf_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2018-11-27-152651_add_att_key_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-05-26-216651_rename_key_and_type_columns/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-10-10-083032_add_column_to_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-11-17-011009_add_email_verification/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-03-13-205045_add_policy_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-04-09-235005_add_cipher_delete_date/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-07-01-214531_add_hide_passwords/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-08-02-025025_add_favorites_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-11-30-224000_add_user_enabled/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-12-09-173101_add_stamp_exception/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-11-190243_add_sends/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-04-30-233251_add_reprompt/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-05-11-205202_add_hide_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-07-01-203140_add_password_reset_keys/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-08-30-193501_create_emergency_access/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-10-24-164321_add_2fa_incomplete/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-01-17-234911_add_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-03-02-210038_update_devices_primary_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-07-27-110000_add_group_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-10-18-170602_add_events/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-06-151600_add_reset_password_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-11-205851_add_avatar_color/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-31-222222_add_argon2/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-02-18-125735_push_uuid_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-02-200424_create_organization_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-17-200424_create_auth_requests_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-28-133700_add_collection_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-01-170620_update_auth_request_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-02-212336_move_user_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-10-133000_add_sso/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-10-21-221242_add_cipher_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-01-12-210182_change_attachment_size/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-135828_change_time_stamp_data_type/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-170000_add_state_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-06-170000_add_sso_users/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-13-170000_sso_users_cascade/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-06-05-131359_add_2fa_duo_store/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-09-04-091351_use_device_type_for_mails/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2025-01-09-172300_add_manage/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   └── 2025-08-20-120000_sso_nonce_to_auth/
│   │       ├── down.sql
│   │       └── up.sql
│   ├── postgresql/
│   │   ├── 2019-09-12-100000_create_tables/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-09-16-150000_fix_attachments/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-10-10-083032_add_column_to_twofactor/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2019-11-17-011009_add_email_verification/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-03-13-205045_add_policy_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-04-09-235005_add_cipher_delete_date/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-07-01-214531_add_hide_passwords/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-08-02-025025_add_favorites_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-11-30-224000_add_user_enabled/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2020-12-09-173101_add_stamp_exception/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-11-190243_add_sends/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-03-15-163412_rename_send_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-04-30-233251_add_reprompt/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-05-11-205202_add_hide_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-07-01-203140_add_password_reset_keys/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-08-30-193501_create_emergency_access/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2021-10-24-164321_add_2fa_incomplete/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-01-17-234911_add_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-03-02-210038_update_devices_primary_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-07-27-110000_add_group_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2022-10-18-170602_add_events/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-06-151600_add_reset_password_support/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-11-205851_add_avatar_color/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-01-31-222222_add_argon2/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-02-18-125735_push_uuid_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-02-200424_create_organization_api_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-17-200424_create_auth_requests_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-06-28-133700_add_collection_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-01-170620_update_auth_request_table/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-02-212336_move_user_external_id/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-10-133000_add_sso/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2023-10-21-221242_add_cipher_key/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-01-12-210182_change_attachment_size/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-135953_change_time_stamp_data_type/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-14-170000_add_state_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-06-170000_add_sso_users/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-03-13-170000_sso_users_cascade/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-06-05-131359_add_2fa_duo_store/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2024-09-04-091351_use_device_type_for_mails/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   ├── 2025-01-09-172300_add_manage/
│   │   │   ├── down.sql
│   │   │   └── up.sql
│   │   └── 2025-08-20-120000_sso_nonce_to_auth/
│   │       ├── down.sql
│   │       └── up.sql
│   └── sqlite/
│       ├── 2018-01-14-171611_create_tables/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-02-17-205753_create_collections_and_orgs/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-04-27-155151_create_users_ciphers/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-05-08-161616_create_collection_cipher_map/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-05-25-232323_update_attachments_reference/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-06-01-112529_update_devices_twofactor_remember/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-07-11-181453_create_u2f_twofactor/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-08-27-172114_update_ciphers/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-09-10-111213_add_invites/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-09-19-144557_add_kdf_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2018-11-27-152651_add_att_key_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-05-26-216651_rename_key_and_type_columns/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-10-10-083032_add_column_to_twofactor/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2019-11-17-011009_add_email_verification/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-03-13-205045_add_policy_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-04-09-235005_add_cipher_delete_date/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-07-01-214531_add_hide_passwords/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-08-02-025025_add_favorites_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-11-30-224000_add_user_enabled/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2020-12-09-173101_add_stamp_exception/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-03-11-190243_add_sends/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-03-15-163412_rename_send_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-04-30-233251_add_reprompt/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-05-11-205202_add_hide_email/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-07-01-203140_add_password_reset_keys/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-08-30-193501_create_emergency_access/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2021-10-24-164321_add_2fa_incomplete/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-01-17-234911_add_api_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-03-02-210038_update_devices_primary_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-07-27-110000_add_group_support/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2022-10-18-170602_add_events/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-06-151600_add_reset_password_support/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-11-205851_add_avatar_color/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-01-31-222222_add_argon2/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-02-18-125735_push_uuid_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-02-200424_create_organization_api_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-17-200424_create_auth_requests_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-06-28-133700_add_collection_external_id/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-01-170620_update_auth_request_table/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-02-212336_move_user_external_id/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-10-133000_add_sso/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-09-14-133000_add_users_organizations_invited_by_email/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2023-10-21-221242_add_cipher_key/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-01-12-210182_change_attachment_size/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-14-140000_change_time_stamp_data_type/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-14-170000_add_state_to_sso_nonce/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-02-26-170000_add_pkce_to_sso_nonce/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-03-06-170000_add_sso_users/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-03-13-170000_sso_users_cascade/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-06-05-131359_add_2fa_duo_store/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2024-09-04-091351_use_device_type_for_mails/
│       │   ├── down.sql
│       │   └── up.sql
│       ├── 2025-01-09-172300_add_manage/
│       │   ├── down.sql
│       │   └── up.sql
│       └── 2025-08-20-120000_sso_nonce_to_auth/
│           ├── down.sql
│           └── up.sql
├── playwright/
│   ├── .gitignore
│   ├── README.md
│   ├── compose/
│   │   ├── keycloak/
│   │   │   ├── Dockerfile
│   │   │   └── setup.sh
│   │   ├── playwright/
│   │   │   └── Dockerfile
│   │   └── warden/
│   │       ├── Dockerfile
│   │       └── build.sh
│   ├── docker-compose.yml
│   ├── global-setup.ts
│   ├── global-utils.ts
│   ├── package.json
│   ├── playwright.config.ts
│   ├── test.env
│   └── tests/
│       ├── collection.spec.ts
│       ├── login.smtp.spec.ts
│       ├── login.spec.ts
│       ├── organization.smtp.spec.ts
│       ├── organization.spec.ts
│       ├── setups/
│       │   ├── 2fa.ts
│       │   ├── db-setup.ts
│       │   ├── db-teardown.ts
│       │   ├── db-test.ts
│       │   ├── orgs.ts
│       │   ├── sso-setup.ts
│       │   ├── sso-teardown.ts
│       │   ├── sso.ts
│       │   └── user.ts
│       ├── sso_login.smtp.spec.ts
│       ├── sso_login.spec.ts
│       ├── sso_organization.smtp.spec.ts
│       └── sso_organization.spec.ts
├── rust-toolchain.toml
├── rustfmt.toml
├── src/
│   ├── api/
│   │   ├── admin.rs
│   │   ├── core/
│   │   │   ├── accounts.rs
│   │   │   ├── ciphers.rs
│   │   │   ├── emergency_access.rs
│   │   │   ├── events.rs
│   │   │   ├── folders.rs
│   │   │   ├── mod.rs
│   │   │   ├── organizations.rs
│   │   │   ├── public.rs
│   │   │   ├── sends.rs
│   │   │   └── two_factor/
│   │   │       ├── authenticator.rs
│   │   │       ├── duo.rs
│   │   │       ├── duo_oidc.rs
│   │   │       ├── email.rs
│   │   │       ├── mod.rs
│   │   │       ├── protected_actions.rs
│   │   │       ├── webauthn.rs
│   │   │       └── yubikey.rs
│   │   ├── icons.rs
│   │   ├── identity.rs
│   │   ├── mod.rs
│   │   ├── notifications.rs
│   │   ├── push.rs
│   │   └── web.rs
│   ├── auth.rs
│   ├── config.rs
│   ├── crypto.rs
│   ├── db/
│   │   ├── mod.rs
│   │   ├── models/
│   │   │   ├── attachment.rs
│   │   │   ├── auth_request.rs
│   │   │   ├── cipher.rs
│   │   │   ├── collection.rs
│   │   │   ├── device.rs
│   │   │   ├── emergency_access.rs
│   │   │   ├── event.rs
│   │   │   ├── favorite.rs
│   │   │   ├── folder.rs
│   │   │   ├── group.rs
│   │   │   ├── mod.rs
│   │   │   ├── org_policy.rs
│   │   │   ├── organization.rs
│   │   │   ├── send.rs
│   │   │   ├── sso_auth.rs
│   │   │   ├── two_factor.rs
│   │   │   ├── two_factor_duo_context.rs
│   │   │   ├── two_factor_incomplete.rs
│   │   │   └── user.rs
│   │   ├── query_logger.rs
│   │   └── schema.rs
│   ├── error.rs
│   ├── http_client.rs
│   ├── mail.rs
│   ├── main.rs
│   ├── ratelimit.rs
│   ├── sso.rs
│   ├── sso_client.rs
│   ├── static/
│   │   ├── global_domains.json
│   │   ├── scripts/
│   │   │   ├── 404.css
│   │   │   ├── admin.css
│   │   │   ├── admin.js
│   │   │   ├── admin_diagnostics.js
│   │   │   ├── admin_organizations.js
│   │   │   ├── admin_settings.js
│   │   │   ├── admin_users.js
│   │   │   ├── bootstrap.bundle.js
│   │   │   ├── bootstrap.css
│   │   │   ├── datatables.css
│   │   │   ├── datatables.js
│   │   │   ├── jdenticon-3.3.0.js
│   │   │   └── jquery-4.0.0.slim.js
│   │   └── templates/
│   │       ├── 404.hbs
│   │       ├── admin/
│   │       │   ├── base.hbs
│   │       │   ├── diagnostics.hbs
│   │       │   ├── login.hbs
│   │       │   ├── organizations.hbs
│   │       │   ├── settings.hbs
│   │       │   └── users.hbs
│   │       ├── email/
│   │       │   ├── admin_reset_password.hbs
│   │       │   ├── admin_reset_password.html.hbs
│   │       │   ├── change_email.hbs
│   │       │   ├── change_email.html.hbs
│   │       │   ├── change_email_existing.hbs
│   │       │   ├── change_email_existing.html.hbs
│   │       │   ├── change_email_invited.hbs
│   │       │   ├── change_email_invited.html.hbs
│   │       │   ├── delete_account.hbs
│   │       │   ├── delete_account.html.hbs
│   │       │   ├── email_footer.hbs
│   │       │   ├── email_footer_text.hbs
│   │       │   ├── email_header.hbs
│   │       │   ├── emergency_access_invite_accepted.hbs
│   │       │   ├── emergency_access_invite_accepted.html.hbs
│   │       │   ├── emergency_access_invite_confirmed.hbs
│   │       │   ├── emergency_access_invite_confirmed.html.hbs
│   │       │   ├── emergency_access_recovery_approved.hbs
│   │       │   ├── emergency_access_recovery_approved.html.hbs
│   │       │   ├── emergency_access_recovery_initiated.hbs
│   │       │   ├── emergency_access_recovery_initiated.html.hbs
│   │       │   ├── emergency_access_recovery_rejected.hbs
│   │       │   ├── emergency_access_recovery_rejected.html.hbs
│   │       │   ├── emergency_access_recovery_reminder.hbs
│   │       │   ├── emergency_access_recovery_reminder.html.hbs
│   │       │   ├── emergency_access_recovery_timed_out.hbs
│   │       │   ├── emergency_access_recovery_timed_out.html.hbs
│   │       │   ├── incomplete_2fa_login.hbs
│   │       │   ├── incomplete_2fa_login.html.hbs
│   │       │   ├── invite_accepted.hbs
│   │       │   ├── invite_accepted.html.hbs
│   │       │   ├── invite_confirmed.hbs
│   │       │   ├── invite_confirmed.html.hbs
│   │       │   ├── new_device_logged_in.hbs
│   │       │   ├── new_device_logged_in.html.hbs
│   │       │   ├── protected_action.hbs
│   │       │   ├── protected_action.html.hbs
│   │       │   ├── pw_hint_none.hbs
│   │       │   ├── pw_hint_none.html.hbs
│   │       │   ├── pw_hint_some.hbs
│   │       │   ├── pw_hint_some.html.hbs
│   │       │   ├── register_verify_email.hbs
│   │       │   ├── register_verify_email.html.hbs
│   │       │   ├── send_2fa_removed_from_org.hbs
│   │       │   ├── send_2fa_removed_from_org.html.hbs
│   │       │   ├── send_emergency_access_invite.hbs
│   │       │   ├── send_emergency_access_invite.html.hbs
│   │       │   ├── send_org_invite.hbs
│   │       │   ├── send_org_invite.html.hbs
│   │       │   ├── send_single_org_removed_from_org.hbs
│   │       │   ├── send_single_org_removed_from_org.html.hbs
│   │       │   ├── smtp_test.hbs
│   │       │   ├── smtp_test.html.hbs
│   │       │   ├── sso_change_email.hbs
│   │       │   ├── sso_change_email.html.hbs
│   │       │   ├── twofactor_email.hbs
│   │       │   ├── twofactor_email.html.hbs
│   │       │   ├── verify_email.hbs
│   │       │   ├── verify_email.html.hbs
│   │       │   ├── welcome.hbs
│   │       │   ├── welcome.html.hbs
│   │       │   ├── welcome_must_verify.hbs
│   │       │   └── welcome_must_verify.html.hbs
│   │       └── scss/
│   │           ├── user.vaultwarden.scss.hbs
│   │           └── vaultwarden.scss.hbs
│   └── util.rs
└── tools/
    └── global_domains.py
Download .txt
Showing preview only (207K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (2375 symbols across 145 files)

FILE: build.rs
  function main (line 4) | fn main() {
  function run (line 45) | fn run(args: &[&str]) -> Result<String, std::io::Error> {
  function version_from_git_info (line 61) | fn version_from_git_info() -> Result<String, std::io::Error> {

FILE: macros/src/lib.rs
  function derive_uuid_from_param (line 5) | pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream {
  function impl_derive_uuid_macro (line 11) | fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
  function derive_id_from_param (line 32) | pub fn derive_id_from_param(input: TokenStream) -> TokenStream {
  function impl_derive_safestring_macro (line 38) | fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {

FILE: migrations/mysql/2018-01-14-171611_create_tables/up.sql
  type users (line 1) | CREATE TABLE users (
  type devices (line 21) | CREATE TABLE devices (
  type ciphers (line 32) | CREATE TABLE ciphers (
  type attachments (line 47) | CREATE TABLE attachments (
  type folders (line 55) | CREATE TABLE folders (

FILE: migrations/mysql/2018-02-17-205753_create_collections_and_orgs/up.sql
  type collections (line 1) | CREATE TABLE collections (
  type organizations (line 7) | CREATE TABLE organizations (
  type users_collections (line 13) | CREATE TABLE users_collections (
  type users_organizations (line 19) | CREATE TABLE users_organizations (

FILE: migrations/mysql/2018-04-27-155151_create_users_ciphers/up.sql
  type ciphers (line 3) | CREATE TABLE ciphers (
  type folders_ciphers (line 18) | CREATE TABLE folders_ciphers (

FILE: migrations/mysql/2018-05-08-161616_create_collection_cipher_map/up.sql
  type ciphers_collections (line 1) | CREATE TABLE ciphers_collections (

FILE: migrations/mysql/2018-05-25-232323_update_attachments_reference/up.sql
  type attachments (line 3) | CREATE TABLE attachments (

FILE: migrations/mysql/2018-07-11-181453_create_u2f_twofactor/up.sql
  type twofactor (line 1) | CREATE TABLE twofactor (

FILE: migrations/mysql/2018-09-10-111213_add_invites/up.sql
  type invitations (line 1) | CREATE TABLE invitations (

FILE: migrations/mysql/2020-03-13-205045_add_policy_table/up.sql
  type org_policies (line 1) | CREATE TABLE org_policies (

FILE: migrations/mysql/2020-08-02-025025_add_favorites_table/up.sql
  type favorites (line 1) | CREATE TABLE favorites (

FILE: migrations/mysql/2021-03-11-190243_add_sends/up.sql
  type sends (line 1) | CREATE TABLE sends (

FILE: migrations/mysql/2021-08-30-193501_create_emergency_access/up.sql
  type emergency_access (line 1) | CREATE TABLE emergency_access (

FILE: migrations/mysql/2021-10-24-164321_add_2fa_incomplete/up.sql
  type twofactor_incomplete (line 1) | CREATE TABLE twofactor_incomplete (

FILE: migrations/mysql/2022-07-27-110000_add_group_support/up.sql
  type `groups` (line 1) | CREATE TABLE `groups` (
  type groups_users (line 11) | CREATE TABLE groups_users (
  type collections_groups (line 17) | CREATE TABLE collections_groups (

FILE: migrations/mysql/2022-10-18-170602_add_events/up.sql
  type event (line 1) | CREATE TABLE event (

FILE: migrations/mysql/2023-06-02-200424_create_organization_api_key/up.sql
  type organization_api_key (line 1) | CREATE TABLE organization_api_key (

FILE: migrations/mysql/2023-06-17-200424_create_auth_requests_table/up.sql
  type auth_requests (line 1) | CREATE TABLE auth_requests (

FILE: migrations/mysql/2023-09-10-133000_add_sso/up.sql
  type sso_nonce (line 1) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2024-03-06-170000_add_sso_users/up.sql
  type sso_users (line 1) | CREATE TABLE sso_users (

FILE: migrations/mysql/2024-06-05-131359_add_2fa_duo_store/up.sql
  type twofactor_duo_ctx (line 1) | CREATE TABLE twofactor_duo_ctx (

FILE: migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql
  type sso_auth (line 3) | CREATE TABLE sso_auth (

FILE: migrations/postgresql/2019-09-12-100000_create_tables/up.sql
  type users (line 1) | CREATE TABLE users (
  type devices (line 23) | CREATE TABLE devices (
  type organizations (line 35) | CREATE TABLE organizations (
  type ciphers (line 41) | CREATE TABLE ciphers (
  type attachments (line 56) | CREATE TABLE attachments (
  type folders (line 64) | CREATE TABLE folders (
  type collections (line 72) | CREATE TABLE collections (
  type users_collections (line 78) | CREATE TABLE users_collections (
  type users_organizations (line 85) | CREATE TABLE users_organizations (
  type folders_ciphers (line 98) | CREATE TABLE folders_ciphers (
  type ciphers_collections (line 104) | CREATE TABLE ciphers_collections (
  type twofactor (line 110) | CREATE TABLE twofactor (
  type invitations (line 119) | CREATE TABLE invitations (

FILE: migrations/postgresql/2020-03-13-205045_add_policy_table/up.sql
  type org_policies (line 1) | CREATE TABLE org_policies (

FILE: migrations/postgresql/2020-08-02-025025_add_favorites_table/up.sql
  type favorites (line 1) | CREATE TABLE favorites (

FILE: migrations/postgresql/2021-03-11-190243_add_sends/up.sql
  type sends (line 1) | CREATE TABLE sends (

FILE: migrations/postgresql/2021-08-30-193501_create_emergency_access/up.sql
  type emergency_access (line 1) | CREATE TABLE emergency_access (

FILE: migrations/postgresql/2021-10-24-164321_add_2fa_incomplete/up.sql
  type twofactor_incomplete (line 1) | CREATE TABLE twofactor_incomplete (

FILE: migrations/postgresql/2022-07-27-110000_add_group_support/up.sql
  type groups (line 1) | CREATE TABLE groups (
  type groups_users (line 11) | CREATE TABLE groups_users (
  type collections_groups (line 17) | CREATE TABLE collections_groups (

FILE: migrations/postgresql/2022-10-18-170602_add_events/up.sql
  type event (line 1) | CREATE TABLE event (

FILE: migrations/postgresql/2023-06-02-200424_create_organization_api_key/up.sql
  type organization_api_key (line 1) | CREATE TABLE organization_api_key (

FILE: migrations/postgresql/2023-06-17-200424_create_auth_requests_table/up.sql
  type auth_requests (line 1) | CREATE TABLE auth_requests (

FILE: migrations/postgresql/2023-09-10-133000_add_sso/up.sql
  type sso_nonce (line 1) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql
  type sso_users (line 1) | CREATE TABLE sso_users (

FILE: migrations/postgresql/2024-06-05-131359_add_2fa_duo_store/up.sql
  type twofactor_duo_ctx (line 1) | CREATE TABLE twofactor_duo_ctx (

FILE: migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql
  type sso_auth (line 3) | CREATE TABLE sso_auth (

FILE: migrations/sqlite/2018-01-14-171611_create_tables/up.sql
  type users (line 1) | CREATE TABLE users (
  type devices (line 21) | CREATE TABLE devices (
  type ciphers (line 32) | CREATE TABLE ciphers (
  type attachments (line 47) | CREATE TABLE attachments (
  type folders (line 55) | CREATE TABLE folders (

FILE: migrations/sqlite/2018-02-17-205753_create_collections_and_orgs/up.sql
  type collections (line 1) | CREATE TABLE collections (
  type organizations (line 7) | CREATE TABLE organizations (
  type users_collections (line 14) | CREATE TABLE users_collections (
  type users_organizations (line 20) | CREATE TABLE users_organizations (

FILE: migrations/sqlite/2018-04-27-155151_create_users_ciphers/up.sql
  type ciphers (line 3) | CREATE TABLE ciphers (
  type folders_ciphers (line 18) | CREATE TABLE folders_ciphers (

FILE: migrations/sqlite/2018-05-08-161616_create_collection_cipher_map/up.sql
  type ciphers_collections (line 1) | CREATE TABLE ciphers_collections (

FILE: migrations/sqlite/2018-05-25-232323_update_attachments_reference/up.sql
  type attachments (line 3) | CREATE TABLE attachments (

FILE: migrations/sqlite/2018-07-11-181453_create_u2f_twofactor/up.sql
  type twofactor (line 1) | CREATE TABLE twofactor (

FILE: migrations/sqlite/2018-09-10-111213_add_invites/up.sql
  type invitations (line 1) | CREATE TABLE invitations (

FILE: migrations/sqlite/2020-03-13-205045_add_policy_table/up.sql
  type org_policies (line 1) | CREATE TABLE org_policies (

FILE: migrations/sqlite/2020-08-02-025025_add_favorites_table/up.sql
  type favorites (line 1) | CREATE TABLE favorites (
  type new_ciphers (line 33) | CREATE TABLE new_ciphers(

FILE: migrations/sqlite/2021-03-11-190243_add_sends/up.sql
  type sends (line 1) | CREATE TABLE sends (

FILE: migrations/sqlite/2021-08-30-193501_create_emergency_access/up.sql
  type emergency_access (line 1) | CREATE TABLE emergency_access (

FILE: migrations/sqlite/2021-10-24-164321_add_2fa_incomplete/up.sql
  type twofactor_incomplete (line 1) | CREATE TABLE twofactor_incomplete (

FILE: migrations/sqlite/2022-03-02-210038_update_devices_primary_key/up.sql
  type devices_new (line 2) | CREATE TABLE devices_new (

FILE: migrations/sqlite/2022-07-27-110000_add_group_support/up.sql
  type groups (line 1) | CREATE TABLE groups (
  type groups_users (line 11) | CREATE TABLE groups_users (
  type collections_groups (line 17) | CREATE TABLE collections_groups (

FILE: migrations/sqlite/2022-10-18-170602_add_events/up.sql
  type event (line 1) | CREATE TABLE event (

FILE: migrations/sqlite/2023-06-02-200424_create_organization_api_key/up.sql
  type organization_api_key (line 1) | CREATE TABLE organization_api_key (

FILE: migrations/sqlite/2023-06-17-200424_create_auth_requests_table/up.sql
  type auth_requests (line 1) | CREATE TABLE auth_requests (

FILE: migrations/sqlite/2023-09-01-170620_update_auth_request_table/up.sql
  type auth_requests_new (line 2) | CREATE TABLE auth_requests_new (

FILE: migrations/sqlite/2023-09-10-133000_add_sso/up.sql
  type sso_nonce (line 1) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql
  type sso_users (line 1) | CREATE TABLE sso_users (

FILE: migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql
  type sso_users (line 3) | CREATE TABLE sso_users (

FILE: migrations/sqlite/2024-06-05-131359_add_2fa_duo_store/up.sql
  type twofactor_duo_ctx (line 1) | CREATE TABLE twofactor_duo_ctx (

FILE: migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql
  type sso_nonce (line 3) | CREATE TABLE sso_nonce (

FILE: migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql
  type sso_auth (line 3) | CREATE TABLE sso_auth (

FILE: playwright/global-setup.ts
  function globalSetup (line 9) | async function globalSetup(config: FullConfig) {

FILE: playwright/global-utils.ts
  function loadEnv (line 12) | function loadEnv(){
  function waitFor (line 35) | async function waitFor(url: String, browser: Browser) {
  function startComposeService (line 56) | function startComposeService(serviceName: String){
  function stopComposeService (line 61) | function stopComposeService(serviceName: String){
  function wipeSqlite (line 66) | function wipeSqlite(){
  function wipeMariaDB (line 72) | async function wipeMariaDB(){
  function wipeMysqlDB (line 102) | async function wipeMysqlDB(){
  function wipePostgres (line 132) | async function wipePostgres(){
  function dbConfig (line 155) | function dbConfig(testInfo: TestInfo){
  function startVault (line 177) | async function startVault(browser: Browser, testInfo: TestInfo, env = {}...
  function stopVault (line 209) | async function stopVault(force: boolean = false) {
  function restartVault (line 218) | async function restartVault(page: Page, testInfo: TestInfo, env, resetDB...
  function checkNotification (line 223) | async function checkNotification(page: Page, hasText: string) {
  function cleanLanding (line 233) | async function cleanLanding(page: Page) {
  function logout (line 244) | async function logout(test: Test, page: Page, user: { name: string }) {
  function ignoreExtension (line 252) | async function ignoreExtension(page: Page) {

FILE: playwright/tests/setups/2fa.ts
  function activateTOTP (line 7) | async function activateTOTP(test: Test, page: Page, user: { name: string...
  function disableTOTP (line 29) | async function disableTOTP(test: Test, page: Page, user: { password: str...
  function activateEmail (line 45) | async function activateEmail(test: Test, page: Page, user: { name: strin...
  function retrieveEmailCode (line 66) | async function retrieveEmailCode(test: Test, page: Page, mailBuffer: Mai...
  function disableEmail (line 77) | async function disableEmail(test: Test, page: Page, user: { password: st...

FILE: playwright/tests/setups/db-test.ts
  type TestOptions (line 3) | type TestOptions = {

FILE: playwright/tests/setups/orgs.ts
  function create (line 5) | async function create(test, page: Page, name: string) {
  function policies (line 17) | async function policies(test, page: Page, name: string) {
  function members (line 29) | async function members(test, page: Page, name: string) {
  function invite (line 41) | async function invite(test, page: Page, name: string, email: string) {
  function confirm (line 57) | async function confirm(test, page: Page, name: string, user_email: strin...
  function revoke (line 68) | async function revoke(test, page: Page, name: string, user_email: string) {

FILE: playwright/tests/setups/sso.ts
  function logNewUser (line 11) | async function logNewUser(
  function logUser (line 64) | async function logUser(

FILE: playwright/tests/setups/user.ts
  function createAccount (line 7) | async function createAccount(test, page: Page, user: { email: string, na...
  function logUser (line 38) | async function logUser(test, page: Page, user: { email: string, password...

FILE: src/api/admin.rs
  function routes (line 40) | pub fn routes() -> Vec<Route> {
  function catchers (line 76) | pub fn catchers() -> Vec<Catcher> {
  function admin_disabled (line 101) | fn admin_disabled() -> &'static str {
  constant COOKIE_NAME (line 105) | const COOKIE_NAME: &str = "VW_ADMIN";
  constant ADMIN_PATH (line 106) | const ADMIN_PATH: &str = "/admin";
  constant DT_FMT (line 107) | const DT_FMT: &str = "%Y-%m-%d %H:%M:%S %Z";
  constant BASE_TEMPLATE (line 109) | const BASE_TEMPLATE: &str = "admin/base";
  constant ACTING_ADMIN_USER (line 111) | const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000";
  constant FAKE_ADMIN_UUID (line 112) | pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000";
  function admin_path (line 114) | fn admin_path() -> String {
  type IpHeader (line 119) | struct IpHeader(Option<String>);
    type Error (line 123) | type Error = ();
    method from_request (line 125) | async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Err...
  function admin_url (line 140) | fn admin_url() -> String {
  type AdminResponse (line 145) | enum AdminResponse {
  function admin_login (line 155) | fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
  function render_admin_login (line 163) | fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiR...
  type LoginForm (line 179) | struct LoginForm {
  function post_admin_login (line 185) | fn post_admin_login(
  function _validate_token (line 229) | fn _validate_token(token: &str) -> bool {
  type AdminTemplateData (line 250) | struct AdminTemplateData {
    method new (line 259) | fn new(page_content: &str, page_data: Value) -> Self {
    method render (line 269) | fn render(self) -> Result<String, Error> {
  function render_admin_page (line 274) | fn render_admin_page() -> ApiResult<Html<String>> {
  function admin_page (line 284) | fn admin_page(_token: AdminToken) -> ApiResult<Html<String>> {
  function admin_page_login (line 289) | fn admin_page_login() -> ApiResult<Html<String>> {
  type InviteData (line 295) | struct InviteData {
  function get_user_or_404 (line 299) | async fn get_user_or_404(user_id: &UserId, conn: &DbConn) -> ApiResult<U...
  function invite_user (line 308) | async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: D...
  function test_smtp (line 334) | async fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyR...
  function logout (line 345) | fn logout(cookies: &CookieJar<'_>) -> Redirect {
  function get_users_json (line 351) | async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
  function users_overview (line 369) | async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<H...
  function get_user_by_mail_json (line 394) | async fn get_user_by_mail_json(mail: &str, _token: AdminToken, conn: DbC...
  function get_user_json (line 406) | async fn get_user_json(user_id: UserId, _token: AdminToken, conn: DbConn...
  function delete_user (line 415) | async fn delete_user(user_id: UserId, token: AdminToken, conn: DbConn) -...
  function delete_sso_user (line 439) | async fn delete_sso_user(user_id: UserId, token: AdminToken, conn: DbCon...
  function deauth_user (line 460) | async fn deauth_user(user_id: UserId, _token: AdminToken, conn: DbConn, ...
  function disable_user (line 481) | async fn disable_user(user_id: UserId, _token: AdminToken, conn: DbConn,...
  function enable_user (line 495) | async fn enable_user(user_id: UserId, _token: AdminToken, conn: DbConn) ...
  function remove_2fa (line 503) | async fn remove_2fa(user_id: UserId, token: AdminToken, conn: DbConn) ->...
  function resend_user_invite (line 512) | async fn resend_user_invite(user_id: UserId, _token: AdminToken, conn: D...
  type MembershipTypeData (line 532) | struct MembershipTypeData {
  function update_membership_type (line 539) | async fn update_membership_type(data: Json<MembershipTypeData>, token: A...
  function update_revision_users (line 578) | async fn update_revision_users(_token: AdminToken, conn: DbConn) -> Empt...
  function organizations_overview (line 583) | async fn organizations_overview(_token: AdminToken, conn: DbConn) -> Api...
  function delete_organization (line 603) | async fn delete_organization(org_id: OrganizationId, _token: AdminToken,...
  type GitRelease (line 609) | struct GitRelease {
  type GitCommit (line 614) | struct GitCommit {
  function get_json_api (line 618) | async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
  function get_text_api (line 622) | async fn get_text_api(url: &str) -> Result<String, Error> {
  function has_http_access (line 626) | async fn has_http_access() -> bool {
  function get_release_info (line 642) | async fn get_release_info(has_http_access: bool) -> (String, String, Str...
  function get_ntp_time (line 673) | async fn get_ntp_time(has_http_access: bool) -> String {
  function web_vault_compare (line 692) | fn web_vault_compare(active: &str, latest: &str) -> i8 {
  function diagnostics (line 713) | async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbCo...
  function get_diagnostics_config (line 772) | fn get_diagnostics_config(_token: AdminToken) -> Json<Value> {
  function get_diagnostics_http (line 778) | fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult {
  function post_config (line 783) | async fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> E...
  function delete_config (line 792) | async fn delete_config(_token: AdminToken) -> EmptyResult {
  function backup_db (line 800) | fn backup_db(_token: AdminToken) -> ApiResult<String> {
  type AdminToken (line 811) | pub struct AdminToken {
    type Error (line 817) | type Error = &'static str;
    method from_request (line 819) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  function validate_web_vault_compare (line 862) | fn validate_web_vault_compare() {

FILE: src/api/core/accounts.rs
  function routes (line 34) | pub fn routes() -> Vec<rocket::Route> {
  type KDFData (line 80) | pub struct KDFData {
  type RegisterData (line 93) | pub struct RegisterData {
  type SetPasswordData (line 122) | pub struct SetPasswordData {
  type KeysData (line 135) | struct KeysData {
  function clean_password_hint (line 141) | fn clean_password_hint(password_hint: &Option<String>) -> Option<String> {
  function enforce_password_hint_setting (line 151) | fn enforce_password_hint_setting(password_hint: &Option<String>) -> Empt...
  function is_email_2fa_required (line 157) | async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &D...
  function _register (line 170) | pub async fn _register(data: Json<RegisterData>, email_verification: boo...
  function post_set_password (line 347) | async fn post_set_password(data: Json<SetPasswordData>, headers: Headers...
  function profile (line 409) | async fn profile(headers: Headers, conn: DbConn) -> Json<Value> {
  type ProfileData (line 415) | struct ProfileData {
  function put_profile (line 421) | async fn put_profile(data: Json<ProfileData>, headers: Headers, conn: Db...
  function post_profile (line 426) | async fn post_profile(data: Json<ProfileData>, headers: Headers, conn: D...
  type AvatarData (line 444) | struct AvatarData {
  function put_avatar (line 449) | async fn put_avatar(data: Json<AvatarData>, headers: Headers, conn: DbCo...
  function get_public_keys (line 469) | async fn get_public_keys(user_id: UserId, _headers: Headers, conn: DbCon...
  function post_keys (line 484) | async fn post_keys(data: Json<KeysData>, headers: Headers, conn: DbConn)...
  type ChangePassData (line 503) | struct ChangePassData {
  function post_password (line 511) | async fn post_password(data: Json<ChangePassData>, headers: Headers, con...
  function set_kdf_data (line 547) | fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
  type AuthenticationData (line 585) | struct AuthenticationData {
  type UnlockData (line 594) | struct UnlockData {
  type ChangeKdfData (line 603) | struct ChangeKdfData {
  function post_kdf (line 612) | async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbC...
  type UpdateFolderData (line 646) | struct UpdateFolderData {
  type UpdateEmergencyAccessData (line 656) | struct UpdateEmergencyAccessData {
  type UpdateResetPasswordData (line 663) | struct UpdateResetPasswordData {
  type KeyData (line 673) | struct KeyData {
  type RotateAccountUnlockData (line 682) | struct RotateAccountUnlockData {
  type MasterPasswordUnlockData (line 690) | struct MasterPasswordUnlockData {
  type RotateAccountKeys (line 702) | struct RotateAccountKeys {
  type RotateAccountData (line 709) | struct RotateAccountData {
  function validate_keydata (line 715) | fn validate_keydata(
  function post_rotatekey (line 794) | async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbC...
  function post_sstamp (line 916) | async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, co...
  type EmailTokenData (line 933) | struct EmailTokenData {
  function post_email_token (line 939) | async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, ...
  type ChangeEmailData (line 991) | struct ChangeEmailData {
  function post_email (line 1001) | async fn post_email(data: Json<ChangeEmailData>, headers: Headers, conn:...
  function post_verify_email (line 1055) | async fn post_verify_email(headers: Headers) -> EmptyResult {
  type VerifyEmailTokenData (line 1071) | struct VerifyEmailTokenData {
  function post_verify_email_token (line 1077) | async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, conn:...
  type DeleteRecoverData (line 1102) | struct DeleteRecoverData {
  function post_delete_recover (line 1107) | async fn post_delete_recover(data: Json<DeleteRecoverData>, conn: DbConn...
  type DeleteRecoverTokenData (line 1128) | struct DeleteRecoverTokenData {
  function post_delete_recover_token (line 1134) | async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, c...
  function post_delete_account (line 1152) | async fn post_delete_account(data: Json<PasswordOrOtpData>, headers: Hea...
  function delete_account (line 1157) | async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers,...
  function revision_date (line 1167) | fn revision_date(headers: Headers) -> JsonResult {
  type PasswordHintData (line 1174) | struct PasswordHintData {
  function password_hint (line 1179) | async fn password_hint(data: Json<PasswordHintData>, conn: DbConn) -> Em...
  type PreloginData (line 1221) | pub struct PreloginData {
  function prelogin (line 1226) | async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
  function _prelogin (line 1230) | pub async fn _prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<V...
  type SecretVerificationRequest (line 1249) | struct SecretVerificationRequest {
  function kdf_upgrade (line 1254) | pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &DbConn)...
  function verify_password (line 1267) | async fn verify_password(data: Json<SecretVerificationRequest>, headers:...
  function _api_key (line 1280) | async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: ...
  function api_key (line 1301) | async fn api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: ...
  function rotate_api_key (line 1306) | async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers,...
  function get_known_device (line 1311) | async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResu...
  type KnownDevice (line 1320) | struct KnownDevice {
    type Error (line 1327) | type Error = &'static str;
    method from_request (line 1329) | async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Err...
  function get_all_devices (line 1363) | async fn get_all_devices(headers: Headers, conn: DbConn) -> JsonResult {
  function get_device (line 1375) | async fn get_device(device_id: DeviceId, headers: Headers, conn: DbConn)...
  type PushToken (line 1384) | struct PushToken {
  function post_device_token (line 1389) | async fn post_device_token(device_id: DeviceId, data: Json<PushToken>, h...
  function put_device_token (line 1394) | async fn put_device_token(device_id: DeviceId, data: Json<PushToken>, he...
  function put_clear_device_token (line 1421) | async fn put_clear_device_token(device_id: DeviceId, conn: DbConn) -> Em...
  function post_clear_device_token (line 1443) | async fn post_clear_device_token(device_id: DeviceId, conn: DbConn) -> E...
  function get_tasks (line 1448) | fn get_tasks(_client_headers: ClientHeaders) -> JsonResult {
  type AuthRequestRequest (line 1457) | struct AuthRequestRequest {
  function post_auth_request (line 1468) | async fn post_auth_request(
  function get_auth_request (line 1523) | async fn get_auth_request(auth_request_id: AuthRequestId, headers: Heade...
  type AuthResponseRequest (line 1548) | struct AuthResponseRequest {
  function put_auth_request (line 1556) | async fn put_auth_request(
  function get_auth_request_response (line 1629) | async fn get_auth_request_response(
  function get_auth_requests (line 1666) | async fn get_auth_requests(headers: Headers, conn: DbConn) -> JsonResult {
  function get_auth_requests_pending (line 1671) | async fn get_auth_requests_pending(headers: Headers, conn: DbConn) -> Js...
  function purge_auth_requests (line 1700) | pub async fn purge_auth_requests(pool: DbPool) {

FILE: src/api/core/ciphers.rs
  function routes (line 33) | pub fn routes() -> Vec<Route> {
  function purge_trashed_ciphers (line 101) | pub async fn purge_trashed_ciphers(pool: DbPool) {
  type SyncData (line 111) | struct SyncData {
  function sync (line 117) | async fn sync(data: SyncData, headers: Headers, client_version: Option<C...
  function get_ciphers (line 202) | async fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult {
  function get_cipher (line 221) | async fn get_cipher(cipher_id: CipherId, headers: Headers, conn: DbConn)...
  function get_cipher_admin (line 234) | async fn get_cipher_admin(cipher_id: CipherId, headers: Headers, conn: D...
  function get_cipher_details (line 240) | async fn get_cipher_details(cipher_id: CipherId, headers: Headers, conn:...
  type CipherData (line 246) | pub struct CipherData {
  type PartialCipherData (line 298) | pub struct PartialCipherData {
  type Attachments2Data (line 305) | pub struct Attachments2Data {
  function post_ciphers_admin (line 312) | async fn post_ciphers_admin(data: Json<ShareCipherData>, headers: Header...
  function post_ciphers_create (line 320) | async fn post_ciphers_create(
  function post_ciphers (line 354) | async fn post_ciphers(data: Json<CipherData>, headers: Headers, conn: Db...
  function enforce_personal_ownership_policy (line 376) | async fn enforce_personal_ownership_policy(data: Option<&CipherData>, he...
  function update_cipher_from_data (line 387) | pub async fn update_cipher_from_data(
  type ImportData (line 569) | struct ImportData {
  type RelationsData (line 577) | struct RelationsData {
  function post_ciphers_import (line 585) | async fn post_ciphers_import(data: Json<ImportData>, headers: Headers, c...
  function put_cipher_admin (line 637) | async fn put_cipher_admin(
  function post_cipher_admin (line 648) | async fn post_cipher_admin(
  function post_cipher (line 659) | async fn post_cipher(
  function put_cipher (line 670) | async fn put_cipher(
  function post_cipher_partial (line 698) | async fn post_cipher_partial(
  function put_cipher_partial (line 709) | async fn put_cipher_partial(
  type CollectionsAdminData (line 741) | struct CollectionsAdminData {
  function put_collections2_update (line 747) | async fn put_collections2_update(
  function post_collections2_update (line 758) | async fn post_collections2_update(
  function put_collections_update (line 774) | async fn put_collections_update(
  function post_collections_update (line 785) | async fn post_collections_update(
  function put_collections_admin (line 850) | async fn put_collections_admin(
  function post_collections_admin (line 861) | async fn post_collections_admin(
  type ShareCipherData (line 927) | struct ShareCipherData {
  function post_cipher_share (line 935) | async fn post_cipher_share(
  function put_cipher_share (line 948) | async fn put_cipher_share(
  type ShareSelectedCipherData (line 962) | struct ShareSelectedCipherData {
  function put_cipher_share_selected (line 968) | async fn put_cipher_share_selected(
  function share_cipher_by_uuid (line 1010) | async fn share_cipher_by_uuid(
  function get_attachment (line 1069) | async fn get_attachment(
  type AttachmentRequestData (line 1092) | struct AttachmentRequestData {
  type FileUploadType (line 1099) | enum FileUploadType {
  function post_attachment_v2 (line 1109) | async fn post_attachment_v2(
  type UploadData (line 1150) | struct UploadData<'f> {
  function save_attachment (line 1163) | async fn save_attachment(
  function post_attachment_v2_data (line 1331) | async fn post_attachment_v2_data(
  function post_attachment (line 1352) | async fn post_attachment(
  function post_attachment_admin (line 1369) | async fn post_attachment_admin(
  function post_attachment_share (line 1380) | async fn post_attachment_share(
  function delete_attachment_post_admin (line 1393) | async fn delete_attachment_post_admin(
  function delete_attachment_post (line 1404) | async fn delete_attachment_post(
  function delete_attachment (line 1415) | async fn delete_attachment(
  function delete_attachment_admin (line 1426) | async fn delete_attachment_admin(
  function delete_cipher_post (line 1437) | async fn delete_cipher_post(cipher_id: CipherId, headers: Headers, conn:...
  function delete_cipher_post_admin (line 1443) | async fn delete_cipher_post_admin(cipher_id: CipherId, headers: Headers,...
  function delete_cipher_put (line 1449) | async fn delete_cipher_put(cipher_id: CipherId, headers: Headers, conn: ...
  function delete_cipher_put_admin (line 1455) | async fn delete_cipher_put_admin(cipher_id: CipherId, headers: Headers, ...
  function delete_cipher (line 1461) | async fn delete_cipher(cipher_id: CipherId, headers: Headers, conn: DbCo...
  function delete_cipher_admin (line 1467) | async fn delete_cipher_admin(cipher_id: CipherId, headers: Headers, conn...
  function delete_cipher_selected (line 1473) | async fn delete_cipher_selected(
  function delete_cipher_selected_post (line 1484) | async fn delete_cipher_selected_post(
  function delete_cipher_selected_put (line 1495) | async fn delete_cipher_selected_put(
  function delete_cipher_selected_admin (line 1506) | async fn delete_cipher_selected_admin(
  function delete_cipher_selected_post_admin (line 1517) | async fn delete_cipher_selected_post_admin(
  function delete_cipher_selected_put_admin (line 1528) | async fn delete_cipher_selected_put_admin(
  function restore_cipher_put (line 1539) | async fn restore_cipher_put(cipher_id: CipherId, headers: Headers, conn:...
  function restore_cipher_put_admin (line 1544) | async fn restore_cipher_put_admin(cipher_id: CipherId, headers: Headers,...
  function restore_cipher_selected_admin (line 1549) | async fn restore_cipher_selected_admin(
  function restore_cipher_selected (line 1559) | async fn restore_cipher_selected(
  type MoveCipherData (line 1570) | struct MoveCipherData {
  function move_cipher_selected (line 1576) | async fn move_cipher_selected(
  function move_cipher_selected_put (line 1630) | async fn move_cipher_selected_put(
  type OrganizationIdData (line 1640) | struct OrganizationIdData {
  function delete_all (line 1646) | async fn delete_all(
  type CipherDeleteOptions (line 1707) | pub enum CipherDeleteOptions {
  function _delete_cipher_by_uuid (line 1714) | async fn _delete_cipher_by_uuid(
  type CipherIdsData (line 1776) | struct CipherIdsData {
  function _delete_multiple_ciphers (line 1780) | async fn _delete_multiple_ciphers(
  function _restore_cipher_by_uuid (line 1801) | async fn _restore_cipher_by_uuid(
  function _restore_multiple_ciphers (line 1847) | async fn _restore_multiple_ciphers(
  function _delete_cipher_attachment_by_id (line 1873) | async fn _delete_cipher_attachment_by_id(
  type CipherSyncData (line 1928) | pub struct CipherSyncData {
    method new (line 1946) | pub async fn new(user_id: &UserId, sync_type: CipherSyncType, conn: &D...
  type CipherSyncType (line 1940) | pub enum CipherSyncType {

FILE: src/api/core/emergency_access.rs
  function routes (line 23) | pub fn routes() -> Vec<Route> {
  function get_contacts (line 49) | async fn get_contacts(headers: Headers, conn: DbConn) -> Json<Value> {
  function get_grantees (line 70) | async fn get_grantees(headers: Headers, conn: DbConn) -> Json<Value> {
  function get_emergency_access (line 89) | async fn get_emergency_access(emer_id: EmergencyAccessId, headers: Heade...
  type EmergencyAccessUpdateData (line 106) | struct EmergencyAccessUpdateData {
  function put_emergency_access (line 113) | async fn put_emergency_access(
  function post_emergency_access (line 123) | async fn post_emergency_access(
  function delete_emergency_access (line 159) | async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: He...
  function post_delete_emergency_access (line 182) | async fn post_delete_emergency_access(emer_id: EmergencyAccessId, header...
  type EmergencyAccessInviteData (line 192) | struct EmergencyAccessInviteData {
  function send_invite (line 199) | async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Hea...
  function resend_invite (line 277) | async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, con...
  type AcceptData (line 322) | struct AcceptData {
  function accept_invite (line 327) | async fn accept_invite(
  type ConfirmData (line 384) | struct ConfirmData {
  function confirm_emergency_access (line 389) | async fn confirm_emergency_access(
  function initiate_emergency_access (line 442) | async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: ...
  function approve_emergency_access (line 480) | async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: H...
  function reject_emergency_access (line 515) | async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: He...
  function view_emergency_access (line 552) | async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Head...
  function takeover_emergency_access (line 590) | async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: ...
  type EmergencyAccessPasswordData (line 622) | struct EmergencyAccessPasswordData {
  function password_emergency_access (line 628) | async fn password_emergency_access(
  function policies_emergency_access (line 674) | async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: ...
  function is_valid_request (line 700) | fn is_valid_request(
  function check_emergency_access_enabled (line 711) | fn check_emergency_access_enabled() -> EmptyResult {
  function emergency_request_timeout_job (line 718) | pub async fn emergency_request_timeout_job(pool: DbPool) {
  function emergency_notification_reminder_job (line 773) | pub async fn emergency_notification_reminder_job(pool: DbPool) {

FILE: src/api/core/events.rs
  function routes (line 20) | pub fn routes() -> Vec<Route> {
  type EventRange (line 25) | struct EventRange {
  function get_org_events (line 34) | async fn get_org_events(org_id: OrganizationId, data: EventRange, header...
  function get_cipher_events (line 66) | async fn get_cipher_events(cipher_id: CipherId, data: EventRange, header...
  function get_user_events (line 98) | async fn get_user_events(
  function get_continuation_token (line 134) | fn get_continuation_token(events_json: &[Value]) -> Option<&str> {
  function main_routes (line 150) | pub fn main_routes() -> Vec<Route> {
  type EventCollection (line 156) | struct EventCollection {
  function post_events_collect (line 170) | async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: ...
  function log_user_event (line 228) | pub async fn log_user_event(event_type: i32, user_id: &UserId, device_ty...
  function _log_user_event (line 235) | async fn _log_user_event(
  function log_event (line 269) | pub async fn log_event(
  function _log_event (line 285) | async fn _log_event(
  function event_cleanup_job (line 331) | pub async fn event_cleanup_job(pool: DbPool) {

FILE: src/api/core/folders.rs
  function routes (line 13) | pub fn routes() -> Vec<rocket::Route> {
  function get_folders (line 18) | async fn get_folders(headers: Headers, conn: DbConn) -> Json<Value> {
  function get_folder (line 30) | async fn get_folder(folder_id: FolderId, headers: Headers, conn: DbConn)...
  type FolderData (line 39) | pub struct FolderData {
  function post_folders (line 45) | async fn post_folders(data: Json<FolderData>, headers: Headers, conn: Db...
  function post_folder (line 57) | async fn post_folder(
  function put_folder (line 68) | async fn put_folder(
  function delete_folder_post (line 90) | async fn delete_folder_post(folder_id: FolderId, headers: Headers, conn:...
  function delete_folder (line 95) | async fn delete_folder(folder_id: FolderId, headers: Headers, conn: DbCo...

FILE: src/api/core/mod.rs
  function routes (line 18) | pub fn routes() -> Vec<Route> {
  function events_routes (line 40) | pub fn events_routes() -> Vec<Route> {
  type GlobalDomain (line 67) | struct GlobalDomain {
  constant GLOBAL_DOMAINS (line 73) | const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.j...
  function get_eq_domains (line 76) | fn get_eq_domains(headers: Headers) -> Json<Value> {
  function _get_eq_domains (line 80) | fn _get_eq_domains(headers: &Headers, no_excluded: bool) -> Json<Value> {
  type EquivDomainData (line 106) | struct EquivDomainData {
  function post_eq_domains (line 112) | async fn post_eq_domains(data: Json<EquivDomainData>, headers: Headers, ...
  function put_eq_domains (line 132) | async fn put_eq_domains(data: Json<EquivDomainData>, headers: Headers, c...
  function hibp_breach (line 137) | async fn hibp_breach(username: &str, _headers: Headers) -> JsonResult {
  function alive (line 172) | fn alive(_conn: DbConn) -> Json<String> {
  function now (line 177) | pub fn now() -> Json<String> {
  function version (line 182) | fn version() -> Json<&'static str> {
  function get_api_webauthn (line 187) | fn get_api_webauthn(_headers: Headers) -> Json<Value> {
  function config (line 199) | fn config() -> Json<Value> {
  function catchers (line 248) | pub fn catchers() -> Vec<Catcher> {
  function api_not_found (line 253) | fn api_not_found() -> Json<Value> {
  function accept_org_invite (line 263) | async fn accept_org_invite(

FILE: src/api/core/organizations.rs
  function routes (line 27) | pub fn routes() -> Vec<Route> {
  type OrgData (line 107) | struct OrgData {
  type OrganizationUpdateData (line 119) | struct OrganizationUpdateData {
  type FullCollectionData (line 126) | struct FullCollectionData {
  type CollectionGroupData (line 136) | struct CollectionGroupData {
  type CollectionMembershipData (line 145) | struct CollectionMembershipData {
  type OrgKeyData (line 154) | struct OrgKeyData {
  type BulkGroupIds (line 161) | struct BulkGroupIds {
  type BulkMembershipIds (line 167) | struct BulkMembershipIds {
  function create_organization (line 172) | async fn create_organization(headers: Headers, data: Json<OrgData>, conn...
  function delete_organization (line 206) | async fn delete_organization(
  function post_delete_organization (line 226) | async fn post_delete_organization(
  function leave_organization (line 236) | async fn leave_organization(org_id: OrganizationId, headers: Headers, co...
  function get_organization (line 263) | async fn get_organization(org_id: OrganizationId, headers: OwnerHeaders,...
  function put_organization (line 274) | async fn put_organization(
  function post_organization (line 284) | async fn post_organization(
  function get_user_collections (line 321) | async fn get_user_collections(headers: Headers, conn: DbConn) -> Json<Va...
  function get_auto_enroll_status (line 337) | async fn get_auto_enroll_status(identifier: &str, headers: Headers, conn...
  function get_org_collections (line 364) | async fn get_org_collections(org_id: OrganizationId, headers: ManagerHea...
  function get_org_collections_details (line 381) | async fn get_org_collections_details(org_id: OrganizationId, headers: Ma...
  function _get_org_collections (line 468) | async fn _get_org_collections(org_id: &OrganizationId, conn: &DbConn) ->...
  function post_organization_collections (line 473) | async fn post_organization_collections(
  type BulkCollectionAccessData (line 537) | struct BulkCollectionAccessData {
  function post_bulk_access_collections (line 544) | async fn post_bulk_access_collections(
  function put_organization_collection_update (line 608) | async fn put_organization_collection_update(
  function post_organization_collection_update (line 619) | async fn post_organization_collection_update(
  function _delete_organization_collection (line 684) | async fn _delete_organization_collection(
  function delete_organization_collection (line 710) | async fn delete_organization_collection(
  function post_organization_collection_delete (line 720) | async fn post_organization_collection_delete(
  type BulkCollectionIds (line 731) | struct BulkCollectionIds {
  function bulk_delete_organization_collections (line 736) | async fn bulk_delete_organization_collections(
  function get_org_collection_detail (line 758) | async fn get_org_collection_detail(
  function get_collection_users (line 825) | async fn get_collection_users(
  type OrgIdData (line 853) | struct OrgIdData {
  function get_org_details (line 859) | async fn get_org_details(data: OrgIdData, headers: ManagerHeadersLoose, ...
  function _get_org_details (line 875) | async fn _get_org_details(
  type OrgDomainDetails (line 893) | struct OrgDomainDetails {
  function get_org_domain_sso_verified (line 901) | async fn get_org_domain_sso_verified(data: Json<OrgDomainDetails>, conn:...
  type GetOrgUserData (line 925) | struct GetOrgUserData {
  function get_members (line 933) | async fn get_members(
  function post_org_keys (line 962) | async fn post_org_keys(
  type InviteData (line 997) | struct InviteData {
  function send_invite (line 1007) | async fn send_invite(
  function bulk_reinvite_members (line 1148) | async fn bulk_reinvite_members(
  function reinvite_member (line 1183) | async fn reinvite_member(
  function _reinvite_member (line 1195) | async fn _reinvite_member(
  type AcceptData (line 1239) | struct AcceptData {
  function accept_invite (line 1245) | async fn accept_invite(
  type ConfirmData (line 1301) | struct ConfirmData {
  type BulkConfirmData (line 1308) | struct BulkConfirmData {
  function bulk_confirm_invite (line 1313) | async fn bulk_confirm_invite(
  function confirm_invite (line 1356) | async fn confirm_invite(
  function _confirm_invite (line 1369) | async fn _confirm_invite(
  function get_org_user_mini_details (line 1435) | async fn get_org_user_mini_details(org_id: OrganizationId, headers: Mana...
  function get_user (line 1452) | async fn get_user(
  type EditUserData (line 1474) | struct EditUserData {
  function put_member (line 1483) | async fn put_member(
  function edit_member (line 1494) | async fn edit_member(
  function bulk_delete_member (line 1603) | async fn bulk_delete_member(
  function delete_member (line 1639) | async fn delete_member(
  function _delete_member (line 1649) | async fn _delete_member(
  function bulk_public_keys (line 1694) | async fn bulk_public_keys(
  type ImportData (line 1738) | struct ImportData {
  type RelationsData (line 1746) | struct RelationsData {
  function post_org_import (line 1755) | async fn post_org_import(
  type BulkCollectionsData (line 1843) | struct BulkCollectionsData {
  function post_bulk_collections (line 1853) | async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers:...
  function list_policies (line 1903) | async fn list_policies(org_id: OrganizationId, headers: AdminHeaders, co...
  function list_policies_token (line 1918) | async fn list_policies_token(org_id: OrganizationId, token: &str, conn: ...
  function get_master_password_policy (line 1944) | async fn get_master_password_policy(org_id: OrganizationId, _headers: He...
  function get_policy (line 1959) | async fn get_policy(org_id: OrganizationId, pol_type: i32, headers: Admi...
  type PolicyData (line 1977) | struct PolicyData {
  function put_policy (line 1983) | async fn put_policy(
  type PolicyDataVnext (line 2102) | struct PolicyDataVnext {
  function put_policy_vnext (line 2111) | async fn put_policy_vnext(
  function get_plans (line 2124) | fn get_plans() -> Json<Value> {
  function get_billing_metadata (line 2152) | fn get_billing_metadata(_org_id: OrganizationId, _headers: Headers) -> J...
  function get_billing_warnings (line 2158) | fn get_billing_warnings(_org_id: OrganizationId, _headers: Headers) -> J...
  function _empty_data_json (line 2167) | fn _empty_data_json() -> Value {
  type BulkRevokeMembershipIds (line 2177) | struct BulkRevokeMembershipIds {
  function revoke_member (line 2182) | async fn revoke_member(
  function bulk_revoke_members (line 2192) | async fn bulk_revoke_members(
  function _revoke_member (line 2231) | async fn _revoke_member(
  function restore_member (line 2275) | async fn restore_member(
  function bulk_restore_members (line 2285) | async fn bulk_restore_members(
  function _restore_member (line 2319) | async fn _restore_member(
  function get_groups_data (line 2360) | async fn get_groups_data(
  function get_groups (line 2397) | async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose...
  function get_groups_details (line 2402) | async fn get_groups_details(org_id: OrganizationId, headers: ManagerHead...
  type GroupRequest (line 2408) | struct GroupRequest {
    method to_group (line 2418) | pub fn to_group(&self, org_uuid: &OrganizationId) -> Group {
    method update_group (line 2422) | pub fn update_group(&self, mut group: Group) -> Group {
  type CollectionData (line 2434) | struct CollectionData {
    method to_collection_group (line 2442) | pub fn to_collection_group(&self, groups_uuid: GroupId) -> CollectionG...
  function post_group (line 2448) | async fn post_group(
  function post_groups (line 2459) | async fn post_groups(
  function put_group (line 2490) | async fn put_group(
  function add_update_group (line 2528) | async fn add_update_group(
  function get_group_details (line 2570) | async fn get_group_details(
  function post_delete_group (line 2591) | async fn post_delete_group(
  function delete_group (line 2601) | async fn delete_group(org_id: OrganizationId, group_id: GroupId, headers...
  function _delete_group (line 2605) | async fn _delete_group(
  function bulk_delete_groups (line 2637) | async fn bulk_delete_groups(
  function get_group (line 2659) | async fn get_group(org_id: OrganizationId, group_id: GroupId, headers: A...
  function get_group_members (line 2675) | async fn get_group_members(
  function put_group_members (line 2702) | async fn put_group_members(
  function post_delete_group_member (line 2743) | async fn post_delete_group_member(
  type OrganizationUserResetPasswordEnrollmentRequest (line 2781) | struct OrganizationUserResetPasswordEnrollmentRequest {
  type OrganizationUserResetPasswordRequest (line 2789) | struct OrganizationUserResetPasswordRequest {
  function get_organization_public_key (line 2798) | async fn get_organization_public_key(org_id: OrganizationId, headers: Or...
  function get_organization_keys (line 2815) | async fn get_organization_keys(org_id: OrganizationId, headers: OrgMembe...
  function put_reset_password (line 2820) | async fn put_reset_password(
  function get_reset_password_details (line 2881) | async fn get_reset_password_details(
  function check_reset_password_applicable_and_permissions (line 2917) | async fn check_reset_password_applicable_and_permissions(
  function check_reset_password_applicable (line 2937) | async fn check_reset_password_applicable(org_id: &OrganizationId, conn: ...
  function put_reset_password_enrollment (line 2954) | async fn put_reset_password_enrollment(
  function get_org_export (line 3009) | async fn get_org_export(org_id: OrganizationId, headers: AdminHeaders, c...
  function _api_key (line 3020) | async fn _api_key(
  function api_key (line 3061) | async fn api_key(
  function rotate_api_key (line 3071) | async fn rotate_api_key(

FILE: src/api/core/public.rs
  function routes (line 23) | pub fn routes() -> Vec<Route> {
  type OrgImportGroupData (line 29) | struct OrgImportGroupData {
  type OrgImportUserData (line 37) | struct OrgImportUserData {
  type OrgImportData (line 45) | struct OrgImportData {
  function ldap_import (line 53) | async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn...
  type PublicToken (line 196) | pub struct PublicToken(OrganizationId);
    type Error (line 200) | type Error = &'static str;
    method from_request (line 202) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...

FILE: src/api/core/sends.rs
  constant SEND_INACCESSIBLE_MSG (line 24) | const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer...
  constant SIZE_525_MB (line 42) | const SIZE_525_MB: i64 = 550_502_400;
  function routes (line 44) | pub fn routes() -> Vec<rocket::Route> {
  function purge_sends (line 61) | pub async fn purge_sends(pool: DbPool) {
  type SendData (line 72) | pub struct SendData {
  function enforce_disable_send_policy (line 101) | async fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -...
  function enforce_disable_hide_email_policy (line 117) | async fn enforce_disable_hide_email_policy(data: &SendData, headers: &He...
  function create_send (line 129) | fn create_send(data: SendData, user_id: UserId) -> ApiResult<Send> {
  function get_sends (line 169) | async fn get_sends(headers: Headers, conn: DbConn) -> Json<Value> {
  function get_send (line 181) | async fn get_send(send_id: SendId, headers: Headers, conn: DbConn) -> Js...
  function post_send (line 189) | async fn post_send(data: Json<SendData>, headers: Headers, conn: DbConn,...
  type UploadData (line 214) | struct UploadData<'f> {
  type UploadDataV2 (line 220) | struct UploadDataV2<'f> {
  function post_send_file (line 230) | async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, co...
  function post_send_file_v2 (line 302) | async fn post_send_file_v2(data: Json<SendData>, headers: Headers, conn:...
  type SendFileData (line 365) | pub struct SendFileData {
  function post_send_file_v2_data (line 373) | async fn post_send_file_v2_data(
  type SendAccessData (line 444) | pub struct SendAccessData {
  function post_access (line 449) | async fn post_access(
  function post_access_file (line 508) | async fn post_access_file(
  function download_url (line 568) | async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileI...
  function download_send (line 582) | async fn download_send(send_id: SendId, file_id: SendFileId, t: &str) ->...
  function put_send (line 592) | async fn put_send(send_id: SendId, data: Json<SendData>, headers: Header...
  function update_send_from_data (line 607) | pub async fn update_send_from_data(
  function delete_send (line 666) | async fn delete_send(send_id: SendId, headers: Headers, conn: DbConn, nt...
  function put_remove_password (line 685) | async fn put_remove_password(send_id: SendId, headers: Headers, conn: Db...

FILE: src/api/core/two_factor/authenticator.rs
  function routes (line 18) | pub fn routes() -> Vec<Route> {
  function generate_authenticator (line 23) | async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: ...
  type EnableAuthenticatorData (line 50) | struct EnableAuthenticatorData {
  function activate_authenticator (line 58) | async fn activate_authenticator(data: Json<EnableAuthenticatorData>, hea...
  function activate_authenticator_put (line 97) | async fn activate_authenticator_put(data: Json<EnableAuthenticatorData>,...
  function validate_totp_code_str (line 101) | pub async fn validate_totp_code_str(
  function validate_totp_code (line 115) | pub async fn validate_totp_code(
  type DisableAuthenticatorData (line 185) | struct DisableAuthenticatorData {
  function disable_authenticator (line 192) | async fn disable_authenticator(data: Json<DisableAuthenticatorData>, hea...

FILE: src/api/core/two_factor/duo.rs
  function routes (line 22) | pub fn routes() -> Vec<Route> {
  type DuoData (line 27) | struct DuoData {
    method global (line 34) | fn global() -> Option<Self> {
    method msg (line 44) | fn msg(s: &str) -> Self {
    method secret (line 51) | fn secret() -> Self {
    method obscure (line 54) | fn obscure(self) -> Self {
    method from (line 142) | fn from(d: EnableDuoData) -> Self {
  type DuoStatus (line 74) | enum DuoStatus {
    method data (line 83) | fn data(self) -> Option<DuoData> {
  constant DISABLED_MESSAGE_DEFAULT (line 92) | const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, ple...
  function get_duo (line 95) | async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, conn: ...
  type EnableDuoData (line 133) | struct EnableDuoData {
  function check_duo_fields_custom (line 151) | fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
  function activate_duo (line 161) | async fn activate_duo(data: Json<EnableDuoData>, headers: Headers, conn:...
  function activate_duo_put (line 199) | async fn activate_duo_put(data: Json<EnableDuoData>, headers: Headers, c...
  function duo_api_request (line 203) | async fn duo_api_request(method: &str, path: &str, params: &str, data: &...
  constant DUO_EXPIRE (line 227) | const DUO_EXPIRE: i64 = 300;
  constant APP_EXPIRE (line 228) | const APP_EXPIRE: i64 = 3600;
  constant AUTH_PREFIX (line 230) | const AUTH_PREFIX: &str = "AUTH";
  constant DUO_PREFIX (line 231) | const DUO_PREFIX: &str = "TX";
  constant APP_PREFIX (line 232) | const APP_PREFIX: &str = "APP";
  function get_user_duo_data (line 234) | async fn get_user_duo_data(user_id: &UserId, conn: &DbConn) -> DuoStatus {
  function get_duo_keys_email (line 257) | pub(crate) async fn get_duo_keys_email(email: &str, conn: &DbConn) -> Ap...
  function generate_duo_signature (line 267) | pub async fn generate_duo_signature(email: &str, conn: &DbConn) -> ApiRe...
  function sign_duo_values (line 278) | fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, exp...
  function validate_duo_login (line 285) | pub async fn validate_duo_login(email: &str, response: &str, conn: &DbCo...
  function parse_duo_values (line 318) | fn parse_duo_values(key: &str, val: &str, ikey: &str, prefix: &str, time...

FILE: src/api/core/two_factor/duo_oidc.rs
  constant DUO_REDIRECT_LOCATION (line 25) | const DUO_REDIRECT_LOCATION: &str = "duo-redirect-connector.html";
  constant JWT_VALIDITY_SECS (line 28) | const JWT_VALIDITY_SECS: i64 = 300;
  constant CTX_VALIDITY_SECS (line 31) | const CTX_VALIDITY_SECS: i64 = 300;
  constant DUO_RESP_SIGNATURE_ALG (line 34) | const DUO_RESP_SIGNATURE_ALG: Algorithm = Algorithm::HS512;
  constant JWT_SIGNATURE_ALG (line 37) | const JWT_SIGNATURE_ALG: Algorithm = Algorithm::HS512;
  constant STATE_LENGTH (line 42) | const STATE_LENGTH: usize = 64;
  type ClientAssertion (line 46) | struct ClientAssertion {
  type AuthorizationRequest (line 57) | struct AuthorizationRequest {
  type HealthCheckResponse (line 73) | enum HealthCheckResponse {
  type IdTokenResponse (line 85) | struct IdTokenResponse {
  type IdTokenClaims (line 94) | struct IdTokenClaims {
  type DuoClient (line 101) | struct DuoClient {
    method new (line 110) | fn new(client_id: String, client_secret: String, api_host: String, red...
    method new_client_assertion (line 120) | fn new_client_assertion(&self, url: &str) -> ClientAssertion {
    method encode_duo_jwt (line 135) | fn encode_duo_jwt<T: Serialize>(&self, jwt_payload: T) -> Result<Strin...
    method health_check (line 149) | async fn health_check(&self) -> Result<(), Error> {
    method make_authz_req_url (line 198) | fn make_authz_req_url(&self, duo_username: &str, state: String, nonce:...
    method exchange_authz_code_for_result (line 236) | async fn exchange_authz_code_for_result(
  type DuoAuthContext (line 311) | struct DuoAuthContext {
  function extract_context (line 320) | async fn extract_context(state: &str, conn: &DbConn) -> Option<DuoAuthCo...
  function purge_duo_contexts (line 345) | pub async fn purge_duo_contexts(pool: DbPool) {
  function make_callback_url (line 355) | fn make_callback_url(client_name: &str) -> Result<String, Error> {
  function get_duo_auth_url (line 379) | pub async fn get_duo_auth_url(
  function validate_duo_login (line 416) | pub async fn validate_duo_login(

FILE: src/api/core/two_factor/email.rs
  function routes (line 20) | pub fn routes() -> Vec<Route> {
  type SendEmailLoginData (line 26) | struct SendEmailLoginData {
  function send_email_login (line 40) | async fn send_email_login(data: Json<SendEmailLoginData>, client_headers...
  function send_token (line 106) | pub async fn send_token(user_id: &UserId, conn: &DbConn) -> EmptyResult {
  function get_email (line 124) | async fn get_email(data: Json<PasswordOrOtpData>, headers: Headers, conn...
  type SendEmailData (line 148) | struct SendEmailData {
  function send_email (line 157) | async fn send_email(data: Json<SendEmailData>, headers: Headers, conn: D...
  type EmailData (line 192) | struct EmailData {
  function email (line 201) | async fn email(data: Json<EmailData>, headers: Headers, conn: DbConn) ->...
  function validate_email_code_str (line 244) | pub async fn validate_email_code_str(
  type EmailTokenData (line 300) | pub struct EmailTokenData {
    method new (line 313) | pub fn new(email: String, token: String) -> EmailTokenData {
    method set_token (line 322) | pub fn set_token(&mut self, token: String) {
    method reset_token (line 327) | pub fn reset_token(&mut self) {
    method add_attempt (line 332) | pub fn add_attempt(&mut self) {
    method to_json (line 336) | pub fn to_json(&self) -> String {
    method from_json (line 340) | pub fn from_json(string: &str) -> Result<EmailTokenData, Error> {
  function activate_email_2fa (line 349) | pub async fn activate_email_2fa(user: &User, conn: &DbConn) -> EmptyResu...
  function obscure_email (line 359) | pub fn obscure_email(email: &str) -> String {
  function find_and_activate_email_2fa (line 379) | pub async fn find_and_activate_email_2fa(user_id: &UserId, conn: &DbConn...
  function test_obscure_email_long (line 392) | fn test_obscure_email_long() {
  function test_obscure_email_short (line 402) | fn test_obscure_email_short() {

FILE: src/api/core/two_factor/mod.rs
  function routes (line 34) | pub fn routes() -> Vec<Route> {
  function get_twofactor (line 54) | async fn get_twofactor(headers: Headers, conn: DbConn) -> Json<Value> {
  function get_recover (line 66) | async fn get_recover(data: Json<PasswordOrOtpData>, headers: Headers, co...
  function _generate_recover_code (line 78) | async fn _generate_recover_code(user: &mut User, conn: &DbConn) {
  type DisableTwoFactorData (line 88) | struct DisableTwoFactorData {
  function disable_twofactor (line 95) | async fn disable_twofactor(data: Json<DisableTwoFactorData>, headers: He...
  function disable_twofactor_put (line 127) | async fn disable_twofactor_put(data: Json<DisableTwoFactorData>, headers...
  function enforce_2fa_policy (line 131) | pub async fn enforce_2fa_policy(
  function enforce_2fa_policy_for_org (line 167) | pub async fn enforce_2fa_policy_for_org(
  function send_incomplete_2fa_notifications (line 202) | pub async fn send_incomplete_2fa_notifications(pool: DbPool) {
  function get_device_verification_settings (line 258) | fn get_device_verification_settings(_headers: Headers, _conn: DbConn) ->...

FILE: src/api/core/two_factor/protected_actions.rs
  function routes (line 16) | pub fn routes() -> Vec<Route> {
  type ProtectedActionData (line 22) | pub struct ProtectedActionData {
    method new (line 33) | pub fn new(token: String) -> Self {
    method to_json (line 41) | pub fn to_json(&self) -> String {
    method from_json (line 45) | pub fn from_json(string: &str) -> Result<Self, Error> {
    method add_attempt (line 53) | pub fn add_attempt(&mut self) {
    method time_since_sent (line 57) | pub fn time_since_sent(&self) -> TimeDelta {
  function request_otp (line 63) | async fn request_otp(headers: Headers, conn: DbConn) -> EmptyResult {
  type ProtectedActionVerify (line 97) | struct ProtectedActionVerify {
  function verify_otp (line 103) | async fn verify_otp(data: Json<ProtectedActionVerify>, headers: Headers,...
  function validate_protected_action_otp (line 116) | pub async fn validate_protected_action_otp(

FILE: src/api/core/two_factor/webauthn.rs
  function routes (line 46) | pub fn routes() -> Vec<Route> {
  type Registration (line 54) | pub struct Registration {
  type U2FRegistration (line 62) | pub struct U2FRegistration {
  type WebauthnRegistration (line 73) | pub struct WebauthnRegistration {
    method to_json (line 82) | fn to_json(&self) -> Value {
    method set_backup_eligible (line 90) | fn set_backup_eligible(&mut self, backup_eligible: bool, backup_state:...
  function get_webauthn (line 110) | async fn get_webauthn(data: Json<PasswordOrOtpData>, headers: Headers, c...
  function generate_webauthn_challenge (line 131) | async fn generate_webauthn_challenge(data: Json<PasswordOrOtpData>, head...
  type EnableWebauthnData (line 173) | struct EnableWebauthnData {
  type RegisterPublicKeyCredentialCopy (line 183) | struct RegisterPublicKeyCredentialCopy {
  type AuthenticatorAttestationResponseRawCopy (line 193) | pub struct AuthenticatorAttestationResponseRawCopy {
  method from (line 201) | fn from(r: RegisterPublicKeyCredentialCopy) -> Self {
  type PublicKeyCredentialCopy (line 218) | pub struct PublicKeyCredentialCopy {
  type AuthenticatorAssertionResponseRawCopy (line 229) | pub struct AuthenticatorAssertionResponseRawCopy {
  method from (line 238) | fn from(r: PublicKeyCredentialCopy) -> Self {
  function activate_webauthn (line 255) | async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Head...
  function activate_webauthn_put (line 307) | async fn activate_webauthn_put(data: Json<EnableWebauthnData>, headers: ...
  type DeleteU2FData (line 313) | struct DeleteU2FData {
  function delete_webauthn (line 319) | async fn delete_webauthn(data: Json<DeleteU2FData>, headers: Headers, co...
  function get_webauthn_registrations (line 366) | pub async fn get_webauthn_registrations(
  function generate_webauthn_login (line 377) | pub async fn generate_webauthn_login(user_id: &UserId, conn: &DbConn) ->...
  function validate_webauthn_login (line 417) | pub async fn validate_webauthn_login(user_id: &UserId, response: &str, c...
  function check_and_update_backup_eligible (line 467) | fn check_and_update_backup_eligible(

FILE: src/api/core/two_factor/yubikey.rs
  function routes (line 20) | pub fn routes() -> Vec<Route> {
  type EnableYubikeyData (line 26) | struct EnableYubikeyData {
  type YubikeyMetadata (line 39) | pub struct YubikeyMetadata {
  function parse_yubikeys (line 46) | fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
  function jsonify_yubikeys (line 52) | fn jsonify_yubikeys(yubikeys: Vec<String>) -> Value {
  function get_yubico_credentials (line 62) | fn get_yubico_credentials() -> Result<(String, String), Error> {
  function verify_yubikey_otp (line 73) | async fn verify_yubikey_otp(otp: String) -> EmptyResult {
  function generate_yubikey (line 86) | async fn generate_yubikey(data: Json<PasswordOrOtpData>, headers: Header...
  function activate_yubikey (line 119) | async fn activate_yubikey(data: Json<EnableYubikeyData>, headers: Header...
  function activate_yubikey_put (line 179) | async fn activate_yubikey_put(data: Json<EnableYubikeyData>, headers: He...
  function validate_yubikey_login (line 183) | pub async fn validate_yubikey_login(response: &str, twofactor_data: &str...

FILE: src/api/icons.rs
  function routes (line 27) | pub fn routes() -> Vec<Route> {
  function icon_external (line 85) | fn icon_external(domain: &str) -> Cached<Option<Redirect>> {
  function icon_internal (line 111) | async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
  function is_valid_domain (line 144) | fn is_valid_domain(domain: &str) -> bool {
  function get_icon (line 176) | async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
  function get_cached_icon (line 215) | async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
  function file_is_expired (line 231) | async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> {
  function icon_is_negcached (line 241) | async fn icon_is_negcached(path: &str) -> bool {
  function icon_is_expired (line 265) | async fn icon_is_expired(path: &str) -> bool {
  type Icon (line 270) | struct Icon {
    method new (line 276) | const fn new(priority: u8, href: String) -> Self {
  function get_favicons_node (line 284) | fn get_favicons_node(dom: Tokenizer<StringReader<'_>, FaviconEmitter>, i...
  type IconUrlResult (line 333) | struct IconUrlResult {
  function get_icon_url (line 350) | async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
  function get_page (line 438) | async fn get_page(url: &str) -> Result<Response, Error> {
  function get_page_with_referer (line 442) | async fn get_page_with_referer(url: &str, referer: &str) -> Result<Respo...
  function get_icon_priority (line 463) | fn get_icon_priority(href: &str, sizes: &str) -> u8 {
  function parse_sizes (line 510) | fn parse_sizes(sizes: &str) -> (u16, u16) {
  function download_icon (line 527) | async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Er...
  function save_icon (line 596) | async fn save_icon(path: &str, icon: Vec<u8>) {
  function get_icon_type (line 610) | fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
  function stream_to_bytes_limit (line 636) | async fn stream_to_bytes_limit(res: Response, max_size: usize) -> Result...
  type Jar (line 663) | pub struct Jar(std::sync::RwLock<CookieStore>);
    method set_cookies (line 666) | fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &Header...
    method cookies (line 685) | fn cookies(&self, url: &url::Url) -> Option<HeaderValue> {
  type Tag (line 709) | pub struct Tag {
  type FaviconToken (line 719) | struct FaviconToken {
  type FaviconEmitter (line 725) | struct FaviconEmitter {
    method flush_current_attribute (line 733) | fn flush_current_attribute(&mut self, emit_current_tag: bool) {
  type Token (line 766) | type Token = FaviconToken;
  method set_last_start_tag (line 768) | fn set_last_start_tag(&mut self, last_start_tag: Option<&[u8]>) {
  method pop_token (line 773) | fn pop_token(&mut self) -> Option<Self::Token> {
  method init_start_tag (line 781) | fn init_start_tag(&mut self) {
  method init_end_tag (line 788) | fn init_end_tag(&mut self) {
  method emit_current_tag (line 795) | fn emit_current_tag(&mut self) -> Option<html5gum::State> {
  method push_tag_name (line 807) | fn push_tag_name(&mut self, s: &[u8]) {
  method init_attribute (line 813) | fn init_attribute(&mut self) {
  method push_attribute_name (line 827) | fn push_attribute_name(&mut self, s: &[u8]) {
  method push_attribute_value (line 833) | fn push_attribute_value(&mut self, s: &[u8]) {
  method current_is_appropriate_end_tag_token (line 839) | fn current_is_appropriate_end_tag_token(&mut self) -> bool {
  method emit_current_comment (line 848) | fn emit_current_comment(&mut self) {}
  method emit_current_doctype (line 849) | fn emit_current_doctype(&mut self) {}
  method emit_eof (line 850) | fn emit_eof(&mut self) {}
  method emit_error (line 851) | fn emit_error(&mut self, _: html5gum::Error) {}
  method emit_string (line 852) | fn emit_string(&mut self, _: &[u8]) {}
  method init_comment (line 853) | fn init_comment(&mut self) {}
  method init_doctype (line 854) | fn init_doctype(&mut self) {}
  method push_comment (line 855) | fn push_comment(&mut self, _: &[u8]) {}
  method push_doctype_name (line 856) | fn push_doctype_name(&mut self, _: &[u8]) {}
  method push_doctype_public_identifier (line 857) | fn push_doctype_public_identifier(&mut self, _: &[u8]) {}
  method push_doctype_system_identifier (line 858) | fn push_doctype_system_identifier(&mut self, _: &[u8]) {}
  method set_doctype_public_identifier (line 859) | fn set_doctype_public_identifier(&mut self, _: &[u8]) {}
  method set_doctype_system_identifier (line 860) | fn set_doctype_system_identifier(&mut self, _: &[u8]) {}
  method set_force_quirks (line 861) | fn set_force_quirks(&mut self) {}
  method set_self_closing (line 862) | fn set_self_closing(&mut self) {}

FILE: src/api/identity.rs
  function routes (line 38) | pub fn routes() -> Vec<Route> {
  function login (line 53) | async fn login(
  function _refresh_login (line 132) | async fn _refresh_login(data: ConnectData, conn: &DbConn, ip: &ClientIp)...
  function _sso_login (line 167) | async fn _sso_login(
  function _password_login (line 310) | async fn _password_login(
  function authenticated_response (line 432) | async fn authenticated_response(
  function _api_key_login (line 532) | async fn _api_key_login(data: ConnectData, user_id: &mut Option<UserId>,...
  function _user_api_key_login (line 544) | async fn _user_api_key_login(
  function _organization_api_key_login (line 676) | async fn _organization_api_key_login(data: ConnectData, conn: &DbConn, i...
  function get_device (line 705) | async fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> A...
  function twofactor_auth (line 724) | async fn twofactor_auth(
  function _selected_data (line 838) | fn _selected_data(tf: Option<TwoFactor>) -> ApiResult<String> {
  function _json_err_twofactor (line 842) | async fn _json_err_twofactor(
  function prelogin (line 946) | async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
  function identity_register (line 951) | async fn identity_register(data: Json<RegisterData>, conn: DbConn) -> Js...
  type RegisterVerificationData (line 957) | struct RegisterVerificationData {
  type RegisterVerificationResponse (line 964) | enum RegisterVerificationResponse {
  function register_verification_email (line 971) | async fn register_verification_email(
  function register_finish (line 1012) | async fn register_finish(data: Json<RegisterData>, conn: DbConn) -> Json...
  type ConnectData (line 1019) | struct ConnectData {
  function _check_is_some (line 1076) | fn _check_is_some<T>(value: &Option<T>, msg: &str) -> EmptyResult {
  function prevalidate (line 1084) | fn prevalidate() -> JsonResult {
  function oidcsignin (line 1096) | async fn oidcsignin(code: OIDCCode, state: String, mut conn: DbConn) -> ...
  function oidcsignin_error (line 1110) | async fn oidcsignin_error(
  function _oidcsignin_redirect (line 1130) | async fn _oidcsignin_redirect(
  type AuthorizeData (line 1162) | struct AuthorizeData {
  function authorize (line 1187) | async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult<Redir...

FILE: src/api/mod.rs
  type ApiResult (line 42) | pub type ApiResult<T> = Result<T, crate::error::Error>;
  type JsonResult (line 43) | pub type JsonResult = ApiResult<Json<Value>>;
  type EmptyResult (line 44) | pub type EmptyResult = ApiResult<()>;
  type PasswordOrOtpData (line 49) | struct PasswordOrOtpData {
    method validate (line 59) | pub async fn validate(&self, user: &User, delete_if_valid: bool, conn:...
  type MasterPasswordPolicy (line 79) | pub struct MasterPasswordPolicy {
  function master_password_policy (line 90) | async fn master_password_policy(user: &User, conn: &DbConn) -> Value {

FILE: src/api/notifications.rs
  function routes (line 41) | pub fn routes() -> Vec<Route> {
  type WsAccessToken (line 51) | struct WsAccessToken {
  type WSEntryMapGuard (line 55) | struct WSEntryMapGuard {
    method new (line 63) | fn new(users: Arc<WebSocketUsers>, user_uuid: UserId, entry_uuid: uuid...
  method drop (line 74) | fn drop(&mut self) {
  type WSAnonymousEntryMapGuard (line 82) | struct WSAnonymousEntryMapGuard {
    method new (line 89) | fn new(subscriptions: Arc<AnonymousWebSocketSubscriptions>, token: Str...
  method drop (line 99) | fn drop(&mut self) {
  function websockets_hub (line 107) | fn websockets_hub<'r>(
  function anonymous_websockets_hub (line 191) | fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: Client...
  function serialize (line 259) | fn serialize(val: &Value) -> Vec<u8> {
  function serialize_date (line 289) | fn serialize_date(date: NaiveDateTime) -> Value {
  function convert_option (line 301) | fn convert_option<T: Into<Value>>(option: Option<T>) -> Value {
  constant RECORD_SEPARATOR (line 308) | const RECORD_SEPARATOR: u8 = 0x1e;
  constant INITIAL_RESPONSE (line 309) | const INITIAL_RESPONSE: [u8; 3] = [0x7b, 0x7d, RECORD_SEPARATOR];
  type InitialMessage (line 312) | struct InitialMessage<'a> {
  type UserSenders (line 323) | type UserSenders = (uuid::Uuid, Sender<Message>);
  type WebSocketUsers (line 325) | pub struct WebSocketUsers {
    method send_update (line 330) | async fn send_update(&self, user_id: &UserId, data: &[u8]) {
    method send_user_update (line 341) | pub async fn send_user_update(&self, ut: UpdateType, user: &User, push...
    method send_logout (line 361) | pub async fn send_logout(&self, user: &User, acting_device_id: Option<...
    method send_folder_update (line 381) | pub async fn send_folder_update(&self, ut: UpdateType, folder: &Folder...
    method send_cipher_update (line 405) | pub async fn send_cipher_update(
    method send_send_update (line 454) | pub async fn send_send_update(
    method send_auth_request (line 488) | pub async fn send_auth_request(&self, user_id: &UserId, auth_request_u...
    method send_auth_response (line 507) | pub async fn send_auth_response(
  type AnonymousWebSocketSubscriptions (line 534) | pub struct AnonymousWebSocketSubscriptions {
    method send_update (line 539) | async fn send_update(&self, token: &str, data: &[u8]) {
    method send_auth_response (line 547) | pub async fn send_auth_response(&self, user_id: &UserId, auth_request_...
  function create_update (line 575) | fn create_update(payload: Vec<(Value, Value)>, ut: UpdateType, acting_de...
  function create_anonymous_update (line 593) | fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType,...
  function create_ping (line 614) | fn create_ping() -> Vec<u8> {
  type UpdateType (line 620) | pub enum UpdateType {
  type Notify (line 654) | pub type Notify<'a> = &'a rocket::State<Arc<WebSocketUsers>>;
  type AnonymousNotify (line 655) | pub type AnonymousNotify<'a> = &'a rocket::State<Arc<AnonymousWebSocketS...

FILE: src/api/push.rs
  type AuthPushToken (line 25) | struct AuthPushToken {
  type LocalAuthPushToken (line 31) | struct LocalAuthPushToken {
  function get_auth_api_token (line 36) | async fn get_auth_api_token() -> ApiResult<String> {
  function register_push_device (line 87) | pub async fn register_push_device(device: &mut Device, conn: &DbConn) ->...
  function unregister_push_device (line 138) | pub async fn unregister_push_device(push_id: &Option<PushId>) -> EmptyRe...
  function push_cipher_update (line 160) | pub async fn push_cipher_update(ut: UpdateType, cipher: &Cipher, device:...
  function push_logout (line 191) | pub async fn push_logout(user: &User, acting_device_id: Option<DeviceId>...
  function push_user_update (line 211) | pub async fn push_user_update(ut: UpdateType, user: &User, push_uuid: &O...
  function push_folder_update (line 229) | pub async fn push_folder_update(ut: UpdateType, folder: &Folder, device:...
  function push_send_update (line 248) | pub async fn push_send_update(ut: UpdateType, send: &Send, device: &Devi...
  function send_to_push_relay (line 269) | async fn send_to_push_relay(notification_data: Value) {
  function push_auth_request (line 304) | pub async fn push_auth_request(user_id: &UserId, auth_request_id: &str, ...
  function push_auth_response (line 322) | pub async fn push_auth_response(user_id: &UserId, auth_request_id: &Auth...

FILE: src/api/web.rs
  function routes (line 21) | pub fn routes() -> Vec<Route> {
  function catchers (line 37) | pub fn catchers() -> Vec<Catcher> {
  function not_found (line 46) | fn not_found() -> ApiResult<Html<String>> {
  function vaultwarden_css (line 56) | fn vaultwarden_css() -> Cached<Css<String>> {
  function web_index (line 109) | async fn web_index() -> Cached<Option<NamedFile>> {
  function web_index_direct (line 116) | fn web_index_direct() -> Redirect {
  function web_index_head (line 121) | fn web_index_head() -> EmptyResult {
  function app_id (line 132) | fn app_id() -> Cached<(ContentType, Json<Value>)> {
  function web_files (line 164) | async fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
  function attachments (line 169) | async fn attachments(cipher_id: CipherId, file_id: AttachmentId, token: ...
  function alive (line 183) | fn alive(_conn: DbConn) -> Json<String> {
  function alive_head (line 188) | fn alive_head(_conn: DbConn) -> EmptyResult {
  function _static_files_dev (line 200) | pub async fn _static_files_dev(filename: PathBuf) -> Option<NamedFile> {
  function static_files (line 218) | pub fn static_files(filename: &str) -> Result<(ContentType, &'static [u8...

FILE: src/auth.rs
  constant JWT_ALGORITHM (line 25) | const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
  function initialize_keys (line 53) | pub async fn initialize_keys() -> Result<(), Error> {
  function encode_jwt (line 93) | pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
  function decode_jwt (line 100) | pub fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> R...
  function decode_refresh (line 119) | pub fn decode_refresh(token: &str) -> Result<RefreshJwtClaims, Error> {
  function decode_login (line 123) | pub fn decode_login(token: &str) -> Result<LoginJwtClaims, Error> {
  function decode_invite (line 127) | pub fn decode_invite(token: &str) -> Result<InviteJwtClaims, Error> {
  function decode_emergency_access_invite (line 131) | pub fn decode_emergency_access_invite(token: &str) -> Result<EmergencyAc...
  function decode_delete (line 135) | pub fn decode_delete(token: &str) -> Result<BasicJwtClaims, Error> {
  function decode_verify_email (line 139) | pub fn decode_verify_email(token: &str) -> Result<BasicJwtClaims, Error> {
  function decode_admin (line 143) | pub fn decode_admin(token: &str) -> Result<BasicJwtClaims, Error> {
  function decode_send (line 147) | pub fn decode_send(token: &str) -> Result<BasicJwtClaims, Error> {
  function decode_api_org (line 151) | pub fn decode_api_org(token: &str) -> Result<OrgApiKeyLoginJwtClaims, Er...
  function decode_file_download (line 155) | pub fn decode_file_download(token: &str) -> Result<FileDownloadClaims, E...
  function decode_register_verify (line 159) | pub fn decode_register_verify(token: &str) -> Result<RegisterVerifyClaim...
  type LoginJwtClaims (line 164) | pub struct LoginJwtClaims {
    method new (line 206) | pub fn new(
    method default (line 261) | pub fn default(device: &Device, user: &User, auth_method: &AuthMethod,...
    method token (line 274) | pub fn token(&self) -> String {
    method expires_in (line 278) | pub fn expires_in(&self) -> i64 {
  type InviteJwtClaims (line 284) | pub struct InviteJwtClaims {
  function generate_invite_claims (line 300) | pub fn generate_invite_claims(
  type EmergencyAccessInviteJwtClaims (line 322) | pub struct EmergencyAccessInviteJwtClaims {
  function generate_emergency_access_invite_claims (line 338) | pub fn generate_emergency_access_invite_claims(
  type OrgApiKeyLoginJwtClaims (line 360) | pub struct OrgApiKeyLoginJwtClaims {
  function generate_organization_api_key_login_claims (line 375) | pub fn generate_organization_api_key_login_claims(
  type FileDownloadClaims (line 392) | pub struct FileDownloadClaims {
  function generate_file_download_claims (line 405) | pub fn generate_file_download_claims(cipher_id: CipherId, file_id: Attac...
  type RegisterVerifyClaims (line 417) | pub struct RegisterVerifyClaims {
  function generate_register_verify_claims (line 431) | pub fn generate_register_verify_claims(email: String, name: Option<Strin...
  type BasicJwtClaims (line 444) | pub struct BasicJwtClaims {
  function generate_delete_claims (line 455) | pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims {
  function generate_verify_email_claims (line 466) | pub fn generate_verify_email_claims(user_id: &UserId) -> BasicJwtClaims {
  function generate_admin_claims (line 477) | pub fn generate_admin_claims() -> BasicJwtClaims {
  function generate_send_claims (line 487) | pub fn generate_send_claims(send_id: &SendId, file_id: &SendFileId) -> B...
  type Host (line 510) | pub struct Host {
    type Error (line 516) | type Error = &'static str;
    method from_request (line 518) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type ClientHeaders (line 551) | pub struct ClientHeaders {
    type Error (line 558) | type Error = &'static str;
    method from_request (line 560) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type Headers (line 576) | pub struct Headers {
    type Error (line 585) | type Error = &'static str;
    method from_request (line 587) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
    method from (line 850) | fn from(h: ManagerHeaders) -> Headers {
    method from (line 891) | fn from(h: ManagerHeadersLoose) -> Headers {
    method from (line 981) | fn from(h: OrgMemberHeaders) -> Headers {
  type OrgHeaders (line 665) | pub struct OrgHeaders {
    method is_member (line 676) | fn is_member(&self) -> bool {
    method is_confirmed_and_admin (line 682) | fn is_confirmed_and_admin(&self) -> bool {
    method is_confirmed_and_manager (line 685) | fn is_confirmed_and_manager(&self) -> bool {
    method is_confirmed_and_owner (line 688) | fn is_confirmed_and_owner(&self) -> bool {
    type Error (line 695) | type Error = &'static str;
    method from_request (line 697) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type AdminHeaders (line 755) | pub struct AdminHeaders {
    type Error (line 766) | type Error = &'static str;
    method from_request (line 768) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  function get_col_id (line 788) | fn get_col_id(request: &Request<'_>) -> Option<CollectionId> {
  type ManagerHeaders (line 807) | pub struct ManagerHeaders {
    type Error (line 817) | type Error = &'static str;
    method from_request (line 819) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
    method from_loose (line 902) | pub async fn from_loose(
  type ManagerHeadersLoose (line 862) | pub struct ManagerHeadersLoose {
    type Error (line 872) | type Error = &'static str;
    method from_request (line 874) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type OwnerHeaders (line 926) | pub struct OwnerHeaders {
    type Error (line 935) | type Error = &'static str;
    method from_request (line 937) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type OrgMemberHeaders (line 952) | pub struct OrgMemberHeaders {
    type Error (line 962) | type Error = &'static str;
    method from_request (line 964) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type ClientIp (line 995) | pub struct ClientIp {
    type Error (line 1001) | type Error = ();
    method from_request (line 1003) | async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Err...
  type Secure (line 1026) | pub struct Secure {
    type Error (line 1032) | type Error = ();
    method from_request (line 1034) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type WsAccessTokenHeader (line 1055) | pub struct WsAccessTokenHeader {
    type Error (line 1061) | type Error = ();
    method from_request (line 1063) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type ClientVersion (line 1078) | pub struct ClientVersion(pub semver::Version);
    type Error (line 1082) | type Error = &'static str;
    method from_request (line 1084) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type AuthMethod (line 1101) | pub enum AuthMethod {
    method scope (line 1109) | pub fn scope(&self) -> String {
    method scope_vec (line 1118) | pub fn scope_vec(&self) -> Vec<String> {
    method check_scope (line 1122) | pub fn check_scope(&self, scope: Option<&String>) -> ApiResult<String> {
  type TokenWrapper (line 1133) | pub enum TokenWrapper {
  type RefreshJwtClaims (line 1139) | pub struct RefreshJwtClaims {
  type AuthTokens (line 1155) | pub struct AuthTokens {
    method refresh_token (line 1161) | pub fn refresh_token(&self) -> String {
    method access_token (line 1165) | pub fn access_token(&self) -> String {
    method expires_in (line 1169) | pub fn expires_in(&self) -> i64 {
    method scope (line 1173) | pub fn scope(&self) -> String {
    method new (line 1178) | pub fn new(device: &Device, user: &User, sub: AuthMethod, client_id: O...
  function refresh_tokens (line 1205) | pub async fn refresh_tokens(

FILE: src/config.rs
  type Pass (line 56) | pub type Pass = String;
  function validate_config (line 923) | fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
  function validate_internal_sso_issuer_url (line 1278) | fn validate_internal_sso_issuer_url(sso_authority: &String) -> Result<op...
  function validate_internal_sso_redirect_url (line 1285) | fn validate_internal_sso_redirect_url(sso_callback_path: &String) -> Res...
  function validate_sso_master_password_policy (line 1292) | fn validate_sso_master_password_policy(
  function extract_url_origin (line 1308) | fn extract_url_origin(url: &str) -> String {
  function extract_url_path (line 1320) | fn extract_url_path(url: &str) -> String {
  function generate_smtp_img_src (line 1330) | fn generate_smtp_img_src(embed_images: bool, domain: &str) -> String {
  function generate_sso_callback_path (line 1340) | fn generate_sso_callback_path(domain: &str) -> String {
  function generate_icon_service_url (line 1348) | fn generate_icon_service_url(icon_service: &str) -> String {
  function generate_icon_service_csp (line 1359) | fn generate_icon_service_csp(icon_service: &str, icon_service_url: &str)...
  function smtp_convert_deprecated_ssl_options (line 1375) | fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option<bool>, smtp_expl...
  function opendal_operator_for_path (line 1388) | fn opendal_operator_for_path(path: &str) -> Result<opendal::Operator, Er...
  function opendal_s3_operator_for_path (line 1414) | fn opendal_s3_operator_for_path(path: &str) -> Result<opendal::Operator,...
  type PathType (line 1472) | pub enum PathType {
  method load (line 1481) | pub async fn load() -> Result<Self, Error> {
  method update_config (line 1508) | pub async fn update_config(&self, other: ConfigBuilder, ignore_non_edita...
  method update_config_partial (line 1546) | async fn update_config_partial(&self, other: ConfigBuilder) -> Result<()...
  method is_email_domain_allowed (line 1558) | pub fn is_email_domain_allowed(&self, email: &str) -> bool {
  method is_signup_allowed (line 1572) | pub fn is_signup_allowed(&self, email: &str) -> bool {
  method is_signup_disabled (line 1584) | pub fn is_signup_disabled(&self) -> bool {
  method is_org_creation_allowed (line 1592) | pub fn is_org_creation_allowed(&self, email: &str) -> bool {
  method delete_user_config (line 1604) | pub async fn delete_user_config(&self) -> Result<(), Error> {
  method private_rsa_key (line 1628) | pub fn private_rsa_key(&self) -> String {
  method mail_enabled (line 1631) | pub fn mail_enabled(&self) -> bool {
  method get_duo_akey (line 1636) | pub async fn get_duo_akey(&self) -> String {
  method is_webauthn_2fa_supported (line 1653) | pub fn is_webauthn_2fa_supported(&self) -> bool {
  method is_admin_token_set (line 1658) | pub fn is_admin_token_set(&self) -> bool {
  method opendal_operator_for_path_type (line 1664) | pub fn opendal_operator_for_path_type(&self, path_type: &PathType) -> Re...
  method render_template (line 1681) | pub fn render_template<T: serde::ser::Serialize>(&self, name: &str, data...
  method render_fallback_template (line 1692) | pub fn render_fallback_template<T: serde::ser::Serialize>(&self, name: &...
  method set_rocket_shutdown_handle (line 1697) | pub fn set_rocket_shutdown_handle(&self, handle: rocket::Shutdown) {
  method shutdown (line 1701) | pub fn shutdown(&self) {
  method sso_issuer_url (line 1709) | pub fn sso_issuer_url(&self) -> Result<openidconnect::IssuerUrl, Error> {
  method sso_redirect_url (line 1713) | pub fn sso_redirect_url(&self) -> Result<openidconnect::RedirectUrl, Err...
  method sso_master_password_policy_value (line 1717) | pub fn sso_master_password_policy_value(&self) -> Option<serde_json::Val...
  method sso_scopes_vec (line 1721) | pub fn sso_scopes_vec(&self) -> Vec<String> {
  method sso_authorize_extra_params_vec (line 1725) | pub fn sso_authorize_extra_params_vec(&self) -> Vec<(String, String)> {
  function load_templates (line 1735) | fn load_templates<P>(path: P) -> Handlebars<'static>
  function case_helper (line 1820) | fn case_helper<'reg, 'rc>(
  function to_json (line 1838) | fn to_json<'reg, 'rc>(

FILE: src/crypto.rs
  constant DIGEST_ALG (line 9) | const DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
  constant OUTPUT_LEN (line 10) | const OUTPUT_LEN: usize = digest::SHA256_OUTPUT_LEN;
  function hash_password (line 12) | pub fn hash_password(secret: &[u8], salt: &[u8], iterations: u32) -> Vec...
  function verify_password_hash (line 21) | pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8],...
  function hmac_sign (line 29) | pub fn hmac_sign(key: &str, data: &str) -> String {
  function get_random_bytes (line 41) | pub fn get_random_bytes<const N: usize>() -> [u8; N] {
  function encode_random_bytes (line 51) | pub fn encode_random_bytes<const N: usize>(e: &Encoding) -> String {
  function get_random_string (line 56) | pub fn get_random_string(alphabet: &[u8], num_chars: usize) -> String {
  function get_random_string_numeric (line 70) | pub fn get_random_string_numeric(num_chars: usize) -> String {
  function get_random_string_alphanum (line 76) | pub fn get_random_string_alphanum(num_chars: usize) -> String {
  function generate_id (line 83) | pub fn generate_id<const N: usize>() -> String {
  function generate_send_file_id (line 87) | pub fn generate_send_file_id() -> String {
  function generate_attachment_id (line 93) | pub fn generate_attachment_id() -> AttachmentId {
  function generate_email_token (line 99) | pub fn generate_email_token(token_size: u8) -> String {
  function generate_api_key (line 105) | pub fn generate_api_key() -> String {
  function ct_eq (line 112) | pub fn ct_eq<T: AsRef<[u8]>, U: AsRef<[u8]>>(a: T, b: U) -> bool {

FILE: src/db/mod.rs
  function run_blocking (line 32) | pub async fn run_blocking<F, R>(job: F) -> R
  type DbConnInner (line 48) | pub enum DbConnInner {
  type DbConnManager (line 58) | pub struct DbConnManager {
    method new (line 63) | pub fn new(database_url: &str) -> Self {
    method establish_connection (line 69) | fn establish_connection(&self) -> Result<DbConnInner, diesel::r2d2::Er...
    type Connection (line 95) | type Connection = DbConnInner;
    type Error (line 96) | type Error = diesel::r2d2::Error;
    method connect (line 98) | fn connect(&self) -> Result<Self::Connection, Self::Error> {
    method is_valid (line 102) | fn is_valid(&self, conn: &mut Self::Connection) -> Result<(), Self::Er...
    method has_broken (line 107) | fn has_broken(&self, conn: &mut Self::Connection) -> bool {
  type DbConnType (line 114) | pub enum DbConnType {
    method from_url (line 258) | pub fn from_url(url: &str) -> Result<Self, Error> {
    method get_init_stmts (line 285) | pub fn get_init_stmts(&self) -> String {
    method default_init_stmts (line 294) | pub fn default_init_stmts(&self) -> String {
  type DbConn (line 125) | pub struct DbConn {
    method run (line 307) | pub async fn run<F, R>(&self, f: F) -> R
    type Error (line 445) | type Error = ();
    method from_request (line 447) | async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self:...
  type DbConnOptions (line 131) | pub struct DbConnOptions {
    method on_acquire (line 136) | fn on_acquire(&self, conn: &mut DbConnInner) -> Result<(), diesel::r2d...
  type DbPool (line 145) | pub struct DbPool {
    method from_config (line 185) | pub fn from_config() -> Result<Self, Error> {
    method get (line 237) | pub async fn get(&self) -> Result<DbConn, Error> {
  method drop (line 152) | fn drop(&mut self) {
  method drop (line 173) | fn drop(&mut self) {
  function backup_sqlite (line 388) | pub fn backup_sqlite() -> Result<String, Error> {
  function backup_sqlite (line 420) | pub fn backup_sqlite() -> Result<String, Error> {
  function get_sql_server_version (line 425) | pub async fn get_sql_server_version(conn: &DbConn) -> String {
  constant MIGRATIONS (line 465) | pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations...
  function run_migrations (line 467) | pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
  constant MIGRATIONS (line 493) | pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations...
  function run_migrations (line 495) | pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
  constant MIGRATIONS (line 514) | pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations...
  function run_migrations (line 516) | pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {

FILE: src/db/models/attachment.rs
  type Attachment (line 16) | pub struct Attachment {
    method new (line 26) | pub const fn new(
    method get_file_path (line 42) | pub fn get_file_path(&self) -> String {
    method get_url (line 46) | pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> {
    method to_json (line 57) | pub async fn to_json(&self, host: &str) -> Result<Value, crate::Error> {
    method save (line 78) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 109) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_cipher (line 134) | pub async fn delete_all_by_cipher(cipher_uuid: &CipherId, conn: &DbCon...
    method find_by_id (line 141) | pub async fn find_by_id(id: &AttachmentId, conn: &DbConn) -> Option<Se...
    method find_by_cipher (line 150) | pub async fn find_by_cipher(cipher_uuid: &CipherId, conn: &DbConn) -> ...
    method size_by_user (line 159) | pub async fn size_by_user(user_uuid: &UserId, conn: &DbConn) -> i64 {
    method count_by_user (line 176) | pub async fn count_by_user(user_uuid: &UserId, conn: &DbConn) -> i64 {
    method size_by_org (line 187) | pub async fn size_by_org(org_uuid: &OrganizationId, conn: &DbConn) -> ...
    method count_by_org (line 204) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_all_by_user_and_orgs (line 218) | pub async fn find_all_by_user_and_orgs(
  type AttachmentId (line 250) | pub struct AttachmentId(pub String);

FILE: src/db/models/auth_request.rs
  type AuthRequest (line 14) | pub struct AuthRequest {
    method new (line 39) | pub fn new(
    method to_json_for_pending_device (line 69) | pub fn to_json_for_pending_device(&self) -> Value {
    method save (line 83) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method find_by_uuid (line 114) | pub async fn find_by_uuid(uuid: &AuthRequestId, conn: &DbConn) -> Opti...
    method find_by_uuid_and_user (line 123) | pub async fn find_by_uuid_and_user(uuid: &AuthRequestId, user_uuid: &U...
    method find_by_user (line 133) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method find_by_user_and_requested_device (line 142) | pub async fn find_by_user_and_requested_device(
    method find_created_before (line 158) | pub async fn find_created_before(dt: &NaiveDateTime, conn: &DbConn) ->...
    method delete (line 167) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method check_access_code (line 175) | pub fn check_access_code(&self, access_code: &str) -> bool {
    method purge_expired_auth_requests (line 179) | pub async fn purge_expired_auth_requests(conn: &DbConn) {
  type AuthRequestId (line 205) | pub struct AuthRequestId(String);

FILE: src/db/models/cipher.rs
  type Cipher (line 25) | pub struct Cipher {
    method new (line 61) | pub fn new(atype: i32, name: String) -> Self {
    method validate_cipher_data (line 87) | pub fn validate_cipher_data(cipher_data: &[CipherData]) -> EmptyResult {
    method to_json (line 140) | pub async fn to_json(
    method update_users_revision (line 408) | pub async fn update_users_revision(&self, conn: &DbConn) -> Vec<UserId> {
    method save (line 436) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method delete (line 470) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_organization (line 485) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
    method delete_all_by_user (line 493) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method purge_trash (line 501) | pub async fn purge_trash(conn: &DbConn) {
    method move_to_folder (line 511) | pub async fn move_to_folder(
    method is_owned_by_user (line 546) | pub fn is_owned_by_user(&self, user_uuid: &UserId) -> bool {
    method is_in_full_access_org (line 551) | async fn is_in_full_access_org(
    method is_in_full_access_group (line 570) | async fn is_in_full_access_group(
    method get_access_restrictions (line 594) | pub async fn get_access_restrictions(
    method get_user_collections_access_flags (line 664) | async fn get_user_collections_access_flags(&self, user_uuid: &UserId, ...
    method get_group_collections_access_flags (line 681) | async fn get_group_collections_access_flags(&self, user_uuid: &UserId,...
    method is_write_accessible_to_user (line 707) | pub async fn is_write_accessible_to_user(&self, user_uuid: &UserId, co...
    method is_in_editable_collection_by_user (line 716) | pub async fn is_in_editable_collection_by_user(&self, user_uuid: &User...
    method is_accessible_to_user (line 723) | pub async fn is_accessible_to_user(&self, user_uuid: &UserId, conn: &D...
    method is_favorite (line 728) | pub async fn is_favorite(&self, user_uuid: &UserId, conn: &DbConn) -> ...
    method set_favorite (line 733) | pub async fn set_favorite(&self, favorite: Option<bool>, user_uuid: &U...
    method get_folder_uuid (line 740) | pub async fn get_folder_uuid(&self, user_uuid: &UserId, conn: &DbConn)...
    method find_by_uuid (line 752) | pub async fn find_by_uuid(uuid: &CipherId, conn: &DbConn) -> Option<Se...
    method find_by_uuid_and_org (line 761) | pub async fn find_by_uuid_and_org(
    method find_by_user (line 787) | pub async fn find_by_user(
    method find_by_user_visible (line 890) | pub async fn find_by_user_visible(user_uuid: &UserId, conn: &DbConn) -...
    method find_by_user_and_ciphers (line 894) | pub async fn find_by_user_and_ciphers(
    method find_by_user_and_cipher (line 902) | pub async fn find_by_user_and_cipher(user_uuid: &UserId, cipher_uuid: ...
    method find_owned_by_user (line 907) | pub async fn find_owned_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method count_owned_by_user (line 919) | pub async fn count_owned_by_user(user_uuid: &UserId, conn: &DbConn) ->...
    method find_by_org (line 930) | pub async fn find_by_org(org_uuid: &OrganizationId, conn: &DbConn) -> ...
    method count_by_org (line 939) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_by_folder (line 950) | pub async fn find_by_folder(folder_uuid: &FolderId, conn: &DbConn) -> ...
    method find_deleted_before (line 961) | pub async fn find_deleted_before(dt: &NaiveDateTime, conn: &DbConn) ->...
    method get_collections (line 970) | pub async fn get_collections(&self, user_uuid: UserId, conn: &DbConn) ...
    method get_admin_collections (line 1031) | pub async fn get_admin_collections(&self, user_uuid: UserId, conn: &Db...
    method get_collections_with_cipher_by_user (line 1096) | pub async fn get_collections_with_cipher_by_user(
  type RepromptType (line 54) | pub enum RepromptType {
  type CipherId (line 1155) | pub struct CipherId(String);

FILE: src/db/models/collection.rs
  type Collection (line 19) | pub struct Collection {
    method new (line 47) | pub fn new(org_uuid: OrganizationId, name: String, external_id: Option...
    method to_json (line 59) | pub fn to_json(&self) -> Value {
    method set_external_id (line 69) | pub fn set_external_id(&mut self, external_id: Option<String>) {
    method to_json_details (line 84) | pub async fn to_json_details(
    method can_access_collection (line 140) | pub async fn can_access_collection(member: &Membership, col_id: &Colle...
    method save (line 157) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 190) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_organization (line 203) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
    method update_users_revision (line 210) | pub async fn update_users_revision(&self, conn: &DbConn) {
    method find_by_uuid (line 216) | pub async fn find_by_uuid(uuid: &CollectionId, conn: &DbConn) -> Optio...
    method find_by_user_uuid (line 225) | pub async fn find_by_user_uuid(user_uuid: UserId, conn: &DbConn) -> Ve...
    method find_by_organization_and_user_uuid (line 298) | pub async fn find_by_organization_and_user_uuid(
    method find_by_organization (line 310) | pub async fn find_by_organization(org_uuid: &OrganizationId, conn: &Db...
    method count_by_org (line 319) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_by_uuid_and_org (line 330) | pub async fn find_by_uuid_and_org(uuid: &CollectionId, org_uuid: &Orga...
    method find_by_uuid_and_user (line 341) | pub async fn find_by_uuid_and_user(uuid: &CollectionId, user_uuid: Use...
    method is_writable_by_user (line 408) | pub async fn is_writable_by_user(&self, user_uuid: &UserId, conn: &DbC...
    method hide_passwords_for_user (line 470) | pub async fn hide_passwords_for_user(&self, user_uuid: &UserId, conn: ...
    method is_coll_manageable_by_user (line 516) | pub async fn is_coll_manageable_by_user(uuid: &CollectionId, user_uuid...
    method is_manageable_by_user (line 563) | pub async fn is_manageable_by_user(&self, user_uuid: &UserId, conn: &D...
  type CollectionUser (line 29) | pub struct CollectionUser {
    method find_by_organization_and_user_uuid (line 570) | pub async fn find_by_organization_and_user_uuid(
    method find_by_organization_swap_user_uuid_with_member_uuid (line 586) | pub async fn find_by_organization_swap_user_uuid_with_member_uuid(
    method save (line 603) | pub async fn save(
    method delete (line 666) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_collection (line 680) | pub async fn find_by_collection(collection_uuid: &CollectionId, conn: ...
    method find_by_org_and_coll_swap_user_uuid_with_member_uuid (line 690) | pub async fn find_by_org_and_coll_swap_user_uuid_with_member_uuid(
    method find_by_collection_and_user (line 707) | pub async fn find_by_collection_and_user(
    method find_by_user (line 722) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method delete_all_by_collection (line 732) | pub async fn delete_all_by_collection(collection_uuid: &CollectionId, ...
    method delete_all_by_user_and_org (line 744) | pub async fn delete_all_by_user_and_org(
    method has_access_to_collection_by_user (line 764) | pub async fn has_access_to_collection_by_user(col_id: &CollectionId, u...
  type CollectionCipher (line 40) | pub struct CollectionCipher {
    method save (line 771) | pub async fn save(cipher_uuid: &CipherId, collection_uuid: &Collection...
    method delete (line 801) | pub async fn delete(cipher_uuid: &CipherId, collection_uuid: &Collecti...
    method delete_all_by_cipher (line 815) | pub async fn delete_all_by_cipher(cipher_uuid: &CipherId, conn: &DbCon...
    method delete_all_by_collection (line 823) | pub async fn delete_all_by_collection(collection_uuid: &CollectionId, ...
    method update_users_revision (line 831) | pub async fn update_users_revision(collection_uuid: &CollectionId, con...
  type CollectionMembership (line 839) | pub struct CollectionMembership {
    method to_json_details_for_member (line 848) | pub fn to_json_details_for_member(&self, membership_type: i32) -> Value {
    method from (line 863) | fn from(c: CollectionUser) -> Self {
  type CollectionId (line 890) | pub struct CollectionId(String);

FILE: src/db/models/device.rs
  type Device (line 20) | pub struct Device {
    method new (line 38) | pub fn new(uuid: DeviceId, user_uuid: UserId, name: String, atype: i32...
    method to_json (line 57) | pub fn to_json(&self) -> Value {
    method refresh_twofactor_remember (line 69) | pub fn refresh_twofactor_remember(&mut self) -> String {
    method delete_twofactor_remember (line 76) | pub fn delete_twofactor_remember(&mut self) {
    method is_new (line 81) | pub fn is_new(&self) -> bool {
    method is_push_device (line 85) | pub fn is_push_device(&self) -> bool {
    method is_cli (line 89) | pub fn is_cli(&self) -> bool {
    method is_mobile (line 93) | pub fn is_mobile(&self) -> bool {
    method save (line 137) | pub async fn save(&mut self, update_time: bool, conn: &DbConn) -> Empt...
    method delete_all_by_user (line 165) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method find_by_uuid_and_user (line 173) | pub async fn find_by_uuid_and_user(uuid: &DeviceId, user_uuid: &UserId...
    method find_with_auth_request_by_user (line 183) | pub async fn find_with_auth_request_by_user(user_uuid: &UserId, conn: ...
    method find_by_user (line 193) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method find_by_uuid (line 202) | pub async fn find_by_uuid(uuid: &DeviceId, conn: &DbConn) -> Option<Se...
    method clear_push_token_by_uuid (line 211) | pub async fn clear_push_token_by_uuid(uuid: &DeviceId, conn: &DbConn) ...
    method find_by_refresh_token (line 220) | pub async fn find_by_refresh_token(refresh_token: &str, conn: &DbConn)...
    method find_latest_active_by_user (line 229) | pub async fn find_latest_active_by_user(user_uuid: &UserId, conn: &DbC...
    method find_push_devices_by_user (line 239) | pub async fn find_push_devices_by_user(user_uuid: &UserId, conn: &DbCo...
    method check_user_has_push_device (line 249) | pub async fn check_user_has_push_device(user_uuid: &UserId, conn: &DbC...
  type DeviceWithAuthRequest (line 98) | pub struct DeviceWithAuthRequest {
    method to_json (line 104) | pub fn to_json(&self) -> Value {
    method from (line 123) | pub fn from(c: Device, a: Option<AuthRequest>) -> Self {
  type DeviceType (line 263) | pub enum DeviceType {
    method from_i32 (line 319) | pub fn from_i32(value: i32) -> DeviceType {
  type DeviceId (line 355) | pub struct DeviceId(String);
  type PushId (line 358) | pub struct PushId(pub String);

FILE: src/db/models/emergency_access.rs
  type EmergencyAccess (line 15) | pub struct EmergencyAccess {
    method new (line 32) | pub fn new(grantor_uuid: UserId, email: String, status: i32, atype: i3...
    method get_type_as_str (line 51) | pub fn get_type_as_str(&self) -> &'static str {
    method to_json (line 59) | pub fn to_json(&self) -> Value {
    method to_json_grantor_details (line 69) | pub async fn to_json_grantor_details(&self, conn: &DbConn) -> Value {
    method to_json_grantee_details (line 85) | pub async fn to_json_grantee_details(&self, conn: &DbConn) -> Option<V...
    method save (line 142) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method update_access_status_and_save (line 176) | pub async fn update_access_status_and_save(
    method update_last_notification_date_and_save (line 197) | pub async fn update_last_notification_date_and_save(&mut self, date: &...
    method delete_all_by_user (line 211) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method delete_all_by_grantee_email (line 221) | pub async fn delete_all_by_grantee_email(grantee_email: &str, conn: &D...
    method delete (line 228) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_grantor_uuid_and_grantee_uuid_or_email (line 238) | pub async fn find_by_grantor_uuid_and_grantee_uuid_or_email(
    method find_all_recoveries_initiated (line 253) | pub async fn find_all_recoveries_initiated(conn: &DbConn) -> Vec<Self> {
    method find_by_uuid_and_grantor_uuid (line 263) | pub async fn find_by_uuid_and_grantor_uuid(
    method find_by_uuid_and_grantee_uuid (line 277) | pub async fn find_by_uuid_and_grantee_uuid(
    method find_by_uuid_and_grantee_email (line 291) | pub async fn find_by_uuid_and_grantee_email(
    method find_all_by_grantee_uuid (line 305) | pub async fn find_all_by_grantee_uuid(grantee_uuid: &UserId, conn: &Db...
    method find_invited_by_grantee_email (line 314) | pub async fn find_invited_by_grantee_email(grantee_email: &str, conn: ...
    method find_all_invited_by_grantee_email (line 324) | pub async fn find_all_invited_by_grantee_email(grantee_email: &str, co...
    method find_all_by_grantor_uuid (line 334) | pub async fn find_all_by_grantor_uuid(grantor_uuid: &UserId, conn: &Db...
    method find_all_confirmed_by_grantor_uuid (line 343) | pub async fn find_all_confirmed_by_grantor_uuid(grantor_uuid: &UserId,...
    method accept_invite (line 353) | pub async fn accept_invite(&mut self, grantee_uuid: &UserId, grantee_e...
  type EmergencyAccessType (line 116) | pub enum EmergencyAccessType {
    method from_str (line 122) | pub fn from_str(s: &str) -> Option<Self> {
  type EmergencyAccessStatus (line 131) | pub enum EmergencyAccessStatus {
  type EmergencyAccessId (line 387) | pub struct EmergencyAccessId(String);

FILE: src/db/models/event.rs
  type Event (line 19) | pub struct Event {
    method new (line 139) | pub fn new(event_type: i32, event_date: Option<NaiveDateTime>) -> Self {
    method to_json (line 165) | pub fn to_json(&self) -> Value {
    constant PAGE_SIZE (line 192) | pub const PAGE_SIZE: i64 = 30;
    method save (line 196) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method save_user_event (line 216) | pub async fn save_user_event(events: Vec<Event>, conn: &DbConn) -> Emp...
    method delete (line 251) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_organization_uuid (line 261) | pub async fn find_by_organization_uuid(
    method count_by_org (line 278) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_by_org_and_member (line 289) | pub async fn find_by_org_and_member(
    method find_by_cipher_uuid (line 310) | pub async fn find_by_cipher_uuid(
    method clean_events (line 327) | pub async fn clean_events(conn: &DbConn) -> EmptyResult {
  type EventType (line 41) | pub enum EventType {
  type EventId (line 342) | pub struct EventId(String);

FILE: src/db/models/favorite.rs
  type Favorite (line 8) | pub struct Favorite {
    method is_favorite (line 20) | pub async fn is_favorite(cipher_uuid: &CipherId, user_uuid: &UserId, c...
    method set_favorite (line 34) | pub async fn set_favorite(
    method delete_all_by_cipher (line 72) | pub async fn delete_all_by_cipher(cipher_uuid: &CipherId, conn: &DbCon...
    method delete_all_by_user (line 81) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method get_all_cipher_uuid_by_user (line 91) | pub async fn get_all_cipher_uuid_by_user(user_uuid: &UserId, conn: &Db...

FILE: src/db/models/folder.rs
  type Folder (line 13) | pub struct Folder {
    method new (line 31) | pub fn new(user_uuid: UserId, name: String) -> Self {
    method to_json (line 44) | pub fn to_json(&self) -> Value {
    method save (line 72) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method delete (line 106) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_user (line 117) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method find_by_uuid_and_user (line 124) | pub async fn find_by_uuid_and_user(uuid: &FolderId, user_uuid: &UserId...
    method find_by_user (line 134) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
  type FolderCipher (line 24) | pub struct FolderCipher {
    method new (line 57) | pub fn new(folder_uuid: FolderId, cipher_uuid: CipherId) -> Self {
    method save (line 145) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 167) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_cipher (line 179) | pub async fn delete_all_by_cipher(cipher_uuid: &CipherId, conn: &DbCon...
    method delete_all_by_folder (line 187) | pub async fn delete_all_by_folder(folder_uuid: &FolderId, conn: &DbCon...
    method find_by_folder_and_cipher (line 195) | pub async fn find_by_folder_and_cipher(
    method find_by_folder (line 209) | pub async fn find_by_folder(folder_uuid: &FolderId, conn: &DbConn) -> ...
    method find_by_user (line 220) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<(C...
  type FolderId (line 248) | pub struct FolderId(String);

FILE: src/db/models/group.rs
  type Group (line 16) | pub struct Group {
    method new (line 47) | pub fn new(
    method to_json (line 70) | pub fn to_json(&self) -> Value {
    method to_json_details (line 80) | pub async fn to_json_details(&self, conn: &DbConn) -> Value {
    method set_external_id (line 108) | pub fn set_external_id(&mut self, external_id: Option<String>) {
    method save (line 159) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_organization (line 192) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
    method find_by_organization (line 199) | pub async fn find_by_organization(org_uuid: &OrganizationId, conn: &Db...
    method count_by_org (line 208) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_by_uuid_and_org (line 219) | pub async fn find_by_uuid_and_org(uuid: &GroupId, org_uuid: &Organizat...
    method find_by_external_id_and_org (line 229) | pub async fn find_by_external_id_and_org(
    method get_orgs_by_user_with_full_access (line 243) | pub async fn get_orgs_by_user_with_full_access(user_uuid: &UserId, con...
    method is_in_full_access_group (line 261) | pub async fn is_in_full_access_group(user_uuid: &UserId, org_uuid: &Or...
    method delete (line 279) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method update_revision (line 290) | pub async fn update_revision(uuid: &GroupId, conn: &DbConn) {
    method _update_revision (line 296) | async fn _update_revision(uuid: &GroupId, date: &NaiveDateTime, conn: ...
  type CollectionGroup (line 29) | pub struct CollectionGroup {
    method new (line 118) | pub fn new(
    method to_json_details_for_group (line 134) | pub fn to_json_details_for_group(&self) -> Value {
    method save (line 309) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method find_by_group (line 368) | pub async fn find_by_group(group_uuid: &GroupId, conn: &DbConn) -> Vec...
    method find_by_user (line 377) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method find_by_collection (line 393) | pub async fn find_by_collection(collection_uuid: &CollectionId, conn: ...
    method delete (line 403) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_group (line 418) | pub async fn delete_all_by_group(group_uuid: &GroupId, conn: &DbConn) ...
    method delete_all_by_collection (line 432) | pub async fn delete_all_by_collection(collection_uuid: &CollectionId, ...
  type GroupUser (line 40) | pub struct GroupUser {
    method new (line 149) | pub fn new(groups_uuid: GroupId, users_organizations_uuid: MembershipI...
    method save (line 451) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method find_by_group (line 497) | pub async fn find_by_group(group_uuid: &GroupId, conn: &DbConn) -> Vec...
    method find_by_member (line 506) | pub async fn find_by_member(member_uuid: &MembershipId, conn: &DbConn)...
    method has_access_to_collection_by_member (line 515) | pub async fn has_access_to_collection_by_member(
    method has_full_access_by_member (line 533) | pub async fn has_full_access_by_member(
    method update_user_revision (line 552) | pub async fn update_user_revision(&self, conn: &DbConn) {
    method delete_by_group_and_member (line 559) | pub async fn delete_by_group_and_member(
    method delete_all_by_group (line 578) | pub async fn delete_all_by_group(group_uuid: &GroupId, conn: &DbConn) ...
    method delete_all_by_member (line 592) | pub async fn delete_all_by_member(member_uuid: &MembershipId, conn: &D...
  type GroupId (line 623) | pub struct GroupId(String);

FILE: src/db/models/org_policy.rs
  type OrgPolicy (line 18) | pub struct OrgPolicy {
    method new (line 69) | pub fn new(org_uuid: OrganizationId, atype: OrgPolicyType, enabled: bo...
    method has_type (line 79) | pub fn has_type(&self, policy_type: OrgPolicyType) -> bool {
    method to_json (line 83) | pub fn to_json(&self) -> Value {
    method save (line 108) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 150) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_org (line 158) | pub async fn find_by_org(org_uuid: &OrganizationId, conn: &DbConn) -> ...
    method find_confirmed_by_user (line 167) | pub async fn find_confirmed_by_user(user_uuid: &UserId, conn: &DbConn)...
    method find_by_org_and_type (line 184) | pub async fn find_by_org_and_type(
    method delete_all_by_organization (line 198) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
    method find_accepted_and_confirmed_by_user_and_active_policy (line 206) | pub async fn find_accepted_and_confirmed_by_user_and_active_policy(
    method find_confirmed_by_user_and_active_policy (line 232) | pub async fn find_confirmed_by_user_and_active_policy(
    method is_applicable_to_user (line 258) | pub async fn is_applicable_to_user(
    method check_user_allowed (line 281) | pub async fn check_user_allowed(m: &Membership, action: &str, conn: &D...
    method org_is_reset_password_auto_enroll (line 315) | pub async fn org_is_reset_password_auto_enroll(org_uuid: &Organization...
    method is_hide_email_disabled (line 331) | pub async fn is_hide_email_disabled(user_uuid: &UserId, conn: &DbConn)...
    method is_enabled_for_member (line 351) | pub async fn is_enabled_for_member(member_uuid: &MembershipId, policy_...
  type OrgPolicyType (line 28) | pub enum OrgPolicyType {
  type SendOptionsPolicyData (line 54) | pub struct SendOptionsPolicyData {
  type ResetPasswordDataModel (line 62) | pub struct ResetPasswordDataModel {
  type OrgPolicyId (line 362) | pub struct OrgPolicyId(String);

FILE: src/db/models/organization.rs
  type Organization (line 26) | pub struct Organization {
    method new (line 175) | pub fn new(name: String, billing_email: &str, private_key: Option<Stri...
    method to_json (line 186) | pub fn to_json(&self) -> Value {
    method save (line 331) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 371) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_uuid (line 388) | pub async fn find_by_uuid(uuid: &OrganizationId, conn: &DbConn) -> Opt...
    method find_by_name (line 397) | pub async fn find_by_name(name: &str, conn: &DbConn) -> Option<Self> {
    method get_all (line 406) | pub async fn get_all(conn: &DbConn) -> Vec<Self> {
    method find_main_org_user_email (line 414) | pub async fn find_main_org_user_email(user_email: &str, conn: &DbConn)...
    method find_org_user_email (line 430) | pub async fn find_org_user_email(user_email: &str, conn: &DbConn) -> V...
  type Membership (line 38) | pub struct Membership {
    method new (line 243) | pub fn new(user_uuid: UserId, org_uuid: OrganizationId, invited_by_ema...
    method restore (line 260) | pub fn restore(&mut self) -> bool {
    method revoke (line 268) | pub fn revoke(&mut self) -> bool {
    method get_unrevoked_status (line 277) | pub fn get_unrevoked_status(&self) -> i32 {
    method set_external_id (line 284) | pub fn set_external_id(&mut self, external_id: Option<String>) -> bool {
    method type_manager_as_custom (line 299) | pub fn type_manager_as_custom(&self) -> i32 {
    method to_json (line 448) | pub async fn to_json(&self, conn: &DbConn) -> Value {
    method to_json_user_details (line 539) | pub async fn to_json_user_details(&self, include_collections: bool, in...
    method to_json_user_access_restrictions (line 672) | pub fn to_json_user_access_restrictions(&self, col_user: &CollectionUs...
    method to_json_details (line 681) | pub async fn to_json_details(&self, conn: &DbConn) -> Value {
    method to_json_mini_details (line 721) | pub async fn to_json_mini_details(&self, conn: &DbConn) -> Value {
    method save (line 743) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 776) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method delete_all_by_organization (line 789) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
    method delete_all_by_user (line 796) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method find_by_email_and_org (line 803) | pub async fn find_by_email_and_org(email: &str, org_uuid: &Organizatio...
    method has_status (line 813) | pub fn has_status(&self, status: MembershipStatus) -> bool {
    method has_type (line 817) | pub fn has_type(&self, user_type: MembershipType) -> bool {
    method has_full_access (line 821) | pub fn has_full_access(&self) -> bool {
    method find_by_uuid (line 825) | pub async fn find_by_uuid(uuid: &MembershipId, conn: &DbConn) -> Optio...
    method find_by_uuid_and_org (line 834) | pub async fn find_by_uuid_and_org(uuid: &MembershipId, org_uuid: &Orga...
    method find_confirmed_by_user (line 844) | pub async fn find_confirmed_by_user(user_uuid: &UserId, conn: &DbConn)...
    method find_invited_by_user (line 854) | pub async fn find_invited_by_user(user_uuid: &UserId, conn: &DbConn) -...
    method accept_user_invitations (line 866) | pub async fn accept_user_invitations(user_uuid: &UserId, conn: &DbConn...
    method find_any_state_by_user (line 877) | pub async fn find_any_state_by_user(user_uuid: &UserId, conn: &DbConn)...
    method count_accepted_and_confirmed_by_user (line 886) | pub async fn count_accepted_and_confirmed_by_user(
    method find_by_org (line 902) | pub async fn find_by_org(org_uuid: &OrganizationId, conn: &DbConn) -> ...
    method find_confirmed_by_org (line 911) | pub async fn find_confirmed_by_org(org_uuid: &OrganizationId, conn: &D...
    method find_confirmed_and_manage_all_by_org (line 922) | pub async fn find_confirmed_and_manage_all_by_org(org_uuid: &Organizat...
    method count_by_org (line 936) | pub async fn count_by_org(org_uuid: &OrganizationId, conn: &DbConn) ->...
    method find_by_org_and_type (line 947) | pub async fn find_by_org_and_type(org_uuid: &OrganizationId, atype: Me...
    method count_confirmed_by_org_and_type (line 957) | pub async fn count_confirmed_by_org_and_type(
    method find_by_user_and_org (line 973) | pub async fn find_by_user_and_org(user_uuid: &UserId, org_uuid: &Organ...
    method find_confirmed_by_user_and_org (line 983) | pub async fn find_confirmed_by_user_and_org(
    method find_by_user (line 1000) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method get_orgs_by_user (line 1009) | pub async fn get_orgs_by_user(user_uuid: &UserId, conn: &DbConn) -> Ve...
    method find_by_user_and_policy (line 1019) | pub async fn find_by_user_and_policy(user_uuid: &UserId, policy_type: ...
    method find_by_cipher_and_org (line 1038) | pub async fn find_by_cipher_and_org(cipher_uuid: &CipherId, org_uuid: ...
    method find_by_cipher_and_org_with_group (line 1062) | pub async fn find_by_cipher_and_org_with_group(
    method user_has_ge_admin_access_to_cipher (line 1093) | pub async fn user_has_ge_admin_access_to_cipher(user_uuid: &UserId, ci...
    method find_by_collection_and_org (line 1106) | pub async fn find_by_collection_and_org(
    method find_by_external_id_and_org (line 1128) | pub async fn find_by_external_id_and_org(ext_id: &str, org_uuid: &Orga...
    method find_main_user_org (line 1140) | pub async fn find_main_user_org(user_uuid: &str, conn: &DbConn) -> Opt...
  type OrganizationApiKey (line 56) | pub struct OrganizationApiKey {
    method new (line 308) | pub fn new(org_uuid: OrganizationId, api_key: String) -> Self {
    method check_valid_api_key (line 319) | pub fn check_valid_api_key(&self, api_key: &str) -> bool {
    method save (line 1153) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method find_by_org_uuid (line 1185) | pub async fn find_by_org_uuid(org_uuid: &OrganizationId, conn: &DbConn...
    method delete_all_by_organization (line 1194) | pub async fn delete_all_by_organization(org_uuid: &OrganizationId, con...
  type MembershipStatus (line 66) | pub enum MembershipStatus {
    method from_i32 (line 74) | pub fn from_i32(status: i32) -> Option<Self> {
  type MembershipType (line 87) | pub enum MembershipType {
    method from_str (line 95) | pub fn from_str(s: &str) -> Option<Self> {
    method eq (line 128) | fn eq(&self, other: &i32) -> bool {
    method partial_cmp (line 134) | fn partial_cmp(&self, other: &i32) -> Option<Ordering> {
    method gt (line 141) | fn gt(&self, other: &i32) -> bool {
    method ge (line 145) | fn ge(&self, other: &i32) -> bool {
  method cmp (line 109) | fn cmp(&self, other: &MembershipType) -> Ordering {
  method partial_cmp (line 122) | fn partial_cmp(&self, other: &MembershipType) -> Option<Ordering> {
  function eq (line 151) | fn eq(&self, other: &MembershipType) -> bool {
  function partial_cmp (line 157) | fn partial_cmp(&self, other: &MembershipType) -> Option<Ordering> {
  function lt (line 164) | fn lt(&self, other: &MembershipType) -> bool {
  function le (line 168) | fn le(&self, other: &MembershipType) -> bool {
  constant ACTIVATE_REVOKE_DIFF (line 240) | const ACTIVATE_REVOKE_DIFF: i32 = 128;
  type OrganizationId (line 1221) | pub struct OrganizationId(String);
  type MembershipId (line 1238) | pub struct MembershipId(String);
  type OrgApiKeyId (line 1241) | pub struct OrgApiKeyId(String);
  function partial_cmp_MembershipType (line 1249) | fn partial_cmp_MembershipType() {

FILE: src/db/models/send.rs
  type Send (line 15) | pub struct Send {
    method new (line 50) | pub fn new(atype: i32, name: String, data: String, akey: String, delet...
    method set_password (line 81) | pub fn set_password(&mut self, password: Option<&str>) {
    method check_password (line 97) | pub fn check_password(&self, password: &str) -> bool {
    method creator_identifier (line 106) | pub async fn creator_identifier(&self, conn: &DbConn) -> Option<String> {
    method to_json (line 122) | pub fn to_json(&self) -> Value {
    method to_json_access (line 158) | pub async fn to_json_access(&self, conn: &DbConn) -> Value {
    method save (line 190) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method delete (line 224) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method purge (line 240) | pub async fn purge(conn: &DbConn) {
    method update_users_revision (line 246) | pub async fn update_users_revision(&self, conn: &DbConn) -> Vec<UserId> {
    method delete_all_by_user (line 260) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method find_by_access_id (line 267) | pub async fn find_by_access_id(access_id: &str, conn: &DbConn) -> Opti...
    method find_by_uuid (line 283) | pub async fn find_by_uuid(uuid: &SendId, conn: &DbConn) -> Option<Self> {
    method find_by_uuid_and_user (line 292) | pub async fn find_by_uuid_and_user(uuid: &SendId, user_uuid: &UserId, ...
    method find_by_user (line 302) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method size_by_user (line 311) | pub async fn size_by_user(user_uuid: &UserId, conn: &DbConn) -> Option...
    method find_by_org (line 334) | pub async fn find_by_org(org_uuid: &OrganizationId, conn: &DbConn) -> ...
    method find_by_past_deletion_date (line 343) | pub async fn find_by_past_deletion_date(conn: &DbConn) -> Vec<Self> {
  type SendType (line 44) | pub enum SendType {
  type SendId (line 377) | pub struct SendId(String);
    method as_ref (line 381) | fn as_ref(&self) -> &Path {
  type SendFileId (line 389) | pub struct SendFileId(String);
    method as_ref (line 393) | fn as_ref(&self) -> &Path {

FILE: src/db/models/sso_auth.rs
  type OIDCCodeWrapper (line 18) | pub enum OIDCCodeWrapper {
  type OIDCAuthenticatedUser (line 32) | pub struct OIDCAuthenticatedUser {
  type SsoAuth (line 48) | pub struct SsoAuth {
    method new (line 61) | pub fn new(state: OIDCState, client_challenge: OIDCCodeChallenge, nonc...
    method save (line 79) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method find (line 102) | pub async fn find(state: &OIDCState, conn: &DbConn) -> Option<Self> {
    method delete (line 113) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method delete_expired (line 121) | pub async fn delete_expired(pool: DbPool) -> EmptyResult {

FILE: src/db/models/two_factor.rs
  type TwoFactor (line 14) | pub struct TwoFactor {
    method new (line 49) | pub fn new(user_uuid: UserId, atype: TwoFactorType, data: String) -> S...
    method to_json (line 60) | pub fn to_json(&self) -> Value {
    method to_json_provider (line 68) | pub fn to_json_provider(&self) -> Value {
    method save (line 79) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 117) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_user (line 125) | pub async fn find_by_user(user_uuid: &UserId, conn: &DbConn) -> Vec<Se...
    method find_by_user_and_type (line 135) | pub async fn find_by_user_and_type(user_uuid: &UserId, atype: i32, con...
    method delete_all_by_user (line 145) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...
    method migrate_u2f_to_webauthn (line 153) | pub async fn migrate_u2f_to_webauthn(conn: &DbConn) -> EmptyResult {
    method migrate_credential_to_passkey (line 230) | pub async fn migrate_credential_to_passkey(conn: &DbConn) -> EmptyResu...
  type TwoFactorType (line 25) | pub enum TwoFactorType {
  type TwoFactorId (line 257) | pub struct TwoFactorId(String);
  type WebauthnRegistrationV3 (line 260) | pub struct WebauthnRegistrationV3 {
  method from (line 268) | fn from(value: WebauthnRegistrationV3) -> Self {

FILE: src/db/models/two_factor_duo_context.rs
  type TwoFactorDuoContext (line 10) | pub struct TwoFactorDuoContext {
    method find_by_state (line 18) | pub async fn find_by_state(state: &str, conn: &DbConn) -> Option<Self> {
    method save (line 27) | pub async fn save(state: &str, user_email: &str, nonce: &str, ttl: i64...
    method find_expired (line 49) | pub async fn find_expired(conn: &DbConn) -> Vec<Self> {
    method delete (line 59) | pub async fn delete(&self, conn: &DbConn) -> EmptyResult {
    method purge_expired_duo_contexts (line 69) | pub async fn purge_expired_duo_contexts(conn: &DbConn) {

FILE: src/db/models/two_factor_incomplete.rs
  type TwoFactorIncomplete (line 19) | pub struct TwoFactorIncomplete {
    method mark_incomplete (line 32) | pub async fn mark_incomplete(
    method mark_complete (line 67) | pub async fn mark_complete(user_uuid: &UserId, device_uuid: &DeviceId,...
    method find_by_user_and_device (line 75) | pub async fn find_by_user_and_device(user_uuid: &UserId, device_uuid: ...
    method find_logins_before (line 85) | pub async fn find_logins_before(dt: &NaiveDateTime, conn: &DbConn) -> ...
    method delete (line 94) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method delete_by_user_and_device (line 98) | pub async fn delete_by_user_and_device(user_uuid: &UserId, device_uuid...
    method delete_all_by_user (line 108) | pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> ...

FILE: src/db/models/user.rs
  type User (line 25) | pub struct User {
    constant CLIENT_KDF_TYPE_DEFAULT (line 105) | pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32;
    constant CLIENT_KDF_ITER_DEFAULT (line 106) | pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000;
    method new (line 108) | pub fn new(email: &str, name: Option<String>) -> Self {
    method check_valid_password (line 156) | pub fn check_valid_password(&self, password: &str) -> bool {
    method check_valid_recovery_code (line 165) | pub fn check_valid_recovery_code(&self, recovery_code: &str) -> bool {
    method check_valid_api_key (line 173) | pub fn check_valid_api_key(&self, key: &str) -> bool {
    method set_password (line 188) | pub fn set_password(
    method reset_security_stamp (line 210) | pub fn reset_security_stamp(&mut self) {
    method set_stamp_exception (line 221) | pub fn set_stamp_exception(&mut self, route_exception: Vec<String>) {
    method reset_stamp_exception (line 231) | pub fn reset_stamp_exception(&mut self) {
    method display_name (line 235) | pub fn display_name(&self) -> &str {
    method to_json (line 247) | pub async fn to_json(&self, conn: &DbConn) -> Value {
    method save (line 286) | pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
    method delete (line 315) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method update_uuid_revision (line 343) | pub async fn update_uuid_revision(uuid: &UserId, conn: &DbConn) {
    method update_all_revisions (line 349) | pub async fn update_all_revisions(conn: &DbConn) -> EmptyResult {
    method update_revision (line 362) | pub async fn update_revision(&mut self, conn: &DbConn) -> EmptyResult {
    method _update_revision (line 368) | async fn _update_revision(uuid: &UserId, date: &NaiveDateTime, conn: &...
    method find_by_mail (line 379) | pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> {
    method find_by_uuid (line 389) | pub async fn find_by_uuid(uuid: &UserId, conn: &DbConn) -> Option<Self> {
    method find_by_device_for_email2fa (line 398) | pub async fn find_by_device_for_email2fa(device_uuid: &DeviceId, conn:...
    method get_all (line 412) | pub async fn get_all(conn: &DbConn) -> Vec<(Self, Option<SsoUser>)> {
    method last_active (line 424) | pub async fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> {
  type Invitation (line 73) | pub struct Invitation {
    method new (line 433) | pub fn new(email: &str) -> Self {
    method save (line 440) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method delete (line 465) | pub async fn delete(self, conn: &DbConn) -> EmptyResult {
    method find_by_mail (line 473) | pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> {
    method take (line 483) | pub async fn take(mail: &str, conn: &DbConn) -> bool {
  type SsoUser (line 80) | pub struct SsoUser {
    method save (line 512) | pub async fn save(&self, conn: &DbConn) -> EmptyResult {
    method find_by_identifier (line 529) | pub async fn find_by_identifier(identifier: &str, conn: &DbConn) -> Op...
    method find_by_mail (line 540) | pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option<(User, ...
    method delete (line 553) | pub async fn delete(user_uuid: &UserId, conn: &DbConn) -> EmptyResult {
  type UserKdfType (line 85) | pub enum UserKdfType {
  type UserStatus (line 90) | enum UserStatus {
  type UserStampException (line 97) | pub struct UserStampException {
  type UserId (line 509) | pub struct UserId(String);

FILE: src/db/query_logger.rs
  function simple_logger (line 8) | pub fn simple_logger() -> Option<Box<dyn Instrumentation>> {

FILE: src/error.rs
  type Empty (line 63) | pub struct Empty {}
  type Compact (line 65) | pub struct Compact {}
  method fmt (line 110) | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
  method new (line 130) | pub fn new<M: Into<String>, N: Into<String>>(usr_msg: M, log_msg: N) -> ...
  method new_msg (line 134) | pub fn new_msg<M: Into<String> + Clone>(usr_msg: M) -> Self {
  method empty (line 138) | pub fn empty() -> Self {
  method with_msg (line 143) | pub fn with_msg<M: Into<String>>(mut self, msg: M) -> Self {
  method with_kind (line 149) | pub fn with_kind(mut self, kind: ErrorKind) -> Self {
  method with_code (line 155) | pub const fn with_code(mut self, code: u16) -> Self {
  method with_event (line 161) | pub fn with_event(mut self, event: ErrorEvent) -> Self {
  method get_event (line 166) | pub fn get_event(&self) -> &Option<ErrorEvent> {
  method message (line 170) | pub fn message(&self) -> &str {
  type MapResult (line 175) | pub trait MapResult<S> {
    method map_res (line 176) | fn map_res(self, msg: &str) -> Result<S, Error>;
  function map_res (line 180) | fn map_res(self, msg: &str) -> Result<S, Error> {
  function map_res (line 186) | fn map_res(self, msg: &str) -> Result<(), Error> {
  function map_res (line 192) | fn map_res(self, msg: &str) -> Result<S, Error> {
  function _has_source (line 197) | const fn _has_source<T>(e: T) -> Option<T> {
  function _no_source (line 200) | fn _no_source<T, S>(_: T) -> Option<S> {
  function _serialize (line 204) | fn _serialize(e: &impl Serialize, _msg: &str) -> String {
  method serialize (line 213) | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  method serialize (line 253) | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  type ApiErrorMsg (line 273) | struct ApiErrorMsg<'a> {
  type ApiErrorResponse (line 278) | struct ApiErrorResponse<'a>(ApiErrorMsg<'a>);
  type CompactApiErrorResponse (line 281) | struct CompactApiErrorResponse<'a>(ApiErrorMsg<'a>);
  function _api_error (line 283) | fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
  function _compact_api_error (line 290) | fn _compact_api_error(_: &impl std::any::Any, msg: &str) -> String {
  method respond_to (line 307) | fn respond_to(self, _: &Request<'_>) -> response::Result<'static> {

FILE: src/http_client.rs
  function make_http_request (line 19) | pub fn make_http_request(method: reqwest::Method, url: &str) -> Result<r...
  function get_reqwest_client_builder (line 35) | pub fn get_reqwest_client_builder() -> ClientBuilder {
  function should_block_address (line 62) | pub fn should_block_address(domain_or_ip: &str) -> bool {
  function should_block_ip (line 72) | fn should_block_ip(ip: IpAddr) -> bool {
  function should_block_address_regex (line 80) | fn should_block_address_regex(domain_or_ip: &str) -> bool {
  function should_block_host (line 103) | fn should_block_host(host: &Host<&str>) -> Result<(), CustomHttpClientEr...
  type CustomHttpClientError (line 129) | pub enum CustomHttpClientError {
    method downcast_ref (line 140) | pub fn downcast_ref(e: &dyn std::error::Error) -> Option<&Self> {
    method fmt (line 154) | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
  type CustomDnsResolver (line 174) | enum CustomDnsResolver {
    method instance (line 181) | fn instance() -> Arc<Self> {
    method new (line 186) | fn new() -> Arc<Self> {
    method resolve_domain (line 203) | async fn resolve_domain(&self, name: &str) -> Result<Option<SocketAddr...
  type BoxError (line 178) | type BoxError = Box<dyn std::error::Error + Send + Sync>;
  function pre_resolve (line 219) | fn pre_resolve(name: &str) -> Result<(), CustomHttpClientError> {
  function post_resolve (line 229) | fn post_resolve(name: &str, ip: IpAddr) -> Result<(), CustomHttpClientEr...
  method resolve (line 241) | fn resolve(&self, name: Name) -> Resolving {
  type AwsReqwestConnector (line 263) | pub(crate) struct AwsReqwestConnector {
  method call (line 268) | fn call(&self, request: aws_smithy_runtime_api::client::orchestrator::Ht...
  method http_connector (line 297) | fn http_connector(

FILE: src/mail.rs
  function sendmail_transport (line 24) | fn sendmail_transport() -> AsyncSendmailTransport<Tokio1Executor> {
  function smtp_transport (line 32) | fn smtp_transport() -> AsyncSmtpTransport<Tokio1Executor> {
  function sanitize_data (line 99) | fn sanitize_data(data: &mut serde_json::Value) {
  function get_text (line 120) | fn get_text(template_name: &'static str, data: serde_json::Value) -> Res...
  function get_template (line 128) | fn get_template(template_name: &str, data: &serde_json::Value) -> Result...
  function send_password_hint (line 149) | pub async fn send_password_hint(address: &str, hint: Option<String>) -> ...
  function send_delete_account (line 168) | pub async fn send_delete_account(address: &str, user_id: &UserId) -> Emp...
  function send_verify_email (line 186) | pub async fn send_verify_email(address: &str, user_id: &UserId) -> Empty...
  function send_register_verify_email (line 204) | pub async fn send_register_verify_email(email: &str, token: &str) -> Emp...
  function send_welcome (line 225) | pub async fn send_welcome(address: &str) -> EmptyResult {
  function send_welcome_must_verify (line 237) | pub async fn send_welcome_must_verify(address: &str, user_id: &UserId) -...
  function send_2fa_removed_from_org (line 254) | pub async fn send_2fa_removed_from_org(address: &str, org_name: &str) ->...
  function send_single_org_removed_from_org (line 267) | pub async fn send_single_org_removed_from_org(address: &str, org_name: &...
  function send_invite (line 280) | pub async fn send_invite(
  function send_emergency_access_invite (line 330) | pub async fn send_emergency_access_invite(
  function send_emergency_access_invite_accepted (line 373) | pub async fn send_emergency_access_invite_accepted(address: &str, grante...
  function send_emergency_access_invite_confirmed (line 386) | pub async fn send_emergency_access_invite_confirmed(address: &str, grant...
  function send_emergency_access_recovery_approved (line 399) | pub async fn send_emergency_access_recovery_approved(address: &str, gran...
  function send_emergency_access_recovery_initiated (line 412) | pub async fn send_emergency_access_recovery_initiated(
  function send_emergency_access_recovery_reminder (line 432) | pub async fn send_emergency_access_recovery_reminder(
  function send_emergency_access_recovery_rejected (line 452) | pub async fn send_emergency_access_recovery_rejected(address: &str, gran...
  function send_emergency_access_recovery_timed_out (line 465) | pub async fn send_emergency_access_recovery_timed_out(address: &str, gra...
  function send_invite_accepted (line 479) | pub async fn send_invite_accepted(new_user_email: &str, address: &str, o...
  function send_invite_confirmed (line 493) | pub async fn send_invite_confirmed(address: &str, org_name: &str) -> Emp...
  function send_new_device_logged_in (line 506) | pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &Nai...
  function send_incomplete_2fa_login (line 525) | pub async fn send_incomplete_2fa_login(
  function send_token (line 551) | pub async fn send_token(address: &str, token: &str) -> EmptyResult {
  function send_change_email (line 564) | pub async fn send_change_email(address: &str, token: &str) -> EmptyResult {
  function send_change_email_existing (line 577) | pub async fn send_change_email_existing(address: &str, acting_address: &...
  function send_change_email_invited (line 591) | pub async fn send_change_email_invited(address: &str, acting_address: &s...
  function send_sso_change_email (line 605) | pub async fn send_sso_change_email(address: &str) -> EmptyResult {
  function send_test (line 617) | pub async fn send_test(address: &str) -> EmptyResult {
  function send_admin_reset_password (line 629) | pub async fn send_admin_reset_password(address: &str, user_name: &str, o...
  function send_protected_action_token (line 642) | pub async fn send_protected_action_token(address: &str, token: &str) -> ...
  function send_with_selected_transport (line 655) | async fn send_with_selected_transport(email: Message) -> EmptyResult {
  function send_email (line 707) | async fn send_email(address: &str, subject: &str, body_html: String, bod...

FILE: src/main.rs
  function main (line 72) | async fn main() -> Result<(), Error> {
  constant HELP (line 96) | const HELP: &str = "\
  constant VERSION (line 117) | pub const VERSION: Option<&str> = option_env!("VW_VERSION");
  function parse_args (line 119) | fn parse_args() {
  function launch_info (line 205) | fn launch_info() {
  function init_logging (line 230) | fn init_logging() -> Result<log::LevelFilter, Error> {
  function chain_syslog (line 423) | fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
  function create_dir (line 440) | fn create_dir(path: &str, description: &str) {
  function check_data_folder (line 446) | async fn check_data_folder() {
  function container_data_folder_is_persistent (line 502) | async fn container_data_folder_is_persistent(data_folder: &str) -> bool {
  function check_web_vault (line 529) | fn check_web_vault() {
  function create_db_pool (line 547) | async fn create_db_pool() -> db::DbPool {
  function launch_rocket (line 557) | async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<()...
  function schedule_jobs (line 624) | fn schedule_jobs(pool: db::DbPool) {

FILE: src/ratelimit.rs
  type Limiter (line 7) | type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultC...
  function check_limit_login (line 21) | pub fn check_limit_login(ip: &IpAddr) -> Result<(), Error> {
  function check_limit_admin (line 30) | pub fn check_limit_admin(ip: &IpAddr) -> Result<(), Error> {

FILE: src/sso.rs
  type OIDCCode (line 45) | pub struct OIDCCode(String);
  type OIDCCodeChallenge (line 66) | pub struct OIDCCodeChallenge(String);
  type OIDCCodeVerifier (line 86) | pub struct OIDCCodeVerifier(String);
  type OIDCState (line 106) | pub struct OIDCState(String);
  type SsoTokenJwtClaims (line 109) | struct SsoTokenJwtClaims {
  function encode_ssotoken_claims (line 120) | pub fn encode_ssotoken_claims() -> String {
  type BasicTokenClaims (line 133) | struct BasicTokenClaims {
    method nbf (line 146) | fn nbf(&self) -> i64 {
  type BasicTokenClaimsValidation (line 140) | struct BasicTokenClaimsValidation {
  function decode_token_claims (line 151) | fn decode_token_claims(token_name: &str, token: &str) -> ApiResult<Basic...
  function decode_state (line 173) | pub fn decode_state(base64_state: &str) -> ApiResult<OIDCState> {
  function authorize_url (line 186) | pub async fn authorize_url(
  type OIDCIdentifier (line 229) | pub struct OIDCIdentifier(String);
    method new (line 232) | fn new(issuer: &str, subject: &str) -> Self {
  function exchange_code (line 241) | pub async fn exchange_code(
  function redeem (line 314) | pub async fn redeem(
  function create_auth_tokens (line 354) | pub fn create_auth_tokens(
  function _create_auth_tokens (line 380) | fn _create_auth_tokens(
  function exchange_refresh_token (line 422) | pub async fn exchange_refresh_token(

FILE: src/sso_client.rs
  type CustomClient (line 25) | pub type CustomClient = openidconnect::Client<
  type RefreshTokenResponse (line 45) | pub type RefreshTokenResponse = (Option<String>, String, Option<Duration>);
  type Client (line 48) | pub struct Client {
    method _get_client (line 55) | async fn _get_client() -> ApiResult<Self> {
    method cached (line 95) | pub async fn cached() -> ApiResult<Self> {
    method invalidate (line 109) | pub fn invalidate() {
    method authorize_url (line 116) | pub async fn authorize_url(
    method exchange_code (line 145) | pub async fn exchange_code(
    method user_info (line 208) | pub async fn user_info(&self, access_token: AccessToken) -> ApiResult<...
    method check_validity (line 215) | pub async fn check_validity(access_token: String) -> EmptyResult {
    method vw_id_token_verifier (line 225) | pub fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_> {
    method exchange_refresh_token (line 240) | pub async fn exchange_refresh_token(refresh_token: String) -> ApiResul...
    method _exchange_refresh_token (line 249) | async fn _exchange_refresh_token(&self, refresh_token: String) -> Resu...
  type AuthorizationRequestExt (line 266) | trait AuthorizationRequestExt<'a> {
    method add_extra_params (line 267) | fn add_extra_params<N: Into<Cow<'a, str>>, V: Into<Cow<'a, str>>>(self...
  function add_extra_params (line 273) | fn add_extra_params<N: Into<Cow<'a, str>>, V: Into<Cow<'a, str>>>(mut se...

FILE: src/static/scripts/admin.js
  function getBaseUrl (line 5) | function getBaseUrl() {
  constant BASE_URL (line 18) | const BASE_URL = getBaseUrl();
  function reload (line 20) | function reload() {
  function msg (line 26) | function msg(text, reload_page = true) {
  function _fetch (line 31) | function _fetch(method, url, successMsg, errMsg, body, reload_page = tru...
  function _post (line 68) | function _post(url, successMsg, errMsg, body, reload_page = true) {
  function _delete (line 72) | function _delete(url, successMsg, errMsg, body, reload_page = true) {

FILE: src/static/scripts/admin_diagnostics.js
  function isValidIp (line 26) | function isValidIp(ip) {
  function checkVersions (line 32) | function checkVersions(platform, installed, latest, commit=null, compare...
  function generateSupportString (line 79) | async function generateSupportString(event, dj) {
  function copyToClipboard (line 150) | function copyToClipboard(event) {
  function checkTimeDrift (line 170) | function checkTimeDrift(utcTimeA, utcTimeB, statusPrefix) {
  function checkDomain (line 184) | function checkDomain(browserURL, serverURL) {
  function initVersionCheck (line 201) | function initVersionCheck(dj) {
  function checkDns (line 216) | function checkDns(dns_resolved) {
  function fetchCheckUrl (line 225) | async function fetchCheckUrl(url) {
  function checkSecurityHeaders (line 235) | function checkSecurityHeaders(headers, omit) {
  function checkHttpResponse (line 281) | async function checkHttpResponse() {
  function fetchWsUrl (line 355) | async function fetchWsUrl(wsUrl) {
  function checkWebsocketConnection (line 373) | async function checkWebsocketConnection() {
  function init (line 384) | function init(dj) {

FILE: src/static/scripts/admin_organizations.js
  function deleteOrganization (line 5) | function deleteOrganization(event) {
  function initActions (line 33) | function initActions() {

FILE: src/static/scripts/admin_settings.js
  function smtpTest (line 5) | function smtpTest(event) {
  function getFormData (line 29) | function getFormData() {
  function saveConfig (line 46) | function saveConfig(event) {
  function deleteConf (line 56) | function deleteConf(event) {
  function backupDatabase (line 73) | function backupDatabase(event) {
  function initChangeDetection (line 84) | function initChangeDetection(form) {
  function formHasChanges (line 93) | function formHasChanges(form) {
  function preventFormSubmitOnEnter (line 98) | function preventFormSubmitOnEnter(form) {
  function submitTestEmailOnEnter (line 109) | function submitTestEmailOnEnter() {
  function colorRiskSettings (line 122) | function colorRiskSettings() {
  function toggleVis (line 131) | function toggleVis(event) {
  function masterCheck (line 144) | function masterCheck(check_id, inputs_query) {
  function checkAdminToken (line 163) | function checkAdminToken() {
  function showWarnings (line 191) | function showWarnings() {

FILE: src/static/scripts/admin_users.js
  function deleteUser (line 5) | function deleteUser(event) {
  function deleteSSOUser (line 27) | function deleteSSOUser(event) {
  function remove2fa (line 49) | function remove2fa(event) {
  function deauthUser (line 67) | function deauthUser(event) {
  function disableUser (line 85) | function disableUser(event) {
  function enableUser (line 103) | function enableUser(event) {
  function updateRevisions (line 121) | function updateRevisions(event) {
  function inviteUser (line 130) | function inviteUser(event) {
  function resendUserInvite (line 145) | function resendUserInvite (event) {
  constant ORG_TYPES (line 163) | const ORG_TYPES = {
  function updateUserOrgType (line 238) | function updateUserOrgType(event) {
  function initUserTable (line 251) | function initUserTable() {

FILE: src/static/scripts/bootstrap.bundle.js
  method set (line 25) | set(element, key, instance) {
  method get (line 40) | get(element, key) {
  method remove (line 46) | remove(element, key) {
  function makeEventUid (line 333) | function makeEventUid(element, uid) {
  function getElementEvents (line 336) | function getElementEvents(element) {
  function bootstrapHandler (line 342) | function bootstrapHandler(element, fn) {
  function bootstrapDelegationHandler (line 353) | function bootstrapDelegationHandler(element, selector, fn) {
  function findHandler (line 374) | function findHandler(events, callable, delegationSelector = null) {
  function normalizeParameters (line 377) | function normalizeParameters(originalTypeEvent, handler, delegationFunct...
  function addHandler (line 387) | function addHandler(element, originalTypeEvent, handler, delegationFunct...
  function removeHandler (line 421) | function removeHandler(element, events, typeEvent, handler, delegationSe...
  function removeNamespacedHandlers (line 429) | function removeNamespacedHandlers(element, events, typeEvent, namespace) {
  function getTypeEvent (line 437) | function getTypeEvent(event) {
  method on (line 443) | on(element, event, handler, delegationFunction) {
  method one (line 446) | one(element, event, handler, delegationFunction) {
  method off (line 449) | off(element, originalTypeEvent, handler, delegationFunction) {
  method trigger (line 478) | trigger(element, event, args) {
  function hydrateObj (line 512) | function hydrateObj(obj, meta = {}) {
  function normalizeData (line 535) | function normalizeData(value) {
  function normalizeDataKey (line 557) | function normalizeDataKey(key) {
  method setDataAttribute (line 561) | setDataAttribute(element, key, value) {
  method removeDataAttribute (line 564) | removeDataAttribute(element, key) {
  method getDataAttributes (line 567) | getDataAttributes(element) {
  method getDataAttribute (line 580) | getDataAttribute(element, key) {
  class Config (line 597) | class Config {
    method Default (line 599) | static get Default() {
    method DefaultType (line 602) | static get DefaultType() {
    method NAME (line 605) | static get NAME() {
    method _getConfig (line 608) | _getConfig(config) {
    method _configAfterMerge (line 614) | _configAfterMerge(config) {
    method _mergeConfigObj (line 617) | _mergeConfigObj(config, element) {
    method _typeCheckConfig (line 627) | _typeCheckConfig(config, configTypes = this.constructor.DefaultType) {
  class BaseComponent (line 656) | class BaseComponent extends Config {
    method constructor (line 657) | constructor(element, config) {
    method dispose (line 669) | dispose() {
    method _queueCallback (line 678) | _queueCallback(callback, element, isAnimated = true) {
    method _getConfig (line 681) | _getConfig(config) {
    method getInstance (line 689) | static getInstance(element) {
    method getOrCreateInstance (line 692) | static getOrCreateInstance(element, config = {}) {
    method VERSION (line 695) | static get VERSION() {
    method DATA_KEY (line 698) | static get DATA_KEY() {
    method EVENT_KEY (line 701) | static get EVENT_KEY() {
    method eventName (line 704) | static eventName(name) {
  method find (line 738) | find(selector, element = document.documentElement) {
  method findOne (line 741) | findOne(selector, element = document.documentElement) {
  method children (line 744) | children(element, selector) {
  method parents (line 747) | parents(element, selector) {
  method prev (line 756) | prev(element, selector) {
  method next (line 767) | next(element, selector) {
  method focusableChildren (line 777) | focusableChildren(element) {
  method getSelectorFromElement (line 781) | getSelectorFromElement(element) {
  method getElementFromSelector (line 788) | getElementFromSelector(element) {
  method getMultipleElementsFromSelector (line 792) | getMultipleElementsFromSelector(element) {
  class Alert (line 847) | class Alert extends BaseComponent {
    method NAME (line 849) | static get NAME() {
    method close (line 854) | close() {
    method _destroyElement (line 865) | _destroyElement() {
    method jQueryInterface (line 872) | static jQueryInterface(config) {
  class Button (line 922) | class Button extends BaseComponent {
    method NAME (line 924) | static get NAME() {
    method toggle (line 929) | toggle() {
    method jQueryInterface (line 935) | static jQueryInterface(config) {
  class Swipe (line 1000) | class Swipe extends Config {
    method constructor (line 1001) | constructor(element, config) {
    method Default (line 1014) | static get Default() {
    method DefaultType (line 1017) | static get DefaultType() {
    method NAME (line 1020) | static get NAME() {
    method dispose (line 1025) | dispose() {
    method _start (line 1030) | _start(event) {
    method _end (line 1039) | _end(event) {
    method _move (line 1046) | _move(event) {
    method _handleSwipe (line 1049) | _handleSwipe() {
    method _initEvents (line 1061) | _initEvents() {
    method _eventIsPointerPenTouch (line 1072) | _eventIsPointerPenTouch(event) {
    method isSupported (line 1077) | static isSupported() {
  class Carousel (line 1154) | class Carousel extends BaseComponent {
    method constructor (line 1155) | constructor(element, config) {
    method Default (line 1170) | static get Default() {
    method DefaultType (line 1173) | static get DefaultType() {
    method NAME (line 1176) | static get NAME() {
    method next (line 1181) | next() {
    method nextWhenVisible (line 1184) | nextWhenVisible() {
    method prev (line 1192) | prev() {
    method pause (line 1195) | pause() {
    method cycle (line 1201) | cycle() {
    method _maybeEnableCycle (line 1206) | _maybeEnableCycle() {
    method to (line 1216) | to(index) {
    method dispose (line 1232) | dispose() {
    method _configAfterMerge (line 1240) | _configAfterMerge(config) {
    method _addEventListeners (line 1244) | _addEventListeners() {
    method _addTouchEventListeners (line 1256) | _addTouchEventListeners() {
    method _keydown (line 1286) | _keydown(event) {
    method _getItemIndex (line 1296) | _getItemIndex(element) {
    method _setActiveIndicatorElement (line 1299) | _setActiveIndicatorElement(index) {
    method _updateInterval (line 1312) | _updateInterval() {
    method _slide (line 1320) | _slide(order, element = null) {
    method _isAnimated (line 1371) | _isAnimated() {
    method _getActive (line 1374) | _getActive() {
    method _getItems (line 1377) | _getItems() {
    method _clearInterval (line 1380) | _clearInterval() {
    method _directionToOrder (line 1386) | _directionToOrder(direction) {
    method _orderToDirection (line 1392) | _orderToDirection(order) {
    method jQueryInterface (line 1400) | static jQueryInterface(config) {
  class Collapse (line 1499) | class Collapse extends BaseComponent {
    method constructor (line 1500) | constructor(element, config) {
    method Default (line 1522) | static get Default() {
    method DefaultType (line 1525) | static get DefaultType() {
    method NAME (line 1528) | static get NAME() {
    method toggle (line 1533) | toggle() {
    method show (line 1540) | show() {
    method hide (line 1580) | hide() {
    method _isShown (line 1611) | _isShown(element = this._element) {
    method _configAfterMerge (line 1614) | _configAfterMerge(config) {
    method _getDimension (line 1619) | _getDimension() {
    method _initializeChildren (line 1622) | _initializeChildren() {
    method _getFirstLevelChildren (line 1634) | _getFirstLevelChildren(selector) {
    method _addAriaAndCollapsedClass (line 1639) | _addAriaAndCollapsedClass(triggerArray, isOpen) {
    method jQueryInterface (line 1650) | static jQueryInterface(config) {
  function getNodeName (line 1721) | function getNodeName(element) {
  function getWindow (line 1725) | function getWindow(node) {
  function isElement (line 1738) | function isElement(node) {
  function isHTMLElement (line 1743) | function isHTMLElement(node) {
  function isShadowRoot (line 1748) | function isShadowRoot(node) {
  function applyStyles (line 1760) | function applyStyles(_ref) {
  function effect$2 (line 1787) | function effect$2(_ref2) {
  function getBasePlacement (line 1841) | function getBasePlacement(placement) {
  function getUAString (line 1849) | function getUAString() {
  function isLayoutViewport (line 1861) | function isLayoutViewport() {
  function getBoundingClientRect (line 1865) | function getBoundingClientRect(element, includeScale, isFixedStrategy) {
  function getLayoutRect (line 1905) | function getLayoutRect(element) {
  function contains (line 1928) | function contains(parent, child) {
  function getComputedStyle$1 (line 1951) | function getComputedStyle$1(element) {
  function isTableElement (line 1955) | function isTableElement(element) {
  function getDocumentElement (line 1959) | function getDocumentElement(element) {
  function getParentNode (line 1965) | function getParentNode(element) {
  function getTrueOffsetParent (line 1982) | function getTrueOffsetParent(element) {
  function getContainingBlock (line 1993) | function getContainingBlock(element) {
  function getOffsetParent (line 2029) | function getOffsetParent(element) {
  function getMainAxisFromPlacement (line 2044) | function getMainAxisFromPlacement(placement) {
  function within (line 2048) | function within(min$1, value, max$1) {
  function withinMaxClamp (line 2051) | function withinMaxClamp(min, value, max) {
  function getFreshSideObject (line 2056) | function getFreshSideObject() {
  function mergePaddingObject (line 2065) | function mergePaddingObject(paddingObject) {
  function expandToHashMap (line 2069) | function expandToHashMap(value, keys) {
  function arrow (line 2083) | function arrow(_ref) {
  function effect$1 (line 2120) | function effect$1(_ref2) {
  function getVariation (line 2157) | function getVariation(placement) {
  function roundOffsetsByDPR (line 2170) | function roundOffsetsByDPR(_ref, win) {
  function mapToStyles (line 2180) | function mapToStyles(_ref2) {
  function computeStyles (line 2272) | function computeStyles(_ref5) {
  function effect (line 2326) | function effect(_ref) {
  function getOppositePlacement (line 2376) | function getOppositePlacement(placement) {
  function getOppositeVariationPlacement (line 2386) | function getOppositeVariationPlacement(placement) {
  function getWindowScroll (line 2392) | function getWindowScroll(node) {
  function getWindowScrollBarX (line 2402) | function getWindowScrollBarX(element) {
  function getViewportRect (line 2413) | function getViewportRect(element, strategy) {
  function getDocumentRect (line 2443) | function getDocumentRect(element) {
  function isScrollParent (line 2466) | function isScrollParent(element) {
  function getScrollParent (line 2476) | function getScrollParent(node) {
  function listScrollParents (line 2496) | function listScrollParents(element, list) {
  function rectToClientRect (line 2512) | function rectToClientRect(rect) {
  function getInnerBoundingClientRect (line 2521) | function getInnerBoundingClientRect(element, strategy) {
  function getClientRectFromMixedType (line 2534) | function getClientRectFromMixedType(element, clippingParent, strategy) {
  function getClippingParents (line 2541) | function getClippingParents(element) {
  function getClippingRect (line 2558) | function getClippingRect(element, boundary, rootBoundary, strategy) {
  function computeOffsets (line 2577) | function computeOffsets(_ref) {
  function detectOverflow (line 2642) | function detectOverflow(state, options) {
  function computeAutoPlacement (line 2697) | function computeAutoPlacement(state, options) {
  function getExpandedFallbackPlacements (line 2737) | function getExpandedFallbackPlacements(placement) {
  function flip (line 2746) | function flip(_ref) {
  function getSideOffsets (line 2877) | function getSideOffsets(overflow, rect, preventedOffsets) {
  function isAnySideFullyClipped (line 2893) | function isAnySideFullyClipped(overflow) {
  function hide (line 2899) | function hide(_ref) {
  function distanceAndSkiddingToXY (line 2936) | function distanceAndSkiddingToXY(placement, rects, offset) {
  function offset (line 2957) | function offset(_ref2) {
  function popperOffsets (line 2988) | function popperOffsets(_ref) {
  function getAltAxis (line 3011) | function getAltAxis(axis) {
  function preventOverflow (line 3015) | function preventOverflow(_ref) {
  function getHTMLElementScroll (line 3146) | function getHTMLElementScroll(element) {
  function getNodeScroll (line 3153) | function getNodeScroll(node) {
  function isElementScaled (line 3161) | function isElementScaled(element) {
  function getCompositeRect (line 3170) | function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {
  function order (line 3211) | function order(modifiers) {
  function orderModifiers (line 3243) | function orderModifiers(modifiers) {
  function debounce (line 3254) | function debounce(fn) {
  function mergeByName (line 3270) | function mergeByName(modifiers) {
  function areValidElements (line 3291) | function areValidElements() {
  function popperGenerator (line 3301) | function popperGenerator(generatorOptions) {
  class Dropdown (line 3596) | class Dropdown extends BaseComponent {
    method constructor (line 3597) | constructor(element, config) {
    method Default (line 3607) | static get Default() {
    method DefaultType (line 3610) | static get DefaultType() {
    method NAME (line 3613) | static get NAME() {
    method toggle (line 3618) | toggle() {
    method show (line 3621) | show() {
    method hide (line 3649) | hide() {
    method dispose (line 3658) | dispose() {
    method update (line 3664) | update() {
    method _completeHide (line 3672) | _completeHide(relatedTarget) {
    method _getConfig (line 3694) | _getConfig(config) {
    method _createPopper (line 3702) | _createPopper() {
    method _isShown (line 3717) | _isShown() {
    method _getPlacement (line 3720) | _getPlacement() {
    method _detectNavbar (line 3742) | _detectNavbar() {
    method _getOffset (line 3745) | _getOffset() {
    method _getPopperConfig (line 3757) | _getPopperConfig() {
    method _selectMenuItem (line 3786) | _selectMenuItem({
    method jQueryInterface (line 3801) | static jQueryInterface(config) {
    method clearMenus (line 3813) | static clearMenus(event) {
    method dataApiKeydownHandler (line 3842) | static dataApiKeydownHandler(event) {
  class Backdrop (line 3930) | class Backdrop extends Config {
    method constructor (line 3931) | constructor(config) {
    method Default (line 3939) | static get Default() {
    method DefaultType (line 3942) | static get DefaultType() {
    method NAME (line 3945) | static get NAME() {
    method show (line 3950) | show(callback) {
    method hide (line 3965) | hide(callback) {
    method dispose (line 3976) | dispose() {
    method _getElement (line 3986) | _getElement() {
    method _configAfterMerge (line 3997) | _configAfterMerge(config) {
    method _append (line 4002) | _append() {
    method _emulateAnimation (line 4013) | _emulateAnimation(callback) {
  class FocusTrap (line 4051) | class FocusTrap extends Config {
    method constructor (line 4052) | constructor(config) {
    method Default (line 4060) | static get Default() {
    method DefaultType (line 4063) | static get DefaultType() {
    method NAME (line 4066) | static get NAME() {
    method activate (line 4071) | activate() {
    method deactivate (line 4083) | deactivate() {
    method _handleFocusin (line 4092) | _handleFocusin(event) {
    method _handleKeydown (line 4108) | _handleKeydown(event) {
  class ScrollBarHelper (line 4137) | class ScrollBarHelper {
    method constructor (line 4138) | constructor() {
    method getWidth (line 4143) | getWidth() {
    method hide (line 4148) | hide() {
    method reset (line 4157) | reset() {
    method isOverflowing (line 4163) | isOverflowing() {
    method _disableOverFlow (line 4168) | _disableOverFlow() {
    method _setElementAttributes (line 4172) | _setElementAttributes(selector, styleProperty, callback) {
    method _saveInitialAttribute (line 4184) | _saveInitialAttribute(element, styleProperty) {
    method _resetElementAttributes (line 4190) | _resetElementAttributes(selector, styleProperty) {
    method _applyManipulationCallback (line 4203) | _applyManipulationCallback(selector, callBack) {
  class Modal (line 4264) | class Modal extends BaseComponent {
    method constructor (line 4265) | constructor(element, config) {
    method Default (line 4277) | static get Default() {
    method DefaultType (line 4280) | static get DefaultType() {
    method NAME (line 4283) | static get NAME() {
    method toggle (line 4288) | toggle(relatedTarget) {
    method show (line 4291) | show(relatedTarget) {
    method hide (line 4308) | hide() {
    method dispose (line 4322) | dispose() {
    method handleUpdate (line 4329) | handleUpdate() {
    method _initializeBackDrop (line 4334) | _initializeBackDrop() {
    method _initializeFocusTrap (line 4341) | _initializeFocusTrap() {
    method _showElement (line 4346) | _showElement(relatedTarget) {
    method _addEventListeners (line 4373) | _addEventListeners() {
    method _hideModal (line 4405) | _hideModal() {
    method _isAnimated (line 4418) | _isAnimated() {
    method _triggerBackdropTransition (line 4421) | _triggerBackdropTransition() {
    method _adjustDialog (line 4449) | _adjustDialog() {
    method _resetAdjustments (line 4462) | _resetAdjustments() {
    method jQueryInterface (line 4468) | static jQueryInterface(config, relatedTarget) {
  class Offcanvas (line 4566) | class Offcanvas extends BaseComponent {
    method constructor (line 4567) | constructor(element, config) {
    method Default (line 4576) | static get Default() {
    method DefaultType (line 4579) | static get DefaultType() {
    method NAME (line 4582) | static get NAME() {
    method toggle (line 4587) | toggle(relatedTarget) {
    method show (line 4590) | show(relatedTarget) {
    method hide (line 4620) | hide() {
    method dispose (line 4644) | dispose() {
    method _initializeBackDrop (line 4651) | _initializeBackDrop() {
    method _initializeFocusTrap (line 4670) | _initializeFocusTrap() {
    method _addEventListeners (line 4675) | _addEventListeners() {
    method jQueryInterface (line 4689) | static jQueryInterface(config) {
  function sanitizeHtml (line 4818) | function sanitizeHtml(unsafeHtml, allowList, sanitizeFunction) {
  class TemplateFactory (line 4886) | class TemplateFactory extends Config {
    method constructor (line 4887) | constructor(config) {
    method Default (line 4893) | static get Default() {
    method DefaultType (line 4896) | static get DefaultType() {
    method NAME (line 4899) | static get NAME() {
    method getContent (line 4904) | getContent() {
    method hasContent (line 4907) | hasContent() {
    method changeContent (line 4910) | changeContent(content) {
    method toHtml (line 4918) | toHtml() {
    method _typeCheckConfig (line 4933) | _typeCheckConfig(config) {
    method _checkContent (line 4937) | _checkContent(arg) {
    method _setContent (line 4945) | _setContent(template, content, selector) {
    method _maybeSanitize (line 4965) | _maybeSanitize(arg) {
    method _resolvePossibleFunction (line 4968) | _resolvePossibleFunction(arg) {
    method _putElementInTemplate (line 4971) | _putElementInTemplate(element, templateElement) {
  class Tooltip (line 5065) | class Tooltip extends BaseComponent {
    method constructor (line 5066) | constructor(element, config) {
    method Default (line 5090) | static get Default() {
    method DefaultType (line 5093) | static get DefaultType() {
    method NAME (line 5096) | static get NAME() {
    method enable (line 5101) | enable() {
    method disable (line 5104) | disable() {
    method toggleEnabled (line 5107) | toggleEnabled() {
    method toggle (line 5110) | toggle() {
    method dispose (line 5120) | dispose() {
    method show (line 5129) | show() {
    method hide (line 5175) | hide() {
    method update (line 5210) | update() {
    method _isWithContent (line 5217) | _isWithContent() {
    method _getTipElement (line 5220) | _getTipElement() {
    method _createTipElement (line 5226) | _createTipElement(content) {
    method setContent (line 5243) | setContent(content) {
    method _getTemplateFactory (line 5250) | _getTemplateFactory(content) {
    method _getContentForTemplate (line 5264) | _getContentForTemplate() {
    method _getTitle (line 5269) | _getTitle() {
    method _initializeOnDelegatedTarget (line 5274) | _initializeOnDelegatedTarget(event) {
    method _isAnimated (line 5277) | _isAnimated() {
    method _isShown (line 5280) | _isShown() {
    method _createPopper (line 5283) | _createPopper(tip) {
    method _getOffset (line 5288) | _getOffset() {
    method _resolvePossibleFunction (line 5300) | _resolvePossibleFunction(arg) {
    method _getPopperConfig (line 5303) | _getPopperConfig(attachment) {
    method _setListeners (line 5342) | _setListeners() {
    method _fixTitle (line 5373) | _fixTitle() {
    method _enter (line 5384) | _enter() {
    method _leave (line 5396) | _leave() {
    method _setTimeout (line 5407) | _setTimeout(handler, timeout) {
    method _isWithActiveTrigger (line 5411) | _isWithActiveTrigger() {
    method _getConfig (line 5414) | _getConfig(config) {
    method _configAfterMerge (line 5430) | _configAfterMerge(config) {
    method _getDelegateConfig (line 5446) | _getDelegateConfig() {
    method _disposePopper (line 5461) | _disposePopper() {
    method jQueryInterface (line 5473) | static jQueryInterface(config) {
  class Popover (line 5525) | class Popover extends Tooltip {
    method Default (line 5527) | static get Default() {
    method DefaultType (line 5530) | static get DefaultType() {
    method NAME (line 5533) | static get NAME() {
    method _isWithContent (line 5538) | _isWithContent() {
    method _getContentForTemplate (line 5543) | _getContentForTemplate() {
    method _getContent (line 5549) | _getContent() {
    method jQueryInterface (line 5554) | static jQueryInterface(config) {
  class ScrollSpy (line 5625) | class ScrollSpy extends BaseComponent {
    method constructor (line 5626) | constructor(element, config) {
    method Default (line 5643) | static get Default() {
    method DefaultType (line 5646) | static get DefaultType() {
    method NAME (line 5649) | static get NAME() {
    method refresh (line 5654) | refresh() {
    method dispose (line 5666) | dispose() {
    method _configAfterMerge (line 5672) | _configAfterMerge(config) {
    method _maybeEnableSmoothScroll (line 5683) | _maybeEnableSmoothScroll() {
    method _getNewObserver (line 5709) | _getNewObserver() {
    method _observerCallback (line 5719) | _observerCallback(entries) {
    method _initializeTargetsAndObservables (line 5751) | _initializeTargetsAndObservables() {
    method _process (line 5769) | _process(target) {
    method _activateParents (line 5781) | _activateParents(target) {
    method _clearActiveClass (line 5795) | _clearActiveClass(parent) {
    method jQueryInterface (line 5804) | static jQueryInterface(config) {
  class Tab (line 5880) | class Tab extends BaseComponent {
    method constructor (line 5881) | constructor(element) {
    method NAME (line 5896) | static get NAME() {
    method show (line 5901) | show() {
    method _activate (line 5924) | _activate(element, relatedElem) {
    method _deactivate (line 5945) | _deactivate(element, relatedElem) {
    method _keydown (line 5967) | _keydown(event) {
    method _getChildren (line 5988) | _getChildren() {
    method _getActiveElem (line 5992) | _getActiveElem() {
    method _setInitialAttributes (line 5995) | _setInitialAttributes(parent, children) {
    method _setInitialAttributesOnChild (line 6001) | _setInitialAttributesOnChild(child) {
    method _setInitialAttributesOnTargetPanel (line 6017) | _setInitialAttributesOnTargetPanel(child) {
    method _toggleDropDown (line 6027) | _toggleDropDown(element, open) {
    method _setAttributeIfNotExists (line 6042) | _setAttributeIfNotExists(element, attribute, value) {
    method _elemIsActive (line 6047) | _elemIsActive(elem) {
    method _getInnerElement (line 6052) | _getInnerElement(elem) {
    method _getOuterElement (line 6057) | _getOuterElement(elem) {
    method jQueryInterface (line 6062) | static jQueryInterface(config) {
  class Toast (line 6146) | class Toast extends BaseComponent {
    method constructor (line 6147) | constructor(element, config) {
    method Default (line 6156) | static get Default() {
    method DefaultType (line 6159) | static get DefaultType() {
    method NAME (line 6162) | static get NAME() {
    method show (line 6167) | show() {
    method hide (line 6186) | hide() {
    method dispose (line 6202) | dispose() {
    method isShown (line 6209) | isShown() {
    method _maybeScheduleHide (line 6214) | _maybeScheduleHide() {
    method _onInteraction (line 6225) | _onInteraction(event, isInteracting) {
    method _setListeners (line 6250) | _setListeners() {
    method _clearTimeout (line 6256) | _clearTimeout() {
    method jQueryInterface (line 6262) | static jQueryInterface(config) {

FILE: src/static/scripts/datatables.js
  function _addClass (line 1451) | function _addClass(el, name) {
  function _fnHungarianMap (line 1819) | function _fnHungarianMap ( o )
  function _fnCamelToHungarian (line 1857) | function _fnCamelToHungarian ( src, user, force )
  function _fnCompatOpts (line 1907) | function _fnCompatOpts ( init )
  function _fnCompatCols (line 1973) | function _fnCompatCols ( init )
  function _fnBrowserDetect (line 1993) | function _fnBrowserDetect( settings )
  function _fnAddColumn (line 2053) | function _fnAddColumn( oSettings )
  function _fnColumnOptions (line 2082) | function _fnColumnOptions( oSettings, iCol, oOptions )
  function _fnAdjustColumnSizing (line 2188) | function _fnAdjustColumnSizing ( settings )
  function _fnColumnSizes (line 2206) | function _fnColumnSizes ( settings )
  function _fnVisibleToColumnIndex (line 2230) | function _fnVisibleToColumnIndex( oSettings, iMatch )
  function _fnColumnIndexToVisible (line 2248) | function _fnColumnIndexToVisible( oSettings, iMatch )
  function _fnVisibleColumns (line 2263) | function _fnVisibleColumns( settings )
  function _fnGetColumns (line 2289) | function _fnGetColumns( oSettings, sParam )
  function _typeResult (line 2312) | function _typeResult (typeDetect, res) {
  function _fnColumnTypes (line 2323) | function _fnColumnTypes ( settings )
  function _columnAutoRender (line 2442) | function _columnAutoRender(settings, colIdx) {
  function _columnAutoClass (line 2464) | function _columnAutoClass(container, colIdx, className) {
  function _fnApplyColumnDefs (line 2484) | function _fnApplyColumnDefs( oSettings, aoColDefs, aoCols, headerLayout,...
  function _fnColumnsSumWidth (line 2593) | function _fnColumnsSumWidth( settings, targets, original, incVisible ) {
  function _fnColumnsFromHeader (line 2634) | function _fnColumnsFromHeader( cell )
  function _fnAddData (line 2659) | function _fnAddData ( settings, dataIn, tr, tds )
  function _fnAddTr (line 2707) | function _fnAddTr( settings, trs )
  function _fnGetCellData (line 2732) | function _fnGetCellData( settings, rowIdx, colIdx, type )
  function _fnSetCellData (line 2807) | function _fnSetCellData( settings, rowIdx, colIdx, val )
  function _fnWriteCell (line 2824) | function _fnWriteCell(td, val)
  function _fnSplitObjNotation (line 2846) | function _fnSplitObjNotation( str )
  function _fnGetDataMaster (line 2882) | function _fnGetDataMaster ( settings )
  function _fnClearTable (line 2893) | function _fnClearTable( settings )
  function _fnInvalidate (line 2918) | function _fnInvalidate( settings, rowIdx, src, colIdx )
  function _fnGetRowElements (line 2992) | function _fnGetRowElements( settings, row, colIdx, d )
  function _fnGetRowDisplay (line 3096) | function _fnGetRowDisplay (settings, rowIdx) {
  function _fnCreateTr (line 3124) | function _fnCreateTr ( oSettings, iRow, nTrIn, anTds )
  function _fnRowAttributes (line 3219) | function _fnRowAttributes( settings, row )
  function _fnBuildHead (line 3259) | function _fnBuildHead( settings, side )
  function _fnHeaderLayout (line 3327) | function _fnHeaderLayout( settings, source, incColumns )
  function _fnDrawHead (line 3416) | function _fnDrawHead( settings, source )
  function _fnDraw (line 3452) | function _fnDraw( oSettings, ajaxComplete )
  function _fnReDraw (line 3583) | function _fnReDraw( settings, holdPosition, recompute )
  function _emptyRow (line 3626) | function _emptyRow ( settings ) {
  function _layoutItems (line 3652) | function _layoutItems (row, align, items) {
  function _layoutGetRow (line 3696) | function _layoutGetRow(rows, rowNum, align) {
  function _layoutArray (line 3744) | function _layoutArray ( settings, layout, side ) {
  function _layoutResolve (line 3811) | function _layoutResolve( settings, row ) {
  function _fnAddOptionsHtml (line 3863) | function _fnAddOptionsHtml ( settings )
  function _fnLayoutDom (line 3916) | function _fnLayoutDom( settings, dom, insert )
  function _fnDetectHeader (line 3997) | function _fnDetectHeader ( settings, thead, write )
  function _fnStart (line 4166) | function _fnStart( oSettings )
  function _fnBuildAjax (line 4193) | function _fnBuildAjax(oSettings, data, fn) {
  function _fnAjaxUpdate (line 4320) | function _fnAjaxUpdate(settings) {
  function _fnAjaxParameters (line 4336) | function _fnAjaxParameters(settings) {
  function _fnAjaxUpdateDraw (line 4404) | function _fnAjaxUpdateDraw(settings, json) {
  function _fnAjaxDataSrc (line 4446) | function _fnAjaxDataSrc(settings, json, write) {
  function _fnAjaxDataSrcParam (line 4483) | function _fnAjaxDataSrcParam(settings, param, json) {
  function _fnFilterComplete (line 4515) | function _fnFilterComplete ( settings, input )
  function _fnFilterCustom (line 4572) | function _fnFilterCustom( settings )
  function _fnFilter (line 4602) | function _fnFilter( searchRows, settings, input, options, column )
  function _fnFilterCreateSearch (line 4650) | function _fnFilterCreateSearch( search, inOpts )
  function _fnFilterData (line 4751) | function _fnFilterData ( settings )
  function _fnInitialise (line 4821) | function _fnInitialise ( settings )
  function _fnInitComplete (line 4915) | function _fnInitComplete ( settings )
  function _fnLengthChange (line 4933) | function _fnLengthChange ( settings, val )
  function _fnPageChange (line 4953) | function _fnPageChange ( settings, action, redraw )
  function _processingHtml (line 5025) | function _processingHtml ( settings )
  function _fnProcessingDisplay (line 5059) | function _fnProcessingDisplay ( settings, show )
  function _fnProcessingRun (line 5076) | function _fnProcessingRun( settings, enable, run ) {
  function _fnFeatureHtmlTable (line 5098) | function _fnFeatureHtmlTable ( settings )
  function _fnScrollDraw (line 5259) | function _fnScrollDraw ( settings )
  function _fnCalculateColumnWidths (line 5427) | function _fnCalculateColumnWidths ( settings )
  function _fnWrapperWidth (line 5689) | function _fnWrapperWidth(settings) {
  function _fnGetWideStrings (line 5711) | function _fnGetWideStrings( settings, colIdx )
  function _fnStringToCss (line 5792) | function _fnStringToCss( s )
  function _colGroup (line 5815) | function _colGroup( settings ) {
  function _fnSortInit (line 5828) | function _fnSortInit( settings ) {
  function _fnSortAttachListener (line 5864) | function _fnSortAttachListener(settings, node, selector, column, callbac...
  function _fnSortDisplay (line 5910) | function _fnSortDisplay(settings, display) {
  function _fnSortResolve (line 5937) | function _fnSortResolve (settings, nestedSort, sort) {
  function _fnSortFlatten (line 5977) | function _fnSortFlatten ( settings )
  function _fnSort (line 6048) | function _fnSort ( oSettings, col, dir )
  function _fnSortAdd (line 6188) | function _fnSortAdd ( settings, colIdx, addIndex, shift )
  function _fnSortingClasses (line 6273) | function _fnSortingClasses( settings )
  function _fnSortData (line 6306) | function _fnSortData( settings, colIdx )
  function _fnSaveState (line 6355) | function _fnSaveState ( settings )
  function _fnLoadState (line 6404) | function _fnLoadState ( settings, init, callback )
  function _fnImplementState (line 6425) | function _fnImplementState ( settings, s, callback) {
  function _fnLog (line 6598) | function _fnLog( settings, level, msg, tn )
  function _fnMap (line 6641) | function _fnMap( ret, src, name, mappedName )
  function _fnExtend (line 6683) | function _fnExtend( out, extender, breakRefs )
  function _fnBindAction (line 6720) | function _fnBindAction( n, selector, fn )
  function _fnCallbackReg (line 6747) | function _fnCallbackReg( settings, store, fn )
  function _fnCallbackFire (line 6770) | function _fnCallbackFire( settings, callbackArr, eventName, args, bubbles )
  function _fnLengthOverflow (line 6802) | function _fnLengthOverflow ( settings )
  function _fnRenderer (line 6827) | function _fnRenderer( settings, type )
  function _fnDataSource (line 6856) | function _fnDataSource ( settings )
  function _fnMacros (line 6875) | function _fnMacros ( settings, str, entries )
  function _fnArrayApply (line 6906) | function _fnArrayApply(arr, data) {
  function _fnListener (line 6930) | function _fnListener(that, name, src) {
  function _fnEscapeObject (line 6943) | function _fnEscapeObject(obj) {
  function _api_scope (line 7357) | function _api_scope( scope, fn, struct ) {
  function _api_find (line 7367) | function _api_find( src, name ) {
  function cleanHeader (line 10287) | function cleanHeader(node, className) {
  function __mld (line 12574) | function __mld( dtLib, momentFn, luxonFn, dateFn, arg1 ) {
  function resolveWindowLibs (line 12593) | function resolveWindowLibs() {
  function __mldObj (line 12603) | function __mldObj (d, format, locale) {
  function __mlHelper (line 12643) | function __mlHelper (localeString) {
  function _divProp (line 13491) | function _divProp(el, prop, val) {
  function _fnUpdateInfo (line 13559) | function _fnUpdateInfo ( settings, opts, node )
  function _pagingDynamic (line 13748) | function _pagingDynamic(opts) {
  function _pagingDraw (line 13768) | function _pagingDraw(settings, host, opts) {
  function _pagingButtonInfo (line 13879) | function _pagingButtonInfo(settings, button, page, pages) {
  function _pagingNumbers (line 13947) | function _pagingNumbers ( page, pages, buttons, addFirstLast ) {

FILE: src/static/scripts/jdenticon-3.3.0.js
  function parseHex (line 53) | function parseHex(hash, startPosition, octets) {
  function decToHex (line 57) | function decToHex(v) {
  function hueToRgb (line 65) | function hueToRgb(m1, m2, h) {
  function parseColor (line 78) | function parseColor(color) {
  function toCss3Color (line 103) | function toCss3Color(hexColor) {
  function hsl (line 126) | function hsl(hue, saturation, lightness) {
  function correctedHsl (line 153) | function correctedHsl(hue, saturation, lightness) {
  function defineConfigProperty (line 193) | function defineConfigProperty(rootObject) {
  function configure (line 201) | function configure(newConfiguration) {
  function getConfiguration (line 218) | function getConfiguration(paddingOrLocalConfig, defaultPadding) {
  function getIdenticonType (line 314) | function getIdenticonType(el) {
  function whenDocumentIsReady (line 328) | function whenDocumentIsReady(/** @type {Function} */ callback) {
  function observer (line 349) | function observer(updateCallback) {
  function Point (line 392) | function Point(x, y) {
  function Transform (line 401) | function Transform(x, y, size, rotation) {
  function Graphics (line 432) | function Graphics(renderer) {
  function centerShape (line 537) | function centerShape(index, g, cell, positionIndex) {
  function outerShape (line 658) | function outerShape(index, g, cell) {
  function colorTheme (line 684) | function colorTheme(hue, config) {
  function iconGenerator (line 706) | function iconGenerator(renderer, hash, config) {
  function sha1 (line 790) | function sha1(message) {
  function isValidHash (line 914) | function isValidHash(hashCandidate) {
  function computeHash (line 922) | function computeHash(value) {
  function CanvasRenderer (line 932) | function CanvasRenderer(ctx, iconSize) {
  function drawIcon (line 1029) | function drawIcon(ctx, hashOrValue, size, config) {
  function svgValue (line 1049) | function svgValue(value) {
  function SvgPath (line 1056) | function SvgPath() {
  function SvgRenderer (line 1101) | function SvgRenderer(target) {
  function SvgWriter (line 1193) | function SvgWriter(iconSize) {
  function toSvg (line 1247) | function toSvg(hashOrValue, size, config) {
  function SvgElement_append (line 1261) | function SvgElement_append(parentNode, name) {
  function SvgElement (line 1281) | function SvgElement(element) {
  function updateAll (line 1338) | function updateAll() {
  function updateAllConditional (line 1348) | function updateAllConditional() {
  function update (line 1372) | function update(el, hashOrValue, config) {
  function renderDomElement (line 1391) | function renderDomElement(el, hashOrValue, config, rendererFactory) {
  function jdenticonJqueryPlugin (line 1444) | function jdenticonJqueryPlugin(hashOrValue, config) {
  function jdenticonStartup (line 1486) | function jdenticonStartup() {

FILE: src/static/scripts/jquery-4.0.0.slim.js
  function toType (line 65) | function toType( obj ) {
  function isWindow (line 75) | function isWindow( obj ) {
  function isArrayLike (line 79) | function isArrayLike( obj ) {
  function DOMEval (line 101) | function DOMEval( code, node, doc ) {
  function nodeName (line 522) | function nodeName( elem, name ) {
  function createCache (line 576) | function createCache() {
  function testContext (line 598) | function testContext( context ) {
  function unescapeSelector (line 662) | function unescapeSelector( sel ) {
  function selectorError (line 666) | function selectorError( msg ) {
  function tokenize (line 674) | function tokenize( selector, parseOnly ) {
  function toSelector (line 829) | function toSelector( tokens ) {
  function access (line 841) | function access( elems, fn, key, value, chainable, emptyGet, raw ) {
  function fcssescape (line 1009) | function fcssescape( ch, asCodePoint ) {
  function sortOrder (line 1036) | function sortOrder( a, b ) {
  function find (line 1174) | function find( selector, context, results, seed ) {
  function markFunction (line 1305) | function markFunction( fn ) {
  function createInputPseudo (line 1314) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1324) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1335) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1390) | function createPositionalPseudo( fn ) {
  function setDocument (line 1412) | function setDocument( node ) {
  function setFilters (line 1940) | function setFilters() {}
  function addCombinator (line 1944) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2006) | function elementMatcher( matchers ) {
  function multipleContexts (line 2020) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2029) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2050) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2149) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2217) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function compile (line 2339) | function compile( selector, match /* Internal Use Only */ ) {
  function select (line 2380) | function select( selector, context, results, seed ) {
  function dir (line 2468) | function dir( elem, dir, until ) {
  function siblings (line 2483) | function siblings( n, elem ) {
  function isObviousHtml (line 2501) | function isObviousHtml( input ) {
  function winnow (line 2508) | function winnow( elements, qualifier, not ) {
  function sibling (line 2801) | function sibling( cur, dir ) {
  function fcamelCase (line 2894) | function fcamelCase( _all, letter ) {
  function camelCase (line 2899) | function camelCase( string ) {
  function acceptData (line 2906) | function acceptData( owner ) {
  function Data (line 2917) | function Data() {
  function getData (line 3085) | function getData( data ) {
  function dataAttr (line 3110) | function dataAttr( elem, key, data ) {
  function stripAndCollapse (line 3373) | function stripAndCollapse( value ) {
  function getClass (line 3378) | function getClass( elem ) {
  function classesToArray (line 3382) | function classesToArray( value ) {
  function returnTrue (line 3696) | function returnTrue() {
  function returnFalse (line 3700) | function returnFalse() {
  function on (line 3704) | function on( elem, types, selector, data, fn, one ) {
  function leverageNative (line 4189) | function leverageNative( el, type, isSetup ) {
  function focusMappedHandler (line 4418) | function focusMappedHandler( nativeEvent ) {
  function getAll (line 4780) | function getAll( context, tag ) {
  function setGlobalEval (line 4808) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 4823) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function disableScript (line 4909) | function disableScript( elem ) {
  function restoreScript (line 4913) | function restoreScript( elem ) {
  function domManip (line 4923) | function domManip( collection, args, callback, ignored ) {
  function manipulationTarget (line 5015) | function manipulationTarget( elem, content ) {
  function cloneCopyEvent (line 5025) | function cloneCopyEvent( src, dest ) {
  function remove (line 5049) | function remove( elem, selector, keepData ) {
  function isAutoPx (line 5424) | function isAutoPx( prop ) {
  functi
Condensed preview — 513 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,994K chars).
[
  {
    "path": ".dockerignore",
    "chars": 189,
    "preview": "// Ignore everything\n*\n\n// Allow what is needed\n!.git\n!docker/healthcheck.sh\n!docker/start.sh\n!macros\n!migrations\n!src\n\n"
  },
  {
    "path": ".editorconfig",
    "chars": 386,
    "preview": "# EditorConfig is awesome: https://EditorConfig.org\n\n# top-most EditorConfig file\nroot = true\n\n[*]\nend_of_line = lf\nchar"
  },
  {
    "path": ".gitattributes",
    "chars": 82,
    "preview": "# Ignore vendored scripts in GitHub stats\nsrc/static/scripts/* linguist-vendored\n\n"
  },
  {
    "path": ".github/CODEOWNERS",
    "chars": 241,
    "preview": "/.github @dani-garcia @BlackDex\n/.github/** @dani-garcia @BlackDex\n/.github/CODEOWNERS @dani-garcia @BlackDex\n/.github/I"
  },
  {
    "path": ".github/FUNDING.yml",
    "chars": 80,
    "preview": "github: dani-garcia\nliberapay: dani-garcia\ncustom: [\"https://paypal.me/DaniGG\"]\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yml",
    "chars": 6049,
    "preview": "name: Bug Report\ndescription: File a bug report\nlabels: [\"bug\"]\nbody:\n  #\n  - type: markdown\n    attributes:\n      value"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "chars": 409,
    "preview": "blank_issues_enabled: false\ncontact_links:\n  - name: GitHub Discussions for Vaultwarden\n    url: https://github.com/dani"
  },
  {
    "path": ".github/workflows/build.yml",
    "chars": 8648,
    "preview": "name: Build\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github"
  },
  {
    "path": ".github/workflows/check-templates.yml",
    "chars": 805,
    "preview": "name: Check templates\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_request.number"
  },
  {
    "path": ".github/workflows/hadolint.yml",
    "chars": 1976,
    "preview": "name: Hadolint\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_request.number || git"
  },
  {
    "path": ".github/workflows/release.yml",
    "chars": 15271,
    "preview": "name: Release\npermissions: {}\n\nconcurrency:\n  # Apply concurrency control only on the upstream repo\n  group: ${{ github."
  },
  {
    "path": ".github/workflows/releasecache-cleanup.yml",
    "chars": 862,
    "preview": "name: Cleanup\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}\n  cancel-in-progress: false\n\non:\n  workflow_"
  },
  {
    "path": ".github/workflows/trivy.yml",
    "chars": 1671,
    "preview": "name: Trivy\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github"
  },
  {
    "path": ".github/workflows/typos.yml",
    "chars": 733,
    "preview": "name: Code Spell Checking\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_request.nu"
  },
  {
    "path": ".github/workflows/zizmor.yml",
    "chars": 840,
    "preview": "name: Security Analysis with zizmor\npermissions: {}\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.event.pull_"
  },
  {
    "path": ".gitignore",
    "chars": 132,
    "preview": "# Local build artifacts\ntarget\n\n# Data folder\ndata\n\n# IDE files\n.vscode\n.idea\n*.iml\n\n# Environment file\n.env\n\n# Web vaul"
  },
  {
    "path": ".hadolint.yaml",
    "chars": 325,
    "preview": "ignored:\n  # To prevent issues and make clear some images only work on linux/amd64, we ignore this\n  - DL3029\n  # disabl"
  },
  {
    "path": ".pre-commit-config.yaml",
    "chars": 1819,
    "preview": "---\nrepos:\n-   repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # "
  },
  {
    "path": ".typos.toml",
    "chars": 951,
    "preview": "[files]\nextend-exclude = [\n    \".git/\",\n    \"playwright/\",\n    \"*.js\", # Ignore all JavaScript files\n    \"!admin*.js\", #"
  },
  {
    "path": "Cargo.toml",
    "chars": 10539,
    "preview": "[workspace.package]\nedition = \"2021\"\nrust-version = \"1.92.0\"\nlicense = \"AGPL-3.0-only\"\nrepository = \"https://github.com/"
  },
  {
    "path": "LICENSE.txt",
    "chars": 34523,
    "preview": "                    GNU AFFERO GENERAL PUBLIC LICENSE\n                       Version 3, 19 November 2007\n\n Copyright (C)"
  },
  {
    "path": "README.md",
    "chars": 11097,
    "preview": "![Vaultwarden Logo](./resources/vaultwarden-logo-auto.svg)\n\nAn alternative server implementation of the Bitwarden Client"
  },
  {
    "path": "SECURITY.md",
    "chars": 2981,
    "preview": "Vaultwarden tries to prevent security issues but there could always slip something through.\nIf you believe you've found "
  },
  {
    "path": "build.rs",
    "chars": 3611,
    "preview": "use std::env;\nuse std::process::Command;\n\nfn main() {\n    // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "
  },
  {
    "path": "diesel.toml",
    "chars": 138,
    "preview": "# For documentation on how to configure this file,\n# see diesel.rs/guides/configuring-diesel-cli\n\n[print_schema]\nfile = "
  },
  {
    "path": "docker/DockerSettings.yaml",
    "chars": 1532,
    "preview": "---\nvault_version: \"v2026.2.0\"\nvault_image_digest: \"sha256:37c8661fa59dcdfbd3baa8366b6e950ef292b15adfeff1f57812b075c1fd3"
  },
  {
    "path": "docker/Dockerfile.alpine",
    "chars": 6594,
    "preview": "# syntax=docker/dockerfile:1\n# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform\n\n# This file was gener"
  },
  {
    "path": "docker/Dockerfile.debian",
    "chars": 8270,
    "preview": "# syntax=docker/dockerfile:1\n# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform\n\n# This file was gener"
  },
  {
    "path": "docker/Dockerfile.j2",
    "chars": 9644,
    "preview": "# syntax=docker/dockerfile:1\n# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform\n\n# This file was gener"
  },
  {
    "path": "docker/Makefile",
    "chars": 165,
    "preview": "all:\n\t./render_template Dockerfile.j2 '{\"base\": \"debian\"}' > Dockerfile.debian\n\t./render_template Dockerfile.j2 '{\"base\""
  },
  {
    "path": "docker/README.md",
    "chars": 7941,
    "preview": "# Vaultwarden Container Building\n\nTo build and release new testing and stable releases of Vaultwarden we use `docker bui"
  },
  {
    "path": "docker/bake.sh",
    "chars": 617,
    "preview": "#!/usr/bin/env bash\n\n# Determine the basedir of this script.\n# It should be located in the same directory as the docker-"
  },
  {
    "path": "docker/bake_env.sh",
    "chars": 1035,
    "preview": "#!/usr/bin/env bash\n\n# If SOURCE_COMMIT is provided via env skip this\nif [ -z \"${SOURCE_COMMIT+x}\" ]; then\n    SOURCE_CO"
  },
  {
    "path": "docker/docker-bake.hcl",
    "chars": 7170,
    "preview": "// ==== Baking Variables ====\n\n// Set which cargo profile to use, dev or release for example\n// Use the value provided i"
  },
  {
    "path": "docker/healthcheck.sh",
    "chars": 2053,
    "preview": "#!/usr/bin/env sh\n\n# Use the value of the corresponding env var (if present),\n# or a default value otherwise.\n: \"${DATA_"
  },
  {
    "path": "docker/podman-bake.sh",
    "chars": 3365,
    "preview": "#!/usr/bin/env bash\n\n# Determine the basedir of this script.\n# It should be located in the same directory as the docker-"
  },
  {
    "path": "docker/render_template",
    "chars": 968,
    "preview": "#!/usr/bin/env python3\n\nimport os\nimport argparse\nimport json\nimport yaml\nimport jinja2\n\n# Load settings file\nwith open("
  },
  {
    "path": "docker/start.sh",
    "chars": 748,
    "preview": "#!/bin/sh\n\nif [ -n \"${UMASK}\" ]; then\n    umask \"${UMASK}\"\nfi\n\nif [ -r /etc/vaultwarden.sh ]; then\n    . /etc/vaultwarde"
  },
  {
    "path": "macros/Cargo.toml",
    "chars": 313,
    "preview": "[package]\nname = \"macros\"\nversion = \"0.1.0\"\nrepository.workspace = true\nedition.workspace = true\nrust-version.workspace "
  },
  {
    "path": "macros/src/lib.rs",
    "chars": 1623,
    "preview": "use proc_macro::TokenStream;\nuse quote::quote;\n\n#[proc_macro_derive(UuidFromParam)]\npub fn derive_uuid_from_param(input:"
  },
  {
    "path": "migrations/mysql/2018-01-14-171611_create_tables/down.sql",
    "chars": 105,
    "preview": "DROP TABLE users;\n\nDROP TABLE devices;\n\nDROP TABLE ciphers;\n\nDROP TABLE attachments;\n\nDROP TABLE folders;"
  },
  {
    "path": "migrations/mysql/2018-01-14-171611_create_tables/up.sql",
    "chars": 1943,
    "preview": "CREATE TABLE users (\n  uuid                CHAR(36) NOT NULL PRIMARY KEY,\n  created_at          DATETIME NOT NULL,\n  upd"
  },
  {
    "path": "migrations/mysql/2018-02-17-205753_create_collections_and_orgs/down.sql",
    "chars": 116,
    "preview": "DROP TABLE collections;\n\nDROP TABLE organizations;\n\n\nDROP TABLE users_collections;\n\nDROP TABLE users_organizations;\n"
  },
  {
    "path": "migrations/mysql/2018-02-17-205753_create_collections_and_orgs/up.sql",
    "chars": 877,
    "preview": "CREATE TABLE collections (\n  uuid     VARCHAR(40) NOT NULL PRIMARY KEY,\n  org_uuid VARCHAR(40) NOT NULL REFERENCES organ"
  },
  {
    "path": "migrations/mysql/2018-04-27-155151_create_users_ciphers/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2018-04-27-155151_create_users_ciphers/up.sql",
    "chars": 1287,
    "preview": "ALTER TABLE ciphers RENAME TO oldCiphers;\n\nCREATE TABLE ciphers (\n  uuid              CHAR(36) NOT NULL PRIMARY KEY,\n  c"
  },
  {
    "path": "migrations/mysql/2018-05-08-161616_create_collection_cipher_map/down.sql",
    "chars": 31,
    "preview": "DROP TABLE ciphers_collections;"
  },
  {
    "path": "migrations/mysql/2018-05-08-161616_create_collection_cipher_map/up.sql",
    "chars": 215,
    "preview": "CREATE TABLE ciphers_collections (\n  cipher_uuid       CHAR(36) NOT NULL REFERENCES ciphers (uuid),\n  collection_uuid CH"
  },
  {
    "path": "migrations/mysql/2018-05-25-232323_update_attachments_reference/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2018-05-25-232323_update_attachments_reference/up.sql",
    "chars": 409,
    "preview": "ALTER TABLE attachments RENAME TO oldAttachments;\n\nCREATE TABLE attachments (\n  id          CHAR(36) NOT NULL PRIMARY KE"
  },
  {
    "path": "migrations/mysql/2018-06-01-112529_update_devices_twofactor_remember/down.sql",
    "chars": 45,
    "preview": "-- This file should undo anything in `up.sql`"
  },
  {
    "path": "migrations/mysql/2018-06-01-112529_update_devices_twofactor_remember/up.sql",
    "chars": 63,
    "preview": "ALTER TABLE devices\n    ADD COLUMN\n    twofactor_remember TEXT;"
  },
  {
    "path": "migrations/mysql/2018-07-11-181453_create_u2f_twofactor/down.sql",
    "chars": 170,
    "preview": "UPDATE users\nSET totp_secret = (\n    SELECT twofactor.data FROM twofactor\n    WHERE twofactor.type = 0 \n    AND twofacto"
  },
  {
    "path": "migrations/mysql/2018-07-11-181453_create_u2f_twofactor/up.sql",
    "chars": 501,
    "preview": "CREATE TABLE twofactor (\n  uuid      CHAR(36) NOT NULL PRIMARY KEY,\n  user_uuid CHAR(36) NOT NULL REFERENCES users (uuid"
  },
  {
    "path": "migrations/mysql/2018-08-27-172114_update_ciphers/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2018-08-27-172114_update_ciphers/up.sql",
    "chars": 61,
    "preview": "ALTER TABLE ciphers\n    ADD COLUMN\n    password_history TEXT;"
  },
  {
    "path": "migrations/mysql/2018-09-10-111213_add_invites/down.sql",
    "chars": 23,
    "preview": "DROP TABLE invitations;"
  },
  {
    "path": "migrations/mysql/2018-09-10-111213_add_invites/up.sql",
    "chars": 76,
    "preview": "CREATE TABLE invitations (\n    email   VARCHAR(255) NOT NULL PRIMARY KEY\n);\n"
  },
  {
    "path": "migrations/mysql/2018-09-19-144557_add_kdf_columns/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2018-09-19-144557_add_kdf_columns/up.sql",
    "chars": 178,
    "preview": "ALTER TABLE users\n    ADD COLUMN\n    client_kdf_type INTEGER NOT NULL DEFAULT 0; -- PBKDF2\n\nALTER TABLE users\n    ADD CO"
  },
  {
    "path": "migrations/mysql/2018-11-27-152651_add_att_key_columns/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2018-11-27-152651_add_att_key_columns/up.sql",
    "chars": 55,
    "preview": "ALTER TABLE attachments\n    ADD COLUMN\n    `key` TEXT;\n"
  },
  {
    "path": "migrations/mysql/2019-05-26-216651_rename_key_and_type_columns/down.sql",
    "chars": 432,
    "preview": "ALTER TABLE attachments CHANGE COLUMN akey `key` TEXT;\nALTER TABLE ciphers CHANGE COLUMN atype type INTEGER NOT NULL;\nAL"
  },
  {
    "path": "migrations/mysql/2019-05-26-216651_rename_key_and_type_columns/up.sql",
    "chars": 432,
    "preview": "ALTER TABLE attachments CHANGE COLUMN `key` akey TEXT;\nALTER TABLE ciphers CHANGE COLUMN type atype INTEGER NOT NULL;\nAL"
  },
  {
    "path": "migrations/mysql/2019-10-10-083032_add_column_to_twofactor/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2019-10-10-083032_add_column_to_twofactor/up.sql",
    "chars": 70,
    "preview": "ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;"
  },
  {
    "path": "migrations/mysql/2019-11-17-011009_add_email_verification/down.sql",
    "chars": 1,
    "preview": "\n"
  },
  {
    "path": "migrations/mysql/2019-11-17-011009_add_email_verification/up.sql",
    "chars": 347,
    "preview": "ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;\nALTER TABLE users ADD COLUMN last_verifying_at DATETIME "
  },
  {
    "path": "migrations/mysql/2020-03-13-205045_add_policy_table/down.sql",
    "chars": 25,
    "preview": "DROP TABLE org_policies;\n"
  },
  {
    "path": "migrations/mysql/2020-03-13-205045_add_policy_table/up.sql",
    "chars": 258,
    "preview": "CREATE TABLE org_policies (\n  uuid      CHAR(36) NOT NULL PRIMARY KEY,\n  org_uuid  CHAR(36) NOT NULL REFERENCES organiza"
  },
  {
    "path": "migrations/mysql/2020-04-09-235005_add_cipher_delete_date/down.sql",
    "chars": 1,
    "preview": "\n"
  },
  {
    "path": "migrations/mysql/2020-04-09-235005_add_cipher_delete_date/up.sql",
    "chars": 60,
    "preview": "ALTER TABLE ciphers\n    ADD COLUMN\n    deleted_at DATETIME;\n"
  },
  {
    "path": "migrations/mysql/2020-07-01-214531_add_hide_passwords/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2020-07-01-214531_add_hide_passwords/up.sql",
    "chars": 88,
    "preview": "ALTER TABLE users_collections\nADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;\n"
  },
  {
    "path": "migrations/mysql/2020-08-02-025025_add_favorites_table/down.sql",
    "chars": 320,
    "preview": "ALTER TABLE ciphers\nADD COLUMN favorite BOOLEAN NOT NULL DEFAULT FALSE;\n\n-- Transfer favorite status for user-owned ciph"
  },
  {
    "path": "migrations/mysql/2020-08-02-025025_add_favorites_table/up.sql",
    "chars": 411,
    "preview": "CREATE TABLE favorites (\n  user_uuid   CHAR(36) NOT NULL REFERENCES users(uuid),\n  cipher_uuid CHAR(36) NOT NULL REFEREN"
  },
  {
    "path": "migrations/mysql/2020-11-30-224000_add_user_enabled/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2020-11-30-224000_add_user_enabled/up.sql",
    "chars": 65,
    "preview": "ALTER TABLE users ADD COLUMN enabled BOOLEAN NOT NULL DEFAULT 1;\n"
  },
  {
    "path": "migrations/mysql/2020-12-09-173101_add_stamp_exception/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2020-12-09-173101_add_stamp_exception/up.sql",
    "chars": 63,
    "preview": "ALTER TABLE users ADD COLUMN stamp_exception TEXT DEFAULT NULL;"
  },
  {
    "path": "migrations/mysql/2021-03-11-190243_add_sends/down.sql",
    "chars": 18,
    "preview": "DROP TABLE sends;\n"
  },
  {
    "path": "migrations/mysql/2021-03-11-190243_add_sends/up.sql",
    "chars": 729,
    "preview": "CREATE TABLE sends (\n  uuid              CHAR(36) NOT NULL   PRIMARY KEY,\n  user_uuid         CHAR(36)            REFERE"
  },
  {
    "path": "migrations/mysql/2021-04-30-233251_add_reprompt/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2021-04-30-233251_add_reprompt/up.sql",
    "chars": 49,
    "preview": "ALTER TABLE ciphers\nADD COLUMN reprompt INTEGER;\n"
  },
  {
    "path": "migrations/mysql/2021-05-11-205202_add_hide_email/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2021-05-11-205202_add_hide_email/up.sql",
    "chars": 49,
    "preview": "ALTER TABLE sends\nADD COLUMN hide_email BOOLEAN;\n"
  },
  {
    "path": "migrations/mysql/2021-07-01-203140_add_password_reset_keys/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2021-07-01-203140_add_password_reset_keys/up.sql",
    "chars": 114,
    "preview": "ALTER TABLE organizations\n  ADD COLUMN private_key TEXT;\n\nALTER TABLE organizations\n  ADD COLUMN public_key TEXT;\n"
  },
  {
    "path": "migrations/mysql/2021-08-30-193501_create_emergency_access/down.sql",
    "chars": 29,
    "preview": "DROP TABLE emergency_access;\n"
  },
  {
    "path": "migrations/mysql/2021-08-30-193501_create_emergency_access/up.sql",
    "chars": 616,
    "preview": "CREATE TABLE emergency_access (\n  uuid                      CHAR(36)     NOT NULL PRIMARY KEY,\n  grantor_uuid           "
  },
  {
    "path": "migrations/mysql/2021-10-24-164321_add_2fa_incomplete/down.sql",
    "chars": 33,
    "preview": "DROP TABLE twofactor_incomplete;\n"
  },
  {
    "path": "migrations/mysql/2021-10-24-164321_add_2fa_incomplete/up.sql",
    "chars": 267,
    "preview": "CREATE TABLE twofactor_incomplete (\n  user_uuid   CHAR(36) NOT NULL REFERENCES users(uuid),\n  device_uuid CHAR(36) NOT N"
  },
  {
    "path": "migrations/mysql/2022-01-17-234911_add_api_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2022-01-17-234911_add_api_key/up.sql",
    "chars": 51,
    "preview": "ALTER TABLE users\nADD COLUMN api_key VARCHAR(255);\n"
  },
  {
    "path": "migrations/mysql/2022-03-02-210038_update_devices_primary_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2022-03-02-210038_update_devices_primary_key/up.sql",
    "chars": 160,
    "preview": "-- First remove the previous primary key\nALTER TABLE devices DROP PRIMARY KEY;\n-- Add a new combined one\nALTER TABLE dev"
  },
  {
    "path": "migrations/mysql/2022-07-27-110000_add_group_support/down.sql",
    "chars": 76,
    "preview": "DROP TABLE `groups`;\nDROP TABLE groups_users;\nDROP TABLE collections_groups;"
  },
  {
    "path": "migrations/mysql/2022-07-27-110000_add_group_support/up.sql",
    "chars": 1076,
    "preview": "CREATE TABLE `groups` (\n  uuid                              CHAR(36) NOT NULL PRIMARY KEY,\n  organizations_uuid         "
  },
  {
    "path": "migrations/mysql/2022-10-18-170602_add_events/down.sql",
    "chars": 18,
    "preview": "DROP TABLE event;\n"
  },
  {
    "path": "migrations/mysql/2022-10-18-170602_add_events/up.sql",
    "chars": 579,
    "preview": "CREATE TABLE event (\n  uuid               CHAR(36)    NOT NULL PRIMARY KEY,\n  event_type         INTEGER     NOT NULL,\n "
  },
  {
    "path": "migrations/mysql/2023-01-06-151600_add_reset_password_support/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-01-06-151600_add_reset_password_support/up.sql",
    "chars": 68,
    "preview": "ALTER TABLE users_organizations\nADD COLUMN reset_password_key TEXT;\n"
  },
  {
    "path": "migrations/mysql/2023-01-11-205851_add_avatar_color/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-01-11-205851_add_avatar_color/up.sql",
    "chars": 54,
    "preview": "ALTER TABLE users\nADD COLUMN avatar_color VARCHAR(7);\n"
  },
  {
    "path": "migrations/mysql/2023-01-31-222222_add_argon2/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-01-31-222222_add_argon2/up.sql",
    "chars": 160,
    "preview": "ALTER TABLE users\n    ADD COLUMN\n    client_kdf_memory INTEGER DEFAULT NULL;\n\nALTER TABLE users\n    ADD COLUMN\n    clien"
  },
  {
    "path": "migrations/mysql/2023-02-18-125735_push_uuid_table/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-02-18-125735_push_uuid_table/up.sql",
    "chars": 46,
    "preview": "ALTER TABLE devices ADD COLUMN push_uuid TEXT;"
  },
  {
    "path": "migrations/mysql/2023-06-02-200424_create_organization_api_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-06-02-200424_create_organization_api_key/up.sql",
    "chars": 299,
    "preview": "CREATE TABLE organization_api_key (\n\tuuid\t\t\tCHAR(36) NOT NULL,\n\torg_uuid\t\tCHAR(36) NOT NULL REFERENCES organizations(uui"
  },
  {
    "path": "migrations/mysql/2023-06-17-200424_create_auth_requests_table/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-06-17-200424_create_auth_requests_table/up.sql",
    "chars": 718,
    "preview": "CREATE TABLE auth_requests (\n\tuuid            CHAR(36) NOT NULL PRIMARY KEY,\n\tuser_uuid\t    CHAR(36) NOT NULL,\n\torganiza"
  },
  {
    "path": "migrations/mysql/2023-06-28-133700_add_collection_external_id/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-06-28-133700_add_collection_external_id/up.sql",
    "chars": 53,
    "preview": "ALTER TABLE collections ADD COLUMN external_id TEXT;\n"
  },
  {
    "path": "migrations/mysql/2023-09-01-170620_update_auth_request_table/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-09-01-170620_update_auth_request_table/up.sql",
    "chars": 108,
    "preview": "ALTER TABLE auth_requests\nMODIFY master_password_hash TEXT;\n\nALTER TABLE auth_requests\nMODIFY enc_key TEXT;\n"
  },
  {
    "path": "migrations/mysql/2023-09-02-212336_move_user_external_id/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-09-02-212336_move_user_external_id/up.sql",
    "chars": 61,
    "preview": "ALTER TABLE users_organizations\nADD COLUMN external_id TEXT;\n"
  },
  {
    "path": "migrations/mysql/2023-09-10-133000_add_sso/down.sql",
    "chars": 22,
    "preview": "DROP TABLE sso_nonce;\n"
  },
  {
    "path": "migrations/mysql/2023-09-10-133000_add_sso/up.sql",
    "chars": 147,
    "preview": "CREATE TABLE sso_nonce (\n  nonce               CHAR(36) NOT NULL PRIMARY KEY,\n  created_at          DATETIME NOT NULL DE"
  },
  {
    "path": "migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql",
    "chars": 62,
    "preview": "ALTER TABLE users_organizations DROP COLUMN invited_by_email;\n"
  },
  {
    "path": "migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql",
    "chars": 79,
    "preview": "ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;\n"
  },
  {
    "path": "migrations/mysql/2023-10-21-221242_add_cipher_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2023-10-21-221242_add_cipher_key/up.sql",
    "chars": 43,
    "preview": "ALTER TABLE ciphers\nADD COLUMN `key` TEXT;\n"
  },
  {
    "path": "migrations/mysql/2024-01-12-210182_change_attachment_size/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2024-01-12-210182_change_attachment_size/up.sql",
    "chars": 58,
    "preview": "ALTER TABLE attachments MODIFY file_size BIGINT NOT NULL;\n"
  },
  {
    "path": "migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/up.sql",
    "chars": 56,
    "preview": "ALTER TABLE twofactor MODIFY last_used BIGINT NOT NULL;\n"
  },
  {
    "path": "migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql",
    "chars": 180,
    "preview": "DROP TABLE IF EXISTS sso_nonce;\n\nCREATE TABLE sso_nonce (\n  nonce               CHAR(36) NOT NULL PRIMARY KEY,\n  created"
  },
  {
    "path": "migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql",
    "chars": 244,
    "preview": "DROP TABLE IF EXISTS sso_nonce;\n\nCREATE TABLE sso_nonce (\n\tstate               VARCHAR(512) NOT NULL PRIMARY KEY,\n  \tnon"
  },
  {
    "path": "migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql",
    "chars": 255,
    "preview": "DROP TABLE IF EXISTS sso_nonce;\n\nCREATE TABLE sso_nonce (\n    state               VARCHAR(512) NOT NULL PRIMARY KEY,\n   "
  },
  {
    "path": "migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql",
    "chars": 277,
    "preview": "DROP TABLE IF EXISTS sso_nonce;\n\nCREATE TABLE sso_nonce (\n    state               VARCHAR(512) NOT NULL PRIMARY KEY,\n  \t"
  },
  {
    "path": "migrations/mysql/2024-03-06-170000_add_sso_users/down.sql",
    "chars": 32,
    "preview": "DROP TABLE IF EXISTS sso_users;\n"
  },
  {
    "path": "migrations/mysql/2024-03-06-170000_add_sso_users/up.sql",
    "chars": 238,
    "preview": "CREATE TABLE sso_users (\n  user_uuid           CHAR(36) NOT NULL PRIMARY KEY,\n  identifier          VARCHAR(768) NOT NUL"
  },
  {
    "path": "migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql",
    "chars": 678,
    "preview": "-- Dynamically create DROP FOREIGN KEY\n-- Some versions of MySQL or MariaDB might fail if the key doesn't exists\n-- This"
  },
  {
    "path": "migrations/mysql/2024-06-05-131359_add_2fa_duo_store/down.sql",
    "chars": 29,
    "preview": "DROP TABLE twofactor_duo_ctx;"
  },
  {
    "path": "migrations/mysql/2024-06-05-131359_add_2fa_duo_store/up.sql",
    "chars": 212,
    "preview": "CREATE TABLE twofactor_duo_ctx (\n    state      VARCHAR(64)  NOT NULL,\n    user_email VARCHAR(255) NOT NULL,\n    nonce  "
  },
  {
    "path": "migrations/mysql/2024-09-04-091351_use_device_type_for_mails/down.sql",
    "chars": 62,
    "preview": "ALTER TABLE `twofactor_incomplete` DROP COLUMN `device_type`;\n"
  },
  {
    "path": "migrations/mysql/2024-09-04-091351_use_device_type_for_mails/up.sql",
    "chars": 109,
    "preview": "ALTER TABLE twofactor_incomplete ADD COLUMN device_type INTEGER NOT NULL DEFAULT 14; -- 14 = Unknown Browser\n"
  },
  {
    "path": "migrations/mysql/2025-01-09-172300_add_manage/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/mysql/2025-01-09-172300_add_manage/up.sql",
    "chars": 162,
    "preview": "ALTER TABLE users_collections\nADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;\n\nALTER TABLE collections_groups\nADD COLU"
  },
  {
    "path": "migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql",
    "chars": 284,
    "preview": "DROP TABLE IF EXISTS sso_auth;\n\nCREATE TABLE sso_nonce (\n    state               VARCHAR(512) NOT NULL PRIMARY KEY,\n    "
  },
  {
    "path": "migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql",
    "chars": 411,
    "preview": "DROP TABLE IF EXISTS sso_nonce;\n\nCREATE TABLE sso_auth (\n    state               VARCHAR(512) NOT NULL PRIMARY KEY,\n    "
  },
  {
    "path": "migrations/postgresql/2019-09-12-100000_create_tables/down.sql",
    "chars": 320,
    "preview": "DROP TABLE devices;\nDROP TABLE attachments;\nDROP TABLE users_collections;\nDROP TABLE users_organizations;\nDROP TABLE fol"
  },
  {
    "path": "migrations/postgresql/2019-09-12-100000_create_tables/up.sql",
    "chars": 3752,
    "preview": "CREATE TABLE users (\n  uuid                CHAR(36) NOT NULL PRIMARY KEY,\n  created_at          TIMESTAMP NOT NULL,\n  up"
  },
  {
    "path": "migrations/postgresql/2019-09-16-150000_fix_attachments/down.sql",
    "chars": 1608,
    "preview": "ALTER TABLE attachments ALTER COLUMN id TYPE CHAR(36);\nALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE CHAR(36);\nA"
  },
  {
    "path": "migrations/postgresql/2019-09-16-150000_fix_attachments/up.sql",
    "chars": 1729,
    "preview": "-- Switch from CHAR() types to VARCHAR() types to avoid padding issues.\nALTER TABLE attachments ALTER COLUMN id TYPE TEX"
  },
  {
    "path": "migrations/postgresql/2019-10-10-083032_add_column_to_twofactor/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2019-10-10-083032_add_column_to_twofactor/up.sql",
    "chars": 70,
    "preview": "ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;"
  },
  {
    "path": "migrations/postgresql/2019-11-17-011009_add_email_verification/down.sql",
    "chars": 1,
    "preview": "\n"
  },
  {
    "path": "migrations/postgresql/2019-11-17-011009_add_email_verification/up.sql",
    "chars": 349,
    "preview": "ALTER TABLE users ADD COLUMN verified_at TIMESTAMP DEFAULT NULL;\nALTER TABLE users ADD COLUMN last_verifying_at TIMESTAM"
  },
  {
    "path": "migrations/postgresql/2020-03-13-205045_add_policy_table/down.sql",
    "chars": 25,
    "preview": "DROP TABLE org_policies;\n"
  },
  {
    "path": "migrations/postgresql/2020-03-13-205045_add_policy_table/up.sql",
    "chars": 260,
    "preview": "CREATE TABLE org_policies (\n  uuid      CHAR(36) NOT NULL PRIMARY KEY,\n  org_uuid  CHAR(36) NOT NULL REFERENCES organiza"
  },
  {
    "path": "migrations/postgresql/2020-04-09-235005_add_cipher_delete_date/down.sql",
    "chars": 1,
    "preview": "\n"
  },
  {
    "path": "migrations/postgresql/2020-04-09-235005_add_cipher_delete_date/up.sql",
    "chars": 61,
    "preview": "ALTER TABLE ciphers\n    ADD COLUMN\n    deleted_at TIMESTAMP;\n"
  },
  {
    "path": "migrations/postgresql/2020-07-01-214531_add_hide_passwords/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2020-07-01-214531_add_hide_passwords/up.sql",
    "chars": 88,
    "preview": "ALTER TABLE users_collections\nADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;\n"
  },
  {
    "path": "migrations/postgresql/2020-08-02-025025_add_favorites_table/down.sql",
    "chars": 320,
    "preview": "ALTER TABLE ciphers\nADD COLUMN favorite BOOLEAN NOT NULL DEFAULT FALSE;\n\n-- Transfer favorite status for user-owned ciph"
  },
  {
    "path": "migrations/postgresql/2020-08-02-025025_add_favorites_table/up.sql",
    "chars": 417,
    "preview": "CREATE TABLE favorites (\n  user_uuid   VARCHAR(40) NOT NULL REFERENCES users(uuid),\n  cipher_uuid VARCHAR(40) NOT NULL R"
  },
  {
    "path": "migrations/postgresql/2020-11-30-224000_add_user_enabled/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2020-11-30-224000_add_user_enabled/up.sql",
    "chars": 68,
    "preview": "ALTER TABLE users ADD COLUMN enabled BOOLEAN NOT NULL DEFAULT true;\n"
  },
  {
    "path": "migrations/postgresql/2020-12-09-173101_add_stamp_exception/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2020-12-09-173101_add_stamp_exception/up.sql",
    "chars": 63,
    "preview": "ALTER TABLE users ADD COLUMN stamp_exception TEXT DEFAULT NULL;"
  },
  {
    "path": "migrations/postgresql/2021-03-11-190243_add_sends/down.sql",
    "chars": 18,
    "preview": "DROP TABLE sends;\n"
  },
  {
    "path": "migrations/postgresql/2021-03-11-190243_add_sends/up.sql",
    "chars": 735,
    "preview": "CREATE TABLE sends (\n  uuid              CHAR(36) NOT NULL   PRIMARY KEY,\n  user_uuid         CHAR(36)            REFERE"
  },
  {
    "path": "migrations/postgresql/2021-03-15-163412_rename_send_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2021-03-15-163412_rename_send_key/up.sql",
    "chars": 45,
    "preview": "ALTER TABLE sends RENAME COLUMN key TO akey;\n"
  },
  {
    "path": "migrations/postgresql/2021-04-30-233251_add_reprompt/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2021-04-30-233251_add_reprompt/up.sql",
    "chars": 49,
    "preview": "ALTER TABLE ciphers\nADD COLUMN reprompt INTEGER;\n"
  },
  {
    "path": "migrations/postgresql/2021-05-11-205202_add_hide_email/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2021-05-11-205202_add_hide_email/up.sql",
    "chars": 49,
    "preview": "ALTER TABLE sends\nADD COLUMN hide_email BOOLEAN;\n"
  },
  {
    "path": "migrations/postgresql/2021-07-01-203140_add_password_reset_keys/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2021-07-01-203140_add_password_reset_keys/up.sql",
    "chars": 114,
    "preview": "ALTER TABLE organizations\n  ADD COLUMN private_key TEXT;\n\nALTER TABLE organizations\n  ADD COLUMN public_key TEXT;\n"
  },
  {
    "path": "migrations/postgresql/2021-08-30-193501_create_emergency_access/down.sql",
    "chars": 29,
    "preview": "DROP TABLE emergency_access;\n"
  },
  {
    "path": "migrations/postgresql/2021-08-30-193501_create_emergency_access/up.sql",
    "chars": 620,
    "preview": "CREATE TABLE emergency_access (\n  uuid                      CHAR(36)     NOT NULL PRIMARY KEY,\n  grantor_uuid           "
  },
  {
    "path": "migrations/postgresql/2021-10-24-164321_add_2fa_incomplete/down.sql",
    "chars": 33,
    "preview": "DROP TABLE twofactor_incomplete;\n"
  },
  {
    "path": "migrations/postgresql/2021-10-24-164321_add_2fa_incomplete/up.sql",
    "chars": 282,
    "preview": "CREATE TABLE twofactor_incomplete (\n  user_uuid   VARCHAR(40) NOT NULL REFERENCES users(uuid),\n  device_uuid VARCHAR(40)"
  },
  {
    "path": "migrations/postgresql/2022-01-17-234911_add_api_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2022-01-17-234911_add_api_key/up.sql",
    "chars": 43,
    "preview": "ALTER TABLE users\nADD COLUMN api_key TEXT;\n"
  },
  {
    "path": "migrations/postgresql/2022-03-02-210038_update_devices_primary_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2022-03-02-210038_update_devices_primary_key/up.sql",
    "chars": 172,
    "preview": "-- First remove the previous primary key\nALTER TABLE devices DROP CONSTRAINT devices_pkey;\n-- Add a new combined one\nALT"
  },
  {
    "path": "migrations/postgresql/2022-07-27-110000_add_group_support/down.sql",
    "chars": 74,
    "preview": "DROP TABLE groups;\nDROP TABLE groups_users;\nDROP TABLE collections_groups;"
  },
  {
    "path": "migrations/postgresql/2022-07-27-110000_add_group_support/up.sql",
    "chars": 1083,
    "preview": "CREATE TABLE groups (\n  uuid                              CHAR(36) NOT NULL PRIMARY KEY,\n  organizations_uuid           "
  },
  {
    "path": "migrations/postgresql/2022-10-18-170602_add_events/down.sql",
    "chars": 18,
    "preview": "DROP TABLE event;\n"
  },
  {
    "path": "migrations/postgresql/2022-10-18-170602_add_events/up.sql",
    "chars": 584,
    "preview": "CREATE TABLE event (\n  uuid               CHAR(36)        NOT NULL PRIMARY KEY,\n  event_type         INTEGER     NOT NUL"
  },
  {
    "path": "migrations/postgresql/2023-01-06-151600_add_reset_password_support/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-01-06-151600_add_reset_password_support/up.sql",
    "chars": 68,
    "preview": "ALTER TABLE users_organizations\nADD COLUMN reset_password_key TEXT;\n"
  },
  {
    "path": "migrations/postgresql/2023-01-11-205851_add_avatar_color/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-01-11-205851_add_avatar_color/up.sql",
    "chars": 48,
    "preview": "ALTER TABLE users\nADD COLUMN avatar_color TEXT;\n"
  },
  {
    "path": "migrations/postgresql/2023-01-31-222222_add_argon2/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-01-31-222222_add_argon2/up.sql",
    "chars": 160,
    "preview": "ALTER TABLE users\n    ADD COLUMN\n    client_kdf_memory INTEGER DEFAULT NULL;\n\nALTER TABLE users\n    ADD COLUMN\n    clien"
  },
  {
    "path": "migrations/postgresql/2023-02-18-125735_push_uuid_table/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-02-18-125735_push_uuid_table/up.sql",
    "chars": 46,
    "preview": "ALTER TABLE devices ADD COLUMN push_uuid TEXT;"
  },
  {
    "path": "migrations/postgresql/2023-06-02-200424_create_organization_api_key/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-06-02-200424_create_organization_api_key/up.sql",
    "chars": 291,
    "preview": "CREATE TABLE organization_api_key (\n\tuuid\t\t\tCHAR(36) NOT NULL,\n\torg_uuid\t\tCHAR(36) NOT NULL REFERENCES organizations(uui"
  },
  {
    "path": "migrations/postgresql/2023-06-17-200424_create_auth_requests_table/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-06-17-200424_create_auth_requests_table/up.sql",
    "chars": 721,
    "preview": "CREATE TABLE auth_requests (\n\tuuid            CHAR(36) NOT NULL PRIMARY KEY,\n\tuser_uuid\t    CHAR(36) NOT NULL,\n\torganiza"
  },
  {
    "path": "migrations/postgresql/2023-06-28-133700_add_collection_external_id/down.sql",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "migrations/postgresql/2023-06-28-133700_add_collection_external_id/up.sql",
    "chars": 53,
    "preview": "ALTER TABLE collections ADD COLUMN external_id TEXT;\n"
  }
]

// ... and 313 more files (download for full content)

About this extraction

This page contains the full source code of the dani-garcia/vaultwarden GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 513 files (2.7 MB), approximately 729.1k tokens, and a symbol index with 2375 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!