Repository: upsuper/rust-cheatsheet Branch: master Commit: 143099fe9de2 Files: 21 Total size: 96.0 KB Directory structure: gitextract_uk14uits/ ├── .github/ │ ├── FUNDING.yml │ └── workflows/ │ ├── check.yml │ └── deploy.yml ├── .gitignore ├── Cargo.toml ├── LICENSE ├── build_pages.sh ├── data/ │ ├── chrono.yml │ ├── fs.yml │ └── index.yml ├── src/ │ ├── input.rs │ ├── macros.rs │ ├── main.rs │ ├── page_gen.rs │ ├── parser.rs │ ├── template.html │ └── token.rs └── static/ ├── script.js ├── style.css ├── theme-dark.css └── theme-light.css ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/FUNDING.yml ================================================ github: [upsuper] ================================================ FILE: .github/workflows/check.yml ================================================ on: [push, pull_request] name: Check jobs: format: name: Format runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v2 - name: Install stable toolchain uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true components: rustfmt - name: Run cargo fmt uses: actions-rs/cargo@v1 with: command: fmt args: --all -- --check clippy: name: Clippy runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v2 - name: Install stable toolchain id: toolchain uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true components: clippy - name: Cache Cargo uses: actions/cache@v2 with: path: | ~/.cargo/registry ~/.cargo/git key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} restore-keys: | ${{ runner.os }}-cargo- - name: Cache target uses: actions/cache@v2 with: path: target key: ${{ runner.os }}-clippy-${{ steps.toolchain.outputs.rustc_hash }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} restore-keys: | ${{ runner.os }}-clippy-${{ 
steps.toolchain.outputs.rustc_hash }}-${{ hashFiles('**/Cargo.lock') }}- ${{ runner.os }}-clippy-${{ steps.toolchain.outputs.rustc_hash }}- - name: Run cargo clippy uses: actions-rs/cargo@v1 with: command: clippy args: -- -D warnings ================================================ FILE: .github/workflows/deploy.yml ================================================ on: push: branches: - master name: Deploy jobs: deploy: name: Deploy runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v2 - name: Install stable toolchain id: toolchain uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - name: Cache Cargo uses: actions/cache@v2 with: path: | ~/.cargo/registry ~/.cargo/git key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} restore-keys: | ${{ runner.os }}-cargo- - name: Cache target uses: actions/cache@v2 with: path: target key: ${{ runner.os }}-build-${{ steps.toolchain.outputs.rustc_hash }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('**/*.rs') }} restore-keys: | ${{ runner.os }}-build-${{ steps.toolchain.outputs.rustc_hash }}-${{ hashFiles('**/Cargo.lock') }}- ${{ runner.os }}-build-${{ steps.toolchain.outputs.rustc_hash }}- - name: Build pages shell: bash run: ./build_pages.sh - name: Deploy uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./out ================================================ FILE: .gitignore ================================================ /.idea /out /target **/*.rs.bk ================================================ FILE: Cargo.toml ================================================ [package] name = "cheatsheet-gen" version = "0.1.0" authors = ["Xidorn Quan "] edition = "2018" publish = false [dependencies] bitflags = "1.0.4" combine = "4.0.1" either_n = "0.2.0" lazy_static = "1.3.0" serde_yaml = "0.8.9" v_htmlescape = "0.13.1" [dependencies.serde] version = "1.0.90" features = ["derive"] [dev-dependencies] pretty_assertions 
= "1" ================================================ FILE: LICENSE ================================================ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: build_pages.sh ================================================ #!/usr/bin/env bash set -xe mkdir out cp static/* out for f in data/*.yml; do cargo run -- "${f}" "out/$(basename "${f}" .yml).html" done ================================================ FILE: data/chrono.yml ================================================ title: Chrono base: chrono: "https://docs.rs/chrono/0.4.6/chrono/" time: "https://docs.rs/time/0.1.42/time/" main: # DateTime & NaiveDateTime - - type: "DateTime" constraints: "where Tz: TimeZone" impls: - "Datelike" - "Timelike" - "Sub => Output = Duration" - "Add => Output = Self" - "Sub => Output = Self" - "Add => Output = Self" - "Sub => Output = Self" groups: - items: - "::from_utc (NaiveDateTime, Tz::Offset) -> Self" - name: "Checked calculation" items: - "checked_add_signed (Duration) -> Option" - "checked_sub_signed (Duration) -> Option" - "signed_duration_since (DateTime) -> Duration" - type: "DateTime" groups: - name: "Parse" items: - "::parse_from_rfc2822 (&str) -> ParseResult" - "::parse_from_rfc3339 (&str) -> ParseResult" - "::parse_from_str (&str, &str) -> ParseResult" - type: "&DateTime" constraints: "where Tz: TimeZone" groups: - name: "Data & time" items: - "date () -> Date" - "time () -> NaiveTime" - name: "Timestamp" items: - "timestamp () -> i64" - "timestamp_millis () -> i64" - "timestamp_nanos () -> i64" - name: "Timestamp sub-second part" items: - "timestamp_subsec_millis () -> u32" - "timestamp_subsec_micros () -> u32" - "timestamp_subsec_nanos () -> u32" - name: "Timezone" items: - "offset () -> &Tz::Offset" - "timezone () -> Tz" - "with_timezone (&Tz2) -> DateTime" - name: "To NaiveDateTime" items: - "naive_utc () -> NaiveDateTime" - "naive_local () -> NaiveDateTime" - type: "&DateTime" constraints: |- where Tz: TimeZone, Tz::Offset: Display groups: - items: - "format (&str) -> Display" - "to_rfc2822 () -> String" - "to_rfc3339 () -> String" - "to_rfc3339_opts 
(SecondsFormat, bool) -> String" - type: "NaiveDateTime" impls: - "Datelike" - "Timelike" - "AddAssign" - "SubAssign" - "Add => Output = Self" - "Sub => Output = Self" - "Add => Output = Self" - "Sub => Output = Self" groups: - name: "Construct" items: - "::new (NaiveDate, NaiveTime) -> Self" - "::from_timestamp (i64, u32) -> Self" - "::from_timestamp_opt (i64, u32) -> Self" - "::parse_from_str (&str, &str) -> ParseResult" - name: "Checked calculation" items: - "checked_add_signed (Duration) -> Option" - "checked_sub_signed (Duration) -> Option" - "signed_duration_since (NaiveDateTime) -> Duration" - type: "&NaiveDateTime" groups: - name: "Date & time" items: - "date () -> NaiveDate" - "time () -> NaiveTime" - name: "Timestamp" items: - "timestamp () -> i64" - "timestamp_millis () -> i64" - "timestamp_nanos () -> i64" - name: "Timestamp sub-second part" items: - "timestamp_subsec_millis () -> u32" - "timestamp_subsec_micros () -> u32" - "timestamp_subsec_nanos () -> u32" - name: "Format" items: - "format (&str) -> Display" # Date & NaiveDate - - type: "Date" constraints: "where Tz: TimeZone" impls: - "Datelike" - "Sub => Output = Duration" - "Add => Output = Self" - "Sub => Output = Self" groups: - items: - "::from_utc (NaiveDate, Tz::Offset) -> Self" - name: "Checked calculation" items: - "checked_add_signed (Duration) -> Option" - "checked_sub_signed (Duration) -> Option" - "signed_duration_since (Date) -> Duration" - type: "&Date" constraints: "where Tz: TimeZone" groups: - name: "To DateTime (panic when invalid)" items: - "and_hms (u32, u32, u32) -> DateTime" - "and_hms_milli (u32, u32, u32, u32) -> DateTime" - "and_hms_micro (u32, u32, u32, u32) -> DateTime" - "and_hms_nano (u32, u32, u32, u32) -> DateTime" - name: "To DateTime (None when invalid)" items: - "and_time (NaiveTime) -> Option>" - "and_hms_opt (u32, u32, u32) -> Option>" - "and_hms_milli_opt (u32, u32, u32, u32) -> Option>" - "and_hms_micro_opt (u32, u32, u32, u32) -> Option>" - "and_hms_nano_opt 
(u32, u32, u32, u32) -> Option>" - name: "Next / previous date" items: - "succ () -> Self" - "pred () -> Self" - "succ_opt () -> Option" - "pred_opt () -> Option" - name: "Timezone" items: - "offset () -> &Tz::Offset" - "timezone () -> Tz" - "with_timezone (&Tz2) -> Date" - name: "To NaiveDate" items: - "naive_utc () -> NaiveDate" - "naive_local () -> NaiveDate" - name: "Format" items: - "format (&str) -> Display where Tz::Offset: Display" - type: "NaiveDate" impls: - "Datelike" - "AddAssign" - "SubAssign" - "Sub => Output = Duration" - "Add => Output = Self" - "Sub => Output = Self" groups: - name: "Construct (panic when invalid)" items: - "::from_ymd (i32, u32, u32) -> Self" - "::from_yo (i32, u32) -> Self" - "::from_isoywd (i32, u32, Weekday) -> Self" - "::from_num_days_from_ce (i32) -> Self" - name: "Construct (None when invalid)" items: - "::from_ymd_opt (i32, u32, u32) -> Option" - "::from_yo_opt (i32, u32) -> Option" - "::from_isoywd_opt (i32, u32, Weekday) -> Option" - "::from_num_days_from_ce_opt (i32) -> Option" - name: "Parse" items: - "::parse_from_str (&str, &str) -> ParseResult" - name: "Checked calculation" items: - "checked_add_signed (Duration) -> Option" - "checked_sub_signed (Duration) -> Option" - "signed_duration_since (Self) -> Duration" - type: "&NaiveDate" groups: - name: "To NaiveDateTime" items: - "and_time (NaiveTime) -> NaiveDateTime" - name: "To NaiveDateTime (panic when invalid)" items: - "and_hms (u32, u32, u32) -> NaiveDateTime" - "and_hms_milli (u32, u32, u32, u32) -> NaiveDateTime" - "and_hms_micro (u32, u32, u32, u32) -> NaiveDateTime" - "and_hms_nano (u32, u32, u32, u32) -> NaiveDateTime" - name: "To NaiveDateTime (Option when invalid)" items: - "and_hms_opt (u32, u32, u32) -> Option" - "and_hms_milli_opt (u32, u32, u32, u32) -> Option" - "and_hms_micro_opt (u32, u32, u32, u32) -> Option" - "and_hms_nano_opt (u32, u32, u32, u32) -> Option" - name: "Next / previous date" items: - "succ () -> Self" - "pred () -> Self" - "succ_opt 
() -> Option" - "pred_opt () -> Option" - name: "Format" items: - "format (&str) -> Display" # NaiveTime & Duration - - type: "NaiveTime" impls: - "Timelike" - "AddAssign" - "SubAssign" - "Sub => Output = Duration" - "Add => Output = Self" - "Sub => Output = Self" - "Add => Output = Self" - "Sub => Output = Self" groups: - name: "Construct (panic when invalid)" items: - "::from_hms (u32, u32, u32) -> Self" - "::from_hms_milli (u32, u32, u32, u32) -> Self" - "::from_hms_micro (u32, u32, u32, u32) -> Self" - "::from_hms_nano (u32, u32, u32, u32) -> Self" - "::from_num_seconds_from_midnight (u32, u32) -> Self" - name: "Construct (None when invalid)" items: - "::from_hms_opt (u32, u32, u32) -> Option" - "::from_hms_milli_opt (u32, u32, u32, u32) -> Option" - "::from_hms_micro_opt (u32, u32, u32, u32) -> Option" - "::from_hms_nano_opt (u32, u32, u32, u32) -> Option" - "::from_num_seconds_from_midnight_opt (u32, u32) -> Option" - name: "Parse" items: - "::parse_from_str (&str, &str) -> ParseResult" - name: "Calculation" items: - "signed_duration_since (Self) -> Duration" - type: "&NaiveTime" groups: - name: "Overflowing calculation" items: - "overflowing_add_signed (Duration) -> (Self, i64)" - "overflowing_sub_signed (Duration) -> (Self, i64)" - name: "Format" items: - "format (&str) -> Display" - type: "Duration" impls: - "Neg => Output = Self" - "Mul => Output = Self" - "Div => Output = Self" - "Add => Output = Self" - "Sub => Output = Self" groups: - name: "Construct from numbers" items: - "::weeks (i64) -> Self" - "::days (i64) -> Self" - "::hours (i64) -> Self" - "::minutes (i64) -> Self" - "::seconds (i64) -> Self" - "::milliseconds (i64) -> Self" - "::microseconds (i64) -> Self" - "::nanoseconds (i64) -> Self" - name: "Measure duration" items: - "::span (() -> ()) -> Self" - name: "Special values" items: - "::zero () -> Self" - "::min_value () -> Self" - "::max_value () -> Self" - name: "From Duration in std" items: - "::from_std (StdDuration) -> Result" - type: 
"&Duration" groups: - name: "Numbers" items: - "num_weeks () -> i64" - "num_days () -> i64" - "num_hours () -> i64" - "num_minutes () -> i64" - "num_seconds () -> i64" - "num_milliseconds () -> i64" - "num_microseconds () -> Option" - "num_nanoseconds () -> Option" - name: "Checked calculation" items: - "checked_add (&Self) -> Option" - "checked_sub (&Self) -> Option" - items: - "is_zero () -> bool" - "to_std () -> Result" # Datelike & Timelike - - type: "&Datelike" groups: - name: "Date numbers" items: - "year () -> i32" - "month () -> u32" - "day () -> u32" - "ordinal () -> u32" - name: "Date numbers (zero-based)" items: - "month0 () -> u32" - "day0 () -> u32" - "ordinal0 () -> u32" - name: "Week" items: - "weekday () -> Weekday" - "iso_week () -> IsoWeek" - name: "Construct with different date number" items: - "with_year (i32) -> Option" - "with_month (u32) -> Option" - "with_day (u32) -> Option" - "with_ordinal (u32) -> Option" - name: "Construct with different date number (zero-based)" items: - "with_month0 (u32) -> Option" - "with_day0 (u32) -> Option" - "with_ordinal0 (u32) -> Option" - name: "Common era" items: - "year_ce () -> (bool, u32)" - "num_days_from_ce () -> i32" - type: "&Timelike" groups: - name: "Time numbers" items: - "hour () -> u32" - "minute () -> u32" - "second () -> u32" - "nanosecond () -> u32" - name: "Construct with different numbers" items: - "with_hour (u32) -> Option" - "with_minute (u32) -> Option" - "with_second (u32) -> Option" - "with_nanosecond (u32) -> Option" - items: - "hour12 () -> (bool, u32)" - "num_seconds_from_midnight () -> u32" trait_impls: - pat: "TimeZone" impls: - "Utc" - "Local" - "FixedOffset" references: - kind: enum names: - "chrono::Weekday" - "chrono::SecondsFormat" - "chrono::format::Item" - "std::option::Option" - "std::result::Result" - kind: struct names: - "chrono::Date" - "chrono::DateTime" - "chrono::Duration" - "chrono::naive::IsoWeek" - "chrono::naive::NaiveDate" - "chrono::naive::NaiveDateTime" - 
"chrono::naive::NaiveTime" - "chrono::offset::FixedOffset" - "chrono::offset::Local" - "chrono::offset::Utc" - "std::string::String" - "time::OutOfRangeError" aliases: StdDuration: "std::time::Duration" - kind: trait names: - "chrono::Datelike" - "chrono::Timelike" - "chrono::offset::TimeZone" - "std::clone::Clone" - "std::fmt::Display" - "std::iter::Iterator" - "std::ops::Add" - "std::ops::AddAssign" - "std::ops::Div" - "std::ops::Mul" - "std::ops::Neg" - "std::ops::Sub" - "std::ops::SubAssign" - kind: type names: - "chrono::format::ParseResult" ================================================ FILE: data/fs.yml ================================================ title: Filesystem base: fs2: "https://docs.rs/fs2/0.4.3/fs2/" main: # fs & fs2 - - mod: "fs" path: "std::fs" groups: - name: "Read & write" items: - "read (AsRef) -> Result>" - "read_to_string (AsRef) -> Result" - "write (AsRef, &str) -> Result<()>" - name: "Directory" items: - "read_dir (AsRef) -> Result>>" - "create_dir (AsRef) -> Result<()>" - "create_dir_all (AsRef) -> Result<()>" - "remove_dir (AsRef) -> Result<()>" - "remove_dir_all (AsRef) -> Result<()>" - name: "File operation" items: - "copy (AsRef, AsRef) -> Result" - "rename (AsRef, AsRef) -> Result<()>" - "remove_file (AsRef) -> Result<()>" - name: "Metadata" items: - "metadata (AsRef) -> Result" - "symlink_metadata (AsRef) -> Result" - "set_permissions (AsRef, Permissions) -> Result<()>" - name: "Link" items: - "canonicalize (AsRef) -> Result" - "hard_link (AsRef, AsRef) -> Result<()>" - "read_link (AsRef) -> Result" - mod: "fs2" path: "fs2" groups: - name: "Filesystem info" items: - "available_space (AsRef) -> Result" - "free_space (AsRef) -> Result" - "total_space (AsRef) -> Result" - "allocation_granularity (AsRef) -> Result" - "statvfs (AsRef) -> Result" # Path - - type: "&Path" groups: - name: "Type conversion and display" items: - "as_os_str () -> &OsStr" - "to_path_buf () -> PathBuf" - "to_str () -> Option<&str>" - "to_string_lossy () -> 
Cow" - "display () -> Display" - name: "Path type" items: - "has_root () -> bool" - "is_absolute () -> bool" - "is_relative () -> bool" - name: "Filename in path" items: - "file_name () -> Option<&OsStr>" - "file_stem () -> Option<&OsStr>" - "extension () -> Option<&OsStr>" - name: "Components of path" items: - "iter () -> Iterator" - "components () -> Iterator" - name: "Ancestors" items: - "parent () -> Option<&Path>" - "ancestors () -> Iterator" - name: "Prefix / suffix" items: # TODO correctly use std::result::Result instead - "strip_prefix () -> Result<&Path, StripPrefixError>" - "starts_with (AsRef) -> bool" - "ends_with (AsRef) -> bool" - name: "Construct new path" items: - "join (AsRef) -> PathBuf" - "with_file_name (AsRef) -> PathBuf" - "with_extension (AsRef) -> PathBuf" - name: "Property of path target" items: - "exists () -> bool" - "is_file () -> bool" - "is_dir () -> bool" - name: "Metadata" items: - "metadata () -> Result" - "symlink_metadata () -> Result" - name: "Misc" items: - "read_dir () -> Result>>" - "read_link () -> Result" - "canonicalize () -> Result" - type: "PathBuf" groups: - items: - "::new () -> PathBuf" - "into_os_string () -> OsString" - "into_boxed_path () -> Box" - type: "&mut PathBuf" groups: - items: - "push (AsRef)" - "pop () -> bool" - "set_file_name (AsRef)" - "set_extension (AsRef) -> bool" # File & fs2::FileExt - - type: "File" impls: - "Read" - "Write" - "Seek" - "FileExt" groups: - name: "Open file" items: - "::open (AsRef) -> Result" - "::create (AsRef) -> Result" - type: "&File" impls: - "Read" - "Write" - "Seek" groups: - name: "Syncing" items: - "sync_all () -> Result<()>" - "sync_data () -> Result<()>" - name: "Metadata" items: - "metadata () -> Result" - "set_permissions (Permissions) -> Result<()>" - name: "Other" items: - "set_len (u64) -> Result<()>" - "try_clone () -> Result" - type: "&FileExt" groups: - name: "Allocation" items: - "allocate (u64) -> Result<()>" - "allocated_size () -> Result" - name: "Lock" 
items: - "unlock () -> Result<()>" - "lock_shared () -> Result<()>" - "lock_exclusive () -> Result<()>" - "try_lock_shared () -> Result<()>" - "try_lock_exclusive () -> Result<()>" # Metadata - - type: "&Metadata" groups: - name: "File type" items: - "is_dir () -> bool" - "is_file () -> bool" - "file_type () -> FileType" - name: "Time" items: - "created () -> Result" - "modified () -> Result" - "accessed () -> Result" - name: "Misc" items: - "len () -> u64" - "permissions () -> Permissions" trait_impls: - pat: "AsRef" impls: - "&str" - "&Path" - "&OsStr" - pat: "AsRef" impls: - "&str" - "&OsStr" - "&Path" references: - kind: trait names: - "fs2::FileExt" - "std::convert::AsRef" - "std::fmt::Display" - "std::io::Read" - "std::io::Seek" - "std::io::Write" - "std::iter::Iterator" - kind: enum names: - "std::borrow::Cow" - "std::io::SeekFrom" - "std::option::Option" - "std::path::Component" - kind: struct names: - "fs2::FsStats" - "std::boxed::Box" - "std::ffi::OsStr" - "std::ffi::OsString" - "std::fs::DirEntry" - "std::fs::File" - "std::fs::FileType" - "std::fs::Metadata" - "std::fs::Permissions" - "std::path::Path" - "std::path::PathBuf" - "std::path::StripPrefixError" - "std::string::String" - "std::time::SystemTime" - "std::vec::Vec" - kind: type names: - "std::io::Result" ================================================ FILE: data/index.yml ================================================ title: Basics main: # Option & Result - - type: "Option" groups: - name: "To inner type" items: - "unwrap () -> T" - "unwrap_or (T) -> T" - "unwrap_or_else (() -> T) -> T" - "unwrap_or_default () -> T where T: Default" - "expect (&str) -> T" - name: "Converting to another type" items: - "map ((T) -> U) -> Option" - "map_or (U, (T) -> U) -> U" - "map_or_else (() -> U, (T) -> U) -> U" - name: "To Result" items: - "ok_or (E) -> Result" - "ok_or_else (() -> E) -> Result" - name: "Conditioning" items: - "filter ((&T) -> bool) -> Option" - "and (Option) -> Option" - "and_then ((T) -> 
Option) -> Option" - "or (Option) -> Option" - "or_else (() -> Option) -> Option" - "xor (Option) -> Option" - type: "Option<&T>" groups: - name: "Cloning inner" items: - "cloned () -> Option where T: Clone" - "copied () -> Option where T: Copy" - type: "Option>" groups: - items: - "flatten () -> Option" - type: "Option>" groups: - items: - "transpose () -> Result, E>" - type: "&Option" groups: - name: "Checking inner" items: - "is_some () -> bool" - "is_none () -> bool" - name: "To inner reference" items: - "as_ref () -> Option<&T>" - "iter () -> Iterator<&T>" - |- as_deref () -> Option<&U> where T: Deref - type: "&mut Option" groups: - name: "To inner mutable reference" items: - "as_mut () -> Option<&mut T>" - "iter_mut () -> Iterator<&mut T>" - |- as_deref_mut () -> Option<&mut U> where T: DerefMut + Deref - name: "Mutation" items: - "take () -> Option" - "replace (T) -> Option" - "insert (T) -> &mut T" - "get_or_insert (T) -> &mut T" - "get_or_insert_with (() -> T) -> &mut T" - type: "Result" groups: - name: "To inner type" items: - "unwrap () -> T where E: Debug" - "unwrap_err () -> E where T: Debug" - "unwrap_or (T) -> T" - "unwrap_or_else ((E) -> T) -> T" - "unwrap_or_default () -> T where T: Default" - "expect (&str) -> T" - "expect_err (&str) -> E" - "ok () -> Option" - "err () -> Option" - name: "Mapping" items: - "map ((T) -> U) -> Result" - "map_err ((E) -> F) -> Result" - "map_or (U, (T) -> U) -> U" - "map_or_else ((E) -> U, (T) -> U) -> U" - name: "Conditioning" items: - "and (Result) -> Result" - "and_then ((T) -> Result) -> Result" - "or (Result) -> Result" - "or_else ((E) -> Result) -> Result" - type: "Result, E>" groups: - name: "Transposing" items: - "transpose () -> Option>" - type: "&Result" groups: - name: "Checking inner" items: - "is_ok () -> bool" - "is_err () -> bool" - name: "To inner reference" items: - "as_ref () -> Result<&T, &E>" - "iter () -> Iterator" - type: "&mut Result" groups: - name: "To inner mutable reference" items: - 
"as_mut () -> Result<&mut T, &mut E>" - "iter_mut () -> Iterator" # Iterator - - type: "Iterator" groups: - name: "Mapping and filtering" items: - "map (( T) -> U) -> Iterator" - "filter ((&T) -> bool) -> Iterator" - "filter_map (( T) -> Option) -> Iterator" - name: "Collecting and folding" items: - "fold (S, (S, T) -> S) -> S" - "collect () -> B where B: FromIterator" - "partition ((&T) -> bool) -> (B, B) where B: Default + Extend" - name: "Counting and enumerating" items: - "count () -> usize" - "last () -> Option" - "enumerate () -> Iterator" - name: "Combining with other iterators" items: - "zip (IntoIterator) -> Iterator" - "chain (IntoIterator) -> Iterator" - name: "Flattening" items: - "flatten () -> Iterator where T: IntoIterator" - "flat_map ((T) -> IntoIterator) -> Iterator" - name: "Taking and skipping" items: - "skip (usize) -> Iterator" - "take (usize) -> Iterator" - "skip_while ((&T) -> bool) -> Iterator" - "take_while ((&T) -> bool) -> Iterator" - "step_by (usize) -> Iterator" - name: "Misc. 
iterating" items: - "for_each ((T) -> ()) -> ()" - "inspect ((&T) -> ()) -> Iterator" - "scan (S, (&mut S, T) -> Option) -> Iterator" - name: "Calculations" items: - "sum () -> S where S: Sum" - "product () -> P where P: Product" - name: "Maximum and minimum" items: - "max () -> Option where T: Ord" - "min () -> Option where T: Ord" - "max_by ((&T, &T) -> Ordering) -> Option" - "min_by ((&T, &T) -> Ordering) -> Option" - "max_by_key ((&T) -> U) -> Option where U: Ord" - "min_by_key ((&T) -> U) -> Option where U: Ord" - name: "Comparing with another iterator" items: - "eq (IntoIterator) -> bool where T: PartialEq" - "ne (IntoIterator) -> bool where T: PartialEq" - "lt (IntoIterator) -> bool where T: PartialOrd" - "le (IntoIterator) -> bool where T: PartialOrd" - "gt (IntoIterator) -> bool where T: PartialOrd" - "ge (IntoIterator) -> bool where T: PartialOrd" - "cmp (IntoIterator) -> Ordering where T: Ord" - "partial_cmp (IntoIterator)\n-> Option where T: PartialOrd" - name: "Reversing and cycling" items: - "rev () -> Iterator where Self: DoubleEndedIterator" - "cycle () -> Iterator where Self: Clone" - type: "Iterator" groups: - name: "Cloning inner" items: - "cloned () -> Iterator where T: Clone" - "copied () -> Iterator where T: Copy" - type: "&mut Iterator" groups: - name: "Finding and positioning" items: - "find ((&T) -> bool) -> Option" - "find_map (( T) -> Option) -> Option" - "position (( T) -> bool) -> Option" - |- rposition (( T) -> bool) -> Option where Self: ExactSizeIterator + DoubleEndedIterator - name: "Boolean operations" items: - "all ((T) -> bool) -> bool" - "any ((T) -> bool) -> bool" - name: "Try iterating" items: - "try_for_each ((T) -> R) -> R where R: Try" - "try_fold (S, (S, T) -> R) -> R where R: Try" - mod: "iter" path: "std::iter" groups: - name: "Creating simple iterators" items: - "empty () -> Iterator" - "once (T) -> Iterator" - "once_with (() -> T) -> Iterator" - "repeat (T) -> Iterator where T: Clone" - "repeat_with (() -> T) -> 
Iterator" - "from_fn (() -> Option) -> Iterator" - "successors (Option, (&T) -> Option) -> Iterator" # Slice & Vec - - type: "&[T]" groups: - name: "Splitting to iterator" items: - "split ((&T) -> bool) -> Iterator" - "rsplit ((&T) -> bool) -> Iterator" - "splitn (usize, (&T) -> bool) -> Iterator" - "rsplitn (usize, (&T) -> bool) -> Iterator" - name: "Splitting at position" items: - "split_at (usize) -> (&[T], &[T])" - "split_first () -> Option<(&T, &[T])>" - "split_last () -> Option<(&T, &[T])>" - name: "Chunks and windows" items: - "chunks (usize) -> Iterator" - "chunks_exact (usize) -> Iterator" - "rchunks (usize) -> Iterator" - "rchunks_exact (usize) -> Iterator" - "windows (usize) -> Iterator" - name: "Matching" items: - "contains (&T) -> bool where T: PartialEq" - "starts_with (&[T]) -> bool where T: PartialEq" - "ends_with (&[T]) -> bool where T: PartialEq" - name: "Binary searching" items: - "binary_search (&T) -> Result where T: Ord" - "binary_search_by ((&T) -> Ordering) -> Result" - "binary_search_by_key (&B, (&T) -> B) -> Result where B: Ord" - name: "Getting and iterating" items: - "first () -> Option<&T>" - "last () -> Option<&T>" - "get (SliceIndex<[T]>) -> Option<&T>" - "iter () -> Iterator" - name: "Length" items: - "len () -> usize" - "is_empty () -> bool" - type: "&mut [T]" groups: - name: "Splitting to iterator" items: - "split_mut ((&T) -> bool) -> Iterator" - "rsplit_mut ((&T) -> bool) -> Iterator" - "splitn_mut (usize, (&T) -> bool) -> Iterator" - "rsplitn_mut (usize, (&T) -> bool) -> Iterator" - name: "Splitting at position" items: - "split_at_mut (usize) -> (&mut [T], &mut [T])" - "split_first_mut () -> Option<(&mut T, &mut [T])>" - "split_last_mut () -> Option<(&mut T, &mut [T])>" - name: "Chunks" items: - "chunks_mut (usize) -> Iterator" - "chunks_exact_mut (usize) -> Iterator" - "rchunks_mut (usize) -> Iterator" - "rchunks_exact_mut (usize) -> Iterator" - name: "Sorting" items: - "sort () where T: Ord" - "sort_by ((&T, &T) -> Ordering)" 
- "sort_by_key ((&T) -> K) where K: Ord" - "sort_by_cached_key ((&T) -> K) where K: Ord" - "sort_unstable () where T: Ord" - "sort_unstable_by ((&T, &T) -> Ordering)" - "sort_unstable_by_key ((&T) -> K) where K: Ord" - name: "Rearranging" items: - "swap (usize, usize)" - "reverse ()" - "rotate_left (usize)" - "rotate_right (usize)" - name: "Overriding" items: - "swap_with_slice (&mut [T])" - "copy_from_slice (&[T]) where T: Copy" - "clone_from_slice (&[T]) where T: Clone" - name: "Getting and iterating" items: - "first_mut () -> Option<&mut T>" - "last_mut () -> Option<&mut T>" - "get_mut (SliceIndex<[T]>) -> Option<&mut T>" - "iter_mut () -> Iterator" - type: "&mut Vec" groups: - name: "Adding and removing single item" items: - "push (T)" - "pop () -> Option" - "insert (usize, T)" - "remove (usize) -> T" - "swap_remove (usize) -> T" - name: "Extending" items: - "append (&mut Vec)" - trait_impl: "Extend" content: "extend (IntoIterator)" - trait_impl: "Extend<&'a T>" content: "extend (IntoIterator) where T: Copy" - "extend_from_slice (&[T]) where T: Clone" - name: "Resizing" items: - "truncate (usize)" - "resize (usize, T) where T: Clone" - "resize_with (usize, () -> T)" - name: "Clearing" items: - "clear ()" - "retain ((&T) -> bool)" - name: "Removing or replacing range into iterator" items: - "drain (RangeBounds) -> Iterator" - "splice (RangeBounds, IntoIterator) -> Iterator" - name: "Deduplicating" items: - "dedup () where T: PartialEq" - "dedup_by ((&mut T, &mut T) -> bool)" - "dedup_by_key ((&mut T) -> K) where K: PartialEq" - name: "Splitting off" items: - "split_off (usize) -> Vec" - name: "Capacity manipulation" items: - "reserve (usize)" - "reserve_exact (usize)" - "shrink_to_fit ()" - mod: "slice" path: "std::slice" groups: - name: "Creating slice from reference" items: - "from_ref (&T) -> &[T]" - "from_mut (&mut T) -> &mut [T]" # String-related - - type: "&[u8]" groups: - name: "ASCII" items: - "is_ascii () -> bool" - "eq_ignore_ascii_case (&[u8]) -> 
bool" - "to_ascii_uppercase () -> Vec" - "to_ascii_lowercase () -> Vec" - type: "&mut [u8]" groups: - name: "ASCII" items: - "make_ascii_uppercase ()" - "make_ascii_lowercase ()" - mod: "str" path: "std::str" groups: - name: "Bytes" items: - "from_utf8 (&[u8]) -> Result<&str, Utf8Error>" - "from_utf8_mut (&mut [u8]) -> Result<&mut str, Utf8Error>" - type: "&str" groups: - name: "Chars" items: - "chars () -> Iterator" - "char_indices () -> Iterator" - "is_char_boundary (usize) -> bool" - name: "Bytes" items: - "bytes () -> Iterator" - "as_bytes () -> &[u8]" - name: "Splitting to two parts" items: - "split_at (usize) -> (&str, &str)" - name: "Splitting to iterator" items: - "lines () -> Iterator" - "split_whitespace () -> Iterator" - "split_ascii_whitespace () -> Iterator" - "split (Pattern) -> Iterator" - "rsplit (Pattern) -> Iterator" - "splitn (usize, Pattern) -> Iterator" - "rsplitn (usize, Pattern) -> Iterator" - "split_terminator (Pattern) -> Iterator" - "rsplit_terminator (Pattern) -> Iterator" - name: "Trimming" items: - "trim () -> &str" - "trim_start () -> &str" - "trim_end () -> &str" - "trim_matches (Pattern) -> &str" - "trim_start_matches (Pattern) -> &str" - "trim_end_matches (Pattern) -> &str" - name: "Matching and finding" items: - "contains (Pattern) -> bool" - "starts_with (Pattern) -> bool" - "ends_with (Pattern) -> bool" - "find (Pattern) -> Option" - "rfind (Pattern) -> Option" - "matches (Pattern) -> Iterator" - "rmatches (Pattern) -> Iterator" - "match_indices (Pattern) -> Iterator" - "rmatch_indices (Pattern) -> Iterator" - name: "Case" items: - "to_uppercase () -> String" - "to_lowercase () -> String" - "to_ascii_uppercase () -> String" - "to_ascii_lowercase () -> String" - "eq_ignore_ascii_case (&str) -> bool" - name: "Replacing" items: - "replace (Pattern, &str) -> String" - "replacen (Pattern, &str, usize) -> String" - name: "Length" items: - "len () -> usize" - "is_empty () -> bool" - name: "Misc." 
items: - "is_ascii () -> bool" - "repeat (usize) -> String" - "encode_utf16 () -> Iterator" - "parse () -> Result where F: FromStr" - type: "&mut str" groups: - name: "Splitting to two parts" items: - "split_at_mut (usize) -> (&mut str, &mut str)" - name: "Case conversion" items: - "make_ascii_uppercase ()" - "make_ascii_lowercase ()" - type: "&mut String" groups: - name: "Inserting and appending string" items: - "push_str (&str)" - "insert_str (usize, &str)" - name: "Adding and removing char" items: - "push (char)" - "pop () -> Option" - "insert (usize, char)" - "remove (usize) -> char" - name: "Clearing" items: - "clear ()" - "truncate (usize)" - "retain ((char) -> bool)" - name: "Capacity manipulation" items: - "reserve (usize)" - "reserve_exact (usize)" - "shrink_to_fit ()" - name: "Misc." items: - "split_off (usize) -> String" - "replace_range (RangeBounds, &str)" - "drain (RangeBounds) -> Iterator" trait_impls: - pat: "Try" generic: "T" impls: - "Option" - "Result" - pat: "Pattern" impls: - "char" - "&str" - "&[char]" - "(char) -> bool" - pat: "SliceIndex<[T]>" generic: "T" impls: - "usize" - "usize..usize" - "usize.." - " ..usize" - "usize..=usize" - " ..=usize" - " .." - pat: "RangeBounds" impls: - "usize..usize" - "usize.." - " ..usize" - "usize..=usize" - " ..=usize" - " .." 
references: - kind: trait names: - "std::clone::Clone" - "std::cmp::Ord" - "std::cmp::PartialEq" - "std::cmp::PartialOrd" - "std::default::Default" - "std::fmt::Debug" - "std::iter::DoubleEndedIterator" - "std::iter::ExactSizeIterator" - "std::iter::Extend" - "std::iter::FromIterator" - "std::iter::IntoIterator" - "std::iter::Iterator" - "std::iter::Product" - "std::iter::Sum" - "std::marker::Copy" - "std::ops::Deref" - "std::ops::DerefMut" - "std::ops::RangeBounds" - "std::ops::Try" - "std::slice::SliceIndex" - "std::str::FromStr" - "std::str::pattern::Pattern" - kind: enum names: - "std::option::Option" - "std::cmp::Ordering" - "std::result::Result" - kind: struct names: - "std::ops::Range" - "std::str::Utf8Error" - "std::string::String" - "std::vec::Vec" ================================================ FILE: src/input.rs ================================================ use serde::Deserialize; use std::collections::HashMap; use std::error::Error; use std::fs::File; use std::path::Path; const DEFAULT_STD_URL: &str = "https://doc.rust-lang.org/std/"; #[derive(Debug, Deserialize)] pub struct InputData { pub title: String, #[serde(default)] pub base: BaseUrlMap, pub main: Vec>, #[serde(default)] pub trait_impls: Vec, pub references: Vec, } impl InputData { pub fn from_file(path: impl AsRef) -> Result> { let file = File::open(path)?; Ok(serde_yaml::from_reader(file)?) } } #[derive(Debug, Default, Deserialize)] pub struct BaseUrlMap(HashMap); impl BaseUrlMap { pub fn get_url_for(&self, name: &str) -> Option<&str> { self.0.get(name).map(String::as_str).or(match name { "std" => Some(DEFAULT_STD_URL), _ => None, }) } } // TODO: try to avoid using untagged here // because untagged makes it hard to debug data file when parsing fails. 
// Also see https://github.com/serde-rs/serde/issues/1520 #[derive(Debug, Deserialize)] #[serde(untagged)] pub enum Part { Mod(Mod), Type(Type), } #[derive(Debug, Deserialize)] pub struct Mod { #[serde(rename = "mod")] pub name: String, pub path: String, pub groups: Vec, } #[derive(Debug, Deserialize)] pub struct Type { #[serde(rename = "type")] pub ty: String, pub constraints: Option, pub impls: Option>, pub groups: Vec, } #[derive(Clone, Copy, Debug, Deserialize)] #[serde(rename_all = "lowercase")] pub enum Kind { Enum, Primitive, Struct, Trait, Type, Union, } impl Kind { pub fn to_str(self) -> &'static str { match self { Kind::Enum => "enum", Kind::Primitive => "primitive", Kind::Struct => "struct", Kind::Trait => "trait", Kind::Type => "type", Kind::Union => "union", } } } #[derive(Debug, Deserialize)] pub struct Group { pub name: Option, pub items: Vec, } #[derive(Debug, Deserialize)] #[serde(untagged)] pub enum InputItem { Plain(String), Detailed { trait_impl: Option, content: String, }, } impl InputItem { pub fn content(&self) -> &str { match self { InputItem::Plain(content) => content.as_str(), InputItem::Detailed { content, .. } => content.as_str(), } } pub fn trait_impl(&self) -> Option<&str> { match self { InputItem::Plain(_) => None, InputItem::Detailed { trait_impl, .. } => trait_impl.as_ref().map(String::as_str), } } } #[derive(Debug, Deserialize)] pub struct TraitImplPattern { pub pat: String, pub generic: Option, pub impls: Vec, } #[derive(Debug, Deserialize)] pub struct References { pub kind: Kind, pub names: Vec, #[serde(default)] pub aliases: HashMap, } ================================================ FILE: src/macros.rs ================================================ #[cfg(test)] #[macro_export] macro_rules! tokens { ($($t:tt)*) => {{ #[allow(unused_imports)] use crate::token::{Primitive, Range, Token, TokenStream}; let mut result = vec![]; tokens_impl!(result $($t)*); result }}; } #[cfg(test)] #[macro_export] macro_rules! 
tokens_impl { ($result:ident) => {}; ($result:ident where $($t:tt)*) => { $result.push(Token::Where); tokens_impl!($result $($t)*); }; ($result:ident +$ident:ident $($t:tt)*) => { $result.push(Token::AssocType(stringify!($ident))); tokens_impl!($result $($t)*); }; ($result:ident $ident:ident $($t:tt)*) => { $result.push(Token::Identifier(stringify!($ident))); tokens_impl!($result $($t)*); }; ($result:ident $str:literal $($t:tt)*) => { $result.push(Token::Text($str)); tokens_impl!($result $($t)*); }; ($result:ident &$r:literal $($t:tt)*) => { $result.push(Token::Primitive(Primitive::Ref(concat!("&", $r)))); tokens_impl!($result $($t)*); }; ($result:ident *$r:literal $($t:tt)*) => { $result.push(Token::Primitive(Primitive::Ptr(concat!("*", $r)))); tokens_impl!($result $($t)*); }; ($result:ident @() $($t:tt)*) => { $result.push(Token::Type(TokenStream(vec![ Token::Primitive(Primitive::Unit), ]))); tokens_impl!($result $($t)*); }; ($result:ident @( $($inner:tt)* ) $($t:tt)*) => { $result.push(Token::Type(TokenStream(vec![ Token::Primitive(Primitive::TupleStart), Token::Nested(TokenStream(tokens!($($inner)*))), Token::Primitive(Primitive::TupleEnd), ]))); tokens_impl!($result $($t)*); }; ($result:ident @[ $($inner:tt)* ] $($t:tt)*) => { let mut inner = vec![]; inner.push(Token::Primitive(Primitive::SliceStart)); tokens_impl!(inner $($inner)*); inner.push(Token::Primitive(Primitive::SliceEnd)); $result.push(Token::Type(TokenStream(inner))); tokens_impl!($result $($t)*); }; ($result:ident ~$range:ident $($t:tt)*) => { $result.push(Token::Range(Range::$range)); tokens_impl!($result $($t)*); }; ($result:ident @$ident:ident $($t:tt)*) => { $result.push(Token::Type(TokenStream(vec![ Token::Primitive(Primitive::Named(stringify!($ident))), ]))); tokens_impl!($result $($t)*); }; ($result:ident ^$ident:ident $($t:tt)*) => { $result.push(Token::Type(TokenStream(vec![ Token::Identifier(stringify!($ident)), ]))); tokens_impl!($result $($t)*); }; ($result:ident ^[ $($inner:tt)* ] 
$($t:tt)*) => { $result.push(Token::Type(TokenStream(tokens!($($inner)*)))); tokens_impl!($result $($t)*); }; ($result:ident { $($inner:tt)* } $($t:tt)*) => { $result.push(Token::Nested(TokenStream(tokens!($($inner)*)))); tokens_impl!($result $($t)*); }; } ================================================ FILE: src/main.rs ================================================ #[macro_use] extern crate combine; use crate::input::InputData; use crate::page_gen::generate_to; use std::env; use std::error::Error; #[macro_use] mod macros; mod input; mod page_gen; mod parser; mod token; fn main() -> Result<(), Box> { let mut args = env::args(); let _ = args.next(); // executable path // Get path of input data file and output file let input_file = args.next().expect("must specify input file"); let output_file = args.next().expect("must specify output file"); // Generate the page let input = InputData::from_file(&input_file)?; generate_to(&output_file, &input)?; Ok(()) } ================================================ FILE: src/page_gen.rs ================================================ use crate::input::{ BaseUrlMap, Group, InputData, InputItem, Kind, Mod, Part, References, TraitImplPattern, Type, }; use crate::parser::{self, ParsedItem}; use crate::token::{Primitive, RangeToken, Token, TokenStream}; use bitflags::bitflags; use std::collections::HashMap; use std::fmt::{Display, Formatter, Result, Write as _}; use std::fs::File; use std::io::{self, Write as _}; use std::iter; use std::path::Path; use v_htmlescape::escape; pub fn generate_to(path: impl AsRef, input: &InputData) -> io::Result<()> { let mut file = File::create(path)?; let content_writer = PageContentWriter { input }; write!( file, include_str!("template.html"), title = escape(&input.title), content = content_writer )?; Ok(()) } struct PageContentWriter<'a> { input: &'a InputData, } impl Display for PageContentWriter<'_> { fn fmt(&self, f: &mut Formatter) -> Result { let InputData { base, trait_impls, references, 
main, .. } = self.input; Generator::new(base, trait_impls, references).generate(f, main) } } struct Generator<'a> { base: &'a BaseUrlMap, trait_impls: Vec>, references: HashMap<&'a str, Reference>, } struct TraitImpl<'a> { pat: TokenStream<'a>, generic: Option<&'a str>, impls: Vec>, } impl<'a> Generator<'a> { fn new( base: &'a BaseUrlMap, trait_impls: &'a [TraitImplPattern], ref_data: &'a [References], ) -> Self { let trait_impls = trait_impls .iter() .map(|trait_impl| { let pat = parse_type(&trait_impl.pat); let generic = trait_impl.generic.as_deref(); let impls = trait_impl .impls .iter() .map(|impl_| { parser::parse_trait_impl(impl_) .map_err(|_| format!("failed to parse trait impl: {}", impl_)) .unwrap() }) .collect(); TraitImpl { pat, generic, impls, } }) .collect(); let references = ref_data .iter() .flat_map(|reference| { let kind = reference.kind; iter::empty() .chain(reference.names.iter().map(move |item| { let (path, name) = parse_path(item); let url = build_type_url(base, &path, kind, name); (name, Reference { kind, url }) })) .chain(reference.aliases.iter().map(move |(alias, path)| { let (path, name) = parse_path(path); let url = build_type_url(base, &path, kind, name); (alias.as_str(), Reference { kind, url }) })) }) .collect(); Generator { base, trait_impls, references, } } fn generate(&self, f: &mut Formatter, data: &[Vec]) -> Result { write!(f, "
")?; data.iter() .try_for_each(|section| self.generate_section(f, section))?; write!(f, "
")?; Ok(()) } fn generate_section(&self, f: &mut Formatter, section: &[Part]) -> Result { write!(f, r#"
"#)?; section .iter() .try_for_each(|part| self.generate_part(f, part))?; write!(f, "
")?; Ok(()) } fn generate_part(&self, f: &mut Formatter, part: &Part) -> Result { let info = self.build_part_info(part); write!(f, r#"
"#)?; write!( f, r#"

{}

"#, info.base_url, escape(info.title) )?; if let Some(constraints) = &info.constraints { write!(f, r#"

"#)?; self.generate_tokens(f, constraints, Flags::LINKIFY | Flags::EXPAND_TRAIT)?; write!(f, "

")?; } write!(f, "
")?; if let Part::Type(ty) = part { if let Some(impls) = &ty.impls { self.generate_impls(f, impls)?; } } info.groups .iter() .try_for_each(|group| self.generate_group(f, group, &info)) } fn build_part_info(&self, part: &'a Part) -> PartInfo<'a> { match part { Part::Mod(m) => self.build_part_info_for_mod(m), Part::Type(t) => self.build_part_info_for_type(t), } } fn build_part_info_for_mod(&self, m: &'a Mod) -> PartInfo<'a> { let path: Vec<_> = m.path.split("::").collect(); let url = build_path_url(self.base, &path); PartInfo { title: &m.name, base_url: url, constraints: None, groups: &m.groups, fn_type: FunctionType::Function, } } fn build_part_info_for_type(&self, t: &'a Type) -> PartInfo<'a> { let ty = parse_type(&t.ty); // Unwrap references let mut inner = &ty; loop { let mut iter = inner.0.iter().filter(|token| !token.is_whitespace_only()); let next_token = match iter.next() { Some(Token::Primitive(Primitive::Ref(_))) => iter.next(), _ => break, }; inner = match next_token { Some(Token::Type(inner)) => inner, _ => unreachable!("unexpected token after ref: {:?}", next_token), }; } // Use the first token as the source of base url for this part let first_token = inner.0.first().expect("empty inner"); let url = match first_token { Token::Identifier(ident) => match self.references.get(ident) { Some(r) => r.url.clone(), None => unreachable!("unknown name: {}", ident), }, Token::Primitive(primitive) => self.get_primitive_url(primitive), _ => unreachable!("unexpected token inside type: {}", first_token), }; let constraints = t.constraints.as_ref().map(|constraints| { match parser::parse_constraints(constraints.as_str()) { Ok(tokens) => tokens, Err(_) => unreachable!("failed to parse: {}", constraints), } }); PartInfo { title: &t.ty, base_url: url, constraints, groups: &t.groups, fn_type: FunctionType::Method, } } fn generate_impls(&self, f: &mut Formatter, impls: &[String]) -> Result { write!(f, r#"
    "#)?; for impl_item in impls.iter() { let parsed = match parser::parse_impl(impl_item) { Ok(tokens) => tokens, Err(_) => unreachable!("failed to parse impl: {}", impl_item), }; write!(f, "
  • impl ")?; self.generate_tokens(f, &parsed, Flags::LINKIFY)?; write!(f, "
  • ")?; } write!(f, "
")?; Ok(()) } fn generate_group(&self, f: &mut Formatter, group: &Group, part_info: &PartInfo) -> Result { if let Some(name) = &group.name { write!(f, r#"

{}

"#, escape(name))?; } write!(f, r#"
    "#)?; group .items .iter() .try_for_each(|item| self.generate_item(f, item, part_info))?; write!(f, "
")?; Ok(()) } fn generate_item(&self, f: &mut Formatter, item: &InputItem, part_info: &PartInfo) -> Result { let parsed = ParsedItem::parse(item.content()) .map_err(|_| format!("failed to parse `{}`", item.content())) .unwrap(); let kind = match part_info.fn_type { FunctionType::Function => "fn", FunctionType::Method => { if parsed.takes_self { "method" } else { "fn" } } }; write!(f, r#"
  • "#, kind)?; write!(f, r#"fn "#)?; let url = match part_info.fn_type { FunctionType::Function => format!("fn.{}.html", parsed.name), FunctionType::Method => match item.trait_impl() { Some(trait_impl) => format!("#impl-{}", escape(trait_impl)), None => format!("#method.{}", parsed.name), }, }; write!( f, r#"{}"#, part_info.base_url, url, kind, parsed.name )?; self.generate_tokens(f, &parsed.tokens, Flags::LINKIFY | Flags::EXPAND_TRAIT)?; write!(f, "
  • ")?; Ok(()) } fn generate_tokens(&self, f: &mut Formatter, tokens: &TokenStream<'_>, flags: Flags) -> Result { tokens.0.iter().try_for_each(|token| match token { Token::Text(text) => write!(f, "{}", escape(text)), Token::Where => write!(f, r#"where"#), Token::Identifier(ident) => self.generate_identifier(f, ident, flags), Token::AssocType(ty) => write!(f, r#"{}"#, ty), Token::Primitive(primitive) => self.generate_primitive(f, primitive, flags), Token::Range(range) => self.generate_range(f, *range, flags), Token::Type(ty) => self.generate_type(f, ty, flags), Token::Nested(nested) => { write!(f, r#""#)?; self.generate_tokens(f, nested, flags)?; write!(f, "") } }) } fn generate_type(&self, f: &mut Formatter, tokens: &TokenStream<'_>, flags: Flags) -> Result { if !flags.contains(Flags::EXPAND_TRAIT) { return self.generate_tokens(f, tokens, flags); } let matched = self.trait_impls.iter().find_map(|trait_impl| { match tokens.matches(&trait_impl.pat, trait_impl.generic) { Ok(replacement) => Some((trait_impl, replacement)), Err(()) => None, } }); let (trait_impl, replacement) = match matched { Some(matched) => matched, None => return self.generate_tokens(f, tokens, flags), }; write!(f, r#""#)?; self.generate_tokens(f, tokens, flags & !(Flags::LINKIFY | Flags::EXPAND_TRAIT))?; write!(f, r#"")?; Ok(()) } fn generate_identifier(&self, f: &mut Formatter, ident: &str, flags: Flags) -> Result { match self.references.get(ident) { Some(r) => { let kind = r.kind.to_str(); if flags.contains(Flags::LINKIFY) { write!(f, r#"{}"#, r.url, kind, ident) } else { write!(f, r#"{}"#, kind, ident) } } None => write!(f, "{}", ident), } } fn generate_primitive( &self, f: &mut Formatter, primitive: &Primitive<'_>, flags: Flags, ) -> Result { if flags.contains(Flags::LINKIFY) { let url = self.get_primitive_url(primitive); write!( f, r#"{}"#, url, primitive, ) } else { write!(f, r#"{}"#, primitive) } } fn get_primitive_url(&self, primitive: &Primitive<'_>) -> String { let name = match primitive 
{ Primitive::SliceStart | Primitive::SliceEnd => "slice", Primitive::TupleStart | Primitive::TupleEnd => "tuple", Primitive::Unit => "unit", Primitive::Ref(_) => "reference", Primitive::Ptr(_) => "pointer", Primitive::Named(name) => name, }; let std_url = self.base.get_url_for("std").unwrap(); format!("{}primitive.{}.html", std_url, name) } fn generate_range(&self, f: &mut Formatter, range: RangeToken, flags: Flags) -> Result { if flags.contains(Flags::LINKIFY) { let name = match range { RangeToken::Range => "Range", RangeToken::RangeFrom => "RangeFrom", RangeToken::RangeFull => "RangeFull", RangeToken::RangeInclusive => "RangeInclusive", RangeToken::RangeTo => "RangeTo", RangeToken::RangeToInclusive => "RangeToInclusive", }; write!( f, r#"{}"#, self.base.get_url_for("std").unwrap(), name, range ) } else { write!(f, "{}", range) } } } fn parse_type(ty: &str) -> TokenStream<'_> { parser::parse_type(ty) .map_err(|_| format!("failed to parse `{}`", ty)) .unwrap() } fn build_type_url(base: &BaseUrlMap, path: &[&str], kind: Kind, name: &str) -> String { let mut url = build_path_url(base, path); write!(url, "{}.{}.html", kind.to_str(), name).unwrap(); url } fn build_path_url(base: &BaseUrlMap, path: &[&str]) -> String { let (crate_name, path) = path.split_first().expect("zero-length path"); let mut url = base .get_url_for(crate_name) .expect("unknown crate") .to_string(); for s in path.iter() { url.push_str(s); url.push('/'); } url } fn build_tokens_with_replacement<'a>( tokens: &'a TokenStream<'a>, generic: &str, replacement: &'a TokenStream<'a>, ) -> TokenStream<'a> { tokens .0 .iter() .map(|token| match token { Token::Type(nested) => Token::Type(match nested.0.as_slice() { [Token::Identifier(ident)] if *ident == generic => replacement.clone(), _ => build_tokens_with_replacement(nested, generic, replacement), }), Token::Nested(nested) => { Token::Nested(build_tokens_with_replacement(nested, generic, replacement)) } _ => token.clone(), }) .collect() } #[derive(Debug)] 
struct Reference { kind: Kind, url: String, } struct PartInfo<'a> { title: &'a str, base_url: String, constraints: Option>, groups: &'a [Group], fn_type: FunctionType, } fn parse_path(s: &str) -> (Box<[&str]>, &str) { let mut path: Vec<_> = s.split("::").collect(); let name = path.pop().unwrap(); let path = path.into_boxed_slice(); (path, name) } #[derive(Clone, Copy)] enum FunctionType { Function, Method, } bitflags! { struct Flags: u8 { /// Linkify identifiers and symbols when possible const LINKIFY = 0b0001; /// Expand trait to list of types when available const EXPAND_TRAIT = 0b0010; } } ================================================ FILE: src/parser.rs ================================================ use crate::token::{Primitive, RangeToken, Token, TokenStream}; use combine::error::StringStreamError; use combine::parser::{ char::{alpha_num, char, letter, spaces, string}, choice::{choice, optional}, combinator::attempt, range::recognize, repeat::{many, skip_many1}, Parser, }; use either_n::{Either2, Either3, Either7}; use std::iter; pub struct ParsedItem<'a> { pub takes_self: bool, pub name: &'a str, pub tokens: TokenStream<'a>, } impl<'a> ParsedItem<'a> { pub fn parse(input: &'a str) -> Result { let parser = (optional(string("::")), identifier_str(), item_after_name()); parse(parser, input).map(|(prefix, name, rest)| ParsedItem { takes_self: prefix.is_none(), name, tokens: TokenStream(rest.collect()), }) } } pub fn parse_constraints(input: &str) -> Result, ()> { parse(where_clause(), input).map(Iterator::collect) } pub fn parse_type(input: &str) -> Result, ()> { parse(single_type_like_token(), input).map(|token| match token { Token::Type(inner) => inner, _ => unreachable!(), }) } pub fn parse_impl(input: &str) -> Result, ()> { let parser = chain2( single_type_like(), optional_tokens(chain2(lex("=>"), sep1_by_lex(assoc_type_param, ","))), ); parse(parser, input).map(Iterator::collect) } pub fn parse_trait_impl(input: &str) -> Result, ()> { let parser = 
chain2( single_type_like(), optional_tokens(chain2(lex("=>"), sep1_by_lex(assoc_type_param, ","))), ); parse(parser, input).map(Iterator::collect) } // TODO: Replace this macro with named existential type when it's available. // See https://github.com/rust-lang/rust/issues/34511 macro_rules! parser_str_to_iter_token { ($a:lifetime) => { parser_str_to!($a, impl Iterator>) }; } macro_rules! parser_str_to { ($a:lifetime, $ty:ty) => { impl Parser<&$a str, Output = $ty> } } fn parse<'a, T>(mut parser: parser_str_to!('a, T), input: &'a str) -> Result { parser .parse(input) .map_err(|_| ()) .and_then(|(result, remaining)| match remaining { "" => Ok(result), _ => Err(()), }) } fn item_after_name<'a>() -> parser_str_to_iter_token!('a) { chain5( lex("("), nested_type_like_list(), lex(")"), optional_tokens(chain2(lex("->"), single_type_like())), optional_tokens(where_clause()), ) } fn where_clause<'a>() -> parser_str_to_iter_token!('a) { chain2( wrap("where", Token::Where), sep1_by_lex(single_where_constraint, ","), ) } fn single_where_constraint<'a>() -> parser_str_to_iter_token!('a) { chain3( single_type_like(), lex(":"), sep1_by_lex(simple_named_type, "+"), ) } type BoxedTokenIter<'a> = Box> + 'a>; // Add an extra wrapper for this parser so that it can be invoked recursively. parser! { fn type_like['a]()(&'a str) -> BoxedTokenIter<'a> where [] { type_like_inner() } } fn type_like_inner<'a>() -> parser_str_to!('a, BoxedTokenIter<'a>) { sep1_by_lex(single_type_like, "|").map(to_boxed_iter) } // Add an extra wrapper for this parser so that we don't have too deep type name. parser! 
{ fn single_type_like['a]()(&'a str) -> BoxedTokenIter<'a> where [] { single_type_like_inner() } } fn single_type_like_inner<'a>() -> parser_str_to!('a, BoxedTokenIter<'a>) { single_type_like_token().map(iter::once).map(to_boxed_iter) } fn to_boxed_iter<'a, T>(iter: impl Iterator + 'a) -> Box + 'a> { Box::new(iter) } fn single_type_like_token<'a>() -> parser_str_to!('a, Token<'a>) { to_type_token(choice(( attempt(ref_type()).map(Either7::One), attempt(ptr_type()).map(Either7::Two), attempt(slice_type()).map(Either7::Three), attempt(fn_type()).map(Either7::Four), attempt(tuple_type()).map(Either7::Five), attempt(range_type()).map(Either7::Six), named_type().map(Either7::Seven), ))) } fn ref_type<'a>() -> parser_str_to_iter_token!('a) { chain3( recognize(( char('&'), optional(string("mut")), optional(attempt((spaces(), lifetime()))), )) .map(|s| iter::once(Token::Primitive(Primitive::Ref(s)))), maybe_spaces(), single_type_like(), ) } fn ptr_type<'a>() -> parser_str_to_iter_token!('a) { chain3( recognize((char('*'), choice((string("const"), string("mut"))))) .map(|s| iter::once(Token::Primitive(Primitive::Ptr(s)))), maybe_spaces(), single_type_like(), ) } fn slice_type<'a>() -> parser_str_to_iter_token!('a) { chain3( wrap_start("[", Primitive::SliceStart), type_like(), wrap_end("]", Primitive::SliceEnd), ) } fn fn_type<'a>() -> parser_str_to_iter_token!('a) { chain4( text((char('('), spaces())), nested_type_like_list(), text((spaces(), char(')'), spaces(), string("->"), spaces())), type_like(), ) } fn tuple_type<'a>() -> parser_str_to_iter_token!('a) { choice(( attempt(wrap("()", Primitive::Unit)).map(Either2::One), chain3( wrap_start("(", Primitive::TupleStart), choice(( attempt(chain2( type_like(), text((spaces(), char(','), spaces(), string("..."), spaces())), )) .map(|tokens| Either2::One(iter::once(Token::Nested(tokens.collect())))), nested_type_like_list().map(Either2::Two), )), wrap_end(")", Primitive::TupleEnd), ) .map(Either2::Two), )) } fn 
nested_type_like_list<'a>() -> parser_str_to_iter_token!('a) { optional( sep1_by_lex(type_like, ",") .map(Iterator::collect) .map(Token::Nested), ) .map(IntoIterator::into_iter) } fn range_type<'a>() -> parser_str_to_iter_token!('a) { ( optional(named_type()), choice((attempt(lex_str("..=")), attempt(lex_str("..")))), optional(named_type()), ) .and_then(|(start, op, end)| { let range = match (&start, op.trim(), &end) { (None, "..", None) => RangeToken::RangeFull, (None, "..", Some(_)) => RangeToken::RangeTo, (None, "..=", Some(_)) => RangeToken::RangeToInclusive, (Some(_), "..", None) => RangeToken::RangeFrom, (Some(_), "..", Some(_)) => RangeToken::Range, (Some(_), "..=", Some(_)) => RangeToken::RangeInclusive, _ => return Err(StringStreamError::UnexpectedParse), }; let start = start.into_iter().flatten(); let end = end.into_iter().flatten(); Ok(iter::empty() .chain(start) .chain(range_token(op, range)) .chain(end)) }) } fn range_token(s: &str, range: RangeToken) -> impl Iterator> { let start = match &s[..s.len() - s.trim_start().len()] { "" => None, spaces => Some(Token::Text(spaces)), }; let end = match &s[s.trim_end().len()..] { "" => None, spaces => Some(Token::Text(spaces)), }; iter::empty() .chain(start) .chain(iter::once(Token::Range(range))) .chain(end) } fn named_type<'a>() -> parser_str_to_iter_token!('a) { chain4( optional_tokens(lex("dyn ")), simple_named_type(), // Associated items many::, _, _>(attempt(chain2( lex("::"), identifier_str().map(Token::AssocType).map(iter::once), ))), // Additional bounds optional_tokens(chain2(lex("+"), sep1_by_lex(simple_named_type, "+"))), ) } // Add an extra wrapper for this parser so that we don't have too deep type name. parser! 
{ fn simple_named_type['a]()(&'a str) -> BoxedTokenIter<'a> where [] { simple_named_type_inner() } } fn simple_named_type_inner<'a>() -> parser_str_to!('a, BoxedTokenIter<'a>) { chain2( // Name identifier_str().map(|ident| { iter::once(if is_primitive(ident) { Token::Primitive(Primitive::Named(ident)) } else { Token::Identifier(ident) }) }), // Optional parameters optional_tokens(chain3( lex("<"), sep1_by_lex(type_param, ","), text((spaces(), char('>'))), )), ) .map(|ty| to_boxed_iter(iter::once(Token::Type(ty.collect())))) } fn to_type_token<'a>(inner: parser_str_to_iter_token!('a)) -> parser_str_to!('a, Token<'a>) { inner.map(|ty| { let mut inner: Vec<_> = ty.collect(); match inner.as_ref() as &[_] { [Token::Type(_)] => inner.remove(0), _ => Token::Type(TokenStream(inner)), } }) } #[rustfmt::skip] fn is_primitive(ident: &str) -> bool { matches!( ident, "bool" | "char" | "str" | "i8" | "i16" | "i32" | "i64" | "i128" | "isize" | "u8" | "u16" | "u32" | "u64" | "u128" | "usize" ) } fn type_param<'a>() -> parser_str_to_iter_token!('a) { choice(( attempt(lifetime_param()).map(Either3::One), attempt(assoc_type_param()).map(Either3::Two), type_like().map(Either3::Three), )) } fn lifetime_param<'a>() -> parser_str_to_iter_token!('a) { text(lifetime()) } fn assoc_type_param<'a>() -> parser_str_to_iter_token!('a) { chain3( identifier_str().map(Token::AssocType).map(iter::once), lex("="), type_like(), ) } fn optional_tokens<'a>(inner: parser_str_to_iter_token!('a)) -> parser_str_to_iter_token!('a) { optional(attempt(inner)) .map(IntoIterator::into_iter) .map(Iterator::flatten) } fn sep1_by_lex<'a, P, I>( parser_fn: impl Fn() -> P, sep: &'static str, ) -> parser_str_to_iter_token!('a) where P: Parser<&'a str, Output = I>, I: Iterator>, { chain2( parser_fn(), many::, _, _>(attempt(chain2(lex(sep), parser_fn()))), ) } fn lex<'a>(s: &'static str) -> parser_str_to_iter_token!('a) { text(lex_str(s)) } fn lex_str<'a>(s: &'static str) -> parser_str_to!('a, &'a str) { 
recognize((spaces(), string(s), spaces())) } fn wrap_start<'a>( inner: &'static str, token: impl Into>, ) -> parser_str_to_iter_token!('a) { let token = token.into(); chain2( string(inner).map(move |_| iter::once(token.clone())), maybe_spaces(), ) } fn wrap_end<'a>(inner: &'static str, token: impl Into>) -> parser_str_to_iter_token!('a) { let token = token.into(); chain2( maybe_spaces(), string(inner).map(move |_| iter::once(token.clone())), ) } fn wrap<'a>(inner: &'static str, token: impl Into>) -> parser_str_to_iter_token!('a) { let token = token.into(); chain3( maybe_spaces(), string(inner).map(move |_| iter::once(token.clone())), maybe_spaces(), ) } fn maybe_spaces<'a>() -> parser_str_to_iter_token!('a) { recognize(spaces()).map(|s| match s { "" => None.into_iter(), s => Some(Token::Text(s)).into_iter(), }) } fn text<'a>(inner: impl Parser<&'a str>) -> parser_str_to_iter_token!('a) { text_token(inner).map(iter::once) } fn text_token<'a>(inner: impl Parser<&'a str>) -> impl Parser<&'a str, Output = Token<'a>> { recognize(inner).map(Token::Text) } fn lifetime<'a>() -> parser_str_to!('a, &'a str) { recognize((char('\''), skip_many1(letter()))) } fn identifier_str<'a>() -> parser_str_to!('a, &'a str) { recognize(skip_many1(choice((alpha_num(), char('_'))))) } macro_rules! impl_chain { ($name:ident: $($v:ident)+) => { fn $name<'a>($( $v: parser_str_to!('a, impl IntoIterator>), )+) -> parser_str_to_iter_token!('a) { ($($v),+).map(|($($v),+)| { iter::empty() $(.chain($v.into_iter()))+ }) } } } impl_chain!(chain2: a b); impl_chain!(chain3: a b c); impl_chain!(chain4: a b c d); impl_chain!(chain5: a b c d e); #[cfg(test)] mod tests { use combine::Parser; use pretty_assertions::assert_eq; macro_rules! 
test { ($parser:ident: [$($input:literal => [$($expected:tt)*],)*]) => { #[test] fn $parser() { $( let (tokens, remaining) = super::$parser().parse($input) .expect("failed to parse"); assert_eq!(remaining, "", "unparsed content"); assert_eq!(tokens.collect::>(), tokens!($($expected)*)); )* } }; } test!(item_after_name: [ " ((T) -> ())" => [" (" { ^["(" { ^T } ") -> " @()] } ")"], " ((&T) -> bool) -> (B, B) where B: Default + Extend" => [ " (" { ^["(" { ^[&"" ^T] } ") -> " @bool] } ") " "-> " @( ^B ", " ^B ) " " where " " ^B ": " ^Default " + " ^[ Extend "<" ^T ">" ] ], " (S, T) -> S where S: Default + Clone, Tz::Offset: Display" => [ " (" { ^S ", " ^T } ") " "-> " ^S " " where " " ^S ": " ^Default " + " ^Clone ", " ^[ ^Tz "::" +Offset ] ": " ^Display ], ]); test!(type_like: [ // Named "Foo" => [^Foo], "Option" => [^[Option "<" ^Foo ">"]], "Foo::Err" => [^[^Foo "::" +Err]], "Box" => [^[Box "<" ^["dyn " ^Foo] ">"]], "Iterator + Add + Clone" => [ ^[^[Iterator "<" +Item " = " ^T ">"] " + " ^[Add "<" +Rhs " = " ^Self ">"] " + " ^Clone] ], // References "&Foo" => [^[&"" ^Foo]], "&'a Foo" => [^[&"'a" " " ^Foo]], "&mut Foo" => [^[&"mut" " " ^Foo]], "&mut 'a Foo" => [^[&"mut 'a" " " ^Foo]], "&[Foo]" => [^[&"" @[^Foo]]], "&dyn Foo" => [^[&"" ^["dyn " ^Foo]]], // Pointers "*const Foo" => [^[*"const" " " ^Foo]], "*mut Foo" => [^[*"mut" " " ^Foo]], "*const [Foo]" => [^[*"const" " " @[^Foo]]], // Tuple-like "()" => [@()], "(Foo, &Bar)" => [@(^Foo ", " ^[&"" ^Bar])], "(Foo, ...)" => [@(^Foo ", ...")], // Range "usize.. usize" => [^[@usize ~Range " " @usize]], "usize..=usize" => [^[@usize ~RangeInclusive @usize]], " .. usize" => [^[" " ~RangeTo " " @usize]], " ..=usize" => [^[" " ~RangeToInclusive @usize]], "usize.. " => [^[@usize ~RangeFrom " "]], " .. 
" => [^[" " ~RangeFull " "]], // Function "() -> Foo" => [^["(" ") -> " ^Foo]], "(Iterator) -> Result<(), T>" => [ ^["(" { ^[Iterator "<" +Item " = " ^T ">"] } ") -> " ^[Result "<" @() ", " ^T ">"]] ], "(Foo, &(Bar, &mut 'a [Baz])) -> T" => [ ^["(" { ^Foo ", " ^[&"" @(^Bar ", " ^[&"mut 'a" " " @[^Baz]])] } ") -> " ^T] ], // Union (pseudo-type) "Foo | &Bar | (Baz) -> bool" => [ ^Foo " | " ^[&"" ^[Bar "<" ^T ">"]] " | " ^["(" { ^Baz } ") -> " @bool] ], ]); } ================================================ FILE: src/template.html ================================================ {title} - Rust cheat sheet {content} ================================================ FILE: src/token.rs ================================================ use std::fmt::{self, Display, Write as _}; use std::iter::FromIterator; #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct TokenStream<'a>(pub Vec>); impl<'a> TokenStream<'a> { pub fn matches( &'a self, pat: &TokenStream<'_>, generic: Option<&str>, ) -> Result>, ()> { let mut replacement = None; if tokens_match(self, pat, generic, &mut replacement) { Ok(replacement) } else { Err(()) } } } fn tokens_match<'a>( tokens: &'a TokenStream<'a>, pat: &TokenStream<'_>, generic: Option<&str>, replacement: &mut Option<&'a TokenStream<'a>>, ) -> bool { tokens .0 .iter() .zip(pat.0.iter()) .all(|(token, pat)| match (token, pat) { (Token::Where, Token::Where) => true, (Token::Identifier(this), Token::Identifier(pat)) => this == pat, (Token::Primitive(this), Token::Primitive(pat)) => this == pat, (Token::Range(this), Token::Range(pat)) => this == pat, (Token::AssocType(this), Token::AssocType(pat)) => this == pat, (Token::Nested(this), Token::Nested(pat)) => { tokens_match(this, pat, generic, replacement) } (Token::Text(this), Token::Text(pat)) => this .split_ascii_whitespace() .zip(pat.split_ascii_whitespace()) .all(|(this, pat)| this == pat), (Token::Type(this), Token::Type(pat)) => match (pat.0.as_slice(), generic) { ([Token::Identifier(ident)], 
Some(generic)) if *ident == generic => { if let Some(replacement) = replacement { tokens_match(this, replacement, None, &mut None) } else { *replacement = Some(this); true } } _ => tokens_match(this, pat, generic, replacement), }, _ => false, }) } impl Display for TokenStream<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.iter().try_for_each(|token| write!(f, "{}", token)) } } impl<'a> FromIterator> for TokenStream<'a> { fn from_iter>>(iter: I) -> Self { TokenStream(Vec::from_iter(iter)) } } impl<'a> IntoIterator for TokenStream<'a> { type Item = Token<'a>; type IntoIter = > as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } } impl<'a> Extend> for TokenStream<'a> { fn extend>>(&mut self, iter: I) { self.0.extend(iter); } } impl<'a, Iter> Extend for TokenStream<'a> where Iter: IntoIterator>, { fn extend>(&mut self, iter: I) { self.0.extend(iter.into_iter().flatten()) } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum Token<'a> { Text(&'a str), Nested(TokenStream<'a>), Type(TokenStream<'a>), Primitive(Primitive<'a>), Identifier(&'a str), AssocType(&'a str), Range(RangeToken), Where, } impl Token<'_> { pub fn is_whitespace_only(&self) -> bool { match self { Token::Text(text) => text.trim().is_empty(), _ => false, } } } impl<'a> From> for Token<'a> { fn from(primitive: Primitive<'a>) -> Self { Token::Primitive(primitive) } } impl Display for Token<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Token::Text(s) | Token::Identifier(s) | Token::AssocType(s) => f.write_str(s), Token::Nested(inner) | Token::Type(inner) => write!(f, "{}", inner), Token::Primitive(p) => write!(f, "{}", p), Token::Range(r) => write!(f, "{}", r), Token::Where => f.write_str("where"), } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum Primitive<'a> { Ref(&'a str), Ptr(&'a str), SliceStart, SliceEnd, TupleStart, TupleEnd, Unit, Named(&'a str), } impl Display for Primitive<'_> { fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { match self { Primitive::Ref(s) | Primitive::Ptr(s) | Primitive::Named(s) => f.write_str(s), Primitive::SliceStart => f.write_char('['), Primitive::SliceEnd => f.write_char(']'), Primitive::TupleStart => f.write_char('('), Primitive::TupleEnd => f.write_char(')'), Primitive::Unit => f.write_str("()"), } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum RangeToken { Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, } impl Display for RangeToken { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match self { RangeToken::Range | RangeToken::RangeFrom | RangeToken::RangeFull | RangeToken::RangeTo => "..", RangeToken::RangeInclusive | RangeToken::RangeToInclusive => "..=", }) } } #[cfg(test)] mod tests { use super::Token; use crate::parser::parse_type; #[test] fn token_stream_matches() { fn check_match( pat: &str, generic: Option<&str>, cases: &[(&str, Result]>, ()>)], ) { let pat = parse_type(pat).unwrap(); for (ty, expected) in cases.iter() { let ty = parse_type(ty).unwrap(); let actual = ty.matches(&pat, generic); let expected = match expected { Ok(Some([Token::Type(tokens)])) => Ok(Some(tokens)), Ok(None) => Ok(None), Err(()) => Err(()), _ => unreachable!("unexpected `expected`: `{:?}`", expected), }; assert_eq!(actual, expected); } } check_match( "Try", Some("T"), &[ ("Try ", Ok(Some(&tokens!(@usize)))), ( "Try >", Ok(Some(&tokens!(^[Option "<" ^T ">"]))), ), ( "Try Option >", Ok(Some(&tokens!(^["(" ") -> " ^[Option "<" ^T ">"]]))), ), ("Try", Err(())), ("Result", Err(())), ("&Try", Err(())), ], ); check_match( "SliceIndex<[T]>", Some("T"), &[ ("SliceIndex<[usize]>", Ok(Some(&tokens!(@usize)))), ("SliceIndex<[()]>", Ok(Some(&tokens!(@())))), ("SliceIndex<[[T]]>", Ok(Some(&tokens!(@[^T])))), ("SliceIndex", Err(())), ], ); check_match( "RangeBounds", None, &[ ("RangeBounds", Ok(None)), ("RangeBounds < usize >", Ok(None)), ("RangeBounds", Err(())), ], ); } } 
================================================ FILE: static/script.js ================================================
// Applies display modes from the query string (e.g. `?dark,large`):
// `dark` swaps the theme stylesheet, `large`/`single` toggle classes on the
// root element; unknown arguments are logged and ignored.
const root = document.documentElement;
// Query string without the leading `?`, split on commas, empty parts dropped.
const args = location.search.slice(1).split(',').filter(arg => !!arg);
for (const arg of args) {
  switch (arg) {
    case 'dark':
      document.getElementById('theme').href = 'theme-dark.css';
      break;
    case 'large':
    case 'single':
      root.classList.add(arg);
      break;
    default:
      console.warn(`Unknown argument ${arg}`);
  }
}
// Once the DOM is ready, rewrite the footer's mode-toggle links so each one
// adds its mode when off and removes it when on (marked with class `on`).
window.addEventListener("DOMContentLoaded", () => {
  const footer = document.querySelector('footer');
  // Mode toggles are the footer links whose href starts with `?`.
  const modeSwitches = footer.querySelectorAll('li > a[href^="?"]');
  for (const a of modeSwitches) {
    const mode = a.getAttribute('href').slice(1);
    if (args.includes(mode)) {
      a.parentNode.classList.add('on');
      a.href = '?' + args.filter(arg => arg !== mode).join(',');
    } else {
      a.href = '?' + [...args, mode].join(',');
    }
  }
}, { once: true });

================================================ FILE: static/style.css ================================================
@import url(https://cdn.jsdelivr.net/gh/tonsky/FiraCode@2/distr/fira_code.css);

/* Base typography; --opacity is consumed by the theme stylesheets. */
body { font: 16px/1.5 'Fira Code', monospace; --opacity: 1; }
/* Sheet layout: columns side by side, scaled to half size by default. */
main { position: absolute; width: auto; display: flex; white-space: pre; transform-origin: 0 0; transform: scale(0.5); }
/* `?single` stacks sections vertically; `?large` disables the 0.5 scale. */
.single main { display: unset; }
.large main { transform: unset; }
a { text-decoration: none; color: inherit; }
a:hover { text-decoration: underline; }
ul { list-style: none; }
ul, li { padding: 0; margin: 0; }
.section { padding: 0 2em 5em; }
/* Part and group headings. */
.part-title-group { margin: 1.5em 0 1em; }
.part-title { font-size: 2em; line-height: normal; }
.part-title, .part-subtitle { margin: 0; }
.part-subtitle { font-size: 1.5em; }
.group-title { font: inherit; margin: 1em 0 0; }
.group-title::before { content: "// "; }
.group-list, .type-impls { margin-bottom: 1em; }
.group-list { padding-left: 2em; }
/* Hanging "=> " / ":: " markers in front of list items. */
.item::before { content: "=> "; margin-left: -2em; }
.item-fn::before { content: ":: "; }
.type-impls a,
.group-list a { font-weight: 500; }
/* Invisible (but selectable-suppressed) prefix before functions. */
.prefix-fn { margin-left: -1.8em; opacity: 0; -moz-user-select: none; user-select: none; }
/* Nested type parameters fade progressively. */
.nested { --opacity: 0.6; }
.nested .nested { --opacity: 0.4; }
/* Hover popup listing matched trait impls. */
.trait-matched { position: relative; display: inline-block; cursor: pointer; outline: 0 none; }
.trait-matched:hover .impls { visibility: visible; }
.impls { visibility: hidden; position: absolute; top: 0; left: 0; z-index: 1; cursor: auto; pointer-events: none; padding-bottom: 2em; --opacity: 1; }
.impls .nested { --opacity: 0.6; }
.impls .nested .nested { --opacity: 0.4; }
.impls-title { font: inherit; margin: 0; width: max-content; pointer-events: auto; }
.impls-list { padding: 2px 10px; margin: 0 -10px; border: 1px solid; border-radius: 5px; width: max-content; pointer-events: auto; }
/* Fixed footer with the mode-toggle links (see script.js). */
footer { font-size: 0.6em; position: fixed; bottom: 16px; right: 16px; border: 1px solid; border-radius: 5px; opacity: .8; }
footer ul { display: flex; }
footer li:first-child { margin-left: 1px; }
footer li a { display: block; padding: 5px .5em; border-radius: 5px; margin-right: 1px; }
footer li:last-child { border-left: 1px solid; }
footer a:hover { text-decoration: none; }

================================================ FILE: static/theme-dark.css ================================================
/* Dark theme: only colors; rgba alpha comes from --opacity set in style.css. */
body, .impls h4, .impls ul { color: #ddd; background: #353535; }
.group-title { color: #8d8d8b; }
main a, main span { color: rgba(221, 221, 221, var(--opacity)); }
/* Per-item-kind syntax colors. */
.method, .fn { color: rgba(43, 171, 99, var(--opacity)); }
.trait { color: rgba(183, 140, 242, var(--opacity)); }
.enum { color: rgba(130, 176, 137, var(--opacity)); }
.primitive { color: rgba(67, 174, 199, var(--opacity)); }
.struct { color: rgba(45, 191, 184, var(--opacity)); }
.type, .assoc-type { color: rgba(255, 127, 0, var(--opacity)); }
.union { color: rgba(166, 174, 55, var(--opacity)); }
.where { color: rgba(221, 221, 221, var(--opacity)); }
footer { background: #353535; color: #ccc; }
footer, footer li:last-child { border-color: #999; }
/* Active (enabled) mode toggle is shown inverted. */
footer li.on a { color: #333; background: #ccc; }

================================================ FILE: static/theme-light.css ================================================
/* Light theme: same selector set as theme-dark.css with light palette. */
body, .impls h4, .impls ul { color: black; background: white; }
.group-title { color: #8e908c; }
main a, main span { color: rgba(0, 0, 0, var(--opacity)); }
/* Per-item-kind syntax colors. */
.method, .fn { color: rgba(154, 110, 49, var(--opacity)); }
.trait { color: rgba(124, 90, 243, var(--opacity)); }
.enum { color: rgba(80, 129, 87, var(--opacity)); }
.primitive { color: rgba(44, 128, 147, var(--opacity)); }
.struct { color: rgba(173, 68, 142, var(--opacity)); }
.type, .assoc-type { color: rgba(186, 93, 0, var(--opacity)); }
.union { color: rgba(118, 123, 39, var(--opacity)); }
.where { color: rgba(78, 76, 76, var(--opacity)); }
footer { background: white; color: #999; }
footer, footer li:last-child { border-color: #ccc; }
/* Active (enabled) mode toggle is shown inverted. */
footer li.on a { color: #333; background: #ccc; }