Repository: dtolnay/cargo-tally
Branch: master
Commit: e683620d9c11
Files: 42
Total size: 149.8 KB
Directory structure:
gitextract_nzl9eeax/
├── .github/
│ ├── FUNDING.yml
│ └── workflows/
│ ├── ci.yml
│ └── install.yml
├── .gitignore
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── build.rs
└── src/
├── alloc.rs
├── arena.rs
├── args.rs
├── clean.rs
├── collect.rs
├── communication.rs
├── cratemap.rs
├── cratename.rs
├── dependency.rs
├── feature.rs
├── filter.rs
├── hidden.rs
├── hint.rs
├── id.rs
├── impls.rs
├── index.html
├── lib.rs
├── load.rs
├── log.rs
├── macros.rs
├── main.rs
├── matrix.rs
├── max.rs
├── mend.rs
├── present.rs
├── query.rs
├── render.rs
├── stream.rs
├── timestamp.rs
├── total.rs
├── trace.rs
├── user.rs
└── version.rs
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/FUNDING.yml
================================================
# GitHub Sponsors account shown in the repository's "Sponsor" button.
github: dtolnay
================================================
FILE: .github/workflows/ci.yml
================================================
name: CI

on:
  push:
  pull_request:
  workflow_dispatch:
  # Nightly run to catch breakage from new toolchains and dependencies.
  schedule: [cron: "40 1 * * *"]

permissions:
  contents: read

env:
  # Treat all rustc warnings as errors throughout CI.
  RUSTFLAGS: -Dwarnings

jobs:
  # Shared gate (reusable workflow) that decides whether CI should continue.
  pre_ci:
    uses: dtolnay/.github/.github/workflows/pre_ci.yml@master

  test:
    name: Rust ${{matrix.rust}}
    needs: pre_ci
    if: needs.pre_ci.outputs.continue
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # 1.88.0 matches the rust-version declared in Cargo.toml.
        rust: [nightly, beta, stable, 1.88.0]
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: ${{matrix.rust}}
      # Layout randomization surfaces code that accidentally relies on
      # struct field ordering; only available on nightly.
      - name: Enable type layout randomization
        run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV
        if: matrix.rust == 'nightly'
      - run: cargo check
      - run: cargo test
      # Preserve the lockfile produced by the nightly job for debugging,
      # even when earlier steps failed.
      - uses: actions/upload-artifact@v6
        if: matrix.rust == 'nightly' && always()
        with:
          name: Cargo.lock
          path: Cargo.lock
        continue-on-error: true

  # Compile-only smoke test on Windows.
  windows:
    name: Windows
    needs: pre_ci
    if: needs.pre_ci.outputs.continue
    runs-on: windows-latest
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@stable
      - run: cargo check

  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    # Skipped on PRs; runs on push and the nightly schedule.
    if: github.event_name != 'pull_request'
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@clippy
      - run: cargo clippy -- -Dclippy::all -Dclippy::pedantic

  # Fails if any dependency in the workspace has a newer release available.
  outdated:
    name: Outdated
    runs-on: ubuntu-latest
    if: github.event_name != 'pull_request'
    timeout-minutes: 45
    steps:
      - uses: actions/checkout@v6
      - uses: dtolnay/rust-toolchain@stable
      - uses: dtolnay/install@cargo-outdated
      - run: cargo outdated --workspace --exit-code 1
================================================
FILE: .github/workflows/install.yml
================================================
name: Install

on:
  workflow_dispatch:
  # Nightly schedule plus every pushed tag.
  schedule: [cron: "40 1 * * *"]
  push: {tags: ['*']}

permissions: {}

env:
  RUSTFLAGS: -Dwarnings

jobs:
  # Runs the shared check_install reusable workflow against the published
  # cargo-tally crate.
  install:
    name: Install
    uses: dtolnay/.github/.github/workflows/check_install.yml@master
    with:
      crate: cargo-tally
================================================
FILE: .gitignore
================================================
# crates.io database dumps downloaded per the README (db-dump.tar.gz).
/*.tar.gz
# Not committed; CI uploads the nightly lockfile as an artifact instead.
/Cargo.lock
/dataflow-graph/
/report.txt
# Cargo build output.
/target/
================================================
FILE: Cargo.toml
================================================
[package]
name = "cargo-tally"
version = "1.0.73"
authors = ["David Tolnay <dtolnay@gmail.com>"]
categories = ["development-tools::cargo-plugins"]
description = "Cargo subcommand for drawing graphs of the number of dependencies on a crate over time"
edition = "2021"
keywords = ["cargo", "subcommand"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/cargo-tally"
# Keep in sync with the minimum toolchain tested in .github/workflows/ci.yml.
rust-version = "1.88"

[lib]
# Library crate deliberately lives at a non-default path.
path = "src/hidden.rs"

[dependencies]
abomonation = "0.7"
anyhow = "1.0.79"
atomic-take = "1.0"
bytesize = "2"
cargo-subcommand-metadata = "0.1"
chrono = "0.4.35"
clap = { version = "4", features = ["deprecated"] }
db-dump = "0.7.15"
# Exact pin: prerelease build of differential-dataflow.
differential-dataflow-master = { version = "=0.13.0-dev.1", default-features = false }
foldhash = "0.2"
minipre = "0.2"
num_cpus = "1.0"
opener = "0.8"
ref-cast = "1.0"
regex = { version = "1.9.2", default-features = false, features = ["perf", "std"] }
semver = "1.0"
serde = { package = "serde_core", version = "1.0.220" }
sysinfo = { version = "0.38", default-features = false, features = ["system"] }
termcolor = "1.1"
thiserror = "2"
# Exact pin: prerelease build of timely, matching differential-dataflow-master.
timely-master = { version = "=0.13.0-dev.1", default-features = false }
typed-arena = "2.0"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
================================================
FILE: LICENSE-APACHE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
================================================
FILE: LICENSE-MIT
================================================
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
================================================
FILE: README.md
================================================
# Cargo tally
<img alt="Number of crates that depend directly on each regex version" src="https://user-images.githubusercontent.com/1940490/122184090-bc75d600-ce40-11eb-856b-affc568d2e15.png" width="30%"> <img alt="Fraction of crates that depend on failure vs anyhow and thiserror" src="https://user-images.githubusercontent.com/1940490/122184103-bf70c680-ce40-11eb-890c-988cd96f4428.png" width="30%"> <img alt="Fraction of crates.io that depends transitively on libc" src="https://github.com/user-attachments/assets/712804c7-f5de-4f99-9cb2-214665c0586f" width="30%">
**`cargo tally` is a Cargo subcommand for drawing graphs of the number of crates
that depend directly or indirectly on a crate over time.**
```
Usage: cargo tally [options] queries...
Options:
--db <PATH> Path to crates.io's database dump [default: ./db-dump.tar.gz]
--jobs, -j <N> Number of threads to run differential dataflow
--relative Display as a fraction of total crates, not absolute number
--transitive Count transitive dependencies, not just direct dependencies
```
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/cargo--tally-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/cargo-tally)
[<img alt="crates.io" src="https://img.shields.io/crates/v/cargo-tally.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/cargo-tally)
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/cargo-tally/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/cargo-tally/actions?query=branch%3Amaster)
<br>
## Installation
```console
$ wget https://static.crates.io/db-dump.tar.gz
$ cargo install cargo-tally
```
- Data is drawn from crates.io database dumps, which are published nightly by
automation running on crates.io. You can download a new dump whenever you feel
like having fresh data.
- The tally command accepts a list of which crates to tally. This can either be
the name of a crate like `serde` or a name with arbitrary semver version
specification like `serde:1.0`. If a version is not specified, dependencies on
all versions of the crate are tallied together.
- The generated graphs use [D3](https://d3js.org/); the cargo tally command
should pop open a browser showing your graph. It uses the same mechanism that
`cargo doc --open` uses so hopefully it works well on various systems.
---
<br>
## Examples
- Number of crates that depend directly on each major version of the regex
crate.
**`$ cargo tally regex:0.1 regex:0.2 regex:1.0`**
![Number of crates that depend directly on each major version of regex][regex]
---
<br>
- Fraction of crates.io that depends directly on each major version of the regex
crate. This is the same graph as the previous, but scaled to the exponentially
growing total number of crates on crates.io.
**`$ cargo tally regex:0.1 regex:0.2 regex:1.0 --relative`**
![Fraction of crates.io that depends directly on each major version of regex][regex-relative]
---
<br>
- Fraction of crates.io that depends directly on various error handling
libraries. Note that crates are not double-counted; a crate that depends on
*both* `anyhow` and `thiserror` counts as only one for the purpose of the
`anyhow+thiserror` curve.
**`$ cargo tally --relative quick-error error-chain failure anyhow+thiserror snafu eyre+color-eyre`**
![Fraction of crates.io that depends directly on various error handling libraries][failure-anyhow-thiserror]
---
<br>
- Fraction of crates.io that depends transitively on libc.
**`$ cargo tally --relative --transitive libc`**
![Fraction of crates.io that depends transitively on libc][libc]
[regex]: https://github.com/user-attachments/assets/4595d8a3-5c10-4fc2-9e38-d3ec47389257
[regex-relative]: https://github.com/user-attachments/assets/9ecafff0-ba5b-4fec-8a75-e84ba4cd54d1
[failure-anyhow-thiserror]: https://github.com/user-attachments/assets/885fd931-7eff-48c5-83f2-93c8b149860f
[libc]: https://github.com/user-attachments/assets/712804c7-f5de-4f99-9cb2-214665c0586f
---
<br>
## Credits
The implementation is powered by [differential-dataflow].
<img src="https://raw.github.com/dtolnay/cargo-tally/72612d2290b0ab564fdc6e332bb69f556e1bb41b/ddshow.svg">
[differential-dataflow]: https://github.com/TimelyDataflow/differential-dataflow
<br>
#### License
<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>
<br>
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this project by you, as defined in the Apache-2.0 license,
shall be dual licensed as above, without any additional terms or conditions.
</sub>
================================================
FILE: build.rs
================================================
use std::env;
use std::fs;
use std::path::Path;
use std::process;
const CARGO_TALLY_MEMORY_LIMIT: &str = "CARGO_TALLY_MEMORY_LIMIT";

/// Build script: bakes the optional memory cap from
/// $CARGO_TALLY_MEMORY_LIMIT into OUT_DIR/limit.mem (as a Rust
/// `Option<u64>` literal consumed by src/alloc.rs via `include!`), and
/// emits a `host_os = "windows"` cfg when building on a Windows host.
fn main() {
    // Parse the env var, exiting with a diagnostic on non-UTF-8 or
    // non-integer values; absence of the var means "no limit".
    let limit: Option<u64> = match env::var_os(CARGO_TALLY_MEMORY_LIMIT) {
        None => None,
        Some(os_value) => {
            let Some(string) = os_value.to_str() else {
                eprintln!("failed to parse ${CARGO_TALLY_MEMORY_LIMIT}");
                process::exit(1);
            };
            match string.parse::<u64>() {
                Ok(parsed) => Some(parsed),
                Err(err) => {
                    eprintln!("failed to parse ${CARGO_TALLY_MEMORY_LIMIT}: {err}");
                    process::exit(1);
                }
            }
        }
    };

    // Debug-format writes `None` or `Some(n)`, which is valid Rust source
    // for the include! site.
    let out_dir = env::var_os("OUT_DIR").unwrap();
    let dest = Path::new(&out_dir).join("limit.mem");
    fs::write(dest, format!("{limit:?}\n")).unwrap();

    // Host triples look like x86_64-pc-windows-msvc; the third component
    // names the OS.
    let host = env::var_os("HOST").unwrap();
    let os_component = host.to_str().unwrap().split('-').nth(2);
    if os_component == Some("windows") {
        println!("cargo:rustc-cfg=host_os=\"windows\"");
    }

    println!("cargo:rerun-if-env-changed={CARGO_TALLY_MEMORY_LIMIT}");
    println!("cargo:rustc-check-cfg=cfg(host_os, values(\"windows\"))");
}
================================================
FILE: src/alloc.rs
================================================
use bytesize::ByteSize;
use std::alloc::{self, GlobalAlloc, Layout, System};
use std::fmt::{self, Display};
use std::ptr;
use std::sync::atomic::{AtomicU64, Ordering};
/// Wrapper around a backing allocator (`System` by default) that keeps
/// running statistics about every allocation made by the process.
struct Allocator<A = System> {
    alloc: A,             // delegate that performs the real (de)allocation
    count: AtomicU64,     // number of alloc/alloc_zeroed/realloc calls
    total: AtomicU64,     // cumulative bytes ever requested
    current: AtomicU64,   // bytes currently live (allocated minus freed)
    peak: AtomicU64,      // high-water mark of `current`
}

#[global_allocator]
static ALLOC: Allocator = Allocator {
    alloc: System,
    count: AtomicU64::new(0),
    total: AtomicU64::new(0),
    current: AtomicU64::new(0),
    peak: AtomicU64::new(0),
};

// Optional memory cap written by build.rs (from $CARGO_TALLY_MEMORY_LIMIT)
// as an `Option<u64>` literal. The path separator differs by host OS,
// hence the two cfg'd variants.
#[cfg(not(host_os = "windows"))]
const LIMIT: Option<u64> = include!(concat!(env!("OUT_DIR"), "/limit.mem"));
#[cfg(host_os = "windows")]
const LIMIT: Option<u64> = include!(concat!(env!("OUT_DIR"), "\\limit.mem"));
unsafe impl<A> GlobalAlloc for Allocator<A>
where
    A: GlobalAlloc,
{
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.count.fetch_add(1, Ordering::Relaxed);
        let size = layout.size() as u64;
        let prev = self.current.fetch_add(size, Ordering::Relaxed);
        self.total.fetch_add(size, Ordering::Relaxed);
        // fetch_max returns the *previous* maximum, so take the max with
        // the candidate value again to obtain the up-to-date peak.
        let peak = self
            .peak
            .fetch_max(prev + size, Ordering::Relaxed)
            .max(prev + size);
        // Abort via the alloc error hook once the configured memory cap
        // would be exceeded. LIMIT is None unless set at build time.
        if let Some(limit) = LIMIT {
            if peak > limit {
                alloc::handle_alloc_error(layout);
            }
        }
        unsafe { self.alloc.alloc(layout) }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.alloc.dealloc(ptr, layout) };
        // Only `current` shrinks; `count`/`total`/`peak` are monotonic.
        let size = layout.size() as u64;
        self.current.fetch_sub(size, Ordering::Relaxed);
    }

    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        // Same bookkeeping as `alloc`, delegating to the zeroing variant.
        self.count.fetch_add(1, Ordering::Relaxed);
        let size = layout.size() as u64;
        let prev = self.current.fetch_add(size, Ordering::Relaxed);
        self.total.fetch_add(size, Ordering::Relaxed);
        let peak = self
            .peak
            .fetch_max(prev + size, Ordering::Relaxed)
            .max(prev + size);
        if let Some(limit) = LIMIT {
            if peak > limit {
                alloc::handle_alloc_error(layout);
            }
        }
        unsafe { self.alloc.alloc_zeroed(layout) }
    }

    unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
        self.count.fetch_add(1, Ordering::Relaxed);
        let align = old_layout.align();
        let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, align) };
        let new_ptr = unsafe { self.alloc.realloc(ptr, old_layout, new_size) };
        let old_size = old_layout.size() as u64;
        let new_size = new_size as u64;
        let peak = if ptr::eq(new_ptr, ptr) {
            // Reallocated in place: only the size delta affects counters.
            if new_size > old_size {
                self.total.fetch_add(new_size - old_size, Ordering::Relaxed);
                let prev = self
                    .current
                    .fetch_add(new_size - old_size, Ordering::Relaxed);
                self.peak
                    .fetch_max(prev + new_size - old_size, Ordering::Relaxed)
                    .max(prev + new_size - old_size)
            } else {
                // Shrunk in place: usage drops, so the peak cannot rise;
                // 0 trivially passes the limit check below.
                self.current
                    .fetch_sub(old_size - new_size, Ordering::Relaxed);
                0
            }
        } else {
            // Moved to a new block: the full new size counts toward `total`.
            self.total.fetch_add(new_size, Ordering::Relaxed);
            let prev = if new_size > old_size {
                self.current
                    .fetch_add(new_size - old_size, Ordering::Relaxed)
            } else {
                self.current
                    .fetch_sub(old_size - new_size, Ordering::Relaxed)
            };
            self.peak
                .fetch_max(prev + new_size, Ordering::Relaxed)
                .max(prev + new_size)
        };
        if let Some(limit) = LIMIT {
            if peak > limit {
                alloc::handle_alloc_error(new_layout);
            }
        }
        new_ptr
    }
}
/// Point-in-time snapshot of the global allocator's counters.
pub(crate) struct AllocStat {
    count: u64,      // number of allocation calls so far
    total: ByteSize, // cumulative bytes ever requested
    peak: ByteSize,  // high-water mark of live bytes
}

/// Reads the current statistics out of the global `ALLOC` instance.
pub(crate) fn stat() -> AllocStat {
    AllocStat {
        count: ALLOC.count.load(Ordering::Relaxed),
        total: ByteSize::b(ALLOC.total.load(Ordering::Relaxed)),
        peak: ByteSize::b(ALLOC.peak.load(Ordering::Relaxed)),
    }
}
impl Display for AllocStat {
    /// Renders the snapshot as a one-line human-readable summary.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{} allocations, total {}, peak {}",
            self.count, self.total, self.peak,
        )
    }
}
================================================
FILE: src/arena.rs
================================================
use foldhash::HashMap;
use std::any::TypeId;
use std::fmt::{self, Debug};
use std::iter::Copied;
use std::ptr;
use std::slice::Iter;
use std::sync::OnceLock;
use std::sync::{Mutex, PoisonError};
use typed_arena::Arena;
/// An immutable slice whose contents live for the duration of the program,
/// allocated from a process-wide typed arena (see the `FromIterator` impl
/// below). Because the backing storage is `&'static`, the handle is `Copy`.
#[derive(Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct Slice<T: 'static> {
    contents: &'static [T],
}

impl<T> Slice<T>
where
    T: 'static,
{
    /// Shared empty slice; does not touch the arena.
    pub const EMPTY: Self = Slice { contents: &[] };

    /// Copies `slice` into the arena.
    pub fn new(slice: &[T]) -> Self
    where
        T: Send + Clone,
    {
        slice.iter().cloned().collect()
    }

    /// Wraps an existing `'static` slice without copying.
    pub const fn from(contents: &'static [T]) -> Self {
        Slice { contents }
    }

    /// Iterates by value (requires `T: Copy`).
    pub fn iter(&self) -> impl Iterator<Item = T>
    where
        T: Copy,
    {
        (*self).into_iter()
    }

    /// Iterates by `'static` reference.
    pub fn iter_ref(&self) -> impl Iterator<Item = &'static T> {
        self.contents.iter()
    }

    /// Whether the slice contains no elements.
    pub fn is_empty(&self) -> bool {
        self.contents.is_empty()
    }
}

// Manual Copy/Clone: derive would require `T: Copy`/`T: Clone`, but only
// the `&'static [T]` reference is copied, so no bound on T is needed.
impl<T> Copy for Slice<T> where T: 'static {}

impl<T> Clone for Slice<T>
where
    T: 'static,
{
    fn clone(&self) -> Self {
        *self
    }
}
impl<T> FromIterator<T> for Slice<T>
where
    T: 'static + Send + Clone,
{
    /// Collects the iterator into a `'static` slice allocated from a
    /// process-wide arena that is never freed.
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = T>,
    {
        let iter = iter.into_iter();
        // Fast path: an iterator that reports itself exactly empty never
        // touches the arena.
        if iter.size_hint() == (0, Some(0)) {
            return Slice::EMPTY;
        }
        // One arena per element type, keyed by TypeId, living in a static
        // that is never dropped.
        static ARENA: OnceLock<Mutex<HashMap<TypeId, Box<dyn Send>>>> = OnceLock::new();
        let mut map = ARENA
            .get_or_init(Mutex::default)
            .lock()
            // A poisoned lock is still usable: the map is only ever
            // inserted into, never left in a torn state.
            .unwrap_or_else(PoisonError::into_inner);
        let arena: &Box<dyn Send> = map
            .entry(TypeId::of::<T>())
            .or_insert_with(|| Box::new(Arena::<T>::new()));
        // The box stored under TypeId::of::<T>() is always a Box<Arena<T>>,
        // so casting the &dyn Send pointer recovers the concrete type.
        // NOTE(review): the 'static lifetime of the resulting contents
        // appears to rely on the arena living in the never-dropped static
        // above and on Arena keeping allocations at stable addresses —
        // confirm against typed_arena's guarantees before modifying.
        let arena = unsafe { &*(ptr::from_ref::<dyn Send>(&**arena).cast::<Arena<T>>()) };
        Slice {
            contents: arena.alloc_extend(iter),
        }
    }
}
impl<T> IntoIterator for Slice<T>
where
    T: 'static + Copy,
{
    type Item = T;
    type IntoIter = Copied<Iter<'static, T>>;

    /// Iterates the `'static` contents by value.
    fn into_iter(self) -> Self::IntoIter {
        self.contents.iter().copied()
    }
}

impl<T> Debug for Slice<T>
where
    T: 'static + Debug,
{
    /// Renders exactly like the underlying slice.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(self.contents, formatter)
    }
}
================================================
FILE: src/args.rs
================================================
use crate::{cratename, user};
use clap::builder::{ArgAction, ValueParser};
use clap::{Arg, Command};
use regex::Regex;
use semver::VersionReq;
use std::env;
use std::ffi::{OsStr, OsString};
use std::path::PathBuf;
use std::str::FromStr;
use thiserror::Error;
/// Parsed command line options for `cargo tally`.
#[derive(Debug)]
pub(crate) struct Opt {
    pub db: PathBuf,           // path to the crates.io database dump
    pub exclude: Vec<Regex>,   // ignore deps from crates matching these (hidden flag)
    pub jobs: usize,           // differential dataflow worker threads
    pub relative: bool,        // plot fraction of all crates, not absolute counts
    pub title: Option<String>, // custom graph title (hidden flag)
    pub transitive: bool,      // count transitive rather than direct deps
    pub queries: Vec<String>,  // query strings, already validated by clap
}

// Usage section shown by clap in --help.
const USAGE: &str = "\
cargo tally [OPTIONS] QUERIES...
cargo tally serde:1.0 'anyhow:^1.0 + thiserror'";

// Custom --help layout template.
const TEMPLATE: &str = "\
{bin} {version}
David Tolnay <dtolnay@gmail.com>
https://github.com/dtolnay/cargo-tally
{usage-heading}
{usage}
{all-args}\
";
/// Builds the clap command definition. `jobs_help` is passed in because its
/// default value is computed at runtime from the CPU count (see `parse`).
fn app(jobs_help: &String) -> Command {
    let mut app = Command::new("cargo-tally")
        .override_usage(USAGE)
        .help_template(TEMPLATE)
        .arg(arg_db())
        .arg(arg_exclude())
        .arg(arg_jobs(jobs_help))
        .arg(arg_relative())
        .arg(arg_title())
        .arg(arg_transitive())
        .arg(arg_queries());
    // CARGO_PKG_VERSION is unset when compiled outside of Cargo.
    if let Some(version) = option_env!("CARGO_PKG_VERSION") {
        app = app.version(version);
    }
    app
}

// Argument identifiers shared between the definitions below and the
// lookups in `parse`.
const DB: &str = "db";
const EXCLUDE: &str = "exclude";
const JOBS: &str = "jobs";
const RELATIVE: &str = "relative";
const TITLE: &str = "title";
const TRANSITIVE: &str = "transitive";
const QUERIES: &str = "queries";
/// Parses the process's command line into an `Opt`.
///
/// Works both when invoked directly as `cargo-tally` and as the Cargo
/// subcommand `cargo tally`, in which case Cargo passes a literal "tally"
/// argument that is stripped before clap sees the arguments.
pub(crate) fn parse() -> Opt {
    // Empirically measured on a full crates.io dump; beyond 32 threads the
    // runtime stops improving while allocation volume keeps growing, hence
    // the cap on the default.
    // | threads | duration | allocated | peak    |
    // |---------|----------|-----------|---------|
    // |       1 |   38.6 s |   55.2 GB | 11.0 GB |
    // |       2 |   24.8 s |   55.4 GB | 10.2 GB |
    // |       4 |   14.2 s |   55.8 GB |  8.8 GB |
    // |       8 |   12.7 s |   58.4 GB |  8.3 GB |
    // |      16 |   12.6 s |   59.2 GB |  8.2 GB |
    // |      32 |   12.8 s |   63.2 GB |  8.4 GB |
    // |      64 |   14.0 s |   69.5 GB | 11.1 GB |
    let default_jobs = num_cpus::get().min(32);
    let jobs_help = format!(
        "Number of threads to run differential dataflow [default: {}]",
        default_jobs,
    );

    // Normalize argv[0] and drop the "tally" inserted by Cargo when run as
    // a subcommand, so clap sees a consistent command line either way.
    let mut args: Vec<_> = env::args_os().collect();
    if let Some(first) = args.get_mut(0) {
        *first = OsString::from("cargo-tally");
    }
    if args.get(1).map(OsString::as_os_str) == Some(OsStr::new("tally")) {
        args.remove(1);
    }

    let matches = app(&jobs_help).get_matches_from(args);
    // `.unwrap()` is safe: DB has a default value, and QUERIES is required.
    let db = matches.get_one::<PathBuf>(DB).unwrap().clone();
    let exclude = matches
        .get_many::<Regex>(EXCLUDE)
        .unwrap_or_default()
        .cloned()
        .collect();
    let jobs = matches
        .get_one::<usize>(JOBS)
        .copied()
        .unwrap_or(default_jobs);
    let title = matches.get_one::<String>(TITLE).cloned();
    let relative = matches.get_flag(RELATIVE);
    let transitive = matches.get_flag(TRANSITIVE);
    let queries = matches
        .get_many::<String>(QUERIES)
        .unwrap()
        .cloned()
        .collect();
    Opt {
        db,
        exclude,
        jobs,
        relative,
        title,
        transitive,
        queries,
    }
}
/// --db <PATH>: location of the crates.io database dump.
fn arg_db() -> Arg {
    Arg::new(DB)
        .long(DB)
        .num_args(1)
        .value_name("PATH")
        .default_value("./db-dump.tar.gz")
        .value_parser(ValueParser::path_buf())
        .help("Path to crates.io's database dump")
}

/// --exclude <REGEX> (hidden, repeatable): ignore dependencies coming from
/// crates whose name matches the regex.
fn arg_exclude() -> Arg {
    Arg::new(EXCLUDE)
        .long(EXCLUDE)
        .hide(true)
        .action(ArgAction::Append)
        .value_name("REGEX")
        .value_parser(Regex::from_str)
        .help("Ignore a dependency coming from any crates matching regex")
}

/// --jobs/-j <N>: worker thread count; help text is built at runtime
/// because the default depends on the CPU count.
fn arg_jobs(help: &String) -> Arg {
    Arg::new(JOBS)
        .long(JOBS)
        .short('j')
        .num_args(1)
        .value_name("N")
        .value_parser(usize::from_str)
        .help(help)
}

/// --relative: plot fractions of all crates instead of absolute counts.
fn arg_relative() -> Arg {
    Arg::new(RELATIVE)
        .long(RELATIVE)
        .num_args(0)
        .help("Display as a fraction of total crates, not absolute number")
}

/// --title <TITLE> (hidden): custom graph title.
fn arg_title() -> Arg {
    Arg::new(TITLE)
        .long(TITLE)
        .hide(true)
        .num_args(1)
        .value_name("TITLE")
        .value_parser(ValueParser::string())
        .help("Graph title")
}

/// --transitive: tally transitive dependencies instead of direct only.
fn arg_transitive() -> Arg {
    Arg::new(TRANSITIVE)
        .long(TRANSITIVE)
        .num_args(0)
        .help("Count transitive dependencies, not just direct dependencies")
}

/// Positional QUERIES, validated eagerly by `validate_query`.
fn arg_queries() -> Arg {
    Arg::new(QUERIES)
        .required(true)
        .num_args(0..)
        .value_name("QUERIES")
        .value_parser(validate_query)
        .help("Queries")
        .hide(true)
}
/// Failures produced while validating a query string on the command line.
#[derive(Error, Debug)]
enum Error {
    #[error("invalid crates.io username")]
    InvalidUsername,
    #[error("invalid crate name according to crates.io")]
    InvalidCrateName,
    // Malformed semver requirement after ':' in a query predicate.
    #[error(transparent)]
    Semver(#[from] semver::Error),
}
/// Validates one query argument and returns it unchanged on success.
///
/// A query is one or more '+'-separated predicates. Each predicate is
/// either "@user" / "@team/user" (checked against crates.io username
/// rules), or "crate" / "crate:semver-req" (name checked against crates.io
/// crate-name rules, requirement parsed as semver).
fn validate_query(string: &str) -> Result<String, Error> {
    for raw_predicate in string.split('+') {
        let predicate = raw_predicate.trim();

        // Owner predicates: every '/'-separated segment must be a valid
        // crates.io username.
        if let Some(handle) = predicate.strip_prefix('@') {
            if !handle.split('/').all(user::valid) {
                return Err(Error::InvalidUsername);
            }
            continue;
        }

        // Crate predicates: optional ":req" suffix after the crate name.
        let (name, req) = match predicate.split_once(':') {
            Some((name, req)) => (name, Some(req)),
            None => (predicate, None),
        };
        if !cratename::valid(name.trim()) {
            return Err(Error::InvalidCrateName);
        }
        if let Some(req) = req {
            VersionReq::from_str(req)?;
        }
    }
    Ok(string.to_owned())
}
#[test]
fn test_cli() {
    // clap's debug_assert catches malformed argument definitions
    // (conflicting ids, broken defaults, etc.) at test time.
    let jobs_help = String::new();
    app(&jobs_help).debug_assert();
}
================================================
FILE: src/clean.rs
================================================
use crate::cratemap::CrateMap;
use cargo_tally::arena::Slice;
use cargo_tally::id::{CrateId, VersionId};
use cargo_tally::version::Version;
use cargo_tally::{DbDump, Dependency};
use semver::{Comparator, Op};
use std::cmp;
use std::collections::btree_map::{BTreeMap as Map, Entry};
/// Sanitize the dependency graph in place before running queries:
///
/// 1. Drop dependency edges whose target crate has no version recorded in
///    `crate_max_version` (all-prerelease crates, or crates deleted from the
///    index entirely).
/// 2. Constrain version requirements that would also match a
///    semver-incompatible future release (such as `0.*`) down to a caret
///    requirement on the crate's max published version; such reqs are deemed
///    silly rather than intentional.
pub(crate) fn clean(db_dump: &mut DbDump, crates: &CrateMap) {
    // Max version number seen for each crate so far in iteration order.
    // NOTE(review): assumes db_dump.releases is ordered oldest-first, so this
    // reflects "max published as of the current release" — confirm in load.rs.
    let mut crate_max_version: Map<CrateId, &Version> = Map::new();
    // Mutable handles to every release's dependency rows, grouped by release.
    let mut dependencies_per_version: Map<VersionId, Vec<&mut Dependency>> = Map::new();
    for dep in &mut db_dump.dependencies {
        dependencies_per_version
            .entry(dep.version_id)
            .or_default()
            .push(dep);
    }
    for rel in &db_dump.releases {
        match crate_max_version.entry(rel.crate_id) {
            Entry::Vacant(entry) => {
                entry.insert(&rel.num);
            }
            Entry::Occupied(entry) => {
                let entry = entry.into_mut();
                *entry = cmp::max(entry, &rel.num);
            }
        }
        let mut no_dependencies = Vec::new();
        let dependencies = dependencies_per_version
            .get_mut(&rel.id)
            .unwrap_or(&mut no_dependencies);
        // Index-based loop because entries may be removed mid-iteration.
        let mut i = 0;
        while let Some(dep) = dependencies.get_mut(i) {
            if !crate_max_version.contains_key(&dep.crate_id) {
                // If every published version of a crate is a prerelease, Cargo
                // will resolve a `*` wildcard dependency to the max prerelease,
                // which we don't track.
                //
                // Other times, crates just go missing from the index, maybe for
                // legal reasons or because of leaked secrets.
                // https://github.com/rust-lang/crates.io-index/commit/a95f8bff541de7461638b5e4f75ee58747829ea3
                if crate::trace::VERBOSE {
                    eprintln!(
                        "unresolved dep {} {} on {} {}",
                        crates.name(rel.crate_id).unwrap(),
                        rel.num,
                        crates.name(dep.crate_id).unwrap(),
                        dep.req,
                    );
                }
                dependencies.remove(i);
                continue;
            }
            let max_version = crate_max_version[&dep.crate_id];
            let mut incompatible_version = Version(semver::Version {
                major: 0,
                minor: 0,
                patch: 0,
                pre: semver::Prerelease::EMPTY,
                build: semver::BuildMetadata::EMPTY,
            });
            // Produce a synthetic version which is semver incompatible with the
            // highest version currently published.
            if max_version.major > 0 {
                incompatible_version.major = max_version.major + 1;
            } else if max_version.minor > 0 {
                incompatible_version.minor = max_version.minor + 1;
            } else {
                incompatible_version.patch = max_version.patch + 1;
            }
            if dep.req.matches(&incompatible_version) {
                // If the declared dependency requirement claims this crate
                // works with the incompatible future release, we deem the
                // dependency silly and constrain it to remain compatible with
                // the current max published. This affects reqs like `0.*`.
                dep.req.comparators = Slice::new(&[Comparator {
                    op: Op::Caret,
                    major: max_version.major,
                    minor: Some(max_version.minor),
                    patch: Some(max_version.patch),
                    pre: semver::Prerelease::EMPTY,
                }]);
            }
            i += 1;
        }
    }
}
================================================
FILE: src/collect.rs
================================================
use differential_dataflow::collection::Collection;
use differential_dataflow::difference::Semigroup;
use std::mem;
use std::sync::{Arc, Mutex, PoisonError};
use timely::dataflow::Scope;
use timely::Data;
/// Anything that can pour its contents into an `Emitter`.
pub(crate) trait Collect<T> {
    fn collect_into(&self, result: &Emitter<T>);
}
/// Read side of a shared accumulation buffer; write handles are produced by
/// `emitter` and share the same `Arc`.
pub(crate) struct ResultCollection<T> {
    out: Arc<Mutex<Vec<T>>>,
}
/// Write side of the shared accumulation buffer.
pub(crate) struct Emitter<T> {
    out: Arc<Mutex<Vec<T>>>,
}
impl<T> ResultCollection<T> {
    /// Create an empty, shareable result buffer.
    pub(crate) fn new() -> Self {
        ResultCollection {
            out: Arc::new(Mutex::new(Vec::new())),
        }
    }
    /// Hand out a write handle backed by the same underlying buffer.
    pub(crate) fn emitter(&self) -> Emitter<T> {
        Emitter {
            out: Arc::clone(&self.out),
        }
    }
}
impl<D, T, R> ResultCollection<(D, T, R)>
where
    T: Ord,
{
    /// Stable-sort accumulated records by their timestamp component only;
    /// records with equal timestamps keep their arrival order.
    pub(crate) fn sort(&self) {
        let mut records = self.out.lock().unwrap_or_else(PoisonError::into_inner);
        records.sort_by(|lhs, rhs| lhs.1.cmp(&rhs.1));
    }
}
impl<G, D, R> Collect<(D, G::Timestamp, R)> for Collection<G, D, R>
where
    G: Scope,
    D: Data,
    R: Semigroup,
    G::Timestamp: Data,
{
    /// Attach an inspect operator that appends every (data, timestamp, diff)
    /// batch flowing through this collection to the emitter's shared buffer.
    fn collect_into(&self, result: &Emitter<(D, G::Timestamp, R)>) {
        let out = Arc::clone(&result.out);
        self.inspect_batch(move |_timestamp, slice| {
            out.lock()
                .unwrap_or_else(PoisonError::into_inner)
                .extend_from_slice(slice);
        });
    }
}
impl<T> IntoIterator for ResultCollection<T> {
    type Item = T;
    type IntoIter = <Vec<T> as IntoIterator>::IntoIter;
    /// Drain the buffer: swap the accumulated Vec out of the mutex (leaving
    /// it empty) and iterate the contents by value.
    fn into_iter(self) -> Self::IntoIter {
        let mut guard = self.out.lock().unwrap_or_else(PoisonError::into_inner);
        mem::replace(&mut *guard, Vec::new()).into_iter()
    }
}
================================================
FILE: src/communication.rs
================================================
// As far as I can tell, timely dataflow uses abomonation only for interprocess
// communication. Within a single process, it uses the Clone impl instead. We
// stub out the Abomonation impl since it will never be called.
// Implements Abomonation + serde Serialize/Deserialize for a type as stubs
// that must never actually run (see the explanation at the top of this file).
// Accepts an optional single generic parameter and an optional where clause.
macro_rules! do_not_abomonate {
    ($($path:ident)::+ $(<$param:ident>)? $(where $($clause:tt)*)?) => {
        impl $(<$param>)? abomonation::Abomonation for $($path)::+ $(<$param>)? $(where $($clause)*)? {
            unsafe fn entomb<W: std::io::Write>(&self, _write: &mut W) -> std::io::Result<()> {
                unimplemented!("unexpected abomonation entomb");
            }
            unsafe fn exhume<'a>(&mut self, _bytes: &'a mut [u8]) -> Option<&'a mut [u8]> {
                // Unwinding here is unsound because abomonation would have
                // blitted the source bytes into the destination with dangling
                // pointers, and is now relying on exhume to fix it up into a
                // valid object. We abort instead.
                std::process::exit(1);
            }
            fn extent(&self) -> usize {
                unimplemented!("unexpected abomonation extent");
            }
        }
        impl $(<$param>)? serde::Serialize for $($path)::+ $(<$param>)? $(where $($clause)*)? {
            fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                unimplemented!("unexpected serde serialize");
            }
        }
        impl<'de, $($param)?> serde::Deserialize<'de> for $($path)::+ $(<$param>)? $(where $($clause)*)? {
            fn deserialize<D>(_deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                unimplemented!("unexpected serde deserialize");
            }
        }
    };
}
// Every type that flows through the in-process timely dataflow gets the stub
// impls above.
do_not_abomonate!(crate::Dependency);
do_not_abomonate!(crate::Query);
do_not_abomonate!(crate::Release);
do_not_abomonate!(crate::arena::Slice<T> where T: 'static);
do_not_abomonate!(crate::feature::CrateFeature);
do_not_abomonate!(crate::feature::DefaultFeatures);
do_not_abomonate!(crate::feature::FeatureId);
do_not_abomonate!(crate::feature::VersionFeature);
do_not_abomonate!(crate::id::CrateId);
do_not_abomonate!(crate::id::DependencyId);
do_not_abomonate!(crate::id::QueryId);
do_not_abomonate!(crate::id::VersionId);
do_not_abomonate!(crate::max::Max<T>);
do_not_abomonate!(crate::present::Present);
do_not_abomonate!(crate::timestamp::DateTime);
do_not_abomonate!(crate::version::Version);
do_not_abomonate!(crate::version::VersionReq);
================================================
FILE: src/cratemap.rs
================================================
use crate::cratename::{CrateName, CrateNameQuery};
use crate::user::User;
use cargo_tally::id::CrateId;
use db_dump::crate_owners::OwnerId;
use ref_cast::RefCast;
use std::collections::BTreeMap as Map;
/// Bidirectional crate id <-> name lookup, plus crate ownership tables from
/// the db dump.
#[derive(Default)]
pub struct CrateMap {
    // id -> crate name exactly as published
    names: Map<CrateId, String>,
    // name -> id; CrateName keys compare '-' and '_' as equal
    ids: Map<CrateName, CrateId>,
    // presumably crates.io login -> owner id; see src/user.rs
    pub(crate) users: Map<User, OwnerId>,
    // owner id -> the crates that owner owns
    pub(crate) owners: Map<OwnerId, Vec<CrateId>>,
}
impl CrateMap {
    /// Construct an empty map.
    pub fn new() -> Self {
        Self::default()
    }
    /// Record a crate's id/name pair in both directions.
    ///
    /// Panics if the name (separator-agnostically) or the id was already
    /// inserted.
    pub fn insert(&mut self, id: CrateId, name: String) {
        assert!(!self.ids.contains_key(CrateNameQuery::ref_cast(&name)));
        assert!(!self.names.contains_key(&id));
        self.names.insert(id, name.clone());
        self.ids.insert(CrateName::new(name), id);
    }
    /// Look up a crate's name by id.
    pub fn name(&self, id: CrateId) -> Option<&str> {
        match self.names.get(&id) {
            Some(name) => Some(name.as_str()),
            None => None,
        }
    }
    /// Look up a crate's id by name; '-' and '_' are interchangeable.
    pub fn id(&self, name: &str) -> Option<CrateId> {
        self.ids.get(CrateNameQuery::ref_cast(name)).map(|id| *id)
    }
}
================================================
FILE: src/cratename.rs
================================================
use ref_cast::RefCast;
use std::borrow::Borrow;
use std::cmp::Ordering;
pub const MAX_NAME_LENGTH: usize = 64;
// Mirrored from https://github.com/rust-lang/crates.io/blob/54a3f10794db7f57e3602426389c369290a8a3d5/src/models/krate.rs
/// Whether `name` is a valid crate name according to crates.io's rules.
pub fn valid(name: &str) -> bool {
    // No char may exist at index MAX_NAME_LENGTH, i.e. at most
    // MAX_NAME_LENGTH chars total. Avoids counting the whole string.
    let within_length_limit = name.chars().nth(MAX_NAME_LENGTH).is_none();
    within_length_limit && valid_ident(name)
}
// Valid feature-prefix characters, and the first character is alphabetic.
fn valid_ident(name: &str) -> bool {
    let starts_alphabetic = name.chars().next().is_some_and(char::is_alphabetic);
    starts_alphabetic && valid_feature_prefix(name)
}
// Nonempty, and only ASCII alphanumerics, underscores, and hyphens.
fn valid_feature_prefix(name: &str) -> bool {
    if name.is_empty() {
        return false;
    }
    name.chars()
        .all(|c| c.is_ascii_alphanumeric() || matches!(c, '_' | '-'))
}
/// Owned crate name; ordered and compared treating '_' and '-' as the same
/// character (see CrateNameQuery below).
pub(crate) struct CrateName(String);
impl CrateName {
    pub(crate) fn new(string: String) -> Self {
        CrateName(string)
    }
}
// Ordering and equality delegate to the separator-agnostic comparison on
// CrateNameQuery.
impl Ord for CrateName {
    fn cmp(&self, rhs: &Self) -> Ordering {
        CrateNameQuery::ref_cast(&self.0).cmp(CrateNameQuery::ref_cast(&rhs.0))
    }
}
impl PartialOrd for CrateName {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
impl Eq for CrateName {}
impl PartialEq for CrateName {
    fn eq(&self, rhs: &Self) -> bool {
        CrateNameQuery::ref_cast(&self.0).eq(CrateNameQuery::ref_cast(&rhs.0))
    }
}
/// Borrowed, unsized view of a crate name. The Borrow impl lets a map keyed
/// by CrateName be probed with a plain &str (via ref_cast) without
/// allocating.
#[derive(RefCast)]
#[repr(transparent)]
pub(crate) struct CrateNameQuery(str);
impl Borrow<CrateNameQuery> for CrateName {
    fn borrow(&self) -> &CrateNameQuery {
        CrateNameQuery::ref_cast(&self.0)
    }
}
impl Ord for CrateNameQuery {
fn cmp(&self, rhs: &Self) -> Ordering {
self.0
.bytes()
.map(SeparatorAgnosticByte)
.cmp(rhs.0.bytes().map(SeparatorAgnosticByte))
}
}
impl PartialOrd for CrateNameQuery {
fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
Some(self.cmp(rhs))
}
}
impl Eq for CrateNameQuery {}
impl PartialEq for CrateNameQuery {
fn eq(&self, rhs: &Self) -> bool {
self.0
.bytes()
.map(SeparatorAgnosticByte)
.eq(rhs.0.bytes().map(SeparatorAgnosticByte))
}
}
/// Byte wrapper that orders and compares '_' and '-' as equal, so crate name
/// lookups are agnostic to which separator the caller used.
struct SeparatorAgnosticByte(u8);
impl SeparatorAgnosticByte {
    // Map '_' onto '-' so both separators land in the same equivalence class.
    fn canonical(&self) -> u8 {
        match self.0 {
            b'_' => b'-',
            other => other,
        }
    }
}
impl Ord for SeparatorAgnosticByte {
    fn cmp(&self, rhs: &Self) -> Ordering {
        self.canonical().cmp(&rhs.canonical())
    }
}
impl PartialOrd for SeparatorAgnosticByte {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
impl Eq for SeparatorAgnosticByte {}
impl PartialEq for SeparatorAgnosticByte {
    fn eq(&self, rhs: &Self) -> bool {
        self.cmp(rhs) == Ordering::Equal
    }
}
================================================
FILE: src/dependency.rs
================================================
/// Which manifest section a dependency edge came from
/// ([dependencies] / [build-dependencies] / [dev-dependencies]).
#[derive(Copy, Clone, Debug)]
pub enum DependencyKind {
    Normal,
    Build,
    Dev,
}
// Mirror the db_dump crate's dependency kind into our own enum.
impl From<db_dump::dependencies::DependencyKind> for DependencyKind {
    fn from(dependency_kind: db_dump::dependencies::DependencyKind) -> Self {
        use db_dump::dependencies::DependencyKind as Upstream;
        match dependency_kind {
            Upstream::Normal => DependencyKind::Normal,
            Upstream::Build => DependencyKind::Build,
            Upstream::Dev => DependencyKind::Dev,
        }
    }
}
================================================
FILE: src/feature.rs
================================================
use crate::arena::Slice;
use crate::id::{CrateId, VersionId};
use std::collections::BTreeMap as Map;
/// Interned identifier of a feature name (see FeatureNames below).
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
#[repr(transparent)]
pub struct FeatureId(pub u32);
impl FeatureId {
    // The bare crate itself; FeatureNames::new interns "" at id 0.
    pub const CRATE: Self = FeatureId(0);
    // The "default" feature; interned at id 1.
    pub const DEFAULT: Self = FeatureId(1);
    // Sentinel (u32::MAX); presumably "to be determined" during loading —
    // verify against the code that fills features in.
    pub const TBD: Self = FeatureId(!0);
}
/// A feature of a release together with the features it enables.
#[derive(Copy, Clone, Debug)]
pub struct FeatureEnables {
    pub id: FeatureId,
    // features enabled unconditionally
    pub enables: Slice<CrateFeature>,
    // features enabled via the `dep?/feature` weak syntax
    pub weak_enables: Slice<CrateFeature>,
}
/// A feature of a particular crate.
#[derive(Copy, Clone, Debug)]
pub struct CrateFeature {
    pub crate_id: CrateId,
    pub feature_id: FeatureId,
}
/// A feature of a particular release (crate version).
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
pub struct VersionFeature {
    pub version_id: VersionId,
    pub feature_id: FeatureId,
}
/// Whether a dependency edge keeps the target's default features on.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct DefaultFeatures(pub bool);
/// Interner mapping feature name strings to dense FeatureId values.
pub struct FeatureNames {
    // id -> name, indexed by FeatureId.0
    names: Vec<String>,
    // name -> id
    map: Map<String, FeatureId>,
}
impl FeatureNames {
    /// Build an interner pre-seeded so that "" and "default" receive the
    /// well-known ids FeatureId::CRATE and FeatureId::DEFAULT.
    pub fn new() -> Self {
        let mut feature_names = FeatureNames {
            names: Vec::new(),
            map: Map::new(),
        };
        assert_eq!(feature_names.id(""), FeatureId::CRATE);
        assert_eq!(feature_names.id("default"), FeatureId::DEFAULT);
        feature_names
    }
    /// Intern a feature name, allocating the next id on first sight.
    pub fn id(&mut self, name: &str) -> FeatureId {
        match self.map.get(name) {
            Some(&existing) => existing,
            None => {
                let fresh = FeatureId(u32::try_from(self.names.len()).unwrap());
                self.names.push(name.to_owned());
                self.map.insert(name.to_owned(), fresh);
                fresh
            }
        }
    }
    /// Reverse lookup; panics if `id` was not produced by this interner.
    pub fn name(&self, id: FeatureId) -> &str {
        &self.names[id.0 as usize]
    }
}
impl Default for FeatureNames {
fn default() -> Self {
FeatureNames::new()
}
}
/// Iterator over the feature ids a dependency edge enables on its target:
/// possibly the bare crate, possibly "default", then any named features.
pub struct FeatureIter {
    // yield FeatureId::CRATE first; set only when neither default features
    // nor named features are requested
    krate: bool,
    // yield FeatureId::DEFAULT next
    default: bool,
    // remaining explicitly named features
    other: <Slice<FeatureId> as IntoIterator>::IntoIter,
}
impl FeatureIter {
    pub fn new(default_features: DefaultFeatures, features: Slice<FeatureId>) -> Self {
        FeatureIter {
            // With default-features off and no named features, the edge still
            // pulls in the bare crate itself.
            krate: !default_features.0 && features.is_empty(),
            default: default_features.0,
            other: features.into_iter(),
        }
    }
}
impl Iterator for FeatureIter {
    type Item = FeatureId;
    /// CRATE (if flagged), then DEFAULT (if flagged), then named features.
    fn next(&mut self) -> Option<Self::Item> {
        if std::mem::take(&mut self.krate) {
            return Some(FeatureId::CRATE);
        }
        if std::mem::take(&mut self.default) {
            return Some(FeatureId::DEFAULT);
        }
        self.other.next()
    }
}
================================================
FILE: src/filter.rs
================================================
use crate::cratemap::CrateMap;
use cargo_tally::DbDump;
use regex::Regex;
/// Drop every release whose crate name matches any of the `exclude` regexes.
pub(crate) fn filter(db_dump: &mut DbDump, crates: &CrateMap, exclude: &[Regex]) {
    if exclude.is_empty() {
        return;
    }
    db_dump.releases.retain(|rel| {
        let crate_name = crates.name(rel.crate_id).unwrap();
        !exclude.iter().any(|pattern| pattern.is_match(crate_name))
    });
}
================================================
FILE: src/hidden.rs
================================================
// There is no library public API. Only the command line tool is considered
// public API.
//
// This crate root pulls in lib.rs and re-exports everything under
// #[doc(hidden)], keeping the library surface out of rustdoc.
#[path = "lib.rs"]
mod lib;
#[doc(hidden)]
pub use crate::lib::*;
================================================
FILE: src/hint.rs
================================================
use differential_dataflow::collection::Collection;
use differential_dataflow::difference::Semigroup;
use timely::dataflow::Scope;
// `.T::<…>()` and `.KV::<…, …>()` are no-op combinators used to annotate the
// element type midway through long dataflow operator chains; the trait bound
// turns each annotation into a compile-time assertion.
#[allow(non_snake_case)]
pub(crate) trait TypeHint: Sized {
    type Element;
    /// Assert that this stream's element type is `D`; returns self unchanged.
    fn T<D>(self) -> Self
    where
        Self: TypeHint<Element = D>,
    {
        self
    }
    /// Assert that this stream's element type is the key-value pair `(K, V)`;
    /// returns self unchanged.
    fn KV<K, V>(self) -> Self
    where
        Self: TypeHint<Element = (K, V)>,
    {
        self
    }
}
impl<G, D, R> TypeHint for Collection<G, D, R>
where
    G: Scope,
    R: Semigroup,
{
    type Element = D;
}
impl<G, D, R> TypeHint for &Collection<G, D, R>
where
    G: Scope,
    R: Semigroup,
{
    type Element = D;
}
================================================
FILE: src/id.rs
================================================
/// Index of a query among the command line queries; u8 suffices.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
#[repr(transparent)]
pub struct QueryId(pub u8);
/// Crate identifier, mirroring db_dump::crates::CrateId (see From below).
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
#[repr(transparent)]
pub struct CrateId(pub u32);
/// Version (release) identifier, mirroring db_dump::versions::VersionId.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
#[repr(transparent)]
pub struct VersionId(pub u32);
/// Identifier of one dependency edge row.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
#[repr(transparent)]
pub struct DependencyId(pub u32);
impl From<db_dump::crates::CrateId> for CrateId {
fn from(id: db_dump::crates::CrateId) -> Self {
CrateId(id.0)
}
}
impl From<db_dump::versions::VersionId> for VersionId {
fn from(id: db_dump::versions::VersionId) -> Self {
VersionId(id.0)
}
}
impl From<u32> for DependencyId {
fn from(id: u32) -> Self {
DependencyId(id)
}
}
================================================
FILE: src/impls.rs
================================================
use crate::{Dependency, Query, Release};
use std::cmp::Ordering;
// Queries, releases, and dependencies are ordered and compared solely by
// their id field; the remaining fields take no part in these impls.
impl Ord for Query {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.cmp(&other.id)
    }
}
impl PartialOrd for Query {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Eq for Query {}
impl PartialEq for Query {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Ord for Release {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.cmp(&other.id)
    }
}
impl PartialOrd for Release {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Eq for Release {}
impl PartialEq for Release {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Ord for Dependency {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.cmp(&other.id)
    }
}
impl PartialOrd for Dependency {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Eq for Dependency {}
impl PartialEq for Dependency {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
================================================
FILE: src/index.html
================================================
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<script src="https://d3js.org/d3.v7.min.js"></script>
<style>
body {
  font: 14px sans-serif;
}
.axis path, .axis line {
  shape-rendering: crispEdges;
}
.line {
  fill: none;
  stroke-width: 1.5px;
}
svg {
  overflow: visible;
}
</style>
</head>
<body>
<script>
// NOTE(review): the ALL-CAPS placeholder identifiers and the #if/#else/#endif
// lines below are presumably substituted by the renderer before this page is
// written out — they are not valid JavaScript as-is.
var data = CARGO_TALLY_DATA;

// Chart geometry.
var margin = { top: 20, right: 100, bottom: 30, left: 50 };
var width = 950 - margin.left - margin.right;
var height = 500 - margin.top - margin.bottom;

// Scales, color palette, and axes.
var x = d3.scaleTime().range([0, width]);
var y = d3.scaleLinear().range([height, 0]);
var color = d3.scaleOrdinal(d3.schemeCategory10);
var xAxis = d3.axisBottom(x);
var yAxis = d3.axisLeft(y);

// Line generator mapping one datapoint to chart coordinates.
var line = d3.line()
  .x(function(d) {
    return x(d.time);
  })
  .y(function(d) {
    return y(d.edges);
  });

// One color per series name.
color.domain(data.map(function(dataset) {
  return dataset.name;
}));

// Parse each datapoint's timestamp into a Date object.
data.forEach(function(dataset) {
  dataset.values.forEach(function(d) {
    d.time = new Date(d.time);
  });
});

// Extents of the data across all series.
var minDate = d3.min(data, function(dataset) {
  return dataset.values[0].time;
});
var maxDate = d3.max(data, function(dataset) {
  return dataset.values[dataset.values.length - 1].time;
});
var maxValue = d3.max(data, function(c) {
  return d3.max(c.values, function(v) {
    return v.edges;
  });
});

// Pad the x domain left by 1/20 of the date span, and the y domain top
// by 2.5% of the max value.
x.domain([(21 * minDate - maxDate) / 20, maxDate]);
y.domain([0, 1.025 * maxValue]);
#if CARGO_TALLY_RELATIVE
// NOTE The ticks should be read only after setting the domain values on `y`
var stepSize = y.ticks()[1] - y.ticks()[0]
var yFormatter = d3.format(`.${Math.max(0, d3.precisionFixed(stepSize) - 2)}%`);
var tooltipFormatter = d3.format(`.${Math.max(1, d3.precisionFixed(stepSize / 10) - 2)}%`);
#else
var yFormatter = d3.format(",");
var tooltipFormatter = d3.format(",");
#endif
yAxis.tickFormat(yFormatter);

// Root svg element with the margin applied as a transform.
var svg = d3.select("body")
  .append("svg")
  .attr("width", width + margin.left + margin.right)
  .attr("height", height + margin.top + margin.bottom)
  .append("g")
  .attr("transform", `translate(${margin.left} ${margin.top})`);

// White-background filter, referenced behind the hovering date label below.
var filter = svg.append("defs")
  .append("filter")
  .attr("x", "0")
  .attr("y", "0")
  .attr("width", "1")
  .attr("height", "1")
  .attr("id", "solid");
filter.append("feFlood")
  .attr("flood-color", "white");
filter.append("feComposite")
  .attr("in", "SourceGraphic");

// Legend: one swatch and label per series.
var legend = svg.selectAll()
  .data(data)
  .enter()
  .append("g");
legend.append("rect")
  .attr("x", 50)
  .attr("y", function(d, i) {
    return i * 20;
  })
  .attr("width", 10)
  .attr("height", 10)
  .style("fill", function(d) {
    return color(d.name);
  });
legend.append("text")
  .attr("x", 64)
  .attr("y", function(d, i) {
    return (i * 20) + 9;
  })
  .text(function(d) {
    return d.name;
  });

// Axes and the rotated y-axis caption.
svg.append("g")
  .attr("class", "x axis")
  .attr("transform", `translate(0 ${height})`)
  .call(xAxis);
svg.append("g")
  .attr("class", "y axis")
  .call(yAxis);
svg.append("text")
  .attr("transform", "rotate(-90)")
  .attr("y", 6)
  .attr("dy", ".71em")
  .style("text-anchor", "end")
  .text(CARGO_TALLY_TITLE);

// One path per series, labeled at its rightmost datapoint.
var curve = svg.selectAll()
  .data(data)
  .enter()
  .append("g");
curve.append("path")
  .attr("class", "line")
  .attr("d", function(d) {
    return line(d.values);
  })
  .style("stroke", function(d) {
    return color(d.name);
  })
  .style("stroke-linejoin", "round");
curve.append("text")
  .attr("transform", function(d) {
    var last = d.values[d.values.length - 1];
    return `translate(${x(last.time)} ${y(last.edges)})`;
  })
  .attr("x", 3)
  .attr("dy", ".35em")
  .text(function(d) {
    return d.name;
  });

// Hover overlay: vertical tracking line, per-series markers, date label.
var mouseG = svg.append("g")
  .style("opacity", "0");
mouseG.append("path") // this is the black vertical line to follow mouse
  .style("stroke", "black")
  .style("stroke-width", "1px")
  .attr("d", `M0 ${height + xAxis.tickSize()} 0 0`);
var mouseDate = mouseG.append("text")
  .attr("y", height + 9)
  .attr("dy", "0.71em")
  .attr("text-anchor", "middle")
  .attr("filter", "url(#solid)");
var mousePerLine = mouseG.selectAll()
  .data(data)
  .enter()
  .append("g")
  .attr("class", "mouse-per-line")
  .style("opacity", "0");
mousePerLine.append("circle")
  .attr("r", 7)
  .style("stroke", function(d) {
    return color(d.name);
  })
  .style("fill", "none")
  .style("stroke-width", "1px");
mousePerLine.append("text")
  .attr("x", -6)
  .attr("y", -4)
  .style("text-anchor", "end");
svg.append("rect") // append a rect to catch mouse movements on canvas
  .attr("width", width) // can't catch mouse events on a g element
  .attr("height", height + xAxis.tickSize() + 16)
  .attr("fill", "none")
  .attr("pointer-events", "all")
  .on("mouseout", function() { // on mouse out hide line, circles and text
    mouseG.style("opacity", "0");
  })
  .on("mouseover", function() { // on mouse in show line, circles and text
    mouseG.style("opacity", "1");
  })
  .on("mousemove", function(event) { // mouse moving over canvas
    var mouse = d3.pointer(event);
    mouseG.attr("transform", `translate(${mouse[0]} 0)`);
    mousePerLine.attr("transform", function(d, i) {
      // Interpolate this series' value at the hovered date between the
      // neighboring datapoints found by bisection.
      var xDate = x.invert(mouse[0]);
      var bisect = d3.bisector(function(d) { return d.time; }).right;
      var idx = bisect(d.values, xDate);
      var below = d.values[idx - (idx > 0)];
      var above = d.values[idx - (idx == d.values.length)];
      var interp = below.time == above.time ? 0 : (xDate - below.time) / (above.time - below.time);
      var val = d3.interpolateNumber(below.edges, above.edges)(interp);
      d3.select(this)
        .style("opacity", below.edges ? "1" : "0")
        .select("text")
        .text(tooltipFormatter(below.edges));
      mouseDate.text(d3.timeFormat("%b %-d")(xDate));
      return `translate(0 ${y(val)})`;
    });
  });
</script>
</body>
</html>
================================================
FILE: src/lib.rs
================================================
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(non_camel_case_types)]
#![allow(
clippy::arc_with_non_send_sync, // https://github.com/rust-lang/rust-clippy/issues/11076
clippy::borrow_as_ptr,
clippy::borrowed_box,
clippy::cast_possible_truncation,
clippy::cast_precision_loss,
clippy::cast_ptr_alignment,
clippy::cast_sign_loss,
clippy::elidable_lifetime_names,
clippy::into_iter_without_iter,
clippy::items_after_statements,
clippy::iter_not_returning_iterator, // https://github.com/rust-lang/rust-clippy/issues/8285
clippy::let_underscore_untyped,
clippy::mismatching_type_param_order, // https://github.com/rust-lang/rust-clippy/issues/8962
clippy::missing_errors_doc,
clippy::missing_panics_doc,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::needless_lifetimes,
clippy::needless_pass_by_value,
clippy::significant_drop_in_scrutinee,
clippy::too_many_lines,
clippy::uninlined_format_args,
clippy::unseparated_literal_suffix
)]
#![allow(unknown_lints, mismatched_lifetime_syntaxes)]
#[macro_use]
mod stream;
pub mod arena;
pub(crate) mod collect;
mod communication;
pub mod dependency;
pub mod feature;
pub(crate) mod hint;
pub mod id;
mod impls;
pub mod matrix;
pub(crate) mod max;
pub(crate) mod present;
pub mod timestamp;
pub mod version;
use crate::arena::Slice;
use crate::collect::{Collect, Emitter, ResultCollection};
use crate::dependency::DependencyKind;
use crate::feature::{
DefaultFeatures, FeatureEnables, FeatureId, FeatureIter, FeatureNames, VersionFeature,
};
use crate::hint::TypeHint;
use crate::id::{CrateId, DependencyId, QueryId, VersionId};
use crate::matrix::Matrix;
use crate::max::MaxByKey;
use crate::present::Present;
use crate::timestamp::{DateTime, Duration};
use crate::version::{Version, VersionReq};
use atomic_take::AtomicTake;
use differential_dataflow::input::InputSession;
use differential_dataflow::operators::arrange::{ArrangeByKey, ArrangeBySelf};
use differential_dataflow::operators::iterate::Variable;
use differential_dataflow::operators::{Join, JoinCore, Threshold};
use std::env;
use std::iter::once;
use std::net::TcpStream;
use std::ops::Deref;
use timely::communication::allocator::Process;
use timely::dataflow::operators::capture::EventWriter;
use timely::dataflow::scopes::Child;
use timely::dataflow::Scope;
use timely::logging::{BatchLogger, TimelyEvent};
use timely::order::Product;
use timely::progress::Timestamp;
use timely::worker::{Config as WorkerConfig, Worker};
/// In-memory subset of the crates.io database dump used for tallying.
#[derive(Default)]
pub struct DbDump {
    pub releases: Vec<Release>,
    pub dependencies: Vec<Dependency>,
    // interner for feature name strings
    pub features: FeatureNames,
}
/// One published version of one crate.
#[derive(Clone, Debug)]
pub struct Release {
    pub id: VersionId,
    pub crate_id: CrateId,
    // the version number
    pub num: Version,
    pub created_at: DateTime,
    // features this release declares and what each one enables
    pub features: Slice<FeatureEnables>,
}
/// One dependency edge declared by a release.
#[derive(Copy, Clone, Debug)]
pub struct Dependency {
    pub id: DependencyId,
    // the release declaring the dependency
    pub version_id: VersionId,
    // the crate depended upon
    pub crate_id: CrateId,
    pub req: VersionReq,
    pub feature_id: FeatureId,
    pub default_features: DefaultFeatures,
    // explicitly requested features of the target crate
    pub features: Slice<FeatureId>,
    pub kind: DependencyKind,
}
/// One command line query: a conjunction of predicates.
#[derive(Copy, Clone, Debug)]
pub struct Query {
    pub id: QueryId,
    pub predicates: Slice<Predicate>,
}
/// A crate, optionally restricted to versions matching a requirement.
#[derive(Copy, Clone, Debug)]
pub struct Predicate {
    pub crate_id: CrateId,
    // None means any version
    pub req: Option<VersionReq>,
}
/// Bundle handed to exactly one dataflow worker (see `run`).
#[derive(Default)]
struct Input {
    db_dump: DbDump,
    queries: Vec<Query>,
}
/// Execute the dataflow over the db dump and fold the per-query diff stream
/// into a Matrix: one row of running per-query totals per distinct timestamp.
pub fn run(db_dump: DbDump, jobs: usize, transitive: bool, queries: &[Query]) -> Matrix {
    let num_queries = queries.len();
    let queries = queries.to_owned();
    // AtomicTake lets exactly one worker claim the input; the rest take the
    // empty Default and contribute no records.
    let input = AtomicTake::new(Input { db_dump, queries });
    let collection = ResultCollection::<(QueryId, DateTime, isize)>::new();
    let results = collection.emitter();
    // One timely worker per job, communicating within this process.
    let allocators = Process::new_vector(jobs);
    let other = Box::new(());
    timely::communication::initialize_from(allocators, other, move |allocator| {
        let mut worker = Worker::new(WorkerConfig::default(), allocator);
        set_timely_worker_log(&worker);
        let mut queries = InputSession::<DateTime, Query, Present>::new();
        let mut releases = InputSession::<DateTime, Release, Present>::new();
        let mut dependencies = InputSession::<DateTime, Dependency, Present>::new();
        worker.dataflow(|scope| {
            dataflow(
                scope,
                &mut queries,
                &mut releases,
                &mut dependencies,
                transitive,
                &results,
            );
        });
        // Feed queries and dependencies in full, then releases in time order,
        // advancing the logical clock to each release's publication time.
        let input = input.take().unwrap_or_default();
        for query in input.queries {
            queries.update(query, Present);
        }
        queries.close();
        for dep in input.db_dump.dependencies {
            dependencies.update(dep, Present);
        }
        dependencies.close();
        for rel in input.db_dump.releases {
            releases.advance_to(rel.created_at);
            releases.update(rel, Present);
        }
        releases.close();
        // Drive the dataflow to completion.
        while worker.step_or_park(None) {}
    })
    .unwrap();
    // Accumulate (query, timestamp, diff) records, sorted by timestamp, into
    // running totals; emit a matrix row whenever the timestamp advances.
    let mut time = DateTime::minimum();
    let mut values = vec![0u32; num_queries];
    let mut matrix = Matrix::new(num_queries);
    collection.sort();
    for (i, (query_id, timestamp, diff)) in collection.into_iter().enumerate() {
        if timestamp > time {
            if i > 0 {
                matrix.push(time, values.clone());
            }
            time = timestamp;
        }
        let cell = &mut values[query_id.0 as usize];
        if diff > 0 {
            *cell += diff as u32;
        } else {
            // A running total dropping below zero indicates a logic error.
            *cell = cell.checked_sub(-diff as u32).expect("value went negative");
        }
    }
    // Flush the final row unless it duplicates the last row pushed (or is
    // all zeros with nothing pushed yet).
    if match matrix.iter().next_back() {
        Some((_timestamp, last)) => values != **last,
        None => values.iter().any(|&n| n != 0),
    } {
        matrix.push(time, values);
    }
    matrix
}
/// If the TIMELY_WORKER_LOG_ADDR environment variable is set, stream timely's
/// internal "timely" event log to that TCP address; otherwise do nothing.
fn set_timely_worker_log(worker: &Worker<Process>) {
    let Some(addr) = env::var_os("TIMELY_WORKER_LOG_ADDR") else {
        return;
    };
    let stream = TcpStream::connect(addr.to_str().unwrap()).unwrap_or_else(|err| {
        panic!(
            "Could not connect logging stream to {addr}: {err}",
            addr = addr.display(),
        )
    });
    worker.log_register().insert::<TimelyEvent, _>("timely", {
        let writer = EventWriter::new(stream);
        let mut logger = BatchLogger::new(writer);
        move |time, data| logger.publish_batch(time, data)
    });
}
fn dataflow(
scope: &mut Child<Worker<Process>, DateTime>,
queries: &mut InputSession<DateTime, Query, Present>,
releases: &mut InputSession<DateTime, Release, Present>,
dependencies: &mut InputSession<DateTime, Dependency, Present>,
transitive: bool,
results: &Emitter<(QueryId, DateTime, isize)>,
) {
type queries<'a> = stream![Query; Present];
let queries: queries = queries.to_collection(scope);
type releases<'a> = stream![Release; Present];
let releases: releases = releases.to_collection(scope);
type dependencies<'a> = stream![Dependency; Present];
let dependencies: dependencies = dependencies.to_collection(scope);
// the version ids and version numbers that exist of each crate
type releases_by_crate_id<'a> = stream![CrateId => (VersionId, Version); Present];
let releases_by_crate_id: releases_by_crate_id =
releases.map(|rel| (rel.crate_id, (rel.id, rel.num)));
let releases_by_crate_id = releases_by_crate_id.arrange_by_key();
// for each dependency spec, what release does it refer to currently?
type resolved<'a> = stream![(CrateId, VersionReq) => VersionId; isize];
let resolved: resolved = dependencies
.map(|dep| (dep.crate_id, dep.req))
.KV::<CrateId, VersionReq>()
.join_core(
&releases_by_crate_id,
|crate_id, req, (version_id, version)| {
req.matches(version)
.then(|| ((*crate_id, *req), (version.clone(), *version_id)))
},
)
.KV::<(CrateId, VersionReq), (Version, VersionId)>()
.max_by_key()
.KV::<(CrateId, VersionReq), (Version, VersionId)>()
.map(|((crate_id, req), (_version, version_id))| ((crate_id, req), version_id));
let resolved = resolved.arrange_by_key();
// full dependency graph across all versions of all crates
type dependency_edges<'a> = stream![VersionId => VersionId; isize];
let direct_dependency_edges: dependency_edges = dependencies
.map(|dep| ((dep.crate_id, dep.req), dep.version_id))
.KV::<(CrateId, VersionReq), VersionId>()
.join_core(
&resolved,
|(_crate_id, _req), from_version_id, to_version_id| {
once((*from_version_id, *to_version_id))
},
);
// releases that are the most recent of their crate
type most_recent_crate_version<'a> = stream![VersionId; isize];
let most_recent_crate_version: most_recent_crate_version = releases
.map(|rel| {
(
rel.crate_id,
(rel.num.pre.is_empty(), rel.created_at, rel.id),
)
})
.KV::<CrateId, (bool, DateTime, VersionId)>()
.max_by_key()
.KV::<CrateId, (bool, DateTime, VersionId)>()
.map(|(_crate_id, (_not_prerelease, _created_at, version_id))| version_id);
let most_recent_crate_version = most_recent_crate_version.arrange_by_self();
// releases that satisfy the predicate of each query
type match_releases<'a> = stream![VersionId => QueryId; Present];
let match_releases: match_releases = queries
.flat_map(|query| {
query
.predicates
.iter()
.map(move |pred| (pred.crate_id, (query.id, pred.req)))
})
.KV::<CrateId, (QueryId, Option<VersionReq>)>()
.join_core(
&releases_by_crate_id,
|_crate_id, (query_id, version_req), (version_id, version)| {
let matches = match version_req {
None => true,
Some(req) => req.matches(version),
};
matches.then_some((*version_id, *query_id))
},
);
// releases that contribute into the result of each query
type query_results<'a> = stream![VersionId => QueryId; isize];
let mut query_results: query_results = direct_dependency_edges
.join_core(&most_recent_crate_version, |edge_from, edge_to, ()| {
once((*edge_to, *edge_from))
})
.KV::<VersionId, VersionId>()
.join_map(&match_releases, |_edge_to, edge_from, query_id| {
(*edge_from, *query_id)
});
if transitive {
type dependency_edges<'a> = stream![VersionFeature => VersionFeature; isize];
// dependency edges arising from an entry under [dependencies]
let dep_dependency_edges: dependency_edges = dependencies
.flat_map(|dep| match dep.kind {
DependencyKind::Normal | DependencyKind::Build => Some((
(dep.crate_id, dep.req),
(
dep.version_id,
dep.feature_id,
dep.default_features,
dep.features,
),
)),
DependencyKind::Dev => None,
})
.KV::<(CrateId, VersionReq), (VersionId, FeatureId, DefaultFeatures, Slice<FeatureId>)>(
)
.join_core(
&resolved,
|(_crate_id, _req),
(version_id, feature_id, default_features, features),
resolved_version_id| {
let edge_from = VersionFeature {
version_id: *version_id,
feature_id: *feature_id,
};
let resolved_version_id = *resolved_version_id;
FeatureIter::new(*default_features, *features).map(move |feature_id| {
let edge_to = VersionFeature {
version_id: resolved_version_id,
feature_id,
};
(edge_from, edge_to)
})
},
);
// dependency edges from crate feature enabling other feature of same crate
let feature_intracrate_edges: dependency_edges = releases.explode(|rel| {
let version_id = rel.id;
let crate_id = rel.crate_id;
rel.features
.iter()
.flat_map(move |feature| {
let edge_from = VersionFeature {
version_id,
feature_id: feature.id,
};
feature
.enables
.into_iter()
.filter_map(move |crate_feature| {
if crate_feature.crate_id == crate_id {
let edge_to = VersionFeature {
version_id,
feature_id: crate_feature.feature_id,
};
Some((edge_from, edge_to))
} else {
None
}
})
.chain({
if feature.id == FeatureId::DEFAULT {
None
} else {
let edge_to = VersionFeature {
version_id,
feature_id: FeatureId::CRATE,
};
Some((edge_from, edge_to))
}
})
})
.chain({
let edge_from = VersionFeature {
version_id,
feature_id: FeatureId::DEFAULT,
};
let edge_to = VersionFeature {
version_id,
feature_id: FeatureId::CRATE,
};
once((edge_from, edge_to))
})
.map(|(edge_from, edge_to)| ((edge_from, edge_to), 1))
});
// dependency edges from crate feature enabling feature of other crate
let feature_dependency_edges: dependency_edges = releases
.flat_map(|rel| {
let version_id = rel.id;
let crate_id = rel.crate_id;
rel.features.into_iter().flat_map(move |feature| {
// TODO: also handle `weak_enables`
// https://github.com/dtolnay/cargo-tally/issues/56
feature
.enables
.into_iter()
.filter_map(move |crate_feature| {
if crate_feature.crate_id == crate_id {
None
} else {
Some((
(version_id, crate_feature.crate_id),
(feature.id, crate_feature.feature_id),
))
}
})
})
})
.KV::<(VersionId, CrateId), (FeatureId, FeatureId)>()
.join_map(
&dependencies
.map(|dep| ((dep.version_id, dep.crate_id), dep.req))
.KV::<(VersionId, CrateId), VersionReq>(),
|(version_id, crate_id), (from_feature, to_feature), req| {
((*crate_id, *req), (*version_id, *from_feature, *to_feature))
},
)
.KV::<(CrateId, VersionReq), (VersionId, FeatureId, FeatureId)>()
.join_core(
&resolved,
|(_crate_id, _req),
(from_version_id, from_feature_id, to_feature_id),
to_version_id| {
let edge_from = VersionFeature {
version_id: *from_version_id,
feature_id: *from_feature_id,
};
let edge_to = VersionFeature {
version_id: *to_version_id,
feature_id: *to_feature_id,
};
Some((edge_from, edge_to))
},
);
// full dependency graph across all versions of all crates
let incoming_transitive_dependency_edges = dep_dependency_edges
.concat(&feature_intracrate_edges)
.concat(&feature_dependency_edges)
.KV::<VersionFeature, VersionFeature>()
.map_in_place(|edge| {
let (edge_from, edge_to) = *edge;
*edge = (edge_to, edge_from);
})
.KV::<VersionFeature, VersionFeature>()
.arrange_by_key();
// fixed point of transitive dependencies graph
type addend_transitive_releases<'a> = stream![VersionId => QueryId; isize];
let addend_transitive_releases: addend_transitive_releases = scope
.iterative::<u16, _, _>(|nested| {
let match_releases = match_releases
.KV::<VersionId, QueryId>()
.explode(|(version_id, query_id)| {
let version_feature = VersionFeature {
version_id,
feature_id: FeatureId::CRATE,
};
once(((version_feature, query_id), 1))
})
.KV::<VersionFeature, QueryId>()
.enter(nested);
let summary = Product::new(Duration::default(), 1);
let variable = Variable::new_from(match_releases, summary);
let result = variable
.deref()
.KV::<VersionFeature, QueryId>()
.join_core(
&incoming_transitive_dependency_edges.enter(nested),
|_edge_to, query_id, edge_from| Some((*edge_from, *query_id)),
)
.KV::<VersionFeature, QueryId>()
.concat(&variable)
.KV::<VersionFeature, QueryId>()
.distinct();
variable.set(&result).leave()
})
.KV::<VersionFeature, QueryId>()
.map(|(version_feature, query_id)| (version_feature.version_id, query_id));
query_results = addend_transitive_releases
.join_core(&most_recent_crate_version, |version_id, query_id, ()| {
Some((*version_id, *query_id))
})
.KV::<VersionId, QueryId>()
.concat(&query_results);
}
query_results
.distinct()
.map(|(_version_id, query_id)| query_id)
.consolidate()
.collect_into(results);
}
================================================
FILE: src/load.rs
================================================
use crate::cratemap::CrateMap;
use crate::user::User;
use anyhow::{bail, Result};
use cargo_tally::arena::Slice;
use cargo_tally::dependency::DependencyKind;
use cargo_tally::feature::{
CrateFeature, DefaultFeatures, FeatureEnables, FeatureId, FeatureNames,
};
use cargo_tally::id::{CrateId, DependencyId, VersionId};
use cargo_tally::timestamp::DateTime;
use cargo_tally::version::{Version, VersionReq};
use cargo_tally::{DbDump, Dependency, Release};
use db_dump::crate_owners::OwnerId;
use std::cell::RefCell;
use std::collections::{BTreeMap as Map, BTreeSet as Set};
use std::mem;
use std::path::Path;
/// Reads a crates.io database dump at `path` and lowers it into the in-memory
/// `DbDump` consumed by the dataflow, plus a `CrateMap` for crate-name and
/// owner lookups.
///
/// Works in two passes: pass 1 streams the dump rows into flat vectors,
/// interning feature and crate-name strings through `FeatureNames`; pass 2
/// resolves those interned names to real `CrateId`s, which can only be done
/// once the whole dump (including dependency renames) has been read.
pub(crate) fn load(path: impl AsRef<Path>) -> Result<(DbDump, CrateMap)> {
    let mut crates = CrateMap::new();
    // GitHub login -> owner id, for users and (separately) teams.
    let mut users: Map<User, OwnerId> = Map::new();
    let mut teams: Map<User, OwnerId> = Map::new();
    // Owner id -> ids of the crates that account owns.
    let mut owners: Map<OwnerId, Vec<CrateId>> = Map::new();
    let mut releases: Vec<Release> = Vec::new();
    let mut dependencies: Vec<Dependency> = Vec::new();
    // Feature tables buffered per release, parallel to `releases`; the crate
    // ids inside them are resolved by pass 2 below.
    let mut release_features: Vec<Vec<(FeatureId, Vec<CrateFeature>, Vec<CrateFeature>)>> =
        Vec::new();
    // `package = "..."` renames: dependency id -> explicit name, and
    // (version, interned explicit name) -> the real crate id behind it.
    let mut dep_renames: Map<DependencyId, String> = Map::new();
    let mut dep_renames_resolve: Map<(VersionId, FeatureId), CrateId> = Map::new();
    // RefCell because multiple loader callbacks below need mutable access.
    let feature_names = RefCell::new(FeatureNames::new());
    db_dump::Loader::new()
        .crates(|row| {
            let crate_id = CrateId::from(row.id);
            crates.insert(crate_id, row.name);
        })
        .users(|row| {
            users.insert(User::new(row.gh_login), OwnerId::User(row.id));
        })
        .teams(|row| {
            // Team logins look like "github:org:team"; store them as "org/team".
            if let Some(team) = row.login.strip_prefix("github:") {
                if team.contains(':') {
                    let team = team.replace(':', "/");
                    teams.insert(User::new(team), OwnerId::Team(row.id));
                }
            }
        })
        .crate_owners(|row| {
            owners
                .entry(row.owner_id)
                .or_insert_with(Vec::new)
                .push(CrateId::from(row.crate_id));
        })
        .versions(|row| {
            // Yanked releases are ignored entirely.
            if row.yanked {
                return;
            }
            let crate_id = CrateId::from(row.crate_id);
            let mut features = Vec::new();
            if !row.features.is_empty() {
                let mut feature_names = feature_names.borrow_mut();
                for (feature, raw_enables) in &row.features {
                    let feature_id = feature_names.id(feature);
                    let mut enables = Vec::new();
                    let mut weak_enables = Vec::new();
                    for feature in raw_enables {
                        let crate_id;
                        let mut crate_feature_vec = &mut enables;
                        let mut feature = feature.as_str();
                        // "dep/feat" targets a feature of another crate;
                        // the "dep?/feat" spelling goes into the weak list.
                        if let Some(slash) = feature.find('/') {
                            let mut crate_name = &feature[..slash];
                            if let Some(crate_name_weak) = crate_name.strip_suffix('?') {
                                crate_name = crate_name_weak;
                                crate_feature_vec = &mut weak_enables;
                            }
                            // The crate *name* is interned into the crate-id
                            // slot for now; pass 2 swaps in the real CrateId.
                            crate_id = feature_names.id(crate_name);
                            feature = &feature[slash + 1..];
                        } else {
                            // Plain "feat": refers to this same crate.
                            crate_id = FeatureId::CRATE;
                        }
                        let feature_id = feature_names.id(feature);
                        crate_feature_vec.push(CrateFeature {
                            crate_id: CrateId(crate_id.0),
                            feature_id,
                        });
                    }
                    features.push((feature_id, enables, weak_enables));
                }
            }
            releases.push(Release {
                id: VersionId::from(row.id),
                crate_id,
                num: Version(row.num),
                created_at: DateTime::from(row.created_at),
                features: {
                    // Placeholder; the real slice is attached in pass 2.
                    release_features.push(features);
                    Slice::EMPTY
                },
            });
        })
        .dependencies(|row| {
            let dependency_id = DependencyId::from(row.id);
            let version_id = VersionId::from(row.version_id);
            let crate_id = CrateId::from(row.crate_id);
            // An optional dependency is gated behind a feature whose name is
            // not yet known (the dep may be renamed); patched up at the end.
            let feature_id = if row.optional {
                FeatureId::TBD
            } else {
                FeatureId::CRATE
            };
            let mut default_features = row.default_features;
            let mut features = Set::new();
            if !row.features.is_empty() {
                let mut feature_names = feature_names.borrow_mut();
                for feature in &row.features {
                    let feature_id = feature_names.id(feature);
                    // Folding "default" into the boolean keeps the feature
                    // set free of the special default feature.
                    if feature_id == FeatureId::DEFAULT {
                        default_features = true;
                    } else {
                        features.insert(feature_id);
                    }
                }
            }
            if let Some(explicit_name) = row.explicit_name {
                let mut feature_names = feature_names.borrow_mut();
                dep_renames_resolve
                    .insert((version_id, feature_names.id(&explicit_name)), crate_id);
                dep_renames.insert(dependency_id, explicit_name);
            }
            dependencies.push(Dependency {
                id: dependency_id,
                version_id,
                crate_id,
                req: VersionReq::from(row.req),
                feature_id,
                default_features: DefaultFeatures(default_features),
                features: Slice::from_iter(features),
                kind: DependencyKind::from(row.kind),
            });
        })
        .load(path)?;
    // Re-register a handful of deleted crates that are still depended upon.
    crate::mend::mend_crates(&mut crates);
    // Releases whose feature table is known to reference a nonexistent
    // crate; these are mapped back onto their own crate instead of bailing.
    let known_broken = [(crates.id("modbus"), &Version::new(0, 1, 0), "test-server")];
    let mut feature_names = mem::take(&mut *feature_names.borrow_mut());
    let mut feature_buffer = Vec::new();
    // Pass 2: resolve interned crate names inside feature tables to ids.
    for (release, mut features) in releases.iter_mut().zip(release_features) {
        for (feature, enables, weak_enables) in &mut features {
            for crate_features in [&mut *enables, &mut *weak_enables] {
                for feature in crate_features {
                    // During pass 1 the crate-id slot held an interned name.
                    let feature_id = FeatureId(feature.crate_id.0);
                    feature.crate_id = if feature_id == FeatureId::CRATE {
                        release.crate_id
                    } else if let Some(crate_id) =
                        dep_renames_resolve.get(&(release.id, feature_id))
                    {
                        // The feature refers to a renamed dependency.
                        *crate_id
                    } else if let Some(crate_id) = {
                        let name = feature_names.name(feature_id);
                        crates.id(name)
                    } {
                        crate_id
                    } else if known_broken.contains(&(
                        Some(release.crate_id),
                        &release.num,
                        feature_names.name(feature_id),
                    )) {
                        release.crate_id
                    } else {
                        bail!(
                            "{} v{} depends on {} which is not found",
                            crates.name(release.crate_id).unwrap(),
                            release.num,
                            feature_names.name(feature_id),
                        );
                    };
                }
            }
            feature_buffer.push(FeatureEnables {
                id: *feature,
                enables: Slice::new(enables),
                weak_enables: Slice::new(weak_enables),
            });
        }
        release.features = Slice::new(&feature_buffer);
        feature_buffer.clear();
    }
    // Resolve the gating feature of optional dependencies: the explicit
    // rename if one was recorded, otherwise the dependency's crate name.
    for dep in &mut dependencies {
        if dep.feature_id == FeatureId::TBD {
            dep.feature_id = feature_names.id(match dep_renames.get(&dep.id) {
                Some(explicit_name) => explicit_name,
                None => crates.name(dep.crate_id).unwrap(),
            });
        }
    }
    let mut db_dump = DbDump {
        releases,
        dependencies,
        features: feature_names,
    };
    crates.owners = owners;
    crates.users = users;
    // Teams share the user lookup namespace.
    crates.users.extend(teams);
    // Fill back in deleted releases that downstream crates still resolve to.
    crate::mend::mend_releases(&mut db_dump, &crates);
    Ok((db_dump, crates))
}
================================================
FILE: src/log.rs
================================================
use std::fmt;
use std::io::Write;
use termcolor::{Color, ColorSpec, StandardStream, WriteColor};
/// Colored log-line constructors for a terminal stream. Each method sets a
/// color on the stream and hands back a `LogStream` guard that resets the
/// color when dropped.
pub trait Log {
    /// Dimmed magenta, used for timing/diagnostic traces.
    fn trace(&mut self) -> LogStream;
    /// Yellow, used for nonfatal warnings.
    fn warning(&mut self) -> LogStream;
    /// Writes a bold red "error:" prefix; subsequent text is uncolored.
    fn error(&mut self) -> LogStream;
    /// Plain red text.
    fn red(&mut self) -> LogStream;
}
impl Log for StandardStream {
    /// Switches the stream to dimmed magenta for trace output; the returned
    /// guard resets the color when dropped.
    fn trace(&mut self) -> LogStream {
        let mut spec = ColorSpec::new();
        spec.set_fg(Some(Color::Magenta));
        spec.set_dimmed(true);
        let _ = self.set_color(&spec);
        LogStream(self)
    }
    /// Switches the stream to yellow for warning output.
    fn warning(&mut self) -> LogStream {
        let mut spec = ColorSpec::new();
        spec.set_fg(Some(Color::Yellow));
        let _ = self.set_color(&spec);
        LogStream(self)
    }
    /// Emits a bold red "error:" prefix, then resets so the message body
    /// that follows is uncolored.
    fn error(&mut self) -> LogStream {
        let mut spec = ColorSpec::new();
        spec.set_fg(Some(Color::Red));
        spec.set_bold(true);
        let _ = self.set_color(&spec);
        let _ = write!(self, "error:");
        let _ = self.reset();
        let _ = write!(self, " ");
        LogStream(self)
    }
    /// Switches the stream to plain red.
    fn red(&mut self) -> LogStream {
        let mut spec = ColorSpec::new();
        spec.set_fg(Some(Color::Red));
        let _ = self.set_color(&spec);
        LogStream(self)
    }
}
/// Guard over a `StandardStream`: writes pass straight through, and the
/// stream's color is reset when the guard is dropped.
pub struct LogStream<'a>(&'a mut StandardStream);
impl<'a> LogStream<'a> {
    /// Forwards formatted output to the underlying stream, ignoring I/O
    /// errors (logging is best-effort).
    pub fn write_fmt(&mut self, args: fmt::Arguments) {
        let _ = self.0.write_fmt(args);
    }
}
impl<'a> Drop for LogStream<'a> {
    fn drop(&mut self) {
        // Restore the stream's default color once the log line is finished.
        let _ = self.0.reset();
    }
}
================================================
FILE: src/macros.rs
================================================
// Compile-time assertion that two constant expressions are equal: the two
// array lengths must match or the program fails to compile.
macro_rules! const_assert_eq {
    ($left:expr, $right:expr) => {
        const _: [(); $left as usize] = [(); $right as usize];
    };
}
// Compile-time assertion that every given constant expression is true
// (the conditions are and-ed together and compared against `true`).
macro_rules! const_assert {
    ($($cond:expr),* $(,)?) => {
        const_assert_eq!($($cond)&&*, true);
    };
}
// Builds a constant `Version` from a `MAJOR.MINOR.PATCH` literal, e.g.
// `version!(0.1.0)`. Rust tokenizes `0.1` as a single float-looking token,
// so it arrives as `$major_minor`; its spelling is validated at compile time
// to be exactly one digit, a dot, and one digit.
macro_rules! version {
    ($major_minor:tt . $patch:tt) => {{
        const major_minor: &'static [u8] = stringify!($major_minor).as_bytes();
        const_assert! {
            major_minor.len() == 3,
            major_minor[0] >= b'0' && major_minor[0] <= b'9',
            major_minor[1] == b'.',
            major_minor[2] >= b'0' && major_minor[2] <= b'9',
        }
        cargo_tally::version::Version(semver::Version {
            major: (major_minor[0] - b'0') as u64,
            minor: (major_minor[2] - b'0') as u64,
            patch: $patch,
            pre: semver::Prerelease::EMPTY,
            build: semver::BuildMetadata::EMPTY,
        })
    }};
}
// Builds a constant caret requirement from a `^MAJOR.MINOR` literal, e.g.
// `version_req!(^0.1)`. The single-digit `digit.digit` spelling is validated
// at compile time, the same way `version!` does it.
macro_rules! version_req {
    (^ $major_minor:tt) => {{
        const major_minor: &'static [u8] = stringify!($major_minor).as_bytes();
        const_assert! {
            major_minor.len() == 3,
            major_minor[0] >= b'0' && major_minor[0] <= b'9',
            major_minor[1] == b'.',
            major_minor[2] >= b'0' && major_minor[2] <= b'9',
        }
        const comparators: &'static [semver::Comparator] = &[semver::Comparator {
            op: semver::Op::Caret,
            major: (major_minor[0] - b'0') as u64,
            minor: Some((major_minor[2] - b'0') as u64),
            patch: None,
            pre: semver::Prerelease::EMPTY,
        }];
        cargo_tally::version::VersionReq {
            comparators: cargo_tally::arena::Slice::from(comparators),
        }
    }};
}
// Builds a constant `DateTime` from input like
// `datetime!(18 Oct 2017 13:53:11)`, with compile-time range checks on each
// component.
macro_rules! datetime {
    ($day:tt $month:ident $year:tt $hour:tt : $min:tt : $sec:tt) => {{
        const_assert! {
            $day >= 1 && $day <= 31,
            $year >= 2014,
            $hour >= 0 && $hour <= 23,
            $min >= 0 && $min <= 59,
            // NOTE(review): upper bound 60 rather than 59 — presumably to
            // admit a leap second; confirm from_hms_opt accepts it.
            $sec >= 0 && $sec <= 60,
        }
        cargo_tally::timestamp::DateTime::new(
            chrono::NaiveDate::from_ymd_opt($year, month_number!($month), $day).unwrap(),
            chrono::NaiveTime::from_hms_opt($hour, $min, $sec).unwrap(),
        )
    }};
}
// Maps a three-letter English month token to its 1-based month number,
// for use by the `datetime!` macro.
#[rustfmt::skip]
#[allow(unknown_lints, unused_macro_rules)]
macro_rules! month_number {
    (Jan) => { 1 };
    (Feb) => { 2 };
    (Mar) => { 3 };
    (Apr) => { 4 };
    (May) => { 5 };
    (Jun) => { 6 };
    (Jul) => { 7 };
    (Aug) => { 8 };
    (Sep) => { 9 };
    (Oct) => { 10 };
    (Nov) => { 11 };
    (Dec) => { 12 };
}
================================================
FILE: src/main.rs
================================================
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(non_upper_case_globals)]
#![allow(
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_precision_loss,
clippy::collapsible_else_if,
clippy::elidable_lifetime_names,
clippy::expl_impl_clone_on_copy,
clippy::let_underscore_untyped,
clippy::manual_range_contains,
clippy::map_clone,
clippy::module_name_repetitions,
clippy::needless_lifetimes,
clippy::redundant_else,
clippy::single_match_else,
clippy::too_many_lines,
clippy::type_complexity,
clippy::unconditional_recursion, // https://github.com/rust-lang/rust-clippy/issues/12133
clippy::uninlined_format_args,
clippy::unwrap_or_default,
clippy::zero_prefixed_literal
)]
#![allow(unknown_lints, mismatched_lifetime_syntaxes)]
#[macro_use]
mod macros;
mod alloc;
mod args;
mod clean;
mod cratemap;
mod cratename;
mod filter;
mod load;
mod log;
mod mend;
mod query;
mod render;
mod total;
mod trace;
mod user;
use crate::load::load;
use crate::log::Log;
use crate::total::Total;
use anyhow::Result;
use std::io::{self, IsTerminal, Write};
use std::process;
use std::time::Instant;
use termcolor::{ColorChoice, StandardStream};
cargo_subcommand_metadata::description!(
"Draw graphs of the number of dependencies on a crate over time"
);
/// Entry point: runs the subcommand and reports any error on stderr with a
/// nonzero exit status.
fn main() {
    let mut stderr = StandardStream::stderr(ColorChoice::Auto);
    match try_main(&mut stderr) {
        Ok(()) => {}
        Err(err) => {
            writeln!(stderr.error(), "{}", err);
            process::exit(1);
        }
    }
}
/// Body of the subcommand: loads the database dump, runs the dataflow over
/// the parsed queries, prints the result matrix to stdout, and — when stdout
/// is a terminal — renders a graph and opens it.
fn try_main(stderr: &mut StandardStream) -> Result<()> {
    let opt = args::parse();
    if !opt.db.is_file() {
        write!(stderr.error(), "Database dump file does not exist: ");
        write!(stderr.red(), "{}", opt.db.display());
        let _ = writeln!(
            stderr,
            "\nDownload one from https://static.crates.io/db-dump.tar.gz",
        );
        process::exit(1);
    }
    // Warn up front if the machine likely has too little memory for the
    // chosen mode; total_memory of 0 means sysinfo could not determine it.
    let mut sysinfo = sysinfo::System::new();
    sysinfo.refresh_memory();
    let total_memory = sysinfo.total_memory();
    let (min_memory, advised) = if opt.transitive {
        (10 * 1024 * 1024 * 1024, "12 GB")
    } else {
        (7 * 1024 * 1024 * 1024, "8 GB")
    };
    if total_memory < min_memory && total_memory > 0 {
        writeln!(
            stderr.warning(),
            "warning: running with <{advised} memory is not advised.",
        );
    }
    let stdout_isatty = io::stdout().is_terminal();
    let stderr_isatty = io::stderr().is_terminal();
    let instant = Instant::now();
    let (mut db_dump, crates) = crate::load(&opt.db)?;
    crate::filter::filter(&mut db_dump, &crates, &opt.exclude);
    // Releases are consumed in chronological order downstream.
    db_dump.releases.sort_by_key(|v| v.created_at);
    crate::clean::clean(&mut db_dump, &crates);
    // Only needed when relative (fraction-of-total) output was requested.
    let total = opt.relative.then(|| Total::index(&db_dump.releases));
    if stderr_isatty {
        writeln!(stderr.trace(), "load time: {:.2?}", instant.elapsed());
    }
    let query_strings = opt.queries.iter().map(String::as_str);
    let queries = query::parse(query_strings, &crates)?;
    let instant = Instant::now();
    let results = cargo_tally::run(db_dump, opt.jobs, opt.transitive, &queries);
    if stderr_isatty {
        writeln!(stderr.trace(), "dataflow time: {:.2?}", instant.elapsed());
    }
    let _ = stderr.flush();
    let len = results.len();
    let stdout = io::stdout();
    let mut stdout = stdout.lock();
    for (i, (timestamp, data)) in results.iter().enumerate() {
        // On a terminal, elide the middle of long output: print the first
        // 10 and last 10 rows with "..." between them.
        if stdout_isatty && 10 + i == len && len > 20 {
            let _ = writeln!(stdout, "...");
        }
        if !stdout_isatty || i < 10 || 10 + i >= len {
            if let Some(total) = &total {
                // Print fractions of the total crate count at this instant.
                let total = total.eval(timestamp);
                let _ = writeln!(stdout, "{:?} {:?}", timestamp, data / total);
            } else {
                let _ = writeln!(stdout, "{:?} {:?}", timestamp, data);
            }
        }
    }
    let _ = stdout.flush();
    let graph_path = if stdout_isatty {
        if results.is_empty() {
            writeln!(stderr.red(), "zero results");
            None
        } else {
            let labels = opt
                .queries
                .iter()
                .map(|query| query::format(query, &crates))
                .collect::<Vec<_>>();
            let graph_path = render::graph(
                opt.title.as_deref(),
                opt.transitive,
                &results,
                &labels,
                total.as_ref(),
            )?;
            Some(graph_path)
        }
    } else {
        None
    };
    if stderr_isatty {
        // Allocator statistics (see src/alloc.rs).
        writeln!(stderr.trace(), "{}", alloc::stat());
    }
    if let Some(path) = graph_path {
        writeln!(stderr.trace(), "graph written to {}", path.display());
        // Best effort: hand the rendered graph to the system opener.
        let _ = opener::open(&path);
    }
    Ok(())
}
================================================
FILE: src/matrix.rs
================================================
use crate::timestamp::DateTime;
use ref_cast::RefCast;
use std::fmt::{self, Debug};
use std::iter::Copied;
use std::ops::{Deref, Div, Index};
use std::slice;
/// A time series of query results: one row per timestamp, each row holding
/// one `u32` count per query.
pub struct Matrix {
    // Number of columns in every row (one per query).
    queries: usize,
    rows: Vec<(DateTime, Vec<u32>)>,
}
/// One row of a `Matrix`: the per-query counts at a single timestamp.
/// `#[repr(transparent)]` + `RefCast` let a borrowed `[u32]` be viewed as a
/// `&Row` without copying.
#[derive(RefCast)]
#[repr(transparent)]
pub struct Row([u32]);
impl Matrix {
    /// Creates an empty matrix with `queries` columns per row.
    pub(crate) fn new(queries: usize) -> Self {
        let rows = Vec::new();
        Matrix { queries, rows }
    }
    /// Number of columns (queries) in each row.
    pub fn width(&self) -> usize {
        self.queries
    }
    /// True when no rows have been pushed yet.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Number of rows (timestamps).
    pub fn len(&self) -> usize {
        self.rows.len()
    }
    /// Iterates rows in insertion order as `(timestamp, row)` pairs.
    pub fn iter(&self) -> Iter {
        Iter(self.rows.iter())
    }
    /// Appends a row of per-query counts observed at `timestamp`.
    pub(crate) fn push(&mut self, timestamp: DateTime, data: Vec<u32>) {
        self.rows.push((timestamp, data));
    }
}
impl<'a> IntoIterator for &'a Matrix {
    type Item = (DateTime, &'a Row);
    type IntoIter = Iter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// Iterator over matrix rows, yielding `(timestamp, row)` pairs.
pub struct Iter<'a>(slice::Iter<'a, (DateTime, Vec<u32>)>);
impl<'a> Iterator for Iter<'a> {
    type Item = (DateTime, &'a Row);
    fn next(&mut self) -> Option<Self::Item> {
        self.0
            .next()
            // Reinterpret the backing Vec<u32> as a borrowed Row (zero-copy).
            .map(|(timestamp, data)| (*timestamp, Row::ref_cast(data)))
    }
}
impl<'a> DoubleEndedIterator for Iter<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.0
            .next_back()
            .map(|(timestamp, data)| (*timestamp, Row::ref_cast(data)))
    }
}
impl Index<usize> for Row {
    type Output = u32;
    /// Count for query `i` in this row.
    fn index(&self, i: usize) -> &Self::Output {
        &self.0[i]
    }
}
impl<'a> IntoIterator for &'a Row {
    type Item = u32;
    type IntoIter = Copied<slice::Iter<'a, u32>>;
    /// Iterates the counts by value.
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter().copied()
    }
}
impl Deref for Row {
    type Target = [u32];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// A row paired with a divisor, produced by `&Row / u32`; its `Debug` impl
/// prints each count as a fraction of `total`.
pub struct RelativeRow<'a> {
    row: &'a Row,
    total: u32,
}
impl<'a> Div<u32> for &'a Row {
    type Output = RelativeRow<'a>;
    /// Defers the division: just records the divisor for display time.
    fn div(self, rhs: u32) -> Self::Output {
        RelativeRow {
            row: self,
            total: rhs,
        }
    }
}
impl Debug for Row {
    /// Formats the row as a list of raw counts, e.g. `[1, 2, 3]`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let mut list = formatter.debug_list();
        list.entries(&self.0);
        list.finish()
    }
}
impl<'a> Debug for RelativeRow<'a> {
    /// Formats each count divided by the stored total, as `f32` fractions.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let fractions = self
            .row
            .into_iter()
            .map(|count| count as f32 / self.total as f32);
        formatter.debug_list().entries(fractions).finish()
    }
}
================================================
FILE: src/max.rs
================================================
use crate::hint::TypeHint;
use crate::present::Present;
use differential_dataflow::collection::Collection;
use differential_dataflow::difference::{Multiply, Semigroup};
use differential_dataflow::lattice::Lattice;
use differential_dataflow::operators::CountTotal;
use differential_dataflow::ExchangeData;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::once;
use timely::dataflow::Scope;
use timely::order::TotalOrder;
/// Reduces a keyed differential collection to the maximum value per key.
pub(crate) trait MaxByKey<G, K, V, R>
where
    G: Scope,
{
    /// For each key, keeps only the greatest associated value.
    fn max_by_key(&self) -> Collection<G, (K, V), isize>;
}
impl<G, K, V, R> MaxByKey<G, K, V, R> for Collection<G, (K, V), R>
where
    G: Scope,
    K: Clone + ExchangeData + Hash,
    V: Clone + Ord + ExchangeData + Debug,
    R: Semigroup,
    Max<V>: Multiply<R, Output = Max<V>>,
    G::Timestamp: TotalOrder + Lattice,
{
    fn max_by_key(&self) -> Collection<G, (K, V), isize> {
        // Move each value into the difference position as a `Max` semigroup
        // element; `count_total` then accumulates per key (accumulation for
        // `Max` keeps the larger value — see its `Semigroup` impl), and the
        // final map unwraps the winner back into the data position.
        self.explode(|(key, value)| once((key, Max { value })))
            .T::<K>()
            .count_total()
            .KV::<K, Max<V>>()
            .map(|(key, max)| (key, max.value))
    }
}
/// Difference type whose semigroup "addition" keeps the larger value,
/// letting `count_total` compute a per-key maximum.
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub(crate) struct Max<T> {
    value: T,
}
impl<T> Multiply<Present> for Max<T> {
    type Output = Self;
    /// Multiplying by `Present` (a unit-like difference) leaves the value
    /// unchanged.
    fn multiply(self, _rhs: &Present) -> Self::Output {
        self
    }
}
impl<T> Semigroup for Max<T>
where
    T: Ord + Clone + Debug + 'static,
{
    /// Accumulation takes the pairwise maximum.
    fn plus_equals(&mut self, rhs: &Self) {
        if rhs.value > self.value {
            self.value.clone_from(&rhs.value);
        }
    }
    /// Never report zero: an accumulated `Max` always carries a meaningful
    /// value and must not be discarded.
    fn is_zero(&self) -> bool {
        false
    }
}
================================================
FILE: src/mend.rs
================================================
//! Fill back in some deleted releases that cause nontrivial number of
//! dependencies downstream to fail to resolve.
use crate::cratemap::CrateMap;
use cargo_tally::arena::Slice;
use cargo_tally::dependency::DependencyKind;
use cargo_tally::feature::{CrateFeature, DefaultFeatures, FeatureEnables, FeatureId};
use cargo_tally::id::{CrateId, DependencyId, VersionId};
use cargo_tally::{DbDump, Dependency, Release};
use std::collections::BTreeSet as Set;
/// Registers crates that were deleted from crates.io but are still referenced
/// by dependencies in the dump, assigning each the smallest unused crate id.
pub(crate) fn mend_crates(crates: &mut CrateMap) {
    const RESTORED: [&str; 10] = [
        "futures",
        "git-version",
        "lazy_static",
        "partial-io",
        "quickcheck",
        "tokio-core",
        "tokio-io",
        "vela-utils",
        "xcm",
        "xcm-executor",
    ];
    let mut candidate = CrateId(1);
    for crate_name in RESTORED {
        if crates.id(crate_name).is_some() {
            // Already present in the dump; nothing to restore.
            continue;
        }
        // Advance to the next id not already taken.
        while crates.name(candidate).is_some() {
            candidate.0 += 1;
        }
        crates.insert(candidate, crate_name.to_owned());
    }
}
pub(crate) fn mend_releases(db_dump: &mut DbDump, crates: &CrateMap) {
let mut used_version_ids = Set::new();
let mut used_version_numbers = Set::new();
for rel in &db_dump.releases {
used_version_ids.insert(rel.id);
used_version_numbers.insert((rel.crate_id, rel.num.clone()));
}
let mut used_dependency_ids = Set::new();
for dep in &db_dump.dependencies {
used_dependency_ids.insert(dep.id);
}
let mut next_version_id = VersionId(0);
let mut next_version_id = || {
while !used_version_ids.insert(next_version_id) {
next_version_id.0 += 1;
}
next_version_id
};
let mut next_dependency_id = DependencyId(0);
let mut next_dependency_id = || {
while !used_dependency_ids.insert(next_dependency_id) {
next_dependency_id.0 += 1;
}
next_dependency_id
};
let releases = &mut db_dump.releases;
let mut push_release = |rel: Release| {
assert!(used_version_numbers.insert((rel.crate_id, rel.num.clone())));
releases.push(rel);
};
{
let crate_id = crates.id("git-version").unwrap();
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.1.0),
created_at: datetime!(18 Oct 2017 13:53:11),
features: Slice::EMPTY,
});
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.1.1),
created_at: datetime!(18 Oct 2017 13:55:40),
features: Slice::EMPTY,
});
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.1.2),
created_at: datetime!(18 Oct 2017 13:57:15),
features: Slice::EMPTY,
});
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.2.0),
created_at: datetime!(5 Apr 2018 09:14:16),
features: Slice::EMPTY,
});
}
{
let crate_id = crates.id("partial-io").unwrap();
let features = Slice::new(&[FeatureEnables {
id: db_dump.features.id("tokio"),
enables: Slice::new(&[
CrateFeature {
crate_id,
feature_id: db_dump.features.id("tokio-io"),
},
CrateFeature {
crate_id,
feature_id: db_dump.features.id("futures"),
},
]),
weak_enables: Slice::new(&[]),
}]);
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.1.0),
created_at: datetime!(26 May 2017 02:38:58),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.1.1),
created_at: datetime!(27 May 2017 00:56:37),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.0),
created_at: datetime!(30 May 2017 21:01:28),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.1),
created_at: datetime!(30 May 2017 21:47:41),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.2),
created_at: datetime!(12 Jun 2017 05:26:52),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.3),
created_at: datetime!(20 Jul 2017 20:01:22),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.4),
created_at: datetime!(19 Aug 2017 23:37:51),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.2.5),
created_at: datetime!(18 Nov 2017 02:26:25),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^0.2),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.4),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
push_release({
let release = Release {
id: next_version_id(),
crate_id,
num: version!(0.3.0),
created_at: datetime!(12 Jan 2018 22:15:15),
features,
};
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("futures").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("futures"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("lazy_static").unwrap(),
req: version_req!(^1.0),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.6),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("quickcheck").unwrap(),
req: version_req!(^0.6),
feature_id: db_dump.features.id("quickcheck"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-core").unwrap(),
req: version_req!(^0.1),
feature_id: FeatureId::CRATE,
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Dev,
});
db_dump.dependencies.push(Dependency {
id: next_dependency_id(),
version_id: release.id,
crate_id: crates.id("tokio-io").unwrap(),
req: version_req!(^0.1),
feature_id: db_dump.features.id("tokio-io"),
default_features: DefaultFeatures(true),
features: Slice::EMPTY,
kind: DependencyKind::Normal,
});
release
});
}
{
let crate_id = crates.id("xcm").unwrap();
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.0.0),
created_at: datetime!(9 Mar 2021 05:51:34),
features: Slice::EMPTY,
});
}
{
let crate_id = crates.id("xcm-executor").unwrap();
push_release(Release {
id: next_version_id(),
crate_id,
num: version!(0.0.0),
created_at: datetime!(9 Mar 2021 06:21:39),
features: Slice::EMPTY,
});
}
}
================================================
FILE: src/present.rs
================================================
use differential_dataflow::difference::{Multiply, Semigroup};
/// Zero-sized "difference" type for differential dataflow collections in
/// which we only care whether a record exists, not how many copies of it.
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub(crate) struct Present;
impl Semigroup for Present {
    // Present + Present = Present; there is nothing to accumulate.
    fn plus_equals(&mut self, rhs: &Present) {
        let _ = rhs;
    }
    // Never zero: a Present record is always meaningful, so the dataflow
    // must not compact it away.
    fn is_zero(&self) -> bool {
        false
    }
}
impl Multiply<Present> for Present {
    type Output = Present;
    // Present * Present = Present (joining two presence-only collections).
    fn multiply(self, rhs: &Present) -> Self::Output {
        let _ = rhs;
        Present
    }
}
impl Multiply<Present> for isize {
    type Output = isize;
    // A count multiplied by presence keeps the count.
    fn multiply(self, rhs: &Present) -> Self::Output {
        let _ = rhs;
        self
    }
}
impl Multiply<isize> for Present {
    type Output = isize;
    // Symmetric case: presence times a count yields the count.
    fn multiply(self, rhs: &isize) -> Self::Output {
        *rhs
    }
}
================================================
FILE: src/query.rs
================================================
use crate::cratemap::CrateMap;
use crate::user::UserQuery;
use anyhow::{bail, format_err, Error, Result};
use cargo_tally::arena::Slice;
use cargo_tally::id::QueryId;
use cargo_tally::version::VersionReq;
use cargo_tally::{Predicate, Query};
use ref_cast::RefCast;
use std::fmt::{self, Display};
use std::str::{FromStr, Split};
// for example &["serde:1.0", "anyhow:^1.0 + thiserror"]
/// Parse each command-line query string into a `Query`, assigning
/// sequential `QueryId`s in input order.
///
/// Returns an error naming the offending query string if any of its
/// predicates fails to parse.
pub fn parse<'a>(
    queries: impl IntoIterator<Item = &'a str>,
    crates: &CrateMap,
) -> Result<Vec<Query>> {
    queries
        .into_iter()
        .enumerate()
        .map(|(i, query)| {
            // Query ids are a u8; more than 256 queries panics here rather
            // than being reported gracefully.
            let id = QueryId(u8::try_from(i).unwrap())
            match parse_predicates(query, crates) {
                Ok(predicates) => Ok(Query { id, predicates }),
                Err(err) => bail!("failed to parse query {:?}: {}", query, err),
            }
        })
        .collect()
}
/// Expand one query string into its predicate list.
///
/// A `@user` (or `@org/team`) predicate expands to one predicate per crate
/// owned by that user or team, with no version requirement; a plain crate
/// predicate is pushed through as-is. Fails if the named user owns nothing.
fn parse_predicates(string: &str, crates: &CrateMap) -> Result<Slice<Predicate>> {
    let mut predicates = Vec::new();
    for predicate in IterPredicates::new(string, crates) {
        let predicate = predicate?;
        match predicate {
            RawPredicate::Crate(predicate) => predicates.push(predicate),
            RawPredicate::User(username) => {
                let Some(user_id) = crates.users.get(username) else {
                    let kind = if username.is_team() { "team" } else { "user" };
                    bail!("no crates owned by {} @{}", kind, username);
                };
                // A known user with no owners entry contributes nothing
                // rather than erroring.
                predicates.extend(
                    crates
                        .owners
                        .get(user_id)
                        .map(Vec::as_slice)
                        .unwrap_or_default()
                        .iter()
                        .map(|&crate_id| Predicate {
                            crate_id,
                            req: None,
                        }),
                );
            }
        }
    }
    Ok(Slice::new(&predicates))
}
/// Render a query string in normalized form, resolving crate and user names
/// to their canonical capitalization.
pub fn format(query: &str, crates: &CrateMap) -> String {
    DisplayQuery { query, crates }.to_string()
}
/// Display adapter pairing a raw query string with the crate map needed to
/// resolve names while formatting.
struct DisplayQuery<'a> {
    query: &'a str,
    crates: &'a CrateMap,
}
impl<'a> Display for DisplayQuery<'a> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // '+'-separated predicates from the input are joined with " or ".
        for (i, predicate) in IterPredicates::new(self.query, self.crates).enumerate() {
            if i > 0 {
                formatter.write_str(" or ")?;
            }
            // The query was already validated before display, so re-parsing
            // here is expected to succeed.
            let predicate = predicate.unwrap();
            match predicate {
                RawPredicate::Crate(predicate) => {
                    // Print the crate's canonical registered name.
                    let original_name = self.crates.name(predicate.crate_id).unwrap();
                    formatter.write_str(original_name)?;
                    if let Some(req) = predicate.req {
                        write!(formatter, ":{}", req)?;
                    }
                }
                RawPredicate::User(username) => {
                    // Look up the stored key so the user's canonical
                    // capitalization is printed, not what was typed.
                    let (username, _user_id) = self.crates.users.get_key_value(username).unwrap();
                    write!(formatter, "@{}", username)?;
                }
            }
        }
        Ok(())
    }
}
/// One parsed element of a query: either a crate (with optional version
/// requirement) or a user/team reference yet to be expanded.
enum RawPredicate<'a> {
    Crate(Predicate),
    User(&'a UserQuery),
}
/// Iterator over the '+'-separated predicates of a single query string.
struct IterPredicates<'a> {
    split: Split<'a, char>,
    crates: &'a CrateMap,
}
impl<'a> IterPredicates<'a> {
    fn new(query: &'a str, crates: &'a CrateMap) -> Self {
        IterPredicates {
            split: query.split('+'),
            crates,
        }
    }
}
impl<'a> Iterator for IterPredicates<'a> {
    type Item = Result<RawPredicate<'a>>;
    fn next(&mut self) -> Option<Self::Item> {
        // Each '+'-separated chunk is one predicate; surrounding whitespace
        // is insignificant.
        let predicate = self.split.next()?.trim();
        // "@name": a user or team reference, resolved to crates later.
        if let Some(username) = predicate.strip_prefix('@') {
            return Some(Ok(RawPredicate::User(UserQuery::ref_cast(username))));
        }
        // "name:req" carries a semver requirement; bare "name" has none.
        let (name, req) = if let Some((name, req)) = predicate.split_once(':') {
            match VersionReq::from_str(req) {
                Ok(req) => (name, Some(req)),
                Err(err) => return Some(Err(Error::new(err))),
            }
        } else {
            (predicate, None)
        };
        let Some(crate_id) = self.crates.id(name) else {
            return Some(Err(format_err!("no crate named {}", name)));
        };
        Some(Ok(RawPredicate::Crate(Predicate { crate_id, req })))
    }
}
================================================
FILE: src/render.rs
================================================
use crate::total::Total;
use anyhow::Result;
use cargo_tally::matrix::Matrix;
use cargo_tally::timestamp::DateTime;
use std::cmp;
use std::env;
use std::fmt::{self, Display};
use std::fs;
use std::path::PathBuf;
/// Render the tally results into a standalone HTML graph in the system temp
/// directory and return the path of the written file.
///
/// `labels` holds one display label per query column in `results`; `total`
/// is present in relative mode and converts raw counts to fractions.
pub(crate) fn graph(
    title: Option<&str>,
    transitive: bool,
    results: &Matrix,
    labels: &[String],
    total: Option<&Total>,
) -> Result<PathBuf> {
    let now = DateTime::now();
    let relative = total.is_some();
    // Default title reflects the mode when the user did not supply one.
    let title = if let Some(title) = title {
        title
    } else if relative {
        if transitive {
            "fraction of crates.io depending transitively"
        } else {
            "fraction of crates.io depending directly"
        }
    } else {
        if transitive {
            "number of crates depending transitively"
        } else {
            "number of crates depending directly"
        }
    };
    // Build the data series text that gets substituted into the template.
    let mut data = String::new();
    data += "[\n";
    for (i, label) in labels.iter().enumerate() {
        data += " {\"name\":\"";
        data += label;
        data += "\", \"values\":[\n";
        let mut prev = None;
        for (timestamp, row) in results {
            let value = row[i];
            if prev.is_none() {
                // Skip leading zeros, then emit a synthetic zero point just
                // before the first nonzero value so the curve starts at 0.
                if value == 0 {
                    continue;
                }
                let mut secs = timestamp.seconds();
                if timestamp.subsec_nanos() == 0 {
                    secs = secs.saturating_sub(1);
                }
                let timestamp = DateTime::from_timestamp(secs, 0);
                data += &Row(timestamp, 0, total).to_string();
            } else if prev == Some(value) {
                // Elide repeated values; the plot does not need them.
                continue;
            }
            data += &Row(timestamp, value, total).to_string();
            prev = Some(value);
        }
        // Extend the final value up to the present time.
        let (timestamp, last) = results.iter().next_back().unwrap();
        if timestamp < now {
            data += &Row(now, last[i], total).to_string();
        }
        data += " ]},\n";
    }
    data += " ]";
    // Substitute title/data/mode into the bundled HTML template.
    let template = include_str!("index.html");
    let mut preprocessor_context = minipre::Context::new();
    preprocessor_context
        .define("CARGO_TALLY_TITLE", format!("\"{}\"", title.escape_debug()))
        .define("CARGO_TALLY_DATA", data)
        .define("CARGO_TALLY_RELATIVE", (relative as usize).to_string());
    let html = minipre::process_str(template, &mut preprocessor_context)?;
    let dir = env::temp_dir().join("cargo-tally");
    fs::create_dir_all(&dir)?;
    let path = dir.join(format!("{}.html", now.millis()));
    fs::write(&path, html)?;
    Ok(path)
}
/// One plotted sample: (timestamp, dependent-crate count, optional total
/// index for relative mode). `Display` emits one JSON object fragment for
/// the data series embedded in the HTML template.
struct Row<'a>(DateTime, u32, Option<&'a Total>);
impl<'a> Display for Row<'a> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(" {\"time\":")?;
        write!(formatter, "{}", self.0.millis())?;
        formatter.write_str(", \"edges\":")?;
        if let Some(total) = self.2 {
            // Relative mode: emit the fraction of all crates existing at
            // this instant, truncated to a few significant digits.
            let total = total.eval(self.0);
            if total == 0 {
                formatter.write_str("0")?;
            } else if self.1 == total {
                // Bump a 100% down to 50%. The only graph affected by this is
                // `cargo tally --relative --transitive @alexcrichton` and while
                // 50% is not an accurate datum, this hack makes that graph more
                // readable by avoiding the y-axis getting extended all the way
                // to 100% in the first day of crates.io's existence.
                formatter.write_str("0.5")?;
            } else {
                let fraction = self.1 as f32 / total as f32;
                write_truncated(formatter, fraction)?;
            }
        } else {
            // Absolute mode: emit the raw count.
            write!(formatter, "{}", self.1)?;
        }
        formatter.write_str("},\n")?;
        Ok(())
    }
}
/// Write `fraction` keeping at most 4 characters starting from its first
/// nonzero digit, with trailing zeros stripped (e.g. 0.140625 -> "0.1406",
/// 0.5 -> "0.5", 0.0 -> "0").
fn write_truncated(formatter: &mut fmt::Formatter, fraction: f32) -> fmt::Result {
    formatter.write_str(&truncate_fraction(fraction))
}

/// Pure helper: decimal representation of `fraction`, truncated 4
/// characters past its first nonzero digit and trimmed of trailing zeros.
fn truncate_fraction(fraction: f32) -> String {
    let mut repr = fraction.to_string();
    // Idiomatic form of the manual `ch >= '1' && ch <= '9'` range check.
    let nonzero_digit = |ch: char| ('1'..='9').contains(&ch);
    if let Some(first_nonzero) = repr.find(nonzero_digit) {
        // Keep the first nonzero digit plus up to 3 more characters.
        repr.truncate(cmp::min(first_nonzero + 4, repr.len()));
    }
    if let Some(last_nonzero) = repr.rfind(nonzero_digit) {
        // Drop any trailing zeros left over from the truncation.
        repr.truncate(last_nonzero + 1);
    }
    repr
}
================================================
FILE: src/stream.rs
================================================
// Type macro expanding to the differential dataflow collection type used in
// this crate: data `$d` with difference type `$r`, in a child scope of a
// process-local timely worker timestamped by `DateTime`. The `'a` lifetime
// is resolved at the expansion site. The `$k => $v` form is shorthand for a
// collection of `($k, $v)` pairs.
macro_rules! stream {
    ($k:ty => $v:ty; $r:ty) => {
        stream![($k, $v); $r]
    };
    ($d:ty; $r:ty) => {
        differential_dataflow::collection::Collection<
            timely::dataflow::scopes::Child<
                'a,
                timely::worker::Worker<timely::communication::allocator::Process>,
                crate::timestamp::DateTime,
            >,
            $d,
            $r,
        >
    };
}
================================================
FILE: src/timestamp.rs
================================================
use chrono::{NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
use differential_dataflow::lattice::Lattice;
use std::cmp;
use std::fmt::{self, Debug, Display};
use timely::order::{PartialOrder, TotalOrder};
use timely::progress::timestamp::{PathSummary, Refines, Timestamp};
/// Timestamp used for dataflow progress tracking: a thin wrapper around
/// `chrono::DateTime<Utc>` implementing the timely/differential traits.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[repr(transparent)]
pub struct DateTime(chrono::DateTime<Utc>);
/// Path summary for `DateTime`: a signed time offset.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
#[repr(transparent)]
pub struct Duration(chrono::Duration);
impl DateTime {
    /// Combine a calendar date and wall-clock time, interpreted as UTC.
    pub fn new(date: NaiveDate, time: NaiveTime) -> Self {
        DateTime(Utc.from_utc_datetime(&NaiveDateTime::new(date, time)))
    }
    /// The current instant.
    pub fn now() -> Self {
        DateTime(Utc::now())
    }
    /// Whole seconds since the Unix epoch.
    pub fn seconds(&self) -> i64 {
        self.0.timestamp()
    }
    /// Milliseconds since the Unix epoch.
    pub fn millis(&self) -> i64 {
        self.0.timestamp_millis()
    }
    /// Nanoseconds past the last whole second.
    pub fn subsec_nanos(&self) -> u32 {
        self.0.timestamp_subsec_nanos()
    }
    /// Build from a Unix timestamp; panics if outside chrono's range.
    pub fn from_timestamp(secs: i64, nanos: u32) -> Self {
        DateTime(chrono::DateTime::from_timestamp(secs, nanos).unwrap())
    }
}
impl From<chrono::DateTime<Utc>> for DateTime {
    fn from(date_time: chrono::DateTime<Utc>) -> Self {
        DateTime(date_time)
    }
}
impl Timestamp for DateTime {
    type Summary = Duration;
    // Minimum element for progress tracking: the Unix epoch.
    fn minimum() -> Self {
        Self::from_timestamp(0, 0)
    }
}
impl Lattice for DateTime {
    // Timestamps are totally ordered, so join/meet reduce to max/min.
    fn join(&self, other: &Self) -> Self {
        cmp::max(*self, *other)
    }
    fn meet(&self, other: &Self) -> Self {
        cmp::min(*self, *other)
    }
}
impl PartialOrder for DateTime {
    // Delegates to the derived total `Ord`.
    fn less_than(&self, other: &Self) -> bool {
        self < other
    }
    fn less_equal(&self, other: &Self) -> bool {
        self <= other
    }
}
// Sound because `PartialOrder` above is backed by a total order.
impl TotalOrder for DateTime {}
impl PathSummary<DateTime> for Duration {
    // Advancing a timestamp along a path adds the offset; `None` when the
    // addition overflows chrono's representable range.
    fn results_in(&self, src: &DateTime) -> Option<DateTime> {
        src.0.checked_add_signed(self.0).map(DateTime)
    }
    // Composing two summaries adds the offsets, again overflow-checked.
    fn followed_by(&self, other: &Self) -> Option<Self> {
        self.0.checked_add(&other.0).map(Duration)
    }
}
impl Refines<()> for DateTime {
    // Entering from the root scope starts at the minimum timestamp.
    fn to_inner(_other: ()) -> Self {
        Self::minimum()
    }
    #[allow(clippy::unused_unit)]
    fn to_outer(self) -> () {}
    #[allow(clippy::unused_unit)]
    fn summarize(_path: <Self as Timestamp>::Summary) -> () {}
}
impl PartialOrder for Duration {
    // Delegates to the derived total `Ord` on the wrapped chrono::Duration.
    fn less_than(&self, other: &Self) -> bool {
        self < other
    }
    fn less_equal(&self, other: &Self) -> bool {
        self <= other
    }
}
impl Default for DateTime {
    // Default is the minimum timestamp (Unix epoch).
    fn default() -> Self {
        Self::minimum()
    }
}
impl Display for DateTime {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}
impl Debug for DateTime {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.0, formatter)
    }
}
impl Default for Duration {
    // Zero-length duration: the identity for summary composition.
    fn default() -> Self {
        Duration(chrono::Duration::nanoseconds(0))
    }
}
impl Debug for Duration {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.0, formatter)
    }
}
================================================
FILE: src/total.rs
================================================
use cargo_tally::timestamp::DateTime;
use cargo_tally::Release;
use std::collections::BTreeSet as Set;
/// Index of when each crate first appeared, used to compute the total number
/// of crates in existence at a given instant (for relative graphs).
pub(crate) struct Total {
    // Creation time of the first release of each distinct crate.
    times: Vec<DateTime>,
}
impl Total {
    /// Record the first-release time of every distinct crate.
    ///
    /// NOTE(review): `eval` binary-searches `times`, so this assumes
    /// `releases` arrives sorted by `created_at` — confirm against caller.
    pub(crate) fn index(releases: &[Release]) -> Self {
        let mut crate_ids = Set::new();
        let mut times = Vec::new();
        for release in releases {
            // Only the first release per crate_id contributes.
            if crate_ids.insert(release.crate_id) {
                times.push(release.created_at);
            }
        }
        Total { times }
    }
    /// Number of crates whose first release is at or before `time`.
    pub(crate) fn eval(&self, time: DateTime) -> u32 {
        match self.times.binary_search(&time) {
            // Exact hit: that crate counts too, hence the +1.
            Ok(i) => 1 + i as u32,
            Err(i) => i as u32,
        }
    }
}
================================================
FILE: src/trace.rs
================================================
// Compile-time verbosity switch; presumably gates extra diagnostic output
// at the call sites that read it — flip to true when debugging.
pub(crate) const VERBOSE: bool = false;
================================================
FILE: src/user.rs
================================================
use ref_cast::RefCast;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt::{self, Display};
/// Whether `name` is a syntactically valid owner name: 1-39 ASCII
/// alphanumerics or hyphens, with no leading, trailing, or consecutive
/// hyphens (these limits match GitHub's username rules).
pub(crate) fn valid(name: &str) -> bool {
    // Cheap length/edge checks first. `len()` counts bytes, which equals
    // the character count because only ASCII is accepted below.
    !name.is_empty()
        && name.len() <= 39
        && !name.starts_with('-')
        && !name.ends_with('-')
        && !name.contains("--")
        // `is_ascii_alphanumeric` replaces the three manual char-range
        // checks ([0-9], [A-Z], [a-z]) with the stdlib idiom.
        && name
            .chars()
            .all(|ch| ch.is_ascii_alphanumeric() || ch == '-')
}
/// An owner name stored with its original capitalization but compared
/// ASCII-case-insensitively via `UserQuery`.
pub(crate) struct User(String);
impl User {
    pub(crate) fn new(string: String) -> Self {
        User(string)
    }
}
impl Ord for User {
    // Delegate to `UserQuery`'s case-insensitive ordering so lookups
    // through `Borrow<UserQuery>` agree with `User`'s own ordering.
    fn cmp(&self, rhs: &Self) -> Ordering {
        UserQuery::ref_cast(&self.0).cmp(UserQuery::ref_cast(&rhs.0))
    }
}
impl PartialOrd for User {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
impl Eq for User {}
impl PartialEq for User {
    // Must agree with `Ord`: equality is case-insensitive too.
    fn eq(&self, rhs: &Self) -> bool {
        UserQuery::ref_cast(&self.0).eq(UserQuery::ref_cast(&rhs.0))
    }
}
impl Display for User {
    // Display preserves the original capitalization.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}
/// Borrowed, unsized view of an owner name used as a lookup key; all of its
/// comparisons are ASCII-case-insensitive.
#[derive(RefCast)]
#[repr(transparent)]
pub(crate) struct UserQuery(str);
impl UserQuery {
    /// A name containing '/' is treated as a team reference
    /// (e.g. "org/team") rather than a plain username.
    pub(crate) fn is_team(&self) -> bool {
        self.0.contains('/')
    }
}
impl Borrow<UserQuery> for User {
    // Allows map lookups keyed by `User` to be performed with a borrowed
    // `&UserQuery`, without allocating.
    fn borrow(&self) -> &UserQuery {
        UserQuery::ref_cast(&self.0)
    }
}
impl Ord for UserQuery {
    // Lexicographic byte comparison, ASCII-case-insensitively.
    fn cmp(&self, rhs: &Self) -> Ordering {
        self.0
            .bytes()
            .map(CaseAgnosticByte)
            .cmp(rhs.0.bytes().map(CaseAgnosticByte))
    }
}
impl PartialOrd for UserQuery {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
impl Eq for UserQuery {}
impl PartialEq for UserQuery {
    // Must agree with `Ord` above (same byte mapping) for correct use as an
    // ordered-map key.
    fn eq(&self, rhs: &Self) -> bool {
        self.0
            .bytes()
            .map(CaseAgnosticByte)
            .eq(rhs.0.bytes().map(CaseAgnosticByte))
    }
}
impl Display for UserQuery {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}
/// A byte wrapper whose ordering and equality ignore ASCII case.
struct CaseAgnosticByte(u8);
impl Ord for CaseAgnosticByte {
    fn cmp(&self, rhs: &Self) -> Ordering {
        // Fold both sides to lowercase before comparing.
        let lhs_folded = self.0.to_ascii_lowercase();
        let rhs_folded = rhs.0.to_ascii_lowercase();
        lhs_folded.cmp(&rhs_folded)
    }
}
impl PartialOrd for CaseAgnosticByte {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
impl Eq for CaseAgnosticByte {}
impl PartialEq for CaseAgnosticByte {
    fn eq(&self, rhs: &Self) -> bool {
        // Equality is defined in terms of the same case-folded ordering.
        matches!(self.cmp(rhs), Ordering::Equal)
    }
}
================================================
FILE: src/version.rs
================================================
use crate::arena::Slice;
use semver::{Comparator, Op};
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::ops::{Deref, DerefMut};
use std::str::FromStr;
/// Newtype over `semver::Version` so this crate can provide its own trait
/// impls and matching logic.
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct Version(pub semver::Version);
impl Version {
    pub const fn new(major: u64, minor: u64, patch: u64) -> Self {
        Version(semver::Version::new(major, minor, patch))
    }
}
/// Version requirement whose comparator list is stored in an arena-backed
/// `Slice`, which makes the whole type `Copy`.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct VersionReq {
    pub comparators: Slice<Comparator>,
}
impl VersionReq {
    /// Whether `version` satisfies every comparator, with prerelease
    /// handling as in `matches_req`.
    pub fn matches(&self, version: &Version) -> bool {
        matches_req(self.comparators, version)
    }
}
impl Deref for Version {
    type Target = semver::Version;
    // Deref to the inner semver::Version for convenient field access
    // (`ver.major`, `ver.pre`, ...).
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for Version {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl Display for Version {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}
impl Debug for Version {
    // Compact debug form, e.g. `Version(1.2.3)`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "Version({})", self)
    }
}
impl Ord for VersionReq {
    // Lexicographic comparison of comparator lists. Each comparator is
    // compared by (op, major, minor, patch, pre); `op as usize` provides an
    // arbitrary but consistent ordering of operators.
    fn cmp(&self, other: &Self) -> Ordering {
        let mut lhs = self.comparators.iter_ref();
        let mut rhs = other.comparators.iter_ref();
        loop {
            let Some(x) = lhs.next() else {
                // The shorter list orders first when it is a prefix of the
                // longer one.
                return if rhs.next().is_none() {
                    Ordering::Equal
                } else {
                    Ordering::Less
                };
            };
            let Some(y) = rhs.next() else {
                return Ordering::Greater;
            };
            match (x.op as usize, x.major, x.minor, x.patch, &x.pre).cmp(&(
                y.op as usize,
                y.major,
                y.minor,
                y.patch,
                &y.pre,
            )) {
                Ordering::Equal => (),
                non_eq => return non_eq,
            }
        }
    }
}
impl PartialOrd for VersionReq {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl From<semver::VersionReq> for VersionReq {
    // Copy the comparators into the arena-backed slice representation.
    fn from(req: semver::VersionReq) -> Self {
        let comparators = Slice::new(&req.comparators);
        VersionReq { comparators }
    }
}
impl FromStr for VersionReq {
    type Err = semver::Error;
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        semver::VersionReq::from_str(string).map(VersionReq::from)
    }
}
impl Display for VersionReq {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // An empty comparator list is rendered as the wildcard "*".
        if self.comparators.is_empty() {
            return formatter.write_str("*");
        }
        for (i, comparator) in self.comparators.iter_ref().enumerate() {
            if i > 0 {
                formatter.write_str(", ")?;
            }
            write!(formatter, "{}", comparator)?;
        }
        Ok(())
    }
}
impl Debug for VersionReq {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "VersionReq({})", self)
    }
}
/// Shared implementation of `VersionReq::matches`.
fn matches_req(comparators: Slice<Comparator>, ver: &Version) -> bool {
    // Every comparator must accept the version.
    for cmp in comparators.iter_ref() {
        if !matches_impl(cmp, ver) {
            return false;
        }
    }
    if ver.pre.is_empty() {
        return true;
    }
    // If a version has a prerelease tag (for example, 1.2.3-alpha.3) then it
    // will only be allowed to satisfy req if at least one comparator with the
    // same major.minor.patch also has a prerelease tag.
    for cmp in comparators.iter_ref() {
        if pre_is_compatible(cmp, ver) {
            return true;
        }
    }
    false
}
/// Dispatch on the comparator operator. `Op` is non-exhaustive in semver,
/// hence the wildcard arm.
fn matches_impl(cmp: &Comparator, ver: &Version) -> bool {
    match cmp.op {
        Op::Exact | Op::Wildcard => matches_exact(cmp, ver),
        Op::Greater => matches_greater(cmp, ver),
        Op::GreaterEq => matches_exact(cmp, ver) || matches_greater(cmp, ver),
        Op::Less => matches_less(cmp, ver),
        Op::LessEq => matches_exact(cmp, ver) || matches_less(cmp, ver),
        Op::Tilde => matches_tilde(cmp, ver),
        Op::Caret => matches_caret(cmp, ver),
        _ => unimplemented!(),
    }
}
/// `=` (and wildcard) matching: every component the comparator specifies
/// must equal the version's, and the prerelease tags must match exactly.
fn matches_exact(cmp: &Comparator, ver: &Version) -> bool {
    let major_ok = ver.major == cmp.major;
    // An unspecified minor/patch component matches anything.
    let minor_ok = match cmp.minor {
        Some(minor) => ver.minor == minor,
        None => true,
    };
    let patch_ok = match cmp.patch {
        Some(patch) => ver.patch == patch,
        None => true,
    };
    major_ok && minor_ok && patch_ok && ver.pre == cmp.pre
}
/// `>` matching: strictly greater, but only as precise as the comparator.
/// When minor/patch are unspecified (e.g. `>1`), a version sharing the
/// specified leading components is not considered greater.
fn matches_greater(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return ver.major > cmp.major;
    }
    match cmp.minor {
        None => return false,
        Some(minor) => {
            if ver.minor != minor {
                return ver.minor > minor;
            }
        }
    }
    match cmp.patch {
        None => return false,
        Some(patch) => {
            if ver.patch != patch {
                return ver.patch > patch;
            }
        }
    }
    // All numeric components equal: decide on prerelease ordering.
    ver.pre > cmp.pre
}
/// `<` matching: mirror image of `matches_greater`.
fn matches_less(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return ver.major < cmp.major;
    }
    match cmp.minor {
        None => return false,
        Some(minor) => {
            if ver.minor != minor {
                return ver.minor < minor;
            }
        }
    }
    match cmp.patch {
        None => return false,
        Some(patch) => {
            if ver.patch != patch {
                return ver.patch < patch;
            }
        }
    }
    ver.pre < cmp.pre
}
/// `~` matching: major (and minor, if given) must be equal; patch and
/// prerelease may be greater or equal.
fn matches_tilde(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return false;
    }
    if let Some(minor) = cmp.minor {
        if ver.minor != minor {
            return false;
        }
    }
    if let Some(patch) = cmp.patch {
        if ver.patch != patch {
            return ver.patch > patch;
        }
    }
    ver.pre >= cmp.pre
}
/// `^` matching: compatible within the leftmost nonzero component.
/// `^0.y` pins the minor version; `^0.0.z` pins minor and patch.
fn matches_caret(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return false;
    }
    let Some(minor) = cmp.minor else {
        // Bare `^N`: any N.y.z is compatible.
        return true;
    };
    let Some(patch) = cmp.patch else {
        return if cmp.major > 0 {
            // `^N.M` with N > 0: any minor >= M.
            ver.minor >= minor
        } else {
            // `^0.M`: minor is the leftmost nonzero component, so it pins.
            ver.minor == minor
        };
    };
    if cmp.major > 0 {
        if ver.minor != minor {
            return ver.minor > minor;
        } else if ver.patch != patch {
            return ver.patch > patch;
        }
    } else if minor > 0 {
        if ver.minor != minor {
            return false;
        } else if ver.patch != patch {
            return ver.patch > patch;
        }
    } else if ver.minor != minor || ver.patch != patch {
        // `^0.0.z`: only exactly 0.0.z is compatible.
        return false;
    }
    ver.pre >= cmp.pre
}
/// Whether this comparator licenses a prerelease version: it names the same
/// major.minor.patch and itself carries a prerelease tag.
fn pre_is_compatible(cmp: &Comparator, ver: &Version) -> bool {
    cmp.major == ver.major
        && cmp.minor == Some(ver.minor)
        && cmp.patch == Some(ver.patch)
        && !cmp.pre.is_empty()
}
gitextract_nzl9eeax/
├── .github/
│ ├── FUNDING.yml
│ └── workflows/
│ ├── ci.yml
│ └── install.yml
├── .gitignore
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── build.rs
└── src/
├── alloc.rs
├── arena.rs
├── args.rs
├── clean.rs
├── collect.rs
├── communication.rs
├── cratemap.rs
├── cratename.rs
├── dependency.rs
├── feature.rs
├── filter.rs
├── hidden.rs
├── hint.rs
├── id.rs
├── impls.rs
├── index.html
├── lib.rs
├── load.rs
├── log.rs
├── macros.rs
├── main.rs
├── matrix.rs
├── max.rs
├── mend.rs
├── present.rs
├── query.rs
├── render.rs
├── stream.rs
├── timestamp.rs
├── total.rs
├── trace.rs
├── user.rs
└── version.rs
SYMBOL INDEX (280 symbols across 29 files)
FILE: build.rs
constant CARGO_TALLY_MEMORY_LIMIT (line 6) | const CARGO_TALLY_MEMORY_LIMIT: &str = "CARGO_TALLY_MEMORY_LIMIT";
function main (line 8) | fn main() {
FILE: src/alloc.rs
type Allocator (line 7) | struct Allocator<A = System> {
constant LIMIT (line 25) | const LIMIT: Option<u64> = include!(concat!(env!("OUT_DIR"), "/limit.mem...
constant LIMIT (line 28) | const LIMIT: Option<u64> = include!(concat!(env!("OUT_DIR"), "\\limit.me...
method alloc (line 34) | unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
method dealloc (line 54) | unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
method alloc_zeroed (line 61) | unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
method realloc (line 81) | unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usi...
type AllocStat (line 129) | pub(crate) struct AllocStat {
function stat (line 135) | pub(crate) fn stat() -> AllocStat {
method fmt (line 144) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
FILE: src/arena.rs
type Slice (line 12) | pub struct Slice<T: 'static> {
constant EMPTY (line 20) | pub const EMPTY: Self = Slice { contents: &[] };
function new (line 22) | pub fn new(slice: &[T]) -> Self
function from (line 29) | pub const fn from(contents: &'static [T]) -> Self {
function iter (line 33) | pub fn iter(&self) -> impl Iterator<Item = T>
function iter_ref (line 40) | pub fn iter_ref(&self) -> impl Iterator<Item = &'static T> {
function is_empty (line 44) | pub fn is_empty(&self) -> bool {
method clone (line 55) | fn clone(&self) -> Self {
function from_iter (line 64) | fn from_iter<I>(iter: I) -> Self
type Item (line 93) | type Item = T;
type IntoIter (line 94) | type IntoIter = Copied<Iter<'static, T>>;
method into_iter (line 96) | fn into_iter(self) -> Self::IntoIter {
method fmt (line 105) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
FILE: src/args.rs
type Opt (line 13) | pub(crate) struct Opt {
constant USAGE (line 23) | const USAGE: &str = "\
constant TEMPLATE (line 27) | const TEMPLATE: &str = "\
function app (line 38) | fn app(jobs_help: &String) -> Command {
constant DB (line 55) | const DB: &str = "db";
constant EXCLUDE (line 56) | const EXCLUDE: &str = "exclude";
constant JOBS (line 57) | const JOBS: &str = "jobs";
constant RELATIVE (line 58) | const RELATIVE: &str = "relative";
constant TITLE (line 59) | const TITLE: &str = "title";
constant TRANSITIVE (line 60) | const TRANSITIVE: &str = "transitive";
constant QUERIES (line 61) | const QUERIES: &str = "queries";
function parse (line 63) | pub(crate) fn parse() -> Opt {
function arg_db (line 123) | fn arg_db() -> Arg {
function arg_exclude (line 133) | fn arg_exclude() -> Arg {
function arg_jobs (line 143) | fn arg_jobs(help: &String) -> Arg {
function arg_relative (line 153) | fn arg_relative() -> Arg {
function arg_title (line 160) | fn arg_title() -> Arg {
function arg_transitive (line 170) | fn arg_transitive() -> Arg {
function arg_queries (line 177) | fn arg_queries() -> Arg {
type Error (line 188) | enum Error {
function validate_query (line 197) | fn validate_query(string: &str) -> Result<String, Error> {
function test_cli (line 228) | fn test_cli() {
FILE: src/clean.rs
function clean (line 10) | pub(crate) fn clean(db_dump: &mut DbDump, crates: &CrateMap) {
FILE: src/collect.rs
type Collect (line 8) | pub(crate) trait Collect<T> {
method collect_into (line 9) | fn collect_into(&self, result: &Emitter<T>);
type ResultCollection (line 12) | pub(crate) struct ResultCollection<T> {
type Emitter (line 16) | pub(crate) struct Emitter<T> {
function new (line 21) | pub(crate) fn new() -> Self {
function emitter (line 26) | pub(crate) fn emitter(&self) -> Emitter<T> {
function sort (line 36) | pub(crate) fn sort(&self) {
function collect_into (line 55) | fn collect_into(&self, result: &Emitter<(D, G::Timestamp, R)>) {
type Item (line 66) | type Item = T;
type IntoIter (line 67) | type IntoIter = <Vec<T> as IntoIterator>::IntoIter;
method into_iter (line 69) | fn into_iter(self) -> Self::IntoIter {
FILE: src/cratemap.rs
type CrateMap (line 9) | pub struct CrateMap {
method new (line 17) | pub fn new() -> Self {
method insert (line 21) | pub fn insert(&mut self, id: CrateId, name: String) {
method name (line 28) | pub fn name(&self, id: CrateId) -> Option<&str> {
method id (line 32) | pub fn id(&self, name: &str) -> Option<CrateId> {
FILE: src/cratename.rs
constant MAX_NAME_LENGTH (line 5) | pub const MAX_NAME_LENGTH: usize = 64;
function valid (line 8) | pub fn valid(name: &str) -> bool {
function valid_ident (line 13) | fn valid_ident(name: &str) -> bool {
function valid_feature_prefix (line 17) | fn valid_feature_prefix(name: &str) -> bool {
type CrateName (line 24) | pub(crate) struct CrateName(String);
method new (line 27) | pub(crate) fn new(string: String) -> Self {
method borrow (line 57) | fn borrow(&self) -> &CrateNameQuery {
method cmp (line 33) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 39) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 47) | fn eq(&self, rhs: &Self) -> bool {
type CrateNameQuery (line 54) | pub(crate) struct CrateNameQuery(str);
method cmp (line 63) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 72) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 80) | fn eq(&self, rhs: &Self) -> bool {
type SeparatorAgnosticByte (line 88) | struct SeparatorAgnosticByte(u8);
method cmp (line 91) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 99) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 107) | fn eq(&self, rhs: &Self) -> bool {
FILE: src/dependency.rs
type DependencyKind (line 2) | pub enum DependencyKind {
method from (line 9) | fn from(dependency_kind: db_dump::dependencies::DependencyKind) -> Self {
FILE: src/feature.rs
type FeatureId (line 7) | pub struct FeatureId(pub u32);
constant CRATE (line 10) | pub const CRATE: Self = FeatureId(0);
constant DEFAULT (line 11) | pub const DEFAULT: Self = FeatureId(1);
constant TBD (line 12) | pub const TBD: Self = FeatureId(!0);
type FeatureEnables (line 16) | pub struct FeatureEnables {
type CrateFeature (line 23) | pub struct CrateFeature {
type VersionFeature (line 29) | pub struct VersionFeature {
type DefaultFeatures (line 35) | pub struct DefaultFeatures(pub bool);
type FeatureNames (line 37) | pub struct FeatureNames {
method new (line 43) | pub fn new() -> Self {
method id (line 53) | pub fn id(&mut self, name: &str) -> FeatureId {
method name (line 64) | pub fn name(&self, id: FeatureId) -> &str {
method default (line 70) | fn default() -> Self {
type FeatureIter (line 75) | pub struct FeatureIter {
method new (line 82) | pub fn new(default_features: DefaultFeatures, features: Slice<FeatureI...
type Item (line 92) | type Item = FeatureId;
method next (line 94) | fn next(&mut self) -> Option<Self::Item> {
FILE: src/filter.rs
function filter (line 5) | pub(crate) fn filter(db_dump: &mut DbDump, crates: &CrateMap, exclude: &...
FILE: src/hint.rs
type TypeHint (line 6) | pub(crate) trait TypeHint: Sized {
method T (line 9) | fn T<D>(self) -> Self
method KV (line 16) | fn KV<K, V>(self) -> Self
type Element (line 29) | type Element = D;
type Element (line 37) | type Element = D;
FILE: src/id.rs
type QueryId (line 3) | pub struct QueryId(pub u8);
type CrateId (line 7) | pub struct CrateId(pub u32);
method from (line 18) | fn from(id: db_dump::crates::CrateId) -> Self {
type VersionId (line 11) | pub struct VersionId(pub u32);
method from (line 24) | fn from(id: db_dump::versions::VersionId) -> Self {
type DependencyId (line 15) | pub struct DependencyId(pub u32);
method from (line 30) | fn from(id: u32) -> Self {
FILE: src/impls.rs
method cmp (line 5) | fn cmp(&self, other: &Self) -> Ordering {
method partial_cmp (line 11) | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
method eq (line 19) | fn eq(&self, other: &Self) -> bool {
method cmp (line 25) | fn cmp(&self, other: &Self) -> Ordering {
method partial_cmp (line 31) | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
method eq (line 39) | fn eq(&self, other: &Self) -> bool {
method cmp (line 45) | fn cmp(&self, other: &Self) -> Ordering {
method partial_cmp (line 51) | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
method eq (line 59) | fn eq(&self, other: &Self) -> bool {
FILE: src/lib.rs
type DbDump (line 79) | pub struct DbDump {
type Release (line 86) | pub struct Release {
type Dependency (line 95) | pub struct Dependency {
type Query (line 107) | pub struct Query {
type Predicate (line 113) | pub struct Predicate {
type Input (line 119) | struct Input {
function run (line 124) | pub fn run(db_dump: DbDump, jobs: usize, transitive: bool, queries: &[Qu...
function set_timely_worker_log (line 201) | fn set_timely_worker_log(worker: &Worker<Process>) {
function dataflow (line 221) | fn dataflow(
FILE: src/load.rs
function load (line 19) | pub(crate) fn load(path: impl AsRef<Path>) -> Result<(DbDump, CrateMap)> {
FILE: src/log.rs
type Log (line 5) | pub trait Log {
method trace (line 6) | fn trace(&mut self) -> LogStream;
method warning (line 7) | fn warning(&mut self) -> LogStream;
method error (line 8) | fn error(&mut self) -> LogStream;
method red (line 9) | fn red(&mut self) -> LogStream;
method trace (line 13) | fn trace(&mut self) -> LogStream {
method warning (line 20) | fn warning(&mut self) -> LogStream {
method error (line 27) | fn error(&mut self) -> LogStream {
method red (line 37) | fn red(&mut self) -> LogStream {
type LogStream (line 45) | pub struct LogStream<'a>(&'a mut StandardStream);
function write_fmt (line 48) | pub fn write_fmt(&mut self, args: fmt::Arguments) {
method drop (line 54) | fn drop(&mut self) {
FILE: src/main.rs
function main (line 57) | fn main() {
function try_main (line 65) | fn try_main(stderr: &mut StandardStream) -> Result<()> {
FILE: src/matrix.rs
type Matrix (line 8) | pub struct Matrix {
method new (line 18) | pub(crate) fn new(queries: usize) -> Self {
method width (line 25) | pub fn width(&self) -> usize {
method is_empty (line 29) | pub fn is_empty(&self) -> bool {
method len (line 33) | pub fn len(&self) -> usize {
method iter (line 37) | pub fn iter(&self) -> Iter {
method push (line 41) | pub(crate) fn push(&mut self, timestamp: DateTime, data: Vec<u32>) {
type Row (line 15) | pub struct Row([u32]);
type Output (line 76) | type Output = u32;
method index (line 78) | fn index(&self, i: usize) -> &Self::Output {
type Item (line 47) | type Item = (DateTime, &'a Row);
type IntoIter (line 48) | type IntoIter = Iter<'a>;
method into_iter (line 50) | fn into_iter(self) -> Self::IntoIter {
type Iter (line 55) | pub struct Iter<'a>(slice::Iter<'a, (DateTime, Vec<u32>)>);
type Item (line 58) | type Item = (DateTime, &'a Row);
method next (line 60) | fn next(&mut self) -> Option<Self::Item> {
method next_back (line 68) | fn next_back(&mut self) -> Option<Self::Item> {
type Item (line 84) | type Item = u32;
type IntoIter (line 85) | type IntoIter = Copied<slice::Iter<'a, u32>>;
method into_iter (line 87) | fn into_iter(self) -> Self::IntoIter {
type Target (line 93) | type Target = [u32];
method deref (line 95) | fn deref(&self) -> &Self::Target {
type RelativeRow (line 100) | pub struct RelativeRow<'a> {
type Output (line 106) | type Output = RelativeRow<'a>;
function div (line 108) | fn div(self, rhs: u32) -> Self::Output {
method fmt (line 117) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method fmt (line 123) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
FILE: src/max.rs
type MaxByKey (line 14) | pub(crate) trait MaxByKey<G, K, V, R>
method max_by_key (line 18) | fn max_by_key(&self) -> Collection<G, (K, V), isize>;
function max_by_key (line 30) | fn max_by_key(&self) -> Collection<G, (K, V), isize> {
type Max (line 40) | pub(crate) struct Max<T> {
type Output (line 45) | type Output = Self;
function multiply (line 47) | fn multiply(self, rhs: &Present) -> Self::Output {
method plus_equals (line 57) | fn plus_equals(&mut self, rhs: &Self) {
method is_zero (line 63) | fn is_zero(&self) -> bool {
FILE: src/mend.rs
function mend_crates (line 12) | pub(crate) fn mend_crates(crates: &mut CrateMap) {
function mend_releases (line 36) | pub(crate) fn mend_releases(db_dump: &mut DbDump, crates: &CrateMap) {
FILE: src/present.rs
type Present (line 4) | pub(crate) struct Present;
type Output (line 17) | type Output = Present;
method multiply (line 19) | fn multiply(self, rhs: &Present) -> Self::Output {
type Output (line 35) | type Output = isize;
method multiply (line 37) | fn multiply(self, rhs: &isize) -> Self::Output {
method plus_equals (line 7) | fn plus_equals(&mut self, rhs: &Present) {
method is_zero (line 11) | fn is_zero(&self) -> bool {
type Output (line 26) | type Output = isize;
function multiply (line 28) | fn multiply(self, rhs: &Present) -> Self::Output {
FILE: src/query.rs
function parse (line 13) | pub fn parse<'a>(
function parse_predicates (line 30) | fn parse_predicates(string: &str, crates: &CrateMap) -> Result<Slice<Pre...
function format (line 61) | pub fn format(query: &str, crates: &CrateMap) -> String {
type DisplayQuery (line 65) | struct DisplayQuery<'a> {
method fmt (line 71) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
type RawPredicate (line 96) | enum RawPredicate<'a> {
type IterPredicates (line 101) | struct IterPredicates<'a> {
function new (line 107) | fn new(query: &'a str, crates: &'a CrateMap) -> Self {
type Item (line 116) | type Item = Result<RawPredicate<'a>>;
method next (line 118) | fn next(&mut self) -> Option<Self::Item> {
FILE: src/render.rs
function graph (line 11) | pub(crate) fn graph(
type Row (line 85) | struct Row<'a>(DateTime, u32, Option<&'a Total>);
method fmt (line 88) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
function write_truncated (line 115) | fn write_truncated(formatter: &mut fmt::Formatter, fraction: f32) -> fmt...
FILE: src/timestamp.rs
type DateTime (line 10) | pub struct DateTime(chrono::DateTime<Utc>);
method new (line 17) | pub fn new(date: NaiveDate, time: NaiveTime) -> Self {
method now (line 21) | pub fn now() -> Self {
method seconds (line 25) | pub fn seconds(&self) -> i64 {
method millis (line 29) | pub fn millis(&self) -> i64 {
method subsec_nanos (line 33) | pub fn subsec_nanos(&self) -> u32 {
method from_timestamp (line 37) | pub fn from_timestamp(secs: i64, nanos: u32) -> Self {
method from (line 43) | fn from(date_time: chrono::DateTime<Utc>) -> Self {
method to_inner (line 89) | fn to_inner(_other: ()) -> Self {
method to_outer (line 94) | fn to_outer(self) -> () {}
method summarize (line 97) | fn summarize(_path: <Self as Timestamp>::Summary) -> () {}
type Duration (line 14) | pub struct Duration(chrono::Duration);
method results_in (line 79) | fn results_in(&self, src: &DateTime) -> Option<DateTime> {
method followed_by (line 83) | fn followed_by(&self, other: &Self) -> Option<Self> {
type Summary (line 49) | type Summary = Duration;
method minimum (line 51) | fn minimum() -> Self {
method join (line 57) | fn join(&self, other: &Self) -> Self {
method meet (line 61) | fn meet(&self, other: &Self) -> Self {
method less_than (line 67) | fn less_than(&self, other: &Self) -> bool {
method less_equal (line 71) | fn less_equal(&self, other: &Self) -> bool {
method less_than (line 101) | fn less_than(&self, other: &Self) -> bool {
method less_equal (line 105) | fn less_equal(&self, other: &Self) -> bool {
method default (line 111) | fn default() -> Self {
method fmt (line 117) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method fmt (line 123) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method default (line 129) | fn default() -> Self {
method fmt (line 135) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
FILE: src/total.rs
type Total (line 5) | pub(crate) struct Total {
method index (line 10) | pub(crate) fn index(releases: &[Release]) -> Self {
method eval (line 21) | pub(crate) fn eval(&self, time: DateTime) -> u32 {
FILE: src/trace.rs
constant VERBOSE (line 1) | pub(crate) const VERBOSE: bool = false;
FILE: src/user.rs
function valid (line 6) | pub(crate) fn valid(name: &str) -> bool {
type User (line 19) | pub(crate) struct User(String);
method new (line 22) | pub(crate) fn new(string: String) -> Self {
method borrow (line 64) | fn borrow(&self) -> &UserQuery {
method cmp (line 28) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 34) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 42) | fn eq(&self, rhs: &Self) -> bool {
method fmt (line 48) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
type UserQuery (line 55) | pub(crate) struct UserQuery(str);
method is_team (line 58) | pub(crate) fn is_team(&self) -> bool {
method cmp (line 70) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 79) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 87) | fn eq(&self, rhs: &Self) -> bool {
method fmt (line 96) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
type CaseAgnosticByte (line 101) | struct CaseAgnosticByte(u8);
method cmp (line 104) | fn cmp(&self, rhs: &Self) -> Ordering {
method partial_cmp (line 110) | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
method eq (line 118) | fn eq(&self, rhs: &Self) -> bool {
FILE: src/version.rs
type Version (line 9) | pub struct Version(pub semver::Version);
method new (line 12) | pub const fn new(major: u64, minor: u64, patch: u64) -> Self {
type VersionReq (line 18) | pub struct VersionReq {
method matches (line 23) | pub fn matches(&self, version: &Version) -> bool {
method from (line 93) | fn from(req: semver::VersionReq) -> Self {
type Target (line 29) | type Target = semver::Version;
method deref (line 31) | fn deref(&self) -> &Self::Target {
method deref_mut (line 37) | fn deref_mut(&mut self) -> &mut Self::Target {
method fmt (line 43) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method fmt (line 49) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method cmp (line 55) | fn cmp(&self, other: &Self) -> Ordering {
method partial_cmp (line 87) | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
type Err (line 100) | type Err = semver::Error;
method from_str (line 102) | fn from_str(string: &str) -> Result<Self, Self::Err> {
method fmt (line 108) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
method fmt (line 123) | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
function matches_req (line 128) | fn matches_req(comparators: Slice<Comparator>, ver: &Version) -> bool {
function matches_impl (line 151) | fn matches_impl(cmp: &Comparator, ver: &Version) -> bool {
function matches_exact (line 164) | fn matches_exact(cmp: &Comparator, ver: &Version) -> bool {
function matches_greater (line 184) | fn matches_greater(cmp: &Comparator, ver: &Version) -> bool {
function matches_less (line 210) | fn matches_less(cmp: &Comparator, ver: &Version) -> bool {
function matches_tilde (line 236) | fn matches_tilde(cmp: &Comparator, ver: &Version) -> bool {
function matches_caret (line 256) | fn matches_caret(cmp: &Comparator, ver: &Version) -> bool {
function pre_is_compatible (line 292) | fn pre_is_compatible(cmp: &Comparator, ver: &Version) -> bool {
Condensed preview — 42 files, each showing its path, character count, and a content snippet. Download the .json file, or copy it, to get the full structured content (161K chars).
[
{
"path": ".github/FUNDING.yml",
"chars": 16,
"preview": "github: dtolnay\n"
},
{
"path": ".github/workflows/ci.yml",
"chars": 1858,
"preview": "name: CI\n\non:\n push:\n pull_request:\n workflow_dispatch:\n schedule: [cron: \"40 1 * * *\"]\n\npermissions:\n contents: re"
},
{
"path": ".github/workflows/install.yml",
"chars": 282,
"preview": "name: Install\n\non:\n workflow_dispatch:\n schedule: [cron: \"40 1 * * *\"]\n push: {tags: ['*']}\n\npermissions: {}\n\nenv:\n "
},
{
"path": ".gitignore",
"chars": 60,
"preview": "/*.tar.gz\n/Cargo.lock\n/dataflow-graph/\n/report.txt\n/target/\n"
},
{
"path": "Cargo.toml",
"chars": 1247,
"preview": "[package]\nname = \"cargo-tally\"\nversion = \"1.0.73\"\nauthors = [\"David Tolnay <dtolnay@gmail.com>\"]\ncategories = [\"developm"
},
{
"path": "LICENSE-APACHE",
"chars": 9723,
"preview": " Apache License\n Version 2.0, January 2004\n http"
},
{
"path": "LICENSE-MIT",
"chars": 1023,
"preview": "Permission is hereby granted, free of charge, to any\nperson obtaining a copy of this software and associated\ndocumentati"
},
{
"path": "README.md",
"chars": 4872,
"preview": "# Cargo tally\n\n<img alt=\"Number of crates that depend directly on each regex version\" src=\"https://user-images.githubuse"
},
{
"path": "build.rs",
"chars": 1165,
"preview": "use std::env;\nuse std::fs;\nuse std::path::Path;\nuse std::process;\n\nconst CARGO_TALLY_MEMORY_LIMIT: &str = \"CARGO_TALLY_M"
},
{
"path": "src/alloc.rs",
"chars": 4524,
"preview": "use bytesize::ByteSize;\nuse std::alloc::{self, GlobalAlloc, Layout, System};\nuse std::fmt::{self, Display};\nuse std::ptr"
},
{
"path": "src/arena.rs",
"chars": 2325,
"preview": "use foldhash::HashMap;\nuse std::any::TypeId;\nuse std::fmt::{self, Debug};\nuse std::iter::Copied;\nuse std::ptr;\nuse std::"
},
{
"path": "src/args.rs",
"chars": 5798,
"preview": "use crate::{cratename, user};\nuse clap::builder::{ArgAction, ValueParser};\nuse clap::{Arg, Command};\nuse regex::Regex;\nu"
},
{
"path": "src/clean.rs",
"chars": 3781,
"preview": "use crate::cratemap::CrateMap;\nuse cargo_tally::arena::Slice;\nuse cargo_tally::id::{CrateId, VersionId};\nuse cargo_tally"
},
{
"path": "src/collect.rs",
"chars": 1825,
"preview": "use differential_dataflow::collection::Collection;\nuse differential_dataflow::difference::Semigroup;\nuse std::mem;\nuse s"
},
{
"path": "src/communication.rs",
"chars": 2600,
"preview": "// As far as I can tell, timely dataflow uses abomonation only for interprocess\n// communication. Within a single proces"
},
{
"path": "src/cratemap.rs",
"chars": 1001,
"preview": "use crate::cratename::{CrateName, CrateNameQuery};\nuse crate::user::User;\nuse cargo_tally::id::CrateId;\nuse db_dump::cra"
},
{
"path": "src/cratename.rs",
"chars": 2721,
"preview": "use ref_cast::RefCast;\nuse std::borrow::Borrow;\nuse std::cmp::Ordering;\n\npub const MAX_NAME_LENGTH: usize = 64;\n\n// Mirr"
},
{
"path": "src/dependency.rs",
"chars": 536,
"preview": "#[derive(Copy, Clone, Debug)]\npub enum DependencyKind {\n Normal,\n Build,\n Dev,\n}\n\nimpl From<db_dump::dependenci"
},
{
"path": "src/feature.rs",
"chars": 2638,
"preview": "use crate::arena::Slice;\nuse crate::id::{CrateId, VersionId};\nuse std::collections::BTreeMap as Map;\n\n#[derive(Copy, Clo"
},
{
"path": "src/filter.rs",
"chars": 383,
"preview": "use crate::cratemap::CrateMap;\nuse cargo_tally::DbDump;\nuse regex::Regex;\n\npub(crate) fn filter(db_dump: &mut DbDump, cr"
},
{
"path": "src/hidden.rs",
"chars": 159,
"preview": "// There is no library public API. Only the command line tool is considered\n// public API.\n\n#[path = \"lib.rs\"]\nmod lib;\n"
},
{
"path": "src/hint.rs",
"chars": 643,
"preview": "use differential_dataflow::collection::Collection;\nuse differential_dataflow::difference::Semigroup;\nuse timely::dataflo"
},
{
"path": "src/id.rs",
"chars": 858,
"preview": "#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n#[repr(transparent)]\npub struct QueryId(pub u8);\n\n#["
},
{
"path": "src/impls.rs",
"chars": 1182,
"preview": "use crate::{Dependency, Query, Release};\nuse std::cmp::Ordering;\n\nimpl Ord for Query {\n fn cmp(&self, other: &Self) -"
},
{
"path": "src/index.html",
"chars": 6569,
"preview": "<!DOCTYPE html>\n<html>\n<head>\n <meta charset=\"utf-8\" />\n <script src=\"https://d3js.org/d3.v7.min.js\"></script>\n <styl"
},
{
"path": "src/lib.rs",
"chars": 19430,
"preview": "#![deny(unsafe_op_in_unsafe_fn)]\n#![allow(non_camel_case_types)]\n#![allow(\n clippy::arc_with_non_send_sync, // https:"
},
{
"path": "src/load.rs",
"chars": 8520,
"preview": "use crate::cratemap::CrateMap;\nuse crate::user::User;\nuse anyhow::{bail, Result};\nuse cargo_tally::arena::Slice;\nuse car"
},
{
"path": "src/log.rs",
"chars": 1493,
"preview": "use std::fmt;\nuse std::io::Write;\nuse termcolor::{Color, ColorSpec, StandardStream, WriteColor};\n\npub trait Log {\n fn"
},
{
"path": "src/macros.rs",
"chars": 2607,
"preview": "macro_rules! const_assert_eq {\n ($left:expr, $right:expr) => {\n const _: [(); $left as usize] = [(); $right as"
},
{
"path": "src/main.rs",
"chars": 4834,
"preview": "#![deny(unsafe_op_in_unsafe_fn)]\n#![allow(non_upper_case_globals)]\n#![allow(\n clippy::cast_lossless,\n clippy::cast"
},
{
"path": "src/matrix.rs",
"chars": 2673,
"preview": "use crate::timestamp::DateTime;\nuse ref_cast::RefCast;\nuse std::fmt::{self, Debug};\nuse std::iter::Copied;\nuse std::ops:"
},
{
"path": "src/max.rs",
"chars": 1610,
"preview": "use crate::hint::TypeHint;\nuse crate::present::Present;\nuse differential_dataflow::collection::Collection;\nuse different"
},
{
"path": "src/mend.rs",
"chars": 30948,
"preview": "//! Fill back in some deleted releases that cause nontrivial number of\n//! dependencies downstream to fail to resolve.\n\n"
},
{
"path": "src/present.rs",
"chars": 766,
"preview": "use differential_dataflow::difference::{Multiply, Semigroup};\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\np"
},
{
"path": "src/query.rs",
"chars": 4353,
"preview": "use crate::cratemap::CrateMap;\nuse crate::user::UserQuery;\nuse anyhow::{bail, format_err, Error, Result};\nuse cargo_tall"
},
{
"path": "src/render.rs",
"chars": 4257,
"preview": "use crate::total::Total;\nuse anyhow::Result;\nuse cargo_tally::matrix::Matrix;\nuse cargo_tally::timestamp::DateTime;\nuse "
},
{
"path": "src/stream.rs",
"chars": 429,
"preview": "macro_rules! stream {\n ($k:ty => $v:ty; $r:ty) => {\n stream![($k, $v); $r]\n };\n ($d:ty; $r:ty) => {\n "
},
{
"path": "src/timestamp.rs",
"chars": 3170,
"preview": "use chrono::{NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};\nuse differential_dataflow::lattice::Lattice;\nuse std::"
},
{
"path": "src/total.rs",
"chars": 681,
"preview": "use cargo_tally::timestamp::DateTime;\nuse cargo_tally::Release;\nuse std::collections::BTreeSet as Set;\n\npub(crate) struc"
},
{
"path": "src/trace.rs",
"chars": 40,
"preview": "pub(crate) const VERBOSE: bool = false;\n"
},
{
"path": "src/user.rs",
"chars": 2639,
"preview": "use ref_cast::RefCast;\nuse std::borrow::Borrow;\nuse std::cmp::Ordering;\nuse std::fmt::{self, Display};\n\npub(crate) fn va"
},
{
"path": "src/version.rs",
"chars": 7113,
"preview": "use crate::arena::Slice;\nuse semver::{Comparator, Op};\nuse std::cmp::Ordering;\nuse std::fmt::{self, Debug, Display};\nuse"
}
]
About this extraction
This page contains the full source code of the dtolnay/cargo-tally GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 42 files (149.8 KB, approximately 37.1k tokens) and a symbol index of 280 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.