Repository: danobi/prr
Branch: master
Commit: b3929ff88c03
Files: 60
Total size: 169.9 KB
Directory structure:
gitextract_46xp_y2d/
├── .github/
│ └── workflows/
│ ├── docs.yml
│ └── rust.yml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── NOTES.md
├── README.md
├── build.rs
├── completions/
│ └── _prr
├── src/
│ ├── cli.rs
│ ├── main.rs
│ ├── parser.rs
│ ├── prr.rs
│ └── review.rs
├── testdata/
│ ├── add_oneliner
│ ├── approve_review
│ ├── back_to_back_span
│ ├── cross_file_span_ignored
│ ├── cross_hunk_span
│ ├── deleted_file
│ ├── empty_file
│ ├── file_comment
│ ├── hunk_start_no_trailing_whitespace
│ ├── inline_and_review_comments_with_pr_description
│ ├── multiline_comment
│ ├── multiple_files
│ ├── reject_review
│ ├── review/
│ │ ├── apply_pr/
│ │ │ └── review/
│ │ │ ├── .1
│ │ │ └── 1.prr
│ │ ├── pr_description/
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── pr_description_interleaving/
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_comments/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_multiple/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_single/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── status/
│ │ │ ├── metadata
│ │ │ └── review
│ │ └── trailing_whitespace/
│ │ ├── metadata
│ │ └── review
│ ├── review_comment
│ ├── review_comment_whitespace
│ ├── review_comments_interleaved_with_pr_description
│ ├── single_comment
│ ├── spaces_in_filename
│ ├── testgitrepo/
│ │ ├── README-applied.md
│ │ └── README.md
│ ├── trailing_comment
│ ├── unknown_directive
│ ├── unterminated_back_to_back_span
│ └── unterminated_span
└── vim/
├── ftdetect/
│ └── prr.vim
├── ftplugin/
│ └── prr.vim
└── syntax/
└── prr.vim
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/workflows/docs.yml
================================================
name: Deploy docs
on:
push:
branches:
- master
jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: write # To push a branch
pages: write # To push to a GitHub Pages site
id-token: write # To update the deployment status
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install latest mdbook
run: |
tag=$(curl 'https://api.github.com/repos/rust-lang/mdbook/releases/latest' | jq -r '.tag_name')
url="https://github.com/rust-lang/mdbook/releases/download/${tag}/mdbook-${tag}-x86_64-unknown-linux-gnu.tar.gz"
mkdir mdbook
curl -sSL $url | tar -xz --directory=./mdbook
echo `pwd`/mdbook >> $GITHUB_PATH
- name: Build Book
run: |
cd book
mdbook build
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: 'book/book'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
================================================
FILE: .github/workflows/rust.yml
================================================
name: Rust
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
env:
RUSTFLAGS: -Dwarnings
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo test --verbose --no-run
- name: Run tests
run: cargo test --verbose
static:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
with:
targets: x86_64-unknown-linux-musl
- name: Install host deps
run: sudo apt-get install -y musl-tools
- name: Build + link statically
run: cargo build --verbose --release --target=x86_64-unknown-linux-musl --features vendored-openssl
- name: Validate binary is statically linked
run: ldd ./target/x86_64-unknown-linux-musl/release/prr 2>&1 | grep -q "statically linked"
format:
runs-on: ubuntu-latest
steps:
# v4 to match the checkout action version used by the other jobs in this file.
- uses: actions/checkout@v4
- name: Run rustfmt
run: cargo fmt --check
================================================
FILE: .gitignore
================================================
/target
================================================
FILE: Cargo.toml
================================================
[package]
name = "prr"
description = "Mailing list style code reviews for github"
license = "GPL-2.0-or-later"
repository = "https://github.com/danobi/prr"
version = "0.21.0"
edition = "2021"
rust-version = "1.78.0"
build = "build.rs"
[dependencies]
anyhow = "1.0"
clap = { version = "4.4", features = ["derive"] }
git2 = "0.20.0"
http = "1.1.0"
lazy_static = "1.4"
octocrab = "0.38"
prettytable-rs = "0.10.0"
regex = "1.5"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
tokio = { version = "1.17", default-features = false, features = ["macros", "rt-multi-thread"] }
toml = "0.5"
xdg = "2.4"
[dev-dependencies]
pretty_assertions = "1.4.0"
tempfile = "3.8.1"
[build-dependencies]
anyhow = "1.0"
clap = { version = "4.4", features = ["derive"] }
clap_complete = "4.5.2"
clap_mangen = "0.2.20"
[features]
# Statically link a vendored copy of OpenSSL. OpenSSL is used by all of `git2`, `reqwest` and
# `octocrab`, so enabling vendoring for just one of them should be enough.
vendored-openssl = ["git2/vendored-openssl"]
================================================
FILE: LICENSE
================================================
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
================================================
FILE: NOTES.md
================================================
# TODO
- [x] Parse review files
- [x] Create parser
- [x] Create `include_str!()` based unit-tests for expected comments
- [x] Test invalid spans (span that does not have a comment that
terminates it and another span starts)
- [x] Wire up comment uploading to GH
- [x] Inspect response error codes and body
- [x] Fix bug where `line` and `start_line` are being set instead of `position`
- [x] Check if `start_position` is accepted
- [x] Add test for trying to comment on a hunk start
- [x] Figure out how to calculate line for diffs w/ changes on both sides
- [x] Add test for comment at end of review file
- [x] Prohibit cross hunk spanned comments
- [x] Support review-level comments at top of review file
- [x] Manual test that comments on a changed file work
- [x] Support approve/rejecting PRs
- [x] Need some kind of meta syntax (like go's //+)
- [ ] Think about if it could be generalized to comment threads
- [x] Support updating a PR's review file, but ask for confirmation if review file has been modified and not submitted yet
- [x] Maybe even check mtime between review file and submission time?
- [x] Support parsing github url from stdin
- [x] Save commit hash of downloaded review file
- [x] Support [...] snipping
- [ ] Support comment threads. Maybe do nested '>'s like actual mailing lists?
# Thoughts
* Make a comment spanned by inserting a whitespace line before the
start of the span
* To compose with back-to-back spanned comments, the latter comment
must be assumed to be a single line comment. Otherwise, using
a single spanned comment always makes the next comment a span.
This kinda actually makes sense conceptually too cuz if the user
actually wants back-to-back spans then they should've just used
a single, larger span.
* Need to be careful to prohibit a spanned comment over multiple files
================================================
FILE: README.md
================================================
# Pull request review
[![Rust](https://github.com/danobi/prr/actions/workflows/rust.yml/badge.svg)](https://github.com/danobi/prr/actions/workflows/rust.yml)
`prr` is a tool that brings mailing list style code reviews to Github PRs.
This means offline reviews and inline comments, more or less.
To that end, `prr` introduces a new workflow for reviewing PRs:
1. Download the PR into a "review file" on your filesystem
1. Mark up the review file using your favorite text editor
1. Submit the review at your convenience
The tool was born of frustration from using the point-and-click editor text
boxes on PRs. I happen to do a lot of code review and tabbing to and from the
browser to cross reference code from the changes was driving me nuts.
For full documentation, please visit https://doc.dxuuu.xyz/prr/.
================================================
FILE: build.rs
================================================
// Re-use the binary's clap CLI definition inside the build script so man
// pages and shell completions are generated from the same source of truth
// as the compiled binary.
mod cli {
include!("src/cli.rs");
}
// Long-form `--help` / man-page description; attached to the command in
// `main` below before generation.
const LONG_ABOUT: &str =
"prr is a tool that brings mailing list style code reviews to Github PRs. This \
means offline reviews and inline comments, more or less.
To that end, prr introduces a new workflow for reviewing PRs:
1. Download the PR into a \"review file\" on your filesystem
2. Mark up the review file using your favorite text editor
3. Submit the review at your convenience
For full documentation, please visit https://doc.dxuuu.xyz/prr/.";
/// Build script entry point.
///
/// When `GEN_DIR` (or, failing that, `OUT_DIR`) is set, generates man pages
/// and shell completion scripts for the CLI into `<dir>/man` and
/// `<dir>/completions`. Always exports the build `TARGET` triple to the
/// compiled crate via `cargo:rustc-env`.
fn main() -> std::io::Result<()> {
    // Prefer an explicit GEN_DIR (for packaging) over cargo's OUT_DIR;
    // `or_else` keeps the OUT_DIR lookup lazy.
    let gen_root = std::env::var_os("GEN_DIR").or_else(|| std::env::var_os("OUT_DIR"));
    if let Some(root) = gen_root {
        use clap::CommandFactory;

        #[allow(unused_variables)]
        let out_dir = std::path::PathBuf::from(root);

        #[allow(unused_mut, unused_variables)]
        let mut cmd = cli::Cli::command()
            .author("Daniel Xu <dxu@apache.org>")
            .about("Mailing list style code reviews for GitHub")
            .long_about(LONG_ABOUT);

        // Man pages go under <root>/man.
        let man_dir = out_dir.join("man");
        std::fs::create_dir_all(&man_dir)?;
        clap_mangen::generate_to(cmd.clone(), &man_dir)?;

        // One completion script per shell clap_complete supports, under
        // <root>/completions.
        use clap::ValueEnum;
        let completions_dir = out_dir.join("completions");
        std::fs::create_dir_all(&completions_dir)?;
        for shell in clap_complete::Shell::value_variants() {
            clap_complete::generate_to(*shell, &mut cmd, "prr", &completions_dir)?;
        }
    }

    // Expose the target triple to the crate at compile time.
    println!(
        "cargo:rustc-env=TARGET={}",
        std::env::var("TARGET").unwrap()
    );
    println!("cargo:rerun-if-env-changed=GEN_DIR");
    Ok(())
}
================================================
FILE: completions/_prr
================================================
#compdef prr
# Completion helper: offer known prr reviews by parsing `prr status` output.
_prr_reviews() {
local -a reviews
local handle r_status file
# Re-entrancy guard: skip if we are already inside this helper.
# NOTE(review): relies on the dynamically-scoped local below being visible
# on re-entry -- confirm this trips for nested completion calls.
if (( ${_PRR_IN_COMPLETION:-0} )); then
return 1
fi
local _PRR_IN_COMPLETION=1
# Each `prr status --no-titles` line is "<handle> <status> <file> ...";
# turn it into a "handle:status file" entry for _describe.
while IFS=' ' read -r handle r_status file _; do
[[ -z $handle ]] && continue
reviews+=("${handle}:${r_status} ${file}")
done < <(prr status --no-titles 2>/dev/null)
# No known reviews -> report "no matches" to the completion system.
(( ${#reviews[@]} )) || return 1
_describe -t reviews 'review' reviews
}
# Main zsh completion entry point for the `prr` command.
_prr() {
local -a commands
local curcontext="$curcontext" state line
# One "name:description" entry per subcommand.
commands=(
'get:Get a pull request and begin a review'
'edit:Open an existing review in $EDITOR'
'submit:Submit a review'
'apply:Apply a pull request to the working directory'
'status:Print a status summary of all known reviews'
'remove:Remove a review'
)
# Global flags, then dispatch: word 1 is the subcommand name (->cmd),
# the rest are that subcommand's arguments (->args).
_arguments -C \
'(-h --help)'{-h,--help}'[Print help information]' \
'(-V --version)'{-V,--version}'[Print version information]' \
'--config[Path to config file]:config file:_files' \
'1: :->cmd' \
'*:: :->args'
# $state was set by the ->cmd / ->args actions above.
case $state in
cmd)
_describe -t commands 'prr command' commands
;;
args)
# $line[1] holds the subcommand being completed.
case $line[1] in
get)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'(-f --force)'{-f,--force}'[Ignore unsubmitted review checks]' \
'--open[Open review file in $EDITOR after download]' \
'1:pull request (eg. danobi/prr/24):'
;;
edit)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'1:review to edit:_prr_reviews'
;;
submit)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'(-d --debug)'{-d,--debug}'[Print debug output while submitting]' \
'1:review to submit:_prr_reviews'
;;
apply)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'1:pull request (eg. danobi/prr/24):'
;;
status)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'(-n --no-titles)'{-n,--no-titles}'[Hide column titles from output]' \
'1::status argument:'
;;
remove)
_arguments \
'(-h --help)'{-h,--help}'[Print help information]' \
'(-f --force)'{-f,--force}'[Ignore unsubmitted review checks]' \
'(-s --submitted)'{-s,--submitted}'[Remove submitted reviews in addition to provided reviews]' \
'1:review to remove:_prr_reviews'
;;
esac
;;
esac
}
================================================
FILE: src/cli.rs
================================================
use clap::{Parser, Subcommand};
use std::path::PathBuf;
// Subcommands understood by `prr`; doc comments on fields become clap help text.
#[derive(Subcommand, Debug)]
pub(crate) enum Command {
    /// Get a pull request and begin a review
    Get {
        /// Ignore unsubmitted review checks
        #[clap(short, long)]
        force: bool,
        /// Pull request to review (eg. `danobi/prr/24`)
        pr: String,
        /// Open review file in $EDITOR after download
        #[clap(long)]
        open: bool,
    },
    /// Open an existing review in $EDITOR
    Edit {
        /// Pull request to edit (eg. `danobi/prr/24`)
        pr: String,
    },
    /// Submit a review
    Submit {
        /// Pull request to review (eg. `danobi/prr/24`)
        pr: String,
        /// Print debug output while submitting
        #[clap(short, long)]
        debug: bool,
    },
    /// Apply a pull request to the working directory
    ///
    /// This can be useful for building/testing PRs
    Apply { pr: String },
    /// Print a status summary of all known reviews
    Status {
        /// Hide column titles from output
        #[clap(short, long)]
        no_titles: bool,
    },
    /// Remove a review
    Remove {
        /// Pull requests to remove (eg. `danobi/prr/24`)
        prs: Vec<String>,
        /// Ignore unsubmitted review checks
        #[clap(short, long)]
        force: bool,
        /// Remove submitted reviews in addition to provided reviews
        #[clap(short, long)]
        submitted: bool,
    },
}
// Top-level command-line interface definition.
//
// Intentionally no `///` doc comment on the struct: clap would use it as the
// `about` text, and build.rs sets `about`/`long_about` explicitly when
// generating man pages and completions.
#[derive(Parser, Debug)]
#[clap(version)]
#[command(name = "prr")]
pub struct Cli {
    /// Path to config file
    #[clap(long)]
    pub(crate) config: Option<PathBuf>,
    // Which subcommand to run (see `Command` above).
    #[clap(subcommand)]
    pub(crate) command: Command,
}
================================================
FILE: src/main.rs
================================================
use std::env;
use std::path::{Path, PathBuf};
use std::process;
use anyhow::{bail, Context, Result};
use clap::Parser;
mod cli;
mod parser;
mod prr;
mod review;
use cli::*;
use prr::Prr;
/// The name of the local configuration file
pub const LOCAL_CONFIG_FILE_NAME: &str = ".prr.toml";
/// Returns if exists the config file for the current project
/// Search for a project-local config file (`.prr.toml`).
///
/// Walks upward from the current working directory toward the filesystem
/// root, returning the full path of the first `.prr.toml` found, or `None`
/// if there is none (or the cwd cannot be determined).
fn find_project_config_file() -> Option<PathBuf> {
    let mut dir = env::current_dir().ok()?;
    loop {
        let candidate = dir.join(LOCAL_CONFIG_FILE_NAME);
        if candidate.exists() {
            return Some(candidate);
        }
        // Step up to the parent directory; stop once there is none left.
        if !dir.pop() {
            return None;
        }
    }
}
/// Opens a file in $EDITOR
fn open_review(file: &Path) -> Result<()> {
// This check should only ever trip for prr-edit
if !file.try_exists()? {
bail!("Review file does not exist yet");
}
let editor = env::var("EDITOR").context("Failed to read $EDITOR")?;
let status = process::Command::new(editor)
.arg(file)
.status()
.context("Failed to execute editor process")?;
match status.code() {
Some(0) => Ok(()),
Some(rc) => bail!("EDITOR exited unclean: {}", rc),
None => bail!("Failed to get EDITOR exit status"),
}
}
/// Entry point: parse CLI args, load config, and dispatch the subcommand.
#[tokio::main]
async fn main() -> Result<()> {
    let args = Cli::parse();

    // Figure out where config file is: an explicit --config wins, otherwise
    // fall back to the XDG config location (e.g. ~/.config/prr/config.toml).
    let config_path = match args.config {
        Some(c) => c,
        None => {
            let xdg_dirs = xdg::BaseDirectories::with_prefix("prr")?;
            xdg_dirs.get_config_file("config.toml")
        }
    };
    // Also hand Prr any project-local .prr.toml found by walking up from the
    // cwd -- presumably combined with the main config; see Prr::new.
    let prr = Prr::new(&config_path, find_project_config_file())?;

    match args.command {
        Command::Get { pr, force, open } => {
            let (owner, repo, pr_num) = prr.parse_pr_str(&pr)?;
            let review = prr.get_pr(&owner, &repo, pr_num, force).await?;
            let path = review.path();
            // Print the review file path so scripts/users can locate it.
            println!("{}", path.display());
            if open {
                open_review(&path).context("Failed to open review file")?;
            }
        }
        Command::Edit { pr } => {
            let (owner, repo, pr_num) = prr.parse_pr_str(&pr)?;
            let review = prr.get_review(&owner, &repo, pr_num)?;
            open_review(&review.path()).context("Failed to open review file")?;
        }
        Command::Submit { pr, debug } => {
            let (owner, repo, pr_num) = prr.parse_pr_str(&pr)?;
            prr.submit_pr(&owner, &repo, pr_num, debug).await?;
        }
        Command::Apply { pr } => {
            let (owner, repo, pr_num) = prr.parse_pr_str(&pr)?;
            // Apply against the current working directory.
            prr.apply_pr(&owner, &repo, pr_num, Path::new("./"))?;
        }
        Command::Status { no_titles } => {
            prr.print_status(no_titles)?;
        }
        Command::Remove {
            prs,
            force,
            submitted,
        } => {
            prr.remove(&prs, force, submitted).await?;
        }
    }
    Ok(())
}
================================================
FILE: src/parser.rs
================================================
use anyhow::{anyhow, bail, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;
// Use lazy static to ensure each regex is only compiled once
lazy_static! {
    // Regex for the start of a hunk. The start of a hunk should look like:
    //
    //     `@@ -731,7 +731,7 @@[...]`
    //
    // Captures: `lstart`/`llen` describe the left (pre-change) side,
    // `rstart`/`rlen` the right (post-change) side. The `llen`/`rlen` groups
    // are optional because git omits them for single-line hunks (`@@ -5 +5 @@`);
    // `lstart` and `rstart` are mandatory whenever the regex matches.
    static ref HUNK_START: Regex = Regex::new(r"^@@ -(?P<lstart>\d+)(?:,(?P<llen>\d+))? \+(?P<rstart>\d+)(?:,(?P<rlen>\d+))? @@").unwrap();

    // Regex for start of a file diff. The start of a file diff should look like:
    //
    //     `diff --git a/ch1.txt b/ch1.txt`
    //
    // The `new` capture is the `b/` (post-change) filename. `.+` is greedy,
    // which lets filenames containing spaces parse correctly.
    static ref DIFF_START: Regex = Regex::new(r"^diff --git a/.+ b/(?P<new>.+)$").unwrap();
}
/// The location of a line
///
/// The distinction between Left and Right is important when commenting on
/// deleted or added lines. A useful way to think about the line location is
/// the line number a comment should be attached to in the file pre-change (left)
/// or the file post-change (right)
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LineLocation {
    /// The "red"/deleted side of the diff; the payload is the line number
    /// in the pre-change file
    Left(u64),
    /// The "green"/added or "white"/existing side of the diff; the payload is
    /// the line number in the post-change file
    Right(u64),
}
/// Represents a single inline comment on a review
#[derive(Debug, PartialEq, Eq)]
pub struct InlineComment {
    /// File the comment is in
    ///
    /// Note that this is the new filename if the file was also moved
    pub file: String,
    /// Line the comment is attached to (the last line for a spanned comment).
    /// See `LineLocation` for docs on semantics
    pub line: LineLocation,
    /// For a spanned comment, the first line of the span. See `line` for docs on semantics
    pub start_line: Option<LineLocation>,
    /// The user-supplied review comment
    pub comment: String,
}
/// Represents a single file-level comment on a review
///
/// Note that submitting a file-level comment requires a commit id in the
/// review metadata (see `Prr::submit_pr`)
#[derive(Debug, PartialEq, Eq)]
pub struct FileComment {
    /// File the comment is in
    ///
    /// Note that this is the new filename if the file was also moved
    pub file: String,
    /// The user-supplied review comment
    pub comment: String,
}
/// The overall disposition of a review, set via an `@prr` directive.
/// Maps to GitHub's review events (APPROVE / REQUEST_CHANGES / COMMENT)
/// at submission time.
#[derive(Debug, PartialEq, Eq)]
pub enum ReviewAction {
    /// Approve the pull request (`@prr approve`)
    Approve,
    /// Request changes on the pull request (`@prr reject`)
    RequestChanges,
    /// Comment without explicit approval or rejection (`@prr comment`)
    Comment,
}
/// Represents a comment of some sort on a review
#[derive(Debug, PartialEq, Eq)]
pub enum Comment {
    /// Overall review comment (the summary comment)
    Review(String),
    /// An inline comment (attached to a line)
    Inline(InlineComment),
    /// Overall approve, reject, or comment on review
    ReviewAction(ReviewAction),
    /// A file-level comment (attached to the whole file)
    File(FileComment),
}
/// Parser state at the top of the review file, before the first file diff
#[derive(Default)]
struct StartState {
    /// Each line of review-level comment is stored as an entry
    comment: Vec<String>,
    /// Only if there was unquoted content in the Start state should we
    /// send a review comment. If there was no unquoted content, there
    /// was no review comment.
    had_unquoted_content: bool,
    /// After we have seen the prr directive we assume the review
    /// comment is done. This allows us to ignore the PR description as
    /// long as we have the prr directive before it.
    had_review_action: bool,
}
/// Parser state while inside a `diff --git ...` header and the lines before
/// the first hunk of that file
struct FilePreambleState {
    /// Relative path of the file under diff
    file: String,
    /// Each line of file-level comment is stored as an entry
    comment: Vec<String>,
}
/// Parser state while inside the hunks of a file's diff
#[derive(Clone)]
struct FileDiffState {
    /// Relative path of the file under diff
    file: String,
    /// Current left line position. See `LineLocation` for docs on semantics of `line`
    left_line: u64,
    /// Current right line position. See `LineLocation` for docs on semantics of `line`
    right_line: u64,
    /// Current line position (the side the cursor is on plus its line number)
    line: LineLocation,
    /// First line of the span. See `LineLocation` for docs on
    /// semantics of `line`
    span_start_line: Option<LineLocation>,
}
/// Parser state after a blank, unquoted line: this could either be the start
/// of a span or a leading blank line of a comment
struct SpanStartOrCommentState {
    /// State of the file diff before we entered this state
    file_diff_state: FileDiffState,
}
/// Parser state while inside a user-supplied inline comment
struct CommentState {
    /// State of the file diff before we entered comment processing
    file_diff_state: FileDiffState,
    /// Each line of comment is stored as an entry
    comment: Vec<String>,
}
/// State machine states
///
/// Only the following state transitions are valid (as implemented in
/// `ReviewParser::parse_line`):
///
/// * `Start`              -> `FilePreamble`
/// * `FilePreamble`       -> `FilePreamble` | `FileDiff`
/// * `FileDiff`           -> `FilePreamble` | `SpanStartOrComment` | `Comment`
/// * `SpanStartOrComment` -> `FileDiff` | `Comment`
/// * `Comment`            -> `FilePreamble` | `FileDiff`
///
enum State {
    /// Starting state
    Start(StartState),
    /// The `diff --git a/...` preamble as well as the lines before the first hunk
    FilePreamble(FilePreambleState),
    /// We are inside the diff of a file
    FileDiff(FileDiffState),
    /// We are either the start of a span or the beginning of a comment
    ///
    /// The uncertainty comes from the fact that comments typically begin with one
    /// or more newlines
    SpanStartOrComment(SpanStartOrCommentState),
    /// We are inside a user-supplied comment
    Comment(CommentState),
}
/// Simple state machine to parse a review file
///
/// Feed lines via `parse_line` and call `finish` at end of input to flush
/// any trailing comment.
pub struct ReviewParser {
    /// Current state of the state machine
    state: State,
}
/// Returns true if `s` marks the beginning of a new file diff
/// (ie. a `diff --git ...` header line)
fn is_diff_header(s: &str) -> bool {
    s.strip_prefix("diff --git ").is_some()
}
/// Parses lines in the form of `@prr DIRECTIVE`
///
/// Leading/trailing whitespace around the line is ignored.
///
/// Returns Some(directive) if found, else None
fn is_prr_directive(s: &str) -> Option<&str> {
    // `strip_prefix` already returns an Option, so the manual
    // if-let/else around it was redundant
    s.trim().strip_prefix("@prr ")
}
/// Parses the new filename out of a diff header
///
/// Note this is the `b/` (post-change) side of the header
fn parse_diff_header(line: &str) -> Result<String> {
    DIFF_START
        .captures(line)
        // `new` is a mandatory capture group, so unwrap cannot fail here
        .map(|captures| captures.name("new").unwrap().as_str().trim().to_owned())
        .ok_or_else(|| anyhow!("Invalid diff header: could not parse"))
}
/// Parses the starting left & right line numbers out of a hunk header
/// (eg. `@@ -731,7 +731,7 @@`).
///
/// Returns `Ok(None)` if `line` is not a hunk header.
fn parse_hunk_start(line: &str) -> Result<Option<(u64, u64)>> {
    if let Some(captures) = HUNK_START.captures(line) {
        let hunk_start_line_left: u64 = captures
            .name("lstart")
            .unwrap()
            .as_str()
            .parse()
            .context("Failed to parse hunk start left line")?;
        // `rstart` is a mandatory capture group in HUNK_START, so it is
        // always present whenever the regex matches. The previous
        // `unwrap_or_else` fallback (returning "0" / unreachable!) was
        // dead code and has been removed.
        let hunk_start_line_right: u64 = captures
            .name("rstart")
            .unwrap()
            .as_str()
            .parse()
            .context("Failed to parse hunk start right line")?;

        // Note that for newly added files or deleted files, both sides
        // of the line info might be zero. Callers must therefore use
        // `saturating_*` operations when subtracting from these values.
        return Ok(Some((hunk_start_line_left, hunk_start_line_right)));
    }

    Ok(None)
}
/// Returns true if this diff line belongs to the left (deleted) side
fn is_left_line(line: &str) -> bool {
    // '-' is ASCII, so inspecting the first byte is equivalent to
    // `starts_with('-')`
    matches!(line.as_bytes().first(), Some(b'-'))
}
/// Given the current line and line positions, returns what the next line
/// positions should be as a `(left, right)` pair
fn get_next_lines(line: &str, left: u64, right: u64) -> (u64, u64) {
    match line.chars().next() {
        // Deleted line: only the left side advances
        Some('-') => (left + 1, right),
        // Added line: only the right side advances
        Some('+') => (left, right + 1),
        // Context (or empty) line: both sides advance
        _ => (left + 1, right + 1),
    }
}
impl ReviewParser {
    /// Creates a parser in the initial `Start` state
    pub fn new() -> ReviewParser {
        ReviewParser {
            state: State::Start(StartState::default()),
        }
    }

    /// Feeds one line of the review file into the state machine.
    ///
    /// Returns `Ok(Some(_))` each time a complete comment (review-level,
    /// file-level, inline, or review action) is recognized, `Ok(None)` when
    /// more input is required, and an error for malformed reviews (eg. an
    /// unterminated or cross-hunk span, or an unknown `@prr` directive).
    pub fn parse_line(&mut self, mut line: &str) -> Result<Option<Comment>> {
        // Quoted lines ("> ...") are original diff content; unquoted lines
        // are user-supplied
        let is_quoted = line.starts_with('>');
        if is_quoted {
            // Strip the quote prefix (with or without its trailing space) so
            // the rest of the parser sees raw diff text
            if let Some(stripped) = line.strip_prefix("> ") {
                line = stripped;
            } else if let Some(stripped) = line.strip_prefix('>') {
                line = stripped;
            }
        }

        match &mut self.state {
            // We are adding all the lines, regardless if they are
            // quoted or not, because they may be interleaving the
            // PR description as long as we haven't seen the prr
            // directive. Once the diff header starts, we determine
            // whether or not we should send the review comment. The
            // comment should only be sent, if we ever encountered a
            // non-quoted string in this state.
            State::Start(state) => {
                if is_quoted {
                    if !is_diff_header(line) {
                        // Quoted (PR description) lines only accumulate into
                        // the review comment until a review action is seen
                        if !state.had_review_action {
                            state.comment.push("> ".to_owned() + line);
                        }
                        return Ok(None);
                    }

                    // Reached the first file diff: emit the review-level
                    // comment, but only if the user actually wrote something
                    let mut review_comment = None;
                    if state.had_unquoted_content {
                        review_comment =
                            Some(Comment::Review(state.comment.join("\n").trim().to_string()));
                    }

                    self.state = State::FilePreamble(FilePreambleState {
                        file: parse_diff_header(line)?,
                        comment: vec![],
                    });

                    return Ok(review_comment);
                } else if let Some(d) = is_prr_directive(line) {
                    state.had_review_action = true;
                    return match d {
                        "approve" => Ok(Some(Comment::ReviewAction(ReviewAction::Approve))),
                        "reject" => Ok(Some(Comment::ReviewAction(ReviewAction::RequestChanges))),
                        "comment" => Ok(Some(Comment::ReviewAction(ReviewAction::Comment))),
                        _ => bail!("Unknown @prr directive: {}", d),
                    };
                } else {
                    state.comment.push(line.to_owned());
                    if !state.had_unquoted_content {
                        state.had_unquoted_content = true
                    }
                }

                Ok(None)
            }
            State::FilePreamble(state) => {
                // Unquoted lines in the preamble form a file-level comment
                if !is_quoted {
                    state.comment.push(line.to_owned());
                }

                // Back-to-back diff headers (eg. a file with no hunks)
                if is_diff_header(line) {
                    self.state = State::FilePreamble(FilePreambleState {
                        file: parse_diff_header(line)?,
                        comment: vec![],
                    });

                    return Ok(None);
                }

                if let Some((mut left_start, mut right_start)) = parse_hunk_start(line)? {
                    // Subtract 1 b/c this line is before the actual diff hunk
                    left_start = left_start.saturating_sub(1);
                    right_start = right_start.saturating_sub(1);

                    // Finish up our file-level comment if we had one
                    let comment = if !state.comment.is_empty() {
                        Some(Comment::File(FileComment {
                            file: state.file.to_owned(),
                            comment: state.comment.join("\n").trim().to_string(),
                        }))
                    } else {
                        None
                    };

                    self.state = State::FileDiff(FileDiffState {
                        file: state.file.to_owned(),
                        left_line: left_start,
                        right_line: right_start,
                        line: if is_left_line(line) {
                            LineLocation::Left(left_start)
                        } else {
                            LineLocation::Right(right_start)
                        },
                        span_start_line: None,
                    });

                    if let Some(comment) = comment {
                        return Ok(Some(comment));
                    }
                }

                Ok(None)
            }
            State::FileDiff(state) => {
                if is_quoted {
                    if is_diff_header(line) {
                        // A span must be terminated by a comment before the
                        // next file's diff starts
                        if state.span_start_line.is_some() {
                            bail!(
                                "Detected span that was not terminated with a comment, file: {}",
                                state.file
                            );
                        }

                        self.state = State::FilePreamble(FilePreambleState {
                            file: parse_diff_header(line)?,
                            comment: vec![],
                        });
                    } else if let Some((mut left_start, mut right_start)) = parse_hunk_start(line)?
                    {
                        // Spans may not cross hunk boundaries
                        if state.span_start_line.is_some() {
                            bail!("Detected cross chunk span, file: {}", state.file);
                        }

                        // Subtract 1 b/c this line is before the actual diff hunk
                        left_start = left_start.saturating_sub(1);
                        right_start = right_start.saturating_sub(1);

                        state.left_line = left_start;
                        state.right_line = right_start;
                        if is_left_line(line) {
                            state.line = LineLocation::Left(left_start);
                        } else {
                            state.line = LineLocation::Right(right_start);
                        }
                    } else {
                        // A regular diff line: advance the line counters
                        let (next_left, next_right) =
                            get_next_lines(line, state.left_line, state.right_line);

                        state.left_line = next_left;
                        state.right_line = next_right;
                        if is_left_line(line) {
                            state.line = LineLocation::Left(next_left);
                        } else {
                            state.line = LineLocation::Right(next_right);
                        }
                    }

                    return Ok(None);
                }

                // Now that we know this line is not quoted, there's only two options:
                //
                //      1) beginning of a spanned comment (blank line)
                //      2) beginning of a comment (non-blank line)
                if line.trim().is_empty() {
                    self.state = State::SpanStartOrComment(SpanStartOrCommentState {
                        file_diff_state: state.clone(),
                    })
                } else {
                    self.state = State::Comment(CommentState {
                        file_diff_state: state.clone(),
                        comment: vec![line.to_owned()],
                    })
                }

                Ok(None)
            }
            State::SpanStartOrComment(state) => {
                if is_quoted {
                    // A quoted line here means the blank line opened a span;
                    // nested spans are not allowed
                    if state.file_diff_state.span_start_line.is_some() {
                        bail!(
                            "Detected span that was not terminated with a comment, file: {}",
                            state.file_diff_state.file
                        );
                    }

                    // Back to the original file diff, with this line recorded
                    // as the start of a span
                    let (next_left, next_right) = get_next_lines(
                        line,
                        state.file_diff_state.left_line,
                        state.file_diff_state.right_line,
                    );
                    self.state = State::FileDiff(FileDiffState {
                        file: state.file_diff_state.file.to_owned(),
                        left_line: next_left,
                        right_line: next_right,
                        line: if is_left_line(line) {
                            LineLocation::Left(next_left)
                        } else {
                            LineLocation::Right(next_right)
                        },
                        span_start_line: Some(if is_left_line(line) {
                            LineLocation::Left(next_left)
                        } else {
                            LineLocation::Right(next_right)
                        }),
                    });

                    Ok(None)
                } else if line.trim().is_empty() {
                    // In a multi-line span start
                    Ok(None)
                } else {
                    // In a comment now
                    self.state = State::Comment(CommentState {
                        file_diff_state: state.file_diff_state.clone(),
                        comment: vec![line.to_owned()],
                    });

                    Ok(None)
                }
            }
            State::Comment(state) => {
                if is_quoted {
                    // A quoted line terminates the comment; emit it
                    let comment = Comment::Inline(InlineComment {
                        file: state.file_diff_state.file.clone(),
                        line: state.file_diff_state.line.clone(),
                        start_line: state.file_diff_state.span_start_line.clone(),
                        comment: state.comment.join("\n").trim_end().to_string(),
                    });

                    if is_diff_header(line) {
                        self.state = State::FilePreamble(FilePreambleState {
                            file: parse_diff_header(line)?,
                            comment: vec![],
                        });
                    } else {
                        let (next_left, next_right) = get_next_lines(
                            line,
                            state.file_diff_state.left_line,
                            state.file_diff_state.right_line,
                        );

                        self.state = State::FileDiff(FileDiffState {
                            file: state.file_diff_state.file.to_owned(),
                            left_line: next_left,
                            right_line: next_right,
                            line: if is_left_line(line) {
                                LineLocation::Left(next_left)
                            } else {
                                LineLocation::Right(next_right)
                            },
                            span_start_line: None,
                        });
                    }

                    return Ok(Some(comment));
                }

                state.comment.push(line.to_owned());
                Ok(None)
            }
        }
    }

    /// Flushes the parser at end of input.
    ///
    /// A comment at the very end of the review file has no following quoted
    /// line to terminate it, so it is emitted here.
    pub fn finish(self) -> Option<Comment> {
        match self.state {
            State::Comment(state) => Some(Comment::Inline(InlineComment {
                file: state.file_diff_state.file,
                line: state.file_diff_state.line,
                start_line: state.file_diff_state.span_start_line,
                comment: state.comment.join("\n").trim_end().to_string(),
            })),
            _ => None,
        }
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for `ReviewParser`, driven by review files in `testdata/`

    use super::*;

    /// Runs the parser over `input` and asserts that parsing fails on some line
    fn test_fail(input: &str) {
        let mut parser = ReviewParser::new();
        for line in input.lines() {
            if parser.parse_line(line).is_err() {
                return;
            }
        }

        panic!("Parser succeeded when it should have failed");
    }

    /// Runs the parser over `input` (including the final `finish()` flush)
    /// and asserts the emitted comments match `expected` exactly, in order
    fn test(input: &str, expected: &[Comment]) {
        let mut parser = ReviewParser::new();
        let mut comments = Vec::new();

        for line in input.lines() {
            if let Some(c) = parser.parse_line(line).unwrap() {
                comments.push(c);
            }
        }

        if let Some(c) = parser.finish() {
            comments.push(c);
        }

        assert!(
            comments == expected,
            "Parsed different comments than expected.\n Got: {:#?}\nExpected: {:#?}",
            comments,
            expected
        );
    }

    #[test]
    fn single_comment() {
        let input = include_str!("../testdata/single_comment");
        let expected = vec![Comment::Inline(InlineComment {
            file: "libbpf-cargo/src/btf/btf.rs".to_string(),
            line: LineLocation::Right(734),
            start_line: Some(LineLocation::Right(731)),
            comment: "Comment 1".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn approve_review() {
        let input = include_str!("../testdata/approve_review");
        let expected = vec![
            Comment::ReviewAction(ReviewAction::Approve),
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(734),
                start_line: Some(LineLocation::Right(731)),
                comment: "Comment 1".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn reject_review() {
        let input = include_str!("../testdata/reject_review");
        let expected = vec![
            Comment::ReviewAction(ReviewAction::RequestChanges),
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(734),
                start_line: Some(LineLocation::Right(731)),
                comment: "Comment 1".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn review_comment() {
        let input = include_str!("../testdata/review_comment");
        let expected = vec![
            Comment::Review("Review comment".to_string()),
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(734),
                start_line: Some(LineLocation::Right(731)),
                comment: "Comment 1".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn file_comment() {
        let input = include_str!("../testdata/file_comment");
        let expected = vec![Comment::File(FileComment {
            file: "libbpf-cargo/src/btf/btf.rs".to_string(),
            comment: "This is a file-level comment!".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn review_comment_whitespace() {
        let input = include_str!("../testdata/review_comment_whitespace");
        let expected = vec![
            Comment::ReviewAction(ReviewAction::Approve),
            Comment::Review("Review comment".to_string()),
        ];

        test(input, &expected);
    }

    #[test]
    fn multiline_comment() {
        let input = include_str!("../testdata/multiline_comment");
        let expected = vec![Comment::Inline(InlineComment {
            file: "libbpf-cargo/src/btf/btf.rs".to_string(),
            line: LineLocation::Right(736),
            start_line: None,
            comment: "Comment line 1\nComment line 2\n\nComment line 4".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn back_to_back_span() {
        let input = include_str!("../testdata/back_to_back_span");
        let expected = vec![
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(734),
                start_line: Some(LineLocation::Right(731)),
                comment: "Comment 1".to_string(),
            }),
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(737),
                start_line: None,
                comment: "Comment 2".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn multiple_files() {
        let input = include_str!("../testdata/multiple_files");
        let expected = vec![
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/btf/btf.rs".to_string(),
                line: LineLocation::Right(734),
                start_line: None,
                comment: "Comment 1".to_string(),
            }),
            Comment::Inline(InlineComment {
                file: "libbpf-cargo/src/test.rs".to_string(),
                line: LineLocation::Right(2159),
                start_line: None,
                comment: "Comment 2".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn hunk_start_no_trailing_whitespace() {
        let input = include_str!("../testdata/hunk_start_no_trailing_whitespace");
        let expected = vec![Comment::Inline(InlineComment {
            file: "ch5.txt".to_string(),
            line: LineLocation::Right(7),
            start_line: None,
            comment: "Great passage".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn add_oneliner() {
        let input = include_str!("../testdata/add_oneliner");
        let expected = vec![
            Comment::Inline(InlineComment {
                file: "foo.rs".to_string(),
                line: LineLocation::Right(0),
                start_line: None,
                comment: "Comment 1".to_string(),
            }),
            Comment::Inline(InlineComment {
                file: "foo.rs".to_string(),
                line: LineLocation::Right(1),
                start_line: None,
                comment: "Comment 2".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn deleted_file() {
        // Deleted files only have left-side (pre-change) line locations
        let input = include_str!("../testdata/deleted_file");
        let expected = vec![Comment::Inline(InlineComment {
            file: "ch1.txt".to_string(),
            line: LineLocation::Left(58),
            start_line: Some(LineLocation::Left(1)),
            comment: "Comment 1".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn empty_file() {
        let input = include_str!("../testdata/empty_file");
        let expected = vec![Comment::Inline(InlineComment {
            file: "libbpf-cargo/src/test.rs".to_string(),
            line: LineLocation::Right(2159),
            start_line: None,
            comment: "Comment".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn trailing_comment() {
        // A comment at EOF is flushed by `finish()`
        let input = include_str!("../testdata/trailing_comment");
        let expected = vec![Comment::Inline(InlineComment {
            file: "ch1.txt".to_string(),
            line: LineLocation::Left(59),
            start_line: Some(LineLocation::Left(1)),
            comment: "Comment 1".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    /// https://github.com/danobi/prr/issues/3
    fn spaces_in_filename() {
        let input = include_str!("../testdata/spaces_in_filename");
        let expected = vec![Comment::Inline(InlineComment {
            file: "build/scripts/grafana/provisioning/dashboards/Docker Prometheus Monitoring-1571332751387.json".to_string(),
            line: LineLocation::Right(2),
            start_line: None,
            comment: "foo".to_string(),
        })];

        test(input, &expected);
    }

    #[test]
    fn unterminated_span() {
        let input = include_str!("../testdata/unterminated_span");
        test_fail(input);
    }

    #[test]
    fn cross_file_span_ignored() {
        // NOTE(review): despite the "ignored" name, this input is expected to
        // fail parsing -- confirm the test name still matches intent
        let input = include_str!("../testdata/cross_file_span_ignored");
        test_fail(input);
    }

    #[test]
    fn unterminated_back_to_back_span() {
        let input = include_str!("../testdata/unterminated_back_to_back_span");
        test_fail(input);
    }

    #[test]
    fn cross_hunk_span() {
        let input = include_str!("../testdata/cross_hunk_span");
        test_fail(input);
    }

    #[test]
    fn unknown_directive() {
        let input = include_str!("../testdata/unknown_directive");
        test_fail(input);
    }

    #[test]
    fn hunk_oneliner_regex() {
        // Git omits the length field for single-line sides (`+1` vs `+1,2`)
        let captures = HUNK_START
            .captures("@@ -0,0 +1 @@")
            .expect("Must match regex.");
        assert_eq!(captures.name("rstart").unwrap().as_str(), "1");
        assert!(captures.name("rlen").is_none());
        assert_eq!(captures.name("lstart").unwrap().as_str(), "0");
        assert_eq!(captures.name("llen").unwrap().as_str(), "0");
    }

    #[test]
    fn hunk_normal_regex() {
        let captures = HUNK_START
            .captures("@@ -0,7 +0,1 @@")
            .expect("Must match regex.");
        assert_eq!(captures.name("rstart").unwrap().as_str(), "0");
        assert_eq!(captures.name("rlen").unwrap().as_str(), "1");
        assert_eq!(captures.name("lstart").unwrap().as_str(), "0");
        assert_eq!(captures.name("llen").unwrap().as_str(), "7");
    }

    #[test]
    fn hunk_only_one_line_on_each_side() {
        let captures = HUNK_START
            .captures("@@ -5 +5 @@")
            .expect("Must match regex.");
        assert_eq!(captures.name("rstart").unwrap().as_str(), "5");
        assert!(captures.name("rlen").is_none());
        assert_eq!(captures.name("lstart").unwrap().as_str(), "5");
        assert!(captures.name("llen").is_none());
    }

    #[test]
    fn inline_and_review_comments_with_pr_description_present() {
        let input = include_str!("../testdata/inline_and_review_comments_with_pr_description");
        let expected = vec![
            Comment::ReviewAction(ReviewAction::RequestChanges),
            Comment::Review("Not necessary.".to_string()),
            Comment::Inline(InlineComment {
                file: "README.md".to_string(),
                line: LineLocation::Right(2),
                start_line: None,
                comment: "Doesn't seem necessary ...".to_string(),
            }),
        ];

        test(input, &expected);
    }

    #[test]
    fn review_comments_interleaved_with_pr_description() {
        let input = include_str!("../testdata/review_comments_interleaved_with_pr_description");
        let expected = vec![
            Comment::ReviewAction(ReviewAction::RequestChanges),
            Comment::Review("Not necessary.\n\n\n> This is just for testing purposes.\n\nThis might be fine or not.".to_string()),
            Comment::Inline(InlineComment {
                file: "README.md".to_string(),
                line: LineLocation::Right(2),
                start_line: None,
                comment: "Doesn't seem necessary ...".to_string(),
            }),
        ];

        test(input, &expected);
    }
}
================================================
FILE: src/prr.rs
================================================
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{anyhow, bail, Context, Result};
use git2::{ApplyLocation, Diff, Repository, StatusOptions};
use http::{StatusCode, Uri};
use lazy_static::lazy_static;
use octocrab::Octocrab;
use prettytable::{format, row, Table};
use serde_derive::Deserialize;
use serde_json::{json, Value};
use crate::parser::{FileComment, LineLocation, ReviewAction};
use crate::review::{get_all_existing, Review, ReviewStatus};
use regex::Regex;
// Use lazy static to ensure regex is only compiled once
lazy_static! {
    // Regex for short input. Example:
    //
    //     danobi/prr-test-repo/6
    //
    // NOTE(review): the pattern has no trailing `$` anchor, so any extra
    // characters after the PR number are silently ignored -- confirm this
    // leniency is intentional.
    static ref SHORT: Regex = Regex::new(r"^(?P<org>[\w\-_\.]+)/(?P<repo>[\w\-_\.]+)/(?P<pr_num>\d+)").unwrap();
}
/// Default GitHub API base URL, used when no custom `url` is configured
const GITHUB_BASE_URL: &str = "https://api.github.com";
/// Resolves a GitHub token from either a config value or environment variables.
///
/// A non-empty config token always wins and is returned as-is. Otherwise the
/// standard GitHub environment variables are checked in gh's order of
/// precedence (see https://cli.github.com/manual/gh_help_environment):
/// GH_TOKEN, GITHUB_TOKEN, GH_ENTERPRISE_TOKEN, GITHUB_ENTERPRISE_TOKEN.
/// The first variable that is set must be non-empty; if none are set, an
/// error is returned. `env_lookup` is injected to keep this testable.
fn resolve_github_token<F>(config_token: Option<&str>, env_lookup: F) -> Result<String>
where
    F: for<'a> Fn(&'a str) -> Result<String, std::env::VarError>,
{
    // A non-empty config token short-circuits everything else
    if let Some(token) = config_token.filter(|t| !t.is_empty()) {
        return Ok(token.to_string());
    }

    for env_var in [
        "GH_TOKEN",
        "GITHUB_TOKEN",
        "GH_ENTERPRISE_TOKEN",
        "GITHUB_ENTERPRISE_TOKEN",
    ] {
        if let Ok(token) = env_lookup(env_var) {
            // A set-but-empty variable is treated as a hard error rather
            // than falling through to the next variable
            if token.is_empty() {
                bail!("Environment variable '{}' located but is empty", env_var);
            }
            return Ok(token);
        }
    }

    bail!("No GitHub token found in config or environment variables")
}
/// User-level prr configuration (the `[prr]` table of the config file)
#[derive(Debug, Deserialize)]
struct PrrConfig {
    /// GH personal token
    ///
    /// If unset or empty, the token is resolved from the environment
    /// instead (see `resolve_github_token`)
    token: Option<String>,
    /// Directory to place review files
    workdir: Option<String>,
    /// Github URL
    ///
    /// Useful for enterprise instances with custom URLs
    url: Option<String>,
    /// Activate experimental PR metadata support. Currently this option
    /// just activates downloading the actual PR description in addition
    /// to the diff.
    #[serde(default)]
    activate_pr_metadata_experiment: bool,
}
/// Project-local configuration (the `[local]` table, typically supplied by a
/// `.prr.toml` file found by walking up from the current directory)
#[derive(Debug, Deserialize)]
struct PrrLocalConfig {
    /// Default repository prefix for this current project (prepended to
    /// short PR references in `parse_pr_str`)
    repository: Option<String>,
    /// Local workdir override; relative paths are resolved against the
    /// local config file's directory
    workdir: Option<String>,
}
/// Top-level parsed configuration
#[derive(Debug, Deserialize)]
struct Config {
    /// Required `[prr]` section
    prr: PrrConfig,
    /// Optional `[local]` section
    local: Option<PrrLocalConfig>,
}
/// Main struct that coordinates all business logic and talks to GH
pub struct Prr {
    /// User config
    config: Config,
    /// Path to local config file, if one was found and parsed
    local_config: Option<PathBuf>,
    /// Instantiated github client
    crab: Octocrab,
}
impl Config {
    /// Returns the GH API base URL to use.
    ///
    /// Custom URLs are normalized to end with a trailing `/`; otherwise the
    /// final path component can be truncated when URLs are joined against it.
    ///
    /// See: https://docs.rs/reqwest/0.11.22/reqwest/struct.Url.html#method.join
    fn url(&self) -> String {
        match &self.prr.url {
            Some(url) if url.ends_with('/') => url.clone(),
            Some(url) => format!("{}/", url),
            None => GITHUB_BASE_URL.into(),
        }
    }
}
impl Prr {
/// Create a new Prr object using the main config and/or the local config.
/// If a local config has the `[prr]` section use this one instead of the main config.
/// If `[prr]` section is not defined merge the local config with the main local.
/// If local config file does not exist, use only the main config.
///
/// A `[prr]` redefinition must be complete; if not, panics with a
/// `redefinition of table `prr` for key `prr` at ...`
pub fn new(config_path: &Path, local_config_path: Option<PathBuf>) -> Result<Prr> {
let config_contents = fs::read_to_string(config_path).context("Failed to read config")?;
let local_config_contents = if let Some(project_config_path) = &local_config_path {
fs::read_to_string(project_config_path).context("Failed to read local config")?
} else {
String::new()
};
let override_config = toml::from_str::<Config>(&local_config_contents);
let config: Config = match override_config {
// If `override_config` does not raise an error, use this one as config.
Ok(config) => config,
// Else merge the two config contents.
Err(_) => {
let contents = format!("{}\n{}", config_contents, local_config_contents);
toml::from_str::<Config>(&contents)?
}
};
let token = resolve_github_token(config.prr.token.as_deref(), |var| env::var(var))
.context("Failed to locate GitHub token")?;
let octocrab = Octocrab::builder()
.personal_token(token)
.base_uri(config.url())
.context("Failed to parse github base URL")?
.build()
.context("Failed to create GH client")?;
Ok(Prr {
config,
local_config: local_config_path,
crab: octocrab,
})
}
/// Returns path to prr workdir
fn workdir(&self) -> Result<PathBuf> {
// Try local config first
if let Some(lcfg) = &self.config.local {
// Can't have a parsed local config without a stored path
debug_assert!(self.local_config.is_some());
if let Some(wd) = &lcfg.workdir {
if wd.starts_with('~') {
bail!("Invalid workdir={wd}: may not use '~'");
}
// We allow resolving relative paths in local config relative to the local config file
let mut resolved_wd = PathBuf::new();
// No parent seems impossible but I think it's correct to not push anything
if let Some(local_dir) = self.local_config.as_ref().unwrap().parent() {
resolved_wd.push(local_dir);
}
// NB: pushing an absolute path overwrites the PathBuf
resolved_wd.push(wd);
return Ok(resolved_wd);
}
}
// Now try global config
if let Some(wd) = &self.config.prr.workdir {
if wd.starts_with('~') {
bail!("Invalid workdir={wd}: may not use '~'");
}
let p = Path::new(wd).to_path_buf();
if !p.is_absolute() {
bail!("Invalid workdir={wd}: must be absolute path");
}
return Ok(p);
}
// Default workdir
let xdg_dirs = xdg::BaseDirectories::with_prefix("prr")?;
Ok(xdg_dirs.get_data_home())
}
pub fn is_pr_metadata_experiment_active(&self) -> bool {
self.config.prr.activate_pr_metadata_experiment
}
/// Parses a PR string in the form of `danobi/prr/24` and returns
/// a tuple ("danobi", "prr", 24) or an error if string is malformed.
pub fn parse_pr_str(&self, s: &str) -> Result<(String, String, u64)> {
let repo = if let Some(local_config) = &self.config.local {
if let Some(url) = &local_config.repository {
if url.ends_with('/') {
format!("{}{}", url, s)
} else {
format!("{}/{}", url, s)
}
} else {
s.to_string()
}
} else {
s.to_string()
};
if let Some(captures) = SHORT.captures(&repo) {
let owner = captures.name("org").unwrap().as_str().to_owned();
let repo = captures.name("repo").unwrap().as_str().to_owned();
let pr_nr: u64 = captures
.name("pr_num")
.unwrap()
.as_str()
.parse()
.context("Failed to parse pr number")?;
return Ok((owner, repo, pr_nr));
}
if repo.starts_with("http") || repo.contains("://") {
let uri: Uri = repo.parse().context("Failed to parse URL")?;
let path = uri.path().trim_start_matches('/');
let segments: Vec<_> = path.split('/').collect();
if segments.len() >= 4 && segments[2] == "pull" {
let pr_num = segments[3]
.parse::<u64>()
.context("Failed to parse PR number")?;
return Ok((segments[0].to_string(), segments[1].to_string(), pr_num));
}
}
bail!("Invalid PR ref format")
}
/// Gets a new review from the internet and writes it to the filesystem
pub async fn get_pr(
&self,
owner: &str,
repo: &str,
pr_num: u64,
force: bool,
) -> Result<Review> {
let pr_handler = self.crab.pulls(owner, repo);
let diff = pr_handler
.get_diff(pr_num)
.await
.context("Failed to fetch diff")?;
let pr = pr_handler.get(pr_num).await.context("Failed to fetch pr")?;
let commit_id = pr.head.sha;
let mut pr_description = None;
if self.is_pr_metadata_experiment_active() {
pr_description = Some(pr.body.unwrap_or("".to_string()));
}
Review::new(
&self.workdir()?,
diff,
owner,
repo,
pr_description,
pr_num,
commit_id,
force,
)
}
/// Gets an existing review from the filesystem
pub fn get_review(&self, owner: &str, repo: &str, pr_num: u64) -> Result<Review> {
let workdir = self.workdir()?;
Ok(Review::new_existing(&workdir, owner, repo, pr_num))
}
/// Submits the review file for `owner/repo#pr_num` to GitHub.
///
/// Parses the review file into (action, overall comment, inline comments,
/// file-level comments), builds the "create a review" JSON payload, POSTs
/// it, then submits each file-level comment via a separate endpoint.
/// `debug` pretty-prints the payload before sending.
pub async fn submit_pr(&self, owner: &str, repo: &str, pr_num: u64, debug: bool) -> Result<()> {
    let review = Review::new_existing(&self.workdir()?, owner, repo, pr_num);
    let (review_action, review_comment, inline_comments, file_comments) = review.comments()?;
    // An otherwise-empty review only makes sense as an approval
    if review_comment.is_empty()
        && inline_comments.is_empty()
        && review_action != ReviewAction::Approve
    {
        bail!("No review comments");
    }
    let mut body = json!({
        "body": review_comment,
        "event": match review_action {
            ReviewAction::Approve => "APPROVE",
            ReviewAction::RequestChanges => "REQUEST_CHANGES",
            ReviewAction::Comment => "COMMENT"
        },
        "comments": inline_comments
            .iter()
            .map(|c| {
                // GitHub wants the line number plus which side of the diff
                // ("LEFT" = old file, "RIGHT" = new file) it refers to
                let (line, side) = match c.line {
                    LineLocation::Left(line) => (line, "LEFT"),
                    LineLocation::Right(line) => (line, "RIGHT"),
                };
                let mut json_comment = json!({
                    "path": c.file,
                    "line": line,
                    "body": c.comment,
                    "side": side,
                });
                // Multi-line (spanned) comments additionally carry a start
                // line/side; single-line comments omit these keys entirely
                if let Some(start_line) = &c.start_line {
                    let (line, side) = match start_line {
                        LineLocation::Left(line) => (line, "LEFT"),
                        LineLocation::Right(line) => (line, "RIGHT"),
                    };
                    json_comment["start_line"] = (*line).into();
                    json_comment["start_side"] = side.into();
                }
                json_comment
            })
            .collect::<Vec<Value>>(),
    });
    let commit = review.commit_id()?;
    if let Some(id) = &commit {
        // Pin the review to the commit the diff was taken at
        if let serde_json::Value::Object(ref mut obj) = body {
            obj.insert("commit_id".to_string(), json!(id));
        }
    } else if !file_comments.is_empty() {
        // File-level comments go through an endpoint that requires a commit
        // id, so without one (old metadata) we must refuse up front
        bail!(
            "Metadata contained no commit_id, but it's required to leave file-level comments"
        );
    }
    if debug {
        println!("{}", serde_json::to_string_pretty(&body)?);
    }
    self.submit_review(&review, owner, repo, pr_num, &body)
        .await?;
    // File-level comments are not part of the review payload; each one is
    // posted individually. `unwrap()` is safe: we bailed above if commit is
    // None while file_comments is non-empty.
    for fc in &file_comments {
        self.submit_file_comment(owner, repo, pr_num, commit.as_ref().unwrap(), fc)
            .await?
    }
    Ok(())
}
/// POSTs the assembled review payload to the GitHub reviews endpoint.
///
/// On success, marks the review as submitted in its metadata file.
async fn submit_review(
    &self,
    review: &Review,
    owner: &str,
    repo: &str,
    pr_num: u64,
    body: &Value,
) -> Result<()> {
    let path = format!("repos/{}/{}/pulls/{}/reviews", owner, repo, pr_num);
    let uri = Uri::builder()
        .path_and_query(path)
        .build()
        .context("Invalid URI")?;
    match self.crab._post(uri, Some(body)).await {
        Ok(resp) => {
            let status = resp.status();
            if status != StatusCode::OK {
                let text = self
                    .crab
                    .body_to_string(resp)
                    .await
                    .context("Failed to decode failed response")?;
                bail!("Error during POST: Status code: {}, Body: {}", status, text);
            }
            review
                .mark_submitted()
                .context("Failed to update review metadata")?;
            Ok(())
        }
        // GH is known to send unescaped control characters in JSON responses which
        // serde will fail to parse (not that it should succeed)
        Err(octocrab::Error::Json {
            source: _,
            backtrace: _,
        }) => {
            eprintln!("Warning: GH response had invalid JSON");
            Ok(())
        }
        Err(e) => bail!("Error during POST: {}", e),
    }
}
/// POSTs a single file-level comment to the PR comments endpoint.
///
/// Uses `"subject_type": "file"` so the comment attaches to the file as a
/// whole rather than a specific line. Expects HTTP 201 (Created) on success.
async fn submit_file_comment(
    &self,
    owner: &str,
    repo: &str,
    pr_num: u64,
    commit_id: &str,
    fc: &FileComment,
) -> Result<()> {
    let body = json!({
        "body": fc.comment,
        "commit_id": commit_id,
        "path": fc.file,
        "subject_type": "file",
    });
    let path = format!("repos/{}/{}/pulls/{}/comments", owner, repo, pr_num);
    let uri = Uri::builder()
        .path_and_query(path)
        .build()
        .context("Invalid URI")?;
    match self.crab._post(uri, Some(&body)).await {
        Ok(resp) => {
            let status = resp.status();
            if status != StatusCode::CREATED {
                let text = self
                    .crab
                    .body_to_string(resp)
                    .await
                    .context("Failed to decode failed response")?;
                bail!("Error during POST: Status code: {}, Body: {}", status, text);
            }
            Ok(())
        }
        // GH is known to send unescaped control characters in JSON responses which
        // serde will fail to parse (not that it should succeed)
        Err(octocrab::Error::Json {
            source: _,
            backtrace: _,
        }) => {
            eprintln!("Warning: GH response had invalid JSON");
            Ok(())
        }
        Err(e) => bail!("Error during POST: {}", e),
    }
}
/// Applies the stored diff of review `owner/repo#pr_num` to the working
/// directory of the git repository at `apply_repo`.
///
/// Refuses to run if the target working tree has any status entries
/// (best-effort dirtiness check) so in-progress work isn't clobbered.
/// The diff is applied to the working directory only, not the index.
pub fn apply_pr(&self, owner: &str, repo: &str, pr_num: u64, apply_repo: &Path) -> Result<()> {
    let review = Review::new_existing(&self.workdir()?, owner, repo, pr_num);
    let diff = Diff::from_buffer(review.diff()?.as_bytes()).context("Failed to load diff")?;
    let apply_repo_path = Path::new(apply_repo);
    let apply_repo =
        Repository::open(apply_repo_path).context("Failed to open git repository")?;
    // Best effort check to prevent clobbering any work in progress
    let mut opts = StatusOptions::new();
    opts.include_ignored(false);
    let statuses = apply_repo
        .statuses(Some(&mut opts))
        .context("Failed to get repo status")?;
    if !statuses.is_empty() {
        bail!("Working directory is dirty");
    }
    apply_repo
        .apply(&diff, ApplyLocation::WorkDir, None)
        .context("Failed to apply diff")
}
/// Prints a table of all reviews in the workdir and their status to stdout.
///
/// `no_titles` suppresses the header row (handy for scripting).
pub fn print_status(&self, no_titles: bool) -> Result<()> {
    let mut table = Table::new();
    let mut table_fmt = *format::consts::FORMAT_CLEAN;
    // Get rid of leading padding on each line
    table_fmt.padding(0, 2);
    table.set_format(table_fmt);
    if !no_titles {
        table.set_titles(row!["Handle", "Status", "Review file"])
    }
    let reviews = get_all_existing(&self.workdir()?).context("Failed to get all reviews")?;
    for review in reviews {
        table.add_row(row![
            review.handle(),
            review.status()?,
            review.path().display()
        ]);
    }
    table.printstd();
    Ok(())
}
/// Removes reviews from the filesystem
///
/// Every entry in `prs` is parsed and removed. When `submitted` is true,
/// all reviews in `Submitted` state are additionally swept out. `force`
/// bypasses the unsubmitted-changes safety check inside `Review::remove`.
pub async fn remove(&self, prs: &[String], force: bool, submitted: bool) -> Result<()> {
    for pr in prs {
        let (owner, repo, pr_num) = self.parse_pr_str(pr)?;
        let review = self.get_review(&owner, &repo, pr_num)?;
        review
            .remove(force)
            .with_context(|| anyhow!("Failed to remove {}", pr))?;
    }
    if !submitted {
        return Ok(());
    }
    // Fix: context message previously read "Failed to all reviews"
    let reviews = get_all_existing(&self.workdir()?).context("Failed to get all reviews")?;
    for review in reviews {
        if review.status()? == ReviewStatus::Submitted {
            // Grab the handle before `remove` consumes the review
            let handle = review.handle();
            review
                .remove(force)
                .with_context(|| anyhow!("Failed to remove {}", handle))?;
        }
    }
    Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::File;
use std::io::Write;
use tempfile::TempDir;
// Lays down configs in a tempdir
//
// NB: Configs get deleted if returned `TempDir` is dropped
fn config(global: &str, local: Option<&str>) -> (Prr, TempDir) {
    let dir = TempDir::new().unwrap();
    // Global config is always written
    let gpath = dir.path().join("config.toml");
    let mut gfile = File::create(&gpath).unwrap();
    write!(&mut gfile, "{}", global).unwrap();
    // Local config is optional; `Prr::new` receives its path only when given
    let lpath = if let Some(lcontents) = local {
        let lpath = dir.path().join("local_config.toml");
        let mut lfile = File::create(&lpath).unwrap();
        write!(&mut lfile, "{}", lcontents).unwrap();
        Some(lpath)
    } else {
        None
    };
    let prr = Prr::new(&gpath, lpath).unwrap();
    (prr, dir)
}
lazy_static! {
    // Basic dummy config just to avoid errors
    static ref PRR: (Prr, TempDir) = {
        let gconfig = r#"
[prr]
token = "test"
workdir = "/tmp"
"#;
        config(gconfig, None)
    };
}
// Plain "owner/repo/number" form
#[tokio::test]
async fn test_parse_basic_pr_str() {
    let pr_ref = "example/prr/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr".to_string(), 42)
    )
}
// Dots are valid in repo names
#[tokio::test]
async fn test_parse_dotted_pr_str() {
    let pr_ref = "example/prr.test/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr.test".to_string(), 42)
    )
}
// Underscores are valid in repo names
#[tokio::test]
async fn test_parse_underscored_pr_str() {
    let pr_ref = "example/prr_test/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr_test".to_string(), 42)
    )
}
// Dashes are valid in repo names
#[tokio::test]
async fn test_parse_dashed_pr_str() {
    let pr_ref = "example/prr-test/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr-test".to_string(), 42)
    )
}
// Digits are valid in repo names
#[tokio::test]
async fn test_parse_numbered_pr_str() {
    let pr_ref = "example/prr1/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr1".to_string(), 42)
    )
}
// Mixture of all the allowed special characters, including trailing dash
#[tokio::test]
async fn test_parse_mixed_pr_str() {
    let pr_ref = "example/prr1.test_test-/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "prr1.test_test-".to_string(), 42)
    )
}
// Full https URL form is also accepted
#[tokio::test]
async fn test_parse_github_url() {
    let pr_ref = "https://github.com/example/repo/pull/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "repo".to_string(), 42)
    )
}
// Extra path segments after the PR number (e.g. "/files") are ignored
#[tokio::test]
async fn test_parse_github_url_with_extra_path() {
    let pr_ref = "https://github.com/example/repo/pull/42/files";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "repo".to_string(), 42)
    )
}
// Even deeper trailing paths (commit range views) are ignored too
#[tokio::test]
async fn test_parse_github_url_with_complex_path() {
    let pr_ref = "https://github.com/example/repo/pull/42/files/abc123..def456";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "repo".to_string(), 42)
    )
}
// URL parsing only looks at the path, so GitHub Enterprise hosts work
#[tokio::test]
async fn test_parse_custom_github_host() {
    let pr_ref = "https://github.acme.com/example/repo/pull/42";
    assert_eq!(
        PRR.0.parse_pr_str(pr_ref).unwrap(),
        ("example".to_string(), "repo".to_string(), 42)
    )
}
// A bare PR number resolves against the local config's `repository`
#[tokio::test]
async fn test_local_config_repository() {
    let gconfig = r#"
[prr]
token = "test"
"#;
    let lconfig = r#"
[local]
repository = "testorg/testrepo"
"#;
    let (prr, _dir) = config(gconfig, Some(lconfig));
    assert_eq!(
        prr.parse_pr_str("42").unwrap(),
        ("testorg".to_string(), "testrepo".to_string(), 42)
    )
}
// Global config's workdir is used when there's no local config
#[tokio::test]
async fn test_global_workdir() {
    let gconfig = r#"
[prr]
token = "test"
workdir = "/globalworkdir"
"#;
    let (prr, _dir) = config(gconfig, None);
    assert_eq!(prr.workdir().unwrap(), Path::new("/globalworkdir"))
}
// Local config can supply the workdir when the global config has none
#[tokio::test]
async fn test_local_workdir() {
    let gconfig = r#"
[prr]
token = "test"
"#;
    let lconfig = r#"
[local]
workdir = "/localworkdir"
"#;
    let (prr, _dir) = config(gconfig, Some(lconfig));
    assert_eq!(prr.workdir().unwrap(), Path::new("/localworkdir"))
}
// A relative workdir in the local config is resolved against the config dir
#[tokio::test]
async fn test_local_workdir_relative() {
    let gconfig = r#"
[prr]
token = "test"
"#;
    let lconfig = r#"
[local]
workdir = "localrelativeworkdir"
"#;
    let (prr, dir) = config(gconfig, Some(lconfig));
    assert_eq!(
        prr.workdir().unwrap(),
        dir.path().join("localrelativeworkdir")
    )
}
// Local config's workdir takes precedence over the global one
#[tokio::test]
async fn test_local_workdir_override() {
    let gconfig = r#"
[prr]
token = "test"
workdir = "/globalworkdir"
"#;
    let lconfig = r#"
[local]
workdir = "/localworkdir"
"#;
    let (prr, _dir) = config(gconfig, Some(lconfig));
    assert_eq!(prr.workdir().unwrap(), Path::new("/localworkdir"))
}
// A relative workdir in the *global* config is an error
#[tokio::test]
async fn test_invalid_relative_workdir() {
    let gconfig = r#"
[prr]
token = "test"
workdir = "relativeworkdir"
"#;
    let (prr, _dir) = config(gconfig, None);
    assert!(prr.workdir().is_err());
}
/// Recursively copies the contents of `src` into `dst`, creating `dst` and
/// any intermediate directories as needed.
///
/// Test-only helper; panics on any filesystem error.
fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) {
    let dst = dst.as_ref();
    fs::create_dir_all(dst).expect("could not create_dir_all");
    for entry in fs::read_dir(src).expect("could not read_dir") {
        let entry = entry.expect("entry is not valid");
        let target = dst.join(entry.file_name());
        let is_dir = entry.file_type().expect("cannot get filetype").is_dir();
        if is_dir {
            // Recurse into subdirectories
            copy_dir_all(entry.path(), target);
        } else {
            fs::copy(entry.path(), target).expect("copy in copy_dir_all failed");
        }
    }
}
// No config token: falls back to the GITHUB_TOKEN env var
#[test]
fn test_resolve_github_token_with_no_config_token_fallback_to_env() {
    let env_lookup = |var: &str| -> Result<String, std::env::VarError> {
        match var {
            "GITHUB_TOKEN" => Ok("fallback_env_token".to_string()),
            _ => Err(std::env::VarError::NotPresent),
        }
    };
    let result = resolve_github_token(None, env_lookup).unwrap();
    assert_eq!(result, "fallback_env_token");
}
// No config token and no env vars: resolution errors out
#[test]
fn test_resolve_github_token_with_no_config_token_no_env_error() {
    let env_lookup = |_var: &str| -> Result<String, std::env::VarError> {
        Err(std::env::VarError::NotPresent)
    };
    let result = resolve_github_token(None, env_lookup);
    assert!(result.is_err());
    let error_msg = result.err().unwrap().to_string();
    assert!(error_msg.contains("No GitHub token found in config or environment variables"));
}
// Config token wins over any env var
#[test]
fn test_resolve_github_token_config_token_preferred_over_env() {
    let env_lookup = |var: &str| -> Result<String, std::env::VarError> {
        match var {
            "GITHUB_TOKEN" => Ok("env_token".to_string()),
            _ => Err(std::env::VarError::NotPresent),
        }
    };
    let result = resolve_github_token(Some("config_token"), env_lookup).unwrap();
    assert_eq!(result, "config_token");
}
// An empty config token is treated as absent
#[test]
fn test_resolve_github_token_empty_config_token_falls_back_to_env() {
    let env_lookup = |var: &str| -> Result<String, std::env::VarError> {
        match var {
            "GITHUB_TOKEN" => Ok("env_token".to_string()),
            _ => Err(std::env::VarError::NotPresent),
        }
    };
    let result = resolve_github_token(Some(""), env_lookup).unwrap();
    assert_eq!(result, "env_token");
}
// When both env vars are set, GH_TOKEN wins
#[test]
fn test_resolve_github_token_env_var_precedence() {
    let env_lookup = |var: &str| -> Result<String, std::env::VarError> {
        match var {
            "GH_TOKEN" => Ok("gh_token".to_string()),
            "GITHUB_TOKEN" => Ok("github_token".to_string()),
            _ => Err(std::env::VarError::NotPresent),
        }
    };
    let result = resolve_github_token(None, env_lookup).unwrap();
    // GH_TOKEN should have higher precedence
    assert_eq!(result, "gh_token");
}
// An env var that is present but empty is an explicit error
#[test]
fn test_resolve_github_token_empty_env_var_error() {
    let env_lookup = |var: &str| -> Result<String, std::env::VarError> {
        match var {
            // Empty token
            "GITHUB_TOKEN" => Ok("".to_string()),
            _ => Err(std::env::VarError::NotPresent),
        }
    };
    let result = resolve_github_token(None, env_lookup);
    assert!(result.is_err());
    let error_msg = result.err().unwrap().to_string();
    assert!(error_msg.contains("Environment variable 'GITHUB_TOKEN' located but is empty"));
}
// End-to-end test of `apply_pr`: set up a real git repo with one committed
// file, copy a canned review (diff) into the workdir, apply it, and compare
// the result against a gold file.
#[tokio::test]
async fn test_apply_pr() {
    let gconfig = r#"
[prr]
token = "doesn'tmatter"
workdir = "doesn'tmatter"
"#;
    let lconfig = r#"
[local]
workdir = "testdata/"
"#;
    let (prr, dir) = config(gconfig, Some(lconfig));
    // Stage the canned review files under the tempdir-relative workdir
    let test_review_path =
        dir.path().to_str().expect("tmp path invalid").to_string() + "/testdata/apply_pr";
    fs::create_dir_all(&test_review_path).expect("failed to create temp directory");
    copy_dir_all("testdata/review/apply_pr", &test_review_path);
    // Build a fresh git repo containing the pre-diff README.md
    let test_repo_path = dir.path().to_str().unwrap().to_string() + "/testgitrepo/";
    fs::create_dir_all(&test_repo_path).expect("couldn't create testgitrepo");
    let test_repo = git2::Repository::init(&test_repo_path).expect("couldn't init testgitrepo");
    std::fs::copy(
        "testdata/testgitrepo/README.md",
        test_repo_path.clone() + "README.md",
    )
    .expect("copy README.md failed");
    // Commit README.md so the working tree is clean before applying
    let mut index = test_repo.index().expect("couldn't get repo index");
    index
        .add_path(Path::new("README.md"))
        .expect("couldn't add path");
    let new_tree_oid = index.write_tree().expect("couldn't write tree");
    index.write().expect("couldn't write index");
    let signature = git2::Signature::now("someone", "someone@somewhere.com")
        .expect("failed to create signature");
    let new_tree = test_repo.find_tree(new_tree_oid).unwrap();
    test_repo
        .commit(
            Some("HEAD"),
            &signature,
            &signature,
            "Initial commit",
            &new_tree,
            &[],
        )
        .expect("failed to commit");
    // add non-tracked file for testing purposes
    std::fs::copy(
        "testdata/testgitrepo/README.md",
        test_repo_path.clone() + "README-not-tracked.md",
    )
    .expect("copy README-not-tracked.md failed");
    prr.apply_pr("apply_pr", "review", 1, Path::new(&test_repo_path))
        .expect("apply_pr failed");
    // The applied README.md must match the gold file
    let got_after_apply = fs::read(test_repo_path.clone() + "README.md")
        .expect("failed to read README.md with diff applied");
    let want_after_apply = fs::read("testdata/testgitrepo/README-applied.md")
        .expect("failed to read README-applied.md");
    assert_eq!(got_after_apply, want_after_apply);
}
}
================================================
FILE: src/review.rs
================================================
use std::fmt::{Display, Formatter, Result as fmt_result, Write as fmt_write};
use std::fs;
use std::fs::OpenOptions;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use anyhow::{anyhow, bail, Context, Result};
use serde_derive::{Deserialize, Serialize};
use crate::parser::{Comment, FileComment, InlineComment, ReviewAction, ReviewParser};
/// We support a few common variants of snips.
/// These are semantically identical.
const SNIP_VARIANTS: &[&str] = &["[..]", "[...]"];
/// Represents the state of a single review
///
/// This is only an identifier (workdir + owner + repo + PR number); all
/// actual state lives on disk in the review file and its metadata dotfile.
pub struct Review {
    /// Path to workdir
    workdir: PathBuf,
    /// Name of the owner of the repository
    owner: String,
    /// Name of the repository
    repo: String,
    /// Issue # of the pull request
    pr_num: u64,
}
/// Metadata for a single review. Stored as dotfile next to user-facing review file
#[derive(Serialize, Deserialize, Debug)]
struct ReviewMetadata {
    /// Original .diff file contents. Used to detect corrupted review files
    original: String,
    /// Time (seconds since epoch) the review file was last submitted
    submitted: Option<u64>,
    /// The commit hash of the PR at the time the review was started
    ///
    /// Optional because metadata written by older versions may lack it
    commit_id: Option<String>,
}
/// Status of a review
#[derive(PartialEq, Debug)]
pub enum ReviewStatus {
    /// Newly downloaded review; no changes yet
    New,
    /// Unsubmitted changes have been made to review file
    Reviewed,
    /// Review has been submitted. Any further changes to the review file are ignored
    Submitted,
}
/// Represents a single line in a review file.
enum LineType<'a> {
    /// Original text (but stored without the leading `> `)
    Quoted(&'a str),
    /// A snip (`[..]`)
    Snip,
    /// User supplied comment
    Comment(&'a str),
}
impl Display for ReviewStatus {
    /// Renders the status as the uppercase tag shown by `prr status`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt_result {
        f.write_str(match self {
            Self::New => "NEW",
            Self::Reviewed => "REVIEWED",
            Self::Submitted => "SUBMITTED",
        })
    }
}
impl<'a> From<&'a str> for LineType<'a> {
    /// Classifies one review-file line: quoted original text, a snip
    /// marker, or (by default) a user-supplied comment.
    fn from(line: &'a str) -> Self {
        match line.strip_prefix("> ") {
            Some(text) => Self::Quoted(text),
            None if SNIP_VARIANTS.contains(&line.trim()) => Self::Snip,
            None => Self::Comment(line),
        }
    }
}
/// Quotes every line of `s` with `prefix` followed by a space, appending a
/// trailing newline to each line. Empty input lines become `prefix + " "`.
fn prefix_lines(s: &str, prefix: &str) -> String {
    s.lines()
        .map(|line| {
            if line.is_empty() {
                // Keep the single space after the prefix for blank lines
                format!("{prefix} \n")
            } else {
                format!("{prefix} {line}\n")
            }
        })
        .collect()
}
/// Returns a list of all reviews in a workdir
pub fn get_all_existing(workdir: &Path) -> Result<Vec<Review>> {
// This pipeline does the following:
// * Iterate through all org directories in workdir
// * For each org directory, iterate through all contained repo directories
// * For each repo directory, enumerate all non-metadata review files
let reviews: Vec<PathBuf> = fs::read_dir(workdir)
.context("Failed to read workdir")?
.filter_map(|entry| entry.ok())
.map(|org| org.path())
.filter(|org| org.is_dir())
.filter_map(|org| fs::read_dir(org).ok())
.flatten()
.filter_map(|repo| repo.ok())
.map(|repo| repo.path())
.filter(|repo| repo.is_dir())
.filter_map(|repo| fs::read_dir(repo).ok())
.flatten()
.filter_map(|review| review.ok())
.map(|review| review.path())
.filter(|review| review.is_file())
.filter(|review| match review.extension() {
Some(e) => e == "prr",
None => false,
})
.collect();
let mut ret = Vec::with_capacity(reviews.len());
for review in reviews {
let parts: Vec<_> = review
.iter()
.rev()
.take(3)
.map(|p| p.to_string_lossy())
.collect();
if parts.len() != 3 {
bail!("malformed review file path: {}", review.display());
}
let pr_num: u64 = parts[0]
.strip_suffix(".prr")
.unwrap_or(&parts[0])
.parse()
.with_context(|| format!("Failed to parse PR num: {}", review.display()))?;
// Note the vec has components reversed
let r = Review::new_existing(workdir, &parts[2], &parts[1], pr_num);
ret.push(r);
}
Ok(ret)
}
/// Recursive helper for `resolve_snips()`.
///
/// This function will return Some(lines), where lines is a Vec of resolved
/// lines. There should not be any trailing newlines in `lines`.
///
/// The problem of resolving snips transposes pretty cleanly to the classic
/// glob matching algorithm. We implement the glob matching fairly naively
/// using recursion b/c it's cleaner to recurse when we want to eventually
/// return a value.
///
/// This would be in contrast to rsc's glob algorithm [0] where it's more
/// efficient and has less pathological corner cases. We choose to trade off
/// performance for simplicity here.
///
/// [0]: https://research.swtch.com/glob
fn resolve_snips_recurse<'a>(pattern: &[LineType<'a>], text: &[&'a str]) -> Option<Vec<String>> {
    let mut resolved = Vec::new();
    // Two cursors: one over the review-file "pattern", one over original text
    let mut pattern_idx = 0;
    let mut text_idx = 0;
    while pattern_idx < pattern.len() || text_idx < text.len() {
        if pattern_idx < pattern.len() {
            match pattern[pattern_idx] {
                // Quoted lines must match the original text exactly;
                // on match, both cursors advance in lockstep
                LineType::Quoted(line) => {
                    if text_idx < text.len() && text[text_idx] == line {
                        resolved.push(format!("> {line}"));
                        pattern_idx += 1;
                        text_idx += 1;
                        continue;
                    }
                }
                // Comments are semantically irrelevant to snip resolution. But we still
                // need to account for them in returned output.
                LineType::Comment(line) => {
                    resolved.push(line.to_string());
                    pattern_idx += 1;
                    continue;
                }
                // Begin glob logic
                LineType::Snip => {
                    // Here we try making the snip consume 0 lines, 1 line, and so forth.
                    //
                    // Skipping comments is technically a noop and in theory we could rework
                    // this code to only skip matchable text. But that is just an optimization.
                    for cand_text_idx in text_idx..=text.len() {
                        let cand_pattern = &pattern[pattern_idx + 1..];
                        let cand_text = &text[cand_text_idx..];
                        if let Some(mut r) = resolve_snips_recurse(cand_pattern, cand_text) {
                            // The snip "consumed" text[text_idx..cand_text_idx];
                            // re-emit those lines as quoted text
                            let skipped: Vec<String> = text[text_idx..cand_text_idx]
                                .iter()
                                .map(|&line| format!("> {line}"))
                                .collect();
                            resolved.extend_from_slice(&skipped);
                            resolved.append(&mut r);
                            return Some(resolved);
                        }
                    }
                }
            }
        }
        // If we reach here, we either have some `pattern` or `text` still left to
        // process. Meaning one ran out before the other. Which implies a resolution
        // failure.
        return None;
    }
    // We've finished processing all of `text` and `pattern`. So resolution success.
    Some(resolved)
}
impl Review {
/// Creates a new `Review`
///
/// `review_file` is the path where the user-facing review file should
/// be created. Additional metadata files (dotfiles) may be created in the same
/// directory.
///
/// The diff (optionally preceded by `pr_description`) is written quoted
/// (`> ` prefixed) into the review file, and the unquoted original is
/// stored in the metadata dotfile alongside `commit_id`. `force` allows
/// clobbering an existing review with unsubmitted changes.
pub fn new(
    workdir: &Path,
    diff: String,
    owner: &str,
    repo: &str,
    pr_description: Option<String>,
    pr_num: u64,
    commit_id: String,
    force: bool,
) -> Result<Review> {
    let review = Review {
        workdir: workdir.to_owned(),
        owner: owner.to_owned(),
        repo: repo.to_owned(),
        pr_num,
    };
    // First create directories leading up to review file if necessary
    let review_path = review.path();
    let review_dir = review_path
        .parent()
        .ok_or_else(|| anyhow!("Review path has no parent!"))?;
    fs::create_dir_all(review_dir).context("Failed to create workdir directories")?;
    // Check if there are unsubmitted changes
    if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
        bail!(
            "You have unsubmitted changes to the requested review. \
            Either submit the existing changes, delete the existing review file, \
            or re-run this command with --force."
        );
    }
    // Now create review file
    let mut review_file = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(&review_path)
        .context("Failed to create review file")?;
    // Prepend the PR description (if any) so it gets quoted along with the diff
    let mut description = pr_description.unwrap_or_else(String::default);
    if !description.is_empty() {
        description += "\n";
    }
    let original_contents = description + &diff;
    let prefixed_contents = prefix_lines(&original_contents, ">");
    review_file
        .write_all(prefixed_contents.as_bytes())
        .context("Failed to write review file")?;
    // Create metadata file
    //
    // The unquoted original is kept so later edits to quoted text can be
    // detected (see `validate_review_file`)
    let metadata = ReviewMetadata {
        original: original_contents,
        submitted: None,
        commit_id: Some(commit_id),
    };
    let json = serde_json::to_string(&metadata)?;
    let metadata_path = review.metadata_path();
    let mut metadata_file = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(metadata_path)
        .context("Failed to create metadata file")?;
    metadata_file
        .write_all(json.as_bytes())
        .context("Failed to write metadata file")?;
    Ok(review)
}
/// Creates a `Review` that already exists on disk
///
/// Note we do not check that anything actually exists on disk because that is
/// inherently racy. We'll handle ENOENT errors when we actually use any files.
pub fn new_existing(workdir: &Path, owner: &str, repo: &str, pr_num: u64) -> Review {
    Review {
        workdir: workdir.to_path_buf(),
        owner: owner.to_string(),
        repo: repo.to_string(),
        pr_num,
    }
}
/// Parse the user-supplied comments on a review
///
/// Returns (overall review action, overall review comment, inline comments, file comments)
///
/// Snips are resolved back to original text first, then the file is
/// validated against the stored original before parsing line by line.
pub fn comments(&self) -> Result<(ReviewAction, String, Vec<InlineComment>, Vec<FileComment>)> {
    let raw = fs::read_to_string(self.path()).context("Failed to read review file")?;
    let contents = self.resolve_snips(&raw)?;
    self.validate_review_file(&contents)?;
    let mut parser = ReviewParser::new();
    // Defaults when the review file contains no explicit action/comment
    let mut review_action = ReviewAction::Comment;
    let mut review_comment = String::new();
    let mut inline_comments = Vec::new();
    let mut file_comments = Vec::new();
    for (idx, line) in contents.lines().enumerate() {
        let res = parser
            .parse_line(line)
            .with_context(|| format!("Failed to parse review on line {}", idx + 1))?;
        match res {
            Some(Comment::Review(c)) => {
                if !review_comment.is_empty() {
                    bail!("Somehow saw more than one review comment");
                }
                review_comment = c;
            }
            Some(Comment::Inline(c)) => inline_comments.push(c),
            Some(Comment::ReviewAction(a)) => review_action = a,
            Some(Comment::File(fc)) => file_comments.push(fc),
            None => {}
        }
    }
    // Flush any comment the parser was still accumulating at EOF
    match parser.finish() {
        Some(Comment::Inline(c)) => inline_comments.push(c),
        // Original diff must have been short to begin with
        Some(Comment::Review(_)) => bail!("Unexpected review comment at parser finish"),
        Some(Comment::ReviewAction(_)) => bail!("Unexpected review action at parser finish"),
        Some(Comment::File(_)) => bail!("Unexpected file-level comment at parser finish"),
        None => {}
    };
    Ok((
        review_action,
        review_comment,
        inline_comments,
        file_comments,
    ))
}
/// Update the review file's submission time
///
/// Records "now" (seconds since the Unix epoch) in the metadata file, which
/// flips the review's status to `Submitted` from here on.
pub fn mark_submitted(&self) -> Result<()> {
    let mut metadata = self.metadata()?;
    let now_secs = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .expect("Time went backwards")
        .as_secs();
    metadata.submitted = Some(now_secs);
    let serialized = serde_json::to_string(&metadata)?;
    // Rewrite the metadata file in place
    OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(self.metadata_path())
        .context("Failed to create metadata file")?
        .write_all(serialized.as_bytes())
        .context("Failed to write metadata file")
}
/// Replaces all snips (`[...]`s) from `contents` with original, quoted text.
/// Returns resolved contents as new string.
fn resolve_snips(&self, contents: &str) -> Result<String> {
    // First, classify contents into line types. This is henceforth
    // known as the "pattern" we want to resolve against original text.
    let pattern: Vec<LineType> = contents.lines().map(LineType::from).collect();
    // If the review file does not have any snips, just skip snip resolution.
    //
    // We do this so user gets more informative error message thru validate_review_file()
    // if they corrupted a quoted line. If we naively (and more efficiently) always
    // try to resolve snips, they might get the less informative error below.
    if !pattern.iter().any(|line| matches!(line, LineType::Snip)) {
        return Ok(contents.to_string());
    }
    // Next, store original text as lines. It's easier to index into this way.
    // The original text here is unquoted.
    let original = self.metadata()?.original;
    let text: Vec<&str> = original.lines().collect();
    // Re-join the resolved lines, restoring one trailing newline per line
    Ok(resolve_snips_recurse(&pattern, &text)
        .ok_or_else(|| anyhow!("Failed to resolve snips. Did you corrupt quoted text?"))?
        .iter()
        .map(|line| format!("{line}\n"))
        .collect())
}
/// Validates whether the user corrupted the quoted contents
///
/// Reconstructs the original text from the quoted (`> `) lines of
/// `contents` and compares it (trailing whitespace ignored) against the
/// original stored in metadata. On mismatch, reports the offending line.
fn validate_review_file(&self, contents: &str) -> Result<()> {
    let mut reconstructed = String::with_capacity(contents.len());
    for line in contents.lines() {
        if let Some(stripped) = line.strip_prefix("> ") {
            reconstructed += stripped.trim_end();
            reconstructed += "\n";
        }
        // A bare ">" is a quoted empty line whose trailing space was stripped
        if line == ">" {
            reconstructed += "\n";
        }
    }
    // Normalize the stored original the same way (strip trailing whitespace)
    let metadata = self.metadata()?;
    let original: String = metadata
        .original
        .lines()
        .map(|line| line.trim_end().to_owned() + "\n")
        .collect();
    if reconstructed != original {
        // Be helpful and provide exact line number of mismatch.
        //
        // This loop on zip() will work as long as there isn't any truncation or trailing junk
        // in the original text. To handle this case, there's the final bail!()
        for (idx, (l, r)) in reconstructed.lines().zip(original.lines()).enumerate() {
            if l != r {
                // Get number of user generated lines up until the mismatch
                let user_lines = contents
                    .lines()
                    .take(idx)
                    .filter(|l| !l.starts_with('>'))
                    .count();
                let err = format!("Line {}, found '{l}' expected '{r}'", idx + 1 + user_lines);
                bail!("Detected corruption in quoted part of review file: {err}");
            }
        }
        bail!("Detected corruption in quoted part of review file: found trailing or truncated lines");
    }
    Ok(())
}
/// Returns whether or not there exists review comments
fn reviewed(&self) -> Result<bool> {
    let (_, review_comment, comments, file_comments) = self
        .comments()
        .with_context(|| anyhow!("Failed to parse comments for {}", self.path().display()))?;
    let has_any =
        !review_comment.is_empty() || !comments.is_empty() || !file_comments.is_empty();
    Ok(has_any)
}
/// Returns path to user-facing review file (`workdir/owner/repo/<pr>.prr`)
pub fn path(&self) -> PathBuf {
    self.workdir
        .join(&self.owner)
        .join(&self.repo)
        .join(format!("{}.prr", self.pr_num))
}
/// Loads and returns the parsed contents of the metadata file for the review file
fn metadata(&self) -> Result<ReviewMetadata> {
    let raw =
        fs::read_to_string(self.metadata_path()).context("Failed to load metadata file")?;
    serde_json::from_str::<ReviewMetadata>(&raw).context("Failed to parse metadata file")
}
/// Returns whether a metadata dotfile exists for this review
fn has_metadata(&self) -> bool {
    self.metadata_path().exists()
}
/// Returns path to the metadata dotfile (`.<pr_num>`) next to the review file
fn metadata_path(&self) -> PathBuf {
    self.path().with_file_name(format!(".{}", self.pr_num))
}
/// Returns the commit_id associated with the review
pub fn commit_id(&self) -> Result<Option<String>> {
    // `metadata()` returns an owned value, so the field can simply be
    // moved out — the previous `.clone()` was redundant
    Ok(self.metadata()?.commit_id)
}
/// Returns the original review diff
pub fn diff(&self) -> Result<String> {
    // Same as above: move the owned field instead of cloning it
    Ok(self.metadata()?.original)
}
/// Returns a handle (eg "owner/repo/pr_num") to this review
pub fn handle(&self) -> String {
format!("{}/{}/{}", self.owner, self.repo, self.pr_num)
}
/// Gets the status of a review
pub fn status(&self) -> Result<ReviewStatus> {
let metadata = self.metadata()?;
let reviewed = self.reviewed()?;
let status = if metadata.submitted.is_some() {
ReviewStatus::Submitted
} else if reviewed {
ReviewStatus::Reviewed
} else {
ReviewStatus::New
};
Ok(status)
}
/// Remove review from filesystem
pub fn remove(self, force: bool) -> Result<()> {
if !force && self.status()? == ReviewStatus::Reviewed {
bail!(
"You have unsubmitted changes to the requested review. \
Re-run this command with --force to ignore this check."
);
}
fs::remove_file(self.path()).context("Failed to remove review file")?;
fs::remove_file(self.metadata_path()).context("Failed to remove metadata file")?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::collections::VecDeque;
use std::fs::{create_dir_all, File};
use pretty_assertions::assert_eq as assert_eq_pretty;
use tempfile::{tempdir, TempDir};
use super::*;
// Lays down a canned review file (`3.prr`) and metadata dotfile (`.3`) under
// a tempdir-based workdir and returns a `Review` handle pointing at them.
//
// NB: files are deleted when the returned `TempDir` is dropped
fn setup(review: &str, metadata: &str) -> (Review, TempDir) {
    let dir = tempdir().expect("Failed to create tempdir");
    // Create directory structure
    let project_dir = dir.path().join("some_owner").join("some_repo");
    create_dir_all(&project_dir).expect("Failed to create workdir structure");
    // Create and write review file
    let mut review_file =
        File::create(project_dir.join("3.prr")).expect("Failed to create review file");
    review_file
        .write_all(review.as_bytes())
        .expect("Failed to write review file");
    // Create and write metadata file
    let mut metadata_file =
        File::create(project_dir.join(".3")).expect("Failed to create metadata file");
    metadata_file
        .write_all(metadata.as_bytes())
        .expect("Failed to write metadata file");
    let r = Review::new_existing(dir.path(), "some_owner", "some_repo", 3);
    (r, dir)
}
// Review file has all trailing whitespace stripped
#[test]
fn test_validate_stripped() {
    let review = include_str!("../testdata/review/trailing_whitespace/review");
    let metadata = include_str!("../testdata/review/trailing_whitespace/metadata");
    let (r, _dir) = setup(review, metadata);
    r.validate_review_file(review)
        .expect("Failed to validate review file");
}
// Step through review status state machine and validate each state
#[test]
fn test_review_status() {
    let review = include_str!("../testdata/review/status/review");
    let metadata = include_str!("../testdata/review/status/metadata");
    let (r, _dir) = setup(review, metadata);
    // Using more verbose match to ensure build failure if new states added.
    // We only need this verbosity once.
    //
    // Fix: panic messages previously misspelled "Unexpected"
    match r.status().expect("Failed to get review status") {
        ReviewStatus::New => (),
        ReviewStatus::Reviewed => panic!("Unexpected Reviewed state"),
        ReviewStatus::Submitted => panic!("Unexpected Submitted state"),
    };
    // Do a "review" by appending a comment line to the review file.
    // `append(true)` implies write access, so the redundant `.write(true)`
    // was dropped.
    let mut file = OpenOptions::new()
        .append(true)
        .open(r.path())
        .expect("Failed to open review file");
    file.write_all(b"asdf\n")
        .expect("Failed to write review comment");
    assert_eq!(r.status().unwrap(), ReviewStatus::Reviewed);
    // "Submit" the review
    r.mark_submitted().expect("Failed to submit review");
    assert_eq!(r.status().unwrap(), ReviewStatus::Submitted);
}
#[test]
fn test_review_validation_with_pr_description() {
let review = include_str!("../testdata/review/pr_description/review");
let metadata = include_str!("../testdata/review/pr_description/metadata");
let (r, _dir) = setup(review, metadata);
r.validate_review_file(review)
.expect("Failed to validate review file with PR description");
}
#[test]
fn test_review_validation_with_interleaving_pr_description() {
let review = include_str!("../testdata/review/pr_description_interleaving/review");
let metadata = include_str!("../testdata/review/pr_description_interleaving/metadata");
let (r, _dir) = setup(review, metadata);
r.validate_review_file(review)
.expect("Failed to validate review file with interleaving PR description");
}
// Tests creation of a new review
#[test]
fn test_new_review() {
// Create directory structure
let workdir = tempdir().expect("Failed to create tempdir");
// Create a review
let review = Review::new(
workdir.path(),
"some_review_contents".to_string(),
"some_owner",
"some_repo",
Some("some_pr_desc".to_string()),
3,
"111".to_string(),
false,
)
.expect("Failed to create new non-existent review");
// Check on disk "database"
fs::metadata(review.path()).expect("Failed to read review file");
fs::metadata(review.metadata_path()).expect("Failed to read review file");
}
#[test]
fn test_snip_single() {
let review = include_str!("../testdata/review/snip_single/review");
let gold = include_str!("../testdata/review/snip_single/gold");
let metadata = include_str!("../testdata/review/snip_single/metadata");
let (r, _dir) = setup(review, metadata);
assert_eq_pretty!(r.resolve_snips(review).unwrap(), gold);
}
#[test]
fn test_snip_multiple() {
let review = include_str!("../testdata/review/snip_multiple/review");
let gold = include_str!("../testdata/review/snip_multiple/gold");
let metadata = include_str!("../testdata/review/snip_multiple/metadata");
let (r, _dir) = setup(review, metadata);
assert_eq_pretty!(r.resolve_snips(review).unwrap(), gold);
}
#[test]
fn test_snip_comments() {
let review = include_str!("../testdata/review/snip_comments/review");
let gold = include_str!("../testdata/review/snip_comments/gold");
let metadata = include_str!("../testdata/review/snip_comments/metadata");
let (r, _dir) = setup(review, metadata);
assert_eq_pretty!(r.resolve_snips(review).unwrap(), gold);
}
// Here we exhaustively check all possible single snips. It may be worth doing something
// similar for multiple snips but it'll be a bit more complicated to implement.
#[test]
fn test_snip_single_exhaustive() {
let gold = include_str!("../testdata/review/snip_single/gold");
let metadata = include_str!("../testdata/review/snip_single/metadata");
let (r, _dir) = setup("", metadata);
let nr_lines = gold.lines().count();
for position in 0..=nr_lines {
for length in 0..=nr_lines {
let mut lines: VecDeque<&str> = gold.lines().collect();
let mut contents = String::new();
let mut idx = 0;
while !lines.is_empty() {
if idx == position {
writeln!(&mut contents, "[...]").unwrap();
for _ in 0..length {
lines.pop_front();
idx += 1;
}
}
// A snip appended to gold file will go past "end" of lines
if let Some(line) = lines.pop_front() {
writeln!(&mut contents, "{line}").unwrap();
}
idx += 1;
}
// Handle 0 length trailing snip
if idx == position {
writeln!(&mut contents, "[...]").unwrap();
}
assert_eq_pretty!(r.resolve_snips(&contents).unwrap(), gold);
}
}
}
}
================================================
FILE: testdata/add_oneliner
================================================
> diff --git a/foo.rs b/foo.rs
> new file mode 100644
> index 0000000..5a64612
> --- /dev/null
> +++ b/foo.rs
> @@ -0,0 +1 @@
Comment 1
> +License: Unlicense
Comment 2
================================================
FILE: testdata/approve_review
================================================
@prr approve
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/back_to_back_span
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
Comment 2
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/cross_file_span_ignored
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
Comment 1
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/cross_hunk_span
================================================
> diff --git a/ch2.txt b/ch2.txt
> index 4d729e6..2641120 100644
> --- a/ch2.txt
> +++ b/ch2.txt
> @@ -2,13 +2,6 @@ CHAPTER 2. WAGING WAR
>
> 1. Sun Tzu said: In the operations of war, where there are in the field a thousand swift chariots, as many heavy chariots, and a hundred thousand mail-clad soldiers, with provisions enough to carry them a thousand LI, the expenditure at home and at the front, including entertainment of guests, small items such as glue and paint, and sums spent on chariots and armor, will reach the total of a thousand ounces of silver per day. Such is the cost of raising an army of 100,000 men.
>
> -2. When you engage in actual fighting, if victory is long in coming, then men's weapons will grow dull and their ardor will be damped. If you lay siege to a town, you will exhaust your strength.
> -
> -3. Again, if the campaign is protracted, the resources of the State will not be equal to the strain.
> -
> -4. Now, when your weapons are dulled, your ardor damped, your strength exhausted and your treasure spent, other chieftains will spring up to take advantage of your extremity. Then no man, however wise, will be able to avert the consequences that must ensue.
> -
> -5. Thus, though we have heard of stupid haste in war, cleverness has never been seen associated with long delays.
>
> 6. There is no instance of a country having benefited from prolonged warfare.
>
> @@ -30,6 +23,11 @@ CHAPTER 2. WAGING WAR
>
> 16. Now in order to kill the enemy, our men must be roused to anger; that there may be advantage from defeating the enemy, they must have their rewards.
>
> +asdf
> +asdf
> +asdf
> +adsf
> +
Cross span comment
> 17. Therefore in chariot fighting, when ten or more chariots have been taken, those should be rewarded who took the first. Our own flags should be substituted for those of the enemy, and the chariots mingled and used in conjunction with ours. The captured soldiers should be kindly treated and kept.
>
> 18. This is called, using the conquered foe to augment one's own strength.
================================================
FILE: testdata/deleted_file
================================================
> diff --git a/ch1.txt b/ch1.txt
> deleted file mode 100644
> index d30353f..0000000
> --- a/ch1.txt
> +++ /dev/null
> @@ -1,59 +0,0 @@
> -THE ART OF WAR BY SUN TZU
> -
> -Translated by Lionel Giles
> -Originally published 1910
> -
> -This version was generated automatically at www.suntzusaid.com.
> -
> -
> -CHAPTER 1. LAYING PLANS
> -
> -1. Sun Tzu said: The art of war is of vital importance to the State.
> -
> -2. It is a matter of life and death, a road either to safety or to ruin. Hence it is a subject of inquiry which can on no account be neglected.
> -
> -3. The art of war, then, is governed by five constant factors, to be taken into account in one's deliberations, when seeking to determine the conditions obtaining in the field.
> -
> -4. These are: (1) The Moral Law; (2) Heaven; (3) Earth; (4) The Commander; (5) Method and discipline.
> -
> -5,6. The MORAL LAW causes the people to be in complete accord with their ruler, so that they will follow him regardless of their lives, undismayed by any danger.
> -
> -7. HEAVEN signifies night and day, cold and heat, times and seasons.
> -
> -8. EARTH comprises distances, great and small; danger and security; open ground and narrow passes; the chances of life and death.
> -
> -9. The COMMANDER stands for the virtues of wisdom, sincerity, benevolence, courage and strictness.
> -
> -10. By METHOD AND DISCIPLINE are to be understood the marshaling of the army in its proper subdivisions, the graduations of rank among the officers, the maintenance of roads by which supplies may reach the army, and the control of military expenditure.
> -
> -11. These five heads should be familiar to every general: he who knows them will be victorious; he who knows them not will fail.
> -
> -12. Therefore, in your deliberations, when seeking to determine the military conditions, let them be made the basis of a comparison, in this wise:
> -
> -13. (1) Which of the two sovereigns is imbued with the Moral law? (2) Which of the two generals has most ability? (3) With whom lie the advantages derived from Heaven and Earth? (4) On which side is discipline most rigorously enforced? (5) Which army is stronger? (6) On which side are officers and men more highly trained? (7) In which army is there the greater constancy both in reward and punishment?
> -
> -14. By means of these seven considerations I can forecast victory or defeat.
> -
> -15. The general that hearkens to my counsel and acts upon it, will conquer: let such a one be retained in command! The general that hearkens not to my counsel nor acts upon it, will suffer defeat: let such a one be dismissed!
> -
> -16. While heeding the profit of my counsel, avail yourself also of any helpful circumstances over and beyond the ordinary rules.
> -
> -17. According as circumstances are favorable, one should modify one's plans.
> -
> -18. All warfare is based on deception.
> -
> -19. Hence, when able to attack, we must seem unable; when using our forces, we must seem inactive; when we are near, we must make the enemy believe we are far away; when far away, we must make him believe we are near.
> -
> -20. Hold out baits to entice the enemy. Feign disorder, and crush him.
> -
> -21. If he is secure at all points, be prepared for him. If he is in superior strength, evade him.
> -
> -22. If your opponent is of choleric temper, seek to irritate him. Pretend to be weak, that he may grow arrogant.
> -
> -23. If he is taking his ease, give him no rest. If his forces are united, separate them.
> -
> -24. Attack him where he is unprepared, appear where you are not expected.
> -
> -25. These military devices, leading to victory, must not be divulged beforehand.
> -
Comment 1
> -26. Now the general who wins a battle makes many calculations in his temple ere the battle is fought. The general who loses a battle makes but few calculations beforehand. Thus do many calculations lead to victory, and few calculations to defeat: how much more no calculation at all! It is by attention to this point that I can foresee who is likely to win or lose.
================================================
FILE: testdata/empty_file
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index 0000000..fffb281 100644
> --- /dev/null
> +++ b/libbpf-cargo/src/btf/btf.rs
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
Comment
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/file_comment
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
This is a file-level comment!
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/hunk_start_no_trailing_whitespace
================================================
> diff --git a/ch5.txt b/ch5.txt
> new file mode 100644
> index 0000000..762722f
> --- /dev/null
> +++ b/ch5.txt
> @@ -0,0 +1,47 @@
> +CHAPTER 5. ENERGY
> +
> +1. Sun Tzu said: The control of a large force is the same principle as the control of a few men: it is merely a question of dividing up their numbers.
> +
> +2. Fighting with a large army under your command is nowise different from fighting with a small one: it is merely a question of instituting signs and signals.
> +
> +3. To ensure that your whole host may withstand the brunt of the enemy's attack and remain unshaken---this is effected by maneuvers direct and indirect.
Great passage
> +
> +4. That the impact of your army may be like a grindstone dashed against an egg---this is effected by the science of weak points and strong.
> +
> +5. In all fighting, the direct method may be used for joining battle, but indirect methods will be needed in order to secure victory.
> +
> +6. Indirect tactics, efficiently applied, are inexhaustible as Heaven and Earth, unending as the flow of rivers and streams; like the sun and moon, they end but to begin anew; like the four seasons, they pass away to return once more.
> +
> +7. There are not more than five musical notes, yet the combinations of these five give rise to more melodies than can ever be heard.
> +
> +8. There are not more than five primary colors (blue, yellow, red, white, and black), yet in combination they produce more hues than can ever been seen.
> +
> +9. There are not more than five cardinal tastes (sour, acrid, salt, sweet, bitter), yet combinations of them yield more flavors than can ever be tasted.
> +
> +10. In battle, there are not more than two methods of attack: the direct and the indirect; yet these two in combination give rise to an endless series of maneuvers.
> +
> +11. The direct and the indirect lead on to each other in turn. It is like moving in a circle---you never come to an end. Who can exhaust the possibilities of their combination?
> +
> +12. The onset of troops is like the rush of a torrent which will even roll stones along in its course.
> +
> +13. The quality of decision is like the well-timed swoop of a falcon which enables it to strike and destroy its victim.
> +
> +14. Therefore the good fighter will be terrible in his onset, and prompt in his decision.
> +
> +15. Energy may be likened to the bending of a crossbow; decision, to the releasing of a trigger.
> +
> +16. Amid the turmoil and tumult of battle, there may be seeming disorder and yet no real disorder at all; amid confusion and chaos, your array may be without head or tail, yet it will be proof against defeat.
> +
> +17. Simulated disorder postulates perfect discipline, simulated fear postulates courage; simulated weakness postulates strength.
> +
> +18. Hiding order beneath the cloak of disorder is simply a question of subdivision; concealing courage under a show of timidity presupposes a fund of latent energy; masking strength with weakness is to be effected by tactical dispositions.
> +
> +19. Thus one who is skillful at keeping the enemy on the move maintains deceitful appearances, according to which the enemy will act. He sacrifices something, that the enemy may snatch at it.
> +
> +20. By holding out baits, he keeps him on the march; then with a body of picked men he lies in wait for him.
> +
> +21. The clever combatant looks to the effect of combined energy, and does not require too much from individuals. Hence his ability to pick out the right men and utilize combined energy.
> +
> +22. When he utilizes combined energy, his fighting men become as it were like unto rolling logs or stones. For it is the nature of a log or stone to remain motionless on level ground, and to move when on a slope; if four-cornered, to come to a standstill, but if round-shaped, to go rolling down.
> +
> +23. Thus the energy developed by good fighting men is as the momentum of a round stone rolled down a mountain thousands of feet in height. So much on the subject of energy.
================================================
FILE: testdata/inline_and_review_comments_with_pr_description
================================================
Not necessary.
@prr reject
> This is just for testing purposes.
> diff --git a/README.md b/README.md
> index eda592e..ba84732 100644
> --- a/README.md
> +++ b/README.md
> @@ -1,5 +1,5 @@
> -MA-FSA for Go
> -=============
> +MA-FSA for Golang
> +=================
Doesn't seem necessary ...
>
> Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)
> with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that
================================================
FILE: testdata/multiline_comment
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
Comment line 1
Comment line 2
Comment line 4
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/multiple_files
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
Comment 2
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/reject_review
================================================
@prr reject
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/review/apply_pr/review/.1
================================================
{"original":"Much better this way!\n\ndiff --git a/README.md b/README.md\nindex 007306a..1a480f2 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n For testing\n ===========\n \n-This is a readme\n+This is a better readme line!\n","submitted":null,"commit_id":"7e427a4fffc13a306d91deba55957dc800ee50a8"}
================================================
FILE: testdata/review/apply_pr/review/1.prr
================================================
> Much better this way!
>
> diff --git a/README.md b/README.md
> index 007306a..1a480f2 100644
> --- a/README.md
> +++ b/README.md
> @@ -1,4 +1,4 @@
> For testing
> ===========
>
> -This is a readme
> +This is a better readme line!
================================================
FILE: testdata/review/pr_description/metadata
================================================
{"original":"This is just for testing purposes.\ndiff --git a/README.md b/README.md\nindex eda592e..ba84732 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,5 +1,5 @@\n-MA-FSA for Go\n-=============\n+MA-FSA for Golang\n+=================\n \n Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)\n with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that\n","submitted":null,"commit_id":"2c4e14eeb62b2279f185cec0eb986e388da96760"}
================================================
FILE: testdata/review/pr_description/review
================================================
Not necessary but seems ok.
@prr approve
> This is just for testing purposes.
> diff --git a/README.md b/README.md
> index eda592e..ba84732 100644
> --- a/README.md
> +++ b/README.md
> @@ -1,5 +1,5 @@
> -MA-FSA for Go
> -=============
> +MA-FSA for Golang
> +=================
Doesn't seem necessary ...
>
> Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)
> with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that
================================================
FILE: testdata/review/pr_description_interleaving/metadata
================================================
{"original":"This is just for testing purposes.\ndiff --git a/README.md b/README.md\nindex eda592e..ba84732 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,5 +1,5 @@\n-MA-FSA for Go\n-=============\n+MA-FSA for Golang\n+=================\n \n Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)\n with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that\n","submitted":null,"commit_id":"2c4e14eeb62b2279f185cec0eb986e388da96760"}
================================================
FILE: testdata/review/pr_description_interleaving/review
================================================
Not necessary.
@prr reject
> This is just for testing purposes.
This might be fine or not.
> diff --git a/README.md b/README.md
> index eda592e..ba84732 100644
> --- a/README.md
> +++ b/README.md
> @@ -1,5 +1,5 @@
> -MA-FSA for Go
> -=============
> +MA-FSA for Golang
> +=================
Doesn't seem necessary ...
>
> Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)
> with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that
================================================
FILE: testdata/review/snip_comments/gold
================================================
> diff --git a/src/review.rs b/src/review.rs
file comment!
> index e39fd6f..e94680e 100644
> --- a/src/review.rs
> +++ b/src/review.rs
> @@ -155,7 +155,7 @@ impl Review {
> fs::create_dir_all(review_dir).context("Failed to create workdir directories")?;
>
> // Check if there are unsubmitted changes
> - if !force && review.status()? == ReviewStatus::Reviewed {
> + if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
woah!
> bail!(
> "You have unsubmitted changes to the requested review. \
> Either submit the existing changes, delete the existing review file, \
> @@ -353,6 +353,10 @@ impl Review {
> serde_json::from_str::<ReviewMetadata>(&meta).context("Failed to parse metadata file")
> }
>
> + fn has_metadata(&self) -> bool {
nice
> + fs::metadata(self.metadata_path()).is_ok()
sheesh
> + }
> +
> fn metadata_path(&self) -> PathBuf {
> let mut metadata_path = self.path();
> metadata_path.set_file_name(format!(".{}", self.pr_num));
================================================
FILE: testdata/review/snip_comments/metadata
================================================
{"original":"diff --git a/src/review.rs b/src/review.rs\nindex e39fd6f..e94680e 100644\n--- a/src/review.rs\n+++ b/src/review.rs\n@@ -155,7 +155,7 @@ impl Review {\n fs::create_dir_all(review_dir).context(\"Failed to create workdir directories\")?;\n \n // Check if there are unsubmitted changes\n- if !force && review.status()? == ReviewStatus::Reviewed {\n+ if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {\n bail!(\n \"You have unsubmitted changes to the requested review. \\\n Either submit the existing changes, delete the existing review file, \\\n@@ -353,6 +353,10 @@ impl Review {\n serde_json::from_str::<ReviewMetadata>(&meta).context(\"Failed to parse metadata file\")\n }\n \n+ fn has_metadata(&self) -> bool {\n+ fs::metadata(self.metadata_path()).is_ok()\n+ }\n+\n fn metadata_path(&self) -> PathBuf {\n let mut metadata_path = self.path();\n metadata_path.set_file_name(format!(\".{}\", self.pr_num));\n","submitted":null,"commit_id":"1fa28537aa07ac608e9d5bc4ed953ddd4b348cee"}
================================================
FILE: testdata/review/snip_comments/review
================================================
> diff --git a/src/review.rs b/src/review.rs
file comment!
[...]
> - if !force && review.status()? == ReviewStatus::Reviewed {
> + if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
woah!
> bail!(
> "You have unsubmitted changes to the requested review. \
> Either submit the existing changes, delete the existing review file, \
> @@ -353,6 +353,10 @@ impl Review {
> serde_json::from_str::<ReviewMetadata>(&meta).context("Failed to parse metadata file")
> }
[...]
nice
> + fs::metadata(self.metadata_path()).is_ok()
sheesh
[...]
================================================
FILE: testdata/review/snip_multiple/gold
================================================
> diff --git a/src/review.rs b/src/review.rs
> index e39fd6f..e94680e 100644
> --- a/src/review.rs
> +++ b/src/review.rs
> @@ -155,7 +155,7 @@ impl Review {
> fs::create_dir_all(review_dir).context("Failed to create workdir directories")?;
>
> // Check if there are unsubmitted changes
> - if !force && review.status()? == ReviewStatus::Reviewed {
> + if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
> bail!(
> "You have unsubmitted changes to the requested review. \
> Either submit the existing changes, delete the existing review file, \
> @@ -353,6 +353,10 @@ impl Review {
> serde_json::from_str::<ReviewMetadata>(&meta).context("Failed to parse metadata file")
> }
>
> + fn has_metadata(&self) -> bool {
> + fs::metadata(self.metadata_path()).is_ok()
> + }
> +
> fn metadata_path(&self) -> PathBuf {
> let mut metadata_path = self.path();
> metadata_path.set_file_name(format!(".{}", self.pr_num));
================================================
FILE: testdata/review/snip_multiple/metadata
================================================
{"original":"diff --git a/src/review.rs b/src/review.rs\nindex e39fd6f..e94680e 100644\n--- a/src/review.rs\n+++ b/src/review.rs\n@@ -155,7 +155,7 @@ impl Review {\n fs::create_dir_all(review_dir).context(\"Failed to create workdir directories\")?;\n \n // Check if there are unsubmitted changes\n- if !force && review.status()? == ReviewStatus::Reviewed {\n+ if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {\n bail!(\n \"You have unsubmitted changes to the requested review. \\\n Either submit the existing changes, delete the existing review file, \\\n@@ -353,6 +353,10 @@ impl Review {\n serde_json::from_str::<ReviewMetadata>(&meta).context(\"Failed to parse metadata file\")\n }\n \n+ fn has_metadata(&self) -> bool {\n+ fs::metadata(self.metadata_path()).is_ok()\n+ }\n+\n fn metadata_path(&self) -> PathBuf {\n let mut metadata_path = self.path();\n metadata_path.set_file_name(format!(\".{}\", self.pr_num));\n","submitted":null,"commit_id":"1fa28537aa07ac608e9d5bc4ed953ddd4b348cee"}
================================================
FILE: testdata/review/snip_multiple/review
================================================
[...]
> index e39fd6f..e94680e 100644
[..]
> +++ b/src/review.rs
> @@ -155,7 +155,7 @@ impl Review {
> fs::create_dir_all(review_dir).context("Failed to create workdir directories")?;
>
[...]
> + if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
> bail!(
> "You have unsubmitted changes to the requested review. \
[...]
> }
>
> + fn has_metadata(&self) -> bool {
> + fs::metadata(self.metadata_path()).is_ok()
> + }
> +
> fn metadata_path(&self) -> PathBuf {
[..]
================================================
FILE: testdata/review/snip_single/gold
================================================
> diff --git a/src/review.rs b/src/review.rs
> index e39fd6f..e94680e 100644
> --- a/src/review.rs
> +++ b/src/review.rs
> @@ -155,7 +155,7 @@ impl Review {
> fs::create_dir_all(review_dir).context("Failed to create workdir directories")?;
>
> // Check if there are unsubmitted changes
> - if !force && review.status()? == ReviewStatus::Reviewed {
> + if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {
> bail!(
> "You have unsubmitted changes to the requested review. \
> Either submit the existing changes, delete the existing review file, \
> @@ -353,6 +353,10 @@ impl Review {
> serde_json::from_str::<ReviewMetadata>(&meta).context("Failed to parse metadata file")
> }
>
> + fn has_metadata(&self) -> bool {
> + fs::metadata(self.metadata_path()).is_ok()
> + }
> +
> fn metadata_path(&self) -> PathBuf {
> let mut metadata_path = self.path();
> metadata_path.set_file_name(format!(".{}", self.pr_num));
================================================
FILE: testdata/review/snip_single/metadata
================================================
{"original":"diff --git a/src/review.rs b/src/review.rs\nindex e39fd6f..e94680e 100644\n--- a/src/review.rs\n+++ b/src/review.rs\n@@ -155,7 +155,7 @@ impl Review {\n fs::create_dir_all(review_dir).context(\"Failed to create workdir directories\")?;\n \n // Check if there are unsubmitted changes\n- if !force && review.status()? == ReviewStatus::Reviewed {\n+ if !force && review.has_metadata() && review.status()? == ReviewStatus::Reviewed {\n bail!(\n \"You have unsubmitted changes to the requested review. \\\n Either submit the existing changes, delete the existing review file, \\\n@@ -353,6 +353,10 @@ impl Review {\n serde_json::from_str::<ReviewMetadata>(&meta).context(\"Failed to parse metadata file\")\n }\n \n+ fn has_metadata(&self) -> bool {\n+ fs::metadata(self.metadata_path()).is_ok()\n+ }\n+\n fn metadata_path(&self) -> PathBuf {\n let mut metadata_path = self.path();\n metadata_path.set_file_name(format!(\".{}\", self.pr_num));\n","submitted":null,"commit_id":"1fa28537aa07ac608e9d5bc4ed953ddd4b348cee"}
================================================
FILE: testdata/review/snip_single/review
================================================
> diff --git a/src/review.rs b/src/review.rs
[...]
> }
>
> + fn has_metadata(&self) -> bool {
> + fs::metadata(self.metadata_path()).is_ok()
> + }
> +
> fn metadata_path(&self) -> PathBuf {
> let mut metadata_path = self.path();
> metadata_path.set_file_name(format!(".{}", self.pr_num));
================================================
FILE: testdata/review/status/metadata
================================================
{"original":"diff --git a/src/utils.cpp b/src/utils.cpp\nindex b02c16bc3df..fca25d6c839 100644\n--- a/src/utils.cpp\n+++ b/src/utils.cpp\n@@ -1,5 +1,6 @@\n #include <algorithm>\n #include <array>\n+#include <climits>\n #include <cmath>\n #include <cstring>\n #include <errno.h>\n","submitted":null,"commit_id":"58c3ef68bbb9082e5a16449baa15c18ccf7e5707"}
================================================
FILE: testdata/review/status/review
================================================
> diff --git a/src/utils.cpp b/src/utils.cpp
> index b02c16bc3df..fca25d6c839 100644
> --- a/src/utils.cpp
> +++ b/src/utils.cpp
> @@ -1,5 +1,6 @@
> #include <algorithm>
> #include <array>
> +#include <climits>
> #include <cmath>
> #include <cstring>
> #include <errno.h>
================================================
FILE: testdata/review/trailing_whitespace/metadata
================================================
{"original":"diff --git a/ch2.txt b/ch2.txt\nindex 4d729e6..2641120 100644\n--- a/ch2.txt\n+++ b/ch2.txt\n@@ -2,13 +2,6 @@ CHAPTER 2. WAGING WAR\n \n 1. Sun Tzu said: In the operations of war, where there are in the field a thousand swift chariots, as many heavy chariots, and a hundred thousand mail-clad soldiers, with provisions enough to carry them a thousand LI, the expenditure at home and at the front, including entertainment of guests, small items such as glue and paint, and sums spent on chariots and armor, will reach the total of a thousand ounces of silver per day. Such is the cost of raising an army of 100,000 men. \n \n-2. When you engage in actual fighting, if victory is long in coming, then men's weapons will grow dull and their ardor will be damped. If you lay siege to a town, you will exhaust your strength. \n-\n-3. Again, if the campaign is protracted, the resources of the State will not be equal to the strain. \n-\n-4. Now, when your weapons are dulled, your ardor damped, your strength exhausted and your treasure spent, other chieftains will spring up to take advantage of your extremity. Then no man, however wise, will be able to avert the consequences that must ensue. \n-\n-5. Thus, though we have heard of stupid haste in war, cleverness has never been seen associated with long delays. \n \n 6. There is no instance of a country having benefited from prolonged warfare. \n \n@@ -30,6 +23,11 @@ CHAPTER 2. WAGING WAR\n \n 16. Now in order to kill the enemy, our men must be roused to anger; that there may be advantage from defeating the enemy, they must have their rewards. \n \n+asdf\n+asdf\n+asdf\n+adsf\n+\n 17. Therefore in chariot fighting, when ten or more chariots have been taken, those should be rewarded who took the first. Our own flags should be substituted for those of the enemy, and the chariots mingled and used in conjunction with ours. The captured soldiers should be kindly treated and kept. \n \n 18. 
This is called, using the conquered foe to augment one's own strength. \n","submitted":null,"commit_id":"ddb1d0d0d4accb2e9e39feb4597a7be1f21b62a9"}
================================================
FILE: testdata/review/trailing_whitespace/review
================================================
> diff --git a/ch2.txt b/ch2.txt
> index 4d729e6..2641120 100644
> --- a/ch2.txt
> +++ b/ch2.txt
> @@ -2,13 +2,6 @@ CHAPTER 2. WAGING WAR
>
> 1. Sun Tzu said: In the operations of war, where there are in the field a thousand swift chariots, as many heavy chariots, and a hundred thousand mail-clad soldiers, with provisions enough to carry them a thousand LI, the expenditure at home and at the front, including entertainment of guests, small items such as glue and paint, and sums spent on chariots and armor, will reach the total of a thousand ounces of silver per day. Such is the cost of raising an army of 100,000 men.
>
> -2. When you engage in actual fighting, if victory is long in coming, then men's weapons will grow dull and their ardor will be damped. If you lay siege to a town, you will exhaust your strength.
> -
> -3. Again, if the campaign is protracted, the resources of the State will not be equal to the strain.
> -
> -4. Now, when your weapons are dulled, your ardor damped, your strength exhausted and your treasure spent, other chieftains will spring up to take advantage of your extremity. Then no man, however wise, will be able to avert the consequences that must ensue.
> -
> -5. Thus, though we have heard of stupid haste in war, cleverness has never been seen associated with long delays.
>
> 6. There is no instance of a country having benefited from prolonged warfare.
>
> @@ -30,6 +23,11 @@ CHAPTER 2. WAGING WAR
>
> 16. Now in order to kill the enemy, our men must be roused to anger; that there may be advantage from defeating the enemy, they must have their rewards.
>
> +asdf
> +asdf
> +asdf
> +adsf
> +
> 17. Therefore in chariot fighting, when ten or more chariots have been taken, those should be rewarded who took the first. Our own flags should be substituted for those of the enemy, and the chariots mingled and used in conjunction with ours. The captured soldiers should be kindly treated and kept.
>
> 18. This is called, using the conquered foe to augment one's own strength.
================================================
FILE: testdata/review_comment
================================================
Review comment
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/review_comment_whitespace
================================================
@prr approve
Review comment
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/review_comments_interleaved_with_pr_description
================================================
Not necessary.
> This is just for testing purposes.
This might be fine or not.
@prr reject
> diff --git a/README.md b/README.md
> index eda592e..ba84732 100644
> --- a/README.md
> +++ b/README.md
> @@ -1,5 +1,5 @@
> -MA-FSA for Go
> -=============
> +MA-FSA for Golang
> +=================
Doesn't seem necessary ...
>
> Package mafsa implements Minimal Acyclic Finite State Automata (MA-FSA)
> with Minimal Perfect Hashing (MPH). Basically, it's a set of strings that
================================================
FILE: testdata/single_comment
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/spaces_in_filename
================================================
> diff --git a/build/scripts/grafana/provisioning/dashboards/Docker Prometheus Monitoring-1571332751387.json b/build/scripts/grafana/provisioning/dashboards/Docker Prometheus Monitoring-1571332751387.json
> new file mode 100644
> index 0000000..5daac7b
> --- /dev/null
> +++ b/build/scripts/grafana/provisioning/dashboards/Docker Prometheus Monitoring-1571332751387.json
> @@ -0,0 +1,1831 @@
> +{
> + "annotations": {
foo
> + "list": [
> + {
> + "builtIn": 1,
> + "datasource": "-- Grafana --",
================================================
FILE: testdata/testgitrepo/README-applied.md
================================================
For testing
===========
This is a better readme line!
================================================
FILE: testdata/testgitrepo/README.md
================================================
For testing
===========
This is a readme
================================================
FILE: testdata/trailing_comment
================================================
> diff --git a/ch1.txt b/ch1.txt
> deleted file mode 100644
> index d30353f..0000000
> --- a/ch1.txt
> +++ /dev/null
> @@ -1,59 +0,0 @@
> -THE ART OF WAR BY SUN TZU
> -
> -Translated by Lionel Giles
> -Originally published 1910
> -
> -This version was generated automatically at www.suntzusaid.com.
> -
> -
> -CHAPTER 1. LAYING PLANS
> -
> -1. Sun Tzu said: The art of war is of vital importance to the State.
> -
> -2. It is a matter of life and death, a road either to safety or to ruin. Hence it is a subject of inquiry which can on no account be neglected.
> -
> -3. The art of war, then, is governed by five constant factors, to be taken into account in one's deliberations, when seeking to determine the conditions obtaining in the field.
> -
> -4. These are: (1) The Moral Law; (2) Heaven; (3) Earth; (4) The Commander; (5) Method and discipline.
> -
> -5,6. The MORAL LAW causes the people to be in complete accord with their ruler, so that they will follow him regardless of their lives, undismayed by any danger.
> -
> -7. HEAVEN signifies night and day, cold and heat, times and seasons.
> -
> -8. EARTH comprises distances, great and small; danger and security; open ground and narrow passes; the chances of life and death.
> -
> -9. The COMMANDER stands for the virtues of wisdom, sincerity, benevolence, courage and strictness.
> -
> -10. By METHOD AND DISCIPLINE are to be understood the marshaling of the army in its proper subdivisions, the graduations of rank among the officers, the maintenance of roads by which supplies may reach the army, and the control of military expenditure.
> -
> -11. These five heads should be familiar to every general: he who knows them will be victorious; he who knows them not will fail.
> -
> -12. Therefore, in your deliberations, when seeking to determine the military conditions, let them be made the basis of a comparison, in this wise:
> -
> -13. (1) Which of the two sovereigns is imbued with the Moral law? (2) Which of the two generals has most ability? (3) With whom lie the advantages derived from Heaven and Earth? (4) On which side is discipline most rigorously enforced? (5) Which army is stronger? (6) On which side are officers and men more highly trained? (7) In which army is there the greater constancy both in reward and punishment?
> -
> -14. By means of these seven considerations I can forecast victory or defeat.
> -
> -15. The general that hearkens to my counsel and acts upon it, will conquer: let such a one be retained in command! The general that hearkens not to my counsel nor acts upon it, will suffer defeat: let such a one be dismissed!
> -
> -16. While heeding the profit of my counsel, avail yourself also of any helpful circumstances over and beyond the ordinary rules.
> -
> -17. According as circumstances are favorable, one should modify one's plans.
> -
> -18. All warfare is based on deception.
> -
> -19. Hence, when able to attack, we must seem unable; when using our forces, we must seem inactive; when we are near, we must make the enemy believe we are far away; when far away, we must make him believe we are near.
> -
> -20. Hold out baits to entice the enemy. Feign disorder, and crush him.
> -
> -21. If he is secure at all points, be prepared for him. If he is in superior strength, evade him.
> -
> -22. If your opponent is of choleric temper, seek to irritate him. Pretend to be weak, that he may grow arrogant.
> -
> -23. If he is taking his ease, give him no rest. If his forces are united, separate them.
> -
> -24. Attack him where he is unprepared, appear where you are not expected.
> -
> -25. These military devices, leading to victory, must not be divulged beforehand.
> -
> -26. Now the general who wins a battle makes many calculations in his temple ere the battle is fought. The general who loses a battle makes but few calculations beforehand. Thus do many calculations lead to victory, and few calculations to defeat: how much more no calculation at all! It is by attention to this point that I can foresee who is likely to win or lose.
Comment 1
================================================
FILE: testdata/unknown_directive
================================================
@prr asdf
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
Comment 1
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/unterminated_back_to_back_span
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
Comment 1
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: testdata/unterminated_span
================================================
> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs
> index a26b2a5..fffb281 100644
> --- a/libbpf-cargo/src/btf/btf.rs
> +++ b/libbpf-cargo/src/btf/btf.rs
> @@ -731,7 +731,7 @@ impl<'a> Btf<'a> {
> fn load_type(&mut self, data: &'a [u8]) -> Result<BtfType<'a>> {
> let t = data.pread::<btf_type>(0)?;
> let extra = &data[size_of::<btf_type>()..];
> - let kind = (t.info >> 24) & 0xf;
> + let kind = (t.info >> 24) & 0x1f;
>
> match BtfKind::try_from(kind)? {
> BtfKind::Void => {
> diff --git a/libbpf-cargo/src/test.rs b/libbpf-cargo/src/test.rs
> index 5b08843..82a0586 100644
> --- a/libbpf-cargo/src/test.rs
> +++ b/libbpf-cargo/src/test.rs
> @@ -2145,3 +2145,27 @@ pub struct __anon_3 {
>
> assert_definition(&btf, struct_bpf_sock_tuple, expected_output);
> }
> +
> +#[test]
> +fn test_btf_dump_float() {
> + let prog_text = r#"
> +float f = 2.16;
> +double d = 12.15;
> +"#;
> +
> + let btf = build_btf_prog(prog_text);
> +
> + let f = find_type_in_btf!(btf, Var, "f");
> + let d = find_type_in_btf!(btf, Var, "d");
> +
> + assert_eq!(
> + "f32",
> + btf.type_declaration(f)
> + .expect("Failed to generate f decl")
> + );
> + assert_eq!(
> + "f64",
> + btf.type_declaration(d)
> + .expect("Failed to generate d decl")
> + );
> +}
================================================
FILE: vim/ftdetect/prr.vim
================================================
" Detect prr review files by extension.
" Use :setfiletype (not :set filetype=) per :help new-filetype — it only
" assigns the filetype if one has not already been set, so it cooperates
" with user overrides and other ftdetect scripts.
au BufNewFile,BufRead *.prr setfiletype prr
================================================
FILE: vim/ftplugin/prr.vim
================================================
" Fold a prr review buffer like a diff: one fold per file, nested folds per hunk.
setlocal foldmethod=expr
setlocal foldexpr=s:DiffFoldLevel()
setlocal foldcolumn=3

" Adapted from https://github.com/sgeb/vim-diff-fold
function! s:DiffFoldLevel()
  let l:cur = getline(v:lnum)
  " A quoted 'diff --git' (or svn-style 'Index') line opens a file-level fold
  if l:cur =~# '^> \(diff\|Index\)'
    return '>1'
  endif
  " A quoted '@@' hunk header (or a bare digit) opens a hunk-level fold
  if l:cur =~# '^> \(@@\|\d\)'
    return '>2'
  endif
  " Everything else inherits the fold level of the line above
  return '='
endfunction

" Undo the fold settings when the filetype changes away from prr
let b:undo_ftplugin = 'setl fdm< | setl fde< | setl fdc<'
================================================
FILE: vim/syntax/prr.vim
================================================
" Vim syntax file
" Language: prr
" Maintainer: Daniel Xu <dxu@dxuuu.xyz>
" Last Change: 2023 Nov 07
" Credits: Bram Moolenaar <Bram@vim.org>,
"          Łukasz Niemier <lukasz@niemier.pl>
" Check whether an earlier file has defined a syntax already
if exists("b:current_syntax")
  finish
endif
" NOTE: item definition order matters — when two items match at the same
" position, the one defined later wins, so keep these rules in this order.
" Quoted diff lines all start with '> '.
" match + but not +++
syn match prrAdded "^> +\(++\)\@!.*"
" match - but not ---
syn match prrRemoved "^> -\(--\)\@!.*"
" '> diff --git ...' file header line
syn match prrHeader "^> diff --git .*"
" '> index <hash>..<hash> [mode]' line
syn match prrIndex "^> index \w*\.\.\w*\( \w*\)\?"
" '> @@ ... @@' hunk header line
syn match prrChunkH "^> @@ .* @@"
" '@prr <directive>' review directives; transparent so only the contained
" prrTagName/prrResult sub-items receive highlighting
syn match prrTag "^@prr .*" contains=prrTagName,prrResult transparent
syn match prrTagName contained "@prr" nextgroup=prrResult
" The directives prr's parser understands
syn keyword prrResult contained approve reject comment
" Define the default highlighting.
" Only used when an item doesn't have highlighting yet
hi def link prrAdded Added
hi def link prrRemoved Removed
hi def link prrTagName Keyword
hi def link prrResult String
hi def link prrHeader Include
hi def link prrIndex Comment
hi def link prrChunkH Function
let b:current_syntax = "prr"
gitextract_46xp_y2d/
├── .github/
│ └── workflows/
│ ├── docs.yml
│ └── rust.yml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── NOTES.md
├── README.md
├── build.rs
├── completions/
│ └── _prr
├── src/
│ ├── cli.rs
│ ├── main.rs
│ ├── parser.rs
│ ├── prr.rs
│ └── review.rs
├── testdata/
│ ├── add_oneliner
│ ├── approve_review
│ ├── back_to_back_span
│ ├── cross_file_span_ignored
│ ├── cross_hunk_span
│ ├── deleted_file
│ ├── empty_file
│ ├── file_comment
│ ├── hunk_start_no_trailing_whitespace
│ ├── inline_and_review_comments_with_pr_description
│ ├── multiline_comment
│ ├── multiple_files
│ ├── reject_review
│ ├── review/
│ │ ├── apply_pr/
│ │ │ └── review/
│ │ │ ├── .1
│ │ │ └── 1.prr
│ │ ├── pr_description/
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── pr_description_interleaving/
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_comments/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_multiple/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── snip_single/
│ │ │ ├── gold
│ │ │ ├── metadata
│ │ │ └── review
│ │ ├── status/
│ │ │ ├── metadata
│ │ │ └── review
│ │ └── trailing_whitespace/
│ │ ├── metadata
│ │ └── review
│ ├── review_comment
│ ├── review_comment_whitespace
│ ├── review_comments_interleaved_with_pr_description
│ ├── single_comment
│ ├── spaces_in_filename
│ ├── testgitrepo/
│ │ ├── README-applied.md
│ │ └── README.md
│ ├── trailing_comment
│ ├── unknown_directive
│ ├── unterminated_back_to_back_span
│ └── unterminated_span
└── vim/
├── ftdetect/
│ └── prr.vim
├── ftplugin/
│ └── prr.vim
└── syntax/
└── prr.vim
SYMBOL INDEX (136 symbols across 6 files)
FILE: build.rs
constant LONG_ABOUT (line 5) | const LONG_ABOUT: &str =
function main (line 16) | fn main() -> std::io::Result<()> {
FILE: src/cli.rs
type Command (line 5) | pub(crate) enum Command {
type Cli (line 55) | pub struct Cli {
FILE: src/main.rs
constant LOCAL_CONFIG_FILE_NAME (line 17) | pub const LOCAL_CONFIG_FILE_NAME: &str = ".prr.toml";
function find_project_config_file (line 20) | fn find_project_config_file() -> Option<PathBuf> {
function open_review (line 35) | fn open_review(file: &Path) -> Result<()> {
function main (line 55) | async fn main() -> Result<()> {
FILE: src/parser.rs
type LineLocation (line 26) | pub enum LineLocation {
type InlineComment (line 35) | pub struct InlineComment {
type FileComment (line 49) | pub struct FileComment {
type ReviewAction (line 59) | pub enum ReviewAction {
type Comment (line 67) | pub enum Comment {
type StartState (line 79) | struct StartState {
type FilePreambleState (line 94) | struct FilePreambleState {
type FileDiffState (line 102) | struct FileDiffState {
type SpanStartOrCommentState (line 116) | struct SpanStartOrCommentState {
type CommentState (line 121) | struct CommentState {
type State (line 142) | enum State {
type ReviewParser (line 159) | pub struct ReviewParser {
method new (line 240) | pub fn new() -> ReviewParser {
method parse_line (line 246) | pub fn parse_line(&mut self, mut line: &str) -> Result<Option<Comment>> {
method finish (line 500) | pub fn finish(self) -> Option<Comment> {
function is_diff_header (line 163) | fn is_diff_header(s: &str) -> bool {
function is_prr_directive (line 170) | fn is_prr_directive(s: &str) -> Option<&str> {
function parse_diff_header (line 180) | fn parse_diff_header(line: &str) -> Result<String> {
function parse_hunk_start (line 191) | fn parse_hunk_start(line: &str) -> Result<Option<(u64, u64)>> {
function is_left_line (line 224) | fn is_left_line(line: &str) -> bool {
function get_next_lines (line 229) | fn get_next_lines(line: &str, left: u64, right: u64) -> (u64, u64) {
function test_fail (line 517) | fn test_fail(input: &str) {
function test (line 529) | fn test(input: &str, expected: &[Comment]) {
function single_comment (line 552) | fn single_comment() {
function approve_review (line 565) | fn approve_review() {
function reject_review (line 581) | fn reject_review() {
function review_comment (line 597) | fn review_comment() {
function file_comment (line 613) | fn file_comment() {
function review_comment_whitespace (line 624) | fn review_comment_whitespace() {
function multiline_comment (line 635) | fn multiline_comment() {
function back_to_back_span (line 648) | fn back_to_back_span() {
function multiple_files (line 669) | fn multiple_files() {
function hunk_start_no_trailing_whitespace (line 690) | fn hunk_start_no_trailing_whitespace() {
function add_oneliner (line 703) | fn add_oneliner() {
function deleted_file (line 724) | fn deleted_file() {
function empty_file (line 737) | fn empty_file() {
function trailing_comment (line 750) | fn trailing_comment() {
function spaces_in_filename (line 764) | fn spaces_in_filename() {
function unterminated_span (line 777) | fn unterminated_span() {
function cross_file_span_ignored (line 783) | fn cross_file_span_ignored() {
function unterminated_back_to_back_span (line 789) | fn unterminated_back_to_back_span() {
function cross_hunk_span (line 795) | fn cross_hunk_span() {
function unknown_directive (line 801) | fn unknown_directive() {
function hunk_oneliner_regex (line 807) | fn hunk_oneliner_regex() {
function hunk_normal_regex (line 818) | fn hunk_normal_regex() {
function hunk_only_one_line_on_each_side (line 829) | fn hunk_only_one_line_on_each_side() {
function inline_and_review_comments_with_pr_description_present (line 840) | fn inline_and_review_comments_with_pr_description_present() {
function review_comments_interleaved_with_pr_description (line 857) | fn review_comments_interleaved_with_pr_description() {
FILE: src/prr.rs
constant GITHUB_BASE_URL (line 27) | const GITHUB_BASE_URL: &str = "https://api.github.com";
function resolve_github_token (line 36) | fn resolve_github_token<F>(config_token: Option<&str>, env_lookup: F) ->...
type PrrConfig (line 66) | struct PrrConfig {
type PrrLocalConfig (line 84) | struct PrrLocalConfig {
type Config (line 92) | struct Config {
method url (line 109) | fn url(&self) -> String {
type Prr (line 98) | pub struct Prr {
method new (line 136) | pub fn new(config_path: &Path, local_config_path: Option<PathBuf>) -> ...
method workdir (line 175) | fn workdir(&self) -> Result<PathBuf> {
method is_pr_metadata_experiment_active (line 218) | pub fn is_pr_metadata_experiment_active(&self) -> bool {
method parse_pr_str (line 224) | pub fn parse_pr_str(&self, s: &str) -> Result<(String, String, u64)> {
method get_pr (line 271) | pub async fn get_pr(
method get_review (line 306) | pub fn get_review(&self, owner: &str, repo: &str, pr_num: u64) -> Resu...
method submit_pr (line 311) | pub async fn submit_pr(&self, owner: &str, repo: &str, pr_num: u64, de...
method submit_review (line 383) | async fn submit_review(
method submit_file_comment (line 427) | async fn submit_file_comment(
method apply_pr (line 472) | pub fn apply_pr(&self, owner: &str, repo: &str, pr_num: u64, apply_rep...
method print_status (line 494) | pub fn print_status(&self, no_titles: bool) -> Result<()> {
method remove (line 519) | pub async fn remove(&self, prs: &[String], force: bool, submitted: boo...
function config (line 556) | fn config(global: &str, local: Option<&str>) -> (Prr, TempDir) {
function test_parse_basic_pr_str (line 589) | async fn test_parse_basic_pr_str() {
function test_parse_dotted_pr_str (line 598) | async fn test_parse_dotted_pr_str() {
function test_parse_underscored_pr_str (line 607) | async fn test_parse_underscored_pr_str() {
function test_parse_dashed_pr_str (line 616) | async fn test_parse_dashed_pr_str() {
function test_parse_numbered_pr_str (line 625) | async fn test_parse_numbered_pr_str() {
function test_parse_mixed_pr_str (line 634) | async fn test_parse_mixed_pr_str() {
function test_parse_github_url (line 643) | async fn test_parse_github_url() {
function test_parse_github_url_with_extra_path (line 652) | async fn test_parse_github_url_with_extra_path() {
function test_parse_github_url_with_complex_path (line 661) | async fn test_parse_github_url_with_complex_path() {
function test_parse_custom_github_host (line 670) | async fn test_parse_custom_github_host() {
function test_local_config_repository (line 679) | async fn test_local_config_repository() {
function test_global_workdir (line 697) | async fn test_global_workdir() {
function test_local_workdir (line 709) | async fn test_local_workdir() {
function test_local_workdir_relative (line 724) | async fn test_local_workdir_relative() {
function test_local_workdir_override (line 742) | async fn test_local_workdir_override() {
function test_invalid_relative_workdir (line 758) | async fn test_invalid_relative_workdir() {
function copy_dir_all (line 769) | fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) {
function test_resolve_github_token_with_no_config_token_fallback_to_env (line 784) | fn test_resolve_github_token_with_no_config_token_fallback_to_env() {
function test_resolve_github_token_with_no_config_token_no_env_error (line 797) | fn test_resolve_github_token_with_no_config_token_no_env_error() {
function test_resolve_github_token_config_token_preferred_over_env (line 809) | fn test_resolve_github_token_config_token_preferred_over_env() {
function test_resolve_github_token_empty_config_token_falls_back_to_env (line 822) | fn test_resolve_github_token_empty_config_token_falls_back_to_env() {
function test_resolve_github_token_env_var_precedence (line 835) | fn test_resolve_github_token_env_var_precedence() {
function test_resolve_github_token_empty_env_var_error (line 850) | fn test_resolve_github_token_empty_env_var_error() {
function test_apply_pr (line 866) | async fn test_apply_pr() {
FILE: src/review.rs
constant SNIP_VARIANTS (line 15) | const SNIP_VARIANTS: &[&str] = &["[..]", "[...]"];
type Review (line 18) | pub struct Review {
method new (line 230) | pub fn new(
method new_existing (line 307) | pub fn new_existing(workdir: &Path, owner: &str, repo: &str, pr_num: u...
method comments (line 319) | pub fn comments(&self) -> Result<(ReviewAction, String, Vec<InlineComm...
method mark_submitted (line 367) | pub fn mark_submitted(&self) -> Result<()> {
method resolve_snips (line 392) | fn resolve_snips(&self, contents: &str) -> Result<String> {
method validate_review_file (line 419) | fn validate_review_file(&self, contents: &str) -> Result<()> {
method reviewed (line 464) | fn reviewed(&self) -> Result<bool> {
method path (line 473) | pub fn path(&self) -> PathBuf {
method metadata (line 483) | fn metadata(&self) -> Result<ReviewMetadata> {
method has_metadata (line 489) | fn has_metadata(&self) -> bool {
method metadata_path (line 493) | fn metadata_path(&self) -> PathBuf {
method commit_id (line 501) | pub fn commit_id(&self) -> Result<Option<String>> {
method diff (line 506) | pub fn diff(&self) -> Result<String> {
method handle (line 511) | pub fn handle(&self) -> String {
method status (line 516) | pub fn status(&self) -> Result<ReviewStatus> {
method remove (line 531) | pub fn remove(self, force: bool) -> Result<()> {
type ReviewMetadata (line 31) | struct ReviewMetadata {
type ReviewStatus (line 42) | pub enum ReviewStatus {
type LineType (line 52) | enum LineType<'a> {
method fmt (line 62) | fn fmt(&self, f: &mut Formatter<'_>) -> fmt_result {
function from (line 74) | fn from(line: &'a str) -> Self {
function prefix_lines (line 85) | fn prefix_lines(s: &str, prefix: &str) -> String {
function get_all_existing (line 101) | pub fn get_all_existing(workdir: &Path) -> Result<Vec<Review>> {
function resolve_snips_recurse (line 169) | fn resolve_snips_recurse<'a>(pattern: &[LineType<'a>], text: &[&'a str])...
function setup (line 556) | fn setup(review: &str, metadata: &str) -> (Review, TempDir) {
function test_validate_stripped (line 584) | fn test_validate_stripped() {
function test_review_status (line 595) | fn test_review_status() {
function test_review_validation_with_pr_description (line 624) | fn test_review_validation_with_pr_description() {
function test_review_validation_with_interleaving_pr_description (line 634) | fn test_review_validation_with_interleaving_pr_description() {
function test_new_review (line 645) | fn test_new_review() {
function test_snip_single (line 668) | fn test_snip_single() {
function test_snip_multiple (line 678) | fn test_snip_multiple() {
function test_snip_comments (line 688) | fn test_snip_comments() {
function test_snip_single_exhaustive (line 700) | fn test_snip_single_exhaustive() {
Condensed preview — 60 files, each showing its path, character count, and a content snippet. Download the .json file or copy the output to get the full structured content (184K chars).
[
{
"path": ".github/workflows/docs.yml",
"chars": 1127,
"preview": "name: Deploy docs\non:\n push:\n branches:\n - master\n\njobs:\n deploy:\n runs-on: ubuntu-latest\n permissions:\n"
},
{
"path": ".github/workflows/rust.yml",
"chars": 1035,
"preview": "name: Rust\n\non:\n push:\n branches: [ master ]\n pull_request:\n branches: [ master ]\n\nenv:\n CARGO_TERM_COLOR: alwa"
},
{
"path": ".gitignore",
"chars": 8,
"preview": "/target\n"
},
{
"path": "Cargo.toml",
"chars": 1022,
"preview": "[package]\nname = \"prr\"\ndescription = \"Mailing list style code reviews for github\"\nlicense = \"GPL-2.0-or-later\"\nrepositor"
},
{
"path": "LICENSE",
"chars": 18092,
"preview": " GNU GENERAL PUBLIC LICENSE\n Version 2, June 1991\n\n Copyright (C) 1989, 1991 Fr"
},
{
"path": "NOTES.md",
"chars": 1916,
"preview": "# TODO\n\n- [x] Parse review files\n - [x] Create parser\n - [x] Create `include_str!()` based unit-tests for expected"
},
{
"path": "README.md",
"chars": 827,
"preview": "# Pull request review\n\n[](https"
},
{
"path": "build.rs",
"chars": 1653,
"preview": "mod cli {\n include!(\"src/cli.rs\");\n}\n\nconst LONG_ABOUT: &str =\n \"prr is a tool that brings mailing list style code"
},
{
"path": "completions/_prr",
"chars": 2637,
"preview": "#compdef prr\n\n_prr_reviews() {\n local -a reviews\n local handle r_status file\n\n if (( ${_PRR_IN_COMPLETION:-0} )); the"
},
{
"path": "src/cli.rs",
"chars": 1638,
"preview": "use clap::{Parser, Subcommand};\nuse std::path::PathBuf;\n\n#[derive(Subcommand, Debug)]\npub(crate) enum Command {\n /// "
},
{
"path": "src/main.rs",
"chars": 2972,
"preview": "use std::env;\nuse std::path::{Path, PathBuf};\nuse std::process;\n\nuse anyhow::{bail, Context, Result};\nuse clap::Parser;\n"
},
{
"path": "src/parser.rs",
"chars": 30647,
"preview": "use anyhow::{anyhow, bail, Context, Result};\nuse lazy_static::lazy_static;\nuse regex::Regex;\n\n// Use lazy static to ensu"
},
{
"path": "src/prr.rs",
"chars": 30872,
"preview": "use std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse anyhow::{anyhow, bail, Context, Result};\nuse git2::{Appl"
},
{
"path": "src/review.rs",
"chars": 26959,
"preview": "use std::fmt::{Display, Formatter, Result as fmt_result, Write as fmt_write};\nuse std::fs;\nuse std::fs::OpenOptions;\nuse"
},
{
"path": "testdata/add_oneliner",
"chars": 171,
"preview": "> diff --git a/foo.rs b/foo.rs\n> new file mode 100644\n> index 0000000..5a64612\n> --- /dev/null\n> +++ b/foo.rs\n> @@ -0,0 "
},
{
"path": "testdata/approve_review",
"chars": 1428,
"preview": "@prr approve\n> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> "
},
{
"path": "testdata/back_to_back_span",
"chars": 1427,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/cross_file_span_ignored",
"chars": 1413,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/cross_hunk_span",
"chars": 2072,
"preview": "> diff --git a/ch2.txt b/ch2.txt\n> index 4d729e6..2641120 100644\n> --- a/ch2.txt\n> +++ b/ch2.txt\n> @@ -2,13 +2,6 @@ CHAP"
},
{
"path": "testdata/deleted_file",
"chars": 4092,
"preview": "> diff --git a/ch1.txt b/ch1.txt\n> deleted file mode 100644\n> index d30353f..0000000\n> --- a/ch1.txt\n> +++ /dev/null\n> @"
},
{
"path": "testdata/empty_file",
"chars": 1006,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index 0000000..fffb281 100644\n> --- /dev/null"
},
{
"path": "testdata/file_comment",
"chars": 1434,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n\nThis is a file-level comment!\n\n> index a26b2a5"
},
{
"path": "testdata/hunk_start_no_trailing_whitespace",
"chars": 4052,
"preview": "> diff --git a/ch5.txt b/ch5.txt\n> new file mode 100644\n> index 0000000..762722f\n> --- /dev/null\n> +++ b/ch5.txt\n> @@ -0"
},
{
"path": "testdata/inline_and_review_comments_with_pr_description",
"chars": 451,
"preview": "Not necessary.\n\n@prr reject\n\n> This is just for testing purposes.\n> diff --git a/README.md b/README.md\n> index eda592e.."
},
{
"path": "testdata/multiline_comment",
"chars": 1457,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/multiple_files",
"chars": 1423,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/reject_review",
"chars": 1427,
"preview": "@prr reject\n> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> -"
},
{
"path": "testdata/review/apply_pr/review/.1",
"chars": 312,
"preview": "{\"original\":\"Much better this way!\\n\\ndiff --git a/README.md b/README.md\\nindex 007306a..1a480f2 100644\\n--- a/README.md"
},
{
"path": "testdata/review/apply_pr/review/1.prr",
"chars": 237,
"preview": "> Much better this way!\n> \n> diff --git a/README.md b/README.md\n> index 007306a..1a480f2 100644\n> --- a/README.md\n> +++ "
},
{
"path": "testdata/review/pr_description/metadata",
"chars": 466,
"preview": "{\"original\":\"This is just for testing purposes.\\ndiff --git a/README.md b/README.md\\nindex eda592e..ba84732 100644\\n--- "
},
{
"path": "testdata/review/pr_description/review",
"chars": 465,
"preview": "Not necessary but seems ok.\n\n@prr approve\n\n> This is just for testing purposes.\n> diff --git a/README.md b/README.md\n> i"
},
{
"path": "testdata/review/pr_description_interleaving/metadata",
"chars": 466,
"preview": "{\"original\":\"This is just for testing purposes.\\ndiff --git a/README.md b/README.md\\nindex eda592e..ba84732 100644\\n--- "
},
{
"path": "testdata/review/pr_description_interleaving/review",
"chars": 480,
"preview": "Not necessary.\n\n@prr reject\n\n> This is just for testing purposes.\n\nThis might be fine or not.\n\n> diff --git a/README.md "
},
{
"path": "testdata/review/snip_comments/gold",
"chars": 1110,
"preview": "> diff --git a/src/review.rs b/src/review.rs\n\n\nfile comment!\n> index e39fd6f..e94680e 100644\n> --- a/src/review.rs\n> +++"
},
{
"path": "testdata/review/snip_comments/metadata",
"chars": 1145,
"preview": "{\"original\":\"diff --git a/src/review.rs b/src/review.rs\\nindex e39fd6f..e94680e 100644\\n--- a/src/review.rs\\n+++ b/src/r"
},
{
"path": "testdata/review/snip_comments/review",
"chars": 649,
"preview": "> diff --git a/src/review.rs b/src/review.rs\n\n\nfile comment!\n[...]\n> - if !force && review.status()? == ReviewSta"
},
{
"path": "testdata/review/snip_multiple/gold",
"chars": 1073,
"preview": "> diff --git a/src/review.rs b/src/review.rs\n> index e39fd6f..e94680e 100644\n> --- a/src/review.rs\n> +++ b/src/review.rs"
},
{
"path": "testdata/review/snip_multiple/metadata",
"chars": 1145,
"preview": "{\"original\":\"diff --git a/src/review.rs b/src/review.rs\\nindex e39fd6f..e94680e 100644\\n--- a/src/review.rs\\n+++ b/src/r"
},
{
"path": "testdata/review/snip_multiple/review",
"chars": 570,
"preview": "[...]\n> index e39fd6f..e94680e 100644\n[..]\n> +++ b/src/review.rs\n> @@ -155,7 +155,7 @@ impl Review {\n> fs::crea"
},
{
"path": "testdata/review/snip_single/gold",
"chars": 1073,
"preview": "> diff --git a/src/review.rs b/src/review.rs\n> index e39fd6f..e94680e 100644\n> --- a/src/review.rs\n> +++ b/src/review.rs"
},
{
"path": "testdata/review/snip_single/metadata",
"chars": 1145,
"preview": "{\"original\":\"diff --git a/src/review.rs b/src/review.rs\\nindex e39fd6f..e94680e 100644\\n--- a/src/review.rs\\n+++ b/src/r"
},
{
"path": "testdata/review/snip_single/review",
"chars": 332,
"preview": "> diff --git a/src/review.rs b/src/review.rs\n[...]\n> }\n> \n> + fn has_metadata(&self) -> bool {\n> + fs::m"
},
{
"path": "testdata/review/status/metadata",
"chars": 353,
"preview": "{\"original\":\"diff --git a/src/utils.cpp b/src/utils.cpp\\nindex b02c16bc3df..fca25d6c839 100644\\n--- a/src/utils.cpp\\n+++"
},
{
"path": "testdata/review/status/review",
"chars": 277,
"preview": "> diff --git a/src/utils.cpp b/src/utils.cpp\n> index b02c16bc3df..fca25d6c839 100644\n> --- a/src/utils.cpp\n> +++ b/src/u"
},
{
"path": "testdata/review/trailing_whitespace/metadata",
"chars": 2107,
"preview": "{\"original\":\"diff --git a/ch2.txt b/ch2.txt\\nindex 4d729e6..2641120 100644\\n--- a/ch2.txt\\n+++ b/ch2.txt\\n@@ -2,13 +2,6 "
},
{
"path": "testdata/review/trailing_whitespace/review",
"chars": 2027,
"preview": "> diff --git a/ch2.txt b/ch2.txt\n> index 4d729e6..2641120 100644\n> --- a/ch2.txt\n> +++ b/ch2.txt\n> @@ -2,13 +2,6 @@ CHAP"
},
{
"path": "testdata/review_comment",
"chars": 1432,
"preview": "\nReview comment\n\n> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 10064"
},
{
"path": "testdata/review_comment_whitespace",
"chars": 1434,
"preview": "\n@prr approve\n\nReview comment\n\n\n> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5"
},
{
"path": "testdata/review_comments_interleaved_with_pr_description",
"chars": 481,
"preview": "Not necessary.\n\n\n> This is just for testing purposes.\n\nThis might be fine or not.\n\n@prr reject\n\n> diff --git a/README.md"
},
{
"path": "testdata/single_comment",
"chars": 1415,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/spaces_in_filename",
"chars": 530,
"preview": "> diff --git a/build/scripts/grafana/provisioning/dashboards/Docker Prometheus Monitoring-1571332751387.json b/build/scr"
},
{
"path": "testdata/testgitrepo/README-applied.md",
"chars": 55,
"preview": "For testing\n===========\n\nThis is a better readme line!\n"
},
{
"path": "testdata/testgitrepo/README.md",
"chars": 42,
"preview": "For testing\n===========\n\nThis is a readme\n"
},
{
"path": "testdata/trailing_comment",
"chars": 4091,
"preview": "> diff --git a/ch1.txt b/ch1.txt\n> deleted file mode 100644\n> index d30353f..0000000\n> --- a/ch1.txt\n> +++ /dev/null\n> @"
},
{
"path": "testdata/unknown_directive",
"chars": 1425,
"preview": "@prr asdf\n> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> ---"
},
{
"path": "testdata/unterminated_back_to_back_span",
"chars": 1416,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "testdata/unterminated_span",
"chars": 1403,
"preview": "> diff --git a/libbpf-cargo/src/btf/btf.rs b/libbpf-cargo/src/btf/btf.rs\n> index a26b2a5..fffb281 100644\n> --- a/libbpf-"
},
{
"path": "vim/ftdetect/prr.vim",
"chars": 45,
"preview": "au BufNewFile,BufRead *.prr set filetype=prr\n"
},
{
"path": "vim/ftplugin/prr.vim",
"chars": 437,
"preview": "setlocal foldmethod=expr\nsetlocal foldexpr=s:DiffFoldLevel()\nsetlocal foldcolumn=3\n\n\" Adapted from https://github.com/sg"
},
{
"path": "vim/syntax/prr.vim",
"chars": 1094,
"preview": "\" Vim syntax file\n\" Language: prr\n\" Maintainer: Daniel Xu <dxu@dxuuu.xyz>\n\" Last Change: 2023 Nov 07\n\" Credits: "
}
]
About this extraction
This page contains the full source code of the danobi/prr GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 60 files (169.9 KB), approximately 44.9k tokens, and a symbol index with 136 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.