Repository: raverrr/plution
Branch: main
Commit: 0a45d4e24ae4
Files: 2
Total size: 5.8 KB
Directory structure:
gitextract_bg4ydtos/
├── README.md
└── plution.go
================================================
FILE CONTENTS
================================================
================================================
FILE: README.md
================================================
# plution
Prototype pollution scanner using headless chrome

# What this is
Plution is a convenient way to scan at scale for pages that are vulnerable to client side prototype pollution via a URL payload. In the default configuration, it will use a hardcoded payload that can detect 11 of the cases documented here: https://github.com/BlackFan/client-side-prototype-pollution/tree/master/pp
# What this is not
This is not a one stop shop. Prototype pollution is a complicated beast. This tool does nothing you couldn't do manually. This is not a polished bug-free super tool. It is functional but poorly coded and to be considered alpha at best.
# How it works
Plution appends a payload to supplied URLs, navigates to each URL with headless chrome and runs javascript on the page to verify if a prototype was successfully polluted.
# how it is used
* Basic scan, output only to screen:
`cat URLs.txt | plution`
* Scan with a supplied payload rather than hardcoded one:
`cat URLs.txt|plution -p '__proto__.zzzc=example'`
**Note on custom payloads: The variable you are hoping to inject must be named "zzzc" (or render to it). This is because 'window.zzzc' will be run on each page to verify pollution.**
* Output:
`Passing '-o' followed by a location will output only URLs of pages that were successfully polluted.`
* Concurrency:
* `Pass the '-c' option to specify how many concurrent jobs are run (default is 5)`
# questions and answers
* How do I install it?
`go get -u github.com/raverrr/plution`
* why specifically limit it to checking if window.zzzc is defined?
`zzzc is a short pattern that is unlikely to already be in a prototype. If you want more freedom in regards to the javascript use https://github.com/detectify/page-fetch instead`
* Got a more specific question?
`Ask me on twitter @divadbate.`
================================================
FILE: plution.go
================================================
package main
import (
"bufio"
"context"
"flag"
"fmt"
"log"
"os"
"regexp"
"sync"
"time"
"github.com/chromedp/chromedp"
"github.com/fatih/color"
)
// init prints the startup banner before main runs: a colored logo,
// a one-line description, and credits.
func init() {
	divider := color.YellowString("==================================================")
	fmt.Println(divider)
	// ASCII-art logo, one cyan line at a time.
	for _, row := range []string{
		" ▛▀▖▜ ▐ ▗ ",
		" ▙▄▘▐ ▌ ▌▜▀ ▄ ▞▀▖▛▀▖ ",
		" ▌ ▐ ▌ ▌▐ ▖▐ ▌ ▌▌ ▌ ",
	} {
		fmt.Println(color.CyanString(row))
	}
	fmt.Println(color.CyanString("▀▀▀▀▀▀ ▘ ▘▝▀▘ ▀ ▀▘▝▀ ▘ ▘▀▀▀▀▀▀") + "v0.1 By @divadbate")
	fmt.Println(color.BlueString("Scans URLs for Prototype Pollution via query parameter."))
	fmt.Println(divider)
	fmt.Println(color.CyanString("Credits:"))
	fmt.Println("-@tomnomnom for inspiring me with Page-fetch")
	fmt.Println("-Blackfan (github.com/BlackFan/client-side-prototype-pollution)")
	// Trailing newline lives inside the colored string, matching the
	// original output byte-for-byte.
	fmt.Println(color.YellowString("==================================================\n"))
}
// Package-level configuration, populated from command-line flags in main.
var (
	output        string // -o: file that receives URLs found vulnerable
	concurrency   int    // -c: number of concurrent worker goroutines
	customPayload string // -p: optional user-supplied payload override
	URLpayload    string // payload actually appended to each scanned URL
)
// main parses flags, starts a single shared headless-chrome instance,
// then fans URLs read from stdin out to `concurrency` worker goroutines.
// Each worker navigates to the URL with the pollution payload appended
// and evaluates window.zzzc to check whether the prototype was polluted.
func main() {
	log.SetFlags(0) // suppress date and time on each log line
	flag.StringVar(&customPayload, "p", "", "--> Set custom URL payload (The variable RENDERED must be called 'zzzc')"+"\n")
	flag.StringVar(&output, "o", "/dev/null", "--> Output (Will only output vulnerable URLs)"+"\n")
	flag.IntVar(&concurrency, "c", 5, "--> Number of concurrent threads (default 5)"+"\n")
	flag.Parse()

	// Open (or create) the output file; vulnerable URLs are appended to it.
	file, err := os.OpenFile(output, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		log.Fatalf("failed creating file: %s", err)
	}
	defer file.Close() // previously leaked

	datawriter := bufio.NewWriter(file)
	var writeMu sync.Mutex // bufio.Writer is not safe for concurrent use

	copts := append(chromedp.DefaultExecAllocatorOptions[:],
		chromedp.Flag("ignore-certificate-errors", true),
	)
	ectx, ecancel := chromedp.NewExecAllocator(context.Background(), copts...)
	defer ecancel()
	pctx, pcancel := chromedp.NewContext(ectx)
	defer pcancel()
	// start the browser to ensure we end up making new tabs in an
	// existing browser instead of making a new browser each time.
	// see: https://godoc.org/github.com/chromedp/chromedp#NewContext
	if err := chromedp.Run(pctx); err != nil {
		fmt.Fprintf(os.Stderr, "error starting browser: %s\n", err)
		return
	}

	// chromedp.Evaluate fails with this error when window.zzzc holds an
	// array; that specific failure still proves the pollution landed.
	const arrayUnmarshalErr = "json: cannot unmarshal array into Go value of type string"

	var wg sync.WaitGroup
	jobs := make(chan string)
	for i := 0; i < concurrency; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for requestURL := range jobs {
				ctx, cancel := context.WithTimeout(pctx, time.Second*10)
				ctx, _ = chromedp.NewContext(ctx)
				var res string
				err := chromedp.Run(ctx,
					chromedp.Navigate(requestURL+hasQuery(requestURL)+URLpayload),
					chromedp.Evaluate("window.zzzc", &res),
				)
				// Guard with err != nil before calling err.Error(): the
				// original dereferenced a possibly-nil error and panicked
				// when navigation succeeded but res was empty.
				polluted := err != nil && err.Error() == arrayUnmarshalErr
				if res != "" || polluted {
					log.Printf("%s: %v", color.GreenString("[+]")+requestURL, color.GreenString("Vulnerable!"))
					writeMu.Lock()
					datawriter.WriteString(requestURL + "\n")
					datawriter.Flush()
					writeMu.Unlock()
				}
				if err != nil && !polluted {
					fmt.Println(color.RedString("[-]"), requestURL, color.RedString(err.Error()))
				}
				cancel()
			}
		}()
	}

	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		jobs <- sc.Text()
	}
	if err := sc.Err(); err != nil {
		log.Printf("reading stdin: %v", err)
	}
	close(jobs)
	wg.Wait()
}
// queryMark matches a literal '?' anywhere in a URL. Compiled once at
// package scope instead of on every hasQuery call (the original
// recompiled the regexp per URL).
var queryMark = regexp.MustCompile(`\?`)

// hasQuery returns the separator to place between url and the payload:
// "&" when the URL already carries a query string, "?" otherwise.
// NOTE(review): it also refreshes the global URLpayload via
// urlPayload() — callers depend on this side effect, so it is kept.
func hasQuery(url string) string {
	urlPayload()
	if queryMark.MatchString(url) {
		return "&"
	}
	return "?"
}
// TODO: add chunking.

// urlPayload selects the payload appended to every scanned URL: the
// value of the -p flag when one was supplied, otherwise a hardcoded
// multi-vector payload that defines window.zzzc on successful pollution.
// The result is stored in the package-level URLpayload variable.
func urlPayload() {
	if !containsEmpty(customPayload) {
		URLpayload = customPayload
		return
	}
	URLpayload = "constructor.prototype.zzzc=cccz&__proto__[zzzc]=cccz&constructor[prototype][zzzc]=cccz&__proto__.zzzc=cccz#__proto__[zzzc]=cccz"
}
// containsEmpty reports whether any of the supplied strings is empty.
// It is used to detect command-line flags the user left unset.
func containsEmpty(ss ...string) bool {
	for i := range ss {
		if len(ss[i]) == 0 {
			return true
		}
	}
	return false
}