Full Code of raverrr/plution for AI

main 0a45d4e24ae4 cached
2 files
5.8 KB
1.7k tokens
5 symbols
1 requests
Download .txt
Repository: raverrr/plution
Branch: main
Commit: 0a45d4e24ae4
Files: 2
Total size: 5.8 KB

Directory structure:
gitextract_bg4ydtos/

├── README.md
└── plution.go

================================================
FILE CONTENTS
================================================

================================================
FILE: README.md
================================================
# plution
<h2>Prototype pollution scanner using headless chrome</h2>

![alt text](https://i.imgur.com/xumApSF.png)

# What this is
Plution is a convenient way to scan at scale for pages that are vulnerable to client side prototype pollution via a URL payload. In the default configuration, it will use a hardcoded payload that can detect 11 of the cases documented here: https://github.com/BlackFan/client-side-prototype-pollution/tree/master/pp

# What this is not
This is not a one stop shop. Prototype pollution is a complicated beast. This tool does nothing you couldn't do manually. This is not a polished bug-free super tool. It is functional but poorly coded and to be considered alpha at best.

# How it works
Plution appends a payload to supplied URLs, navigates to each URL with headless chrome and runs javascript on the page to verify if a prototype was successfully polluted.

# how it is used
* Basic scan, output only to screen:<br />
 `cat URLs.txt | plution`

* Scan with a supplied payload rather than hardcoded one:<br />
`cat URLs.txt|plution -p '__proto__.zzzc=example'`<br />
**Note on custom payloads: The variable you are hoping to inject must be called or render to "zzzc". This is because 'window.zzzc' will be run on each page to verify pollution.**

* Output:<br />
`Passing '-o' followed by a location will output only URLs of pages that were successfully polluted.`

* Concurrency:<br />
`Pass the '-c' option to specify how many concurrent jobs are run (default is 5)`

# questions and answers
* How do I install it?<br />
`go get -u github.com/raverrr/plution`

* why specifically limit it to checking if window.zzzc is defined?<br />
`zzzc is a short pattern that is unlikely to already be in a prototype. If you want more freedom in regards to the javascript use https://github.com/detectify/page-fetch instead`

* Got a more specific question?<br />
`Ask me on twitter @divadbate.`




================================================
FILE: plution.go
================================================
package main

import (
	"bufio"
	"context"
	"flag"
	"fmt"
	"log"
	"os"
	"regexp"
	"sync"
	"time"

	"github.com/chromedp/chromedp"

	"github.com/fatih/color"
)

// init prints the ASCII-art banner, version tag and credits to stdout
// before main runs. Output is line-for-line identical to a sequence of
// individual Println calls.
func init() {
	for _, line := range []string{
		color.YellowString("=================================================="),
		color.CyanString("       ▛▀▖▜    ▐  ▗             "),
		color.CyanString("       ▙▄▘▐ ▌ ▌▜▀ ▄ ▞▀▖▛▀▖      "),
		color.CyanString("       ▌  ▐ ▌ ▌▐ ▖▐ ▌ ▌▌ ▌      "),
		color.CyanString("▀▀▀▀▀▀ ▘   ▘▝▀▘ ▀ ▀▘▝▀ ▘ ▘▀▀▀▀▀▀") + "v0.1 By @divadbate",
		color.BlueString("Scans URLs for Prototype Pollution via query parameter."),
		color.YellowString("=================================================="),
		color.CyanString("Credits:"),
		"-@tomnomnom for inspiring me with Page-fetch",
		"-Blackfan (github.com/BlackFan/client-side-prototype-pollution)",
		color.YellowString("==================================================\n"),
	} {
		fmt.Println(line)
	}
}

// Command-line configuration, populated by flag parsing in main.
var output string        // -o: file path vulnerable URLs are appended to (default /dev/null)
var concurrency int      // -c: number of concurrent scanning workers (default 5)
var customPayload string // -p: user-supplied payload; empty means use the hardcoded default
var URLpayload string    // effective payload appended to each URL; set by urlPayload()

// main reads URLs from stdin, visits each with headless Chrome with the
// pollution payload appended, and reports URLs where window.zzzc became
// defined (i.e. the prototype was successfully polluted).
func main() {
	log.SetFlags(0) // suppress date and time on each log line

	flag.StringVar(&customPayload, "p", "", "--> Set custom URL payload (The variable RENDERED must be called 'zzzc')"+"\n")
	flag.StringVar(&output, "o", "/dev/null", "--> Output (Will only output vulnerable URLs)"+"\n")
	flag.IntVar(&concurrency, "c", 5, "--> Number of concurrent threads (default 5)"+"\n")
	flag.Parse()

	// Open (or create) the output file; vulnerable URLs are appended to it.
	file, err := os.OpenFile(output, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		log.Fatalf("failed creating file: %s", err)
	}
	defer file.Close()
	datawriter := bufio.NewWriter(file)

	// Allow scanning targets that serve invalid TLS certificates.
	copts := append(chromedp.DefaultExecAllocatorOptions[:],
		chromedp.Flag("ignore-certificate-errors", true),
	)

	ectx, ecancel := chromedp.NewExecAllocator(context.Background(), copts...)
	defer ecancel()

	pctx, pcancel := chromedp.NewContext(ectx)
	defer pcancel()

	// Start the browser once up front so each job opens a new tab in the
	// existing instance instead of launching a fresh browser per URL.
	// see: https://godoc.org/github.com/chromedp/chromedp#NewContext
	if err := chromedp.Run(pctx); err != nil {
		fmt.Fprintf(os.Stderr, "error starting browser: %s\n", err)
		return
	}

	sc := bufio.NewScanner(os.Stdin)

	var wg sync.WaitGroup
	jobs := make(chan string)

	// Worker pool: each goroutine pulls URLs from the jobs channel until it
	// closes, navigates with the payload appended, then evaluates
	// "window.zzzc" on the page to check for pollution.
	for i := 0; i < concurrency; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for requestURL := range jobs {
				// Per-URL deadline so a hung page cannot stall the worker.
				ctx, cancel := context.WithTimeout(pctx, time.Second*10)
				ctx, _ = chromedp.NewContext(ctx)
				var res string

				err := chromedp.Run(ctx,
					chromedp.Navigate(requestURL+hasQuery(requestURL)+URLpayload),
					chromedp.Evaluate("window.zzzc", &res),
				)

				// window.zzzc may render as an array rather than a string,
				// in which case chromedp fails to unmarshal it into res;
				// treat that specific error as a successful pollution too.
				// Guarding with err != nil fixes a nil-pointer panic the
				// original code hit when err was nil and res was empty.
				arrayHack := err != nil && err.Error() == "json: cannot unmarshal array into Go value of type string"

				if res != "" || arrayHack {
					log.Printf("%s: %v", color.GreenString("[+]")+requestURL, color.GreenString("Vulnerable!"))
					datawriter.WriteString(requestURL + "\n")
					datawriter.Flush()
				}

				if err != nil && !arrayHack {
					fmt.Println(color.RedString("[-]"), requestURL, color.RedString(err.Error()))
				}

				cancel()
			}
		}()
	}

	// Feed stdin lines (one URL per line) to the workers.
	for sc.Scan() {
		jobs <- sc.Text()
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintf(os.Stderr, "error reading stdin: %s\n", err)
	}
	close(jobs)
	wg.Wait()
}

// qmarkRe matches a literal '?' in a URL. Compiled once at package scope so
// hasQuery does not pay a regexp compile on every scanned URL.
var qmarkRe = regexp.MustCompile(`\?`)

// hasQuery returns the separator needed to append the payload as a query
// string: "&" when the URL already contains a query, "?" otherwise.
// As a side effect it (re)initialises the global URLpayload via urlPayload();
// the caller in main relies on this running before the payload is appended.
func hasQuery(url string) string {
	urlPayload()
	if qmarkRe.MatchString(url) {
		return "&"
	}
	return "?"
}

// urlPayload initialises the global URLpayload: the user-supplied -p value
// when one was given, otherwise a hardcoded multi-vector default that tries
// several prototype-pollution injection styles at once.
// TODO: add chunking support.
func urlPayload() {
	if containsEmpty(customPayload) {
		// No custom payload supplied: fall back to the hardcoded default.
		URLpayload = "constructor.prototype.zzzc=cccz&__proto__[zzzc]=cccz&constructor[prototype][zzzc]=cccz&__proto__.zzzc=cccz#__proto__[zzzc]=cccz"
	} else {
		URLpayload = customPayload
	}
}

// containsEmpty reports whether any of the supplied strings is empty.
// It is used to detect command-line flags the user left unset.
func containsEmpty(ss ...string) bool {
	for i := range ss {
		if len(ss[i]) == 0 {
			return true
		}
	}
	return false
}

Download .txt
gitextract_bg4ydtos/

├── README.md
└── plution.go
Download .txt
SYMBOL INDEX (5 symbols across 1 files)

FILE: plution.go
  function init (line 19) | func init() {
  function main (line 40) | func main() {
  function hasQuery (line 117) | func hasQuery(url string) string {
  function urlPayload (line 131) | func urlPayload() {
  function containsEmpty (line 142) | func containsEmpty(ss ...string) bool {
Condensed preview — 2 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (7K chars).
[
  {
    "path": "README.md",
    "chars": 1922,
    "preview": "# plution\n<h2>Prototype pollution scanner using headless chrome</h2>\n\n![alt text](https://i.imgur.com/xumApSF.png)\n\n# Wh"
  },
  {
    "path": "plution.go",
    "chars": 4051,
    "preview": "package main\n\nimport (\n\t\"bufio\"\n\t\"context\"\n\t\"flag\"\n\t\"fmt\"\n\t\"log\"\n\t\"os\"\n\t\"regexp\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/chromedp/"
  }
]

About this extraction

This page contains the full source code of the raverrr/plution GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 2 files (5.8 KB), approximately 1.7k tokens, and a symbol index with 5 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!